commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6b15bf92f8995542361ce1fe57f7b101f9ceba5e
|
flask_jsonapi/filters_schema.py
|
flask_jsonapi/filters_schema.py
|
import contextlib
import flask
class FilterSchema:
def __init__(self, fields: dict):
self.fields = fields
def parse(self):
result = {}
for name, field in self.fields.items():
with contextlib.suppress(KeyError):
result[name] = field.parse(name)
return result
class FilterField:
def __init__(self, *, field_name=None, parse_value=str):
self.field_name_override = field_name
self._parse_value = parse_value
def parse(self, field_name):
field_name = self.field_name_override or field_name
value_string = flask.request.args['filter[{}]'.format(field_name)]
return self.parse_value(value_string)
def parse_value(self, value_string):
return self._parse_value(value_string)
class ListFilterField(FilterField):
def parse_value(self, value_string):
return [self._parse_value(part) for part in value_string.split(',')]
|
import contextlib
import flask
from flask_jsonapi import exceptions
class FilterSchema:
def __init__(self, fields: dict):
self.fields = fields
def parse(self):
result = {}
for name, field in self.fields.items():
with contextlib.suppress(KeyError):
result[name] = field.parse(name)
return result
class FilterField:
def __init__(self, *, field_name=None, parse_value=str):
self.field_name_override = field_name
self._parse_value = parse_value
def parse(self, field_name):
field_name = self.field_name_override or field_name
value_string = flask.request.args['filter[{}]'.format(field_name)]
try:
return self.parse_value(value_string)
except ValueError as e:
raise exceptions.InvalidFilters('Error parsing {} filter: {}'.format(field_name, e))
def parse_value(self, value_string):
return self._parse_value(value_string)
class ListFilterField(FilterField):
def parse_value(self, value_string):
return [self._parse_value(part) for part in value_string.split(',')]
|
Return jsonapi error when parsing filters failed.
|
Return jsonapi error when parsing filters failed.
Change-Id: I4bd26823d9e29b31ab8fdc47b8ef2bb65071d27b
Reviewed-on: https://review.socialwifi.com/14069
Reviewed-by: Piotr Maliński <5f24252672783b9dd319151f284628d8f524ff27@socialwifi.com>
Tested-by: Jakub Skiepko <27b910087fee73ba587f81728e9a4e87eb24c8cc@socialwifi.com>
|
Python
|
bsd-3-clause
|
maruqu/flask-jsonapi
|
import contextlib
import flask
class FilterSchema:
def __init__(self, fields: dict):
self.fields = fields
def parse(self):
result = {}
for name, field in self.fields.items():
with contextlib.suppress(KeyError):
result[name] = field.parse(name)
return result
class FilterField:
def __init__(self, *, field_name=None, parse_value=str):
self.field_name_override = field_name
self._parse_value = parse_value
def parse(self, field_name):
field_name = self.field_name_override or field_name
value_string = flask.request.args['filter[{}]'.format(field_name)]
return self.parse_value(value_string)
def parse_value(self, value_string):
return self._parse_value(value_string)
class ListFilterField(FilterField):
def parse_value(self, value_string):
return [self._parse_value(part) for part in value_string.split(',')]
Return jsonapi error when parsing filters failed.
Change-Id: I4bd26823d9e29b31ab8fdc47b8ef2bb65071d27b
Reviewed-on: https://review.socialwifi.com/14069
Reviewed-by: Piotr Maliński <5f24252672783b9dd319151f284628d8f524ff27@socialwifi.com>
Tested-by: Jakub Skiepko <27b910087fee73ba587f81728e9a4e87eb24c8cc@socialwifi.com>
|
import contextlib
import flask
from flask_jsonapi import exceptions
class FilterSchema:
def __init__(self, fields: dict):
self.fields = fields
def parse(self):
result = {}
for name, field in self.fields.items():
with contextlib.suppress(KeyError):
result[name] = field.parse(name)
return result
class FilterField:
def __init__(self, *, field_name=None, parse_value=str):
self.field_name_override = field_name
self._parse_value = parse_value
def parse(self, field_name):
field_name = self.field_name_override or field_name
value_string = flask.request.args['filter[{}]'.format(field_name)]
try:
return self.parse_value(value_string)
except ValueError as e:
raise exceptions.InvalidFilters('Error parsing {} filter: {}'.format(field_name, e))
def parse_value(self, value_string):
return self._parse_value(value_string)
class ListFilterField(FilterField):
def parse_value(self, value_string):
return [self._parse_value(part) for part in value_string.split(',')]
|
<commit_before>import contextlib
import flask
class FilterSchema:
def __init__(self, fields: dict):
self.fields = fields
def parse(self):
result = {}
for name, field in self.fields.items():
with contextlib.suppress(KeyError):
result[name] = field.parse(name)
return result
class FilterField:
def __init__(self, *, field_name=None, parse_value=str):
self.field_name_override = field_name
self._parse_value = parse_value
def parse(self, field_name):
field_name = self.field_name_override or field_name
value_string = flask.request.args['filter[{}]'.format(field_name)]
return self.parse_value(value_string)
def parse_value(self, value_string):
return self._parse_value(value_string)
class ListFilterField(FilterField):
def parse_value(self, value_string):
return [self._parse_value(part) for part in value_string.split(',')]
<commit_msg>Return jsonapi error when parsing filters failed.
Change-Id: I4bd26823d9e29b31ab8fdc47b8ef2bb65071d27b
Reviewed-on: https://review.socialwifi.com/14069
Reviewed-by: Piotr Maliński <5f24252672783b9dd319151f284628d8f524ff27@socialwifi.com>
Tested-by: Jakub Skiepko <27b910087fee73ba587f81728e9a4e87eb24c8cc@socialwifi.com><commit_after>
|
import contextlib
import flask
from flask_jsonapi import exceptions
class FilterSchema:
def __init__(self, fields: dict):
self.fields = fields
def parse(self):
result = {}
for name, field in self.fields.items():
with contextlib.suppress(KeyError):
result[name] = field.parse(name)
return result
class FilterField:
def __init__(self, *, field_name=None, parse_value=str):
self.field_name_override = field_name
self._parse_value = parse_value
def parse(self, field_name):
field_name = self.field_name_override or field_name
value_string = flask.request.args['filter[{}]'.format(field_name)]
try:
return self.parse_value(value_string)
except ValueError as e:
raise exceptions.InvalidFilters('Error parsing {} filter: {}'.format(field_name, e))
def parse_value(self, value_string):
return self._parse_value(value_string)
class ListFilterField(FilterField):
def parse_value(self, value_string):
return [self._parse_value(part) for part in value_string.split(',')]
|
import contextlib
import flask
class FilterSchema:
def __init__(self, fields: dict):
self.fields = fields
def parse(self):
result = {}
for name, field in self.fields.items():
with contextlib.suppress(KeyError):
result[name] = field.parse(name)
return result
class FilterField:
def __init__(self, *, field_name=None, parse_value=str):
self.field_name_override = field_name
self._parse_value = parse_value
def parse(self, field_name):
field_name = self.field_name_override or field_name
value_string = flask.request.args['filter[{}]'.format(field_name)]
return self.parse_value(value_string)
def parse_value(self, value_string):
return self._parse_value(value_string)
class ListFilterField(FilterField):
def parse_value(self, value_string):
return [self._parse_value(part) for part in value_string.split(',')]
Return jsonapi error when parsing filters failed.
Change-Id: I4bd26823d9e29b31ab8fdc47b8ef2bb65071d27b
Reviewed-on: https://review.socialwifi.com/14069
Reviewed-by: Piotr Maliński <5f24252672783b9dd319151f284628d8f524ff27@socialwifi.com>
Tested-by: Jakub Skiepko <27b910087fee73ba587f81728e9a4e87eb24c8cc@socialwifi.com>import contextlib
import flask
from flask_jsonapi import exceptions
class FilterSchema:
def __init__(self, fields: dict):
self.fields = fields
def parse(self):
result = {}
for name, field in self.fields.items():
with contextlib.suppress(KeyError):
result[name] = field.parse(name)
return result
class FilterField:
def __init__(self, *, field_name=None, parse_value=str):
self.field_name_override = field_name
self._parse_value = parse_value
def parse(self, field_name):
field_name = self.field_name_override or field_name
value_string = flask.request.args['filter[{}]'.format(field_name)]
try:
return self.parse_value(value_string)
except ValueError as e:
raise exceptions.InvalidFilters('Error parsing {} filter: {}'.format(field_name, e))
def parse_value(self, value_string):
return self._parse_value(value_string)
class ListFilterField(FilterField):
def parse_value(self, value_string):
return [self._parse_value(part) for part in value_string.split(',')]
|
<commit_before>import contextlib
import flask
class FilterSchema:
def __init__(self, fields: dict):
self.fields = fields
def parse(self):
result = {}
for name, field in self.fields.items():
with contextlib.suppress(KeyError):
result[name] = field.parse(name)
return result
class FilterField:
def __init__(self, *, field_name=None, parse_value=str):
self.field_name_override = field_name
self._parse_value = parse_value
def parse(self, field_name):
field_name = self.field_name_override or field_name
value_string = flask.request.args['filter[{}]'.format(field_name)]
return self.parse_value(value_string)
def parse_value(self, value_string):
return self._parse_value(value_string)
class ListFilterField(FilterField):
def parse_value(self, value_string):
return [self._parse_value(part) for part in value_string.split(',')]
<commit_msg>Return jsonapi error when parsing filters failed.
Change-Id: I4bd26823d9e29b31ab8fdc47b8ef2bb65071d27b
Reviewed-on: https://review.socialwifi.com/14069
Reviewed-by: Piotr Maliński <5f24252672783b9dd319151f284628d8f524ff27@socialwifi.com>
Tested-by: Jakub Skiepko <27b910087fee73ba587f81728e9a4e87eb24c8cc@socialwifi.com><commit_after>import contextlib
import flask
from flask_jsonapi import exceptions
class FilterSchema:
def __init__(self, fields: dict):
self.fields = fields
def parse(self):
result = {}
for name, field in self.fields.items():
with contextlib.suppress(KeyError):
result[name] = field.parse(name)
return result
class FilterField:
def __init__(self, *, field_name=None, parse_value=str):
self.field_name_override = field_name
self._parse_value = parse_value
def parse(self, field_name):
field_name = self.field_name_override or field_name
value_string = flask.request.args['filter[{}]'.format(field_name)]
try:
return self.parse_value(value_string)
except ValueError as e:
raise exceptions.InvalidFilters('Error parsing {} filter: {}'.format(field_name, e))
def parse_value(self, value_string):
return self._parse_value(value_string)
class ListFilterField(FilterField):
def parse_value(self, value_string):
return [self._parse_value(part) for part in value_string.split(',')]
|
6e3fc4bea60130b02e0f35cec4d656faed9229cf
|
smarkets/__init__.py
|
smarkets/__init__.py
|
# Copyright (C) 2011 Smarkets Limited <support@smarkets.com>
#
# This module is released under the MIT License:
# http://www.opensource.org/licenses/mit-license.php
__version__ = '1.5.5'
def private(something):
something.__private__ = True
return something
__all__ = ()
|
# Copyright (C) 2011 Smarkets Limited <support@smarkets.com>
#
# This module is released under the MIT License:
# http://www.opensource.org/licenses/mit-license.php
__version__ = '2.0.0'
def private(something):
something.__private__ = True
return something
__all__ = ()
|
Change version number to reflect most recent changes
|
Change version number to reflect most recent changes
|
Python
|
mit
|
smarkets/smk_python_sdk
|
# Copyright (C) 2011 Smarkets Limited <support@smarkets.com>
#
# This module is released under the MIT License:
# http://www.opensource.org/licenses/mit-license.php
__version__ = '1.5.5'
def private(something):
something.__private__ = True
return something
__all__ = ()
Change version number to reflect most recent changes
|
# Copyright (C) 2011 Smarkets Limited <support@smarkets.com>
#
# This module is released under the MIT License:
# http://www.opensource.org/licenses/mit-license.php
__version__ = '2.0.0'
def private(something):
something.__private__ = True
return something
__all__ = ()
|
<commit_before># Copyright (C) 2011 Smarkets Limited <support@smarkets.com>
#
# This module is released under the MIT License:
# http://www.opensource.org/licenses/mit-license.php
__version__ = '1.5.5'
def private(something):
something.__private__ = True
return something
__all__ = ()
<commit_msg>Change version number to reflect most recent changes<commit_after>
|
# Copyright (C) 2011 Smarkets Limited <support@smarkets.com>
#
# This module is released under the MIT License:
# http://www.opensource.org/licenses/mit-license.php
__version__ = '2.0.0'
def private(something):
something.__private__ = True
return something
__all__ = ()
|
# Copyright (C) 2011 Smarkets Limited <support@smarkets.com>
#
# This module is released under the MIT License:
# http://www.opensource.org/licenses/mit-license.php
__version__ = '1.5.5'
def private(something):
something.__private__ = True
return something
__all__ = ()
Change version number to reflect most recent changes# Copyright (C) 2011 Smarkets Limited <support@smarkets.com>
#
# This module is released under the MIT License:
# http://www.opensource.org/licenses/mit-license.php
__version__ = '2.0.0'
def private(something):
something.__private__ = True
return something
__all__ = ()
|
<commit_before># Copyright (C) 2011 Smarkets Limited <support@smarkets.com>
#
# This module is released under the MIT License:
# http://www.opensource.org/licenses/mit-license.php
__version__ = '1.5.5'
def private(something):
something.__private__ = True
return something
__all__ = ()
<commit_msg>Change version number to reflect most recent changes<commit_after># Copyright (C) 2011 Smarkets Limited <support@smarkets.com>
#
# This module is released under the MIT License:
# http://www.opensource.org/licenses/mit-license.php
__version__ = '2.0.0'
def private(something):
something.__private__ = True
return something
__all__ = ()
|
3c901a198f6396a0c48a0766618b9971e795530f
|
board/views.py
|
board/views.py
|
from django.core.urlresolvers import reverse
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from django.views.generic.edit import CreateView
from board.forms import PostCreateForm
from board.mixins import BoardMixin, UserLoggingMixin
from board.models import Board, Post
class PostCreateView(BoardMixin, UserLoggingMixin, CreateView):
model = Post
form_class = PostCreateForm
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs['board'] = self.board
return kwargs
class PostDetailView(DetailView):
model = Post
def get_context_data(self, **kwargs):
kwargs['board'] = self.object.board
return super().get_context_data(**kwargs)
class PostListView(BoardMixin, ListView):
model = Post
pass
|
from django.core.urlresolvers import reverse
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from django.views.generic.edit import CreateView
from board.forms import PostCreateForm
from board.mixins import BoardMixin, UserLoggingMixin
from board.models import Board, Post
class PostCreateView(BoardMixin, UserLoggingMixin, CreateView):
model = Post
form_class = PostCreateForm
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs['board'] = self.board
return kwargs
class PostDetailView(DetailView):
model = Post
def get_context_data(self, **kwargs):
kwargs['board'] = self.object.board
return super().get_context_data(**kwargs)
class PostListView(BoardMixin, ListView):
pagenate_by = 20
def get_queryset(self):
return Post.objects.filter(board=self.board).order_by('-created_time')
|
Add filtering and ordering to PostListView
|
Add filtering and ordering to PostListView
|
Python
|
mit
|
devunt/hydrocarbon,devunt/hydrocarbon,devunt/hydrocarbon
|
from django.core.urlresolvers import reverse
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from django.views.generic.edit import CreateView
from board.forms import PostCreateForm
from board.mixins import BoardMixin, UserLoggingMixin
from board.models import Board, Post
class PostCreateView(BoardMixin, UserLoggingMixin, CreateView):
model = Post
form_class = PostCreateForm
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs['board'] = self.board
return kwargs
class PostDetailView(DetailView):
model = Post
def get_context_data(self, **kwargs):
kwargs['board'] = self.object.board
return super().get_context_data(**kwargs)
class PostListView(BoardMixin, ListView):
model = Post
pass
Add filtering and ordering to PostListView
|
from django.core.urlresolvers import reverse
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from django.views.generic.edit import CreateView
from board.forms import PostCreateForm
from board.mixins import BoardMixin, UserLoggingMixin
from board.models import Board, Post
class PostCreateView(BoardMixin, UserLoggingMixin, CreateView):
model = Post
form_class = PostCreateForm
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs['board'] = self.board
return kwargs
class PostDetailView(DetailView):
model = Post
def get_context_data(self, **kwargs):
kwargs['board'] = self.object.board
return super().get_context_data(**kwargs)
class PostListView(BoardMixin, ListView):
pagenate_by = 20
def get_queryset(self):
return Post.objects.filter(board=self.board).order_by('-created_time')
|
<commit_before>from django.core.urlresolvers import reverse
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from django.views.generic.edit import CreateView
from board.forms import PostCreateForm
from board.mixins import BoardMixin, UserLoggingMixin
from board.models import Board, Post
class PostCreateView(BoardMixin, UserLoggingMixin, CreateView):
model = Post
form_class = PostCreateForm
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs['board'] = self.board
return kwargs
class PostDetailView(DetailView):
model = Post
def get_context_data(self, **kwargs):
kwargs['board'] = self.object.board
return super().get_context_data(**kwargs)
class PostListView(BoardMixin, ListView):
model = Post
pass
<commit_msg>Add filtering and ordering to PostListView<commit_after>
|
from django.core.urlresolvers import reverse
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from django.views.generic.edit import CreateView
from board.forms import PostCreateForm
from board.mixins import BoardMixin, UserLoggingMixin
from board.models import Board, Post
class PostCreateView(BoardMixin, UserLoggingMixin, CreateView):
model = Post
form_class = PostCreateForm
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs['board'] = self.board
return kwargs
class PostDetailView(DetailView):
model = Post
def get_context_data(self, **kwargs):
kwargs['board'] = self.object.board
return super().get_context_data(**kwargs)
class PostListView(BoardMixin, ListView):
pagenate_by = 20
def get_queryset(self):
return Post.objects.filter(board=self.board).order_by('-created_time')
|
from django.core.urlresolvers import reverse
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from django.views.generic.edit import CreateView
from board.forms import PostCreateForm
from board.mixins import BoardMixin, UserLoggingMixin
from board.models import Board, Post
class PostCreateView(BoardMixin, UserLoggingMixin, CreateView):
model = Post
form_class = PostCreateForm
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs['board'] = self.board
return kwargs
class PostDetailView(DetailView):
model = Post
def get_context_data(self, **kwargs):
kwargs['board'] = self.object.board
return super().get_context_data(**kwargs)
class PostListView(BoardMixin, ListView):
model = Post
pass
Add filtering and ordering to PostListViewfrom django.core.urlresolvers import reverse
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from django.views.generic.edit import CreateView
from board.forms import PostCreateForm
from board.mixins import BoardMixin, UserLoggingMixin
from board.models import Board, Post
class PostCreateView(BoardMixin, UserLoggingMixin, CreateView):
model = Post
form_class = PostCreateForm
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs['board'] = self.board
return kwargs
class PostDetailView(DetailView):
model = Post
def get_context_data(self, **kwargs):
kwargs['board'] = self.object.board
return super().get_context_data(**kwargs)
class PostListView(BoardMixin, ListView):
pagenate_by = 20
def get_queryset(self):
return Post.objects.filter(board=self.board).order_by('-created_time')
|
<commit_before>from django.core.urlresolvers import reverse
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from django.views.generic.edit import CreateView
from board.forms import PostCreateForm
from board.mixins import BoardMixin, UserLoggingMixin
from board.models import Board, Post
class PostCreateView(BoardMixin, UserLoggingMixin, CreateView):
model = Post
form_class = PostCreateForm
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs['board'] = self.board
return kwargs
class PostDetailView(DetailView):
model = Post
def get_context_data(self, **kwargs):
kwargs['board'] = self.object.board
return super().get_context_data(**kwargs)
class PostListView(BoardMixin, ListView):
model = Post
pass
<commit_msg>Add filtering and ordering to PostListView<commit_after>from django.core.urlresolvers import reverse
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from django.views.generic.edit import CreateView
from board.forms import PostCreateForm
from board.mixins import BoardMixin, UserLoggingMixin
from board.models import Board, Post
class PostCreateView(BoardMixin, UserLoggingMixin, CreateView):
model = Post
form_class = PostCreateForm
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs['board'] = self.board
return kwargs
class PostDetailView(DetailView):
model = Post
def get_context_data(self, **kwargs):
kwargs['board'] = self.object.board
return super().get_context_data(**kwargs)
class PostListView(BoardMixin, ListView):
pagenate_by = 20
def get_queryset(self):
return Post.objects.filter(board=self.board).order_by('-created_time')
|
d0feed675897570d92eeb7b801b8ba094171bee0
|
send_email.py
|
send_email.py
|
import smtplib
from email.mime.text import MIMEText
def process_email(data):
table_template = open('table_template.html', 'r').read()
same_guild_html = []
for game in data['same_guild']:
tt = table_template
same_guild_html.append(tt.format(**game))
games_html = []
for game in data['games']:
tt = table_template
games_html.append(tt.format(**game))
send(same_guild_html + games_html)
def connect():
gmail_user = 'username@gmail.com'
gmail_passwd = 'passsword'
server = smtplib.SMTP('smtp.gmail.com', 587)
server.ehlo()
server.starttls()
server.ehlo()
server.login(gmail_user, gmail_passwd)
return server
def send(data):
TO = 'to@somewhere.com'
FROM = 'username@gmail.com'
SUBJECT = 'KGS - DuelGo Results'
msg = MIMEText('\n'.join(data), 'html')
msg['To'] = TO
msg['From'] = FROM
msg['Subject'] = SUBJECT
server = connect()
try:
server.sendmail(FROM, [TO], msg.as_string())
print('\t ... sent')
except:
print('\t ... ERROR sending')
server.quit()
|
import datetime
import smtplib
from email.mime.text import MIMEText
def process_email(data):
table_template = open('table_template.html', 'r').read()
same_guild_html = []
for game in data['same_guild']:
tt = table_template
same_guild_html.append(tt.format(**game))
games_html = []
for game in data['games']:
tt = table_template
games_html.append(tt.format(**game))
send(same_guild_html + games_html)
def connect():
gmail_user = 'username@gmail.com'
gmail_passwd = 'passsword'
server = smtplib.SMTP('smtp.gmail.com', 587)
server.ehlo()
server.starttls()
server.ehlo()
server.login(gmail_user, gmail_passwd)
return server
def send(data):
TO = 'to@somewhere.com'
FROM = 'username@gmail.com'
SUBJECT = 'KGS - DuelGo Results - {}'.format(datetime.datetime.now().strftime('%m/%d/%Y %I:%M %p'))
msg = MIMEText('\n'.join(data), 'html')
msg['To'] = TO
msg['From'] = FROM
msg['Subject'] = SUBJECT
server = connect()
try:
server.sendmail(FROM, [TO], msg.as_string())
print('\t ... sent')
except:
print('\t ... ERROR sending')
server.quit()
|
Update email subject with datetime so as to not have it end up in a thread in email client.
|
Update email subject with datetime so as to not have it end up in a thread in email client.
|
Python
|
agpl-3.0
|
v01d-cypher/kgs_league_scorer,v01d-cypher/kgs_league_scorer
|
import smtplib
from email.mime.text import MIMEText
def process_email(data):
table_template = open('table_template.html', 'r').read()
same_guild_html = []
for game in data['same_guild']:
tt = table_template
same_guild_html.append(tt.format(**game))
games_html = []
for game in data['games']:
tt = table_template
games_html.append(tt.format(**game))
send(same_guild_html + games_html)
def connect():
gmail_user = 'username@gmail.com'
gmail_passwd = 'passsword'
server = smtplib.SMTP('smtp.gmail.com', 587)
server.ehlo()
server.starttls()
server.ehlo()
server.login(gmail_user, gmail_passwd)
return server
def send(data):
TO = 'to@somewhere.com'
FROM = 'username@gmail.com'
SUBJECT = 'KGS - DuelGo Results'
msg = MIMEText('\n'.join(data), 'html')
msg['To'] = TO
msg['From'] = FROM
msg['Subject'] = SUBJECT
server = connect()
try:
server.sendmail(FROM, [TO], msg.as_string())
print('\t ... sent')
except:
print('\t ... ERROR sending')
server.quit()
Update email subject with datetime so as to not have it end up in a thread in email client.
|
import datetime
import smtplib
from email.mime.text import MIMEText
def process_email(data):
table_template = open('table_template.html', 'r').read()
same_guild_html = []
for game in data['same_guild']:
tt = table_template
same_guild_html.append(tt.format(**game))
games_html = []
for game in data['games']:
tt = table_template
games_html.append(tt.format(**game))
send(same_guild_html + games_html)
def connect():
gmail_user = 'username@gmail.com'
gmail_passwd = 'passsword'
server = smtplib.SMTP('smtp.gmail.com', 587)
server.ehlo()
server.starttls()
server.ehlo()
server.login(gmail_user, gmail_passwd)
return server
def send(data):
TO = 'to@somewhere.com'
FROM = 'username@gmail.com'
SUBJECT = 'KGS - DuelGo Results - {}'.format(datetime.datetime.now().strftime('%m/%d/%Y %I:%M %p'))
msg = MIMEText('\n'.join(data), 'html')
msg['To'] = TO
msg['From'] = FROM
msg['Subject'] = SUBJECT
server = connect()
try:
server.sendmail(FROM, [TO], msg.as_string())
print('\t ... sent')
except:
print('\t ... ERROR sending')
server.quit()
|
<commit_before>import smtplib
from email.mime.text import MIMEText
def process_email(data):
table_template = open('table_template.html', 'r').read()
same_guild_html = []
for game in data['same_guild']:
tt = table_template
same_guild_html.append(tt.format(**game))
games_html = []
for game in data['games']:
tt = table_template
games_html.append(tt.format(**game))
send(same_guild_html + games_html)
def connect():
gmail_user = 'username@gmail.com'
gmail_passwd = 'passsword'
server = smtplib.SMTP('smtp.gmail.com', 587)
server.ehlo()
server.starttls()
server.ehlo()
server.login(gmail_user, gmail_passwd)
return server
def send(data):
TO = 'to@somewhere.com'
FROM = 'username@gmail.com'
SUBJECT = 'KGS - DuelGo Results'
msg = MIMEText('\n'.join(data), 'html')
msg['To'] = TO
msg['From'] = FROM
msg['Subject'] = SUBJECT
server = connect()
try:
server.sendmail(FROM, [TO], msg.as_string())
print('\t ... sent')
except:
print('\t ... ERROR sending')
server.quit()
<commit_msg>Update email subject with datetime so as to not have it end up in a thread in email client.<commit_after>
|
import datetime
import smtplib
from email.mime.text import MIMEText
def process_email(data):
table_template = open('table_template.html', 'r').read()
same_guild_html = []
for game in data['same_guild']:
tt = table_template
same_guild_html.append(tt.format(**game))
games_html = []
for game in data['games']:
tt = table_template
games_html.append(tt.format(**game))
send(same_guild_html + games_html)
def connect():
gmail_user = 'username@gmail.com'
gmail_passwd = 'passsword'
server = smtplib.SMTP('smtp.gmail.com', 587)
server.ehlo()
server.starttls()
server.ehlo()
server.login(gmail_user, gmail_passwd)
return server
def send(data):
TO = 'to@somewhere.com'
FROM = 'username@gmail.com'
SUBJECT = 'KGS - DuelGo Results - {}'.format(datetime.datetime.now().strftime('%m/%d/%Y %I:%M %p'))
msg = MIMEText('\n'.join(data), 'html')
msg['To'] = TO
msg['From'] = FROM
msg['Subject'] = SUBJECT
server = connect()
try:
server.sendmail(FROM, [TO], msg.as_string())
print('\t ... sent')
except:
print('\t ... ERROR sending')
server.quit()
|
import smtplib
from email.mime.text import MIMEText
def process_email(data):
table_template = open('table_template.html', 'r').read()
same_guild_html = []
for game in data['same_guild']:
tt = table_template
same_guild_html.append(tt.format(**game))
games_html = []
for game in data['games']:
tt = table_template
games_html.append(tt.format(**game))
send(same_guild_html + games_html)
def connect():
gmail_user = 'username@gmail.com'
gmail_passwd = 'passsword'
server = smtplib.SMTP('smtp.gmail.com', 587)
server.ehlo()
server.starttls()
server.ehlo()
server.login(gmail_user, gmail_passwd)
return server
def send(data):
TO = 'to@somewhere.com'
FROM = 'username@gmail.com'
SUBJECT = 'KGS - DuelGo Results'
msg = MIMEText('\n'.join(data), 'html')
msg['To'] = TO
msg['From'] = FROM
msg['Subject'] = SUBJECT
server = connect()
try:
server.sendmail(FROM, [TO], msg.as_string())
print('\t ... sent')
except:
print('\t ... ERROR sending')
server.quit()
Update email subject with datetime so as to not have it end up in a thread in email client.import datetime
import smtplib
from email.mime.text import MIMEText
def process_email(data):
table_template = open('table_template.html', 'r').read()
same_guild_html = []
for game in data['same_guild']:
tt = table_template
same_guild_html.append(tt.format(**game))
games_html = []
for game in data['games']:
tt = table_template
games_html.append(tt.format(**game))
send(same_guild_html + games_html)
def connect():
gmail_user = 'username@gmail.com'
gmail_passwd = 'passsword'
server = smtplib.SMTP('smtp.gmail.com', 587)
server.ehlo()
server.starttls()
server.ehlo()
server.login(gmail_user, gmail_passwd)
return server
def send(data):
TO = 'to@somewhere.com'
FROM = 'username@gmail.com'
SUBJECT = 'KGS - DuelGo Results - {}'.format(datetime.datetime.now().strftime('%m/%d/%Y %I:%M %p'))
msg = MIMEText('\n'.join(data), 'html')
msg['To'] = TO
msg['From'] = FROM
msg['Subject'] = SUBJECT
server = connect()
try:
server.sendmail(FROM, [TO], msg.as_string())
print('\t ... sent')
except:
print('\t ... ERROR sending')
server.quit()
|
<commit_before>import smtplib
from email.mime.text import MIMEText
def process_email(data):
table_template = open('table_template.html', 'r').read()
same_guild_html = []
for game in data['same_guild']:
tt = table_template
same_guild_html.append(tt.format(**game))
games_html = []
for game in data['games']:
tt = table_template
games_html.append(tt.format(**game))
send(same_guild_html + games_html)
def connect():
gmail_user = 'username@gmail.com'
gmail_passwd = 'passsword'
server = smtplib.SMTP('smtp.gmail.com', 587)
server.ehlo()
server.starttls()
server.ehlo()
server.login(gmail_user, gmail_passwd)
return server
def send(data):
TO = 'to@somewhere.com'
FROM = 'username@gmail.com'
SUBJECT = 'KGS - DuelGo Results'
msg = MIMEText('\n'.join(data), 'html')
msg['To'] = TO
msg['From'] = FROM
msg['Subject'] = SUBJECT
server = connect()
try:
server.sendmail(FROM, [TO], msg.as_string())
print('\t ... sent')
except:
print('\t ... ERROR sending')
server.quit()
<commit_msg>Update email subject with datetime so as to not have it end up in a thread in email client.<commit_after>import datetime
import smtplib
from email.mime.text import MIMEText
def process_email(data):
    """Render game-result rows into HTML tables and email them.

    ``data`` is expected to be a mapping with 'same_guild' and 'games'
    keys, each a list of dicts whose keys match the placeholders in
    table_template.html -- TODO confirm against the caller.
    """
    # Read the template once, closing the file deterministically
    # (the original open(...).read() leaked the file handle).
    with open('table_template.html', 'r') as template_file:
        table_template = template_file.read()
    # str.format does not mutate the template, so no per-iteration copy
    # (the old `tt = table_template` alias) is needed.
    same_guild_html = [table_template.format(**game) for game in data['same_guild']]
    games_html = [table_template.format(**game) for game in data['games']]
    send(same_guild_html + games_html)
def connect():
    """Open an authenticated, TLS-secured SMTP session to Gmail.

    Returns the live ``smtplib.SMTP`` connection; the caller is
    responsible for calling ``quit()`` on it when done.
    """
    account = 'username@gmail.com'
    password = 'passsword'
    smtp = smtplib.SMTP('smtp.gmail.com', 587)
    smtp.ehlo()
    smtp.starttls()  # upgrade to TLS before sending credentials
    smtp.ehlo()      # re-identify on the now-encrypted channel
    smtp.login(account, password)
    return smtp
def send(data):
    """Join the rendered HTML fragments and mail them as one message.

    ``data`` is an iterable of HTML strings; they are concatenated with
    newlines into a single HTML-bodied email.
    """
    TO = 'to@somewhere.com'
    FROM = 'username@gmail.com'
    # Timestamp in the subject keeps each run out of prior email threads.
    SUBJECT = 'KGS - DuelGo Results - {}'.format(datetime.datetime.now().strftime('%m/%d/%Y %I:%M %p'))
    msg = MIMEText('\n'.join(data), 'html')
    msg['To'] = TO
    msg['From'] = FROM
    msg['Subject'] = SUBJECT
    server = connect()
    try:
        server.sendmail(FROM, [TO], msg.as_string())
        print('\t ... sent')
    except smtplib.SMTPException:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # and programming errors are no longer silently swallowed.
        print('\t ... ERROR sending')
    finally:
        # Always close the SMTP session, even when sending fails
        # (the original skipped quit() on any uncaught exception).
        server.quit()
|
198d1d8827ffc04bf7f33e99bc929a33c8a7ba8c
|
src/sana/core/models/__init__.py
|
src/sana/core/models/__init__.py
|
"""
Data models for the core Sana data engine. These should be extended as
required.
:Authors: Sana dev team
:Version: 2.0
"""
from sana.core.models.concept import Concept, Relationship, RelationshipCategory
from sana.core.models.device import Device
from sana.core.models.encounter import Encounter
from sana.core.models.events import Event
from sana.core.models.notification import Notification
from sana.core.models.observation import Observation
from sana.core.models.observer import Observer
from sana.core.models.procedure import Procedure
from sana.core.models.subject import Subject
__all__ = ['Concept', 'Relationship','RelationshipCategory',
'Device',
'Encounter',
'Event',
'Notification',
'Observation',
'Observer',
'Procedure',
'Subject',]
|
"""
Data models for the core Sana data engine. These should be extended as
required.
:Authors: Sana dev team
:Version: 2.0
"""
from .concept import Concept, Relationship, RelationshipCategory
from .device import Device
from .encounter import Encounter
from .events import Event
from .notification import Notification
from .observation import Observation
from .observer import Observer
from .procedure import Procedure
from .subject import Patient
__all__ = ['Concept', 'Relationship','RelationshipCategory',
'Device',
'Encounter',
'Event',
'Notification',
'Observation',
'Observer',
'Procedure',
'Patient',]
|
Update to use relative imports.
|
Update to use relative imports.
|
Python
|
bsd-3-clause
|
SanaMobile/sana.mds,rryan/sana.mds,SanaMobile/sana.mds,SanaMobile/sana.mds,rryan/sana.mds,SanaMobile/sana.mds
|
"""
Data models for the core Sana data engine. These should be extended as
required.
:Authors: Sana dev team
:Version: 2.0
"""
from sana.core.models.concept import Concept, Relationship, RelationshipCategory
from sana.core.models.device import Device
from sana.core.models.encounter import Encounter
from sana.core.models.events import Event
from sana.core.models.notification import Notification
from sana.core.models.observation import Observation
from sana.core.models.observer import Observer
from sana.core.models.procedure import Procedure
from sana.core.models.subject import Subject
__all__ = ['Concept', 'Relationship','RelationshipCategory',
'Device',
'Encounter',
'Event',
'Notification',
'Observation',
'Observer',
'Procedure',
'Subject',]
Update to use relative imports.
|
"""
Data models for the core Sana data engine. These should be extended as
required.
:Authors: Sana dev team
:Version: 2.0
"""
from .concept import Concept, Relationship, RelationshipCategory
from .device import Device
from .encounter import Encounter
from .events import Event
from .notification import Notification
from .observation import Observation
from .observer import Observer
from .procedure import Procedure
from .subject import Patient
__all__ = ['Concept', 'Relationship','RelationshipCategory',
'Device',
'Encounter',
'Event',
'Notification',
'Observation',
'Observer',
'Procedure',
'Patient',]
|
<commit_before>"""
Data models for the core Sana data engine. These should be extended as
required.
:Authors: Sana dev team
:Version: 2.0
"""
from sana.core.models.concept import Concept, Relationship, RelationshipCategory
from sana.core.models.device import Device
from sana.core.models.encounter import Encounter
from sana.core.models.events import Event
from sana.core.models.notification import Notification
from sana.core.models.observation import Observation
from sana.core.models.observer import Observer
from sana.core.models.procedure import Procedure
from sana.core.models.subject import Subject
__all__ = ['Concept', 'Relationship','RelationshipCategory',
'Device',
'Encounter',
'Event',
'Notification',
'Observation',
'Observer',
'Procedure',
'Subject',]
<commit_msg>Update to use relative imports.<commit_after>
|
"""
Data models for the core Sana data engine. These should be extended as
required.
:Authors: Sana dev team
:Version: 2.0
"""
from .concept import Concept, Relationship, RelationshipCategory
from .device import Device
from .encounter import Encounter
from .events import Event
from .notification import Notification
from .observation import Observation
from .observer import Observer
from .procedure import Procedure
from .subject import Patient
__all__ = ['Concept', 'Relationship','RelationshipCategory',
'Device',
'Encounter',
'Event',
'Notification',
'Observation',
'Observer',
'Procedure',
'Patient',]
|
"""
Data models for the core Sana data engine. These should be extended as
required.
:Authors: Sana dev team
:Version: 2.0
"""
from sana.core.models.concept import Concept, Relationship, RelationshipCategory
from sana.core.models.device import Device
from sana.core.models.encounter import Encounter
from sana.core.models.events import Event
from sana.core.models.notification import Notification
from sana.core.models.observation import Observation
from sana.core.models.observer import Observer
from sana.core.models.procedure import Procedure
from sana.core.models.subject import Subject
__all__ = ['Concept', 'Relationship','RelationshipCategory',
'Device',
'Encounter',
'Event',
'Notification',
'Observation',
'Observer',
'Procedure',
'Subject',]
Update to use relative imports."""
Data models for the core Sana data engine. These should be extended as
required.
:Authors: Sana dev team
:Version: 2.0
"""
from .concept import Concept, Relationship, RelationshipCategory
from .device import Device
from .encounter import Encounter
from .events import Event
from .notification import Notification
from .observation import Observation
from .observer import Observer
from .procedure import Procedure
from .subject import Patient
__all__ = ['Concept', 'Relationship','RelationshipCategory',
'Device',
'Encounter',
'Event',
'Notification',
'Observation',
'Observer',
'Procedure',
'Patient',]
|
<commit_before>"""
Data models for the core Sana data engine. These should be extended as
required.
:Authors: Sana dev team
:Version: 2.0
"""
from sana.core.models.concept import Concept, Relationship, RelationshipCategory
from sana.core.models.device import Device
from sana.core.models.encounter import Encounter
from sana.core.models.events import Event
from sana.core.models.notification import Notification
from sana.core.models.observation import Observation
from sana.core.models.observer import Observer
from sana.core.models.procedure import Procedure
from sana.core.models.subject import Subject
__all__ = ['Concept', 'Relationship','RelationshipCategory',
'Device',
'Encounter',
'Event',
'Notification',
'Observation',
'Observer',
'Procedure',
'Subject',]
<commit_msg>Update to use relative imports.<commit_after>"""
Data models for the core Sana data engine. These should be extended as
required.
:Authors: Sana dev team
:Version: 2.0
"""
from .concept import Concept, Relationship, RelationshipCategory
from .device import Device
from .encounter import Encounter
from .events import Event
from .notification import Notification
from .observation import Observation
from .observer import Observer
from .procedure import Procedure
from .subject import Patient
__all__ = ['Concept', 'Relationship','RelationshipCategory',
'Device',
'Encounter',
'Event',
'Notification',
'Observation',
'Observer',
'Procedure',
'Patient',]
|
5531cac216918d4482858b5eb487003c67c96406
|
bluebottle/auth/tests/test_api.py
|
bluebottle/auth/tests/test_api.py
|
import json
import mock
from rest_framework import status
from bluebottle.test.utils import BluebottleTestCase
from bluebottle.test.factory_models.accounts import BlueBottleUserFactory
from django.core.urlresolvers import reverse
class UserTokenTestCase(BluebottleTestCase):
def setUp(self):
super(UserTokenTestCase, self).setUp()
self.init_projects()
self.user = BlueBottleUserFactory.create()
def test_authenticate_user(self):
"""
Test that we get a token from API when using credentials.
"""
res = self.client.post(
reverse("token-auth"),
data={'email': self.user.email, 'password': 'testing'}
)
self.assertEqual(res.status_code, status.HTTP_200_OK)
|
import json
import mock
from rest_framework import status
from bluebottle.test.utils import BluebottleTestCase
from bluebottle.test.factory_models.accounts import BlueBottleUserFactory
from django.core.urlresolvers import reverse
class UserTokenTestCase(BluebottleTestCase):
def setUp(self):
super(UserTokenTestCase, self).setUp()
self.init_projects()
self.user = BlueBottleUserFactory.create()
def test_authenticate_user(self):
"""
Test that we get a token from API when using credentials.
"""
response = self.client.post(
reverse("token-auth"),
data={'email': self.user.email, 'password': 'testing'}
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, 'token')
|
Add tests for API login
|
Add tests for API login
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,jfterpstra/bluebottle,jfterpstra/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
import json
import mock
from rest_framework import status
from bluebottle.test.utils import BluebottleTestCase
from bluebottle.test.factory_models.accounts import BlueBottleUserFactory
from django.core.urlresolvers import reverse
class UserTokenTestCase(BluebottleTestCase):
def setUp(self):
super(UserTokenTestCase, self).setUp()
self.init_projects()
self.user = BlueBottleUserFactory.create()
def test_authenticate_user(self):
"""
Test that we get a token from API when using credentials.
"""
res = self.client.post(
reverse("token-auth"),
data={'email': self.user.email, 'password': 'testing'}
)
self.assertEqual(res.status_code, status.HTTP_200_OK)
Add tests for API login
|
import json
import mock
from rest_framework import status
from bluebottle.test.utils import BluebottleTestCase
from bluebottle.test.factory_models.accounts import BlueBottleUserFactory
from django.core.urlresolvers import reverse
class UserTokenTestCase(BluebottleTestCase):
def setUp(self):
super(UserTokenTestCase, self).setUp()
self.init_projects()
self.user = BlueBottleUserFactory.create()
def test_authenticate_user(self):
"""
Test that we get a token from API when using credentials.
"""
response = self.client.post(
reverse("token-auth"),
data={'email': self.user.email, 'password': 'testing'}
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, 'token')
|
<commit_before>import json
import mock
from rest_framework import status
from bluebottle.test.utils import BluebottleTestCase
from bluebottle.test.factory_models.accounts import BlueBottleUserFactory
from django.core.urlresolvers import reverse
class UserTokenTestCase(BluebottleTestCase):
def setUp(self):
super(UserTokenTestCase, self).setUp()
self.init_projects()
self.user = BlueBottleUserFactory.create()
def test_authenticate_user(self):
"""
Test that we get a token from API when using credentials.
"""
res = self.client.post(
reverse("token-auth"),
data={'email': self.user.email, 'password': 'testing'}
)
self.assertEqual(res.status_code, status.HTTP_200_OK)
<commit_msg>Add tests for API login<commit_after>
|
import json
import mock
from rest_framework import status
from bluebottle.test.utils import BluebottleTestCase
from bluebottle.test.factory_models.accounts import BlueBottleUserFactory
from django.core.urlresolvers import reverse
class UserTokenTestCase(BluebottleTestCase):
def setUp(self):
super(UserTokenTestCase, self).setUp()
self.init_projects()
self.user = BlueBottleUserFactory.create()
def test_authenticate_user(self):
"""
Test that we get a token from API when using credentials.
"""
response = self.client.post(
reverse("token-auth"),
data={'email': self.user.email, 'password': 'testing'}
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, 'token')
|
import json
import mock
from rest_framework import status
from bluebottle.test.utils import BluebottleTestCase
from bluebottle.test.factory_models.accounts import BlueBottleUserFactory
from django.core.urlresolvers import reverse
class UserTokenTestCase(BluebottleTestCase):
def setUp(self):
super(UserTokenTestCase, self).setUp()
self.init_projects()
self.user = BlueBottleUserFactory.create()
def test_authenticate_user(self):
"""
Test that we get a token from API when using credentials.
"""
res = self.client.post(
reverse("token-auth"),
data={'email': self.user.email, 'password': 'testing'}
)
self.assertEqual(res.status_code, status.HTTP_200_OK)
Add tests for API loginimport json
import mock
from rest_framework import status
from bluebottle.test.utils import BluebottleTestCase
from bluebottle.test.factory_models.accounts import BlueBottleUserFactory
from django.core.urlresolvers import reverse
class UserTokenTestCase(BluebottleTestCase):
def setUp(self):
super(UserTokenTestCase, self).setUp()
self.init_projects()
self.user = BlueBottleUserFactory.create()
def test_authenticate_user(self):
"""
Test that we get a token from API when using credentials.
"""
response = self.client.post(
reverse("token-auth"),
data={'email': self.user.email, 'password': 'testing'}
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, 'token')
|
<commit_before>import json
import mock
from rest_framework import status
from bluebottle.test.utils import BluebottleTestCase
from bluebottle.test.factory_models.accounts import BlueBottleUserFactory
from django.core.urlresolvers import reverse
class UserTokenTestCase(BluebottleTestCase):
def setUp(self):
super(UserTokenTestCase, self).setUp()
self.init_projects()
self.user = BlueBottleUserFactory.create()
def test_authenticate_user(self):
"""
Test that we get a token from API when using credentials.
"""
res = self.client.post(
reverse("token-auth"),
data={'email': self.user.email, 'password': 'testing'}
)
self.assertEqual(res.status_code, status.HTTP_200_OK)
<commit_msg>Add tests for API login<commit_after>import json
import mock
from rest_framework import status
from bluebottle.test.utils import BluebottleTestCase
from bluebottle.test.factory_models.accounts import BlueBottleUserFactory
from django.core.urlresolvers import reverse
class UserTokenTestCase(BluebottleTestCase):
    """API tests for the token-auth login endpoint."""

    def setUp(self):
        # init_projects() seeds fixtures provided by the base test case
        # -- presumably project phases/themes; confirm in BluebottleTestCase.
        super(UserTokenTestCase, self).setUp()
        self.init_projects()
        self.user = BlueBottleUserFactory.create()

    def test_authenticate_user(self):
        """
        Test that we get a token from API when using credentials.
        """
        # 'testing' appears to be the default password assigned by
        # BlueBottleUserFactory -- TODO confirm against the factory.
        response = self.client.post(
            reverse("token-auth"),
            data={'email': self.user.email, 'password': 'testing'}
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # The JSON body must actually contain the token payload, not
        # just a 200 status.
        self.assertContains(response, 'token')
|
622e1e780b84a8e04c5af2d6758fb457ff92ea93
|
polymorphic/formsets/utils.py
|
polymorphic/formsets/utils.py
|
"""
Internal utils
"""
import django
def add_media(dest, media):
"""
Optimized version of django.forms.Media.__add__() that doesn't create new objects.
Only required for Django < 2.0
"""
if django.VERSION >= (2, 0):
combined = dest + media
dest._css = combined._css
dest._js = combined._js
else:
dest.add_css(media._css)
dest.add_js(media._js)
|
"""
Internal utils
"""
import django
def add_media(dest, media):
"""
Optimized version of django.forms.Media.__add__() that doesn't create new objects.
"""
if django.VERSION >= (2, 2):
dest._css_lists += media._css_lists
dest._js_lists += media._js_lists
elif django.VERSION >= (2, 0):
combined = dest + media
dest._css = combined._css
dest._js = combined._js
else:
dest.add_css(media._css)
dest.add_js(media._js)
|
Fix media-combining in formsets on Django 2.2
|
Fix media-combining in formsets on Django 2.2
|
Python
|
bsd-3-clause
|
chrisglass/django_polymorphic,chrisglass/django_polymorphic
|
"""
Internal utils
"""
import django
def add_media(dest, media):
"""
Optimized version of django.forms.Media.__add__() that doesn't create new objects.
Only required for Django < 2.0
"""
if django.VERSION >= (2, 0):
combined = dest + media
dest._css = combined._css
dest._js = combined._js
else:
dest.add_css(media._css)
dest.add_js(media._js)
Fix media-combining in formsets on Django 2.2
|
"""
Internal utils
"""
import django
def add_media(dest, media):
"""
Optimized version of django.forms.Media.__add__() that doesn't create new objects.
"""
if django.VERSION >= (2, 2):
dest._css_lists += media._css_lists
dest._js_lists += media._js_lists
elif django.VERSION >= (2, 0):
combined = dest + media
dest._css = combined._css
dest._js = combined._js
else:
dest.add_css(media._css)
dest.add_js(media._js)
|
<commit_before>"""
Internal utils
"""
import django
def add_media(dest, media):
"""
Optimized version of django.forms.Media.__add__() that doesn't create new objects.
Only required for Django < 2.0
"""
if django.VERSION >= (2, 0):
combined = dest + media
dest._css = combined._css
dest._js = combined._js
else:
dest.add_css(media._css)
dest.add_js(media._js)
<commit_msg>Fix media-combining in formsets on Django 2.2<commit_after>
|
"""
Internal utils
"""
import django
def add_media(dest, media):
"""
Optimized version of django.forms.Media.__add__() that doesn't create new objects.
"""
if django.VERSION >= (2, 2):
dest._css_lists += media._css_lists
dest._js_lists += media._js_lists
elif django.VERSION >= (2, 0):
combined = dest + media
dest._css = combined._css
dest._js = combined._js
else:
dest.add_css(media._css)
dest.add_js(media._js)
|
"""
Internal utils
"""
import django
def add_media(dest, media):
"""
Optimized version of django.forms.Media.__add__() that doesn't create new objects.
Only required for Django < 2.0
"""
if django.VERSION >= (2, 0):
combined = dest + media
dest._css = combined._css
dest._js = combined._js
else:
dest.add_css(media._css)
dest.add_js(media._js)
Fix media-combining in formsets on Django 2.2"""
Internal utils
"""
import django
def add_media(dest, media):
"""
Optimized version of django.forms.Media.__add__() that doesn't create new objects.
"""
if django.VERSION >= (2, 2):
dest._css_lists += media._css_lists
dest._js_lists += media._js_lists
elif django.VERSION >= (2, 0):
combined = dest + media
dest._css = combined._css
dest._js = combined._js
else:
dest.add_css(media._css)
dest.add_js(media._js)
|
<commit_before>"""
Internal utils
"""
import django
def add_media(dest, media):
"""
Optimized version of django.forms.Media.__add__() that doesn't create new objects.
Only required for Django < 2.0
"""
if django.VERSION >= (2, 0):
combined = dest + media
dest._css = combined._css
dest._js = combined._js
else:
dest.add_css(media._css)
dest.add_js(media._js)
<commit_msg>Fix media-combining in formsets on Django 2.2<commit_after>"""
Internal utils
"""
import django
def add_media(dest, media):
    """Merge *media* into *dest* in place.

    In-place equivalent of ``django.forms.Media.__add__()`` that avoids
    leaving a throwaway combined Media object behind, dispatching on the
    running Django version's internal representation.
    """
    version = django.VERSION
    if version < (2, 0):
        # Pre-2.0 Media still exposes the add_css/add_js mutators.
        dest.add_css(media._css)
        dest.add_js(media._js)
    elif version < (2, 2):
        # 2.0/2.1 removed the mutators; combine, then copy the dicts back.
        merged = dest + media
        dest._css = merged._css
        dest._js = merged._js
    else:
        # 2.2+ stores raw lists (_css_lists/_js_lists) and merges lazily.
        dest._css_lists += media._css_lists
        dest._js_lists += media._js_lists
3cbe02f1a5410148269113f7b8f41949086c9ac1
|
instance/tasks.py
|
instance/tasks.py
|
# -*- encoding: utf-8 -*-
#
# Copyright (c) 2015, OpenCraft
#
# Imports #####################################################################
from pprint import pprint
from django.conf import settings
from huey.djhuey import task
from .ansible import run_ansible_playbook, get_inventory_str, get_vars_str
from .gandi import GandiAPI
from .openstack import create_sandbox_server, get_nova_client, get_server_public_ip, sleep_until_port_open
# Tasks #######################################################################
@task()
def create_sandbox_instance(subdomain, instance_name):
nova = get_nova_client()
gandi = GandiAPI()
# Create server
server = create_sandbox_server(nova, subdomain)
# Update DNS
server_ip = get_server_public_ip(server)
gandi.set_dns_record(type='A', name=subdomain, value=server_ip)
# Run ansible sandbox playbook
sleep_until_port_open(server_ip, 22)
with run_ansible_playbook(
get_inventory_str(server_ip),
get_vars_str(
instance_name,
'{}.{}'.format(subdomain, settings.INSTANCES_BASE_DOMAIN)),
'edx_sandbox.yml',
username='admin',
) as processus:
for line in processus.stdout:
pprint(line.rstrip())
|
# -*- encoding: utf-8 -*-
#
# Copyright (c) 2015, OpenCraft
#
# Imports #####################################################################
from pprint import pprint
from django.conf import settings
from huey.djhuey import task
from .ansible import run_ansible_playbook, get_inventory_str, get_vars_str
from .gandi import GandiAPI
from .openstack import create_sandbox_server, get_nova_client, get_server_public_ip, sleep_until_port_open
# Tasks #######################################################################
@task()
def create_sandbox_instance(subdomain, instance_name):
nova = get_nova_client()
gandi = GandiAPI()
# Create server
server = create_sandbox_server(nova, subdomain)
# Update DNS
server_ip = get_server_public_ip(server)
gandi.set_dns_record(type='A', name=subdomain, value=server_ip)
# Run ansible sandbox playbook
sleep_until_port_open(server_ip, 22)
log_lines = []
with run_ansible_playbook(
get_inventory_str(server_ip),
get_vars_str(
instance_name,
'{}.{}'.format(subdomain, settings.INSTANCES_BASE_DOMAIN)),
'edx_sandbox.yml',
username='admin',
) as processus:
for line in processus.stdout:
line = line.rstrip()
log_lines.append(line)
pprint(line)
return log_lines
|
Return command output log in `create_sandbox_instance()`
|
Return command output log in `create_sandbox_instance()`
|
Python
|
agpl-3.0
|
brousch/opencraft,omarkhan/opencraft,omarkhan/opencraft,open-craft/opencraft,brousch/opencraft,open-craft/opencraft,omarkhan/opencraft,open-craft/opencraft,omarkhan/opencraft,brousch/opencraft,open-craft/opencraft,open-craft/opencraft
|
# -*- encoding: utf-8 -*-
#
# Copyright (c) 2015, OpenCraft
#
# Imports #####################################################################
from pprint import pprint
from django.conf import settings
from huey.djhuey import task
from .ansible import run_ansible_playbook, get_inventory_str, get_vars_str
from .gandi import GandiAPI
from .openstack import create_sandbox_server, get_nova_client, get_server_public_ip, sleep_until_port_open
# Tasks #######################################################################
@task()
def create_sandbox_instance(subdomain, instance_name):
nova = get_nova_client()
gandi = GandiAPI()
# Create server
server = create_sandbox_server(nova, subdomain)
# Update DNS
server_ip = get_server_public_ip(server)
gandi.set_dns_record(type='A', name=subdomain, value=server_ip)
# Run ansible sandbox playbook
sleep_until_port_open(server_ip, 22)
with run_ansible_playbook(
get_inventory_str(server_ip),
get_vars_str(
instance_name,
'{}.{}'.format(subdomain, settings.INSTANCES_BASE_DOMAIN)),
'edx_sandbox.yml',
username='admin',
) as processus:
for line in processus.stdout:
pprint(line.rstrip())
Return command output log in `create_sandbox_instance()`
|
# -*- encoding: utf-8 -*-
#
# Copyright (c) 2015, OpenCraft
#
# Imports #####################################################################
from pprint import pprint
from django.conf import settings
from huey.djhuey import task
from .ansible import run_ansible_playbook, get_inventory_str, get_vars_str
from .gandi import GandiAPI
from .openstack import create_sandbox_server, get_nova_client, get_server_public_ip, sleep_until_port_open
# Tasks #######################################################################
@task()
def create_sandbox_instance(subdomain, instance_name):
nova = get_nova_client()
gandi = GandiAPI()
# Create server
server = create_sandbox_server(nova, subdomain)
# Update DNS
server_ip = get_server_public_ip(server)
gandi.set_dns_record(type='A', name=subdomain, value=server_ip)
# Run ansible sandbox playbook
sleep_until_port_open(server_ip, 22)
log_lines = []
with run_ansible_playbook(
get_inventory_str(server_ip),
get_vars_str(
instance_name,
'{}.{}'.format(subdomain, settings.INSTANCES_BASE_DOMAIN)),
'edx_sandbox.yml',
username='admin',
) as processus:
for line in processus.stdout:
line = line.rstrip()
log_lines.append(line)
pprint(line)
return log_lines
|
<commit_before># -*- encoding: utf-8 -*-
#
# Copyright (c) 2015, OpenCraft
#
# Imports #####################################################################
from pprint import pprint
from django.conf import settings
from huey.djhuey import task
from .ansible import run_ansible_playbook, get_inventory_str, get_vars_str
from .gandi import GandiAPI
from .openstack import create_sandbox_server, get_nova_client, get_server_public_ip, sleep_until_port_open
# Tasks #######################################################################
@task()
def create_sandbox_instance(subdomain, instance_name):
nova = get_nova_client()
gandi = GandiAPI()
# Create server
server = create_sandbox_server(nova, subdomain)
# Update DNS
server_ip = get_server_public_ip(server)
gandi.set_dns_record(type='A', name=subdomain, value=server_ip)
# Run ansible sandbox playbook
sleep_until_port_open(server_ip, 22)
with run_ansible_playbook(
get_inventory_str(server_ip),
get_vars_str(
instance_name,
'{}.{}'.format(subdomain, settings.INSTANCES_BASE_DOMAIN)),
'edx_sandbox.yml',
username='admin',
) as processus:
for line in processus.stdout:
pprint(line.rstrip())
<commit_msg>Return command output log in `create_sandbox_instance()`<commit_after>
|
# -*- encoding: utf-8 -*-
#
# Copyright (c) 2015, OpenCraft
#
# Imports #####################################################################
from pprint import pprint
from django.conf import settings
from huey.djhuey import task
from .ansible import run_ansible_playbook, get_inventory_str, get_vars_str
from .gandi import GandiAPI
from .openstack import create_sandbox_server, get_nova_client, get_server_public_ip, sleep_until_port_open
# Tasks #######################################################################
@task()
def create_sandbox_instance(subdomain, instance_name):
nova = get_nova_client()
gandi = GandiAPI()
# Create server
server = create_sandbox_server(nova, subdomain)
# Update DNS
server_ip = get_server_public_ip(server)
gandi.set_dns_record(type='A', name=subdomain, value=server_ip)
# Run ansible sandbox playbook
sleep_until_port_open(server_ip, 22)
log_lines = []
with run_ansible_playbook(
get_inventory_str(server_ip),
get_vars_str(
instance_name,
'{}.{}'.format(subdomain, settings.INSTANCES_BASE_DOMAIN)),
'edx_sandbox.yml',
username='admin',
) as processus:
for line in processus.stdout:
line = line.rstrip()
log_lines.append(line)
pprint(line)
return log_lines
|
# -*- encoding: utf-8 -*-
#
# Copyright (c) 2015, OpenCraft
#
# Imports #####################################################################
from pprint import pprint
from django.conf import settings
from huey.djhuey import task
from .ansible import run_ansible_playbook, get_inventory_str, get_vars_str
from .gandi import GandiAPI
from .openstack import create_sandbox_server, get_nova_client, get_server_public_ip, sleep_until_port_open
# Tasks #######################################################################
@task()
def create_sandbox_instance(subdomain, instance_name):
nova = get_nova_client()
gandi = GandiAPI()
# Create server
server = create_sandbox_server(nova, subdomain)
# Update DNS
server_ip = get_server_public_ip(server)
gandi.set_dns_record(type='A', name=subdomain, value=server_ip)
# Run ansible sandbox playbook
sleep_until_port_open(server_ip, 22)
with run_ansible_playbook(
get_inventory_str(server_ip),
get_vars_str(
instance_name,
'{}.{}'.format(subdomain, settings.INSTANCES_BASE_DOMAIN)),
'edx_sandbox.yml',
username='admin',
) as processus:
for line in processus.stdout:
pprint(line.rstrip())
Return command output log in `create_sandbox_instance()`# -*- encoding: utf-8 -*-
#
# Copyright (c) 2015, OpenCraft
#
# Imports #####################################################################
from pprint import pprint
from django.conf import settings
from huey.djhuey import task
from .ansible import run_ansible_playbook, get_inventory_str, get_vars_str
from .gandi import GandiAPI
from .openstack import create_sandbox_server, get_nova_client, get_server_public_ip, sleep_until_port_open
# Tasks #######################################################################
@task()
def create_sandbox_instance(subdomain, instance_name):
nova = get_nova_client()
gandi = GandiAPI()
# Create server
server = create_sandbox_server(nova, subdomain)
# Update DNS
server_ip = get_server_public_ip(server)
gandi.set_dns_record(type='A', name=subdomain, value=server_ip)
# Run ansible sandbox playbook
sleep_until_port_open(server_ip, 22)
log_lines = []
with run_ansible_playbook(
get_inventory_str(server_ip),
get_vars_str(
instance_name,
'{}.{}'.format(subdomain, settings.INSTANCES_BASE_DOMAIN)),
'edx_sandbox.yml',
username='admin',
) as processus:
for line in processus.stdout:
line = line.rstrip()
log_lines.append(line)
pprint(line)
return log_lines
|
<commit_before># -*- encoding: utf-8 -*-
#
# Copyright (c) 2015, OpenCraft
#
# Imports #####################################################################
from pprint import pprint
from django.conf import settings
from huey.djhuey import task
from .ansible import run_ansible_playbook, get_inventory_str, get_vars_str
from .gandi import GandiAPI
from .openstack import create_sandbox_server, get_nova_client, get_server_public_ip, sleep_until_port_open
# Tasks #######################################################################
@task()
def create_sandbox_instance(subdomain, instance_name):
nova = get_nova_client()
gandi = GandiAPI()
# Create server
server = create_sandbox_server(nova, subdomain)
# Update DNS
server_ip = get_server_public_ip(server)
gandi.set_dns_record(type='A', name=subdomain, value=server_ip)
# Run ansible sandbox playbook
sleep_until_port_open(server_ip, 22)
with run_ansible_playbook(
get_inventory_str(server_ip),
get_vars_str(
instance_name,
'{}.{}'.format(subdomain, settings.INSTANCES_BASE_DOMAIN)),
'edx_sandbox.yml',
username='admin',
) as processus:
for line in processus.stdout:
pprint(line.rstrip())
<commit_msg>Return command output log in `create_sandbox_instance()`<commit_after># -*- encoding: utf-8 -*-
#
# Copyright (c) 2015, OpenCraft
#
# Imports #####################################################################
from pprint import pprint
from django.conf import settings
from huey.djhuey import task
from .ansible import run_ansible_playbook, get_inventory_str, get_vars_str
from .gandi import GandiAPI
from .openstack import create_sandbox_server, get_nova_client, get_server_public_ip, sleep_until_port_open
# Tasks #######################################################################
@task()
def create_sandbox_instance(subdomain, instance_name):
nova = get_nova_client()
gandi = GandiAPI()
# Create server
server = create_sandbox_server(nova, subdomain)
# Update DNS
server_ip = get_server_public_ip(server)
gandi.set_dns_record(type='A', name=subdomain, value=server_ip)
# Run ansible sandbox playbook
sleep_until_port_open(server_ip, 22)
log_lines = []
with run_ansible_playbook(
get_inventory_str(server_ip),
get_vars_str(
instance_name,
'{}.{}'.format(subdomain, settings.INSTANCES_BASE_DOMAIN)),
'edx_sandbox.yml',
username='admin',
) as processus:
for line in processus.stdout:
line = line.rstrip()
log_lines.append(line)
pprint(line)
return log_lines
|
20115684ea5ab52e0c51f43fd85aa9945560d103
|
interleave-pdf.py
|
interleave-pdf.py
|
import PyPDF2
from formlayout import fedit
def main():
paths = [('Input', ''), ('Output', '')]
pathsRead = fedit(paths,
title="Interleave pdf",
comment="Enter the full path to the source pdf and a path to output the result."
)
# Full path to files should be specified eg C:\Users\Sam\Documents\Input.pdf and C:\Users\Sam\Documents\Input.pdf
document = PyPDF2.PdfFileReader(pathsRead[0])
writer = PyPDF2.PdfFileWriter()
for page in document.pages:
writer.addPage(page)
writer.addBlankPage()
outputStream = open(pathsRead[1], 'wb')
writer.write(outputStream)
outputStream.close()
if __name__ == "__main__":
main()
|
import PyPDF2
from tkinter import *
from tkinter.filedialog import askopenfilename
from tkinter.filedialog import asksaveasfilename
class Application(Frame):
def __init__(self):
self.input_path = None;
self.output_path = None;
Frame.__init__(self)
self.master.resizable(False, False)
self.master.title('Interleave PDF')
self.grid()
self.button = Button(self, text="Select input", command=self.load_file, width=12)
self.button.grid(row=1, column=0, sticky=W)
self.button = Button(self, text="Select output", command=self.save_file, width=12)
self.button.grid(row=1, column=2, sticky=W)
self.button = Button(self, text="Interleave", command=self.interleave, width=12)
self.button.grid(row=1, column=3, sticky=W)
def load_file(self):
self.input_path = askopenfilename(filetypes=(("Adobe PDF Files", "*.pdf"), ("All files", "*.*")))
def save_file(self):
self.output_path = asksaveasfilename(filetypes=(("Adobe PDF Files", "*.pdf"), ("All files", "*.*")))
def interleave(self):
if self.input_path and self.output_path:
document = PyPDF2.PdfFileReader(self.input_path)
writer = PyPDF2.PdfFileWriter()
for page in document.pages:
writer.addPage(page)
writer.addBlankPage()
outputStream = open(self.output_path, 'wb')
writer.write(outputStream)
outputStream.close()
if __name__ == "__main__":
Application().mainloop()
|
Replace formlayout GUI with tkinter
|
Replace formlayout GUI with tkinter
Separate buttons for selecting input and output, and for running
the interleave procedure.
|
Python
|
mit
|
sproberts92/interleave-pdf
|
import PyPDF2
from formlayout import fedit
def main():
paths = [('Input', ''), ('Output', '')]
pathsRead = fedit(paths,
title="Interleave pdf",
comment="Enter the full path to the source pdf and a path to output the result."
)
# Full path to files should be specified eg C:\Users\Sam\Documents\Input.pdf and C:\Users\Sam\Documents\Input.pdf
document = PyPDF2.PdfFileReader(pathsRead[0])
writer = PyPDF2.PdfFileWriter()
for page in document.pages:
writer.addPage(page)
writer.addBlankPage()
outputStream = open(pathsRead[1], 'wb')
writer.write(outputStream)
outputStream.close()
if __name__ == "__main__":
main()
Replace formlayout GUI with tkinter
Separate buttons for selecting input and output, and for running
the interleave procedure.
|
import PyPDF2
from tkinter import *
from tkinter.filedialog import askopenfilename
from tkinter.filedialog import asksaveasfilename
class Application(Frame):
def __init__(self):
self.input_path = None;
self.output_path = None;
Frame.__init__(self)
self.master.resizable(False, False)
self.master.title('Interleave PDF')
self.grid()
self.button = Button(self, text="Select input", command=self.load_file, width=12)
self.button.grid(row=1, column=0, sticky=W)
self.button = Button(self, text="Select output", command=self.save_file, width=12)
self.button.grid(row=1, column=2, sticky=W)
self.button = Button(self, text="Interleave", command=self.interleave, width=12)
self.button.grid(row=1, column=3, sticky=W)
def load_file(self):
self.input_path = askopenfilename(filetypes=(("Adobe PDF Files", "*.pdf"), ("All files", "*.*")))
def save_file(self):
self.output_path = asksaveasfilename(filetypes=(("Adobe PDF Files", "*.pdf"), ("All files", "*.*")))
def interleave(self):
if self.input_path and self.output_path:
document = PyPDF2.PdfFileReader(self.input_path)
writer = PyPDF2.PdfFileWriter()
for page in document.pages:
writer.addPage(page)
writer.addBlankPage()
outputStream = open(self.output_path, 'wb')
writer.write(outputStream)
outputStream.close()
if __name__ == "__main__":
Application().mainloop()
|
<commit_before>import PyPDF2
from formlayout import fedit
def main():
paths = [('Input', ''), ('Output', '')]
pathsRead = fedit(paths,
title="Interleave pdf",
comment="Enter the full path to the source pdf and a path to output the result."
)
# Full path to files should be specified eg C:\Users\Sam\Documents\Input.pdf and C:\Users\Sam\Documents\Input.pdf
document = PyPDF2.PdfFileReader(pathsRead[0])
writer = PyPDF2.PdfFileWriter()
for page in document.pages:
writer.addPage(page)
writer.addBlankPage()
outputStream = open(pathsRead[1], 'wb')
writer.write(outputStream)
outputStream.close()
if __name__ == "__main__":
main()
<commit_msg>Replace formlayout GUI with tkinter
Separate buttons for selecting input and output, and for running
the interleave procedure.<commit_after>
|
import PyPDF2
from tkinter import *
from tkinter.filedialog import askopenfilename
from tkinter.filedialog import asksaveasfilename
class Application(Frame):
def __init__(self):
self.input_path = None;
self.output_path = None;
Frame.__init__(self)
self.master.resizable(False, False)
self.master.title('Interleave PDF')
self.grid()
self.button = Button(self, text="Select input", command=self.load_file, width=12)
self.button.grid(row=1, column=0, sticky=W)
self.button = Button(self, text="Select output", command=self.save_file, width=12)
self.button.grid(row=1, column=2, sticky=W)
self.button = Button(self, text="Interleave", command=self.interleave, width=12)
self.button.grid(row=1, column=3, sticky=W)
def load_file(self):
self.input_path = askopenfilename(filetypes=(("Adobe PDF Files", "*.pdf"), ("All files", "*.*")))
def save_file(self):
self.output_path = asksaveasfilename(filetypes=(("Adobe PDF Files", "*.pdf"), ("All files", "*.*")))
def interleave(self):
if self.input_path and self.output_path:
document = PyPDF2.PdfFileReader(self.input_path)
writer = PyPDF2.PdfFileWriter()
for page in document.pages:
writer.addPage(page)
writer.addBlankPage()
outputStream = open(self.output_path, 'wb')
writer.write(outputStream)
outputStream.close()
if __name__ == "__main__":
Application().mainloop()
|
import PyPDF2
from formlayout import fedit
def main():
paths = [('Input', ''), ('Output', '')]
pathsRead = fedit(paths,
title="Interleave pdf",
comment="Enter the full path to the source pdf and a path to output the result."
)
# Full path to files should be specified eg C:\Users\Sam\Documents\Input.pdf and C:\Users\Sam\Documents\Input.pdf
document = PyPDF2.PdfFileReader(pathsRead[0])
writer = PyPDF2.PdfFileWriter()
for page in document.pages:
writer.addPage(page)
writer.addBlankPage()
outputStream = open(pathsRead[1], 'wb')
writer.write(outputStream)
outputStream.close()
if __name__ == "__main__":
main()
Replace formlayout GUI with tkinter
Separate buttons for selecting input and output, and for running
the interleave procedure.import PyPDF2
from tkinter import *
from tkinter.filedialog import askopenfilename
from tkinter.filedialog import asksaveasfilename
class Application(Frame):
def __init__(self):
self.input_path = None;
self.output_path = None;
Frame.__init__(self)
self.master.resizable(False, False)
self.master.title('Interleave PDF')
self.grid()
self.button = Button(self, text="Select input", command=self.load_file, width=12)
self.button.grid(row=1, column=0, sticky=W)
self.button = Button(self, text="Select output", command=self.save_file, width=12)
self.button.grid(row=1, column=2, sticky=W)
self.button = Button(self, text="Interleave", command=self.interleave, width=12)
self.button.grid(row=1, column=3, sticky=W)
def load_file(self):
self.input_path = askopenfilename(filetypes=(("Adobe PDF Files", "*.pdf"), ("All files", "*.*")))
def save_file(self):
self.output_path = asksaveasfilename(filetypes=(("Adobe PDF Files", "*.pdf"), ("All files", "*.*")))
def interleave(self):
if self.input_path and self.output_path:
document = PyPDF2.PdfFileReader(self.input_path)
writer = PyPDF2.PdfFileWriter()
for page in document.pages:
writer.addPage(page)
writer.addBlankPage()
outputStream = open(self.output_path, 'wb')
writer.write(outputStream)
outputStream.close()
if __name__ == "__main__":
Application().mainloop()
|
<commit_before>import PyPDF2
from formlayout import fedit
def main():
paths = [('Input', ''), ('Output', '')]
pathsRead = fedit(paths,
title="Interleave pdf",
comment="Enter the full path to the source pdf and a path to output the result."
)
# Full path to files should be specified eg C:\Users\Sam\Documents\Input.pdf and C:\Users\Sam\Documents\Input.pdf
document = PyPDF2.PdfFileReader(pathsRead[0])
writer = PyPDF2.PdfFileWriter()
for page in document.pages:
writer.addPage(page)
writer.addBlankPage()
outputStream = open(pathsRead[1], 'wb')
writer.write(outputStream)
outputStream.close()
if __name__ == "__main__":
main()
<commit_msg>Replace formlayout GUI with tkinter
Separate buttons for selecting input and output, and for running
the interleave procedure.<commit_after>import PyPDF2
from tkinter import *
from tkinter.filedialog import askopenfilename
from tkinter.filedialog import asksaveasfilename
class Application(Frame):
def __init__(self):
self.input_path = None;
self.output_path = None;
Frame.__init__(self)
self.master.resizable(False, False)
self.master.title('Interleave PDF')
self.grid()
self.button = Button(self, text="Select input", command=self.load_file, width=12)
self.button.grid(row=1, column=0, sticky=W)
self.button = Button(self, text="Select output", command=self.save_file, width=12)
self.button.grid(row=1, column=2, sticky=W)
self.button = Button(self, text="Interleave", command=self.interleave, width=12)
self.button.grid(row=1, column=3, sticky=W)
def load_file(self):
self.input_path = askopenfilename(filetypes=(("Adobe PDF Files", "*.pdf"), ("All files", "*.*")))
def save_file(self):
self.output_path = asksaveasfilename(filetypes=(("Adobe PDF Files", "*.pdf"), ("All files", "*.*")))
def interleave(self):
if self.input_path and self.output_path:
document = PyPDF2.PdfFileReader(self.input_path)
writer = PyPDF2.PdfFileWriter()
for page in document.pages:
writer.addPage(page)
writer.addBlankPage()
outputStream = open(self.output_path, 'wb')
writer.write(outputStream)
outputStream.close()
if __name__ == "__main__":
Application().mainloop()
|
89af0ed8bf7f62f6a48d7dd5b09a3fa46a2cf9c7
|
spyder_terminal/__init__.py
|
spyder_terminal/__init__.py
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) Spyder Project Contributors
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Spyder Terminal Plugin."""
from .terminalplugin import TerminalPlugin as PLUGIN_CLASS
PLUGIN_CLASS
VERSION_INFO = (0, 3, 0, 'dev0')
__version__ = '.'.join(map(str, VERSION_INFO))
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) Spyder Project Contributors
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Spyder Terminal Plugin."""
from .terminalplugin import TerminalPlugin as PLUGIN_CLASS
PLUGIN_CLASS
VERSION_INFO = (0, 2, 3)
__version__ = '.'.join(map(str, VERSION_INFO))
|
Set release version to v0.2.3
|
Set release version to v0.2.3
|
Python
|
mit
|
andfoy/spyder-terminal,spyder-ide/spyder-terminal,spyder-ide/spyder-terminal,andfoy/spyder-terminal,andfoy/spyder-terminal,spyder-ide/spyder-terminal,spyder-ide/spyder-terminal
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) Spyder Project Contributors
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Spyder Terminal Plugin."""
from .terminalplugin import TerminalPlugin as PLUGIN_CLASS
PLUGIN_CLASS
VERSION_INFO = (0, 3, 0, 'dev0')
__version__ = '.'.join(map(str, VERSION_INFO))
Set release version to v0.2.3
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) Spyder Project Contributors
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Spyder Terminal Plugin."""
from .terminalplugin import TerminalPlugin as PLUGIN_CLASS
PLUGIN_CLASS
VERSION_INFO = (0, 2, 3)
__version__ = '.'.join(map(str, VERSION_INFO))
|
<commit_before># -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) Spyder Project Contributors
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Spyder Terminal Plugin."""
from .terminalplugin import TerminalPlugin as PLUGIN_CLASS
PLUGIN_CLASS
VERSION_INFO = (0, 3, 0, 'dev0')
__version__ = '.'.join(map(str, VERSION_INFO))
<commit_msg>Set release version to v0.2.3<commit_after>
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) Spyder Project Contributors
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Spyder Terminal Plugin."""
from .terminalplugin import TerminalPlugin as PLUGIN_CLASS
PLUGIN_CLASS
VERSION_INFO = (0, 2, 3)
__version__ = '.'.join(map(str, VERSION_INFO))
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) Spyder Project Contributors
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Spyder Terminal Plugin."""
from .terminalplugin import TerminalPlugin as PLUGIN_CLASS
PLUGIN_CLASS
VERSION_INFO = (0, 3, 0, 'dev0')
__version__ = '.'.join(map(str, VERSION_INFO))
Set release version to v0.2.3# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) Spyder Project Contributors
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Spyder Terminal Plugin."""
from .terminalplugin import TerminalPlugin as PLUGIN_CLASS
PLUGIN_CLASS
VERSION_INFO = (0, 2, 3)
__version__ = '.'.join(map(str, VERSION_INFO))
|
<commit_before># -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) Spyder Project Contributors
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Spyder Terminal Plugin."""
from .terminalplugin import TerminalPlugin as PLUGIN_CLASS
PLUGIN_CLASS
VERSION_INFO = (0, 3, 0, 'dev0')
__version__ = '.'.join(map(str, VERSION_INFO))
<commit_msg>Set release version to v0.2.3<commit_after># -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) Spyder Project Contributors
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Spyder Terminal Plugin."""
from .terminalplugin import TerminalPlugin as PLUGIN_CLASS
PLUGIN_CLASS
VERSION_INFO = (0, 2, 3)
__version__ = '.'.join(map(str, VERSION_INFO))
|
232f2961d8ff26f7263df5ab59c8b36ac8bd9b43
|
stars/serializers.py
|
stars/serializers.py
|
from .models import Star
from employees.models import Employee
from rest_framework import serializers
class EmployeeSimpleSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'username', 'first_name', 'last_name')
class StarSerializer(serializers.ModelSerializer):
class Meta:
model = Star
fields = ('pk', 'date', 'text', 'from_user', 'to_user', 'category', 'subcategory')
class StarSmallSerializer(serializers.ModelSerializer):
from_user = EmployeeSimpleSerializer()
class Meta:
model = Star
depth = 1
fields = ('pk', 'date', 'text', 'category', 'from_user')
class StarSwaggerSerializer(serializers.ModelSerializer):
class Meta:
model = Star
fields = ('pk', 'category', 'subcategory', 'text')
class StarEmployeesSubcategoriesSerializer(serializers.Serializer):
subcategory__pk = serializers.IntegerField()
subcategory__name = serializers.CharField(max_length=100)
num_stars = serializers.IntegerField()
class StarTopEmployeeLists(serializers.Serializer):
to_user__id = serializers.IntegerField()
to_user__username = serializers.CharField(max_length=100)
to_user__first_name = serializers.CharField(max_length=100)
to_user__last_name = serializers.CharField(max_length=100)
num_stars = serializers.IntegerField()
|
from .models import Star
from employees.models import Employee
from rest_framework import serializers
class EmployeeSimpleSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'username', 'first_name', 'last_name')
class StarSerializer(serializers.ModelSerializer):
class Meta:
model = Star
fields = ('pk', 'date', 'text', 'from_user', 'to_user', 'category', 'subcategory')
class StarSmallSerializer(serializers.ModelSerializer):
from_user = EmployeeSimpleSerializer()
class Meta:
model = Star
depth = 1
fields = ('pk', 'date', 'text', 'category', 'from_user')
class StarSwaggerSerializer(serializers.ModelSerializer):
class Meta:
model = Star
fields = ('pk', 'category', 'subcategory', 'text')
class StarEmployeesSubcategoriesSerializer(serializers.Serializer):
pk = serializers.IntegerField(source='subcategory__pk')
name = serializers.CharField(max_length=100, source='subcategory__name')
num_stars = serializers.IntegerField()
class StarTopEmployeeLists(serializers.Serializer):
to_user__id = serializers.IntegerField()
to_user__username = serializers.CharField(max_length=100)
to_user__first_name = serializers.CharField(max_length=100)
to_user__last_name = serializers.CharField(max_length=100)
num_stars = serializers.IntegerField()
|
Replace subcategory__ prefix to endpoint response field names.
|
Replace subcategory__ prefix to endpoint response field names.
|
Python
|
apache-2.0
|
belatrix/BackendAllStars
|
from .models import Star
from employees.models import Employee
from rest_framework import serializers
class EmployeeSimpleSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'username', 'first_name', 'last_name')
class StarSerializer(serializers.ModelSerializer):
class Meta:
model = Star
fields = ('pk', 'date', 'text', 'from_user', 'to_user', 'category', 'subcategory')
class StarSmallSerializer(serializers.ModelSerializer):
from_user = EmployeeSimpleSerializer()
class Meta:
model = Star
depth = 1
fields = ('pk', 'date', 'text', 'category', 'from_user')
class StarSwaggerSerializer(serializers.ModelSerializer):
class Meta:
model = Star
fields = ('pk', 'category', 'subcategory', 'text')
class StarEmployeesSubcategoriesSerializer(serializers.Serializer):
subcategory__pk = serializers.IntegerField()
subcategory__name = serializers.CharField(max_length=100)
num_stars = serializers.IntegerField()
class StarTopEmployeeLists(serializers.Serializer):
to_user__id = serializers.IntegerField()
to_user__username = serializers.CharField(max_length=100)
to_user__first_name = serializers.CharField(max_length=100)
to_user__last_name = serializers.CharField(max_length=100)
num_stars = serializers.IntegerField()
Replace subcategory__ prefix to endpoint response field names.
|
from .models import Star
from employees.models import Employee
from rest_framework import serializers
class EmployeeSimpleSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'username', 'first_name', 'last_name')
class StarSerializer(serializers.ModelSerializer):
class Meta:
model = Star
fields = ('pk', 'date', 'text', 'from_user', 'to_user', 'category', 'subcategory')
class StarSmallSerializer(serializers.ModelSerializer):
from_user = EmployeeSimpleSerializer()
class Meta:
model = Star
depth = 1
fields = ('pk', 'date', 'text', 'category', 'from_user')
class StarSwaggerSerializer(serializers.ModelSerializer):
class Meta:
model = Star
fields = ('pk', 'category', 'subcategory', 'text')
class StarEmployeesSubcategoriesSerializer(serializers.Serializer):
pk = serializers.IntegerField(source='subcategory__pk')
name = serializers.CharField(max_length=100, source='subcategory__name')
num_stars = serializers.IntegerField()
class StarTopEmployeeLists(serializers.Serializer):
to_user__id = serializers.IntegerField()
to_user__username = serializers.CharField(max_length=100)
to_user__first_name = serializers.CharField(max_length=100)
to_user__last_name = serializers.CharField(max_length=100)
num_stars = serializers.IntegerField()
|
<commit_before>from .models import Star
from employees.models import Employee
from rest_framework import serializers
class EmployeeSimpleSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'username', 'first_name', 'last_name')
class StarSerializer(serializers.ModelSerializer):
class Meta:
model = Star
fields = ('pk', 'date', 'text', 'from_user', 'to_user', 'category', 'subcategory')
class StarSmallSerializer(serializers.ModelSerializer):
from_user = EmployeeSimpleSerializer()
class Meta:
model = Star
depth = 1
fields = ('pk', 'date', 'text', 'category', 'from_user')
class StarSwaggerSerializer(serializers.ModelSerializer):
class Meta:
model = Star
fields = ('pk', 'category', 'subcategory', 'text')
class StarEmployeesSubcategoriesSerializer(serializers.Serializer):
subcategory__pk = serializers.IntegerField()
subcategory__name = serializers.CharField(max_length=100)
num_stars = serializers.IntegerField()
class StarTopEmployeeLists(serializers.Serializer):
to_user__id = serializers.IntegerField()
to_user__username = serializers.CharField(max_length=100)
to_user__first_name = serializers.CharField(max_length=100)
to_user__last_name = serializers.CharField(max_length=100)
num_stars = serializers.IntegerField()
<commit_msg>Replace subcategory__ prefix to endpoint response field names.<commit_after>
|
from .models import Star
from employees.models import Employee
from rest_framework import serializers
class EmployeeSimpleSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'username', 'first_name', 'last_name')
class StarSerializer(serializers.ModelSerializer):
class Meta:
model = Star
fields = ('pk', 'date', 'text', 'from_user', 'to_user', 'category', 'subcategory')
class StarSmallSerializer(serializers.ModelSerializer):
from_user = EmployeeSimpleSerializer()
class Meta:
model = Star
depth = 1
fields = ('pk', 'date', 'text', 'category', 'from_user')
class StarSwaggerSerializer(serializers.ModelSerializer):
class Meta:
model = Star
fields = ('pk', 'category', 'subcategory', 'text')
class StarEmployeesSubcategoriesSerializer(serializers.Serializer):
pk = serializers.IntegerField(source='subcategory__pk')
name = serializers.CharField(max_length=100, source='subcategory__name')
num_stars = serializers.IntegerField()
class StarTopEmployeeLists(serializers.Serializer):
to_user__id = serializers.IntegerField()
to_user__username = serializers.CharField(max_length=100)
to_user__first_name = serializers.CharField(max_length=100)
to_user__last_name = serializers.CharField(max_length=100)
num_stars = serializers.IntegerField()
|
from .models import Star
from employees.models import Employee
from rest_framework import serializers
class EmployeeSimpleSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'username', 'first_name', 'last_name')
class StarSerializer(serializers.ModelSerializer):
class Meta:
model = Star
fields = ('pk', 'date', 'text', 'from_user', 'to_user', 'category', 'subcategory')
class StarSmallSerializer(serializers.ModelSerializer):
from_user = EmployeeSimpleSerializer()
class Meta:
model = Star
depth = 1
fields = ('pk', 'date', 'text', 'category', 'from_user')
class StarSwaggerSerializer(serializers.ModelSerializer):
class Meta:
model = Star
fields = ('pk', 'category', 'subcategory', 'text')
class StarEmployeesSubcategoriesSerializer(serializers.Serializer):
subcategory__pk = serializers.IntegerField()
subcategory__name = serializers.CharField(max_length=100)
num_stars = serializers.IntegerField()
class StarTopEmployeeLists(serializers.Serializer):
to_user__id = serializers.IntegerField()
to_user__username = serializers.CharField(max_length=100)
to_user__first_name = serializers.CharField(max_length=100)
to_user__last_name = serializers.CharField(max_length=100)
num_stars = serializers.IntegerField()
Replace subcategory__ prefix to endpoint response field names.from .models import Star
from employees.models import Employee
from rest_framework import serializers
class EmployeeSimpleSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'username', 'first_name', 'last_name')
class StarSerializer(serializers.ModelSerializer):
class Meta:
model = Star
fields = ('pk', 'date', 'text', 'from_user', 'to_user', 'category', 'subcategory')
class StarSmallSerializer(serializers.ModelSerializer):
from_user = EmployeeSimpleSerializer()
class Meta:
model = Star
depth = 1
fields = ('pk', 'date', 'text', 'category', 'from_user')
class StarSwaggerSerializer(serializers.ModelSerializer):
class Meta:
model = Star
fields = ('pk', 'category', 'subcategory', 'text')
class StarEmployeesSubcategoriesSerializer(serializers.Serializer):
pk = serializers.IntegerField(source='subcategory__pk')
name = serializers.CharField(max_length=100, source='subcategory__name')
num_stars = serializers.IntegerField()
class StarTopEmployeeLists(serializers.Serializer):
to_user__id = serializers.IntegerField()
to_user__username = serializers.CharField(max_length=100)
to_user__first_name = serializers.CharField(max_length=100)
to_user__last_name = serializers.CharField(max_length=100)
num_stars = serializers.IntegerField()
|
<commit_before>from .models import Star
from employees.models import Employee
from rest_framework import serializers
class EmployeeSimpleSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'username', 'first_name', 'last_name')
class StarSerializer(serializers.ModelSerializer):
class Meta:
model = Star
fields = ('pk', 'date', 'text', 'from_user', 'to_user', 'category', 'subcategory')
class StarSmallSerializer(serializers.ModelSerializer):
from_user = EmployeeSimpleSerializer()
class Meta:
model = Star
depth = 1
fields = ('pk', 'date', 'text', 'category', 'from_user')
class StarSwaggerSerializer(serializers.ModelSerializer):
class Meta:
model = Star
fields = ('pk', 'category', 'subcategory', 'text')
class StarEmployeesSubcategoriesSerializer(serializers.Serializer):
subcategory__pk = serializers.IntegerField()
subcategory__name = serializers.CharField(max_length=100)
num_stars = serializers.IntegerField()
class StarTopEmployeeLists(serializers.Serializer):
to_user__id = serializers.IntegerField()
to_user__username = serializers.CharField(max_length=100)
to_user__first_name = serializers.CharField(max_length=100)
to_user__last_name = serializers.CharField(max_length=100)
num_stars = serializers.IntegerField()
<commit_msg>Replace subcategory__ prefix to endpoint response field names.<commit_after>from .models import Star
from employees.models import Employee
from rest_framework import serializers
class EmployeeSimpleSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'username', 'first_name', 'last_name')
class StarSerializer(serializers.ModelSerializer):
class Meta:
model = Star
fields = ('pk', 'date', 'text', 'from_user', 'to_user', 'category', 'subcategory')
class StarSmallSerializer(serializers.ModelSerializer):
from_user = EmployeeSimpleSerializer()
class Meta:
model = Star
depth = 1
fields = ('pk', 'date', 'text', 'category', 'from_user')
class StarSwaggerSerializer(serializers.ModelSerializer):
class Meta:
model = Star
fields = ('pk', 'category', 'subcategory', 'text')
class StarEmployeesSubcategoriesSerializer(serializers.Serializer):
pk = serializers.IntegerField(source='subcategory__pk')
name = serializers.CharField(max_length=100, source='subcategory__name')
num_stars = serializers.IntegerField()
class StarTopEmployeeLists(serializers.Serializer):
to_user__id = serializers.IntegerField()
to_user__username = serializers.CharField(max_length=100)
to_user__first_name = serializers.CharField(max_length=100)
to_user__last_name = serializers.CharField(max_length=100)
num_stars = serializers.IntegerField()
|
aa4a032016944f581ad7485ebdf8c39108511098
|
commandbased/commandbasedrobot.py
|
commandbased/commandbasedrobot.py
|
import hal
from wpilib.timedrobot import TimedRobot
from wpilib.command.scheduler import Scheduler
from wpilib.livewindow import LiveWindow
class CommandBasedRobot(TimedRobot):
'''
The base class for a Command-Based Robot. To use, instantiate commands and
trigger them.
'''
def startCompetition(self):
"""Initalizes the scheduler before starting robotInit()"""
self.scheduler = Scheduler.getInstance()
super().startCompetition()
def commandPeriodic(self):
'''
Run the scheduler regularly. If an error occurs during a competition,
prevent it from crashing the program.
'''
try:
self.scheduler.run()
except Exception as error:
if not self.ds.isFMSAttached():
raise
'''Just to be safe, stop all running commands.'''
self.scheduler.removeAll()
self.handleCrash(error)
autonomousPeriodic = commandPeriodic
teleopPeriodic = commandPeriodic
disabledPeriodic = commandPeriodic
def testPeriodic(self):
'''
Test mode will not run normal commands, but motors can be controlled
and sensors viewed with the SmartDashboard.
'''
LiveWindow.run()
def handleCrash(self, error):
'''
Called if an exception is raised in the Scheduler during a competition.
Writes an error message to the driver station by default. If you want
more complex behavior, override this method in your robot class.
'''
self.ds.reportError(str(error), printTrace=True)
|
from wpilib import TimedRobot
from wpilib.command import Scheduler
class CommandBasedRobot(TimedRobot):
    '''
    The base class for a Command-Based Robot. To use, instantiate commands and
    trigger them.
    '''

    def startCompetition(self):
        """Initializes the scheduler before starting robotInit()"""
        # Cache the singleton Scheduler so every periodic callback can run it.
        self.scheduler = Scheduler.getInstance()
        super().startCompetition()

    def commandPeriodic(self):
        '''
        Run the scheduler regularly. If an error occurs during a competition,
        prevent it from crashing the program.

        Outside a competition (no FMS attached) the exception is re-raised so
        the developer sees the full traceback immediately.
        '''
        try:
            self.scheduler.run()
        except Exception as error:
            # Only swallow the error when attached to the Field Management
            # System; crashing mid-match would disable the robot entirely.
            if not self.ds.isFMSAttached():
                raise
            # NOTE(review): the string below is a no-op expression statement
            # used as a comment in the original; kept byte-identical.
            '''Just to be safe, stop all running commands.'''
            self.scheduler.removeAll()
            self.handleCrash(error)

    # All normal modes share the same scheduler-driven periodic behavior.
    autonomousPeriodic = commandPeriodic
    teleopPeriodic = commandPeriodic
    disabledPeriodic = commandPeriodic

    # testPeriodic deliberately omitted: per the accompanying commit message,
    # LiveWindow is updated automatically regardless of mode since the 2018
    # WPILib IterativeRobot changes, so no manual LiveWindow.run() is needed.

    def handleCrash(self, error):
        '''
        Called if an exception is raised in the Scheduler during a competition.
        Writes an error message to the driver station by default. If you want
        more complex behavior, override this method in your robot class.
        '''
        self.ds.reportError(str(error), printTrace=True)
|
Remove LiveWindow call from CommandBasedRobot
|
Remove LiveWindow call from CommandBasedRobot
LiveWindow is automatically updated regardless of mode as part of 2018
WPILib IterativeRobot changes, so calling LiveWindow.run() manually is
unnecessary.
|
Python
|
bsd-3-clause
|
robotpy/robotpy-wpilib-utilities,robotpy/robotpy-wpilib-utilities
|
import hal
from wpilib.timedrobot import TimedRobot
from wpilib.command.scheduler import Scheduler
from wpilib.livewindow import LiveWindow
class CommandBasedRobot(TimedRobot):
'''
The base class for a Command-Based Robot. To use, instantiate commands and
trigger them.
'''
def startCompetition(self):
"""Initalizes the scheduler before starting robotInit()"""
self.scheduler = Scheduler.getInstance()
super().startCompetition()
def commandPeriodic(self):
'''
Run the scheduler regularly. If an error occurs during a competition,
prevent it from crashing the program.
'''
try:
self.scheduler.run()
except Exception as error:
if not self.ds.isFMSAttached():
raise
'''Just to be safe, stop all running commands.'''
self.scheduler.removeAll()
self.handleCrash(error)
autonomousPeriodic = commandPeriodic
teleopPeriodic = commandPeriodic
disabledPeriodic = commandPeriodic
def testPeriodic(self):
'''
Test mode will not run normal commands, but motors can be controlled
and sensors viewed with the SmartDashboard.
'''
LiveWindow.run()
def handleCrash(self, error):
'''
Called if an exception is raised in the Scheduler during a competition.
Writes an error message to the driver station by default. If you want
more complex behavior, override this method in your robot class.
'''
self.ds.reportError(str(error), printTrace=True)
Remove LiveWindow call from CommandBasedRobot
LiveWindow is automatically updated regardless of mode as part of 2018
WPILib IterativeRobot changes, so calling LiveWindow.run() manually is
unnecessary.
|
from wpilib import TimedRobot
from wpilib.command import Scheduler
class CommandBasedRobot(TimedRobot):
'''
The base class for a Command-Based Robot. To use, instantiate commands and
trigger them.
'''
def startCompetition(self):
"""Initalizes the scheduler before starting robotInit()"""
self.scheduler = Scheduler.getInstance()
super().startCompetition()
def commandPeriodic(self):
'''
Run the scheduler regularly. If an error occurs during a competition,
prevent it from crashing the program.
'''
try:
self.scheduler.run()
except Exception as error:
if not self.ds.isFMSAttached():
raise
'''Just to be safe, stop all running commands.'''
self.scheduler.removeAll()
self.handleCrash(error)
autonomousPeriodic = commandPeriodic
teleopPeriodic = commandPeriodic
disabledPeriodic = commandPeriodic
# testPeriodic deliberately omitted
def handleCrash(self, error):
'''
Called if an exception is raised in the Scheduler during a competition.
Writes an error message to the driver station by default. If you want
more complex behavior, override this method in your robot class.
'''
self.ds.reportError(str(error), printTrace=True)
|
<commit_before>import hal
from wpilib.timedrobot import TimedRobot
from wpilib.command.scheduler import Scheduler
from wpilib.livewindow import LiveWindow
class CommandBasedRobot(TimedRobot):
'''
The base class for a Command-Based Robot. To use, instantiate commands and
trigger them.
'''
def startCompetition(self):
"""Initalizes the scheduler before starting robotInit()"""
self.scheduler = Scheduler.getInstance()
super().startCompetition()
def commandPeriodic(self):
'''
Run the scheduler regularly. If an error occurs during a competition,
prevent it from crashing the program.
'''
try:
self.scheduler.run()
except Exception as error:
if not self.ds.isFMSAttached():
raise
'''Just to be safe, stop all running commands.'''
self.scheduler.removeAll()
self.handleCrash(error)
autonomousPeriodic = commandPeriodic
teleopPeriodic = commandPeriodic
disabledPeriodic = commandPeriodic
def testPeriodic(self):
'''
Test mode will not run normal commands, but motors can be controlled
and sensors viewed with the SmartDashboard.
'''
LiveWindow.run()
def handleCrash(self, error):
'''
Called if an exception is raised in the Scheduler during a competition.
Writes an error message to the driver station by default. If you want
more complex behavior, override this method in your robot class.
'''
self.ds.reportError(str(error), printTrace=True)
<commit_msg>Remove LiveWindow call from CommandBasedRobot
LiveWindow is automatically updated regardless of mode as part of 2018
WPILib IterativeRobot changes, so calling LiveWindow.run() manually is
unnecessary.<commit_after>
|
from wpilib import TimedRobot
from wpilib.command import Scheduler
class CommandBasedRobot(TimedRobot):
'''
The base class for a Command-Based Robot. To use, instantiate commands and
trigger them.
'''
def startCompetition(self):
"""Initalizes the scheduler before starting robotInit()"""
self.scheduler = Scheduler.getInstance()
super().startCompetition()
def commandPeriodic(self):
'''
Run the scheduler regularly. If an error occurs during a competition,
prevent it from crashing the program.
'''
try:
self.scheduler.run()
except Exception as error:
if not self.ds.isFMSAttached():
raise
'''Just to be safe, stop all running commands.'''
self.scheduler.removeAll()
self.handleCrash(error)
autonomousPeriodic = commandPeriodic
teleopPeriodic = commandPeriodic
disabledPeriodic = commandPeriodic
# testPeriodic deliberately omitted
def handleCrash(self, error):
'''
Called if an exception is raised in the Scheduler during a competition.
Writes an error message to the driver station by default. If you want
more complex behavior, override this method in your robot class.
'''
self.ds.reportError(str(error), printTrace=True)
|
import hal
from wpilib.timedrobot import TimedRobot
from wpilib.command.scheduler import Scheduler
from wpilib.livewindow import LiveWindow
class CommandBasedRobot(TimedRobot):
'''
The base class for a Command-Based Robot. To use, instantiate commands and
trigger them.
'''
def startCompetition(self):
"""Initalizes the scheduler before starting robotInit()"""
self.scheduler = Scheduler.getInstance()
super().startCompetition()
def commandPeriodic(self):
'''
Run the scheduler regularly. If an error occurs during a competition,
prevent it from crashing the program.
'''
try:
self.scheduler.run()
except Exception as error:
if not self.ds.isFMSAttached():
raise
'''Just to be safe, stop all running commands.'''
self.scheduler.removeAll()
self.handleCrash(error)
autonomousPeriodic = commandPeriodic
teleopPeriodic = commandPeriodic
disabledPeriodic = commandPeriodic
def testPeriodic(self):
'''
Test mode will not run normal commands, but motors can be controlled
and sensors viewed with the SmartDashboard.
'''
LiveWindow.run()
def handleCrash(self, error):
'''
Called if an exception is raised in the Scheduler during a competition.
Writes an error message to the driver station by default. If you want
more complex behavior, override this method in your robot class.
'''
self.ds.reportError(str(error), printTrace=True)
Remove LiveWindow call from CommandBasedRobot
LiveWindow is automatically updated regardless of mode as part of 2018
WPILib IterativeRobot changes, so calling LiveWindow.run() manually is
unnecessary.from wpilib import TimedRobot
from wpilib.command import Scheduler
class CommandBasedRobot(TimedRobot):
'''
The base class for a Command-Based Robot. To use, instantiate commands and
trigger them.
'''
def startCompetition(self):
"""Initalizes the scheduler before starting robotInit()"""
self.scheduler = Scheduler.getInstance()
super().startCompetition()
def commandPeriodic(self):
'''
Run the scheduler regularly. If an error occurs during a competition,
prevent it from crashing the program.
'''
try:
self.scheduler.run()
except Exception as error:
if not self.ds.isFMSAttached():
raise
'''Just to be safe, stop all running commands.'''
self.scheduler.removeAll()
self.handleCrash(error)
autonomousPeriodic = commandPeriodic
teleopPeriodic = commandPeriodic
disabledPeriodic = commandPeriodic
# testPeriodic deliberately omitted
def handleCrash(self, error):
'''
Called if an exception is raised in the Scheduler during a competition.
Writes an error message to the driver station by default. If you want
more complex behavior, override this method in your robot class.
'''
self.ds.reportError(str(error), printTrace=True)
|
<commit_before>import hal
from wpilib.timedrobot import TimedRobot
from wpilib.command.scheduler import Scheduler
from wpilib.livewindow import LiveWindow
class CommandBasedRobot(TimedRobot):
'''
The base class for a Command-Based Robot. To use, instantiate commands and
trigger them.
'''
def startCompetition(self):
"""Initalizes the scheduler before starting robotInit()"""
self.scheduler = Scheduler.getInstance()
super().startCompetition()
def commandPeriodic(self):
'''
Run the scheduler regularly. If an error occurs during a competition,
prevent it from crashing the program.
'''
try:
self.scheduler.run()
except Exception as error:
if not self.ds.isFMSAttached():
raise
'''Just to be safe, stop all running commands.'''
self.scheduler.removeAll()
self.handleCrash(error)
autonomousPeriodic = commandPeriodic
teleopPeriodic = commandPeriodic
disabledPeriodic = commandPeriodic
def testPeriodic(self):
'''
Test mode will not run normal commands, but motors can be controlled
and sensors viewed with the SmartDashboard.
'''
LiveWindow.run()
def handleCrash(self, error):
'''
Called if an exception is raised in the Scheduler during a competition.
Writes an error message to the driver station by default. If you want
more complex behavior, override this method in your robot class.
'''
self.ds.reportError(str(error), printTrace=True)
<commit_msg>Remove LiveWindow call from CommandBasedRobot
LiveWindow is automatically updated regardless of mode as part of 2018
WPILib IterativeRobot changes, so calling LiveWindow.run() manually is
unnecessary.<commit_after>from wpilib import TimedRobot
from wpilib.command import Scheduler
class CommandBasedRobot(TimedRobot):
'''
The base class for a Command-Based Robot. To use, instantiate commands and
trigger them.
'''
def startCompetition(self):
"""Initalizes the scheduler before starting robotInit()"""
self.scheduler = Scheduler.getInstance()
super().startCompetition()
def commandPeriodic(self):
'''
Run the scheduler regularly. If an error occurs during a competition,
prevent it from crashing the program.
'''
try:
self.scheduler.run()
except Exception as error:
if not self.ds.isFMSAttached():
raise
'''Just to be safe, stop all running commands.'''
self.scheduler.removeAll()
self.handleCrash(error)
autonomousPeriodic = commandPeriodic
teleopPeriodic = commandPeriodic
disabledPeriodic = commandPeriodic
# testPeriodic deliberately omitted
def handleCrash(self, error):
'''
Called if an exception is raised in the Scheduler during a competition.
Writes an error message to the driver station by default. If you want
more complex behavior, override this method in your robot class.
'''
self.ds.reportError(str(error), printTrace=True)
|
a8fcd8c56db0ce862c6c0ac79fc58a9e65992f6e
|
onlineweb4/context_processors.py
|
onlineweb4/context_processors.py
|
from django.conf import settings
from apps.feedback.models import FeedbackRelation
def context_settings(request):
    """Expose selected Django settings to every template context.

    Only settings that are actually defined are copied, so templates can
    safely test for their presence.
    """
    context_extras = {}
    if hasattr(settings, 'GOOGLE_ANALYTICS_KEY'):
        context_extras['GOOGLE_ANALYTICS_KEY'] = settings.GOOGLE_ANALYTICS_KEY
    if hasattr(settings, 'HOT_RELOAD'):
        context_extras['HOT_RELOAD'] = settings.HOT_RELOAD
    return context_extras
def feedback_notifier(request):
    """Collect active feedback schemas the current user has not yet answered.

    Returns a context dict with key 'feedback_pending' mapping to a list of
    FeedbackRelation objects; the list is empty for anonymous users.
    """
    context_extras = {}
    context_extras['feedback_pending'] = []
    if not request.user.is_authenticated():
        return context_extras
    active_feedbacks = FeedbackRelation.objects.filter(active=True)
    for active_feedback in active_feedbacks:
        # Skip relations whose target object has been deleted.
        if active_feedback.content_object is None:
            continue
        # This method returns both bools and a list for some reason. Python crashes with the expression: x in bool,
        # so we do this to fetch once and test twice
        not_answered = active_feedback.not_answered()
        # NOTE(review): `== False` (rather than `not_answered is False`) is
        # kept byte-identical; it presumably relies on not_answered()
        # returning exactly False when everyone has answered — confirm.
        if not_answered == False or request.user not in not_answered:
            continue
        context_extras['feedback_pending'].append(active_feedback)
    return context_extras
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.utils import timezone
from apps.feedback.models import FeedbackRelation
def context_settings(request):
context_extras = {}
if hasattr(settings, 'GOOGLE_ANALYTICS_KEY'):
context_extras['GOOGLE_ANALYTICS_KEY'] = settings.GOOGLE_ANALYTICS_KEY
if hasattr(settings, 'HOT_RELOAD'):
context_extras['HOT_RELOAD'] = settings.HOT_RELOAD
return context_extras
def feedback_notifier(request):
context_extras = {}
context_extras['feedback_pending'] = []
if not request.user.is_authenticated():
return context_extras
active_feedbacks = FeedbackRelation.objects.filter(active=True)
for active_feedback in active_feedbacks:
if active_feedback.content_object is None:
continue
# Making sure we have an end date, and that the event is over
# and that the feedback deadline is not passed (logic reused from apps.feedback.mommy)
end_date = active_feedback.content_end_date()
today_date = timezone.now().date()
if not end_date or end_date.date() >= today_date or (active_feedback.deadline - today_date).days < 0:
continue
# This method returns both bools and a list for some reason. Python crashes with the expression: x in bool,
# so we do this to fetch once and test twice
not_answered = active_feedback.not_answered()
if not_answered == False or request.user not in not_answered:
continue
context_extras['feedback_pending'].append(active_feedback)
return context_extras
|
Add more constraints to active feedback schemas
|
Add more constraints to active feedback schemas
|
Python
|
mit
|
dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4
|
from django.conf import settings
from apps.feedback.models import FeedbackRelation
def context_settings(request):
context_extras = {}
if hasattr(settings, 'GOOGLE_ANALYTICS_KEY'):
context_extras['GOOGLE_ANALYTICS_KEY'] = settings.GOOGLE_ANALYTICS_KEY
if hasattr(settings, 'HOT_RELOAD'):
context_extras['HOT_RELOAD'] = settings.HOT_RELOAD
return context_extras
def feedback_notifier(request):
context_extras = {}
context_extras['feedback_pending'] = []
if not request.user.is_authenticated():
return context_extras
active_feedbacks = FeedbackRelation.objects.filter(active=True)
for active_feedback in active_feedbacks:
if active_feedback.content_object is None:
continue
# This method returns both bools and a list for some reason. Python crashes with the expression: x in bool,
# so we do this to fetch once and test twice
not_answered = active_feedback.not_answered()
if not_answered == False or request.user not in not_answered:
continue
context_extras['feedback_pending'].append(active_feedback)
return context_extras
Add more constraints to active feedback schemas
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.utils import timezone
from apps.feedback.models import FeedbackRelation
def context_settings(request):
context_extras = {}
if hasattr(settings, 'GOOGLE_ANALYTICS_KEY'):
context_extras['GOOGLE_ANALYTICS_KEY'] = settings.GOOGLE_ANALYTICS_KEY
if hasattr(settings, 'HOT_RELOAD'):
context_extras['HOT_RELOAD'] = settings.HOT_RELOAD
return context_extras
def feedback_notifier(request):
context_extras = {}
context_extras['feedback_pending'] = []
if not request.user.is_authenticated():
return context_extras
active_feedbacks = FeedbackRelation.objects.filter(active=True)
for active_feedback in active_feedbacks:
if active_feedback.content_object is None:
continue
# Making sure we have an end date, and that the event is over
# and that the feedback deadline is not passed (logic reused from apps.feedback.mommy)
end_date = active_feedback.content_end_date()
today_date = timezone.now().date()
if not end_date or end_date.date() >= today_date or (active_feedback.deadline - today_date).days < 0:
continue
# This method returns both bools and a list for some reason. Python crashes with the expression: x in bool,
# so we do this to fetch once and test twice
not_answered = active_feedback.not_answered()
if not_answered == False or request.user not in not_answered:
continue
context_extras['feedback_pending'].append(active_feedback)
return context_extras
|
<commit_before>from django.conf import settings
from apps.feedback.models import FeedbackRelation
def context_settings(request):
context_extras = {}
if hasattr(settings, 'GOOGLE_ANALYTICS_KEY'):
context_extras['GOOGLE_ANALYTICS_KEY'] = settings.GOOGLE_ANALYTICS_KEY
if hasattr(settings, 'HOT_RELOAD'):
context_extras['HOT_RELOAD'] = settings.HOT_RELOAD
return context_extras
def feedback_notifier(request):
context_extras = {}
context_extras['feedback_pending'] = []
if not request.user.is_authenticated():
return context_extras
active_feedbacks = FeedbackRelation.objects.filter(active=True)
for active_feedback in active_feedbacks:
if active_feedback.content_object is None:
continue
# This method returns both bools and a list for some reason. Python crashes with the expression: x in bool,
# so we do this to fetch once and test twice
not_answered = active_feedback.not_answered()
if not_answered == False or request.user not in not_answered:
continue
context_extras['feedback_pending'].append(active_feedback)
return context_extras
<commit_msg>Add more constraints to active feedback schemas<commit_after>
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.utils import timezone
from apps.feedback.models import FeedbackRelation
def context_settings(request):
context_extras = {}
if hasattr(settings, 'GOOGLE_ANALYTICS_KEY'):
context_extras['GOOGLE_ANALYTICS_KEY'] = settings.GOOGLE_ANALYTICS_KEY
if hasattr(settings, 'HOT_RELOAD'):
context_extras['HOT_RELOAD'] = settings.HOT_RELOAD
return context_extras
def feedback_notifier(request):
context_extras = {}
context_extras['feedback_pending'] = []
if not request.user.is_authenticated():
return context_extras
active_feedbacks = FeedbackRelation.objects.filter(active=True)
for active_feedback in active_feedbacks:
if active_feedback.content_object is None:
continue
# Making sure we have an end date, and that the event is over
# and that the feedback deadline is not passed (logic reused from apps.feedback.mommy)
end_date = active_feedback.content_end_date()
today_date = timezone.now().date()
if not end_date or end_date.date() >= today_date or (active_feedback.deadline - today_date).days < 0:
continue
# This method returns both bools and a list for some reason. Python crashes with the expression: x in bool,
# so we do this to fetch once and test twice
not_answered = active_feedback.not_answered()
if not_answered == False or request.user not in not_answered:
continue
context_extras['feedback_pending'].append(active_feedback)
return context_extras
|
from django.conf import settings
from apps.feedback.models import FeedbackRelation
def context_settings(request):
context_extras = {}
if hasattr(settings, 'GOOGLE_ANALYTICS_KEY'):
context_extras['GOOGLE_ANALYTICS_KEY'] = settings.GOOGLE_ANALYTICS_KEY
if hasattr(settings, 'HOT_RELOAD'):
context_extras['HOT_RELOAD'] = settings.HOT_RELOAD
return context_extras
def feedback_notifier(request):
context_extras = {}
context_extras['feedback_pending'] = []
if not request.user.is_authenticated():
return context_extras
active_feedbacks = FeedbackRelation.objects.filter(active=True)
for active_feedback in active_feedbacks:
if active_feedback.content_object is None:
continue
# This method returns both bools and a list for some reason. Python crashes with the expression: x in bool,
# so we do this to fetch once and test twice
not_answered = active_feedback.not_answered()
if not_answered == False or request.user not in not_answered:
continue
context_extras['feedback_pending'].append(active_feedback)
return context_extras
Add more constraints to active feedback schemas# -*- coding: utf-8 -*-
from django.conf import settings
from django.utils import timezone
from apps.feedback.models import FeedbackRelation
def context_settings(request):
context_extras = {}
if hasattr(settings, 'GOOGLE_ANALYTICS_KEY'):
context_extras['GOOGLE_ANALYTICS_KEY'] = settings.GOOGLE_ANALYTICS_KEY
if hasattr(settings, 'HOT_RELOAD'):
context_extras['HOT_RELOAD'] = settings.HOT_RELOAD
return context_extras
def feedback_notifier(request):
context_extras = {}
context_extras['feedback_pending'] = []
if not request.user.is_authenticated():
return context_extras
active_feedbacks = FeedbackRelation.objects.filter(active=True)
for active_feedback in active_feedbacks:
if active_feedback.content_object is None:
continue
# Making sure we have an end date, and that the event is over
# and that the feedback deadline is not passed (logic reused from apps.feedback.mommy)
end_date = active_feedback.content_end_date()
today_date = timezone.now().date()
if not end_date or end_date.date() >= today_date or (active_feedback.deadline - today_date).days < 0:
continue
# This method returns both bools and a list for some reason. Python crashes with the expression: x in bool,
# so we do this to fetch once and test twice
not_answered = active_feedback.not_answered()
if not_answered == False or request.user not in not_answered:
continue
context_extras['feedback_pending'].append(active_feedback)
return context_extras
|
<commit_before>from django.conf import settings
from apps.feedback.models import FeedbackRelation
def context_settings(request):
context_extras = {}
if hasattr(settings, 'GOOGLE_ANALYTICS_KEY'):
context_extras['GOOGLE_ANALYTICS_KEY'] = settings.GOOGLE_ANALYTICS_KEY
if hasattr(settings, 'HOT_RELOAD'):
context_extras['HOT_RELOAD'] = settings.HOT_RELOAD
return context_extras
def feedback_notifier(request):
context_extras = {}
context_extras['feedback_pending'] = []
if not request.user.is_authenticated():
return context_extras
active_feedbacks = FeedbackRelation.objects.filter(active=True)
for active_feedback in active_feedbacks:
if active_feedback.content_object is None:
continue
# This method returns both bools and a list for some reason. Python crashes with the expression: x in bool,
# so we do this to fetch once and test twice
not_answered = active_feedback.not_answered()
if not_answered == False or request.user not in not_answered:
continue
context_extras['feedback_pending'].append(active_feedback)
return context_extras
<commit_msg>Add more constraints to active feedback schemas<commit_after># -*- coding: utf-8 -*-
from django.conf import settings
from django.utils import timezone
from apps.feedback.models import FeedbackRelation
def context_settings(request):
context_extras = {}
if hasattr(settings, 'GOOGLE_ANALYTICS_KEY'):
context_extras['GOOGLE_ANALYTICS_KEY'] = settings.GOOGLE_ANALYTICS_KEY
if hasattr(settings, 'HOT_RELOAD'):
context_extras['HOT_RELOAD'] = settings.HOT_RELOAD
return context_extras
def feedback_notifier(request):
context_extras = {}
context_extras['feedback_pending'] = []
if not request.user.is_authenticated():
return context_extras
active_feedbacks = FeedbackRelation.objects.filter(active=True)
for active_feedback in active_feedbacks:
if active_feedback.content_object is None:
continue
# Making sure we have an end date, and that the event is over
# and that the feedback deadline is not passed (logic reused from apps.feedback.mommy)
end_date = active_feedback.content_end_date()
today_date = timezone.now().date()
if not end_date or end_date.date() >= today_date or (active_feedback.deadline - today_date).days < 0:
continue
# This method returns both bools and a list for some reason. Python crashes with the expression: x in bool,
# so we do this to fetch once and test twice
not_answered = active_feedback.not_answered()
if not_answered == False or request.user not in not_answered:
continue
context_extras['feedback_pending'].append(active_feedback)
return context_extras
|
175c1775aa7f5cd0ba2022e95389507d8a4c87dc
|
syncplay/__init__.py
|
syncplay/__init__.py
|
# Syncplay release metadata (development build of the 1.6.6 line).
version = '1.6.6'
revision = ' development'  # note the leading space — presumably concatenated directly after `version`; confirm at usage sites
milestone = 'Yoitsu'
release_number = '87'
projectURL = 'https://syncplay.pl/'
|
version = '1.6.6'
revision = ' beta 1'
milestone = 'Yoitsu'
release_number = '88'
projectURL = 'https://syncplay.pl/'
|
Mark as 1.6.6 beta 1
|
Mark as 1.6.6 beta 1
|
Python
|
apache-2.0
|
alby128/syncplay,Syncplay/syncplay,Syncplay/syncplay,alby128/syncplay
|
version = '1.6.6'
revision = ' development'
milestone = 'Yoitsu'
release_number = '87'
projectURL = 'https://syncplay.pl/'
Mark as 1.6.6 beta 1
|
version = '1.6.6'
revision = ' beta 1'
milestone = 'Yoitsu'
release_number = '88'
projectURL = 'https://syncplay.pl/'
|
<commit_before>version = '1.6.6'
revision = ' development'
milestone = 'Yoitsu'
release_number = '87'
projectURL = 'https://syncplay.pl/'
<commit_msg>Mark as 1.6.6 beta 1<commit_after>
|
version = '1.6.6'
revision = ' beta 1'
milestone = 'Yoitsu'
release_number = '88'
projectURL = 'https://syncplay.pl/'
|
version = '1.6.6'
revision = ' development'
milestone = 'Yoitsu'
release_number = '87'
projectURL = 'https://syncplay.pl/'
Mark as 1.6.6 beta 1version = '1.6.6'
revision = ' beta 1'
milestone = 'Yoitsu'
release_number = '88'
projectURL = 'https://syncplay.pl/'
|
<commit_before>version = '1.6.6'
revision = ' development'
milestone = 'Yoitsu'
release_number = '87'
projectURL = 'https://syncplay.pl/'
<commit_msg>Mark as 1.6.6 beta 1<commit_after>version = '1.6.6'
revision = ' beta 1'
milestone = 'Yoitsu'
release_number = '88'
projectURL = 'https://syncplay.pl/'
|
1c7928a5aeff55518bfda2b9a9ef1ec2a2ef76e4
|
corehq/celery_monitoring/tests.py
|
corehq/celery_monitoring/tests.py
|
from __future__ import absolute_import
from __future__ import print_function
import datetime
from freezegun import freeze_time
from corehq.celery_monitoring.heartbeat import Heartbeat, HeartbeatNeverRecorded, \
HEARTBEAT_FREQUENCY
from testil import assert_raises, eq
def test_heartbeat():
    """Heartbeat raises until first mark_seen(), then reports the last-seen
    time and a blockage duration measured from it."""
    hb = Heartbeat('celery_periodic')
    hb.clear_last_seen()
    # Before any heartbeat is recorded, both accessors must raise.
    with assert_raises(HeartbeatNeverRecorded):
        hb.get_last_seen()
    with assert_raises(HeartbeatNeverRecorded):
        hb.get_blockage_duration()
    seen_time = datetime.datetime.utcnow()
    with freeze_time(seen_time):
        hb.mark_seen()
        # Immediately after marking, there is no blockage at all.
        eq(hb.get_last_seen(), seen_time)
        eq(hb.get_blockage_duration(), datetime.timedelta(seconds=0))
    # Ten minutes later the blockage is the elapsed time minus the expected
    # heartbeat interval.
    with freeze_time(seen_time + datetime.timedelta(minutes=10)):
        eq(hb.get_last_seen(), seen_time)
        eq(hb.get_blockage_duration(), datetime.timedelta(minutes=10) - HEARTBEAT_FREQUENCY)
|
from __future__ import absolute_import
from __future__ import print_function
import datetime
from freezegun import freeze_time
from corehq.celery_monitoring.heartbeat import Heartbeat, HeartbeatNeverRecorded, \
HEARTBEAT_FREQUENCY
from testil import assert_raises, eq
from corehq.celery_monitoring.signals import TimeToStartTimer
def test_heartbeat():
hb = Heartbeat('celery_periodic')
hb.clear_last_seen()
with assert_raises(HeartbeatNeverRecorded):
hb.get_last_seen()
with assert_raises(HeartbeatNeverRecorded):
hb.get_blockage_duration()
seen_time = datetime.datetime.utcnow()
with freeze_time(seen_time):
hb.mark_seen()
eq(hb.get_last_seen(), seen_time)
eq(hb.get_blockage_duration(), datetime.timedelta(seconds=0))
with freeze_time(seen_time + datetime.timedelta(minutes=10)):
eq(hb.get_last_seen(), seen_time)
eq(hb.get_blockage_duration(), datetime.timedelta(minutes=10) - HEARTBEAT_FREQUENCY)
def test_time_to_start_timer():
    """TimeToStartTimer measures the queue-to-start delay of a task and
    pops the measurement exactly once."""
    task_id = 'abc123'
    delay = datetime.timedelta(seconds=6)
    start_time = datetime.datetime.utcnow()
    # starts empty
    eq(TimeToStartTimer(task_id).stop_and_pop_timing(), None)
    with freeze_time(start_time):
        TimeToStartTimer(task_id).start_timing()
    with freeze_time(start_time + delay):
        time_to_start = TimeToStartTimer(task_id).stop_and_pop_timing()
    eq(time_to_start, delay)
    # can only pop once, second time empty
    eq(TimeToStartTimer(task_id).stop_and_pop_timing(), None)
|
Add simple test for celery time to start timer
|
Add simple test for celery time to start timer
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from __future__ import absolute_import
from __future__ import print_function
import datetime
from freezegun import freeze_time
from corehq.celery_monitoring.heartbeat import Heartbeat, HeartbeatNeverRecorded, \
HEARTBEAT_FREQUENCY
from testil import assert_raises, eq
def test_heartbeat():
hb = Heartbeat('celery_periodic')
hb.clear_last_seen()
with assert_raises(HeartbeatNeverRecorded):
hb.get_last_seen()
with assert_raises(HeartbeatNeverRecorded):
hb.get_blockage_duration()
seen_time = datetime.datetime.utcnow()
with freeze_time(seen_time):
hb.mark_seen()
eq(hb.get_last_seen(), seen_time)
eq(hb.get_blockage_duration(), datetime.timedelta(seconds=0))
with freeze_time(seen_time + datetime.timedelta(minutes=10)):
eq(hb.get_last_seen(), seen_time)
eq(hb.get_blockage_duration(), datetime.timedelta(minutes=10) - HEARTBEAT_FREQUENCY)
Add simple test for celery time to start timer
|
from __future__ import absolute_import
from __future__ import print_function
import datetime
from freezegun import freeze_time
from corehq.celery_monitoring.heartbeat import Heartbeat, HeartbeatNeverRecorded, \
HEARTBEAT_FREQUENCY
from testil import assert_raises, eq
from corehq.celery_monitoring.signals import TimeToStartTimer
def test_heartbeat():
hb = Heartbeat('celery_periodic')
hb.clear_last_seen()
with assert_raises(HeartbeatNeverRecorded):
hb.get_last_seen()
with assert_raises(HeartbeatNeverRecorded):
hb.get_blockage_duration()
seen_time = datetime.datetime.utcnow()
with freeze_time(seen_time):
hb.mark_seen()
eq(hb.get_last_seen(), seen_time)
eq(hb.get_blockage_duration(), datetime.timedelta(seconds=0))
with freeze_time(seen_time + datetime.timedelta(minutes=10)):
eq(hb.get_last_seen(), seen_time)
eq(hb.get_blockage_duration(), datetime.timedelta(minutes=10) - HEARTBEAT_FREQUENCY)
def test_time_to_start_timer():
task_id = 'abc123'
delay = datetime.timedelta(seconds=6)
start_time = datetime.datetime.utcnow()
# starts empty
eq(TimeToStartTimer(task_id).stop_and_pop_timing(), None)
with freeze_time(start_time):
TimeToStartTimer(task_id).start_timing()
with freeze_time(start_time + delay):
time_to_start = TimeToStartTimer(task_id).stop_and_pop_timing()
eq(time_to_start, delay)
# can only pop once, second time empty
eq(TimeToStartTimer(task_id).stop_and_pop_timing(), None)
|
<commit_before>from __future__ import absolute_import
from __future__ import print_function
import datetime
from freezegun import freeze_time
from corehq.celery_monitoring.heartbeat import Heartbeat, HeartbeatNeverRecorded, \
HEARTBEAT_FREQUENCY
from testil import assert_raises, eq
def test_heartbeat():
hb = Heartbeat('celery_periodic')
hb.clear_last_seen()
with assert_raises(HeartbeatNeverRecorded):
hb.get_last_seen()
with assert_raises(HeartbeatNeverRecorded):
hb.get_blockage_duration()
seen_time = datetime.datetime.utcnow()
with freeze_time(seen_time):
hb.mark_seen()
eq(hb.get_last_seen(), seen_time)
eq(hb.get_blockage_duration(), datetime.timedelta(seconds=0))
with freeze_time(seen_time + datetime.timedelta(minutes=10)):
eq(hb.get_last_seen(), seen_time)
eq(hb.get_blockage_duration(), datetime.timedelta(minutes=10) - HEARTBEAT_FREQUENCY)
<commit_msg>Add simple test for celery time to start timer<commit_after>
|
from __future__ import absolute_import
from __future__ import print_function
import datetime
from freezegun import freeze_time
from corehq.celery_monitoring.heartbeat import Heartbeat, HeartbeatNeverRecorded, \
HEARTBEAT_FREQUENCY
from testil import assert_raises, eq
from corehq.celery_monitoring.signals import TimeToStartTimer
def test_heartbeat():
hb = Heartbeat('celery_periodic')
hb.clear_last_seen()
with assert_raises(HeartbeatNeverRecorded):
hb.get_last_seen()
with assert_raises(HeartbeatNeverRecorded):
hb.get_blockage_duration()
seen_time = datetime.datetime.utcnow()
with freeze_time(seen_time):
hb.mark_seen()
eq(hb.get_last_seen(), seen_time)
eq(hb.get_blockage_duration(), datetime.timedelta(seconds=0))
with freeze_time(seen_time + datetime.timedelta(minutes=10)):
eq(hb.get_last_seen(), seen_time)
eq(hb.get_blockage_duration(), datetime.timedelta(minutes=10) - HEARTBEAT_FREQUENCY)
def test_time_to_start_timer():
task_id = 'abc123'
delay = datetime.timedelta(seconds=6)
start_time = datetime.datetime.utcnow()
# starts empty
eq(TimeToStartTimer(task_id).stop_and_pop_timing(), None)
with freeze_time(start_time):
TimeToStartTimer(task_id).start_timing()
with freeze_time(start_time + delay):
time_to_start = TimeToStartTimer(task_id).stop_and_pop_timing()
eq(time_to_start, delay)
# can only pop once, second time empty
eq(TimeToStartTimer(task_id).stop_and_pop_timing(), None)
|
from __future__ import absolute_import
from __future__ import print_function
import datetime
from freezegun import freeze_time
from corehq.celery_monitoring.heartbeat import Heartbeat, HeartbeatNeverRecorded, \
HEARTBEAT_FREQUENCY
from testil import assert_raises, eq
def test_heartbeat():
hb = Heartbeat('celery_periodic')
hb.clear_last_seen()
with assert_raises(HeartbeatNeverRecorded):
hb.get_last_seen()
with assert_raises(HeartbeatNeverRecorded):
hb.get_blockage_duration()
seen_time = datetime.datetime.utcnow()
with freeze_time(seen_time):
hb.mark_seen()
eq(hb.get_last_seen(), seen_time)
eq(hb.get_blockage_duration(), datetime.timedelta(seconds=0))
with freeze_time(seen_time + datetime.timedelta(minutes=10)):
eq(hb.get_last_seen(), seen_time)
eq(hb.get_blockage_duration(), datetime.timedelta(minutes=10) - HEARTBEAT_FREQUENCY)
Add simple test for celery time to start timerfrom __future__ import absolute_import
from __future__ import print_function
import datetime
from freezegun import freeze_time
from corehq.celery_monitoring.heartbeat import Heartbeat, HeartbeatNeverRecorded, \
HEARTBEAT_FREQUENCY
from testil import assert_raises, eq
from corehq.celery_monitoring.signals import TimeToStartTimer
def test_heartbeat():
hb = Heartbeat('celery_periodic')
hb.clear_last_seen()
with assert_raises(HeartbeatNeverRecorded):
hb.get_last_seen()
with assert_raises(HeartbeatNeverRecorded):
hb.get_blockage_duration()
seen_time = datetime.datetime.utcnow()
with freeze_time(seen_time):
hb.mark_seen()
eq(hb.get_last_seen(), seen_time)
eq(hb.get_blockage_duration(), datetime.timedelta(seconds=0))
with freeze_time(seen_time + datetime.timedelta(minutes=10)):
eq(hb.get_last_seen(), seen_time)
eq(hb.get_blockage_duration(), datetime.timedelta(minutes=10) - HEARTBEAT_FREQUENCY)
def test_time_to_start_timer():
task_id = 'abc123'
delay = datetime.timedelta(seconds=6)
start_time = datetime.datetime.utcnow()
# starts empty
eq(TimeToStartTimer(task_id).stop_and_pop_timing(), None)
with freeze_time(start_time):
TimeToStartTimer(task_id).start_timing()
with freeze_time(start_time + delay):
time_to_start = TimeToStartTimer(task_id).stop_and_pop_timing()
eq(time_to_start, delay)
# can only pop once, second time empty
eq(TimeToStartTimer(task_id).stop_and_pop_timing(), None)
|
<commit_before>from __future__ import absolute_import
from __future__ import print_function
import datetime
from freezegun import freeze_time
from corehq.celery_monitoring.heartbeat import Heartbeat, HeartbeatNeverRecorded, \
HEARTBEAT_FREQUENCY
from testil import assert_raises, eq
def test_heartbeat():
hb = Heartbeat('celery_periodic')
hb.clear_last_seen()
with assert_raises(HeartbeatNeverRecorded):
hb.get_last_seen()
with assert_raises(HeartbeatNeverRecorded):
hb.get_blockage_duration()
seen_time = datetime.datetime.utcnow()
with freeze_time(seen_time):
hb.mark_seen()
eq(hb.get_last_seen(), seen_time)
eq(hb.get_blockage_duration(), datetime.timedelta(seconds=0))
with freeze_time(seen_time + datetime.timedelta(minutes=10)):
eq(hb.get_last_seen(), seen_time)
eq(hb.get_blockage_duration(), datetime.timedelta(minutes=10) - HEARTBEAT_FREQUENCY)
<commit_msg>Add simple test for celery time to start timer<commit_after>from __future__ import absolute_import
from __future__ import print_function
import datetime
from freezegun import freeze_time
from corehq.celery_monitoring.heartbeat import Heartbeat, HeartbeatNeverRecorded, \
HEARTBEAT_FREQUENCY
from testil import assert_raises, eq
from corehq.celery_monitoring.signals import TimeToStartTimer
def test_heartbeat():
hb = Heartbeat('celery_periodic')
hb.clear_last_seen()
with assert_raises(HeartbeatNeverRecorded):
hb.get_last_seen()
with assert_raises(HeartbeatNeverRecorded):
hb.get_blockage_duration()
seen_time = datetime.datetime.utcnow()
with freeze_time(seen_time):
hb.mark_seen()
eq(hb.get_last_seen(), seen_time)
eq(hb.get_blockage_duration(), datetime.timedelta(seconds=0))
with freeze_time(seen_time + datetime.timedelta(minutes=10)):
eq(hb.get_last_seen(), seen_time)
eq(hb.get_blockage_duration(), datetime.timedelta(minutes=10) - HEARTBEAT_FREQUENCY)
def test_time_to_start_timer():
task_id = 'abc123'
delay = datetime.timedelta(seconds=6)
start_time = datetime.datetime.utcnow()
# starts empty
eq(TimeToStartTimer(task_id).stop_and_pop_timing(), None)
with freeze_time(start_time):
TimeToStartTimer(task_id).start_timing()
with freeze_time(start_time + delay):
time_to_start = TimeToStartTimer(task_id).stop_and_pop_timing()
eq(time_to_start, delay)
# can only pop once, second time empty
eq(TimeToStartTimer(task_id).stop_and_pop_timing(), None)
|
fa4e6e849eaff2611a5d978c7f7727a16a8c301e
|
daedalus/attacks/sample_attack.py
|
daedalus/attacks/sample_attack.py
|
# This file should serve as a template
# We will be importing all such files into daedalus from which any attack can be then called with required input
###########################################################################
# attack(input={})
# This function will be called from with daedalus.py
# along with the required input.
# inputs: A dictionary of parameters containing information
# about the public key, private key and any other user provided
# information necessary for execution.
# returns: A dictionary consisting of two keys:-
# errors: It should be an array of strings containing all
# errors encountered.
# results: It should be a dictionary containing all the results that
# can be derived from the given data.
def attack(input={}, errors=[], results={}):
return {'errors': errors, 'results': results}
|
# This file should serve as a template
# We will be importing all such files into daedalus from which any attack can be then called with required input
###########################################################################
# attack(input={})
# This function will be called from with daedalus.py
# along with the required input.
# inputs: A dictionary of parameters containing information
# about the public key, private key and any other user provided
# information necessary for execution.
# returns: A dictionary consisting of two keys:-
# errors: It should be an array of strings containing all
# errors encountered.
# results: It should be a dictionary containing all the results that
# can be derived from the given data.
def attack(input={}):
return {'errors': errors, 'results': results}
|
Remove extra parameters to "attack()"
|
Remove extra parameters to "attack()"
The `results` and `errors` structures aren't needed as input parameters.
All we need to ensure is that these are returned by `attack()`.
|
Python
|
mit
|
IEEE-NITK/Daedalus,IEEE-NITK/Daedalus,chinmaydd/NITK_IEEE_SaS,IEEE-NITK/Daedalus
|
# This file should serve as a template
# We will be importing all such files into daedalus from which any attack can be then called with required input
###########################################################################
# attack(input={})
# This function will be called from with daedalus.py
# along with the required input.
# inputs: A dictionary of parameters containing information
# about the public key, private key and any other user provided
# information necessary for execution.
# returns: A dictionary consisting of two keys:-
# errors: It should be an array of strings containing all
# errors encountered.
# results: It should be a dictionary containing all the results that
# can be derived from the given data.
def attack(input={}, errors=[], results={}):
return {'errors': errors, 'results': results}Remove extra parameters to "attack()"
The `results` and `errors` structures aren't needed as input parameters.
All we need to ensure is that these are returned by `attack()`.
|
# This file should serve as a template
# We will be importing all such files into daedalus from which any attack can be then called with required input
###########################################################################
# attack(input={})
# This function will be called from with daedalus.py
# along with the required input.
# inputs: A dictionary of parameters containing information
# about the public key, private key and any other user provided
# information necessary for execution.
# returns: A dictionary consisting of two keys:-
# errors: It should be an array of strings containing all
# errors encountered.
# results: It should be a dictionary containing all the results that
# can be derived from the given data.
def attack(input={}):
return {'errors': errors, 'results': results}
|
<commit_before># This file should serve as a template
# We will be importing all such files into daedalus from which any attack can be then called with required input
###########################################################################
# attack(input={})
# This function will be called from with daedalus.py
# along with the required input.
# inputs: A dictionary of parameters containing information
# about the public key, private key and any other user provided
# information necessary for execution.
# returns: A dictionary consisting of two keys:-
# errors: It should be an array of strings containing all
# errors encountered.
# results: It should be a dictionary containing all the results that
# can be derived from the given data.
def attack(input={}, errors=[], results={}):
return {'errors': errors, 'results': results}<commit_msg>Remove extra parameters to "attack()"
The `results` and `errors` structures aren't needed as input parameters.
All we need to ensure is that these are returned by `attack()`.<commit_after>
|
# This file should serve as a template
# We will be importing all such files into daedalus from which any attack can be then called with required input
###########################################################################
# attack(input={})
# This function will be called from with daedalus.py
# along with the required input.
# inputs: A dictionary of parameters containing information
# about the public key, private key and any other user provided
# information necessary for execution.
# returns: A dictionary consisting of two keys:-
# errors: It should be an array of strings containing all
# errors encountered.
# results: It should be a dictionary containing all the results that
# can be derived from the given data.
def attack(input={}):
return {'errors': errors, 'results': results}
|
# This file should serve as a template
# We will be importing all such files into daedalus from which any attack can be then called with required input
###########################################################################
# attack(input={})
# This function will be called from with daedalus.py
# along with the required input.
# inputs: A dictionary of parameters containing information
# about the public key, private key and any other user provided
# information necessary for execution.
# returns: A dictionary consisting of two keys:-
# errors: It should be an array of strings containing all
# errors encountered.
# results: It should be a dictionary containing all the results that
# can be derived from the given data.
def attack(input={}, errors=[], results={}):
return {'errors': errors, 'results': results}Remove extra parameters to "attack()"
The `results` and `errors` structures aren't needed as input parameters.
All we need to ensure is that these are returned by `attack()`.# This file should serve as a template
# We will be importing all such files into daedalus from which any attack can be then called with required input
###########################################################################
# attack(input={})
# This function will be called from with daedalus.py
# along with the required input.
# inputs: A dictionary of parameters containing information
# about the public key, private key and any other user provided
# information necessary for execution.
# returns: A dictionary consisting of two keys:-
# errors: It should be an array of strings containing all
# errors encountered.
# results: It should be a dictionary containing all the results that
# can be derived from the given data.
def attack(input={}):
return {'errors': errors, 'results': results}
|
<commit_before># This file should serve as a template
# We will be importing all such files into daedalus from which any attack can be then called with required input
###########################################################################
# attack(input={})
# This function will be called from with daedalus.py
# along with the required input.
# inputs: A dictionary of parameters containing information
# about the public key, private key and any other user provided
# information necessary for execution.
# returns: A dictionary consisting of two keys:-
# errors: It should be an array of strings containing all
# errors encountered.
# results: It should be a dictionary containing all the results that
# can be derived from the given data.
def attack(input={}, errors=[], results={}):
return {'errors': errors, 'results': results}<commit_msg>Remove extra parameters to "attack()"
The `results` and `errors` structures aren't needed as input parameters.
All we need to ensure is that these are returned by `attack()`.<commit_after># This file should serve as a template
# We will be importing all such files into daedalus from which any attack can be then called with required input
###########################################################################
# attack(input={})
# This function will be called from with daedalus.py
# along with the required input.
# inputs: A dictionary of parameters containing information
# about the public key, private key and any other user provided
# information necessary for execution.
# returns: A dictionary consisting of two keys:-
# errors: It should be an array of strings containing all
# errors encountered.
# results: It should be a dictionary containing all the results that
# can be derived from the given data.
def attack(input={}):
return {'errors': errors, 'results': results}
|
66cc9d8c6f91378fadbbc3e40fe4397e43b7b757
|
mopidy/frontends/mpd/__init__.py
|
mopidy/frontends/mpd/__init__.py
|
import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.thread import MpdThread
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.thread = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.thread = MpdThread(self.core_queue)
self.thread.start()
def destroy(self):
"""Destroys the MPD server."""
self.thread.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
pass # Ignore messages for other frontends
|
import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.thread import MpdThread
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.thread = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.thread = MpdThread(self.core_queue)
self.thread.start()
def destroy(self):
"""Destroys the MPD server."""
self.thread.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
if 'reply_to' in message:
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
pass # Ignore messages for other frontends
|
Allow reply_to to not be set in messages to the MPD frontend
|
Allow reply_to to not be set in messages to the MPD frontend
|
Python
|
apache-2.0
|
vrs01/mopidy,dbrgn/mopidy,bencevans/mopidy,swak/mopidy,diandiankan/mopidy,tkem/mopidy,glogiotatidis/mopidy,priestd09/mopidy,dbrgn/mopidy,abarisain/mopidy,vrs01/mopidy,swak/mopidy,jmarsik/mopidy,ali/mopidy,vrs01/mopidy,priestd09/mopidy,SuperStarPL/mopidy,mopidy/mopidy,liamw9534/mopidy,pacificIT/mopidy,bencevans/mopidy,woutervanwijk/mopidy,diandiankan/mopidy,adamcik/mopidy,bacontext/mopidy,jodal/mopidy,SuperStarPL/mopidy,SuperStarPL/mopidy,pacificIT/mopidy,rawdlite/mopidy,glogiotatidis/mopidy,kingosticks/mopidy,swak/mopidy,jcass77/mopidy,dbrgn/mopidy,jodal/mopidy,jmarsik/mopidy,mopidy/mopidy,dbrgn/mopidy,mokieyue/mopidy,diandiankan/mopidy,SuperStarPL/mopidy,priestd09/mopidy,bacontext/mopidy,ZenithDK/mopidy,mokieyue/mopidy,glogiotatidis/mopidy,quartz55/mopidy,quartz55/mopidy,ali/mopidy,glogiotatidis/mopidy,hkariti/mopidy,rawdlite/mopidy,kingosticks/mopidy,rawdlite/mopidy,pacificIT/mopidy,ali/mopidy,tkem/mopidy,tkem/mopidy,ZenithDK/mopidy,kingosticks/mopidy,rawdlite/mopidy,hkariti/mopidy,quartz55/mopidy,bacontext/mopidy,ZenithDK/mopidy,bacontext/mopidy,adamcik/mopidy,adamcik/mopidy,mokieyue/mopidy,jcass77/mopidy,jmarsik/mopidy,ZenithDK/mopidy,liamw9534/mopidy,jcass77/mopidy,bencevans/mopidy,tkem/mopidy,swak/mopidy,hkariti/mopidy,ali/mopidy,pacificIT/mopidy,diandiankan/mopidy,vrs01/mopidy,jodal/mopidy,mokieyue/mopidy,abarisain/mopidy,woutervanwijk/mopidy,quartz55/mopidy,bencevans/mopidy,jmarsik/mopidy,hkariti/mopidy,mopidy/mopidy
|
import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.thread import MpdThread
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.thread = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.thread = MpdThread(self.core_queue)
self.thread.start()
def destroy(self):
"""Destroys the MPD server."""
self.thread.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
pass # Ignore messages for other frontends
Allow reply_to to not be set in messages to the MPD frontend
|
import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.thread import MpdThread
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.thread = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.thread = MpdThread(self.core_queue)
self.thread.start()
def destroy(self):
"""Destroys the MPD server."""
self.thread.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
if 'reply_to' in message:
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
pass # Ignore messages for other frontends
|
<commit_before>import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.thread import MpdThread
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.thread = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.thread = MpdThread(self.core_queue)
self.thread.start()
def destroy(self):
"""Destroys the MPD server."""
self.thread.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
pass # Ignore messages for other frontends
<commit_msg>Allow reply_to to not be set in messages to the MPD frontend<commit_after>
|
import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.thread import MpdThread
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.thread = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.thread = MpdThread(self.core_queue)
self.thread.start()
def destroy(self):
"""Destroys the MPD server."""
self.thread.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
if 'reply_to' in message:
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
pass # Ignore messages for other frontends
|
import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.thread import MpdThread
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.thread = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.thread = MpdThread(self.core_queue)
self.thread.start()
def destroy(self):
"""Destroys the MPD server."""
self.thread.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
pass # Ignore messages for other frontends
Allow reply_to to not be set in messages to the MPD frontendimport logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.thread import MpdThread
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.thread = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.thread = MpdThread(self.core_queue)
self.thread.start()
def destroy(self):
"""Destroys the MPD server."""
self.thread.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
if 'reply_to' in message:
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
pass # Ignore messages for other frontends
|
<commit_before>import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.thread import MpdThread
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.thread = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.thread = MpdThread(self.core_queue)
self.thread.start()
def destroy(self):
"""Destroys the MPD server."""
self.thread.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
pass # Ignore messages for other frontends
<commit_msg>Allow reply_to to not be set in messages to the MPD frontend<commit_after>import logging
from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.thread import MpdThread
from mopidy.utils.process import unpickle_connection
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(BaseFrontend):
"""
The MPD frontend.
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
"""
def __init__(self, *args, **kwargs):
super(MpdFrontend, self).__init__(*args, **kwargs)
self.thread = None
self.dispatcher = MpdDispatcher(self.backend)
def start(self):
"""Starts the MPD server."""
self.thread = MpdThread(self.core_queue)
self.thread.start()
def destroy(self):
"""Destroys the MPD server."""
self.thread.destroy()
def process_message(self, message):
"""
Processes messages with the MPD frontend as destination.
:param message: the message
:type message: dict
"""
assert message['to'] == 'frontend', \
u'Message recipient must be "frontend".'
if message['command'] == 'mpd_request':
response = self.dispatcher.handle_request(message['request'])
if 'reply_to' in message:
connection = unpickle_connection(message['reply_to'])
connection.send(response)
else:
pass # Ignore messages for other frontends
|
98bd24100097473ac771dd08b19640f30970a62d
|
chainerrl/explorers/additive_gaussian.py
|
chainerrl/explorers/additive_gaussian.py
|
from __future__ import division
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
import numpy as np
from chainerrl import explorer
class AdditiveGaussian(explorer.Explorer):
"""Additive Gaussian noise to actions.
Each action must be numpy.ndarray.
Args:
scale (float or array_like of floats): Scale parameter.
"""
def __init__(self, scale, low=None, high=None):
self.scale = scale
self.low = low
self.high = high
def select_action(self, t, greedy_action_func, action_value=None):
a = greedy_action_func()
noise = np.random.normal(
scale=self.scale, size=a.shape).astype(np.float32)
if self.low is not None or self.high is not None:
return np.clip(a + noise, self.low, self.high)
else:
return a + noise
def __repr__(self):
return 'AdditiveGaussian(scale={})'.format(self.scale)
|
from __future__ import division
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
import numpy as np
from chainerrl import explorer
class AdditiveGaussian(explorer.Explorer):
"""Additive Gaussian noise to actions.
Each action must be numpy.ndarray.
Args:
scale (float or array_like of floats): Scale parameter.
low (float, array_like of floats, or None): Lower bound of action
space used to clip an action after adding a noise. If set to None,
clipping is not performed on lower edge.
high (float, array_like of floats, or None): Higher bound of action
space used to clip an action after adding a noise. If set to None,
clipping is not performed on upper edge.
"""
def __init__(self, scale, low=None, high=None):
self.scale = scale
self.low = low
self.high = high
def select_action(self, t, greedy_action_func, action_value=None):
a = greedy_action_func()
noise = np.random.normal(
scale=self.scale, size=a.shape).astype(np.float32)
if self.low is not None or self.high is not None:
return np.clip(a + noise, self.low, self.high)
else:
return a + noise
def __repr__(self):
return 'AdditiveGaussian(scale={}, low={}, high={})'.format(
self.scale, self.low, self.high)
|
Add low and high to docstring and __repr__
|
Add low and high to docstring and __repr__
|
Python
|
mit
|
toslunar/chainerrl,toslunar/chainerrl
|
from __future__ import division
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
import numpy as np
from chainerrl import explorer
class AdditiveGaussian(explorer.Explorer):
"""Additive Gaussian noise to actions.
Each action must be numpy.ndarray.
Args:
scale (float or array_like of floats): Scale parameter.
"""
def __init__(self, scale, low=None, high=None):
self.scale = scale
self.low = low
self.high = high
def select_action(self, t, greedy_action_func, action_value=None):
a = greedy_action_func()
noise = np.random.normal(
scale=self.scale, size=a.shape).astype(np.float32)
if self.low is not None or self.high is not None:
return np.clip(a + noise, self.low, self.high)
else:
return a + noise
def __repr__(self):
return 'AdditiveGaussian(scale={})'.format(self.scale)
Add low and high to docstring and __repr__
|
from __future__ import division
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
import numpy as np
from chainerrl import explorer
class AdditiveGaussian(explorer.Explorer):
"""Additive Gaussian noise to actions.
Each action must be numpy.ndarray.
Args:
scale (float or array_like of floats): Scale parameter.
low (float, array_like of floats, or None): Lower bound of action
space used to clip an action after adding a noise. If set to None,
clipping is not performed on lower edge.
high (float, array_like of floats, or None): Higher bound of action
space used to clip an action after adding a noise. If set to None,
clipping is not performed on upper edge.
"""
def __init__(self, scale, low=None, high=None):
self.scale = scale
self.low = low
self.high = high
def select_action(self, t, greedy_action_func, action_value=None):
a = greedy_action_func()
noise = np.random.normal(
scale=self.scale, size=a.shape).astype(np.float32)
if self.low is not None or self.high is not None:
return np.clip(a + noise, self.low, self.high)
else:
return a + noise
def __repr__(self):
return 'AdditiveGaussian(scale={}, low={}, high={})'.format(
self.scale, self.low, self.high)
|
<commit_before>from __future__ import division
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
import numpy as np
from chainerrl import explorer
class AdditiveGaussian(explorer.Explorer):
"""Additive Gaussian noise to actions.
Each action must be numpy.ndarray.
Args:
scale (float or array_like of floats): Scale parameter.
"""
def __init__(self, scale, low=None, high=None):
self.scale = scale
self.low = low
self.high = high
def select_action(self, t, greedy_action_func, action_value=None):
a = greedy_action_func()
noise = np.random.normal(
scale=self.scale, size=a.shape).astype(np.float32)
if self.low is not None or self.high is not None:
return np.clip(a + noise, self.low, self.high)
else:
return a + noise
def __repr__(self):
return 'AdditiveGaussian(scale={})'.format(self.scale)
<commit_msg>Add low and high to docstring and __repr__<commit_after>
|
from __future__ import division
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
import numpy as np
from chainerrl import explorer
class AdditiveGaussian(explorer.Explorer):
"""Additive Gaussian noise to actions.
Each action must be numpy.ndarray.
Args:
scale (float or array_like of floats): Scale parameter.
low (float, array_like of floats, or None): Lower bound of action
space used to clip an action after adding a noise. If set to None,
clipping is not performed on lower edge.
high (float, array_like of floats, or None): Higher bound of action
space used to clip an action after adding a noise. If set to None,
clipping is not performed on upper edge.
"""
def __init__(self, scale, low=None, high=None):
self.scale = scale
self.low = low
self.high = high
def select_action(self, t, greedy_action_func, action_value=None):
a = greedy_action_func()
noise = np.random.normal(
scale=self.scale, size=a.shape).astype(np.float32)
if self.low is not None or self.high is not None:
return np.clip(a + noise, self.low, self.high)
else:
return a + noise
def __repr__(self):
return 'AdditiveGaussian(scale={}, low={}, high={})'.format(
self.scale, self.low, self.high)
|
from __future__ import division
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
import numpy as np
from chainerrl import explorer
class AdditiveGaussian(explorer.Explorer):
"""Additive Gaussian noise to actions.
Each action must be numpy.ndarray.
Args:
scale (float or array_like of floats): Scale parameter.
"""
def __init__(self, scale, low=None, high=None):
self.scale = scale
self.low = low
self.high = high
def select_action(self, t, greedy_action_func, action_value=None):
a = greedy_action_func()
noise = np.random.normal(
scale=self.scale, size=a.shape).astype(np.float32)
if self.low is not None or self.high is not None:
return np.clip(a + noise, self.low, self.high)
else:
return a + noise
def __repr__(self):
return 'AdditiveGaussian(scale={})'.format(self.scale)
Add low and high to docstring and __repr__from __future__ import division
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
import numpy as np
from chainerrl import explorer
class AdditiveGaussian(explorer.Explorer):
"""Additive Gaussian noise to actions.
Each action must be numpy.ndarray.
Args:
scale (float or array_like of floats): Scale parameter.
low (float, array_like of floats, or None): Lower bound of action
space used to clip an action after adding a noise. If set to None,
clipping is not performed on lower edge.
high (float, array_like of floats, or None): Higher bound of action
space used to clip an action after adding a noise. If set to None,
clipping is not performed on upper edge.
"""
def __init__(self, scale, low=None, high=None):
self.scale = scale
self.low = low
self.high = high
def select_action(self, t, greedy_action_func, action_value=None):
a = greedy_action_func()
noise = np.random.normal(
scale=self.scale, size=a.shape).astype(np.float32)
if self.low is not None or self.high is not None:
return np.clip(a + noise, self.low, self.high)
else:
return a + noise
def __repr__(self):
return 'AdditiveGaussian(scale={}, low={}, high={})'.format(
self.scale, self.low, self.high)
|
<commit_before>from __future__ import division
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
import numpy as np
from chainerrl import explorer
class AdditiveGaussian(explorer.Explorer):
"""Additive Gaussian noise to actions.
Each action must be numpy.ndarray.
Args:
scale (float or array_like of floats): Scale parameter.
"""
def __init__(self, scale, low=None, high=None):
self.scale = scale
self.low = low
self.high = high
def select_action(self, t, greedy_action_func, action_value=None):
a = greedy_action_func()
noise = np.random.normal(
scale=self.scale, size=a.shape).astype(np.float32)
if self.low is not None or self.high is not None:
return np.clip(a + noise, self.low, self.high)
else:
return a + noise
def __repr__(self):
return 'AdditiveGaussian(scale={})'.format(self.scale)
<commit_msg>Add low and high to docstring and __repr__<commit_after>from __future__ import division
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
import numpy as np
from chainerrl import explorer
class AdditiveGaussian(explorer.Explorer):
"""Additive Gaussian noise to actions.
Each action must be numpy.ndarray.
Args:
scale (float or array_like of floats): Scale parameter.
low (float, array_like of floats, or None): Lower bound of action
space used to clip an action after adding a noise. If set to None,
clipping is not performed on lower edge.
high (float, array_like of floats, or None): Higher bound of action
space used to clip an action after adding a noise. If set to None,
clipping is not performed on upper edge.
"""
def __init__(self, scale, low=None, high=None):
self.scale = scale
self.low = low
self.high = high
def select_action(self, t, greedy_action_func, action_value=None):
a = greedy_action_func()
noise = np.random.normal(
scale=self.scale, size=a.shape).astype(np.float32)
if self.low is not None or self.high is not None:
return np.clip(a + noise, self.low, self.high)
else:
return a + noise
def __repr__(self):
return 'AdditiveGaussian(scale={}, low={}, high={})'.format(
self.scale, self.low, self.high)
|
39a534d380afea37231cc0c2ca4c8a742354d6e1
|
app.py
|
app.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask import Flask, render_template
from sh import git
app = Flask(__name__)
version = git("rev-parse", "--short", "HEAD").strip()
@app.route("/", methods=["GET"])
def status():
"""
Status check. Display the current version of heatlamp, some basic
diagnostics, and a simple form that may be used to manually trigger
a deployment.
"""
return render_template("status.html", version=version)
@app.route("/", methods=["POST", "PUT"])
def refresh():
"""
Webhook accepted. Perform the configured action.
"""
return "yes"
if __name__ == "__main__":
app.run(host='0.0.0.0', port=10100)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
from flask import Flask, render_template
from sh import git
app = Flask(__name__)
version = git("rev-parse", "--short", "HEAD").strip()
command = os.getenv("HEATLAMP_COMMAND")
def validate():
"""
Validate the application configuration before launching.
"""
missing = []
if not command:
missing.append((
"HEATLAMP_COMMAND",
"The command to execute when a webhook is triggered."
))
if missing:
print("Missing required configuration values:\n", file=sys.stderr)
for envvar, purpose in missing:
print(" {}: {}".format(envvar, purpose), file=sys.stderr)
print(file=sys.stderr)
sys.exit(1)
validate()
@app.route("/", methods=["GET"])
def status():
"""
Status check. Display the current version of heatlamp, some basic
diagnostics, and a simple form that may be used to manually trigger
a deployment.
"""
return render_template("status.html", version=version)
@app.route("/", methods=["POST", "PUT"])
def refresh():
"""
Webhook accepted. Perform the configured action.
"""
return "yes"
if __name__ == "__main__":
app.run(host='0.0.0.0', port=10100)
|
Configure the command that's executed.
|
Configure the command that's executed.
|
Python
|
mit
|
heatlamp/heatlamp-core,heatlamp/heatlamp-core
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask import Flask, render_template
from sh import git
app = Flask(__name__)
version = git("rev-parse", "--short", "HEAD").strip()
@app.route("/", methods=["GET"])
def status():
"""
Status check. Display the current version of heatlamp, some basic
diagnostics, and a simple form that may be used to manually trigger
a deployment.
"""
return render_template("status.html", version=version)
@app.route("/", methods=["POST", "PUT"])
def refresh():
"""
Webhook accepted. Perform the configured action.
"""
return "yes"
if __name__ == "__main__":
app.run(host='0.0.0.0', port=10100)
Configure the command that's executed.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
from flask import Flask, render_template
from sh import git
app = Flask(__name__)
version = git("rev-parse", "--short", "HEAD").strip()
command = os.getenv("HEATLAMP_COMMAND")
def validate():
"""
Validate the application configuration before launching.
"""
missing = []
if not command:
missing.append((
"HEATLAMP_COMMAND",
"The command to execute when a webhook is triggered."
))
if missing:
print("Missing required configuration values:\n", file=sys.stderr)
for envvar, purpose in missing:
print(" {}: {}".format(envvar, purpose), file=sys.stderr)
print(file=sys.stderr)
sys.exit(1)
validate()
@app.route("/", methods=["GET"])
def status():
"""
Status check. Display the current version of heatlamp, some basic
diagnostics, and a simple form that may be used to manually trigger
a deployment.
"""
return render_template("status.html", version=version)
@app.route("/", methods=["POST", "PUT"])
def refresh():
"""
Webhook accepted. Perform the configured action.
"""
return "yes"
if __name__ == "__main__":
app.run(host='0.0.0.0', port=10100)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask import Flask, render_template
from sh import git
app = Flask(__name__)
version = git("rev-parse", "--short", "HEAD").strip()
@app.route("/", methods=["GET"])
def status():
"""
Status check. Display the current version of heatlamp, some basic
diagnostics, and a simple form that may be used to manually trigger
a deployment.
"""
return render_template("status.html", version=version)
@app.route("/", methods=["POST", "PUT"])
def refresh():
"""
Webhook accepted. Perform the configured action.
"""
return "yes"
if __name__ == "__main__":
app.run(host='0.0.0.0', port=10100)
<commit_msg>Configure the command that's executed.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
from flask import Flask, render_template
from sh import git
app = Flask(__name__)
version = git("rev-parse", "--short", "HEAD").strip()
command = os.getenv("HEATLAMP_COMMAND")
def validate():
"""
Validate the application configuration before launching.
"""
missing = []
if not command:
missing.append((
"HEATLAMP_COMMAND",
"The command to execute when a webhook is triggered."
))
if missing:
print("Missing required configuration values:\n", file=sys.stderr)
for envvar, purpose in missing:
print(" {}: {}".format(envvar, purpose), file=sys.stderr)
print(file=sys.stderr)
sys.exit(1)
validate()
@app.route("/", methods=["GET"])
def status():
"""
Status check. Display the current version of heatlamp, some basic
diagnostics, and a simple form that may be used to manually trigger
a deployment.
"""
return render_template("status.html", version=version)
@app.route("/", methods=["POST", "PUT"])
def refresh():
"""
Webhook accepted. Perform the configured action.
"""
return "yes"
if __name__ == "__main__":
app.run(host='0.0.0.0', port=10100)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask import Flask, render_template
from sh import git
app = Flask(__name__)
version = git("rev-parse", "--short", "HEAD").strip()
@app.route("/", methods=["GET"])
def status():
"""
Status check. Display the current version of heatlamp, some basic
diagnostics, and a simple form that may be used to manually trigger
a deployment.
"""
return render_template("status.html", version=version)
@app.route("/", methods=["POST", "PUT"])
def refresh():
"""
Webhook accepted. Perform the configured action.
"""
return "yes"
if __name__ == "__main__":
app.run(host='0.0.0.0', port=10100)
Configure the command that's executed.#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
from flask import Flask, render_template
from sh import git
app = Flask(__name__)
version = git("rev-parse", "--short", "HEAD").strip()
command = os.getenv("HEATLAMP_COMMAND")
def validate():
"""
Validate the application configuration before launching.
"""
missing = []
if not command:
missing.append((
"HEATLAMP_COMMAND",
"The command to execute when a webhook is triggered."
))
if missing:
print("Missing required configuration values:\n", file=sys.stderr)
for envvar, purpose in missing:
print(" {}: {}".format(envvar, purpose), file=sys.stderr)
print(file=sys.stderr)
sys.exit(1)
validate()
@app.route("/", methods=["GET"])
def status():
"""
Status check. Display the current version of heatlamp, some basic
diagnostics, and a simple form that may be used to manually trigger
a deployment.
"""
return render_template("status.html", version=version)
@app.route("/", methods=["POST", "PUT"])
def refresh():
"""
Webhook accepted. Perform the configured action.
"""
return "yes"
if __name__ == "__main__":
app.run(host='0.0.0.0', port=10100)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask import Flask, render_template
from sh import git
app = Flask(__name__)
version = git("rev-parse", "--short", "HEAD").strip()
@app.route("/", methods=["GET"])
def status():
"""
Status check. Display the current version of heatlamp, some basic
diagnostics, and a simple form that may be used to manually trigger
a deployment.
"""
return render_template("status.html", version=version)
@app.route("/", methods=["POST", "PUT"])
def refresh():
"""
Webhook accepted. Perform the configured action.
"""
return "yes"
if __name__ == "__main__":
app.run(host='0.0.0.0', port=10100)
<commit_msg>Configure the command that's executed.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
from flask import Flask, render_template
from sh import git
app = Flask(__name__)
version = git("rev-parse", "--short", "HEAD").strip()
command = os.getenv("HEATLAMP_COMMAND")
def validate():
"""
Validate the application configuration before launching.
"""
missing = []
if not command:
missing.append((
"HEATLAMP_COMMAND",
"The command to execute when a webhook is triggered."
))
if missing:
print("Missing required configuration values:\n", file=sys.stderr)
for envvar, purpose in missing:
print(" {}: {}".format(envvar, purpose), file=sys.stderr)
print(file=sys.stderr)
sys.exit(1)
validate()
@app.route("/", methods=["GET"])
def status():
"""
Status check. Display the current version of heatlamp, some basic
diagnostics, and a simple form that may be used to manually trigger
a deployment.
"""
return render_template("status.html", version=version)
@app.route("/", methods=["POST", "PUT"])
def refresh():
"""
Webhook accepted. Perform the configured action.
"""
return "yes"
if __name__ == "__main__":
app.run(host='0.0.0.0', port=10100)
|
603fccbbdda5fa45dcc84421901fec085fffcb81
|
test/test_general.py
|
test/test_general.py
|
import hive
import threading
import time
import sys
import worker
# import pash from another directory
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
data = ["iscsiadm -m discovery -t st -p 192.168.88.110 -I default","iscsiadm -m discovery -t st -p 192.168.90.110 -I iface1","iscsiadm -m discovery -t st -p 192.168.88.110 -I iface0"]
apiary.instruct_queen('A1',data, ErrWorker)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
print i
sys.exit(0)
|
import threading
import time
import sys
from busybees import worker
from busybees import hive
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
data = ["iscsiadm -m discovery -t st -p 192.168.88.110 -I default","iscsiadm -m discovery -t st -p 192.168.90.110 -I iface1","iscsiadm -m discovery -t st -p 192.168.88.110 -I iface0"]
apiary.instruct_queen('A1',data, ErrWorker)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
print i
sys.exit(0)
|
Change imports to work with new scheme
|
Change imports to work with new scheme
|
Python
|
bsd-3-clause
|
iansmcf/busybees
|
import hive
import threading
import time
import sys
import worker
# import pash from another directory
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
data = ["iscsiadm -m discovery -t st -p 192.168.88.110 -I default","iscsiadm -m discovery -t st -p 192.168.90.110 -I iface1","iscsiadm -m discovery -t st -p 192.168.88.110 -I iface0"]
apiary.instruct_queen('A1',data, ErrWorker)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
print i
sys.exit(0)
Change imports to work with new scheme
|
import threading
import time
import sys
from busybees import worker
from busybees import hive
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
data = ["iscsiadm -m discovery -t st -p 192.168.88.110 -I default","iscsiadm -m discovery -t st -p 192.168.90.110 -I iface1","iscsiadm -m discovery -t st -p 192.168.88.110 -I iface0"]
apiary.instruct_queen('A1',data, ErrWorker)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
print i
sys.exit(0)
|
<commit_before>import hive
import threading
import time
import sys
import worker
# import pash from another directory
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
data = ["iscsiadm -m discovery -t st -p 192.168.88.110 -I default","iscsiadm -m discovery -t st -p 192.168.90.110 -I iface1","iscsiadm -m discovery -t st -p 192.168.88.110 -I iface0"]
apiary.instruct_queen('A1',data, ErrWorker)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
print i
sys.exit(0)
<commit_msg>Change imports to work with new scheme<commit_after>
|
import threading
import time
import sys
from busybees import worker
from busybees import hive
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
data = ["iscsiadm -m discovery -t st -p 192.168.88.110 -I default","iscsiadm -m discovery -t st -p 192.168.90.110 -I iface1","iscsiadm -m discovery -t st -p 192.168.88.110 -I iface0"]
apiary.instruct_queen('A1',data, ErrWorker)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
print i
sys.exit(0)
|
import hive
import threading
import time
import sys
import worker
# import pash from another directory
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
data = ["iscsiadm -m discovery -t st -p 192.168.88.110 -I default","iscsiadm -m discovery -t st -p 192.168.90.110 -I iface1","iscsiadm -m discovery -t st -p 192.168.88.110 -I iface0"]
apiary.instruct_queen('A1',data, ErrWorker)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
print i
sys.exit(0)
Change imports to work with new schemeimport threading
import time
import sys
from busybees import worker
from busybees import hive
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
data = ["iscsiadm -m discovery -t st -p 192.168.88.110 -I default","iscsiadm -m discovery -t st -p 192.168.90.110 -I iface1","iscsiadm -m discovery -t st -p 192.168.88.110 -I iface0"]
apiary.instruct_queen('A1',data, ErrWorker)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
print i
sys.exit(0)
|
<commit_before>import hive
import threading
import time
import sys
import worker
# import pash from another directory
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
data = ["iscsiadm -m discovery -t st -p 192.168.88.110 -I default","iscsiadm -m discovery -t st -p 192.168.90.110 -I iface1","iscsiadm -m discovery -t st -p 192.168.88.110 -I iface0"]
apiary.instruct_queen('A1',data, ErrWorker)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
print i
sys.exit(0)
<commit_msg>Change imports to work with new scheme<commit_after>import threading
import time
import sys
from busybees import worker
from busybees import hive
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
data = ["iscsiadm -m discovery -t st -p 192.168.88.110 -I default","iscsiadm -m discovery -t st -p 192.168.90.110 -I iface1","iscsiadm -m discovery -t st -p 192.168.88.110 -I iface0"]
apiary.instruct_queen('A1',data, ErrWorker)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
print i
sys.exit(0)
|
abb23c47f503197e005637ce220a07975dc01094
|
recipes/spyder-line-profiler/run_test.py
|
recipes/spyder-line-profiler/run_test.py
|
from xvfbwrapper import Xvfb
vdisplay = Xvfb()
vdisplay.start()
import spyder_line_profiler
vdisplay.stop()
|
"""
Test whether spyder_line_profiler is installed
The test is only whether the module can be found. It does not attempt
to import the module because this needs an X server.
"""
import imp
imp.find_module('spyder_line_profiler')
|
Use imp.find_module in test for spyder-line-profiler
|
Use imp.find_module in test for spyder-line-profiler
|
Python
|
bsd-3-clause
|
jjhelmus/staged-recipes,igortg/staged-recipes,petrushy/staged-recipes,Cashalow/staged-recipes,patricksnape/staged-recipes,conda-forge/staged-recipes,NOAA-ORR-ERD/staged-recipes,petrushy/staged-recipes,synapticarbors/staged-recipes,grlee77/staged-recipes,larray-project/staged-recipes,shadowwalkersb/staged-recipes,patricksnape/staged-recipes,planetarypy/staged-recipes,basnijholt/staged-recipes,SylvainCorlay/staged-recipes,chrisburr/staged-recipes,hadim/staged-recipes,goanpeca/staged-recipes,guillochon/staged-recipes,chohner/staged-recipes,NOAA-ORR-ERD/staged-recipes,benvandyke/staged-recipes,larray-project/staged-recipes,gqmelo/staged-recipes,Cashalow/staged-recipes,chrisburr/staged-recipes,rvalieris/staged-recipes,birdsarah/staged-recipes,kwilcox/staged-recipes,mcs07/staged-recipes,hadim/staged-recipes,johanneskoester/staged-recipes,ocefpaf/staged-recipes,glemaitre/staged-recipes,mcs07/staged-recipes,koverholt/staged-recipes,igortg/staged-recipes,grlee77/staged-recipes,rmcgibbo/staged-recipes,JohnGreeley/staged-recipes,basnijholt/staged-recipes,SylvainCorlay/staged-recipes,chohner/staged-recipes,gqmelo/staged-recipes,dschreij/staged-recipes,jjhelmus/staged-recipes,rvalieris/staged-recipes,pmlandwehr/staged-recipes,jochym/staged-recipes,JohnGreeley/staged-recipes,mariusvniekerk/staged-recipes,birdsarah/staged-recipes,isuruf/staged-recipes,scopatz/staged-recipes,jakirkham/staged-recipes,Juanlu001/staged-recipes,sodre/staged-recipes,barkls/staged-recipes,johanneskoester/staged-recipes,ReimarBauer/staged-recipes,benvandyke/staged-recipes,guillochon/staged-recipes,conda-forge/staged-recipes,asmeurer/staged-recipes,koverholt/staged-recipes,sannykr/staged-recipes,ceholden/staged-recipes,stuertz/staged-recipes,cpaulik/staged-recipes,blowekamp/staged-recipes,dschreij/staged-recipes,ReimarBauer/staged-recipes,pmlandwehr/staged-recipes,sannykr/staged-recipes,ocefpaf/staged-recipes,goanpeca/staged-recipes,jochym/staged-recipes,mariusvniekerk/staged-recipes,isuruf/staged-recipes,
ceholden/staged-recipes,blowekamp/staged-recipes,kwilcox/staged-recipes,Juanlu001/staged-recipes,scopatz/staged-recipes,rmcgibbo/staged-recipes,barkls/staged-recipes,shadowwalkersb/staged-recipes,sodre/staged-recipes,jakirkham/staged-recipes,planetarypy/staged-recipes,sodre/staged-recipes,asmeurer/staged-recipes,synapticarbors/staged-recipes,stuertz/staged-recipes,cpaulik/staged-recipes,glemaitre/staged-recipes
|
from xvfbwrapper import Xvfb
vdisplay = Xvfb()
vdisplay.start()
import spyder_line_profiler
vdisplay.stop()
Use imp.find_module in test for spyder-line-profiler
|
"""
Test whether spyder_line_profiler is installed
The test is only whether the module can be found. It does not attempt
to import the module because this needs an X server.
"""
import imp
imp.find_module('spyder_line_profiler')
|
<commit_before>from xvfbwrapper import Xvfb
vdisplay = Xvfb()
vdisplay.start()
import spyder_line_profiler
vdisplay.stop()
<commit_msg>Use imp.find_module in test for spyder-line-profiler<commit_after>
|
"""
Test whether spyder_line_profiler is installed
The test is only whether the module can be found. It does not attempt
to import the module because this needs an X server.
"""
import imp
imp.find_module('spyder_line_profiler')
|
from xvfbwrapper import Xvfb
vdisplay = Xvfb()
vdisplay.start()
import spyder_line_profiler
vdisplay.stop()
Use imp.find_module in test for spyder-line-profiler"""
Test whether spyder_line_profiler is installed
The test is only whether the module can be found. It does not attempt
to import the module because this needs an X server.
"""
import imp
imp.find_module('spyder_line_profiler')
|
<commit_before>from xvfbwrapper import Xvfb
vdisplay = Xvfb()
vdisplay.start()
import spyder_line_profiler
vdisplay.stop()
<commit_msg>Use imp.find_module in test for spyder-line-profiler<commit_after>"""
Test whether spyder_line_profiler is installed
The test is only whether the module can be found. It does not attempt
to import the module because this needs an X server.
"""
import imp
imp.find_module('spyder_line_profiler')
|
323e24e86943fd00fc09799361c86bec6383a210
|
test/tst_filepath.py
|
test/tst_filepath.py
|
import os, sys, shutil
import tempfile
import unittest
import netCDF4
class test_filepath(unittest.TestCase):
def setUp(self):
self.netcdf_file = os.path.join(os.getcwd(), "netcdf_dummy_file.nc")
self.nc = netCDF4.Dataset(self.netcdf_file)
def test_filepath(self):
assert self.nc.filepath() == str(self.netcdf_file)
def test_filepath_with_non_ascii_characters(self):
# create nc-file in a filepath using a cp1252 string
tmpdir = tempfile.mkdtemp()
filepath = os.path.join(tmpdir,b'Pl\xc3\xb6n.nc'.decode('cp1252'))
nc = netCDF4.Dataset(filepath,'w',encoding='cp1252')
filepatho = nc.filepath(encoding='cp1252')
assert filepath == filepatho
assert filepath.encode('cp1252') == filepatho.encode('cp1252')
nc.close()
shutil.rmtree(tmpdir)
def test_no_such_file_raises(self):
fname = 'not_a_nc_file.nc'
with self.assertRaisesRegexp(IOError, fname):
netCDF4.Dataset(fname, 'r')
if __name__ == '__main__':
unittest.main()
|
import os, sys, shutil
import tempfile
import unittest
import netCDF4
class test_filepath(unittest.TestCase):
def setUp(self):
self.netcdf_file = os.path.join(os.getcwd(), "netcdf_dummy_file.nc")
self.nc = netCDF4.Dataset(self.netcdf_file)
def test_filepath(self):
assert self.nc.filepath() == str(self.netcdf_file)
def test_filepath_with_non_ascii_characters(self):
# create nc-file in a filepath using a cp1252 string
tmpdir = tempfile.mkdtemp()
filepath = os.path.join(tmpdir,b'Pl\xc3\xb6n.nc'.decode('cp1252'))
nc = netCDF4.Dataset(filepath,'w',encoding='cp1252')
filepatho = nc.filepath(encoding='cp1252')
assert filepath == filepatho
assert filepath.encode('cp1252') == filepatho.encode('cp1252')
nc.close()
shutil.rmtree(tmpdir)
def test_no_such_file_raises(self):
fname = 'not_a_nc_file.nc'
with self.assertRaisesRegex(IOError, fname):
netCDF4.Dataset(fname, 'r')
if __name__ == '__main__':
unittest.main()
|
Use assertRaisesRegex instead of assertRaisesRegexp for Python 3.11 compatibility.
|
Use assertRaisesRegex instead of assertRaisesRegexp for Python 3.11 compatibility.
|
Python
|
mit
|
Unidata/netcdf4-python,Unidata/netcdf4-python,Unidata/netcdf4-python
|
import os, sys, shutil
import tempfile
import unittest
import netCDF4
class test_filepath(unittest.TestCase):
def setUp(self):
self.netcdf_file = os.path.join(os.getcwd(), "netcdf_dummy_file.nc")
self.nc = netCDF4.Dataset(self.netcdf_file)
def test_filepath(self):
assert self.nc.filepath() == str(self.netcdf_file)
def test_filepath_with_non_ascii_characters(self):
# create nc-file in a filepath using a cp1252 string
tmpdir = tempfile.mkdtemp()
filepath = os.path.join(tmpdir,b'Pl\xc3\xb6n.nc'.decode('cp1252'))
nc = netCDF4.Dataset(filepath,'w',encoding='cp1252')
filepatho = nc.filepath(encoding='cp1252')
assert filepath == filepatho
assert filepath.encode('cp1252') == filepatho.encode('cp1252')
nc.close()
shutil.rmtree(tmpdir)
def test_no_such_file_raises(self):
fname = 'not_a_nc_file.nc'
with self.assertRaisesRegexp(IOError, fname):
netCDF4.Dataset(fname, 'r')
if __name__ == '__main__':
unittest.main()
Use assertRaisesRegex instead of assertRaisesRegexp for Python 3.11 compatibility.
|
import os, sys, shutil
import tempfile
import unittest
import netCDF4
class test_filepath(unittest.TestCase):
def setUp(self):
self.netcdf_file = os.path.join(os.getcwd(), "netcdf_dummy_file.nc")
self.nc = netCDF4.Dataset(self.netcdf_file)
def test_filepath(self):
assert self.nc.filepath() == str(self.netcdf_file)
def test_filepath_with_non_ascii_characters(self):
# create nc-file in a filepath using a cp1252 string
tmpdir = tempfile.mkdtemp()
filepath = os.path.join(tmpdir,b'Pl\xc3\xb6n.nc'.decode('cp1252'))
nc = netCDF4.Dataset(filepath,'w',encoding='cp1252')
filepatho = nc.filepath(encoding='cp1252')
assert filepath == filepatho
assert filepath.encode('cp1252') == filepatho.encode('cp1252')
nc.close()
shutil.rmtree(tmpdir)
def test_no_such_file_raises(self):
fname = 'not_a_nc_file.nc'
with self.assertRaisesRegex(IOError, fname):
netCDF4.Dataset(fname, 'r')
if __name__ == '__main__':
unittest.main()
|
<commit_before>import os, sys, shutil
import tempfile
import unittest
import netCDF4
class test_filepath(unittest.TestCase):
def setUp(self):
self.netcdf_file = os.path.join(os.getcwd(), "netcdf_dummy_file.nc")
self.nc = netCDF4.Dataset(self.netcdf_file)
def test_filepath(self):
assert self.nc.filepath() == str(self.netcdf_file)
def test_filepath_with_non_ascii_characters(self):
# create nc-file in a filepath using a cp1252 string
tmpdir = tempfile.mkdtemp()
filepath = os.path.join(tmpdir,b'Pl\xc3\xb6n.nc'.decode('cp1252'))
nc = netCDF4.Dataset(filepath,'w',encoding='cp1252')
filepatho = nc.filepath(encoding='cp1252')
assert filepath == filepatho
assert filepath.encode('cp1252') == filepatho.encode('cp1252')
nc.close()
shutil.rmtree(tmpdir)
def test_no_such_file_raises(self):
fname = 'not_a_nc_file.nc'
with self.assertRaisesRegexp(IOError, fname):
netCDF4.Dataset(fname, 'r')
if __name__ == '__main__':
unittest.main()
<commit_msg>Use assertRaisesRegex instead of assertRaisesRegexp for Python 3.11 compatibility.<commit_after>
|
import os, sys, shutil
import tempfile
import unittest
import netCDF4
class test_filepath(unittest.TestCase):
def setUp(self):
self.netcdf_file = os.path.join(os.getcwd(), "netcdf_dummy_file.nc")
self.nc = netCDF4.Dataset(self.netcdf_file)
def test_filepath(self):
assert self.nc.filepath() == str(self.netcdf_file)
def test_filepath_with_non_ascii_characters(self):
# create nc-file in a filepath using a cp1252 string
tmpdir = tempfile.mkdtemp()
filepath = os.path.join(tmpdir,b'Pl\xc3\xb6n.nc'.decode('cp1252'))
nc = netCDF4.Dataset(filepath,'w',encoding='cp1252')
filepatho = nc.filepath(encoding='cp1252')
assert filepath == filepatho
assert filepath.encode('cp1252') == filepatho.encode('cp1252')
nc.close()
shutil.rmtree(tmpdir)
def test_no_such_file_raises(self):
fname = 'not_a_nc_file.nc'
with self.assertRaisesRegex(IOError, fname):
netCDF4.Dataset(fname, 'r')
if __name__ == '__main__':
unittest.main()
|
import os, sys, shutil
import tempfile
import unittest
import netCDF4
class test_filepath(unittest.TestCase):
def setUp(self):
self.netcdf_file = os.path.join(os.getcwd(), "netcdf_dummy_file.nc")
self.nc = netCDF4.Dataset(self.netcdf_file)
def test_filepath(self):
assert self.nc.filepath() == str(self.netcdf_file)
def test_filepath_with_non_ascii_characters(self):
# create nc-file in a filepath using a cp1252 string
tmpdir = tempfile.mkdtemp()
filepath = os.path.join(tmpdir,b'Pl\xc3\xb6n.nc'.decode('cp1252'))
nc = netCDF4.Dataset(filepath,'w',encoding='cp1252')
filepatho = nc.filepath(encoding='cp1252')
assert filepath == filepatho
assert filepath.encode('cp1252') == filepatho.encode('cp1252')
nc.close()
shutil.rmtree(tmpdir)
def test_no_such_file_raises(self):
fname = 'not_a_nc_file.nc'
with self.assertRaisesRegexp(IOError, fname):
netCDF4.Dataset(fname, 'r')
if __name__ == '__main__':
unittest.main()
Use assertRaisesRegex instead of assertRaisesRegexp for Python 3.11 compatibility.import os, sys, shutil
import tempfile
import unittest
import netCDF4
class test_filepath(unittest.TestCase):
def setUp(self):
self.netcdf_file = os.path.join(os.getcwd(), "netcdf_dummy_file.nc")
self.nc = netCDF4.Dataset(self.netcdf_file)
def test_filepath(self):
assert self.nc.filepath() == str(self.netcdf_file)
def test_filepath_with_non_ascii_characters(self):
# create nc-file in a filepath using a cp1252 string
tmpdir = tempfile.mkdtemp()
filepath = os.path.join(tmpdir,b'Pl\xc3\xb6n.nc'.decode('cp1252'))
nc = netCDF4.Dataset(filepath,'w',encoding='cp1252')
filepatho = nc.filepath(encoding='cp1252')
assert filepath == filepatho
assert filepath.encode('cp1252') == filepatho.encode('cp1252')
nc.close()
shutil.rmtree(tmpdir)
def test_no_such_file_raises(self):
fname = 'not_a_nc_file.nc'
with self.assertRaisesRegex(IOError, fname):
netCDF4.Dataset(fname, 'r')
if __name__ == '__main__':
unittest.main()
|
<commit_before>import os, sys, shutil
import tempfile
import unittest
import netCDF4
class test_filepath(unittest.TestCase):
def setUp(self):
self.netcdf_file = os.path.join(os.getcwd(), "netcdf_dummy_file.nc")
self.nc = netCDF4.Dataset(self.netcdf_file)
def test_filepath(self):
assert self.nc.filepath() == str(self.netcdf_file)
def test_filepath_with_non_ascii_characters(self):
# create nc-file in a filepath using a cp1252 string
tmpdir = tempfile.mkdtemp()
filepath = os.path.join(tmpdir,b'Pl\xc3\xb6n.nc'.decode('cp1252'))
nc = netCDF4.Dataset(filepath,'w',encoding='cp1252')
filepatho = nc.filepath(encoding='cp1252')
assert filepath == filepatho
assert filepath.encode('cp1252') == filepatho.encode('cp1252')
nc.close()
shutil.rmtree(tmpdir)
def test_no_such_file_raises(self):
fname = 'not_a_nc_file.nc'
with self.assertRaisesRegexp(IOError, fname):
netCDF4.Dataset(fname, 'r')
if __name__ == '__main__':
unittest.main()
<commit_msg>Use assertRaisesRegex instead of assertRaisesRegexp for Python 3.11 compatibility.<commit_after>import os, sys, shutil
import tempfile
import unittest
import netCDF4
class test_filepath(unittest.TestCase):
def setUp(self):
self.netcdf_file = os.path.join(os.getcwd(), "netcdf_dummy_file.nc")
self.nc = netCDF4.Dataset(self.netcdf_file)
def test_filepath(self):
assert self.nc.filepath() == str(self.netcdf_file)
def test_filepath_with_non_ascii_characters(self):
# create nc-file in a filepath using a cp1252 string
tmpdir = tempfile.mkdtemp()
filepath = os.path.join(tmpdir,b'Pl\xc3\xb6n.nc'.decode('cp1252'))
nc = netCDF4.Dataset(filepath,'w',encoding='cp1252')
filepatho = nc.filepath(encoding='cp1252')
assert filepath == filepatho
assert filepath.encode('cp1252') == filepatho.encode('cp1252')
nc.close()
shutil.rmtree(tmpdir)
def test_no_such_file_raises(self):
fname = 'not_a_nc_file.nc'
with self.assertRaisesRegex(IOError, fname):
netCDF4.Dataset(fname, 'r')
if __name__ == '__main__':
unittest.main()
|
4ff1eb00f8e212d280ac858feb4efcc795d97d80
|
tests/test_models.py
|
tests/test_models.py
|
import pytest
from suddendev.models import GameController
def test_create_game(session):
pass
|
import pytest
from suddendev.models import GameSetup
def test_create_game(session):
game_setup = GameSetup('ASDF')
assert game_setup.player_count == 1
|
Fix broken import in model tests.
|
[NG] Fix broken import in model tests.
|
Python
|
mit
|
SuddenDevs/SuddenDev,SuddenDevs/SuddenDev,SuddenDevs/SuddenDev,SuddenDevs/SuddenDev
|
import pytest
from suddendev.models import GameController
def test_create_game(session):
pass
[NG] Fix broken import in model tests.
|
import pytest
from suddendev.models import GameSetup
def test_create_game(session):
game_setup = GameSetup('ASDF')
assert game_setup.player_count == 1
|
<commit_before>import pytest
from suddendev.models import GameController
def test_create_game(session):
pass
<commit_msg>[NG] Fix broken import in model tests.<commit_after>
|
import pytest
from suddendev.models import GameSetup
def test_create_game(session):
game_setup = GameSetup('ASDF')
assert game_setup.player_count == 1
|
import pytest
from suddendev.models import GameController
def test_create_game(session):
pass
[NG] Fix broken import in model tests.import pytest
from suddendev.models import GameSetup
def test_create_game(session):
game_setup = GameSetup('ASDF')
assert game_setup.player_count == 1
|
<commit_before>import pytest
from suddendev.models import GameController
def test_create_game(session):
pass
<commit_msg>[NG] Fix broken import in model tests.<commit_after>import pytest
from suddendev.models import GameSetup
def test_create_game(session):
game_setup = GameSetup('ASDF')
assert game_setup.player_count == 1
|
5b046f74c794737b9f1b9534ce0d9f635fe31210
|
record.py
|
record.py
|
#!/usr/bin/env python
# record.py - List a calling context tree.
import argparse
from cct import CCT
import json
from lldbRecorder import lldbRecorder
def main():
parser = argparse.ArgumentParser(description='Record a calling context tree.')
parser.add_argument('executable', help='Executable to run (any additional arguments are forwarded to this executable)')
parser.add_argument('-p', '--pid', help='Process id')
parser.add_argument('-m', '--module', help='Filter by module')
parser.add_argument('-f', '--function', help='Filter for calls made in a specific function')
args, leftoverArgs = parser.parse_known_args()
result = None
if (args.pid):
result = lldbRecorder(args.executable).attachToProcessThenRecord(args.pid, args.module, args.function)
else:
result = lldbRecorder(args.executable).launchProcessThenRecord(leftoverArgs, args.module, args.function)
if result:
# Serialize the result if it is a CCT.
if isinstance(result, CCT):
result = result.asJson(2)
print result
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
# record.py - List a calling context tree.
#
# FIXME(phil): Switch to a kernel-level function tracing (dtrace, utrace/systemtap, etc.) over LLDB.
# Kernel hooks are difficult to use for reliably recording all function calls in complex codebases
# due to inlining, RVO, etc (see [1]). Relying on LLDB's complex source mapping logic is slow but
# fairly reliable, and is cross-platform.
# [1] https://github.com/progers/cctdb/blob/b08176b9f24c95a96ff6a22a6e63d176cc0916ae/dtrace.py
import argparse
from cct import CCT
import json
from lldbRecorder import lldbRecorder
def main():
parser = argparse.ArgumentParser(description='Record a calling context tree.')
parser.add_argument('executable', help='Executable to run (any additional arguments are forwarded to this executable)')
parser.add_argument('-p', '--pid', help='Process id')
parser.add_argument('-m', '--module', help='Filter by module')
parser.add_argument('-f', '--function', help='Filter for calls made in a specific function')
args, leftoverArgs = parser.parse_known_args()
result = None
if (args.pid):
result = lldbRecorder(args.executable).attachToProcessThenRecord(args.pid, args.module, args.function)
else:
result = lldbRecorder(args.executable).launchProcessThenRecord(leftoverArgs, args.module, args.function)
if result:
# Serialize the result if it is a CCT.
if isinstance(result, CCT):
result = result.asJson(2)
print result
if __name__ == "__main__":
main()
|
Add note about why lldbRecorder is used, and a FIXME to use kernel hooks in the future
|
Add note about why lldbRecorder is used, and a FIXME to use kernel hooks in the future
|
Python
|
apache-2.0
|
progers/cctdb,progers/cctdb
|
#!/usr/bin/env python
# record.py - List a calling context tree.
import argparse
from cct import CCT
import json
from lldbRecorder import lldbRecorder
def main():
parser = argparse.ArgumentParser(description='Record a calling context tree.')
parser.add_argument('executable', help='Executable to run (any additional arguments are forwarded to this executable)')
parser.add_argument('-p', '--pid', help='Process id')
parser.add_argument('-m', '--module', help='Filter by module')
parser.add_argument('-f', '--function', help='Filter for calls made in a specific function')
args, leftoverArgs = parser.parse_known_args()
result = None
if (args.pid):
result = lldbRecorder(args.executable).attachToProcessThenRecord(args.pid, args.module, args.function)
else:
result = lldbRecorder(args.executable).launchProcessThenRecord(leftoverArgs, args.module, args.function)
if result:
# Serialize the result if it is a CCT.
if isinstance(result, CCT):
result = result.asJson(2)
print result
if __name__ == "__main__":
main()
Add note about why lldbRecorder is used, and a FIXME to use kernel hooks in the future
|
#!/usr/bin/env python
# record.py - List a calling context tree.
#
# FIXME(phil): Switch to a kernel-level function tracing (dtrace, utrace/systemtap, etc.) over LLDB.
# Kernel hooks are difficult to use for reliably recording all function calls in complex codebases
# due to inlining, RVO, etc (see [1]). Relying on LLDB's complex source mapping logic is slow but
# fairly reliable, and is cross-platform.
# [1] https://github.com/progers/cctdb/blob/b08176b9f24c95a96ff6a22a6e63d176cc0916ae/dtrace.py
import argparse
from cct import CCT
import json
from lldbRecorder import lldbRecorder
def main():
parser = argparse.ArgumentParser(description='Record a calling context tree.')
parser.add_argument('executable', help='Executable to run (any additional arguments are forwarded to this executable)')
parser.add_argument('-p', '--pid', help='Process id')
parser.add_argument('-m', '--module', help='Filter by module')
parser.add_argument('-f', '--function', help='Filter for calls made in a specific function')
args, leftoverArgs = parser.parse_known_args()
result = None
if (args.pid):
result = lldbRecorder(args.executable).attachToProcessThenRecord(args.pid, args.module, args.function)
else:
result = lldbRecorder(args.executable).launchProcessThenRecord(leftoverArgs, args.module, args.function)
if result:
# Serialize the result if it is a CCT.
if isinstance(result, CCT):
result = result.asJson(2)
print result
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
# record.py - List a calling context tree.
import argparse
from cct import CCT
import json
from lldbRecorder import lldbRecorder
def main():
parser = argparse.ArgumentParser(description='Record a calling context tree.')
parser.add_argument('executable', help='Executable to run (any additional arguments are forwarded to this executable)')
parser.add_argument('-p', '--pid', help='Process id')
parser.add_argument('-m', '--module', help='Filter by module')
parser.add_argument('-f', '--function', help='Filter for calls made in a specific function')
args, leftoverArgs = parser.parse_known_args()
result = None
if (args.pid):
result = lldbRecorder(args.executable).attachToProcessThenRecord(args.pid, args.module, args.function)
else:
result = lldbRecorder(args.executable).launchProcessThenRecord(leftoverArgs, args.module, args.function)
if result:
# Serialize the result if it is a CCT.
if isinstance(result, CCT):
result = result.asJson(2)
print result
if __name__ == "__main__":
main()
<commit_msg>Add note about why lldbRecorder is used, and a FIXME to use kernel hooks in the future<commit_after>
|
#!/usr/bin/env python
# record.py - List a calling context tree.
#
# FIXME(phil): Switch to a kernel-level function tracing (dtrace, utrace/systemtap, etc.) over LLDB.
# Kernel hooks are difficult to use for reliably recording all function calls in complex codebases
# due to inlining, RVO, etc (see [1]). Relying on LLDB's complex source mapping logic is slow but
# fairly reliable, and is cross-platform.
# [1] https://github.com/progers/cctdb/blob/b08176b9f24c95a96ff6a22a6e63d176cc0916ae/dtrace.py
import argparse
from cct import CCT
import json
from lldbRecorder import lldbRecorder
def main():
parser = argparse.ArgumentParser(description='Record a calling context tree.')
parser.add_argument('executable', help='Executable to run (any additional arguments are forwarded to this executable)')
parser.add_argument('-p', '--pid', help='Process id')
parser.add_argument('-m', '--module', help='Filter by module')
parser.add_argument('-f', '--function', help='Filter for calls made in a specific function')
args, leftoverArgs = parser.parse_known_args()
result = None
if (args.pid):
result = lldbRecorder(args.executable).attachToProcessThenRecord(args.pid, args.module, args.function)
else:
result = lldbRecorder(args.executable).launchProcessThenRecord(leftoverArgs, args.module, args.function)
if result:
# Serialize the result if it is a CCT.
if isinstance(result, CCT):
result = result.asJson(2)
print result
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
# record.py - List a calling context tree.
import argparse
from cct import CCT
import json
from lldbRecorder import lldbRecorder
def main():
parser = argparse.ArgumentParser(description='Record a calling context tree.')
parser.add_argument('executable', help='Executable to run (any additional arguments are forwarded to this executable)')
parser.add_argument('-p', '--pid', help='Process id')
parser.add_argument('-m', '--module', help='Filter by module')
parser.add_argument('-f', '--function', help='Filter for calls made in a specific function')
args, leftoverArgs = parser.parse_known_args()
result = None
if (args.pid):
result = lldbRecorder(args.executable).attachToProcessThenRecord(args.pid, args.module, args.function)
else:
result = lldbRecorder(args.executable).launchProcessThenRecord(leftoverArgs, args.module, args.function)
if result:
# Serialize the result if it is a CCT.
if isinstance(result, CCT):
result = result.asJson(2)
print result
if __name__ == "__main__":
main()
Add note about why lldbRecorder is used, and a FIXME to use kernel hooks in the future#!/usr/bin/env python
# record.py - List a calling context tree.
#
# FIXME(phil): Switch to a kernel-level function tracing (dtrace, utrace/systemtap, etc.) over LLDB.
# Kernel hooks are difficult to use for reliably recording all function calls in complex codebases
# due to inlining, RVO, etc (see [1]). Relying on LLDB's complex source mapping logic is slow but
# fairly reliable, and is cross-platform.
# [1] https://github.com/progers/cctdb/blob/b08176b9f24c95a96ff6a22a6e63d176cc0916ae/dtrace.py
import argparse
from cct import CCT
import json
from lldbRecorder import lldbRecorder
def main():
parser = argparse.ArgumentParser(description='Record a calling context tree.')
parser.add_argument('executable', help='Executable to run (any additional arguments are forwarded to this executable)')
parser.add_argument('-p', '--pid', help='Process id')
parser.add_argument('-m', '--module', help='Filter by module')
parser.add_argument('-f', '--function', help='Filter for calls made in a specific function')
args, leftoverArgs = parser.parse_known_args()
result = None
if (args.pid):
result = lldbRecorder(args.executable).attachToProcessThenRecord(args.pid, args.module, args.function)
else:
result = lldbRecorder(args.executable).launchProcessThenRecord(leftoverArgs, args.module, args.function)
if result:
# Serialize the result if it is a CCT.
if isinstance(result, CCT):
result = result.asJson(2)
print result
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
# record.py - List a calling context tree.
import argparse
from cct import CCT
import json
from lldbRecorder import lldbRecorder
def main():
parser = argparse.ArgumentParser(description='Record a calling context tree.')
parser.add_argument('executable', help='Executable to run (any additional arguments are forwarded to this executable)')
parser.add_argument('-p', '--pid', help='Process id')
parser.add_argument('-m', '--module', help='Filter by module')
parser.add_argument('-f', '--function', help='Filter for calls made in a specific function')
args, leftoverArgs = parser.parse_known_args()
result = None
if (args.pid):
result = lldbRecorder(args.executable).attachToProcessThenRecord(args.pid, args.module, args.function)
else:
result = lldbRecorder(args.executable).launchProcessThenRecord(leftoverArgs, args.module, args.function)
if result:
# Serialize the result if it is a CCT.
if isinstance(result, CCT):
result = result.asJson(2)
print result
if __name__ == "__main__":
main()
<commit_msg>Add note about why lldbRecorder is used, and a FIXME to use kernel hooks in the future<commit_after>#!/usr/bin/env python
# record.py - List a calling context tree.
#
# FIXME(phil): Switch to a kernel-level function tracing (dtrace, utrace/systemtap, etc.) over LLDB.
# Kernel hooks are difficult to use for reliably recording all function calls in complex codebases
# due to inlining, RVO, etc (see [1]). Relying on LLDB's complex source mapping logic is slow but
# fairly reliable, and is cross-platform.
# [1] https://github.com/progers/cctdb/blob/b08176b9f24c95a96ff6a22a6e63d176cc0916ae/dtrace.py
import argparse
from cct import CCT
import json
from lldbRecorder import lldbRecorder
def main():
parser = argparse.ArgumentParser(description='Record a calling context tree.')
parser.add_argument('executable', help='Executable to run (any additional arguments are forwarded to this executable)')
parser.add_argument('-p', '--pid', help='Process id')
parser.add_argument('-m', '--module', help='Filter by module')
parser.add_argument('-f', '--function', help='Filter for calls made in a specific function')
args, leftoverArgs = parser.parse_known_args()
result = None
if (args.pid):
result = lldbRecorder(args.executable).attachToProcessThenRecord(args.pid, args.module, args.function)
else:
result = lldbRecorder(args.executable).launchProcessThenRecord(leftoverArgs, args.module, args.function)
if result:
# Serialize the result if it is a CCT.
if isinstance(result, CCT):
result = result.asJson(2)
print result
if __name__ == "__main__":
main()
|
8b9124dc957b1ee1626dd227f6f709b8700dfdb8
|
OneDriveUploader/config.py
|
OneDriveUploader/config.py
|
from os.path import join, dirname
from dotenv import load_dotenv, find_dotenv
dotenv_path = join(dirname(__file__), '.env')
load_dotenv(find_dotenv())
client = dict(id = '8dfcc6ca-304f-4351-a2c9-299e72eb8605')
urls = dict(redirect = 'http://localhost:8080',
discovery = 'https://api.office.com/discovery/',
auth_server = 'https://login.microsoftonline.com/common/oauth2/authorize',
auth_token = 'https://login.microsoftonline.com/common/oauth2/token')
|
from os.path import join, dirname
from dotenv import load_dotenv, find_dotenv
dotenv_path = join(dirname(__file__), '.env')
load_dotenv(find_dotenv())
client = dict(id = '8dfcc6ca-304f-4351-a2c9-299e72eb8605')
urls = dict(redirect = 'http://localhost:8081',
discovery = 'https://api.office.com/discovery/',
auth_server = 'https://login.microsoftonline.com/common/oauth2/authorize',
auth_token = 'https://login.microsoftonline.com/common/oauth2/token')
|
Change local web server's URL
|
Change local web server's URL
|
Python
|
mit
|
SimeoneVilardo/OneDriveUploader
|
from os.path import join, dirname
from dotenv import load_dotenv, find_dotenv
dotenv_path = join(dirname(__file__), '.env')
load_dotenv(find_dotenv())
client = dict(id = '8dfcc6ca-304f-4351-a2c9-299e72eb8605')
urls = dict(redirect = 'http://localhost:8080',
discovery = 'https://api.office.com/discovery/',
auth_server = 'https://login.microsoftonline.com/common/oauth2/authorize',
auth_token = 'https://login.microsoftonline.com/common/oauth2/token')Change local web server's URL
|
from os.path import join, dirname
from dotenv import load_dotenv, find_dotenv
dotenv_path = join(dirname(__file__), '.env')
load_dotenv(find_dotenv())
client = dict(id = '8dfcc6ca-304f-4351-a2c9-299e72eb8605')
urls = dict(redirect = 'http://localhost:8081',
discovery = 'https://api.office.com/discovery/',
auth_server = 'https://login.microsoftonline.com/common/oauth2/authorize',
auth_token = 'https://login.microsoftonline.com/common/oauth2/token')
|
<commit_before>from os.path import join, dirname
from dotenv import load_dotenv, find_dotenv
dotenv_path = join(dirname(__file__), '.env')
load_dotenv(find_dotenv())
client = dict(id = '8dfcc6ca-304f-4351-a2c9-299e72eb8605')
urls = dict(redirect = 'http://localhost:8080',
discovery = 'https://api.office.com/discovery/',
auth_server = 'https://login.microsoftonline.com/common/oauth2/authorize',
auth_token = 'https://login.microsoftonline.com/common/oauth2/token')<commit_msg>Change local web server's URL<commit_after>
|
from os.path import join, dirname
from dotenv import load_dotenv, find_dotenv
dotenv_path = join(dirname(__file__), '.env')
load_dotenv(find_dotenv())
client = dict(id = '8dfcc6ca-304f-4351-a2c9-299e72eb8605')
urls = dict(redirect = 'http://localhost:8081',
discovery = 'https://api.office.com/discovery/',
auth_server = 'https://login.microsoftonline.com/common/oauth2/authorize',
auth_token = 'https://login.microsoftonline.com/common/oauth2/token')
|
from os.path import join, dirname
from dotenv import load_dotenv, find_dotenv
dotenv_path = join(dirname(__file__), '.env')
load_dotenv(find_dotenv())
client = dict(id = '8dfcc6ca-304f-4351-a2c9-299e72eb8605')
urls = dict(redirect = 'http://localhost:8080',
discovery = 'https://api.office.com/discovery/',
auth_server = 'https://login.microsoftonline.com/common/oauth2/authorize',
auth_token = 'https://login.microsoftonline.com/common/oauth2/token')Change local web server's URLfrom os.path import join, dirname
from dotenv import load_dotenv, find_dotenv
dotenv_path = join(dirname(__file__), '.env')
load_dotenv(find_dotenv())
client = dict(id = '8dfcc6ca-304f-4351-a2c9-299e72eb8605')
urls = dict(redirect = 'http://localhost:8081',
discovery = 'https://api.office.com/discovery/',
auth_server = 'https://login.microsoftonline.com/common/oauth2/authorize',
auth_token = 'https://login.microsoftonline.com/common/oauth2/token')
|
<commit_before>from os.path import join, dirname
from dotenv import load_dotenv, find_dotenv
dotenv_path = join(dirname(__file__), '.env')
load_dotenv(find_dotenv())
client = dict(id = '8dfcc6ca-304f-4351-a2c9-299e72eb8605')
urls = dict(redirect = 'http://localhost:8080',
discovery = 'https://api.office.com/discovery/',
auth_server = 'https://login.microsoftonline.com/common/oauth2/authorize',
auth_token = 'https://login.microsoftonline.com/common/oauth2/token')<commit_msg>Change local web server's URL<commit_after>from os.path import join, dirname
from dotenv import load_dotenv, find_dotenv
dotenv_path = join(dirname(__file__), '.env')
load_dotenv(find_dotenv())
client = dict(id = '8dfcc6ca-304f-4351-a2c9-299e72eb8605')
urls = dict(redirect = 'http://localhost:8081',
discovery = 'https://api.office.com/discovery/',
auth_server = 'https://login.microsoftonline.com/common/oauth2/authorize',
auth_token = 'https://login.microsoftonline.com/common/oauth2/token')
|
9e413449f6f85e0cf9465762e31e8f251e14c23e
|
spacy/tests/regression/test_issue1537.py
|
spacy/tests/regression/test_issue1537.py
|
'''Test Span.as_doc() doesn't segfault'''
from ...tokens import Doc
from ...vocab import Vocab
from ... import load as load_spacy
def test_issue1537():
string = 'The sky is blue . The man is pink . The dog is purple .'
doc = Doc(Vocab(), words=string.split())
doc[0].sent_start = True
for word in doc[1:]:
if word.nbor(-1).text == '.':
word.sent_start = True
else:
word.sent_start = False
sents = list(doc.sents)
sent0 = sents[0].as_doc()
sent1 = sents[1].as_doc()
assert isinstance(sent0, Doc)
assert isinstance(sent1, Doc)
# Currently segfaulting, due to l_edge and r_edge misalignment
#def test_issue1537_model():
# nlp = load_spacy('en')
# doc = nlp(u'The sky is blue. The man is pink. The dog is purple.')
# sents = [s.as_doc() for s in doc.sents]
# print(list(sents[0].noun_chunks))
# print(list(sents[1].noun_chunks))
|
'''Test Span.as_doc() doesn't segfault'''
from __future__ import unicode_literals
from ...tokens import Doc
from ...vocab import Vocab
from ... import load as load_spacy
def test_issue1537():
string = 'The sky is blue . The man is pink . The dog is purple .'
doc = Doc(Vocab(), words=string.split())
doc[0].sent_start = True
for word in doc[1:]:
if word.nbor(-1).text == '.':
word.sent_start = True
else:
word.sent_start = False
sents = list(doc.sents)
sent0 = sents[0].as_doc()
sent1 = sents[1].as_doc()
assert isinstance(sent0, Doc)
assert isinstance(sent1, Doc)
# Currently segfaulting, due to l_edge and r_edge misalignment
#def test_issue1537_model():
# nlp = load_spacy('en')
# doc = nlp(u'The sky is blue. The man is pink. The dog is purple.')
# sents = [s.as_doc() for s in doc.sents]
# print(list(sents[0].noun_chunks))
# print(list(sents[1].noun_chunks))
|
Fix unicode error in new test
|
Fix unicode error in new test
|
Python
|
mit
|
spacy-io/spaCy,explosion/spaCy,explosion/spaCy,spacy-io/spaCy,recognai/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,honnibal/spaCy,honnibal/spaCy,aikramer2/spaCy,recognai/spaCy,honnibal/spaCy,recognai/spaCy,explosion/spaCy,recognai/spaCy,aikramer2/spaCy,recognai/spaCy,aikramer2/spaCy,honnibal/spaCy,aikramer2/spaCy,spacy-io/spaCy,spacy-io/spaCy,aikramer2/spaCy,aikramer2/spaCy,spacy-io/spaCy,explosion/spaCy,recognai/spaCy
|
'''Test Span.as_doc() doesn't segfault'''
from ...tokens import Doc
from ...vocab import Vocab
from ... import load as load_spacy
def test_issue1537():
string = 'The sky is blue . The man is pink . The dog is purple .'
doc = Doc(Vocab(), words=string.split())
doc[0].sent_start = True
for word in doc[1:]:
if word.nbor(-1).text == '.':
word.sent_start = True
else:
word.sent_start = False
sents = list(doc.sents)
sent0 = sents[0].as_doc()
sent1 = sents[1].as_doc()
assert isinstance(sent0, Doc)
assert isinstance(sent1, Doc)
# Currently segfaulting, due to l_edge and r_edge misalignment
#def test_issue1537_model():
# nlp = load_spacy('en')
# doc = nlp(u'The sky is blue. The man is pink. The dog is purple.')
# sents = [s.as_doc() for s in doc.sents]
# print(list(sents[0].noun_chunks))
# print(list(sents[1].noun_chunks))
Fix unicode error in new test
|
'''Test Span.as_doc() doesn't segfault'''
from __future__ import unicode_literals
from ...tokens import Doc
from ...vocab import Vocab
from ... import load as load_spacy
def test_issue1537():
string = 'The sky is blue . The man is pink . The dog is purple .'
doc = Doc(Vocab(), words=string.split())
doc[0].sent_start = True
for word in doc[1:]:
if word.nbor(-1).text == '.':
word.sent_start = True
else:
word.sent_start = False
sents = list(doc.sents)
sent0 = sents[0].as_doc()
sent1 = sents[1].as_doc()
assert isinstance(sent0, Doc)
assert isinstance(sent1, Doc)
# Currently segfaulting, due to l_edge and r_edge misalignment
#def test_issue1537_model():
# nlp = load_spacy('en')
# doc = nlp(u'The sky is blue. The man is pink. The dog is purple.')
# sents = [s.as_doc() for s in doc.sents]
# print(list(sents[0].noun_chunks))
# print(list(sents[1].noun_chunks))
|
<commit_before>'''Test Span.as_doc() doesn't segfault'''
from ...tokens import Doc
from ...vocab import Vocab
from ... import load as load_spacy
def test_issue1537():
string = 'The sky is blue . The man is pink . The dog is purple .'
doc = Doc(Vocab(), words=string.split())
doc[0].sent_start = True
for word in doc[1:]:
if word.nbor(-1).text == '.':
word.sent_start = True
else:
word.sent_start = False
sents = list(doc.sents)
sent0 = sents[0].as_doc()
sent1 = sents[1].as_doc()
assert isinstance(sent0, Doc)
assert isinstance(sent1, Doc)
# Currently segfaulting, due to l_edge and r_edge misalignment
#def test_issue1537_model():
# nlp = load_spacy('en')
# doc = nlp(u'The sky is blue. The man is pink. The dog is purple.')
# sents = [s.as_doc() for s in doc.sents]
# print(list(sents[0].noun_chunks))
# print(list(sents[1].noun_chunks))
<commit_msg>Fix unicode error in new test<commit_after>
|
'''Test Span.as_doc() doesn't segfault'''
from __future__ import unicode_literals
from ...tokens import Doc
from ...vocab import Vocab
from ... import load as load_spacy
def test_issue1537():
string = 'The sky is blue . The man is pink . The dog is purple .'
doc = Doc(Vocab(), words=string.split())
doc[0].sent_start = True
for word in doc[1:]:
if word.nbor(-1).text == '.':
word.sent_start = True
else:
word.sent_start = False
sents = list(doc.sents)
sent0 = sents[0].as_doc()
sent1 = sents[1].as_doc()
assert isinstance(sent0, Doc)
assert isinstance(sent1, Doc)
# Currently segfaulting, due to l_edge and r_edge misalignment
#def test_issue1537_model():
# nlp = load_spacy('en')
# doc = nlp(u'The sky is blue. The man is pink. The dog is purple.')
# sents = [s.as_doc() for s in doc.sents]
# print(list(sents[0].noun_chunks))
# print(list(sents[1].noun_chunks))
|
'''Test Span.as_doc() doesn't segfault'''
from ...tokens import Doc
from ...vocab import Vocab
from ... import load as load_spacy
def test_issue1537():
string = 'The sky is blue . The man is pink . The dog is purple .'
doc = Doc(Vocab(), words=string.split())
doc[0].sent_start = True
for word in doc[1:]:
if word.nbor(-1).text == '.':
word.sent_start = True
else:
word.sent_start = False
sents = list(doc.sents)
sent0 = sents[0].as_doc()
sent1 = sents[1].as_doc()
assert isinstance(sent0, Doc)
assert isinstance(sent1, Doc)
# Currently segfaulting, due to l_edge and r_edge misalignment
#def test_issue1537_model():
# nlp = load_spacy('en')
# doc = nlp(u'The sky is blue. The man is pink. The dog is purple.')
# sents = [s.as_doc() for s in doc.sents]
# print(list(sents[0].noun_chunks))
# print(list(sents[1].noun_chunks))
Fix unicode error in new test'''Test Span.as_doc() doesn't segfault'''
from __future__ import unicode_literals
from ...tokens import Doc
from ...vocab import Vocab
from ... import load as load_spacy
def test_issue1537():
string = 'The sky is blue . The man is pink . The dog is purple .'
doc = Doc(Vocab(), words=string.split())
doc[0].sent_start = True
for word in doc[1:]:
if word.nbor(-1).text == '.':
word.sent_start = True
else:
word.sent_start = False
sents = list(doc.sents)
sent0 = sents[0].as_doc()
sent1 = sents[1].as_doc()
assert isinstance(sent0, Doc)
assert isinstance(sent1, Doc)
# Currently segfaulting, due to l_edge and r_edge misalignment
#def test_issue1537_model():
# nlp = load_spacy('en')
# doc = nlp(u'The sky is blue. The man is pink. The dog is purple.')
# sents = [s.as_doc() for s in doc.sents]
# print(list(sents[0].noun_chunks))
# print(list(sents[1].noun_chunks))
|
<commit_before>'''Test Span.as_doc() doesn't segfault'''
from ...tokens import Doc
from ...vocab import Vocab
from ... import load as load_spacy
def test_issue1537():
string = 'The sky is blue . The man is pink . The dog is purple .'
doc = Doc(Vocab(), words=string.split())
doc[0].sent_start = True
for word in doc[1:]:
if word.nbor(-1).text == '.':
word.sent_start = True
else:
word.sent_start = False
sents = list(doc.sents)
sent0 = sents[0].as_doc()
sent1 = sents[1].as_doc()
assert isinstance(sent0, Doc)
assert isinstance(sent1, Doc)
# Currently segfaulting, due to l_edge and r_edge misalignment
#def test_issue1537_model():
# nlp = load_spacy('en')
# doc = nlp(u'The sky is blue. The man is pink. The dog is purple.')
# sents = [s.as_doc() for s in doc.sents]
# print(list(sents[0].noun_chunks))
# print(list(sents[1].noun_chunks))
<commit_msg>Fix unicode error in new test<commit_after>'''Test Span.as_doc() doesn't segfault'''
from __future__ import unicode_literals
from ...tokens import Doc
from ...vocab import Vocab
from ... import load as load_spacy
def test_issue1537():
string = 'The sky is blue . The man is pink . The dog is purple .'
doc = Doc(Vocab(), words=string.split())
doc[0].sent_start = True
for word in doc[1:]:
if word.nbor(-1).text == '.':
word.sent_start = True
else:
word.sent_start = False
sents = list(doc.sents)
sent0 = sents[0].as_doc()
sent1 = sents[1].as_doc()
assert isinstance(sent0, Doc)
assert isinstance(sent1, Doc)
# Currently segfaulting, due to l_edge and r_edge misalignment
#def test_issue1537_model():
# nlp = load_spacy('en')
# doc = nlp(u'The sky is blue. The man is pink. The dog is purple.')
# sents = [s.as_doc() for s in doc.sents]
# print(list(sents[0].noun_chunks))
# print(list(sents[1].noun_chunks))
|
df9dc6f613916cd96f626e2b337f8d9fe15bb864
|
tests/test_cayley_client.py
|
tests/test_cayley_client.py
|
from unittest import TestCase
from pyley import CayleyClient, GraphObject
class CayleyClientTests(TestCase):
def test_send(self):
client = CayleyClient()
g = GraphObject()
query = g.V().Has("name", "Casablanca") \
.Out("/film/film/starring") \
.Out("/film/performance/actor") \
.Out("name") \
.All()
response = client.Send(query)
print response.result
self.assertTrue(response.r.status_code == 200)
self.assertTrue(response.r is not None)
self.assertTrue(len(response.result) > 0)
|
from unittest import TestCase
import unittest
from pyley import CayleyClient, GraphObject
class CayleyClientTests(TestCase):
@unittest.skip('Disabled for now!')
def test_send(self):
client = CayleyClient()
g = GraphObject()
query = g.V().Has("name", "Casablanca") \
.Out("/film/film/starring") \
.Out("/film/performance/actor") \
.Out("name") \
.All()
response = client.Send(query)
print response.result
self.assertTrue(response.r.status_code == 200)
self.assertTrue(response.r is not None)
self.assertTrue(len(response.result) > 0)
|
Add skip attribute for CayleyClient send test.
|
Add skip attribute for CayleyClient send test.
|
Python
|
unlicense
|
ziyasal/pyley,ziyasal/pyley,joshainglis/pyley,joshainglis/pyley
|
from unittest import TestCase
from pyley import CayleyClient, GraphObject
class CayleyClientTests(TestCase):
def test_send(self):
client = CayleyClient()
g = GraphObject()
query = g.V().Has("name", "Casablanca") \
.Out("/film/film/starring") \
.Out("/film/performance/actor") \
.Out("name") \
.All()
response = client.Send(query)
print response.result
self.assertTrue(response.r.status_code == 200)
self.assertTrue(response.r is not None)
self.assertTrue(len(response.result) > 0)
Add skip attribute for CayleyClient send test.
|
from unittest import TestCase
import unittest
from pyley import CayleyClient, GraphObject
class CayleyClientTests(TestCase):
@unittest.skip('Disabled for now!')
def test_send(self):
client = CayleyClient()
g = GraphObject()
query = g.V().Has("name", "Casablanca") \
.Out("/film/film/starring") \
.Out("/film/performance/actor") \
.Out("name") \
.All()
response = client.Send(query)
print response.result
self.assertTrue(response.r.status_code == 200)
self.assertTrue(response.r is not None)
self.assertTrue(len(response.result) > 0)
|
<commit_before>from unittest import TestCase
from pyley import CayleyClient, GraphObject
class CayleyClientTests(TestCase):
def test_send(self):
client = CayleyClient()
g = GraphObject()
query = g.V().Has("name", "Casablanca") \
.Out("/film/film/starring") \
.Out("/film/performance/actor") \
.Out("name") \
.All()
response = client.Send(query)
print response.result
self.assertTrue(response.r.status_code == 200)
self.assertTrue(response.r is not None)
self.assertTrue(len(response.result) > 0)
<commit_msg>Add skip attribute for CayleyClient send test.<commit_after>
|
from unittest import TestCase
import unittest
from pyley import CayleyClient, GraphObject
class CayleyClientTests(TestCase):
@unittest.skip('Disabled for now!')
def test_send(self):
client = CayleyClient()
g = GraphObject()
query = g.V().Has("name", "Casablanca") \
.Out("/film/film/starring") \
.Out("/film/performance/actor") \
.Out("name") \
.All()
response = client.Send(query)
print response.result
self.assertTrue(response.r.status_code == 200)
self.assertTrue(response.r is not None)
self.assertTrue(len(response.result) > 0)
|
from unittest import TestCase
from pyley import CayleyClient, GraphObject
class CayleyClientTests(TestCase):
def test_send(self):
client = CayleyClient()
g = GraphObject()
query = g.V().Has("name", "Casablanca") \
.Out("/film/film/starring") \
.Out("/film/performance/actor") \
.Out("name") \
.All()
response = client.Send(query)
print response.result
self.assertTrue(response.r.status_code == 200)
self.assertTrue(response.r is not None)
self.assertTrue(len(response.result) > 0)
Add skip attribute for CayleyClient send test.from unittest import TestCase
import unittest
from pyley import CayleyClient, GraphObject
class CayleyClientTests(TestCase):
@unittest.skip('Disabled for now!')
def test_send(self):
client = CayleyClient()
g = GraphObject()
query = g.V().Has("name", "Casablanca") \
.Out("/film/film/starring") \
.Out("/film/performance/actor") \
.Out("name") \
.All()
response = client.Send(query)
print response.result
self.assertTrue(response.r.status_code == 200)
self.assertTrue(response.r is not None)
self.assertTrue(len(response.result) > 0)
|
<commit_before>from unittest import TestCase
from pyley import CayleyClient, GraphObject
class CayleyClientTests(TestCase):
def test_send(self):
client = CayleyClient()
g = GraphObject()
query = g.V().Has("name", "Casablanca") \
.Out("/film/film/starring") \
.Out("/film/performance/actor") \
.Out("name") \
.All()
response = client.Send(query)
print response.result
self.assertTrue(response.r.status_code == 200)
self.assertTrue(response.r is not None)
self.assertTrue(len(response.result) > 0)
<commit_msg>Add skip attribute for CayleyClient send test.<commit_after>from unittest import TestCase
import unittest
from pyley import CayleyClient, GraphObject
class CayleyClientTests(TestCase):
@unittest.skip('Disabled for now!')
def test_send(self):
client = CayleyClient()
g = GraphObject()
query = g.V().Has("name", "Casablanca") \
.Out("/film/film/starring") \
.Out("/film/performance/actor") \
.Out("name") \
.All()
response = client.Send(query)
print response.result
self.assertTrue(response.r.status_code == 200)
self.assertTrue(response.r is not None)
self.assertTrue(len(response.result) > 0)
|
0a9f2d46325ce6856a3979127390f2e48357abd9
|
schedule2stimuli.py
|
schedule2stimuli.py
|
#!/usr/bin/python
import csv
import pprint
p = 0
# read schedule (from SCRT)
schedule_f = 'schedule_' + str(p)
inf = open(schedule_f,'r')
for line in inf.readlines():
line = line.rstrip()
schedule = line.split(' ')
inf.close()
# allocate stimuli
a = 0
b = []
phase = ''
for session in range(1,36):
print "%s" % session
blocks = ''
previous = phase
phase = schedule[session - 1]
if phase == 'B':
if phase != previous:
transition = session % 10
b = [transition]
repeat = 0
if repeat == 3:
b.append((b[-1] + 1) % 10)
repeat = 0
a = (b[-1] + 1) % 10
repeat += 1
else:
a = session % 10
print ',' . join(map(str,b))
print str(a)
|
#!/usr/bin/python
import csv
import pprint
p = 0
# read schedule (from SCRT)
schedule_f = 'schedule_' + str(p)
inf = open(schedule_f,'r')
for line in inf.readlines():
line = line.rstrip()
schedule = line.split(' ')
inf.close()
# allocate stimuli and write csv
a = 0
b = []
phase = ''
csvfile = open('stimuli_' + str(p) + '.csv', 'wb')
writer = csv.writer(csvfile, delimiter=',')
for session in range(1,36):
writer.writerow([session])
blocks = ''
previous = phase
phase = schedule[session - 1]
if phase == 'B':
if phase != previous:
transition = session % 10
b = [transition]
repeat = 0
if repeat == 3:
b.append((b[-1] + 1) % 10)
repeat = 0
a = (b[-1] + 1) % 10
repeat += 1
else:
a = session % 10
writer.writerow(b)
writer.writerow([a])
|
Write stimuli schedule to csv file.
|
Write stimuli schedule to csv file.
|
Python
|
cc0-1.0
|
earcanal/dotprobe,earcanal/dotprobe,earcanal/dotprobe
|
#!/usr/bin/python
import csv
import pprint
p = 0
# read schedule (from SCRT)
schedule_f = 'schedule_' + str(p)
inf = open(schedule_f,'r')
for line in inf.readlines():
line = line.rstrip()
schedule = line.split(' ')
inf.close()
# allocate stimuli
a = 0
b = []
phase = ''
for session in range(1,36):
print "%s" % session
blocks = ''
previous = phase
phase = schedule[session - 1]
if phase == 'B':
if phase != previous:
transition = session % 10
b = [transition]
repeat = 0
if repeat == 3:
b.append((b[-1] + 1) % 10)
repeat = 0
a = (b[-1] + 1) % 10
repeat += 1
else:
a = session % 10
print ',' . join(map(str,b))
print str(a)
Write stimuli schedule to csv file.
|
#!/usr/bin/python
import csv
import pprint
p = 0
# read schedule (from SCRT)
schedule_f = 'schedule_' + str(p)
inf = open(schedule_f,'r')
for line in inf.readlines():
line = line.rstrip()
schedule = line.split(' ')
inf.close()
# allocate stimuli and write csv
a = 0
b = []
phase = ''
csvfile = open('stimuli_' + str(p) + '.csv', 'wb')
writer = csv.writer(csvfile, delimiter=',')
for session in range(1,36):
writer.writerow([session])
blocks = ''
previous = phase
phase = schedule[session - 1]
if phase == 'B':
if phase != previous:
transition = session % 10
b = [transition]
repeat = 0
if repeat == 3:
b.append((b[-1] + 1) % 10)
repeat = 0
a = (b[-1] + 1) % 10
repeat += 1
else:
a = session % 10
writer.writerow(b)
writer.writerow([a])
|
<commit_before>#!/usr/bin/python
import csv
import pprint
p = 0
# read schedule (from SCRT)
schedule_f = 'schedule_' + str(p)
inf = open(schedule_f,'r')
for line in inf.readlines():
line = line.rstrip()
schedule = line.split(' ')
inf.close()
# allocate stimuli
a = 0
b = []
phase = ''
for session in range(1,36):
print "%s" % session
blocks = ''
previous = phase
phase = schedule[session - 1]
if phase == 'B':
if phase != previous:
transition = session % 10
b = [transition]
repeat = 0
if repeat == 3:
b.append((b[-1] + 1) % 10)
repeat = 0
a = (b[-1] + 1) % 10
repeat += 1
else:
a = session % 10
print ',' . join(map(str,b))
print str(a)
<commit_msg>Write stimuli schedule to csv file.<commit_after>
|
#!/usr/bin/python
import csv
import pprint
p = 0
# read schedule (from SCRT)
schedule_f = 'schedule_' + str(p)
inf = open(schedule_f,'r')
for line in inf.readlines():
line = line.rstrip()
schedule = line.split(' ')
inf.close()
# allocate stimuli and write csv
a = 0
b = []
phase = ''
csvfile = open('stimuli_' + str(p) + '.csv', 'wb')
writer = csv.writer(csvfile, delimiter=',')
for session in range(1,36):
writer.writerow([session])
blocks = ''
previous = phase
phase = schedule[session - 1]
if phase == 'B':
if phase != previous:
transition = session % 10
b = [transition]
repeat = 0
if repeat == 3:
b.append((b[-1] + 1) % 10)
repeat = 0
a = (b[-1] + 1) % 10
repeat += 1
else:
a = session % 10
writer.writerow(b)
writer.writerow([a])
|
#!/usr/bin/python
import csv
import pprint
p = 0
# read schedule (from SCRT)
schedule_f = 'schedule_' + str(p)
inf = open(schedule_f,'r')
for line in inf.readlines():
line = line.rstrip()
schedule = line.split(' ')
inf.close()
# allocate stimuli
a = 0
b = []
phase = ''
for session in range(1,36):
print "%s" % session
blocks = ''
previous = phase
phase = schedule[session - 1]
if phase == 'B':
if phase != previous:
transition = session % 10
b = [transition]
repeat = 0
if repeat == 3:
b.append((b[-1] + 1) % 10)
repeat = 0
a = (b[-1] + 1) % 10
repeat += 1
else:
a = session % 10
print ',' . join(map(str,b))
print str(a)
Write stimuli schedule to csv file.#!/usr/bin/python
import csv
import pprint
p = 0
# read schedule (from SCRT)
schedule_f = 'schedule_' + str(p)
inf = open(schedule_f,'r')
for line in inf.readlines():
line = line.rstrip()
schedule = line.split(' ')
inf.close()
# allocate stimuli and write csv
a = 0
b = []
phase = ''
csvfile = open('stimuli_' + str(p) + '.csv', 'wb')
writer = csv.writer(csvfile, delimiter=',')
for session in range(1,36):
writer.writerow([session])
blocks = ''
previous = phase
phase = schedule[session - 1]
if phase == 'B':
if phase != previous:
transition = session % 10
b = [transition]
repeat = 0
if repeat == 3:
b.append((b[-1] + 1) % 10)
repeat = 0
a = (b[-1] + 1) % 10
repeat += 1
else:
a = session % 10
writer.writerow(b)
writer.writerow([a])
|
<commit_before>#!/usr/bin/python
import csv
import pprint
p = 0
# read schedule (from SCRT)
schedule_f = 'schedule_' + str(p)
inf = open(schedule_f,'r')
for line in inf.readlines():
line = line.rstrip()
schedule = line.split(' ')
inf.close()
# allocate stimuli
a = 0
b = []
phase = ''
for session in range(1,36):
print "%s" % session
blocks = ''
previous = phase
phase = schedule[session - 1]
if phase == 'B':
if phase != previous:
transition = session % 10
b = [transition]
repeat = 0
if repeat == 3:
b.append((b[-1] + 1) % 10)
repeat = 0
a = (b[-1] + 1) % 10
repeat += 1
else:
a = session % 10
print ',' . join(map(str,b))
print str(a)
<commit_msg>Write stimuli schedule to csv file.<commit_after>#!/usr/bin/python
import csv
import pprint
p = 0
# read schedule (from SCRT)
schedule_f = 'schedule_' + str(p)
inf = open(schedule_f,'r')
for line in inf.readlines():
line = line.rstrip()
schedule = line.split(' ')
inf.close()
# allocate stimuli and write csv
a = 0
b = []
phase = ''
csvfile = open('stimuli_' + str(p) + '.csv', 'wb')
writer = csv.writer(csvfile, delimiter=',')
for session in range(1,36):
writer.writerow([session])
blocks = ''
previous = phase
phase = schedule[session - 1]
if phase == 'B':
if phase != previous:
transition = session % 10
b = [transition]
repeat = 0
if repeat == 3:
b.append((b[-1] + 1) % 10)
repeat = 0
a = (b[-1] + 1) % 10
repeat += 1
else:
a = session % 10
writer.writerow(b)
writer.writerow([a])
|
0dfc3ab0537757bb5e4b5cc6918024c4ea75ed94
|
fs/archive/opener.py
|
fs/archive/opener.py
|
# coding: utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
import contextlib
from pkg_resources import iter_entry_points
from ..opener import open_fs
from ..opener._errors import Unsupported
from ..path import basename
@contextlib.contextmanager
def open_archive(fs_url, archive):
it = iter_entry_points('fs.archive.open_archive')
entry_point = next((ep for ep in it if archive.endswith(ep.name)), None)
if entry_point is None:
raise Unsupported(
'unknown archive extension: {}'.format(archive))
archive_opener = entry_point.load()
# if not isinstance(archive_fs, base.ArchiveFS):
# raise TypeError('bad entry point')
try:
fs = open_fs(fs_url)
binfile = fs.openbin(archive, 'r+' if fs.isfile(archive) else 'w')
binfile.name = basename(archive)
archive_fs = archive_opener(binfile)
yield archive_fs
finally:
archive_fs.close()
binfile.close()
if fs is not fs_url: # close the fs if we opened it
fs.close()
|
# coding: utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
import contextlib
from pkg_resources import iter_entry_points
from ..opener import open_fs
from ..opener._errors import Unsupported
from ..path import basename
@contextlib.contextmanager
def open_archive(fs_url, archive):
it = iter_entry_points('fs.archive.open_archive')
entry_point = next((ep for ep in it if archive.endswith(ep.name)), None)
if entry_point is None:
raise Unsupported(
'unknown archive extension: {}'.format(archive))
archive_opener = entry_point.load()
# if not isinstance(archive_fs, base.ArchiveFS):
# raise TypeError('bad entry point')
try:
fs = open_fs(fs_url)
binfile = fs.openbin(archive, 'r+' if fs.isfile(archive) else 'w')
if not hasattr(binfile, 'name'):
binfile.name = basename(archive)
archive_fs = archive_opener(binfile)
yield archive_fs
finally:
archive_fs.close()
binfile.close()
if fs is not fs_url: # close the fs if we opened it
fs.close()
|
Patch binfile name only when needed in open_archive
|
Patch binfile name only when needed in open_archive
|
Python
|
mit
|
althonos/fs.archive
|
# coding: utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
import contextlib
from pkg_resources import iter_entry_points
from ..opener import open_fs
from ..opener._errors import Unsupported
from ..path import basename
@contextlib.contextmanager
def open_archive(fs_url, archive):
it = iter_entry_points('fs.archive.open_archive')
entry_point = next((ep for ep in it if archive.endswith(ep.name)), None)
if entry_point is None:
raise Unsupported(
'unknown archive extension: {}'.format(archive))
archive_opener = entry_point.load()
# if not isinstance(archive_fs, base.ArchiveFS):
# raise TypeError('bad entry point')
try:
fs = open_fs(fs_url)
binfile = fs.openbin(archive, 'r+' if fs.isfile(archive) else 'w')
binfile.name = basename(archive)
archive_fs = archive_opener(binfile)
yield archive_fs
finally:
archive_fs.close()
binfile.close()
if fs is not fs_url: # close the fs if we opened it
fs.close()
Patch binfile name only when needed in open_archive
|
# coding: utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
import contextlib
from pkg_resources import iter_entry_points
from ..opener import open_fs
from ..opener._errors import Unsupported
from ..path import basename
@contextlib.contextmanager
def open_archive(fs_url, archive):
it = iter_entry_points('fs.archive.open_archive')
entry_point = next((ep for ep in it if archive.endswith(ep.name)), None)
if entry_point is None:
raise Unsupported(
'unknown archive extension: {}'.format(archive))
archive_opener = entry_point.load()
# if not isinstance(archive_fs, base.ArchiveFS):
# raise TypeError('bad entry point')
try:
fs = open_fs(fs_url)
binfile = fs.openbin(archive, 'r+' if fs.isfile(archive) else 'w')
if not hasattr(binfile, 'name'):
binfile.name = basename(archive)
archive_fs = archive_opener(binfile)
yield archive_fs
finally:
archive_fs.close()
binfile.close()
if fs is not fs_url: # close the fs if we opened it
fs.close()
|
<commit_before># coding: utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
import contextlib
from pkg_resources import iter_entry_points
from ..opener import open_fs
from ..opener._errors import Unsupported
from ..path import basename
@contextlib.contextmanager
def open_archive(fs_url, archive):
it = iter_entry_points('fs.archive.open_archive')
entry_point = next((ep for ep in it if archive.endswith(ep.name)), None)
if entry_point is None:
raise Unsupported(
'unknown archive extension: {}'.format(archive))
archive_opener = entry_point.load()
# if not isinstance(archive_fs, base.ArchiveFS):
# raise TypeError('bad entry point')
try:
fs = open_fs(fs_url)
binfile = fs.openbin(archive, 'r+' if fs.isfile(archive) else 'w')
binfile.name = basename(archive)
archive_fs = archive_opener(binfile)
yield archive_fs
finally:
archive_fs.close()
binfile.close()
if fs is not fs_url: # close the fs if we opened it
fs.close()
<commit_msg>Patch binfile name only when needed in open_archive<commit_after>
|
# coding: utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
import contextlib
from pkg_resources import iter_entry_points
from ..opener import open_fs
from ..opener._errors import Unsupported
from ..path import basename
@contextlib.contextmanager
def open_archive(fs_url, archive):
it = iter_entry_points('fs.archive.open_archive')
entry_point = next((ep for ep in it if archive.endswith(ep.name)), None)
if entry_point is None:
raise Unsupported(
'unknown archive extension: {}'.format(archive))
archive_opener = entry_point.load()
# if not isinstance(archive_fs, base.ArchiveFS):
# raise TypeError('bad entry point')
try:
fs = open_fs(fs_url)
binfile = fs.openbin(archive, 'r+' if fs.isfile(archive) else 'w')
if not hasattr(binfile, 'name'):
binfile.name = basename(archive)
archive_fs = archive_opener(binfile)
yield archive_fs
finally:
archive_fs.close()
binfile.close()
if fs is not fs_url: # close the fs if we opened it
fs.close()
|
# coding: utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
import contextlib
from pkg_resources import iter_entry_points
from ..opener import open_fs
from ..opener._errors import Unsupported
from ..path import basename
@contextlib.contextmanager
def open_archive(fs_url, archive):
it = iter_entry_points('fs.archive.open_archive')
entry_point = next((ep for ep in it if archive.endswith(ep.name)), None)
if entry_point is None:
raise Unsupported(
'unknown archive extension: {}'.format(archive))
archive_opener = entry_point.load()
# if not isinstance(archive_fs, base.ArchiveFS):
# raise TypeError('bad entry point')
try:
fs = open_fs(fs_url)
binfile = fs.openbin(archive, 'r+' if fs.isfile(archive) else 'w')
binfile.name = basename(archive)
archive_fs = archive_opener(binfile)
yield archive_fs
finally:
archive_fs.close()
binfile.close()
if fs is not fs_url: # close the fs if we opened it
fs.close()
Patch binfile name only when needed in open_archive# coding: utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
import contextlib
from pkg_resources import iter_entry_points
from ..opener import open_fs
from ..opener._errors import Unsupported
from ..path import basename
@contextlib.contextmanager
def open_archive(fs_url, archive):
it = iter_entry_points('fs.archive.open_archive')
entry_point = next((ep for ep in it if archive.endswith(ep.name)), None)
if entry_point is None:
raise Unsupported(
'unknown archive extension: {}'.format(archive))
archive_opener = entry_point.load()
# if not isinstance(archive_fs, base.ArchiveFS):
# raise TypeError('bad entry point')
try:
fs = open_fs(fs_url)
binfile = fs.openbin(archive, 'r+' if fs.isfile(archive) else 'w')
if not hasattr(binfile, 'name'):
binfile.name = basename(archive)
archive_fs = archive_opener(binfile)
yield archive_fs
finally:
archive_fs.close()
binfile.close()
if fs is not fs_url: # close the fs if we opened it
fs.close()
|
<commit_before># coding: utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
import contextlib
from pkg_resources import iter_entry_points
from ..opener import open_fs
from ..opener._errors import Unsupported
from ..path import basename
@contextlib.contextmanager
def open_archive(fs_url, archive):
it = iter_entry_points('fs.archive.open_archive')
entry_point = next((ep for ep in it if archive.endswith(ep.name)), None)
if entry_point is None:
raise Unsupported(
'unknown archive extension: {}'.format(archive))
archive_opener = entry_point.load()
# if not isinstance(archive_fs, base.ArchiveFS):
# raise TypeError('bad entry point')
try:
fs = open_fs(fs_url)
binfile = fs.openbin(archive, 'r+' if fs.isfile(archive) else 'w')
binfile.name = basename(archive)
archive_fs = archive_opener(binfile)
yield archive_fs
finally:
archive_fs.close()
binfile.close()
if fs is not fs_url: # close the fs if we opened it
fs.close()
<commit_msg>Patch binfile name only when needed in open_archive<commit_after># coding: utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
import contextlib
from pkg_resources import iter_entry_points
from ..opener import open_fs
from ..opener._errors import Unsupported
from ..path import basename
@contextlib.contextmanager
def open_archive(fs_url, archive):
it = iter_entry_points('fs.archive.open_archive')
entry_point = next((ep for ep in it if archive.endswith(ep.name)), None)
if entry_point is None:
raise Unsupported(
'unknown archive extension: {}'.format(archive))
archive_opener = entry_point.load()
# if not isinstance(archive_fs, base.ArchiveFS):
# raise TypeError('bad entry point')
try:
fs = open_fs(fs_url)
binfile = fs.openbin(archive, 'r+' if fs.isfile(archive) else 'w')
if not hasattr(binfile, 'name'):
binfile.name = basename(archive)
archive_fs = archive_opener(binfile)
yield archive_fs
finally:
archive_fs.close()
binfile.close()
if fs is not fs_url: # close the fs if we opened it
fs.close()
|
f061499b9d415b7471edf072c81b93ce5453494d
|
githubtrending/utils.py
|
githubtrending/utils.py
|
import os
def get_console_size():
'''
returns no of rows, no of cols
'''
return map(int, os.popen('stty size', 'r').read().split())
def get_print_size_for_repo(data):
name, lang, star = [0]*3
for each in data:
repo_name, desc, [stars, language] = each
name = max(len(repo_name), name)
lang = max(len(language), lang)
star = max(len(stars), star)
return {
"NAME": name+1,
"LANG": lang+1,
"STAR": star+1,
"IDX": 3,
}
def get_print_size_for_dev(data):
dev, repo = [0]*2
for each in data:
dev_name, repo_name, description = each
dev = max(len(dev_name), dev)
repo = max(len(repo_name), repo)
return {
"DEV": dev+1,
"REPO": repo+1,
"IDX": 3,
}
def get_color_code():
return {
"IDX": "white",
"NAME": "yellow",
"LANG": "red",
"STARS": "green",
"DESC": "blue",
"REPO": "green",
}
|
import os
def get_console_size():
'''
returns no of rows, no of cols
'''
with os.popen('stty size', 'r') as f:
size = map(int, f.read().split())
return size
def get_print_size_for_repo(data):
name, lang, star = [0]*3
for each in data:
repo_name, desc, [stars, language] = each
name = max(len(repo_name), name)
lang = max(len(language), lang)
star = max(len(stars), star)
return {
"NAME": name+1,
"LANG": lang+1,
"STAR": star+1,
"IDX": 3,
}
def get_print_size_for_dev(data):
dev, repo = [0]*2
for each in data:
dev_name, repo_name, description = each
dev = max(len(dev_name), dev)
repo = max(len(repo_name), repo)
return {
"DEV": dev+1,
"REPO": repo+1,
"IDX": 3,
}
def get_color_code():
return {
"IDX": "white",
"NAME": "yellow",
"LANG": "red",
"STARS": "green",
"DESC": "blue",
"REPO": "green",
}
|
Refactor get_console_size to close file after reading
|
Utils: Refactor get_console_size to close file after reading
|
Python
|
mit
|
staranjeet/github-trending-cli
|
import os
def get_console_size():
'''
returns no of rows, no of cols
'''
return map(int, os.popen('stty size', 'r').read().split())
def get_print_size_for_repo(data):
name, lang, star = [0]*3
for each in data:
repo_name, desc, [stars, language] = each
name = max(len(repo_name), name)
lang = max(len(language), lang)
star = max(len(stars), star)
return {
"NAME": name+1,
"LANG": lang+1,
"STAR": star+1,
"IDX": 3,
}
def get_print_size_for_dev(data):
dev, repo = [0]*2
for each in data:
dev_name, repo_name, description = each
dev = max(len(dev_name), dev)
repo = max(len(repo_name), repo)
return {
"DEV": dev+1,
"REPO": repo+1,
"IDX": 3,
}
def get_color_code():
return {
"IDX": "white",
"NAME": "yellow",
"LANG": "red",
"STARS": "green",
"DESC": "blue",
"REPO": "green",
}
Utils: Refactor get_console_size to close file after reading
|
import os
def get_console_size():
'''
returns no of rows, no of cols
'''
with os.popen('stty size', 'r') as f:
size = map(int, f.read().split())
return size
def get_print_size_for_repo(data):
name, lang, star = [0]*3
for each in data:
repo_name, desc, [stars, language] = each
name = max(len(repo_name), name)
lang = max(len(language), lang)
star = max(len(stars), star)
return {
"NAME": name+1,
"LANG": lang+1,
"STAR": star+1,
"IDX": 3,
}
def get_print_size_for_dev(data):
dev, repo = [0]*2
for each in data:
dev_name, repo_name, description = each
dev = max(len(dev_name), dev)
repo = max(len(repo_name), repo)
return {
"DEV": dev+1,
"REPO": repo+1,
"IDX": 3,
}
def get_color_code():
return {
"IDX": "white",
"NAME": "yellow",
"LANG": "red",
"STARS": "green",
"DESC": "blue",
"REPO": "green",
}
|
<commit_before>import os
def get_console_size():
'''
returns no of rows, no of cols
'''
return map(int, os.popen('stty size', 'r').read().split())
def get_print_size_for_repo(data):
name, lang, star = [0]*3
for each in data:
repo_name, desc, [stars, language] = each
name = max(len(repo_name), name)
lang = max(len(language), lang)
star = max(len(stars), star)
return {
"NAME": name+1,
"LANG": lang+1,
"STAR": star+1,
"IDX": 3,
}
def get_print_size_for_dev(data):
dev, repo = [0]*2
for each in data:
dev_name, repo_name, description = each
dev = max(len(dev_name), dev)
repo = max(len(repo_name), repo)
return {
"DEV": dev+1,
"REPO": repo+1,
"IDX": 3,
}
def get_color_code():
return {
"IDX": "white",
"NAME": "yellow",
"LANG": "red",
"STARS": "green",
"DESC": "blue",
"REPO": "green",
}
<commit_msg>Utils: Refactor get_console_size to close file after reading<commit_after>
|
import os
def get_console_size():
'''
returns no of rows, no of cols
'''
with os.popen('stty size', 'r') as f:
size = map(int, f.read().split())
return size
def get_print_size_for_repo(data):
name, lang, star = [0]*3
for each in data:
repo_name, desc, [stars, language] = each
name = max(len(repo_name), name)
lang = max(len(language), lang)
star = max(len(stars), star)
return {
"NAME": name+1,
"LANG": lang+1,
"STAR": star+1,
"IDX": 3,
}
def get_print_size_for_dev(data):
dev, repo = [0]*2
for each in data:
dev_name, repo_name, description = each
dev = max(len(dev_name), dev)
repo = max(len(repo_name), repo)
return {
"DEV": dev+1,
"REPO": repo+1,
"IDX": 3,
}
def get_color_code():
return {
"IDX": "white",
"NAME": "yellow",
"LANG": "red",
"STARS": "green",
"DESC": "blue",
"REPO": "green",
}
|
import os
def get_console_size():
'''
returns no of rows, no of cols
'''
return map(int, os.popen('stty size', 'r').read().split())
def get_print_size_for_repo(data):
name, lang, star = [0]*3
for each in data:
repo_name, desc, [stars, language] = each
name = max(len(repo_name), name)
lang = max(len(language), lang)
star = max(len(stars), star)
return {
"NAME": name+1,
"LANG": lang+1,
"STAR": star+1,
"IDX": 3,
}
def get_print_size_for_dev(data):
dev, repo = [0]*2
for each in data:
dev_name, repo_name, description = each
dev = max(len(dev_name), dev)
repo = max(len(repo_name), repo)
return {
"DEV": dev+1,
"REPO": repo+1,
"IDX": 3,
}
def get_color_code():
return {
"IDX": "white",
"NAME": "yellow",
"LANG": "red",
"STARS": "green",
"DESC": "blue",
"REPO": "green",
}
Utils: Refactor get_console_size to close file after readingimport os
def get_console_size():
'''
returns no of rows, no of cols
'''
with os.popen('stty size', 'r') as f:
size = map(int, f.read().split())
return size
def get_print_size_for_repo(data):
name, lang, star = [0]*3
for each in data:
repo_name, desc, [stars, language] = each
name = max(len(repo_name), name)
lang = max(len(language), lang)
star = max(len(stars), star)
return {
"NAME": name+1,
"LANG": lang+1,
"STAR": star+1,
"IDX": 3,
}
def get_print_size_for_dev(data):
dev, repo = [0]*2
for each in data:
dev_name, repo_name, description = each
dev = max(len(dev_name), dev)
repo = max(len(repo_name), repo)
return {
"DEV": dev+1,
"REPO": repo+1,
"IDX": 3,
}
def get_color_code():
return {
"IDX": "white",
"NAME": "yellow",
"LANG": "red",
"STARS": "green",
"DESC": "blue",
"REPO": "green",
}
|
<commit_before>import os
def get_console_size():
'''
returns no of rows, no of cols
'''
return map(int, os.popen('stty size', 'r').read().split())
def get_print_size_for_repo(data):
name, lang, star = [0]*3
for each in data:
repo_name, desc, [stars, language] = each
name = max(len(repo_name), name)
lang = max(len(language), lang)
star = max(len(stars), star)
return {
"NAME": name+1,
"LANG": lang+1,
"STAR": star+1,
"IDX": 3,
}
def get_print_size_for_dev(data):
dev, repo = [0]*2
for each in data:
dev_name, repo_name, description = each
dev = max(len(dev_name), dev)
repo = max(len(repo_name), repo)
return {
"DEV": dev+1,
"REPO": repo+1,
"IDX": 3,
}
def get_color_code():
return {
"IDX": "white",
"NAME": "yellow",
"LANG": "red",
"STARS": "green",
"DESC": "blue",
"REPO": "green",
}
<commit_msg>Utils: Refactor get_console_size to close file after reading<commit_after>import os
def get_console_size():
'''
returns no of rows, no of cols
'''
with os.popen('stty size', 'r') as f:
size = map(int, f.read().split())
return size
def get_print_size_for_repo(data):
name, lang, star = [0]*3
for each in data:
repo_name, desc, [stars, language] = each
name = max(len(repo_name), name)
lang = max(len(language), lang)
star = max(len(stars), star)
return {
"NAME": name+1,
"LANG": lang+1,
"STAR": star+1,
"IDX": 3,
}
def get_print_size_for_dev(data):
dev, repo = [0]*2
for each in data:
dev_name, repo_name, description = each
dev = max(len(dev_name), dev)
repo = max(len(repo_name), repo)
return {
"DEV": dev+1,
"REPO": repo+1,
"IDX": 3,
}
def get_color_code():
return {
"IDX": "white",
"NAME": "yellow",
"LANG": "red",
"STARS": "green",
"DESC": "blue",
"REPO": "green",
}
|
237b66c8b9cef714b64a75b1f20a79a4357c71b5
|
apps/continiousauth/serializers.py
|
apps/continiousauth/serializers.py
|
from rest_framework import serializers
from .models import AuthenticationSession
class AuthenticationSessionSerializer(serializers.ModelSerializer):
class Meta:
model = AuthenticationSession
fields = ('application', 'external_session_id', 'session_photo_bytes', 'flag', 'start_time', 'end_time')
|
from rest_framework import serializers
from .models import AuthenticationSession
class AuthenticationSessionSerializer(serializers.ModelSerializer):
class Meta:
model = AuthenticationSession
fields = ('application', 'external_session_id', 'session_photo_bytes', 'flag')
|
Change serializer to omit dates
|
Change serializer to omit dates
|
Python
|
mit
|
larserikgk/mobiauth-server,larserikgk/mobiauth-server,larserikgk/mobiauth-server
|
from rest_framework import serializers
from .models import AuthenticationSession
class AuthenticationSessionSerializer(serializers.ModelSerializer):
class Meta:
model = AuthenticationSession
fields = ('application', 'external_session_id', 'session_photo_bytes', 'flag', 'start_time', 'end_time')
Change serializer to omit dates
|
from rest_framework import serializers
from .models import AuthenticationSession
class AuthenticationSessionSerializer(serializers.ModelSerializer):
class Meta:
model = AuthenticationSession
fields = ('application', 'external_session_id', 'session_photo_bytes', 'flag')
|
<commit_before>from rest_framework import serializers
from .models import AuthenticationSession
class AuthenticationSessionSerializer(serializers.ModelSerializer):
class Meta:
model = AuthenticationSession
fields = ('application', 'external_session_id', 'session_photo_bytes', 'flag', 'start_time', 'end_time')
<commit_msg>Change serializer to omit dates<commit_after>
|
from rest_framework import serializers
from .models import AuthenticationSession
class AuthenticationSessionSerializer(serializers.ModelSerializer):
class Meta:
model = AuthenticationSession
fields = ('application', 'external_session_id', 'session_photo_bytes', 'flag')
|
from rest_framework import serializers
from .models import AuthenticationSession
class AuthenticationSessionSerializer(serializers.ModelSerializer):
class Meta:
model = AuthenticationSession
fields = ('application', 'external_session_id', 'session_photo_bytes', 'flag', 'start_time', 'end_time')
Change serializer to omit datesfrom rest_framework import serializers
from .models import AuthenticationSession
class AuthenticationSessionSerializer(serializers.ModelSerializer):
class Meta:
model = AuthenticationSession
fields = ('application', 'external_session_id', 'session_photo_bytes', 'flag')
|
<commit_before>from rest_framework import serializers
from .models import AuthenticationSession
class AuthenticationSessionSerializer(serializers.ModelSerializer):
class Meta:
model = AuthenticationSession
fields = ('application', 'external_session_id', 'session_photo_bytes', 'flag', 'start_time', 'end_time')
<commit_msg>Change serializer to omit dates<commit_after>from rest_framework import serializers
from .models import AuthenticationSession
class AuthenticationSessionSerializer(serializers.ModelSerializer):
class Meta:
model = AuthenticationSession
fields = ('application', 'external_session_id', 'session_photo_bytes', 'flag')
|
aec33e5eaf40deed73c580a170714810229678fc
|
treetojson.py
|
treetojson.py
|
# !/usr/bin/env python
# coding: utf-8
"""
Converts a list or a list with a given regex grammar which contains a tree structure into a valid JSON.
This module works with both Python 2 and 3.
"""
__version__ = '0.1'
version = __version__
import logging
from nltk.chunk.regexp import *
LOG = logging.getLogger("treetojson")
def set_debug(debug=True, filename='dicttoxml.log'):
if debug:
import datetime
print('Debug mode is on. Events are logged at: %s' % (filename))
logging.basicConfig(filename=filename, level=logging.INFO)
LOG.info('\nLogging session starts: %s' % (
str(datetime.datetime.today()))
)
else:
logging.basicConfig(level=logging.WARNING)
print('Debug mode is off.')
def depict_tree(data, grammar=None):
if grammar:
parser = RegexpParser(grammar)
else:
parser = RegexpParser('''
''')
tree = parser.parse(data)
|
# !/usr/bin/env python
# coding: utf-8
"""
Converts a list or a list with a given regex grammar which contains a tree structure into a valid JSON.
This module works with both Python 2 and 3.
"""
__version__ = '0.1'
version = __version__
import logging
from nltk.chunk.regexp import *
LOG = logging.getLogger("treetojson")
def set_debug(debug=True, filename='dicttoxml.log'):
if debug:
import datetime
print('Debug mode is on. Events are logged at: %s' % (filename))
logging.basicConfig(filename=filename, level=logging.INFO)
LOG.info('\nLogging session starts: %s' % (
str(datetime.datetime.today()))
)
else:
logging.basicConfig(level=logging.WARNING)
print('Debug mode is off.')
def depict_tree(data, grammar=None):
"""Converts the provided list into a tree structure
Arguments:
- data contains a list which should look like,
[('I', 'NN'), ('am', 'NN'), ('a', 'NN'), ('good', 'VB'), ('boy', 'NN')]
- grammar is optional, it accepts NLTK regexp grammar.
"""
LOG.info('Inside depict_tree()')
if grammar:
parser = RegexpParser(grammar)
else:
parser = RegexpParser('''
''')
return parser.parse(data)
def get_json(data, grammar=None):
"""Provides a JSON output for a given list
Arguments:
- data contains a list which should look like,
[('I', 'NN'), ('am', 'NN'), ('a', 'NN'), ('good', 'VB'), ('boy', 'NN')]
- grammar is optional, it accepts NLTK regexp grammar.
"""
LOG.info('Inside get_json()')
tree = depict_tree(data, grammar=grammar)
|
Add get_json method to provide json output
|
Add get_json method to provide json output
|
Python
|
mit
|
saadsahibjan/treetojson
|
# !/usr/bin/env python
# coding: utf-8
"""
Converts a list or a list with a given regex grammar which contains a tree structure into a valid JSON.
This module works with both Python 2 and 3.
"""
__version__ = '0.1'
version = __version__
import logging
from nltk.chunk.regexp import *
LOG = logging.getLogger("treetojson")
def set_debug(debug=True, filename='dicttoxml.log'):
if debug:
import datetime
print('Debug mode is on. Events are logged at: %s' % (filename))
logging.basicConfig(filename=filename, level=logging.INFO)
LOG.info('\nLogging session starts: %s' % (
str(datetime.datetime.today()))
)
else:
logging.basicConfig(level=logging.WARNING)
print('Debug mode is off.')
def depict_tree(data, grammar=None):
if grammar:
parser = RegexpParser(grammar)
else:
parser = RegexpParser('''
''')
tree = parser.parse(data)
Add get_json method to provide json output
|
# !/usr/bin/env python
# coding: utf-8
"""
Converts a list or a list with a given regex grammar which contains a tree structure into a valid JSON.
This module works with both Python 2 and 3.
"""
__version__ = '0.1'
version = __version__
import logging
from nltk.chunk.regexp import *
LOG = logging.getLogger("treetojson")
def set_debug(debug=True, filename='dicttoxml.log'):
if debug:
import datetime
print('Debug mode is on. Events are logged at: %s' % (filename))
logging.basicConfig(filename=filename, level=logging.INFO)
LOG.info('\nLogging session starts: %s' % (
str(datetime.datetime.today()))
)
else:
logging.basicConfig(level=logging.WARNING)
print('Debug mode is off.')
def depict_tree(data, grammar=None):
"""Converts the provided list into a tree structure
Arguments:
- data contains a list which should look like,
[('I', 'NN'), ('am', 'NN'), ('a', 'NN'), ('good', 'VB'), ('boy', 'NN')]
- grammar is optional, it accepts NLTK regexp grammar.
"""
LOG.info('Inside depict_tree()')
if grammar:
parser = RegexpParser(grammar)
else:
parser = RegexpParser('''
''')
return parser.parse(data)
def get_json(data, grammar=None):
"""Provides a JSON output for a given list
Arguments:
- data contains a list which should look like,
[('I', 'NN'), ('am', 'NN'), ('a', 'NN'), ('good', 'VB'), ('boy', 'NN')]
- grammar is optional, it accepts NLTK regexp grammar.
"""
LOG.info('Inside get_json()')
tree = depict_tree(data, grammar=grammar)
|
<commit_before># !/usr/bin/env python
# coding: utf-8
"""
Converts a list or a list with a given regex grammar which contains a tree structure into a valid JSON.
This module works with both Python 2 and 3.
"""
__version__ = '0.1'
version = __version__
import logging
from nltk.chunk.regexp import *
LOG = logging.getLogger("treetojson")
def set_debug(debug=True, filename='dicttoxml.log'):
if debug:
import datetime
print('Debug mode is on. Events are logged at: %s' % (filename))
logging.basicConfig(filename=filename, level=logging.INFO)
LOG.info('\nLogging session starts: %s' % (
str(datetime.datetime.today()))
)
else:
logging.basicConfig(level=logging.WARNING)
print('Debug mode is off.')
def depict_tree(data, grammar=None):
if grammar:
parser = RegexpParser(grammar)
else:
parser = RegexpParser('''
''')
tree = parser.parse(data)
<commit_msg>Add get_json method to provide json output<commit_after>
|
# !/usr/bin/env python
# coding: utf-8
"""
Converts a list or a list with a given regex grammar which contains a tree structure into a valid JSON.
This module works with both Python 2 and 3.
"""
__version__ = '0.1'
version = __version__
import logging
from nltk.chunk.regexp import *
LOG = logging.getLogger("treetojson")
def set_debug(debug=True, filename='dicttoxml.log'):
if debug:
import datetime
print('Debug mode is on. Events are logged at: %s' % (filename))
logging.basicConfig(filename=filename, level=logging.INFO)
LOG.info('\nLogging session starts: %s' % (
str(datetime.datetime.today()))
)
else:
logging.basicConfig(level=logging.WARNING)
print('Debug mode is off.')
def depict_tree(data, grammar=None):
"""Converts the provided list into a tree structure
Arguments:
- data contains a list which should look like,
[('I', 'NN'), ('am', 'NN'), ('a', 'NN'), ('good', 'VB'), ('boy', 'NN')]
- grammar is optional, it accepts NLTK regexp grammar.
"""
LOG.info('Inside depict_tree()')
if grammar:
parser = RegexpParser(grammar)
else:
parser = RegexpParser('''
''')
return parser.parse(data)
def get_json(data, grammar=None):
"""Provides a JSON output for a given list
Arguments:
- data contains a list which should look like,
[('I', 'NN'), ('am', 'NN'), ('a', 'NN'), ('good', 'VB'), ('boy', 'NN')]
- grammar is optional, it accepts NLTK regexp grammar.
"""
LOG.info('Inside get_json()')
tree = depict_tree(data, grammar=grammar)
|
# !/usr/bin/env python
# coding: utf-8
"""
Converts a list or a list with a given regex grammar which contains a tree structure into a valid JSON.
This module works with both Python 2 and 3.
"""
__version__ = '0.1'
version = __version__
import logging
from nltk.chunk.regexp import *
LOG = logging.getLogger("treetojson")
def set_debug(debug=True, filename='dicttoxml.log'):
if debug:
import datetime
print('Debug mode is on. Events are logged at: %s' % (filename))
logging.basicConfig(filename=filename, level=logging.INFO)
LOG.info('\nLogging session starts: %s' % (
str(datetime.datetime.today()))
)
else:
logging.basicConfig(level=logging.WARNING)
print('Debug mode is off.')
def depict_tree(data, grammar=None):
if grammar:
parser = RegexpParser(grammar)
else:
parser = RegexpParser('''
''')
tree = parser.parse(data)
Add get_json method to provide json output# !/usr/bin/env python
# coding: utf-8
"""
Converts a list or a list with a given regex grammar which contains a tree structure into a valid JSON.
This module works with both Python 2 and 3.
"""
__version__ = '0.1'
version = __version__
import logging
from nltk.chunk.regexp import *
LOG = logging.getLogger("treetojson")
def set_debug(debug=True, filename='dicttoxml.log'):
if debug:
import datetime
print('Debug mode is on. Events are logged at: %s' % (filename))
logging.basicConfig(filename=filename, level=logging.INFO)
LOG.info('\nLogging session starts: %s' % (
str(datetime.datetime.today()))
)
else:
logging.basicConfig(level=logging.WARNING)
print('Debug mode is off.')
def depict_tree(data, grammar=None):
"""Converts the provided list into a tree structure
Arguments:
- data contains a list which should look like,
[('I', 'NN'), ('am', 'NN'), ('a', 'NN'), ('good', 'VB'), ('boy', 'NN')]
- grammar is optional, it accepts NLTK regexp grammar.
"""
LOG.info('Inside depict_tree()')
if grammar:
parser = RegexpParser(grammar)
else:
parser = RegexpParser('''
''')
return parser.parse(data)
def get_json(data, grammar=None):
"""Provides a JSON output for a given list
Arguments:
- data contains a list which should look like,
[('I', 'NN'), ('am', 'NN'), ('a', 'NN'), ('good', 'VB'), ('boy', 'NN')]
- grammar is optional, it accepts NLTK regexp grammar.
"""
LOG.info('Inside get_json()')
tree = depict_tree(data, grammar=grammar)
|
<commit_before># !/usr/bin/env python
# coding: utf-8
"""
Converts a list or a list with a given regex grammar which contains a tree structure into a valid JSON.
This module works with both Python 2 and 3.
"""
__version__ = '0.1'
version = __version__
import logging
from nltk.chunk.regexp import *
LOG = logging.getLogger("treetojson")
def set_debug(debug=True, filename='dicttoxml.log'):
if debug:
import datetime
print('Debug mode is on. Events are logged at: %s' % (filename))
logging.basicConfig(filename=filename, level=logging.INFO)
LOG.info('\nLogging session starts: %s' % (
str(datetime.datetime.today()))
)
else:
logging.basicConfig(level=logging.WARNING)
print('Debug mode is off.')
def depict_tree(data, grammar=None):
if grammar:
parser = RegexpParser(grammar)
else:
parser = RegexpParser('''
''')
tree = parser.parse(data)
<commit_msg>Add get_json method to provide json output<commit_after># !/usr/bin/env python
# coding: utf-8
"""
Converts a list or a list with a given regex grammar which contains a tree structure into a valid JSON.
This module works with both Python 2 and 3.
"""
__version__ = '0.1'
version = __version__
import logging
from nltk.chunk.regexp import *
LOG = logging.getLogger("treetojson")
def set_debug(debug=True, filename='dicttoxml.log'):
if debug:
import datetime
print('Debug mode is on. Events are logged at: %s' % (filename))
logging.basicConfig(filename=filename, level=logging.INFO)
LOG.info('\nLogging session starts: %s' % (
str(datetime.datetime.today()))
)
else:
logging.basicConfig(level=logging.WARNING)
print('Debug mode is off.')
def depict_tree(data, grammar=None):
"""Converts the provided list into a tree structure
Arguments:
- data contains a list which should look like,
[('I', 'NN'), ('am', 'NN'), ('a', 'NN'), ('good', 'VB'), ('boy', 'NN')]
- grammar is optional, it accepts NLTK regexp grammar.
"""
LOG.info('Inside depict_tree()')
if grammar:
parser = RegexpParser(grammar)
else:
parser = RegexpParser('''
''')
return parser.parse(data)
def get_json(data, grammar=None):
"""Provides a JSON output for a given list
Arguments:
- data contains a list which should look like,
[('I', 'NN'), ('am', 'NN'), ('a', 'NN'), ('good', 'VB'), ('boy', 'NN')]
- grammar is optional, it accepts NLTK regexp grammar.
"""
LOG.info('Inside get_json()')
tree = depict_tree(data, grammar=grammar)
|
dbb668c3f72ab6d20abe08f9f23b7d66cfa0d8c3
|
ideascube/blog/forms.py
|
ideascube/blog/forms.py
|
from django import forms
from ideascube.widgets import LangSelect
from .models import Content
class ContentForm(forms.ModelForm):
class Meta:
model = Content
widgets = {
# We need a normalized date string for JS datepicker, so we take
# control over the format to bypass L10N.
"published_at": forms.DateInput(format='%Y-%m-%d'),
"lang": LangSelect,
}
fields = "__all__"
def save(self, commit=True):
content = super().save()
content.save() # Index m2m.
return content
|
from django import forms
from ideascube.widgets import LangSelect
from .models import Content
class ContentForm(forms.ModelForm):
use_required_attribute = False
class Meta:
model = Content
widgets = {
# We need a normalized date string for JS datepicker, so we take
# control over the format to bypass L10N.
"published_at": forms.DateInput(format='%Y-%m-%d'),
"lang": LangSelect,
}
fields = "__all__"
def save(self, commit=True):
content = super().save()
content.save() # Index m2m.
return content
|
Fix the blog content form
|
Fix the blog content form
The form plays a trick on the 'text' field: it hides it, and creates a
nicer-looking 'content' field. When submitting the form, the content of
the 'content' field is injected into the 'text' field.
This works great, until Django 1.10.
With Django 1.10, the browser refuses to submit the form, saying the
'text' field is empty and required.
This is due to this hidden gem from the Django 1.10 release notes:
> * Required form fields now have the required HTML attribute. Set the
> Form.use_required_attribute attribute to False to disable it. You
> could also add the novalidate attribute to <form> if you don’t want
> browser validation.
https://docs.djangoproject.com/en/1.10/releases/1.10/#miscellaneous
This commit fixes the blog content form, which is the only one I found
problematic so far.
Hopefully it is the only one that is actually impacted. :-/
|
Python
|
agpl-3.0
|
ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube
|
from django import forms
from ideascube.widgets import LangSelect
from .models import Content
class ContentForm(forms.ModelForm):
class Meta:
model = Content
widgets = {
# We need a normalized date string for JS datepicker, so we take
# control over the format to bypass L10N.
"published_at": forms.DateInput(format='%Y-%m-%d'),
"lang": LangSelect,
}
fields = "__all__"
def save(self, commit=True):
content = super().save()
content.save() # Index m2m.
return content
Fix the blog content form
The form plays a trick on the 'text' field: it hides it, and creates a
nicer-looking 'content' field. When submitting the form, the content of
the 'content' field is injected into the 'text' field.
This works great, until Django 1.10.
With Django 1.10, the browser refuses to submit the form, saying the
'text' field is empty and required.
This is due to this hidden gem from the Django 1.10 release notes:
> * Required form fields now have the required HTML attribute. Set the
> Form.use_required_attribute attribute to False to disable it. You
> could also add the novalidate attribute to <form> if you don’t want
> browser validation.
https://docs.djangoproject.com/en/1.10/releases/1.10/#miscellaneous
This commit fixes the blog content form, which is the only one I found
problematic so far.
Hopefully it is the only one that is actually impacted. :-/
|
from django import forms
from ideascube.widgets import LangSelect
from .models import Content
class ContentForm(forms.ModelForm):
use_required_attribute = False
class Meta:
model = Content
widgets = {
# We need a normalized date string for JS datepicker, so we take
# control over the format to bypass L10N.
"published_at": forms.DateInput(format='%Y-%m-%d'),
"lang": LangSelect,
}
fields = "__all__"
def save(self, commit=True):
content = super().save()
content.save() # Index m2m.
return content
|
<commit_before>from django import forms
from ideascube.widgets import LangSelect
from .models import Content
class ContentForm(forms.ModelForm):
class Meta:
model = Content
widgets = {
# We need a normalized date string for JS datepicker, so we take
# control over the format to bypass L10N.
"published_at": forms.DateInput(format='%Y-%m-%d'),
"lang": LangSelect,
}
fields = "__all__"
def save(self, commit=True):
content = super().save()
content.save() # Index m2m.
return content
<commit_msg>Fix the blog content form
The form plays a trick on the 'text' field: it hides it, and creates a
nicer-looking 'content' field. When submitting the form, the content of
the 'content' field is injected into the 'text' field.
This works great, until Django 1.10.
With Django 1.10, the browser refuses to submit the form, saying the
'text' field is empty and required.
This is due to this hidden gem from the Django 1.10 release notes:
> * Required form fields now have the required HTML attribute. Set the
> Form.use_required_attribute attribute to False to disable it. You
> could also add the novalidate attribute to <form> if you don’t want
> browser validation.
https://docs.djangoproject.com/en/1.10/releases/1.10/#miscellaneous
This commit fixes the blog content form, which is the only one I found
problematic so far.
Hopefully it is the only one that is actually impacted. :-/<commit_after>
|
from django import forms
from ideascube.widgets import LangSelect
from .models import Content
class ContentForm(forms.ModelForm):
use_required_attribute = False
class Meta:
model = Content
widgets = {
# We need a normalized date string for JS datepicker, so we take
# control over the format to bypass L10N.
"published_at": forms.DateInput(format='%Y-%m-%d'),
"lang": LangSelect,
}
fields = "__all__"
def save(self, commit=True):
content = super().save()
content.save() # Index m2m.
return content
|
from django import forms
from ideascube.widgets import LangSelect
from .models import Content
class ContentForm(forms.ModelForm):
class Meta:
model = Content
widgets = {
# We need a normalized date string for JS datepicker, so we take
# control over the format to bypass L10N.
"published_at": forms.DateInput(format='%Y-%m-%d'),
"lang": LangSelect,
}
fields = "__all__"
def save(self, commit=True):
content = super().save()
content.save() # Index m2m.
return content
Fix the blog content form
The form plays a trick on the 'text' field: it hides it, and creates a
nicer-looking 'content' field. When submitting the form, the content of
the 'content' field is injected into the 'text' field.
This works great, until Django 1.10.
With Django 1.10, the browser refuses to submit the form, saying the
'text' field is empty and required.
This is due to this hidden gem from the Django 1.10 release notes:
> * Required form fields now have the required HTML attribute. Set the
> Form.use_required_attribute attribute to False to disable it. You
> could also add the novalidate attribute to <form> if you don’t want
> browser validation.
https://docs.djangoproject.com/en/1.10/releases/1.10/#miscellaneous
This commit fixes the blog content form, which is the only one I found
problematic so far.
Hopefully it is the only one that is actually impacted. :-/from django import forms
from ideascube.widgets import LangSelect
from .models import Content
class ContentForm(forms.ModelForm):
use_required_attribute = False
class Meta:
model = Content
widgets = {
# We need a normalized date string for JS datepicker, so we take
# control over the format to bypass L10N.
"published_at": forms.DateInput(format='%Y-%m-%d'),
"lang": LangSelect,
}
fields = "__all__"
def save(self, commit=True):
content = super().save()
content.save() # Index m2m.
return content
|
<commit_before>from django import forms
from ideascube.widgets import LangSelect
from .models import Content
class ContentForm(forms.ModelForm):
class Meta:
model = Content
widgets = {
# We need a normalized date string for JS datepicker, so we take
# control over the format to bypass L10N.
"published_at": forms.DateInput(format='%Y-%m-%d'),
"lang": LangSelect,
}
fields = "__all__"
def save(self, commit=True):
content = super().save()
content.save() # Index m2m.
return content
<commit_msg>Fix the blog content form
The form plays a trick on the 'text' field: it hides it, and creates a
nicer-looking 'content' field. When submitting the form, the content of
the 'content' field is injected into the 'text' field.
This works great, until Django 1.10.
With Django 1.10, the browser refuses to submit the form, saying the
'text' field is empty and required.
This is due to this hidden gem from the Django 1.10 release notes:
> * Required form fields now have the required HTML attribute. Set the
> Form.use_required_attribute attribute to False to disable it. You
> could also add the novalidate attribute to <form> if you don’t want
> browser validation.
https://docs.djangoproject.com/en/1.10/releases/1.10/#miscellaneous
This commit fixes the blog content form, which is the only one I found
problematic so far.
Hopefully it is the only one that is actually impacted. :-/<commit_after>from django import forms
from ideascube.widgets import LangSelect
from .models import Content
class ContentForm(forms.ModelForm):
use_required_attribute = False
class Meta:
model = Content
widgets = {
# We need a normalized date string for JS datepicker, so we take
# control over the format to bypass L10N.
"published_at": forms.DateInput(format='%Y-%m-%d'),
"lang": LangSelect,
}
fields = "__all__"
def save(self, commit=True):
content = super().save()
content.save() # Index m2m.
return content
|
4b80c061073857cffebcb23cf2e597919318e1ba
|
db/__init__.py
|
db/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .common import session_scope
def commit_db_item(db_item, add=False):
with session_scope() as session:
if add:
session.add(db_item)
else:
session.merge(db_item)
session.commit()
def create_or_update_db_item(db_item, new_item):
"""
Updates an existing or creates a new database item.
"""
with session_scope() as session:
# if database item exists
if db_item is not None:
# returning if database item is unchanged
if db_item == new_item:
return
# updating database item otherwise
else:
db_item.update(new_item)
session.merge(db_item)
# creating database item otherwise
else:
session.add(new_item)
session.commit()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
from .common import session_scope
logger = logging.getLogger()
def commit_db_item(db_item, add=False):
with session_scope() as session:
if add:
session.add(db_item)
else:
session.merge(db_item)
session.commit()
def create_or_update_db_item(db_item, new_item):
"""
Updates an existing or creates a new database item.
"""
with session_scope() as session:
# if database item exists
if db_item is not None:
# returning if database item is unchanged
if db_item == new_item:
return
# updating database item otherwise
else:
db_item.update(new_item)
session.merge(db_item)
# creating database item otherwise
else:
session.add(new_item)
session.commit()
def create_or_update_db_item_alternate(db_item, new_item):
    """
    Creates or updates a database item.

    Unlike ``create_or_update_db_item`` this variant returns the
    persisted object: the session-bound merge result when an existing
    row was updated, the untouched existing row when nothing changed,
    or the newly added item.
    """
    # HUMAN_READABLE is a class-level label used purely for log output.
    cls_name = new_item.__class__.HUMAN_READABLE
    with session_scope() as session:
        if db_item is not None:
            if db_item != new_item:
                logger.debug("\t+ Updating %s item" % cls_name)
                db_item.update(new_item)
                # merge() returns the session-attached copy; hand that back
                # rather than the detached original.
                return_item = session.merge(db_item)
            else:
                # Unchanged: return the existing row as-is, no write needed.
                return_item = db_item
        else:
            logger.debug("\t+ Adding %s item" % cls_name)
            session.add(new_item)
            return_item = new_item
        session.commit()
    return return_item
|
Add alternate function to create or update database item
|
Add alternate function to create or update database item
|
Python
|
mit
|
leaffan/pynhldb
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .common import session_scope
def commit_db_item(db_item, add=False):
with session_scope() as session:
if add:
session.add(db_item)
else:
session.merge(db_item)
session.commit()
def create_or_update_db_item(db_item, new_item):
"""
Updates an existing or creates a new database item.
"""
with session_scope() as session:
# if database item exists
if db_item is not None:
# returning if database item is unchanged
if db_item == new_item:
return
# updating database item otherwise
else:
db_item.update(new_item)
session.merge(db_item)
# creating database item otherwise
else:
session.add(new_item)
session.commit()
Add alternate function to create or update database item
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
from .common import session_scope
logger = logging.getLogger()
def commit_db_item(db_item, add=False):
with session_scope() as session:
if add:
session.add(db_item)
else:
session.merge(db_item)
session.commit()
def create_or_update_db_item(db_item, new_item):
"""
Updates an existing or creates a new database item.
"""
with session_scope() as session:
# if database item exists
if db_item is not None:
# returning if database item is unchanged
if db_item == new_item:
return
# updating database item otherwise
else:
db_item.update(new_item)
session.merge(db_item)
# creating database item otherwise
else:
session.add(new_item)
session.commit()
def create_or_update_db_item_alternate(db_item, new_item):
"""
Creates or updates a database item.
"""
cls_name = new_item.__class__.HUMAN_READABLE
with session_scope() as session:
if db_item is not None:
if db_item != new_item:
logger.debug("\t+ Updating %s item" % cls_name)
db_item.update(new_item)
return_item = session.merge(db_item)
else:
return_item = db_item
else:
logger.debug("\t+ Adding %s item" % cls_name)
session.add(new_item)
return_item = new_item
session.commit()
return return_item
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .common import session_scope
def commit_db_item(db_item, add=False):
with session_scope() as session:
if add:
session.add(db_item)
else:
session.merge(db_item)
session.commit()
def create_or_update_db_item(db_item, new_item):
"""
Updates an existing or creates a new database item.
"""
with session_scope() as session:
# if database item exists
if db_item is not None:
# returning if database item is unchanged
if db_item == new_item:
return
# updating database item otherwise
else:
db_item.update(new_item)
session.merge(db_item)
# creating database item otherwise
else:
session.add(new_item)
session.commit()
<commit_msg>Add alternate function to create or update database item<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
from .common import session_scope
logger = logging.getLogger()
def commit_db_item(db_item, add=False):
with session_scope() as session:
if add:
session.add(db_item)
else:
session.merge(db_item)
session.commit()
def create_or_update_db_item(db_item, new_item):
"""
Updates an existing or creates a new database item.
"""
with session_scope() as session:
# if database item exists
if db_item is not None:
# returning if database item is unchanged
if db_item == new_item:
return
# updating database item otherwise
else:
db_item.update(new_item)
session.merge(db_item)
# creating database item otherwise
else:
session.add(new_item)
session.commit()
def create_or_update_db_item_alternate(db_item, new_item):
"""
Creates or updates a database item.
"""
cls_name = new_item.__class__.HUMAN_READABLE
with session_scope() as session:
if db_item is not None:
if db_item != new_item:
logger.debug("\t+ Updating %s item" % cls_name)
db_item.update(new_item)
return_item = session.merge(db_item)
else:
return_item = db_item
else:
logger.debug("\t+ Adding %s item" % cls_name)
session.add(new_item)
return_item = new_item
session.commit()
return return_item
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .common import session_scope
def commit_db_item(db_item, add=False):
with session_scope() as session:
if add:
session.add(db_item)
else:
session.merge(db_item)
session.commit()
def create_or_update_db_item(db_item, new_item):
"""
Updates an existing or creates a new database item.
"""
with session_scope() as session:
# if database item exists
if db_item is not None:
# returning if database item is unchanged
if db_item == new_item:
return
# updating database item otherwise
else:
db_item.update(new_item)
session.merge(db_item)
# creating database item otherwise
else:
session.add(new_item)
session.commit()
Add alternate function to create or update database item#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
from .common import session_scope
logger = logging.getLogger()
def commit_db_item(db_item, add=False):
with session_scope() as session:
if add:
session.add(db_item)
else:
session.merge(db_item)
session.commit()
def create_or_update_db_item(db_item, new_item):
"""
Updates an existing or creates a new database item.
"""
with session_scope() as session:
# if database item exists
if db_item is not None:
# returning if database item is unchanged
if db_item == new_item:
return
# updating database item otherwise
else:
db_item.update(new_item)
session.merge(db_item)
# creating database item otherwise
else:
session.add(new_item)
session.commit()
def create_or_update_db_item_alternate(db_item, new_item):
"""
Creates or updates a database item.
"""
cls_name = new_item.__class__.HUMAN_READABLE
with session_scope() as session:
if db_item is not None:
if db_item != new_item:
logger.debug("\t+ Updating %s item" % cls_name)
db_item.update(new_item)
return_item = session.merge(db_item)
else:
return_item = db_item
else:
logger.debug("\t+ Adding %s item" % cls_name)
session.add(new_item)
return_item = new_item
session.commit()
return return_item
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .common import session_scope
def commit_db_item(db_item, add=False):
with session_scope() as session:
if add:
session.add(db_item)
else:
session.merge(db_item)
session.commit()
def create_or_update_db_item(db_item, new_item):
"""
Updates an existing or creates a new database item.
"""
with session_scope() as session:
# if database item exists
if db_item is not None:
# returning if database item is unchanged
if db_item == new_item:
return
# updating database item otherwise
else:
db_item.update(new_item)
session.merge(db_item)
# creating database item otherwise
else:
session.add(new_item)
session.commit()
<commit_msg>Add alternate function to create or update database item<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
from .common import session_scope
logger = logging.getLogger()
def commit_db_item(db_item, add=False):
with session_scope() as session:
if add:
session.add(db_item)
else:
session.merge(db_item)
session.commit()
def create_or_update_db_item(db_item, new_item):
"""
Updates an existing or creates a new database item.
"""
with session_scope() as session:
# if database item exists
if db_item is not None:
# returning if database item is unchanged
if db_item == new_item:
return
# updating database item otherwise
else:
db_item.update(new_item)
session.merge(db_item)
# creating database item otherwise
else:
session.add(new_item)
session.commit()
def create_or_update_db_item_alternate(db_item, new_item):
"""
Creates or updates a database item.
"""
cls_name = new_item.__class__.HUMAN_READABLE
with session_scope() as session:
if db_item is not None:
if db_item != new_item:
logger.debug("\t+ Updating %s item" % cls_name)
db_item.update(new_item)
return_item = session.merge(db_item)
else:
return_item = db_item
else:
logger.debug("\t+ Adding %s item" % cls_name)
session.add(new_item)
return_item = new_item
session.commit()
return return_item
|
2d0901eb60302750cd42007241d4e0f6010bea7c
|
pypeerassets/provider/rpcnode.py
|
pypeerassets/provider/rpcnode.py
|
'''Communicate with local or remote peercoin-daemon via JSON-RPC'''
from operator import itemgetter
try:
from peercoin_rpc import Client
except:
raise EnvironmentError("peercoin_rpc library is required for this to work,\
use pip to install it.")
def select_inputs(cls, total_amount):
'''finds apropriate utxo's to include in rawtx, while being careful
to never spend old transactions with a lot of coin age.
Argument is intiger, returns list of apropriate UTXO's'''
utxo = []
utxo_sum = float(-0.01) ## starts from negative due to minimal fee
for tx in sorted(cls.listunspent(), key=itemgetter('confirmations')):
utxo.append({
"txid": tx["txid"],
"vout": tx["vout"],
"scriptSig": tx["scriptPubKey"],
"amount": cls.gettransaction(tx["txid"])["amount"]
})
utxo_sum += float(tx["amount"])
if utxo_sum >= total_amount:
return utxo
if utxo_sum < total_amount:
raise ValueError("Not enough funds.")
class RpcNode(Client):
select_inputs = select_inputs
@property
def is_testnet(self):
'''check if node is configured to use testnet or mainnet'''
if self.getinfo()["testnet"] is True:
return True
else:
return False
|
'''Communicate with local or remote peercoin-daemon via JSON-RPC'''
from operator import itemgetter
try:
from peercoin_rpc import Client
except:
raise EnvironmentError("peercoin_rpc library is required for this to work,\
use pip to install it.")
def select_inputs(cls, total_amount):
'''finds apropriate utxo's to include in rawtx, while being careful
to never spend old transactions with a lot of coin age.
Argument is intiger, returns list of apropriate UTXO's'''
utxo = []
utxo_sum = float(-0.01) ## starts from negative due to minimal fee
for tx in sorted(cls.listunspent(), key=itemgetter('confirmations')):
utxo.append({
"txid": tx["txid"],
"vout": tx["vout"],
"scriptSig": tx["scriptPubKey"],
"amount": tx["amount"]
})
utxo_sum += float(tx["amount"])
if utxo_sum >= total_amount:
return {'utxos':utxo, 'total':utxo_sum}
if utxo_sum < total_amount:
raise ValueError("Not enough funds.")
class RpcNode(Client):
select_inputs = select_inputs
@property
def is_testnet(self):
'''check if node is configured to use testnet or mainnet'''
if self.getinfo()["testnet"] is True:
return True
else:
return False
|
Return dict with selected utxo list and total
|
Return dict with selected utxo list and total
|
Python
|
bsd-3-clause
|
backpacker69/pypeerassets,PeerAssets/pypeerassets
|
'''Communicate with local or remote peercoin-daemon via JSON-RPC'''
from operator import itemgetter
try:
from peercoin_rpc import Client
except:
raise EnvironmentError("peercoin_rpc library is required for this to work,\
use pip to install it.")
def select_inputs(cls, total_amount):
'''finds apropriate utxo's to include in rawtx, while being careful
to never spend old transactions with a lot of coin age.
Argument is intiger, returns list of apropriate UTXO's'''
utxo = []
utxo_sum = float(-0.01) ## starts from negative due to minimal fee
for tx in sorted(cls.listunspent(), key=itemgetter('confirmations')):
utxo.append({
"txid": tx["txid"],
"vout": tx["vout"],
"scriptSig": tx["scriptPubKey"],
"amount": cls.gettransaction(tx["txid"])["amount"]
})
utxo_sum += float(tx["amount"])
if utxo_sum >= total_amount:
return utxo
if utxo_sum < total_amount:
raise ValueError("Not enough funds.")
class RpcNode(Client):
select_inputs = select_inputs
@property
def is_testnet(self):
'''check if node is configured to use testnet or mainnet'''
if self.getinfo()["testnet"] is True:
return True
else:
return False
Return dict with selected utxo list and total
|
'''Communicate with local or remote peercoin-daemon via JSON-RPC'''
from operator import itemgetter
try:
from peercoin_rpc import Client
except:
raise EnvironmentError("peercoin_rpc library is required for this to work,\
use pip to install it.")
def select_inputs(cls, total_amount):
'''finds apropriate utxo's to include in rawtx, while being careful
to never spend old transactions with a lot of coin age.
Argument is intiger, returns list of apropriate UTXO's'''
utxo = []
utxo_sum = float(-0.01) ## starts from negative due to minimal fee
for tx in sorted(cls.listunspent(), key=itemgetter('confirmations')):
utxo.append({
"txid": tx["txid"],
"vout": tx["vout"],
"scriptSig": tx["scriptPubKey"],
"amount": tx["amount"]
})
utxo_sum += float(tx["amount"])
if utxo_sum >= total_amount:
return {'utxos':utxo, 'total':utxo_sum}
if utxo_sum < total_amount:
raise ValueError("Not enough funds.")
class RpcNode(Client):
select_inputs = select_inputs
@property
def is_testnet(self):
'''check if node is configured to use testnet or mainnet'''
if self.getinfo()["testnet"] is True:
return True
else:
return False
|
<commit_before>
'''Communicate with local or remote peercoin-daemon via JSON-RPC'''
from operator import itemgetter
try:
from peercoin_rpc import Client
except:
raise EnvironmentError("peercoin_rpc library is required for this to work,\
use pip to install it.")
def select_inputs(cls, total_amount):
'''finds apropriate utxo's to include in rawtx, while being careful
to never spend old transactions with a lot of coin age.
Argument is intiger, returns list of apropriate UTXO's'''
utxo = []
utxo_sum = float(-0.01) ## starts from negative due to minimal fee
for tx in sorted(cls.listunspent(), key=itemgetter('confirmations')):
utxo.append({
"txid": tx["txid"],
"vout": tx["vout"],
"scriptSig": tx["scriptPubKey"],
"amount": cls.gettransaction(tx["txid"])["amount"]
})
utxo_sum += float(tx["amount"])
if utxo_sum >= total_amount:
return utxo
if utxo_sum < total_amount:
raise ValueError("Not enough funds.")
class RpcNode(Client):
select_inputs = select_inputs
@property
def is_testnet(self):
'''check if node is configured to use testnet or mainnet'''
if self.getinfo()["testnet"] is True:
return True
else:
return False
<commit_msg>Return dict with selected utxo list and total<commit_after>
|
'''Communicate with local or remote peercoin-daemon via JSON-RPC'''
from operator import itemgetter
try:
from peercoin_rpc import Client
except:
raise EnvironmentError("peercoin_rpc library is required for this to work,\
use pip to install it.")
def select_inputs(cls, total_amount):
'''finds apropriate utxo's to include in rawtx, while being careful
to never spend old transactions with a lot of coin age.
Argument is intiger, returns list of apropriate UTXO's'''
utxo = []
utxo_sum = float(-0.01) ## starts from negative due to minimal fee
for tx in sorted(cls.listunspent(), key=itemgetter('confirmations')):
utxo.append({
"txid": tx["txid"],
"vout": tx["vout"],
"scriptSig": tx["scriptPubKey"],
"amount": tx["amount"]
})
utxo_sum += float(tx["amount"])
if utxo_sum >= total_amount:
return {'utxos':utxo, 'total':utxo_sum}
if utxo_sum < total_amount:
raise ValueError("Not enough funds.")
class RpcNode(Client):
select_inputs = select_inputs
@property
def is_testnet(self):
'''check if node is configured to use testnet or mainnet'''
if self.getinfo()["testnet"] is True:
return True
else:
return False
|
'''Communicate with local or remote peercoin-daemon via JSON-RPC'''
from operator import itemgetter
try:
from peercoin_rpc import Client
except:
raise EnvironmentError("peercoin_rpc library is required for this to work,\
use pip to install it.")
def select_inputs(cls, total_amount):
'''finds apropriate utxo's to include in rawtx, while being careful
to never spend old transactions with a lot of coin age.
Argument is intiger, returns list of apropriate UTXO's'''
utxo = []
utxo_sum = float(-0.01) ## starts from negative due to minimal fee
for tx in sorted(cls.listunspent(), key=itemgetter('confirmations')):
utxo.append({
"txid": tx["txid"],
"vout": tx["vout"],
"scriptSig": tx["scriptPubKey"],
"amount": cls.gettransaction(tx["txid"])["amount"]
})
utxo_sum += float(tx["amount"])
if utxo_sum >= total_amount:
return utxo
if utxo_sum < total_amount:
raise ValueError("Not enough funds.")
class RpcNode(Client):
select_inputs = select_inputs
@property
def is_testnet(self):
'''check if node is configured to use testnet or mainnet'''
if self.getinfo()["testnet"] is True:
return True
else:
return False
Return dict with selected utxo list and total
'''Communicate with local or remote peercoin-daemon via JSON-RPC'''
from operator import itemgetter
try:
from peercoin_rpc import Client
except:
raise EnvironmentError("peercoin_rpc library is required for this to work,\
use pip to install it.")
def select_inputs(cls, total_amount):
'''finds apropriate utxo's to include in rawtx, while being careful
to never spend old transactions with a lot of coin age.
Argument is intiger, returns list of apropriate UTXO's'''
utxo = []
utxo_sum = float(-0.01) ## starts from negative due to minimal fee
for tx in sorted(cls.listunspent(), key=itemgetter('confirmations')):
utxo.append({
"txid": tx["txid"],
"vout": tx["vout"],
"scriptSig": tx["scriptPubKey"],
"amount": tx["amount"]
})
utxo_sum += float(tx["amount"])
if utxo_sum >= total_amount:
return {'utxos':utxo, 'total':utxo_sum}
if utxo_sum < total_amount:
raise ValueError("Not enough funds.")
class RpcNode(Client):
select_inputs = select_inputs
@property
def is_testnet(self):
'''check if node is configured to use testnet or mainnet'''
if self.getinfo()["testnet"] is True:
return True
else:
return False
|
<commit_before>
'''Communicate with local or remote peercoin-daemon via JSON-RPC'''
from operator import itemgetter
try:
from peercoin_rpc import Client
except:
raise EnvironmentError("peercoin_rpc library is required for this to work,\
use pip to install it.")
def select_inputs(cls, total_amount):
'''finds apropriate utxo's to include in rawtx, while being careful
to never spend old transactions with a lot of coin age.
Argument is intiger, returns list of apropriate UTXO's'''
utxo = []
utxo_sum = float(-0.01) ## starts from negative due to minimal fee
for tx in sorted(cls.listunspent(), key=itemgetter('confirmations')):
utxo.append({
"txid": tx["txid"],
"vout": tx["vout"],
"scriptSig": tx["scriptPubKey"],
"amount": cls.gettransaction(tx["txid"])["amount"]
})
utxo_sum += float(tx["amount"])
if utxo_sum >= total_amount:
return utxo
if utxo_sum < total_amount:
raise ValueError("Not enough funds.")
class RpcNode(Client):
select_inputs = select_inputs
@property
def is_testnet(self):
'''check if node is configured to use testnet or mainnet'''
if self.getinfo()["testnet"] is True:
return True
else:
return False
<commit_msg>Return dict with selected utxo list and total<commit_after>
'''Communicate with local or remote peercoin-daemon via JSON-RPC'''
from operator import itemgetter
try:
from peercoin_rpc import Client
except:
raise EnvironmentError("peercoin_rpc library is required for this to work,\
use pip to install it.")
def select_inputs(cls, total_amount):
    '''Pick appropriate UTXOs to cover *total_amount*.

    Outputs are consumed newest-first (fewest confirmations) so that
    old transactions carrying a lot of coin age are never spent.
    Returns a dict: {'utxos': <list of selected inputs>, 'total': <their sum>}.
    Raises ValueError when the unspent outputs cannot cover the amount.
    '''
    selected = []
    running_total = float(-0.01)  # start below zero to reserve the minimal fee
    for unspent in sorted(cls.listunspent(), key=itemgetter('confirmations')):
        selected.append({
            "txid": unspent["txid"],
            "vout": unspent["vout"],
            "scriptSig": unspent["scriptPubKey"],
            "amount": unspent["amount"],
        })
        running_total += float(unspent["amount"])
        if running_total >= total_amount:
            return {'utxos': selected, 'total': running_total}
    if running_total < total_amount:
        raise ValueError("Not enough funds.")
class RpcNode(Client):
    """JSON-RPC node client extended with UTXO selection."""

    select_inputs = select_inputs

    @property
    def is_testnet(self):
        '''True when the node reports it is configured for testnet.'''
        # getinfo() exposes a boolean "testnet" flag in its result dict;
        # `is True` keeps the strict identity check of the original.
        return self.getinfo()["testnet"] is True
|
c3792ccdde5a44979f34d84cebad722c7a64ab64
|
juliet_importer.py
|
juliet_importer.py
|
import os
import imp
modules = {}
def load_modules(path="./modules/"): # Consider adding recursive sorting at some point in the future
names = os.listdir(path)
for name in names:
if not name.endswith(".py"): continue
print("Importing module {0}".format(name))
name = name.split('.')[0]
try:
new_module = imp.load_source(name, path)
modules[name] = new_module
except ImportError as e:
print("Error importing module {0} from directory {1}".format(name,os.getcwd()))
print(e)
continue
print("Success")
load_modules()
|
import os
import imp
modules = {}
def load_modules(path="./modules/"): # Consider adding recursive searching at some point in the future
modules['juliet_module'] = imp.load_source('juliet_module', path + "juliet_module.py")
names = os.listdir(path)
for name in names:
if not name.endswith(".py"): continue
print("Importing module {0}".format(name))
try:
modules[name.split('.')[0]] = imp.load_source(name.split('.')[0], path + name)
except ImportError as e:
print("Error importing module {0} from directory {1}".format(name,os.getcwd()))
print(e)
continue
print("Success")
load_modules()
|
Change juliet_module to load before other modules for dependency reasons
|
Change juliet_module to load before other modules for dependency reasons
|
Python
|
bsd-2-clause
|
halfbro/juliet
|
import os
import imp
modules = {}
def load_modules(path="./modules/"): # Consider adding recursive sorting at some point in the future
names = os.listdir(path)
for name in names:
if not name.endswith(".py"): continue
print("Importing module {0}".format(name))
name = name.split('.')[0]
try:
new_module = imp.load_source(name, path)
modules[name] = new_module
except ImportError as e:
print("Error importing module {0} from directory {1}".format(name,os.getcwd()))
print(e)
continue
print("Success")
load_modules()
Change juliet_module to load before other modules for dependency reasons
|
import os
import imp
modules = {}
def load_modules(path="./modules/"): # Consider adding recursive searching at some point in the future
modules['juliet_module'] = imp.load_source('juliet_module', path + "juliet_module.py")
names = os.listdir(path)
for name in names:
if not name.endswith(".py"): continue
print("Importing module {0}".format(name))
try:
modules[name.split('.')[0]] = imp.load_source(name.split('.')[0], path + name)
except ImportError as e:
print("Error importing module {0} from directory {1}".format(name,os.getcwd()))
print(e)
continue
print("Success")
load_modules()
|
<commit_before>import os
import imp
modules = {}
def load_modules(path="./modules/"): # Consider adding recursive sorting at some point in the future
names = os.listdir(path)
for name in names:
if not name.endswith(".py"): continue
print("Importing module {0}".format(name))
name = name.split('.')[0]
try:
new_module = imp.load_source(name, path)
modules[name] = new_module
except ImportError as e:
print("Error importing module {0} from directory {1}".format(name,os.getcwd()))
print(e)
continue
print("Success")
load_modules()
<commit_msg>Change juliet_module to load before other modules for dependency reasons<commit_after>
|
import os
import imp
modules = {}
def load_modules(path="./modules/"): # Consider adding recursive searching at some point in the future
modules['juliet_module'] = imp.load_source('juliet_module', path + "juliet_module.py")
names = os.listdir(path)
for name in names:
if not name.endswith(".py"): continue
print("Importing module {0}".format(name))
try:
modules[name.split('.')[0]] = imp.load_source(name.split('.')[0], path + name)
except ImportError as e:
print("Error importing module {0} from directory {1}".format(name,os.getcwd()))
print(e)
continue
print("Success")
load_modules()
|
import os
import imp
modules = {}
def load_modules(path="./modules/"): # Consider adding recursive sorting at some point in the future
names = os.listdir(path)
for name in names:
if not name.endswith(".py"): continue
print("Importing module {0}".format(name))
name = name.split('.')[0]
try:
new_module = imp.load_source(name, path)
modules[name] = new_module
except ImportError as e:
print("Error importing module {0} from directory {1}".format(name,os.getcwd()))
print(e)
continue
print("Success")
load_modules()
Change juliet_module to load before other modules for dependency reasonsimport os
import imp
modules = {}
def load_modules(path="./modules/"): # Consider adding recursive searching at some point in the future
modules['juliet_module'] = imp.load_source('juliet_module', path + "juliet_module.py")
names = os.listdir(path)
for name in names:
if not name.endswith(".py"): continue
print("Importing module {0}".format(name))
try:
modules[name.split('.')[0]] = imp.load_source(name.split('.')[0], path + name)
except ImportError as e:
print("Error importing module {0} from directory {1}".format(name,os.getcwd()))
print(e)
continue
print("Success")
load_modules()
|
<commit_before>import os
import imp
modules = {}
def load_modules(path="./modules/"): # Consider adding recursive sorting at some point in the future
names = os.listdir(path)
for name in names:
if not name.endswith(".py"): continue
print("Importing module {0}".format(name))
name = name.split('.')[0]
try:
new_module = imp.load_source(name, path)
modules[name] = new_module
except ImportError as e:
print("Error importing module {0} from directory {1}".format(name,os.getcwd()))
print(e)
continue
print("Success")
load_modules()
<commit_msg>Change juliet_module to load before other modules for dependency reasons<commit_after>import os
import imp
modules = {}
def load_modules(path="./modules/"): # Consider adding recursive searching at some point in the future
    """Populate the module-level ``modules`` dict from *path*.

    ``juliet_module`` is loaded first because the other plug-in modules
    depend on it. NOTE(review): it will then be loaded a second time by
    the directory scan below (the scan does not skip it) — confirm this
    re-import is harmless.
    """
    # Load the dependency base module before anything else.
    modules['juliet_module'] = imp.load_source('juliet_module', path + "juliet_module.py")
    names = os.listdir(path)
    for name in names:
        # Only Python sources are considered plug-in modules.
        if not name.endswith(".py"): continue
        print("Importing module {0}".format(name))
        try:
            # Key the registry by bare module name (filename minus extension).
            modules[name.split('.')[0]] = imp.load_source(name.split('.')[0], path + name)
        except ImportError as e:
            print("Error importing module {0} from directory {1}".format(name,os.getcwd()))
            print(e)
            continue
        print("Success")
# Eagerly load all modules at import time.
load_modules()
|
33ceea40e41d9f568b11e30779b8b7c16ba8f5b8
|
bench/split-file.py
|
bench/split-file.py
|
"""
Split out a monolithic file with many different runs of
indexed_search.py. The resulting files are meant for use in
get-figures.py.
Usage: python split-file.py prefix filename
"""
import sys
prefix = sys.argv[1]
filename = sys.argv[2]
f = open(filename)
sf = None
for line in f:
if line.startswith('Processing database:'):
if sf:
sf.close()
line2 = line.split(':')[1]
# Check if entry is compressed and if has to be processed
line2 = line2[:line2.rfind('.')]
params = line2.split('-')
optlevel = 0
complib = None
for param in params:
if param[0] == 'O' and param[1].isdigit():
optlevel = int(param[1])
elif param[:-1] in ('zlib', 'lzo'):
complib = param
if complib:
sfilename = "%s-O%s-%s.out" % (prefix, optlevel, complib)
else:
sfilename = "%s-O%s.out" % (prefix, optlevel,)
sf = file(sfilename, 'a')
sf.write(line)
f.close()
|
"""
Split out a monolithic file with many different runs of
indexed_search.py. The resulting files are meant for use in
get-figures.py.
Usage: python split-file.py prefix filename
"""
import sys
prefix = sys.argv[1]
filename = sys.argv[2]
f = open(filename)
sf = None
for line in f:
if line.startswith('Processing database:'):
if sf:
sf.close()
line2 = line.split(':')[1]
# Check if entry is compressed and if has to be processed
line2 = line2[:line2.rfind('.')]
params = line2.split('-')
optlevel = 0
complib = None
for param in params:
if param[0] == 'O' and param[1].isdigit():
optlevel = int(param[1])
elif param[:-1] in ('zlib', 'lzo'):
complib = param
if 'PyTables' in prefix:
if complib:
sfilename = "%s-O%s-%s.out" % (prefix, optlevel, complib)
else:
sfilename = "%s-O%s.out" % (prefix, optlevel,)
else:
sfilename = "%s.out" % (prefix,)
sf = file(sfilename, 'a')
if sf:
sf.write(line)
f.close()
|
Support for splitting outputs for PyTables and Postgres indexing benchmarks all in one.
|
Support for splitting outputs for PyTables and Postgres indexing
benchmarks all in one.
git-svn-id: 92c705c98a17f0f7623a131b3c42ed50fcde59b4@2885 1b98710c-d8ec-0310-ae81-f5f2bcd8cb94
|
Python
|
bsd-3-clause
|
jennolsen84/PyTables,rabernat/PyTables,avalentino/PyTables,jack-pappas/PyTables,rdhyee/PyTables,gdementen/PyTables,joonro/PyTables,PyTables/PyTables,mohamed-ali/PyTables,andreabedini/PyTables,tp199911/PyTables,jennolsen84/PyTables,tp199911/PyTables,dotsdl/PyTables,cpcloud/PyTables,tp199911/PyTables,FrancescAlted/PyTables,PyTables/PyTables,dotsdl/PyTables,cpcloud/PyTables,rabernat/PyTables,rabernat/PyTables,andreabedini/PyTables,mohamed-ali/PyTables,gdementen/PyTables,jack-pappas/PyTables,jack-pappas/PyTables,rdhyee/PyTables,mohamed-ali/PyTables,FrancescAlted/PyTables,rdhyee/PyTables,PyTables/PyTables,cpcloud/PyTables,avalentino/PyTables,avalentino/PyTables,rdhyee/PyTables,gdementen/PyTables,rabernat/PyTables,jennolsen84/PyTables,jennolsen84/PyTables,tp199911/PyTables,andreabedini/PyTables,dotsdl/PyTables,mohamed-ali/PyTables,joonro/PyTables,joonro/PyTables,andreabedini/PyTables,jack-pappas/PyTables,dotsdl/PyTables,jack-pappas/PyTables,gdementen/PyTables,cpcloud/PyTables,FrancescAlted/PyTables,joonro/PyTables
|
"""
Split out a monolithic file with many different runs of
indexed_search.py. The resulting files are meant for use in
get-figures.py.
Usage: python split-file.py prefix filename
"""
import sys
prefix = sys.argv[1]
filename = sys.argv[2]
f = open(filename)
sf = None
for line in f:
if line.startswith('Processing database:'):
if sf:
sf.close()
line2 = line.split(':')[1]
# Check if entry is compressed and if has to be processed
line2 = line2[:line2.rfind('.')]
params = line2.split('-')
optlevel = 0
complib = None
for param in params:
if param[0] == 'O' and param[1].isdigit():
optlevel = int(param[1])
elif param[:-1] in ('zlib', 'lzo'):
complib = param
if complib:
sfilename = "%s-O%s-%s.out" % (prefix, optlevel, complib)
else:
sfilename = "%s-O%s.out" % (prefix, optlevel,)
sf = file(sfilename, 'a')
sf.write(line)
f.close()
Support for splitting outputs for PyTables and Postgres indexing
benchmarks all in one.
git-svn-id: 92c705c98a17f0f7623a131b3c42ed50fcde59b4@2885 1b98710c-d8ec-0310-ae81-f5f2bcd8cb94
|
"""
Split out a monolithic file with many different runs of
indexed_search.py. The resulting files are meant for use in
get-figures.py.
Usage: python split-file.py prefix filename
"""
import sys
prefix = sys.argv[1]
filename = sys.argv[2]
f = open(filename)
sf = None
for line in f:
if line.startswith('Processing database:'):
if sf:
sf.close()
line2 = line.split(':')[1]
# Check if entry is compressed and if has to be processed
line2 = line2[:line2.rfind('.')]
params = line2.split('-')
optlevel = 0
complib = None
for param in params:
if param[0] == 'O' and param[1].isdigit():
optlevel = int(param[1])
elif param[:-1] in ('zlib', 'lzo'):
complib = param
if 'PyTables' in prefix:
if complib:
sfilename = "%s-O%s-%s.out" % (prefix, optlevel, complib)
else:
sfilename = "%s-O%s.out" % (prefix, optlevel,)
else:
sfilename = "%s.out" % (prefix,)
sf = file(sfilename, 'a')
if sf:
sf.write(line)
f.close()
|
<commit_before>"""
Split out a monolithic file with many different runs of
indexed_search.py. The resulting files are meant for use in
get-figures.py.
Usage: python split-file.py prefix filename
"""
import sys
prefix = sys.argv[1]
filename = sys.argv[2]
f = open(filename)
sf = None
for line in f:
if line.startswith('Processing database:'):
if sf:
sf.close()
line2 = line.split(':')[1]
# Check if entry is compressed and if has to be processed
line2 = line2[:line2.rfind('.')]
params = line2.split('-')
optlevel = 0
complib = None
for param in params:
if param[0] == 'O' and param[1].isdigit():
optlevel = int(param[1])
elif param[:-1] in ('zlib', 'lzo'):
complib = param
if complib:
sfilename = "%s-O%s-%s.out" % (prefix, optlevel, complib)
else:
sfilename = "%s-O%s.out" % (prefix, optlevel,)
sf = file(sfilename, 'a')
sf.write(line)
f.close()
<commit_msg>Support for splitting outputs for PyTables and Postgres indexing
benchmarks all in one.
git-svn-id: 92c705c98a17f0f7623a131b3c42ed50fcde59b4@2885 1b98710c-d8ec-0310-ae81-f5f2bcd8cb94<commit_after>
|
"""
Split out a monolithic file with many different runs of
indexed_search.py. The resulting files are meant for use in
get-figures.py.
Usage: python split-file.py prefix filename
"""
import sys
prefix = sys.argv[1]
filename = sys.argv[2]
f = open(filename)
sf = None
for line in f:
if line.startswith('Processing database:'):
if sf:
sf.close()
line2 = line.split(':')[1]
# Check if entry is compressed and if has to be processed
line2 = line2[:line2.rfind('.')]
params = line2.split('-')
optlevel = 0
complib = None
for param in params:
if param[0] == 'O' and param[1].isdigit():
optlevel = int(param[1])
elif param[:-1] in ('zlib', 'lzo'):
complib = param
if 'PyTables' in prefix:
if complib:
sfilename = "%s-O%s-%s.out" % (prefix, optlevel, complib)
else:
sfilename = "%s-O%s.out" % (prefix, optlevel,)
else:
sfilename = "%s.out" % (prefix,)
sf = file(sfilename, 'a')
if sf:
sf.write(line)
f.close()
|
"""
Split out a monolithic file with many different runs of
indexed_search.py. The resulting files are meant for use in
get-figures.py.
Usage: python split-file.py prefix filename
"""
import sys
prefix = sys.argv[1]
filename = sys.argv[2]
f = open(filename)
sf = None
for line in f:
if line.startswith('Processing database:'):
if sf:
sf.close()
line2 = line.split(':')[1]
# Check if entry is compressed and if has to be processed
line2 = line2[:line2.rfind('.')]
params = line2.split('-')
optlevel = 0
complib = None
for param in params:
if param[0] == 'O' and param[1].isdigit():
optlevel = int(param[1])
elif param[:-1] in ('zlib', 'lzo'):
complib = param
if complib:
sfilename = "%s-O%s-%s.out" % (prefix, optlevel, complib)
else:
sfilename = "%s-O%s.out" % (prefix, optlevel,)
sf = file(sfilename, 'a')
sf.write(line)
f.close()
Support for splitting outputs for PyTables and Postgres indexing
benchmarks all in one.
git-svn-id: 92c705c98a17f0f7623a131b3c42ed50fcde59b4@2885 1b98710c-d8ec-0310-ae81-f5f2bcd8cb94"""
Split out a monolithic file with many different runs of
indexed_search.py. The resulting files are meant for use in
get-figures.py.
Usage: python split-file.py prefix filename
"""
import sys
prefix = sys.argv[1]
filename = sys.argv[2]
f = open(filename)
sf = None
for line in f:
if line.startswith('Processing database:'):
if sf:
sf.close()
line2 = line.split(':')[1]
# Check if entry is compressed and if has to be processed
line2 = line2[:line2.rfind('.')]
params = line2.split('-')
optlevel = 0
complib = None
for param in params:
if param[0] == 'O' and param[1].isdigit():
optlevel = int(param[1])
elif param[:-1] in ('zlib', 'lzo'):
complib = param
if 'PyTables' in prefix:
if complib:
sfilename = "%s-O%s-%s.out" % (prefix, optlevel, complib)
else:
sfilename = "%s-O%s.out" % (prefix, optlevel,)
else:
sfilename = "%s.out" % (prefix,)
sf = file(sfilename, 'a')
if sf:
sf.write(line)
f.close()
|
<commit_before>"""
Split out a monolithic file with many different runs of
indexed_search.py. The resulting files are meant for use in
get-figures.py.
Usage: python split-file.py prefix filename
"""
import sys
prefix = sys.argv[1]
filename = sys.argv[2]
f = open(filename)
sf = None
for line in f:
if line.startswith('Processing database:'):
if sf:
sf.close()
line2 = line.split(':')[1]
# Check if entry is compressed and if has to be processed
line2 = line2[:line2.rfind('.')]
params = line2.split('-')
optlevel = 0
complib = None
for param in params:
if param[0] == 'O' and param[1].isdigit():
optlevel = int(param[1])
elif param[:-1] in ('zlib', 'lzo'):
complib = param
if complib:
sfilename = "%s-O%s-%s.out" % (prefix, optlevel, complib)
else:
sfilename = "%s-O%s.out" % (prefix, optlevel,)
sf = file(sfilename, 'a')
sf.write(line)
f.close()
<commit_msg>Support for splitting outputs for PyTables and Postgres indexing
benchmarks all in one.
git-svn-id: 92c705c98a17f0f7623a131b3c42ed50fcde59b4@2885 1b98710c-d8ec-0310-ae81-f5f2bcd8cb94<commit_after>"""
Split out a monolithic file with many different runs of
indexed_search.py. The resulting files are meant for use in
get-figures.py.
Usage: python split-file.py prefix filename
"""
import sys
prefix = sys.argv[1]
filename = sys.argv[2]
f = open(filename)
sf = None
for line in f:
if line.startswith('Processing database:'):
if sf:
sf.close()
line2 = line.split(':')[1]
# Check if entry is compressed and if has to be processed
line2 = line2[:line2.rfind('.')]
params = line2.split('-')
optlevel = 0
complib = None
for param in params:
if param[0] == 'O' and param[1].isdigit():
optlevel = int(param[1])
elif param[:-1] in ('zlib', 'lzo'):
complib = param
if 'PyTables' in prefix:
if complib:
sfilename = "%s-O%s-%s.out" % (prefix, optlevel, complib)
else:
sfilename = "%s-O%s.out" % (prefix, optlevel,)
else:
sfilename = "%s.out" % (prefix,)
sf = file(sfilename, 'a')
if sf:
sf.write(line)
f.close()
|
ba026f431ca7196a489dd1157af0c58972fe2356
|
localore/people/wagtail_hooks.py
|
localore/people/wagtail_hooks.py
|
from django.utils.html import format_html
from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('profile_photo', 'full_name', 'production', 'role')
list_filter = ('role', 'production')
search_fields = (
'first_name',
'last_name',
'role',
'biography',
'production__title',
)
def full_name(self, obj): # pylint: disable=no-self-use
return "%s %s" % (
obj.first_name,
obj.last_name
)
full_name.short_description = 'name'
full_name.admin_order_field = 'last_name'
def profile_photo(self, obj):
return format_html(
'<img src="{}" title="{}" alt="{}" style="height:40px">',
obj.photo.file.url,
obj.photo,
"team member profile photo of " + self.full_name(obj)
)
profile_photo.allow_tags = True
profile_photo.short_description = 'photo'
wagtailmodeladmin_register(PeopleAdmin)
|
from django.utils.html import format_html
from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('profile_photo', 'full_name', 'production', 'role')
list_filter = ('role', 'production')
search_fields = (
'first_name',
'last_name',
'role',
'biography',
'production__title',
)
def full_name(self, obj): # pylint: disable=no-self-use
return "%s %s" % (
obj.first_name,
obj.last_name
)
full_name.short_description = 'name'
full_name.admin_order_field = 'last_name'
def profile_photo(self, obj):
if not obj.photo:
return
return format_html(
'<img src="{}" title="{}" alt="{}" style="height:40px">',
obj.photo.file.url,
obj.photo,
"team member profile photo of " + self.full_name(obj)
)
profile_photo.allow_tags = True
profile_photo.short_description = 'photo'
wagtailmodeladmin_register(PeopleAdmin)
|
Fix people list breaking due to deleted photo.
|
Fix people list breaking due to deleted photo.
|
Python
|
mpl-2.0
|
ghostwords/localore,ghostwords/localore,ghostwords/localore
|
from django.utils.html import format_html
from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('profile_photo', 'full_name', 'production', 'role')
list_filter = ('role', 'production')
search_fields = (
'first_name',
'last_name',
'role',
'biography',
'production__title',
)
def full_name(self, obj): # pylint: disable=no-self-use
return "%s %s" % (
obj.first_name,
obj.last_name
)
full_name.short_description = 'name'
full_name.admin_order_field = 'last_name'
def profile_photo(self, obj):
return format_html(
'<img src="{}" title="{}" alt="{}" style="height:40px">',
obj.photo.file.url,
obj.photo,
"team member profile photo of " + self.full_name(obj)
)
profile_photo.allow_tags = True
profile_photo.short_description = 'photo'
wagtailmodeladmin_register(PeopleAdmin)
Fix people list breaking due to deleted photo.
|
from django.utils.html import format_html
from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('profile_photo', 'full_name', 'production', 'role')
list_filter = ('role', 'production')
search_fields = (
'first_name',
'last_name',
'role',
'biography',
'production__title',
)
def full_name(self, obj): # pylint: disable=no-self-use
return "%s %s" % (
obj.first_name,
obj.last_name
)
full_name.short_description = 'name'
full_name.admin_order_field = 'last_name'
def profile_photo(self, obj):
if not obj.photo:
return
return format_html(
'<img src="{}" title="{}" alt="{}" style="height:40px">',
obj.photo.file.url,
obj.photo,
"team member profile photo of " + self.full_name(obj)
)
profile_photo.allow_tags = True
profile_photo.short_description = 'photo'
wagtailmodeladmin_register(PeopleAdmin)
|
<commit_before>from django.utils.html import format_html
from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('profile_photo', 'full_name', 'production', 'role')
list_filter = ('role', 'production')
search_fields = (
'first_name',
'last_name',
'role',
'biography',
'production__title',
)
def full_name(self, obj): # pylint: disable=no-self-use
return "%s %s" % (
obj.first_name,
obj.last_name
)
full_name.short_description = 'name'
full_name.admin_order_field = 'last_name'
def profile_photo(self, obj):
return format_html(
'<img src="{}" title="{}" alt="{}" style="height:40px">',
obj.photo.file.url,
obj.photo,
"team member profile photo of " + self.full_name(obj)
)
profile_photo.allow_tags = True
profile_photo.short_description = 'photo'
wagtailmodeladmin_register(PeopleAdmin)
<commit_msg>Fix people list breaking due to deleted photo.<commit_after>
|
from django.utils.html import format_html
from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
    """Wagtail modeladmin configuration for the Person model.

    Registers a "Team" section in the Wagtail admin menu whose list view
    shows each person's photo, full name, production and role.
    """

    model = Person
    menu_icon = 'group'  # icon shown next to the admin menu entry
    menu_label = 'Team'  # label used in the admin menu
    menu_order = 300     # position of the entry within the menu
    # Columns of the list view; 'profile_photo' and 'full_name' are the
    # computed columns defined below.
    list_display = ('profile_photo', 'full_name', 'production', 'role')
    list_filter = ('role', 'production')
    # Fields covered by the admin search box; 'production__title' follows
    # the relation to the production's title.
    search_fields = (
        'first_name',
        'last_name',
        'role',
        'biography',
        'production__title',
    )

    def full_name(self, obj):  # pylint: disable=no-self-use
        """Return the person's first and last name joined with a space."""
        return "%s %s" % (
            obj.first_name,
            obj.last_name
        )
    # List-column metadata: header text and the model field used for sorting.
    full_name.short_description = 'name'
    full_name.admin_order_field = 'last_name'

    def profile_photo(self, obj):
        """Return an <img> thumbnail for *obj*'s photo, or None if absent.

        Returning early when the photo is missing keeps the list view from
        breaking after a photo has been deleted.
        """
        if not obj.photo:
            return
        return format_html(
            '<img src="{}" title="{}" alt="{}" style="height:40px">',
            obj.photo.file.url,
            obj.photo,
            "team member profile photo of " + self.full_name(obj)
        )
    profile_photo.allow_tags = True            # render the HTML unescaped
    profile_photo.short_description = 'photo'  # column header text
wagtailmodeladmin_register(PeopleAdmin)
|
from django.utils.html import format_html
from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('profile_photo', 'full_name', 'production', 'role')
list_filter = ('role', 'production')
search_fields = (
'first_name',
'last_name',
'role',
'biography',
'production__title',
)
def full_name(self, obj): # pylint: disable=no-self-use
return "%s %s" % (
obj.first_name,
obj.last_name
)
full_name.short_description = 'name'
full_name.admin_order_field = 'last_name'
def profile_photo(self, obj):
return format_html(
'<img src="{}" title="{}" alt="{}" style="height:40px">',
obj.photo.file.url,
obj.photo,
"team member profile photo of " + self.full_name(obj)
)
profile_photo.allow_tags = True
profile_photo.short_description = 'photo'
wagtailmodeladmin_register(PeopleAdmin)
Fix people list breaking due to deleted photo.from django.utils.html import format_html
from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('profile_photo', 'full_name', 'production', 'role')
list_filter = ('role', 'production')
search_fields = (
'first_name',
'last_name',
'role',
'biography',
'production__title',
)
def full_name(self, obj): # pylint: disable=no-self-use
return "%s %s" % (
obj.first_name,
obj.last_name
)
full_name.short_description = 'name'
full_name.admin_order_field = 'last_name'
def profile_photo(self, obj):
if not obj.photo:
return
return format_html(
'<img src="{}" title="{}" alt="{}" style="height:40px">',
obj.photo.file.url,
obj.photo,
"team member profile photo of " + self.full_name(obj)
)
profile_photo.allow_tags = True
profile_photo.short_description = 'photo'
wagtailmodeladmin_register(PeopleAdmin)
|
<commit_before>from django.utils.html import format_html
from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('profile_photo', 'full_name', 'production', 'role')
list_filter = ('role', 'production')
search_fields = (
'first_name',
'last_name',
'role',
'biography',
'production__title',
)
def full_name(self, obj): # pylint: disable=no-self-use
return "%s %s" % (
obj.first_name,
obj.last_name
)
full_name.short_description = 'name'
full_name.admin_order_field = 'last_name'
def profile_photo(self, obj):
return format_html(
'<img src="{}" title="{}" alt="{}" style="height:40px">',
obj.photo.file.url,
obj.photo,
"team member profile photo of " + self.full_name(obj)
)
profile_photo.allow_tags = True
profile_photo.short_description = 'photo'
wagtailmodeladmin_register(PeopleAdmin)
<commit_msg>Fix people list breaking due to deleted photo.<commit_after>from django.utils.html import format_html
from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('profile_photo', 'full_name', 'production', 'role')
list_filter = ('role', 'production')
search_fields = (
'first_name',
'last_name',
'role',
'biography',
'production__title',
)
def full_name(self, obj): # pylint: disable=no-self-use
return "%s %s" % (
obj.first_name,
obj.last_name
)
full_name.short_description = 'name'
full_name.admin_order_field = 'last_name'
def profile_photo(self, obj):
if not obj.photo:
return
return format_html(
'<img src="{}" title="{}" alt="{}" style="height:40px">',
obj.photo.file.url,
obj.photo,
"team member profile photo of " + self.full_name(obj)
)
profile_photo.allow_tags = True
profile_photo.short_description = 'photo'
wagtailmodeladmin_register(PeopleAdmin)
|
a8b4409dd2261edea536f3e8080b90a770eccf70
|
mediacloud/mediawords/tm/mine.py
|
mediacloud/mediawords/tm/mine.py
|
from typing import List
from mediawords.db.handler import DatabaseHandler
from mediawords.util.log import create_logger
from mediawords.util.perl import decode_object_from_bytes_if_needed
l = create_logger(__name__)
class McPostgresRegexMatch(Exception):
"""postgres_regex_match() exception."""
pass
def postgres_regex_match(db: DatabaseHandler, strings: List[str], regex: str) -> bool:
"""Run the regex through the PostgreSQL engine against a given list of strings.
Return True if any string matches the given regex.
This is necessary because very occasionally the wrong combination of text and complex boolean regex will cause Perl
(Python too?) to hang."""
strings = decode_object_from_bytes_if_needed(strings)
regex = decode_object_from_bytes_if_needed(regex)
if not isinstance(strings, list):
raise McPostgresRegexMatch("Strings must be a list, but is: %s" % str(strings))
if len(strings) == 0:
return False
if not isinstance(strings[0], str):
raise McPostgresRegexMatch("Strings must be a list of strings, but is: %s" % str(strings))
full_regex = '(?isx)%s' % regex
match = db.query("""
SELECT 1
FROM UNNEST(%(strings)s) AS string
WHERE string ~ %(regex)s
""", {
'strings': strings, # list gets converted to PostgreSQL's ARRAY[]
'regex': full_regex,
}).hash()
if match is not None:
return True
else:
return False
|
from typing import List
from mediawords.db.handler import DatabaseHandler
from mediawords.util.log import create_logger
from mediawords.util.perl import decode_object_from_bytes_if_needed
l = create_logger(__name__)
class McPostgresRegexMatch(Exception):
"""postgres_regex_match() exception."""
pass
def postgres_regex_match(db: DatabaseHandler, strings: List[str], regex: str) -> bool:
"""Run the regex through the PostgreSQL engine against a given list of strings.
Return True if any string matches the given regex.
This is necessary because very occasionally the wrong combination of text and complex boolean regex will cause Perl
(Python too?) to hang."""
strings = decode_object_from_bytes_if_needed(strings)
regex = decode_object_from_bytes_if_needed(regex)
if not isinstance(strings, list):
raise McPostgresRegexMatch("Strings must be a list, but is: %s" % str(strings))
if len(strings) == 0:
return False
if not isinstance(strings[0], str):
raise McPostgresRegexMatch("Strings must be a list of strings, but is: %s" % str(strings))
full_regex = '(?isx)%s' % regex
match = db.query("""
SELECT 1
FROM UNNEST(%(strings)s) AS string
WHERE string ~ %(regex)s
LIMIT 1
""", {
'strings': strings, # list gets converted to PostgreSQL's ARRAY[]
'regex': full_regex,
}).hash()
if match is not None:
return True
else:
return False
|
Add LIMIT 1 to speed up query
|
Add LIMIT 1 to speed up query
|
Python
|
agpl-3.0
|
berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud
|
from typing import List
from mediawords.db.handler import DatabaseHandler
from mediawords.util.log import create_logger
from mediawords.util.perl import decode_object_from_bytes_if_needed
l = create_logger(__name__)
class McPostgresRegexMatch(Exception):
"""postgres_regex_match() exception."""
pass
def postgres_regex_match(db: DatabaseHandler, strings: List[str], regex: str) -> bool:
"""Run the regex through the PostgreSQL engine against a given list of strings.
Return True if any string matches the given regex.
This is necessary because very occasionally the wrong combination of text and complex boolean regex will cause Perl
(Python too?) to hang."""
strings = decode_object_from_bytes_if_needed(strings)
regex = decode_object_from_bytes_if_needed(regex)
if not isinstance(strings, list):
raise McPostgresRegexMatch("Strings must be a list, but is: %s" % str(strings))
if len(strings) == 0:
return False
if not isinstance(strings[0], str):
raise McPostgresRegexMatch("Strings must be a list of strings, but is: %s" % str(strings))
full_regex = '(?isx)%s' % regex
match = db.query("""
SELECT 1
FROM UNNEST(%(strings)s) AS string
WHERE string ~ %(regex)s
""", {
'strings': strings, # list gets converted to PostgreSQL's ARRAY[]
'regex': full_regex,
}).hash()
if match is not None:
return True
else:
return False
Add LIMIT 1 to speed up query
|
from typing import List
from mediawords.db.handler import DatabaseHandler
from mediawords.util.log import create_logger
from mediawords.util.perl import decode_object_from_bytes_if_needed
l = create_logger(__name__)
class McPostgresRegexMatch(Exception):
"""postgres_regex_match() exception."""
pass
def postgres_regex_match(db: DatabaseHandler, strings: List[str], regex: str) -> bool:
"""Run the regex through the PostgreSQL engine against a given list of strings.
Return True if any string matches the given regex.
This is necessary because very occasionally the wrong combination of text and complex boolean regex will cause Perl
(Python too?) to hang."""
strings = decode_object_from_bytes_if_needed(strings)
regex = decode_object_from_bytes_if_needed(regex)
if not isinstance(strings, list):
raise McPostgresRegexMatch("Strings must be a list, but is: %s" % str(strings))
if len(strings) == 0:
return False
if not isinstance(strings[0], str):
raise McPostgresRegexMatch("Strings must be a list of strings, but is: %s" % str(strings))
full_regex = '(?isx)%s' % regex
match = db.query("""
SELECT 1
FROM UNNEST(%(strings)s) AS string
WHERE string ~ %(regex)s
LIMIT 1
""", {
'strings': strings, # list gets converted to PostgreSQL's ARRAY[]
'regex': full_regex,
}).hash()
if match is not None:
return True
else:
return False
|
<commit_before>from typing import List
from mediawords.db.handler import DatabaseHandler
from mediawords.util.log import create_logger
from mediawords.util.perl import decode_object_from_bytes_if_needed
l = create_logger(__name__)
class McPostgresRegexMatch(Exception):
"""postgres_regex_match() exception."""
pass
def postgres_regex_match(db: DatabaseHandler, strings: List[str], regex: str) -> bool:
"""Run the regex through the PostgreSQL engine against a given list of strings.
Return True if any string matches the given regex.
This is necessary because very occasionally the wrong combination of text and complex boolean regex will cause Perl
(Python too?) to hang."""
strings = decode_object_from_bytes_if_needed(strings)
regex = decode_object_from_bytes_if_needed(regex)
if not isinstance(strings, list):
raise McPostgresRegexMatch("Strings must be a list, but is: %s" % str(strings))
if len(strings) == 0:
return False
if not isinstance(strings[0], str):
raise McPostgresRegexMatch("Strings must be a list of strings, but is: %s" % str(strings))
full_regex = '(?isx)%s' % regex
match = db.query("""
SELECT 1
FROM UNNEST(%(strings)s) AS string
WHERE string ~ %(regex)s
""", {
'strings': strings, # list gets converted to PostgreSQL's ARRAY[]
'regex': full_regex,
}).hash()
if match is not None:
return True
else:
return False
<commit_msg>Add LIMIT 1 to speed up query<commit_after>
|
from typing import List
from mediawords.db.handler import DatabaseHandler
from mediawords.util.log import create_logger
from mediawords.util.perl import decode_object_from_bytes_if_needed
l = create_logger(__name__)
class McPostgresRegexMatch(Exception):
"""postgres_regex_match() exception."""
pass
def postgres_regex_match(db: DatabaseHandler, strings: List[str], regex: str) -> bool:
"""Run the regex through the PostgreSQL engine against a given list of strings.
Return True if any string matches the given regex.
This is necessary because very occasionally the wrong combination of text and complex boolean regex will cause Perl
(Python too?) to hang."""
strings = decode_object_from_bytes_if_needed(strings)
regex = decode_object_from_bytes_if_needed(regex)
if not isinstance(strings, list):
raise McPostgresRegexMatch("Strings must be a list, but is: %s" % str(strings))
if len(strings) == 0:
return False
if not isinstance(strings[0], str):
raise McPostgresRegexMatch("Strings must be a list of strings, but is: %s" % str(strings))
full_regex = '(?isx)%s' % regex
match = db.query("""
SELECT 1
FROM UNNEST(%(strings)s) AS string
WHERE string ~ %(regex)s
LIMIT 1
""", {
'strings': strings, # list gets converted to PostgreSQL's ARRAY[]
'regex': full_regex,
}).hash()
if match is not None:
return True
else:
return False
|
from typing import List
from mediawords.db.handler import DatabaseHandler
from mediawords.util.log import create_logger
from mediawords.util.perl import decode_object_from_bytes_if_needed
l = create_logger(__name__)
class McPostgresRegexMatch(Exception):
"""postgres_regex_match() exception."""
pass
def postgres_regex_match(db: DatabaseHandler, strings: List[str], regex: str) -> bool:
"""Run the regex through the PostgreSQL engine against a given list of strings.
Return True if any string matches the given regex.
This is necessary because very occasionally the wrong combination of text and complex boolean regex will cause Perl
(Python too?) to hang."""
strings = decode_object_from_bytes_if_needed(strings)
regex = decode_object_from_bytes_if_needed(regex)
if not isinstance(strings, list):
raise McPostgresRegexMatch("Strings must be a list, but is: %s" % str(strings))
if len(strings) == 0:
return False
if not isinstance(strings[0], str):
raise McPostgresRegexMatch("Strings must be a list of strings, but is: %s" % str(strings))
full_regex = '(?isx)%s' % regex
match = db.query("""
SELECT 1
FROM UNNEST(%(strings)s) AS string
WHERE string ~ %(regex)s
""", {
'strings': strings, # list gets converted to PostgreSQL's ARRAY[]
'regex': full_regex,
}).hash()
if match is not None:
return True
else:
return False
Add LIMIT 1 to speed up queryfrom typing import List
from mediawords.db.handler import DatabaseHandler
from mediawords.util.log import create_logger
from mediawords.util.perl import decode_object_from_bytes_if_needed
l = create_logger(__name__)
class McPostgresRegexMatch(Exception):
"""postgres_regex_match() exception."""
pass
def postgres_regex_match(db: DatabaseHandler, strings: List[str], regex: str) -> bool:
"""Run the regex through the PostgreSQL engine against a given list of strings.
Return True if any string matches the given regex.
This is necessary because very occasionally the wrong combination of text and complex boolean regex will cause Perl
(Python too?) to hang."""
strings = decode_object_from_bytes_if_needed(strings)
regex = decode_object_from_bytes_if_needed(regex)
if not isinstance(strings, list):
raise McPostgresRegexMatch("Strings must be a list, but is: %s" % str(strings))
if len(strings) == 0:
return False
if not isinstance(strings[0], str):
raise McPostgresRegexMatch("Strings must be a list of strings, but is: %s" % str(strings))
full_regex = '(?isx)%s' % regex
match = db.query("""
SELECT 1
FROM UNNEST(%(strings)s) AS string
WHERE string ~ %(regex)s
LIMIT 1
""", {
'strings': strings, # list gets converted to PostgreSQL's ARRAY[]
'regex': full_regex,
}).hash()
if match is not None:
return True
else:
return False
|
<commit_before>from typing import List
from mediawords.db.handler import DatabaseHandler
from mediawords.util.log import create_logger
from mediawords.util.perl import decode_object_from_bytes_if_needed
l = create_logger(__name__)
class McPostgresRegexMatch(Exception):
"""postgres_regex_match() exception."""
pass
def postgres_regex_match(db: DatabaseHandler, strings: List[str], regex: str) -> bool:
"""Run the regex through the PostgreSQL engine against a given list of strings.
Return True if any string matches the given regex.
This is necessary because very occasionally the wrong combination of text and complex boolean regex will cause Perl
(Python too?) to hang."""
strings = decode_object_from_bytes_if_needed(strings)
regex = decode_object_from_bytes_if_needed(regex)
if not isinstance(strings, list):
raise McPostgresRegexMatch("Strings must be a list, but is: %s" % str(strings))
if len(strings) == 0:
return False
if not isinstance(strings[0], str):
raise McPostgresRegexMatch("Strings must be a list of strings, but is: %s" % str(strings))
full_regex = '(?isx)%s' % regex
match = db.query("""
SELECT 1
FROM UNNEST(%(strings)s) AS string
WHERE string ~ %(regex)s
""", {
'strings': strings, # list gets converted to PostgreSQL's ARRAY[]
'regex': full_regex,
}).hash()
if match is not None:
return True
else:
return False
<commit_msg>Add LIMIT 1 to speed up query<commit_after>from typing import List
from mediawords.db.handler import DatabaseHandler
from mediawords.util.log import create_logger
from mediawords.util.perl import decode_object_from_bytes_if_needed
l = create_logger(__name__)
class McPostgresRegexMatch(Exception):
"""postgres_regex_match() exception."""
pass
def postgres_regex_match(db: DatabaseHandler, strings: List[str], regex: str) -> bool:
    """Evaluate *regex* against *strings* inside the PostgreSQL engine.

    Returns True if at least one of the strings matches. Delegating the
    match to the database avoids rare pathological text/regex combinations
    that can hang the in-process (Perl, possibly Python) regex engine.

    Raises McPostgresRegexMatch if *strings* is not a list of strings.
    """
    strings = decode_object_from_bytes_if_needed(strings)
    regex = decode_object_from_bytes_if_needed(regex)

    if not isinstance(strings, list):
        raise McPostgresRegexMatch("Strings must be a list, but is: %s" % str(strings))
    if not strings:
        return False
    if not isinstance(strings[0], str):
        raise McPostgresRegexMatch("Strings must be a list of strings, but is: %s" % str(strings))

    # Case-insensitive, dot-matches-newline, extended syntax.
    flagged_regex = '(?isx)%s' % regex
    row = db.query("""
        SELECT 1
        FROM UNNEST(%(strings)s) AS string
        WHERE string ~ %(regex)s
        LIMIT 1
    """, {
        'strings': strings,  # adapted to a PostgreSQL ARRAY[]
        'regex': flagged_regex,
    }).hash()
    return row is not None
|
779874f573d8bddb835ac8ac7875f2f04c093222
|
tests/test_client.py
|
tests/test_client.py
|
import six
import sys
import test_helper
import unittest
from authy import AuthyException
from authy.api import AuthyApiClient
from authy.api.resources import Tokens
from authy.api.resources import Users
class ApiClientTest(unittest.TestCase):
def setUp(self):
self.api = AuthyApiClient(test_helper.API_KEY, test_helper.API_URL)
def test_tokens(self):
self.assertIsInstance(self.api.tokens, Tokens)
def test_users(self):
self.assertIsInstance(self.api.users, Users)
def test_version(self):
if six.PY3:
self.assertRegex(self.api.version(), '\d.\d*')
else:
import re
self.assertTrue(re.compile(r'\d.\d*').search(self.api.version()))
if __name__ == "__main__":
unittest.main()
|
import six
import sys
import test_helper
import unittest
from authy import AuthyException
from authy.api import AuthyApiClient
from authy.api.resources import Tokens
from authy.api.resources import Users
class ApiClientTest(unittest.TestCase):
def setUp(self):
self.api = AuthyApiClient(test_helper.API_KEY, test_helper.API_URL)
def test_tokens(self):
self.assertIsInstance(self.api.tokens, Tokens)
def test_users(self):
self.assertIsInstance(self.api.users, Users)
def test_version(self):
if six.PY3:
self.assertRegex(self.api.version(), r'\d.\d*')
else:
import re
self.assertTrue(re.compile(r'\d.\d*').search(self.api.version()))
if __name__ == "__main__":
unittest.main()
|
Fix deprecation warnings due to invalid escape sequences.
|
Fix deprecation warnings due to invalid escape sequences.
|
Python
|
mit
|
authy/authy-python,authy/authy-python
|
import six
import sys
import test_helper
import unittest
from authy import AuthyException
from authy.api import AuthyApiClient
from authy.api.resources import Tokens
from authy.api.resources import Users
class ApiClientTest(unittest.TestCase):
def setUp(self):
self.api = AuthyApiClient(test_helper.API_KEY, test_helper.API_URL)
def test_tokens(self):
self.assertIsInstance(self.api.tokens, Tokens)
def test_users(self):
self.assertIsInstance(self.api.users, Users)
def test_version(self):
if six.PY3:
self.assertRegex(self.api.version(), '\d.\d*')
else:
import re
self.assertTrue(re.compile(r'\d.\d*').search(self.api.version()))
if __name__ == "__main__":
unittest.main()
Fix deprecation warnings due to invalid escape sequences.
|
import six
import sys
import test_helper
import unittest
from authy import AuthyException
from authy.api import AuthyApiClient
from authy.api.resources import Tokens
from authy.api.resources import Users
class ApiClientTest(unittest.TestCase):
def setUp(self):
self.api = AuthyApiClient(test_helper.API_KEY, test_helper.API_URL)
def test_tokens(self):
self.assertIsInstance(self.api.tokens, Tokens)
def test_users(self):
self.assertIsInstance(self.api.users, Users)
def test_version(self):
if six.PY3:
self.assertRegex(self.api.version(), r'\d.\d*')
else:
import re
self.assertTrue(re.compile(r'\d.\d*').search(self.api.version()))
if __name__ == "__main__":
unittest.main()
|
<commit_before>import six
import sys
import test_helper
import unittest
from authy import AuthyException
from authy.api import AuthyApiClient
from authy.api.resources import Tokens
from authy.api.resources import Users
class ApiClientTest(unittest.TestCase):
def setUp(self):
self.api = AuthyApiClient(test_helper.API_KEY, test_helper.API_URL)
def test_tokens(self):
self.assertIsInstance(self.api.tokens, Tokens)
def test_users(self):
self.assertIsInstance(self.api.users, Users)
def test_version(self):
if six.PY3:
self.assertRegex(self.api.version(), '\d.\d*')
else:
import re
self.assertTrue(re.compile(r'\d.\d*').search(self.api.version()))
if __name__ == "__main__":
unittest.main()
<commit_msg>Fix deprecation warnings due to invalid escape sequences.<commit_after>
|
import six
import sys
import test_helper
import unittest
from authy import AuthyException
from authy.api import AuthyApiClient
from authy.api.resources import Tokens
from authy.api.resources import Users
class ApiClientTest(unittest.TestCase):
def setUp(self):
self.api = AuthyApiClient(test_helper.API_KEY, test_helper.API_URL)
def test_tokens(self):
self.assertIsInstance(self.api.tokens, Tokens)
def test_users(self):
self.assertIsInstance(self.api.users, Users)
def test_version(self):
if six.PY3:
self.assertRegex(self.api.version(), r'\d.\d*')
else:
import re
self.assertTrue(re.compile(r'\d.\d*').search(self.api.version()))
if __name__ == "__main__":
unittest.main()
|
import six
import sys
import test_helper
import unittest
from authy import AuthyException
from authy.api import AuthyApiClient
from authy.api.resources import Tokens
from authy.api.resources import Users
class ApiClientTest(unittest.TestCase):
def setUp(self):
self.api = AuthyApiClient(test_helper.API_KEY, test_helper.API_URL)
def test_tokens(self):
self.assertIsInstance(self.api.tokens, Tokens)
def test_users(self):
self.assertIsInstance(self.api.users, Users)
def test_version(self):
if six.PY3:
self.assertRegex(self.api.version(), '\d.\d*')
else:
import re
self.assertTrue(re.compile(r'\d.\d*').search(self.api.version()))
if __name__ == "__main__":
unittest.main()
Fix deprecation warnings due to invalid escape sequences.import six
import sys
import test_helper
import unittest
from authy import AuthyException
from authy.api import AuthyApiClient
from authy.api.resources import Tokens
from authy.api.resources import Users
class ApiClientTest(unittest.TestCase):
def setUp(self):
self.api = AuthyApiClient(test_helper.API_KEY, test_helper.API_URL)
def test_tokens(self):
self.assertIsInstance(self.api.tokens, Tokens)
def test_users(self):
self.assertIsInstance(self.api.users, Users)
def test_version(self):
if six.PY3:
self.assertRegex(self.api.version(), r'\d.\d*')
else:
import re
self.assertTrue(re.compile(r'\d.\d*').search(self.api.version()))
if __name__ == "__main__":
unittest.main()
|
<commit_before>import six
import sys
import test_helper
import unittest
from authy import AuthyException
from authy.api import AuthyApiClient
from authy.api.resources import Tokens
from authy.api.resources import Users
class ApiClientTest(unittest.TestCase):
def setUp(self):
self.api = AuthyApiClient(test_helper.API_KEY, test_helper.API_URL)
def test_tokens(self):
self.assertIsInstance(self.api.tokens, Tokens)
def test_users(self):
self.assertIsInstance(self.api.users, Users)
def test_version(self):
if six.PY3:
self.assertRegex(self.api.version(), '\d.\d*')
else:
import re
self.assertTrue(re.compile(r'\d.\d*').search(self.api.version()))
if __name__ == "__main__":
unittest.main()
<commit_msg>Fix deprecation warnings due to invalid escape sequences.<commit_after>import six
import sys
import test_helper
import unittest
from authy import AuthyException
from authy.api import AuthyApiClient
from authy.api.resources import Tokens
from authy.api.resources import Users
class ApiClientTest(unittest.TestCase):
def setUp(self):
self.api = AuthyApiClient(test_helper.API_KEY, test_helper.API_URL)
def test_tokens(self):
self.assertIsInstance(self.api.tokens, Tokens)
def test_users(self):
self.assertIsInstance(self.api.users, Users)
def test_version(self):
if six.PY3:
self.assertRegex(self.api.version(), r'\d.\d*')
else:
import re
self.assertTrue(re.compile(r'\d.\d*').search(self.api.version()))
if __name__ == "__main__":
unittest.main()
|
5d0df4c15bc28cba8b7c766f3e5bd63a27f8d5b7
|
tflitehub/lit.cfg.py
|
tflitehub/lit.cfg.py
|
import os
import sys
import lit.formats
import lit.util
import lit.llvm
# Configuration file for the 'lit' test runner.
lit.llvm.initialize(lit_config, config)
# name: The name of this test suite.
config.name = 'TFLITEHUB'
config.test_format = lit.formats.ShTest()
# suffixes: A list of file extensions to treat as test files.
config.suffixes = ['.py']
# test_source_root: The root path where tests are located.
config.test_source_root = os.path.dirname(__file__)
#config.use_default_substitutions()
config.excludes = [
'imagenet_test_data.py',
'lit.cfg.py',
'lit.site.cfg.py',
'manual_test.py',
'squad_test_data.py',
'test_util.py',
]
config.substitutions.extend([
('%PYTHON', sys.executable),
])
config.environment['PYTHONPATH'] = ":".join(sys.path)
project_root = os.path.dirname(os.path.dirname(__file__))
# Enable features based on -D FEATURES=hugetest,vulkan
# syntax.
features_param = lit_config.params.get('FEATURES')
if features_param:
config.available_features.update(features_param.split(','))
|
import os
import sys
import lit.formats
import lit.util
import lit.llvm
# Configuration file for the 'lit' test runner.
lit.llvm.initialize(lit_config, config)
# name: The name of this test suite.
config.name = 'TFLITEHUB'
config.test_format = lit.formats.ShTest()
# suffixes: A list of file extensions to treat as test files.
config.suffixes = ['.py']
# test_source_root: The root path where tests are located.
config.test_source_root = os.path.dirname(__file__)
#config.use_default_substitutions()
config.excludes = [
'coco_test_data.py',
'imagenet_test_data.py',
'lit.cfg.py',
'lit.site.cfg.py',
'manual_test.py',
'squad_test_data.py',
'test_util.py',
]
config.substitutions.extend([
('%PYTHON', sys.executable),
])
config.environment['PYTHONPATH'] = ":".join(sys.path)
project_root = os.path.dirname(os.path.dirname(__file__))
# Enable features based on -D FEATURES=hugetest,vulkan
# syntax.
features_param = lit_config.params.get('FEATURES')
if features_param:
config.available_features.update(features_param.split(','))
|
Add coco_test_data.py to lit ignore list
|
Add coco_test_data.py to lit ignore list
|
Python
|
apache-2.0
|
iree-org/iree-samples,iree-org/iree-samples,iree-org/iree-samples,iree-org/iree-samples
|
import os
import sys
import lit.formats
import lit.util
import lit.llvm
# Configuration file for the 'lit' test runner.
lit.llvm.initialize(lit_config, config)
# name: The name of this test suite.
config.name = 'TFLITEHUB'
config.test_format = lit.formats.ShTest()
# suffixes: A list of file extensions to treat as test files.
config.suffixes = ['.py']
# test_source_root: The root path where tests are located.
config.test_source_root = os.path.dirname(__file__)
#config.use_default_substitutions()
config.excludes = [
'imagenet_test_data.py',
'lit.cfg.py',
'lit.site.cfg.py',
'manual_test.py',
'squad_test_data.py',
'test_util.py',
]
config.substitutions.extend([
('%PYTHON', sys.executable),
])
config.environment['PYTHONPATH'] = ":".join(sys.path)
project_root = os.path.dirname(os.path.dirname(__file__))
# Enable features based on -D FEATURES=hugetest,vulkan
# syntax.
features_param = lit_config.params.get('FEATURES')
if features_param:
config.available_features.update(features_param.split(','))
Add coco_test_data.py to lit ignore list
|
import os
import sys
import lit.formats
import lit.util
import lit.llvm
# Configuration file for the 'lit' test runner.
lit.llvm.initialize(lit_config, config)
# name: The name of this test suite.
config.name = 'TFLITEHUB'
config.test_format = lit.formats.ShTest()
# suffixes: A list of file extensions to treat as test files.
config.suffixes = ['.py']
# test_source_root: The root path where tests are located.
config.test_source_root = os.path.dirname(__file__)
#config.use_default_substitutions()
config.excludes = [
'coco_test_data.py',
'imagenet_test_data.py',
'lit.cfg.py',
'lit.site.cfg.py',
'manual_test.py',
'squad_test_data.py',
'test_util.py',
]
config.substitutions.extend([
('%PYTHON', sys.executable),
])
config.environment['PYTHONPATH'] = ":".join(sys.path)
project_root = os.path.dirname(os.path.dirname(__file__))
# Enable features based on -D FEATURES=hugetest,vulkan
# syntax.
features_param = lit_config.params.get('FEATURES')
if features_param:
config.available_features.update(features_param.split(','))
|
<commit_before>import os
import sys
import lit.formats
import lit.util
import lit.llvm
# Configuration file for the 'lit' test runner.
lit.llvm.initialize(lit_config, config)
# name: The name of this test suite.
config.name = 'TFLITEHUB'
config.test_format = lit.formats.ShTest()
# suffixes: A list of file extensions to treat as test files.
config.suffixes = ['.py']
# test_source_root: The root path where tests are located.
config.test_source_root = os.path.dirname(__file__)
#config.use_default_substitutions()
config.excludes = [
'imagenet_test_data.py',
'lit.cfg.py',
'lit.site.cfg.py',
'manual_test.py',
'squad_test_data.py',
'test_util.py',
]
config.substitutions.extend([
('%PYTHON', sys.executable),
])
config.environment['PYTHONPATH'] = ":".join(sys.path)
project_root = os.path.dirname(os.path.dirname(__file__))
# Enable features based on -D FEATURES=hugetest,vulkan
# syntax.
features_param = lit_config.params.get('FEATURES')
if features_param:
config.available_features.update(features_param.split(','))
<commit_msg>Add coco_test_data.py to lit ignore list<commit_after>
|
import os
import sys
import lit.formats
import lit.util
import lit.llvm
# Configuration file for the 'lit' test runner.
lit.llvm.initialize(lit_config, config)
# name: The name of this test suite.
config.name = 'TFLITEHUB'
config.test_format = lit.formats.ShTest()
# suffixes: A list of file extensions to treat as test files.
config.suffixes = ['.py']
# test_source_root: The root path where tests are located.
config.test_source_root = os.path.dirname(__file__)
#config.use_default_substitutions()
config.excludes = [
'coco_test_data.py',
'imagenet_test_data.py',
'lit.cfg.py',
'lit.site.cfg.py',
'manual_test.py',
'squad_test_data.py',
'test_util.py',
]
config.substitutions.extend([
('%PYTHON', sys.executable),
])
config.environment['PYTHONPATH'] = ":".join(sys.path)
project_root = os.path.dirname(os.path.dirname(__file__))
# Enable features based on -D FEATURES=hugetest,vulkan
# syntax.
features_param = lit_config.params.get('FEATURES')
if features_param:
config.available_features.update(features_param.split(','))
|
import os
import sys
import lit.formats
import lit.util
import lit.llvm
# Configuration file for the 'lit' test runner.
lit.llvm.initialize(lit_config, config)
# name: The name of this test suite.
config.name = 'TFLITEHUB'
config.test_format = lit.formats.ShTest()
# suffixes: A list of file extensions to treat as test files.
config.suffixes = ['.py']
# test_source_root: The root path where tests are located.
config.test_source_root = os.path.dirname(__file__)
#config.use_default_substitutions()
config.excludes = [
'imagenet_test_data.py',
'lit.cfg.py',
'lit.site.cfg.py',
'manual_test.py',
'squad_test_data.py',
'test_util.py',
]
config.substitutions.extend([
('%PYTHON', sys.executable),
])
config.environment['PYTHONPATH'] = ":".join(sys.path)
project_root = os.path.dirname(os.path.dirname(__file__))
# Enable features based on -D FEATURES=hugetest,vulkan
# syntax.
features_param = lit_config.params.get('FEATURES')
if features_param:
config.available_features.update(features_param.split(','))
Add coco_test_data.py to lit ignore listimport os
import sys
import lit.formats
import lit.util
import lit.llvm
# Configuration file for the 'lit' test runner.
lit.llvm.initialize(lit_config, config)
# name: The name of this test suite.
config.name = 'TFLITEHUB'
config.test_format = lit.formats.ShTest()
# suffixes: A list of file extensions to treat as test files.
config.suffixes = ['.py']
# test_source_root: The root path where tests are located.
config.test_source_root = os.path.dirname(__file__)
#config.use_default_substitutions()
config.excludes = [
'coco_test_data.py',
'imagenet_test_data.py',
'lit.cfg.py',
'lit.site.cfg.py',
'manual_test.py',
'squad_test_data.py',
'test_util.py',
]
config.substitutions.extend([
('%PYTHON', sys.executable),
])
config.environment['PYTHONPATH'] = ":".join(sys.path)
project_root = os.path.dirname(os.path.dirname(__file__))
# Enable features based on -D FEATURES=hugetest,vulkan
# syntax.
features_param = lit_config.params.get('FEATURES')
if features_param:
config.available_features.update(features_param.split(','))
|
<commit_before>import os
import sys
import lit.formats
import lit.util
import lit.llvm
# Configuration file for the 'lit' test runner.
lit.llvm.initialize(lit_config, config)
# name: The name of this test suite.
config.name = 'TFLITEHUB'
config.test_format = lit.formats.ShTest()
# suffixes: A list of file extensions to treat as test files.
config.suffixes = ['.py']
# test_source_root: The root path where tests are located.
config.test_source_root = os.path.dirname(__file__)
#config.use_default_substitutions()
config.excludes = [
'imagenet_test_data.py',
'lit.cfg.py',
'lit.site.cfg.py',
'manual_test.py',
'squad_test_data.py',
'test_util.py',
]
config.substitutions.extend([
('%PYTHON', sys.executable),
])
config.environment['PYTHONPATH'] = ":".join(sys.path)
project_root = os.path.dirname(os.path.dirname(__file__))
# Enable features based on -D FEATURES=hugetest,vulkan
# syntax.
features_param = lit_config.params.get('FEATURES')
if features_param:
config.available_features.update(features_param.split(','))
<commit_msg>Add coco_test_data.py to lit ignore list<commit_after>import os
import sys
import lit.formats
import lit.util
import lit.llvm
# Configuration file for the 'lit' test runner.
lit.llvm.initialize(lit_config, config)
# name: The name of this test suite.
config.name = 'TFLITEHUB'
config.test_format = lit.formats.ShTest()
# suffixes: A list of file extensions to treat as test files.
config.suffixes = ['.py']
# test_source_root: The root path where tests are located.
config.test_source_root = os.path.dirname(__file__)
#config.use_default_substitutions()
config.excludes = [
'coco_test_data.py',
'imagenet_test_data.py',
'lit.cfg.py',
'lit.site.cfg.py',
'manual_test.py',
'squad_test_data.py',
'test_util.py',
]
config.substitutions.extend([
('%PYTHON', sys.executable),
])
config.environment['PYTHONPATH'] = ":".join(sys.path)
project_root = os.path.dirname(os.path.dirname(__file__))
# Enable features based on -D FEATURES=hugetest,vulkan
# syntax.
features_param = lit_config.params.get('FEATURES')
if features_param:
config.available_features.update(features_param.split(','))
|
6bc11ea44c07cddd567a5039b9442a95e9ce04fe
|
comics/crawler/utils/lxmlparser.py
|
comics/crawler/utils/lxmlparser.py
|
#encoding: utf-8
from lxml.html import parse, fromstring
class LxmlParser(object):
def __init__(self, url=None, string=None):
if url:
self.root = parse(url).getroot()
self.root.make_links_absolute(url)
elif string:
self.root = fromstring(string)
def text(self, selector):
return self.select(selector).text_content()
def src(self, selector):
return self.select(selector).get('src')
def alt(self, selector):
return self.select(selector).get('alt')
def title(self, selector):
return self.select(selector).get('title')
def remove(self, selector):
for element in self.root.cssselect(selector):
element.drop_tree()
def select(self, selector):
elements = self.root.cssselect(selector)
if len(elements) == 0:
raise DoesNotExist('Noting matched the selector: %s' % selector)
elif len(elements) > 1:
raise MultipleElementsReturned('Selector matched %d elements: %s' %
(len(elements), selector))
return elements[0]
class DoesNotExist(Exception):
pass
class MultipleElementsReturned(Exception):
pass
|
#encoding: utf-8
from lxml.html import parse, fromstring
class LxmlParser(object):
def __init__(self, url=None, string=None):
if url is not None:
self.root = parse(url).getroot()
self.root.make_links_absolute(url)
elif string is not None:
self.root = fromstring(string)
else:
raise LxmlParserException()
def text(self, selector):
return self.select(selector).text_content()
def src(self, selector):
return self.select(selector).get('src')
def alt(self, selector):
return self.select(selector).get('alt')
def title(self, selector):
return self.select(selector).get('title')
def remove(self, selector):
for element in self.root.cssselect(selector):
element.drop_tree()
def select(self, selector):
elements = self.root.cssselect(selector)
if len(elements) == 0:
raise DoesNotExist('Noting matched the selector: %s' % selector)
elif len(elements) > 1:
raise MultipleElementsReturned('Selector matched %d elements: %s' %
(len(elements), selector))
return elements[0]
class LxmlParserException(Exception):
pass
class DoesNotExist(LxmlParserException):
pass
class MultipleElementsReturned(LxmlParserException):
pass
|
Update exception handling in LxmlParser
|
Update exception handling in LxmlParser
|
Python
|
agpl-3.0
|
datagutten/comics,klette/comics,klette/comics,jodal/comics,jodal/comics,klette/comics,jodal/comics,datagutten/comics,jodal/comics,datagutten/comics,datagutten/comics
|
#encoding: utf-8
from lxml.html import parse, fromstring
class LxmlParser(object):
def __init__(self, url=None, string=None):
if url:
self.root = parse(url).getroot()
self.root.make_links_absolute(url)
elif string:
self.root = fromstring(string)
def text(self, selector):
return self.select(selector).text_content()
def src(self, selector):
return self.select(selector).get('src')
def alt(self, selector):
return self.select(selector).get('alt')
def title(self, selector):
return self.select(selector).get('title')
def remove(self, selector):
for element in self.root.cssselect(selector):
element.drop_tree()
def select(self, selector):
elements = self.root.cssselect(selector)
if len(elements) == 0:
raise DoesNotExist('Noting matched the selector: %s' % selector)
elif len(elements) > 1:
raise MultipleElementsReturned('Selector matched %d elements: %s' %
(len(elements), selector))
return elements[0]
class DoesNotExist(Exception):
pass
class MultipleElementsReturned(Exception):
pass
Update exception handling in LxmlParser
|
#encoding: utf-8
from lxml.html import parse, fromstring
class LxmlParser(object):
def __init__(self, url=None, string=None):
if url is not None:
self.root = parse(url).getroot()
self.root.make_links_absolute(url)
elif string is not None:
self.root = fromstring(string)
else:
raise LxmlParserException()
def text(self, selector):
return self.select(selector).text_content()
def src(self, selector):
return self.select(selector).get('src')
def alt(self, selector):
return self.select(selector).get('alt')
def title(self, selector):
return self.select(selector).get('title')
def remove(self, selector):
for element in self.root.cssselect(selector):
element.drop_tree()
def select(self, selector):
elements = self.root.cssselect(selector)
if len(elements) == 0:
raise DoesNotExist('Noting matched the selector: %s' % selector)
elif len(elements) > 1:
raise MultipleElementsReturned('Selector matched %d elements: %s' %
(len(elements), selector))
return elements[0]
class LxmlParserException(Exception):
pass
class DoesNotExist(LxmlParserException):
pass
class MultipleElementsReturned(LxmlParserException):
pass
|
<commit_before>#encoding: utf-8
from lxml.html import parse, fromstring
class LxmlParser(object):
def __init__(self, url=None, string=None):
if url:
self.root = parse(url).getroot()
self.root.make_links_absolute(url)
elif string:
self.root = fromstring(string)
def text(self, selector):
return self.select(selector).text_content()
def src(self, selector):
return self.select(selector).get('src')
def alt(self, selector):
return self.select(selector).get('alt')
def title(self, selector):
return self.select(selector).get('title')
def remove(self, selector):
for element in self.root.cssselect(selector):
element.drop_tree()
def select(self, selector):
elements = self.root.cssselect(selector)
if len(elements) == 0:
raise DoesNotExist('Noting matched the selector: %s' % selector)
elif len(elements) > 1:
raise MultipleElementsReturned('Selector matched %d elements: %s' %
(len(elements), selector))
return elements[0]
class DoesNotExist(Exception):
pass
class MultipleElementsReturned(Exception):
pass
<commit_msg>Update exception handling in LxmlParser<commit_after>
|
#encoding: utf-8
from lxml.html import parse, fromstring
class LxmlParser(object):
def __init__(self, url=None, string=None):
if url is not None:
self.root = parse(url).getroot()
self.root.make_links_absolute(url)
elif string is not None:
self.root = fromstring(string)
else:
raise LxmlParserException()
def text(self, selector):
return self.select(selector).text_content()
def src(self, selector):
return self.select(selector).get('src')
def alt(self, selector):
return self.select(selector).get('alt')
def title(self, selector):
return self.select(selector).get('title')
def remove(self, selector):
for element in self.root.cssselect(selector):
element.drop_tree()
def select(self, selector):
elements = self.root.cssselect(selector)
if len(elements) == 0:
raise DoesNotExist('Noting matched the selector: %s' % selector)
elif len(elements) > 1:
raise MultipleElementsReturned('Selector matched %d elements: %s' %
(len(elements), selector))
return elements[0]
class LxmlParserException(Exception):
pass
class DoesNotExist(LxmlParserException):
pass
class MultipleElementsReturned(LxmlParserException):
pass
|
#encoding: utf-8
from lxml.html import parse, fromstring
class LxmlParser(object):
def __init__(self, url=None, string=None):
if url:
self.root = parse(url).getroot()
self.root.make_links_absolute(url)
elif string:
self.root = fromstring(string)
def text(self, selector):
return self.select(selector).text_content()
def src(self, selector):
return self.select(selector).get('src')
def alt(self, selector):
return self.select(selector).get('alt')
def title(self, selector):
return self.select(selector).get('title')
def remove(self, selector):
for element in self.root.cssselect(selector):
element.drop_tree()
def select(self, selector):
elements = self.root.cssselect(selector)
if len(elements) == 0:
raise DoesNotExist('Noting matched the selector: %s' % selector)
elif len(elements) > 1:
raise MultipleElementsReturned('Selector matched %d elements: %s' %
(len(elements), selector))
return elements[0]
class DoesNotExist(Exception):
pass
class MultipleElementsReturned(Exception):
pass
Update exception handling in LxmlParser#encoding: utf-8
from lxml.html import parse, fromstring
class LxmlParser(object):
def __init__(self, url=None, string=None):
if url is not None:
self.root = parse(url).getroot()
self.root.make_links_absolute(url)
elif string is not None:
self.root = fromstring(string)
else:
raise LxmlParserException()
def text(self, selector):
return self.select(selector).text_content()
def src(self, selector):
return self.select(selector).get('src')
def alt(self, selector):
return self.select(selector).get('alt')
def title(self, selector):
return self.select(selector).get('title')
def remove(self, selector):
for element in self.root.cssselect(selector):
element.drop_tree()
def select(self, selector):
elements = self.root.cssselect(selector)
if len(elements) == 0:
raise DoesNotExist('Noting matched the selector: %s' % selector)
elif len(elements) > 1:
raise MultipleElementsReturned('Selector matched %d elements: %s' %
(len(elements), selector))
return elements[0]
class LxmlParserException(Exception):
pass
class DoesNotExist(LxmlParserException):
pass
class MultipleElementsReturned(LxmlParserException):
pass
|
<commit_before>#encoding: utf-8
from lxml.html import parse, fromstring
class LxmlParser(object):
def __init__(self, url=None, string=None):
if url:
self.root = parse(url).getroot()
self.root.make_links_absolute(url)
elif string:
self.root = fromstring(string)
def text(self, selector):
return self.select(selector).text_content()
def src(self, selector):
return self.select(selector).get('src')
def alt(self, selector):
return self.select(selector).get('alt')
def title(self, selector):
return self.select(selector).get('title')
def remove(self, selector):
for element in self.root.cssselect(selector):
element.drop_tree()
def select(self, selector):
elements = self.root.cssselect(selector)
if len(elements) == 0:
raise DoesNotExist('Noting matched the selector: %s' % selector)
elif len(elements) > 1:
raise MultipleElementsReturned('Selector matched %d elements: %s' %
(len(elements), selector))
return elements[0]
class DoesNotExist(Exception):
pass
class MultipleElementsReturned(Exception):
pass
<commit_msg>Update exception handling in LxmlParser<commit_after>#encoding: utf-8
from lxml.html import parse, fromstring
class LxmlParser(object):
def __init__(self, url=None, string=None):
if url is not None:
self.root = parse(url).getroot()
self.root.make_links_absolute(url)
elif string is not None:
self.root = fromstring(string)
else:
raise LxmlParserException()
def text(self, selector):
return self.select(selector).text_content()
def src(self, selector):
return self.select(selector).get('src')
def alt(self, selector):
return self.select(selector).get('alt')
def title(self, selector):
return self.select(selector).get('title')
def remove(self, selector):
for element in self.root.cssselect(selector):
element.drop_tree()
def select(self, selector):
elements = self.root.cssselect(selector)
if len(elements) == 0:
raise DoesNotExist('Noting matched the selector: %s' % selector)
elif len(elements) > 1:
raise MultipleElementsReturned('Selector matched %d elements: %s' %
(len(elements), selector))
return elements[0]
class LxmlParserException(Exception):
pass
class DoesNotExist(LxmlParserException):
pass
class MultipleElementsReturned(LxmlParserException):
pass
|
32789be8f1f98f7538f4452a8118c261037f2d75
|
tempwatcher/watch.py
|
tempwatcher/watch.py
|
import json
import requests
class TemperatureWatch(object):
thermostat_url = None
alert_high = 80
alert_low = 60
_last_response = None
def get_info(self):
r = requests.get(self.thermostat_url + '/tstat')
self._last_response = json.loads(r.text)
return r.text
def check_temp(self):
if not self._last_response:
self.get_info()
if self._last_response['temp'] > self.alert_high:
self.alert('Temperature max of %s exceeded. Currently %s' % (self.alert_high, self._last_response['temp']))
if self._last_response['temp'] < self.alert_low:
self.alert('Temperature min of %s exceeded. Currently %s' % (self.alert_low, self._last_response['temp']))
def alert(self, message):
print(message)
if __name__ == '__main__':
tw = TemperatureWatch()
tw.thermostat_url = 'http://10.0.1.52'
tw.check_temp()
|
import json
import requests
class TemperatureWatch(object):
thermostat_url = None
alert_high = 80
alert_low = 60
_last_response = None
def get_info(self):
r = requests.get(self.thermostat_url + '/tstat')
self._last_response = json.loads(r.text)
return r.text
def check_temp(self):
if not self._last_response:
self.get_info()
if self._last_response['temp'] > self.alert_high:
self.alert('Temperature max of %s exceeded. Currently %s' % (self.alert_high, self._last_response['temp']))
if self._last_response['temp'] < self.alert_low:
self.alert('Temperature min of %s exceeded. Currently %s' % (self.alert_low, self._last_response['temp']))
def alert(self, message):
print(message)
if __name__ == '__main__':
thermostat_ip = '10.0.1.53'
# simple configuration - set the IP, nothing else. Print the alerts when they occur to stdout. Not very useful though...
tw = TemperatureWatch()
tw.thermostat_url = 'http://%s' % thermostat_ip
tw.check_temp()
|
Refactor the initialization a bit to make configuration easier.
|
Refactor the initialization a bit to make configuration easier.
|
Python
|
bsd-3-clause
|
adamfast/tempwatcher
|
import json
import requests
class TemperatureWatch(object):
thermostat_url = None
alert_high = 80
alert_low = 60
_last_response = None
def get_info(self):
r = requests.get(self.thermostat_url + '/tstat')
self._last_response = json.loads(r.text)
return r.text
def check_temp(self):
if not self._last_response:
self.get_info()
if self._last_response['temp'] > self.alert_high:
self.alert('Temperature max of %s exceeded. Currently %s' % (self.alert_high, self._last_response['temp']))
if self._last_response['temp'] < self.alert_low:
self.alert('Temperature min of %s exceeded. Currently %s' % (self.alert_low, self._last_response['temp']))
def alert(self, message):
print(message)
if __name__ == '__main__':
tw = TemperatureWatch()
tw.thermostat_url = 'http://10.0.1.52'
tw.check_temp()
Refactor the initialization a bit to make configuration easier.
|
import json
import requests
class TemperatureWatch(object):
thermostat_url = None
alert_high = 80
alert_low = 60
_last_response = None
def get_info(self):
r = requests.get(self.thermostat_url + '/tstat')
self._last_response = json.loads(r.text)
return r.text
def check_temp(self):
if not self._last_response:
self.get_info()
if self._last_response['temp'] > self.alert_high:
self.alert('Temperature max of %s exceeded. Currently %s' % (self.alert_high, self._last_response['temp']))
if self._last_response['temp'] < self.alert_low:
self.alert('Temperature min of %s exceeded. Currently %s' % (self.alert_low, self._last_response['temp']))
def alert(self, message):
print(message)
if __name__ == '__main__':
thermostat_ip = '10.0.1.53'
# simple configuration - set the IP, nothing else. Print the alerts when they occur to stdout. Not very useful though...
tw = TemperatureWatch()
tw.thermostat_url = 'http://%s' % thermostat_ip
tw.check_temp()
|
<commit_before>import json
import requests
class TemperatureWatch(object):
thermostat_url = None
alert_high = 80
alert_low = 60
_last_response = None
def get_info(self):
r = requests.get(self.thermostat_url + '/tstat')
self._last_response = json.loads(r.text)
return r.text
def check_temp(self):
if not self._last_response:
self.get_info()
if self._last_response['temp'] > self.alert_high:
self.alert('Temperature max of %s exceeded. Currently %s' % (self.alert_high, self._last_response['temp']))
if self._last_response['temp'] < self.alert_low:
self.alert('Temperature min of %s exceeded. Currently %s' % (self.alert_low, self._last_response['temp']))
def alert(self, message):
print(message)
if __name__ == '__main__':
tw = TemperatureWatch()
tw.thermostat_url = 'http://10.0.1.52'
tw.check_temp()
<commit_msg>Refactor the initialization a bit to make configuration easier.<commit_after>
|
import json
import requests
class TemperatureWatch(object):
thermostat_url = None
alert_high = 80
alert_low = 60
_last_response = None
def get_info(self):
r = requests.get(self.thermostat_url + '/tstat')
self._last_response = json.loads(r.text)
return r.text
def check_temp(self):
if not self._last_response:
self.get_info()
if self._last_response['temp'] > self.alert_high:
self.alert('Temperature max of %s exceeded. Currently %s' % (self.alert_high, self._last_response['temp']))
if self._last_response['temp'] < self.alert_low:
self.alert('Temperature min of %s exceeded. Currently %s' % (self.alert_low, self._last_response['temp']))
def alert(self, message):
print(message)
if __name__ == '__main__':
thermostat_ip = '10.0.1.53'
# simple configuration - set the IP, nothing else. Print the alerts when they occur to stdout. Not very useful though...
tw = TemperatureWatch()
tw.thermostat_url = 'http://%s' % thermostat_ip
tw.check_temp()
|
import json
import requests
class TemperatureWatch(object):
thermostat_url = None
alert_high = 80
alert_low = 60
_last_response = None
def get_info(self):
r = requests.get(self.thermostat_url + '/tstat')
self._last_response = json.loads(r.text)
return r.text
def check_temp(self):
if not self._last_response:
self.get_info()
if self._last_response['temp'] > self.alert_high:
self.alert('Temperature max of %s exceeded. Currently %s' % (self.alert_high, self._last_response['temp']))
if self._last_response['temp'] < self.alert_low:
self.alert('Temperature min of %s exceeded. Currently %s' % (self.alert_low, self._last_response['temp']))
def alert(self, message):
print(message)
if __name__ == '__main__':
tw = TemperatureWatch()
tw.thermostat_url = 'http://10.0.1.52'
tw.check_temp()
Refactor the initialization a bit to make configuration easier.import json
import requests
class TemperatureWatch(object):
thermostat_url = None
alert_high = 80
alert_low = 60
_last_response = None
def get_info(self):
r = requests.get(self.thermostat_url + '/tstat')
self._last_response = json.loads(r.text)
return r.text
def check_temp(self):
if not self._last_response:
self.get_info()
if self._last_response['temp'] > self.alert_high:
self.alert('Temperature max of %s exceeded. Currently %s' % (self.alert_high, self._last_response['temp']))
if self._last_response['temp'] < self.alert_low:
self.alert('Temperature min of %s exceeded. Currently %s' % (self.alert_low, self._last_response['temp']))
def alert(self, message):
print(message)
if __name__ == '__main__':
thermostat_ip = '10.0.1.53'
# simple configuration - set the IP, nothing else. Print the alerts when they occur to stdout. Not very useful though...
tw = TemperatureWatch()
tw.thermostat_url = 'http://%s' % thermostat_ip
tw.check_temp()
|
<commit_before>import json
import requests
class TemperatureWatch(object):
thermostat_url = None
alert_high = 80
alert_low = 60
_last_response = None
def get_info(self):
r = requests.get(self.thermostat_url + '/tstat')
self._last_response = json.loads(r.text)
return r.text
def check_temp(self):
if not self._last_response:
self.get_info()
if self._last_response['temp'] > self.alert_high:
self.alert('Temperature max of %s exceeded. Currently %s' % (self.alert_high, self._last_response['temp']))
if self._last_response['temp'] < self.alert_low:
self.alert('Temperature min of %s exceeded. Currently %s' % (self.alert_low, self._last_response['temp']))
def alert(self, message):
print(message)
if __name__ == '__main__':
tw = TemperatureWatch()
tw.thermostat_url = 'http://10.0.1.52'
tw.check_temp()
<commit_msg>Refactor the initialization a bit to make configuration easier.<commit_after>import json
import requests
class TemperatureWatch(object):
thermostat_url = None
alert_high = 80
alert_low = 60
_last_response = None
def get_info(self):
r = requests.get(self.thermostat_url + '/tstat')
self._last_response = json.loads(r.text)
return r.text
def check_temp(self):
if not self._last_response:
self.get_info()
if self._last_response['temp'] > self.alert_high:
self.alert('Temperature max of %s exceeded. Currently %s' % (self.alert_high, self._last_response['temp']))
if self._last_response['temp'] < self.alert_low:
self.alert('Temperature min of %s exceeded. Currently %s' % (self.alert_low, self._last_response['temp']))
def alert(self, message):
print(message)
if __name__ == '__main__':
thermostat_ip = '10.0.1.53'
# simple configuration - set the IP, nothing else. Print the alerts when they occur to stdout. Not very useful though...
tw = TemperatureWatch()
tw.thermostat_url = 'http://%s' % thermostat_ip
tw.check_temp()
|
89804f4d2caeab07b56a90912afc058145620375
|
jal_stats/stats/views.py
|
jal_stats/stats/views.py
|
# from django.contrib.auth.models import User
from django.shortcuts import get_object_or_404
from rest_framework import viewsets, permissions # , serializers
from .models import Stat, Activity
from .permissions import IsAPIUser
from .serializers import ActivitySerializer, ActivityListSerializer, StatSerializer
# Create your views here.
# class UserViewSet(viewsets.ModelViewSet):
# permission_classes = (permissions.IsAuthenticated,
# IsAPIUser)
#
# def list(self, request, *args, **kwargs):
# return []
class ActivityViewSet(viewsets.ModelViewSet):
queryset = Activity.objects.all()
serializer_class = ActivitySerializer
# def get_queryset(self):
# return self.request.user.activity_set.all()
def get_serializer_class(self):
if self.action == 'list':
return ActivitySerializer
else:
return ActivityListSerializer
class StatViewSet(viewsets.ModelViewSet):
serializer_class = StatSerializer
def get_queryset(self):
activity = get_object_or_404(Activity, pk=self.kwargs['activity_pk'])
return Stat.objects.all().filter(
# user=self.request.user,
activity=activity)
def get_serializer_context(self):
context = super().get_serializer_context().copy()
activity = get_object_or_404(Activity, pk=self.kwargs['activity_pk'])
context['activity'] = activity
return context
# def perform_create(self, serializer):
# serializers.save(user=self.request.user)
|
# from django.contrib.auth.models import User
from django.shortcuts import get_object_or_404
from rest_framework import viewsets, mixins, permissions # , serializers
from .models import Stat, Activity
# from .permissions import IsAPIUser
from .serializers import ActivitySerializer, ActivityListSerializer, StatSerializer
# Create your views here.
# class UserViewSet(viewsets.GenericViewSet, mixins.CreateModelMixin):
# permission_classes = (permissions.IsAuthenticated,
# IsAPIUser)
class ActivityViewSet(viewsets.ModelViewSet):
queryset = Activity.objects.all()
serializer_class = ActivitySerializer
# def get_queryset(self):
# return self.request.user.activity_set.all()
def get_serializer_class(self):
if self.action == 'list':
return ActivitySerializer
else:
return ActivityListSerializer
class StatViewSet(viewsets.GenericViewSet, mixins.CreateModelMixin,
mixins.UpdateModelMixin, mixins.DestroyModelMixin):
serializer_class = StatSerializer
def get_queryset(self):
activity = get_object_or_404(Activity, pk=self.kwargs['activity_pk'])
return Stat.objects.all().filter(
# user=self.request.user,
activity=activity)
def get_serializer_context(self):
context = super().get_serializer_context().copy()
activity = get_object_or_404(Activity, pk=self.kwargs['activity_pk'])
context['activity'] = activity
return context
# def perform_create(self, serializer):
# serializers.save(user=self.request.user)
|
Update StatViewSet to generic, add necessary mixins
|
Update StatViewSet to generic, add necessary mixins
|
Python
|
mit
|
jal-stats/django
|
# from django.contrib.auth.models import User
from django.shortcuts import get_object_or_404
from rest_framework import viewsets, permissions # , serializers
from .models import Stat, Activity
from .permissions import IsAPIUser
from .serializers import ActivitySerializer, ActivityListSerializer, StatSerializer
# Create your views here.
# class UserViewSet(viewsets.ModelViewSet):
# permission_classes = (permissions.IsAuthenticated,
# IsAPIUser)
#
# def list(self, request, *args, **kwargs):
# return []
class ActivityViewSet(viewsets.ModelViewSet):
queryset = Activity.objects.all()
serializer_class = ActivitySerializer
# def get_queryset(self):
# return self.request.user.activity_set.all()
def get_serializer_class(self):
if self.action == 'list':
return ActivitySerializer
else:
return ActivityListSerializer
class StatViewSet(viewsets.ModelViewSet):
serializer_class = StatSerializer
def get_queryset(self):
activity = get_object_or_404(Activity, pk=self.kwargs['activity_pk'])
return Stat.objects.all().filter(
# user=self.request.user,
activity=activity)
def get_serializer_context(self):
context = super().get_serializer_context().copy()
activity = get_object_or_404(Activity, pk=self.kwargs['activity_pk'])
context['activity'] = activity
return context
# def perform_create(self, serializer):
# serializers.save(user=self.request.user)
Update StatViewSet to generic, add necessary mixins
|
# from django.contrib.auth.models import User
from django.shortcuts import get_object_or_404
from rest_framework import viewsets, mixins, permissions # , serializers
from .models import Stat, Activity
# from .permissions import IsAPIUser
from .serializers import ActivitySerializer, ActivityListSerializer, StatSerializer
# Create your views here.
# class UserViewSet(viewsets.GenericViewSet, mixins.CreateModelMixin):
# permission_classes = (permissions.IsAuthenticated,
# IsAPIUser)
class ActivityViewSet(viewsets.ModelViewSet):
queryset = Activity.objects.all()
serializer_class = ActivitySerializer
# def get_queryset(self):
# return self.request.user.activity_set.all()
def get_serializer_class(self):
if self.action == 'list':
return ActivitySerializer
else:
return ActivityListSerializer
class StatViewSet(viewsets.GenericViewSet, mixins.CreateModelMixin,
mixins.UpdateModelMixin, mixins.DestroyModelMixin):
serializer_class = StatSerializer
def get_queryset(self):
activity = get_object_or_404(Activity, pk=self.kwargs['activity_pk'])
return Stat.objects.all().filter(
# user=self.request.user,
activity=activity)
def get_serializer_context(self):
context = super().get_serializer_context().copy()
activity = get_object_or_404(Activity, pk=self.kwargs['activity_pk'])
context['activity'] = activity
return context
# def perform_create(self, serializer):
# serializers.save(user=self.request.user)
|
<commit_before># from django.contrib.auth.models import User
from django.shortcuts import get_object_or_404
from rest_framework import viewsets, permissions # , serializers
from .models import Stat, Activity
from .permissions import IsAPIUser
from .serializers import ActivitySerializer, ActivityListSerializer, StatSerializer
# Create your views here.
# class UserViewSet(viewsets.ModelViewSet):
# permission_classes = (permissions.IsAuthenticated,
# IsAPIUser)
#
# def list(self, request, *args, **kwargs):
# return []
class ActivityViewSet(viewsets.ModelViewSet):
queryset = Activity.objects.all()
serializer_class = ActivitySerializer
# def get_queryset(self):
# return self.request.user.activity_set.all()
def get_serializer_class(self):
if self.action == 'list':
return ActivitySerializer
else:
return ActivityListSerializer
class StatViewSet(viewsets.ModelViewSet):
serializer_class = StatSerializer
def get_queryset(self):
activity = get_object_or_404(Activity, pk=self.kwargs['activity_pk'])
return Stat.objects.all().filter(
# user=self.request.user,
activity=activity)
def get_serializer_context(self):
context = super().get_serializer_context().copy()
activity = get_object_or_404(Activity, pk=self.kwargs['activity_pk'])
context['activity'] = activity
return context
# def perform_create(self, serializer):
# serializers.save(user=self.request.user)
<commit_msg>Update StatViewSet to generic, add necessary mixins<commit_after>
|
# from django.contrib.auth.models import User
from django.shortcuts import get_object_or_404
from rest_framework import viewsets, mixins, permissions # , serializers
from .models import Stat, Activity
# from .permissions import IsAPIUser
from .serializers import ActivitySerializer, ActivityListSerializer, StatSerializer
# Create your views here.
# class UserViewSet(viewsets.GenericViewSet, mixins.CreateModelMixin):
# permission_classes = (permissions.IsAuthenticated,
# IsAPIUser)
class ActivityViewSet(viewsets.ModelViewSet):
queryset = Activity.objects.all()
serializer_class = ActivitySerializer
# def get_queryset(self):
# return self.request.user.activity_set.all()
def get_serializer_class(self):
if self.action == 'list':
return ActivitySerializer
else:
return ActivityListSerializer
class StatViewSet(viewsets.GenericViewSet, mixins.CreateModelMixin,
mixins.UpdateModelMixin, mixins.DestroyModelMixin):
serializer_class = StatSerializer
def get_queryset(self):
activity = get_object_or_404(Activity, pk=self.kwargs['activity_pk'])
return Stat.objects.all().filter(
# user=self.request.user,
activity=activity)
def get_serializer_context(self):
context = super().get_serializer_context().copy()
activity = get_object_or_404(Activity, pk=self.kwargs['activity_pk'])
context['activity'] = activity
return context
# def perform_create(self, serializer):
# serializers.save(user=self.request.user)
|
# from django.contrib.auth.models import User
from django.shortcuts import get_object_or_404
from rest_framework import viewsets, permissions # , serializers
from .models import Stat, Activity
from .permissions import IsAPIUser
from .serializers import ActivitySerializer, ActivityListSerializer, StatSerializer
# Create your views here.
# class UserViewSet(viewsets.ModelViewSet):
# permission_classes = (permissions.IsAuthenticated,
# IsAPIUser)
#
# def list(self, request, *args, **kwargs):
# return []
class ActivityViewSet(viewsets.ModelViewSet):
queryset = Activity.objects.all()
serializer_class = ActivitySerializer
# def get_queryset(self):
# return self.request.user.activity_set.all()
def get_serializer_class(self):
if self.action == 'list':
return ActivitySerializer
else:
return ActivityListSerializer
class StatViewSet(viewsets.ModelViewSet):
serializer_class = StatSerializer
def get_queryset(self):
activity = get_object_or_404(Activity, pk=self.kwargs['activity_pk'])
return Stat.objects.all().filter(
# user=self.request.user,
activity=activity)
def get_serializer_context(self):
context = super().get_serializer_context().copy()
activity = get_object_or_404(Activity, pk=self.kwargs['activity_pk'])
context['activity'] = activity
return context
# def perform_create(self, serializer):
# serializers.save(user=self.request.user)
Update StatViewSet to generic, add necessary mixins# from django.contrib.auth.models import User
from django.shortcuts import get_object_or_404
from rest_framework import viewsets, mixins, permissions # , serializers
from .models import Stat, Activity
# from .permissions import IsAPIUser
from .serializers import ActivitySerializer, ActivityListSerializer, StatSerializer
# Create your views here.
# class UserViewSet(viewsets.GenericViewSet, mixins.CreateModelMixin):
# permission_classes = (permissions.IsAuthenticated,
# IsAPIUser)
class ActivityViewSet(viewsets.ModelViewSet):
queryset = Activity.objects.all()
serializer_class = ActivitySerializer
# def get_queryset(self):
# return self.request.user.activity_set.all()
def get_serializer_class(self):
if self.action == 'list':
return ActivitySerializer
else:
return ActivityListSerializer
class StatViewSet(viewsets.GenericViewSet, mixins.CreateModelMixin,
mixins.UpdateModelMixin, mixins.DestroyModelMixin):
serializer_class = StatSerializer
def get_queryset(self):
activity = get_object_or_404(Activity, pk=self.kwargs['activity_pk'])
return Stat.objects.all().filter(
# user=self.request.user,
activity=activity)
def get_serializer_context(self):
context = super().get_serializer_context().copy()
activity = get_object_or_404(Activity, pk=self.kwargs['activity_pk'])
context['activity'] = activity
return context
# def perform_create(self, serializer):
# serializers.save(user=self.request.user)
|
<commit_before># from django.contrib.auth.models import User
from django.shortcuts import get_object_or_404
from rest_framework import viewsets, permissions # , serializers
from .models import Stat, Activity
from .permissions import IsAPIUser
from .serializers import ActivitySerializer, ActivityListSerializer, StatSerializer
# Create your views here.
# class UserViewSet(viewsets.ModelViewSet):
# permission_classes = (permissions.IsAuthenticated,
# IsAPIUser)
#
# def list(self, request, *args, **kwargs):
# return []
class ActivityViewSet(viewsets.ModelViewSet):
queryset = Activity.objects.all()
serializer_class = ActivitySerializer
# def get_queryset(self):
# return self.request.user.activity_set.all()
def get_serializer_class(self):
if self.action == 'list':
return ActivitySerializer
else:
return ActivityListSerializer
class StatViewSet(viewsets.ModelViewSet):
serializer_class = StatSerializer
def get_queryset(self):
activity = get_object_or_404(Activity, pk=self.kwargs['activity_pk'])
return Stat.objects.all().filter(
# user=self.request.user,
activity=activity)
def get_serializer_context(self):
context = super().get_serializer_context().copy()
activity = get_object_or_404(Activity, pk=self.kwargs['activity_pk'])
context['activity'] = activity
return context
# def perform_create(self, serializer):
# serializers.save(user=self.request.user)
<commit_msg>Update StatViewSet to generic, add necessary mixins<commit_after># from django.contrib.auth.models import User
from django.shortcuts import get_object_or_404
from rest_framework import viewsets, mixins, permissions # , serializers
from .models import Stat, Activity
# from .permissions import IsAPIUser
from .serializers import ActivitySerializer, ActivityListSerializer, StatSerializer
# Create your views here.
# class UserViewSet(viewsets.GenericViewSet, mixins.CreateModelMixin):
# permission_classes = (permissions.IsAuthenticated,
# IsAPIUser)
class ActivityViewSet(viewsets.ModelViewSet):
queryset = Activity.objects.all()
serializer_class = ActivitySerializer
# def get_queryset(self):
# return self.request.user.activity_set.all()
def get_serializer_class(self):
if self.action == 'list':
return ActivitySerializer
else:
return ActivityListSerializer
class StatViewSet(viewsets.GenericViewSet, mixins.CreateModelMixin,
mixins.UpdateModelMixin, mixins.DestroyModelMixin):
serializer_class = StatSerializer
def get_queryset(self):
activity = get_object_or_404(Activity, pk=self.kwargs['activity_pk'])
return Stat.objects.all().filter(
# user=self.request.user,
activity=activity)
def get_serializer_context(self):
context = super().get_serializer_context().copy()
activity = get_object_or_404(Activity, pk=self.kwargs['activity_pk'])
context['activity'] = activity
return context
# def perform_create(self, serializer):
# serializers.save(user=self.request.user)
|
9887b962ddc27f7bebe212e169d1a2c442a35239
|
ironic_ui/content/ironic/panel.py
|
ironic_ui/content/ironic/panel.py
|
# Copyright 2016 Cisco Systems, Inc.
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.api import base
from openstack_dashboard.dashboards.admin import dashboard
class Ironic(horizon.Panel):
name = _("Ironic Bare Metal Provisioning")
slug = 'ironic'
permissions = ('openstack.roles.admin',)
def allowed(self, context):
request = context['request']
if not base.is_service_enabled(request, 'baremetal'):
return False
else:
return super(Ironic, self).allowed(context)
def nav(self, context):
request = context['request']
if not base.is_service_enabled(request, 'baremetal'):
return False
else:
return True
dashboard.Admin.register(Ironic)
|
# Copyright 2016 Cisco Systems, Inc.
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.dashboards.admin import dashboard
class Ironic(horizon.Panel):
name = _("Ironic Bare Metal Provisioning")
slug = 'ironic'
permissions = ('openstack.roles.admin', 'openstack.services.baremetal',)
dashboard.Admin.register(Ironic)
|
Use permissions attribute to detect ironic service
|
Use permissions attribute to detect ironic service
Horizon implements a logic to enable/disable panel by permissions
defined in each panel class. This change replaces the current redundant
logic by that built-in feature to simplify how we define requirements
of the Ironic panels.
Change-Id: I4a9dabfea79c23155fb8986fe8386202d7474485
|
Python
|
apache-2.0
|
openstack/ironic-ui,openstack/ironic-ui,openstack/ironic-ui,openstack/ironic-ui
|
# Copyright 2016 Cisco Systems, Inc.
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.api import base
from openstack_dashboard.dashboards.admin import dashboard
class Ironic(horizon.Panel):
name = _("Ironic Bare Metal Provisioning")
slug = 'ironic'
permissions = ('openstack.roles.admin',)
def allowed(self, context):
request = context['request']
if not base.is_service_enabled(request, 'baremetal'):
return False
else:
return super(Ironic, self).allowed(context)
def nav(self, context):
request = context['request']
if not base.is_service_enabled(request, 'baremetal'):
return False
else:
return True
dashboard.Admin.register(Ironic)
Use permissions attribute to detect ironic service
Horizon implements a logic to enable/disable panel by permissions
defined in each panel class. This change replaces the current redundant
logic by that built-in feature to simplify how we define requirements
of the Ironic panels.
Change-Id: I4a9dabfea79c23155fb8986fe8386202d7474485
|
# Copyright 2016 Cisco Systems, Inc.
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.dashboards.admin import dashboard
class Ironic(horizon.Panel):
name = _("Ironic Bare Metal Provisioning")
slug = 'ironic'
permissions = ('openstack.roles.admin', 'openstack.services.baremetal',)
dashboard.Admin.register(Ironic)
|
<commit_before># Copyright 2016 Cisco Systems, Inc.
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.api import base
from openstack_dashboard.dashboards.admin import dashboard
class Ironic(horizon.Panel):
name = _("Ironic Bare Metal Provisioning")
slug = 'ironic'
permissions = ('openstack.roles.admin',)
def allowed(self, context):
request = context['request']
if not base.is_service_enabled(request, 'baremetal'):
return False
else:
return super(Ironic, self).allowed(context)
def nav(self, context):
request = context['request']
if not base.is_service_enabled(request, 'baremetal'):
return False
else:
return True
dashboard.Admin.register(Ironic)
<commit_msg>Use permissions attribute to detect ironic service
Horizon implements a logic to enable/disable panel by permissions
defined in each panel class. This change replaces the current redundant
logic by that built-in feature to simplify how we define requirements
of the Ironic panels.
Change-Id: I4a9dabfea79c23155fb8986fe8386202d7474485<commit_after>
|
# Copyright 2016 Cisco Systems, Inc.
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.dashboards.admin import dashboard
class Ironic(horizon.Panel):
name = _("Ironic Bare Metal Provisioning")
slug = 'ironic'
permissions = ('openstack.roles.admin', 'openstack.services.baremetal',)
dashboard.Admin.register(Ironic)
|
# Copyright 2016 Cisco Systems, Inc.
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.api import base
from openstack_dashboard.dashboards.admin import dashboard
class Ironic(horizon.Panel):
name = _("Ironic Bare Metal Provisioning")
slug = 'ironic'
permissions = ('openstack.roles.admin',)
def allowed(self, context):
request = context['request']
if not base.is_service_enabled(request, 'baremetal'):
return False
else:
return super(Ironic, self).allowed(context)
def nav(self, context):
request = context['request']
if not base.is_service_enabled(request, 'baremetal'):
return False
else:
return True
dashboard.Admin.register(Ironic)
Use permissions attribute to detect ironic service
Horizon implements a logic to enable/disable panel by permissions
defined in each panel class. This change replaces the current redundant
logic by that built-in feature to simplify how we define requirements
of the Ironic panels.
Change-Id: I4a9dabfea79c23155fb8986fe8386202d7474485# Copyright 2016 Cisco Systems, Inc.
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.dashboards.admin import dashboard
class Ironic(horizon.Panel):
name = _("Ironic Bare Metal Provisioning")
slug = 'ironic'
permissions = ('openstack.roles.admin', 'openstack.services.baremetal',)
dashboard.Admin.register(Ironic)
|
<commit_before># Copyright 2016 Cisco Systems, Inc.
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.api import base
from openstack_dashboard.dashboards.admin import dashboard
class Ironic(horizon.Panel):
name = _("Ironic Bare Metal Provisioning")
slug = 'ironic'
permissions = ('openstack.roles.admin',)
def allowed(self, context):
request = context['request']
if not base.is_service_enabled(request, 'baremetal'):
return False
else:
return super(Ironic, self).allowed(context)
def nav(self, context):
request = context['request']
if not base.is_service_enabled(request, 'baremetal'):
return False
else:
return True
dashboard.Admin.register(Ironic)
<commit_msg>Use permissions attribute to detect ironic service
Horizon implements a logic to enable/disable panel by permissions
defined in each panel class. This change replaces the current redundant
logic by that built-in feature to simplify how we define requirements
of the Ironic panels.
Change-Id: I4a9dabfea79c23155fb8986fe8386202d7474485<commit_after># Copyright 2016 Cisco Systems, Inc.
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.dashboards.admin import dashboard
class Ironic(horizon.Panel):
name = _("Ironic Bare Metal Provisioning")
slug = 'ironic'
permissions = ('openstack.roles.admin', 'openstack.services.baremetal',)
dashboard.Admin.register(Ironic)
|
058882a1d0e4ac458fe8cab972010e17c248ee81
|
wate/views.py
|
wate/views.py
|
from wate import app
import db_ops
@app.route('/')
def index():
users = db_ops.users_everything_get()
header = db_ops.COMPLETE_USER_SCHEMA
retval = ""
# First, print the header
for item in header:
retval += ( item + ", " )
retval += ( "<br>"*2 )
# Now print each user
for user in users:
for item in user:
retval += ( str(item) + " " )
retval += "<br>"
return retval
#@app.route('/user/<username>')
#def user_data(username=None):
|
from wate import app
import db_ops
@app.route('/')
def index():
users = db_ops.users_everything_get()
header = db_ops.COMPLETE_USER_SCHEMA
retval = '<table border="1">'
# First, print the header
retval += '<tr>'
for item in header:
retval += "<th>{}</th>".format(item)
retval += '</tr>'
# Now print each user
for user in users:
retval += "<tr>"
for item in user:
retval += "<td>{}</td>".format(item)
retval += "</tr>"
retval += "</table>"
return retval
#@app.route('/user/<username>')
#def user_data(username=None):
|
Make a table for the front page
|
Make a table for the front page
|
Python
|
mit
|
jamesmunns/wate,jamesmunns/wate,jamesmunns/wate
|
from wate import app
import db_ops
@app.route('/')
def index():
users = db_ops.users_everything_get()
header = db_ops.COMPLETE_USER_SCHEMA
retval = ""
# First, print the header
for item in header:
retval += ( item + ", " )
retval += ( "<br>"*2 )
# Now print each user
for user in users:
for item in user:
retval += ( str(item) + " " )
retval += "<br>"
return retval
#@app.route('/user/<username>')
#def user_data(username=None):
Make a table for the front page
|
from wate import app
import db_ops
@app.route('/')
def index():
users = db_ops.users_everything_get()
header = db_ops.COMPLETE_USER_SCHEMA
retval = '<table border="1">'
# First, print the header
retval += '<tr>'
for item in header:
retval += "<th>{}</th>".format(item)
retval += '</tr>'
# Now print each user
for user in users:
retval += "<tr>"
for item in user:
retval += "<td>{}</td>".format(item)
retval += "</tr>"
retval += "</table>"
return retval
#@app.route('/user/<username>')
#def user_data(username=None):
|
<commit_before>from wate import app
import db_ops
@app.route('/')
def index():
users = db_ops.users_everything_get()
header = db_ops.COMPLETE_USER_SCHEMA
retval = ""
# First, print the header
for item in header:
retval += ( item + ", " )
retval += ( "<br>"*2 )
# Now print each user
for user in users:
for item in user:
retval += ( str(item) + " " )
retval += "<br>"
return retval
#@app.route('/user/<username>')
#def user_data(username=None):
<commit_msg>Make a table for the front page<commit_after>
|
from wate import app
import db_ops
@app.route('/')
def index():
users = db_ops.users_everything_get()
header = db_ops.COMPLETE_USER_SCHEMA
retval = '<table border="1">'
# First, print the header
retval += '<tr>'
for item in header:
retval += "<th>{}</th>".format(item)
retval += '</tr>'
# Now print each user
for user in users:
retval += "<tr>"
for item in user:
retval += "<td>{}</td>".format(item)
retval += "</tr>"
retval += "</table>"
return retval
#@app.route('/user/<username>')
#def user_data(username=None):
|
from wate import app
import db_ops
@app.route('/')
def index():
users = db_ops.users_everything_get()
header = db_ops.COMPLETE_USER_SCHEMA
retval = ""
# First, print the header
for item in header:
retval += ( item + ", " )
retval += ( "<br>"*2 )
# Now print each user
for user in users:
for item in user:
retval += ( str(item) + " " )
retval += "<br>"
return retval
#@app.route('/user/<username>')
#def user_data(username=None):
Make a table for the front pagefrom wate import app
import db_ops
@app.route('/')
def index():
users = db_ops.users_everything_get()
header = db_ops.COMPLETE_USER_SCHEMA
retval = '<table border="1">'
# First, print the header
retval += '<tr>'
for item in header:
retval += "<th>{}</th>".format(item)
retval += '</tr>'
# Now print each user
for user in users:
retval += "<tr>"
for item in user:
retval += "<td>{}</td>".format(item)
retval += "</tr>"
retval += "</table>"
return retval
#@app.route('/user/<username>')
#def user_data(username=None):
|
<commit_before>from wate import app
import db_ops
@app.route('/')
def index():
users = db_ops.users_everything_get()
header = db_ops.COMPLETE_USER_SCHEMA
retval = ""
# First, print the header
for item in header:
retval += ( item + ", " )
retval += ( "<br>"*2 )
# Now print each user
for user in users:
for item in user:
retval += ( str(item) + " " )
retval += "<br>"
return retval
#@app.route('/user/<username>')
#def user_data(username=None):
<commit_msg>Make a table for the front page<commit_after>from wate import app
import db_ops
@app.route('/')
def index():
users = db_ops.users_everything_get()
header = db_ops.COMPLETE_USER_SCHEMA
retval = '<table border="1">'
# First, print the header
retval += '<tr>'
for item in header:
retval += "<th>{}</th>".format(item)
retval += '</tr>'
# Now print each user
for user in users:
retval += "<tr>"
for item in user:
retval += "<td>{}</td>".format(item)
retval += "</tr>"
retval += "</table>"
return retval
#@app.route('/user/<username>')
#def user_data(username=None):
|
935043dda123a030130571a2a4bb45b2b13f145c
|
addons/website_quote/__manifest__.py
|
addons/website_quote/__manifest__.py
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Online Proposals',
'category': 'Website',
'summary': 'Sales',
'website': 'https://www.odoo.com/page/quote-builder',
'version': '1.0',
'description': "",
'depends': ['website', 'sale_management', 'mail', 'payment', 'website_mail', 'sale_payment'],
'data': [
'data/website_quote_data.xml',
'report/sale_order_reports.xml',
'report/sale_order_templates.xml',
'report/website_quote_templates.xml',
'views/sale_order_views.xml',
'views/sale_quote_views.xml',
'views/website_quote_templates.xml',
'views/res_config_settings_views.xml',
'security/ir.model.access.csv',
],
'demo': [
'data/website_quote_demo.xml'
],
'qweb': ['static/src/xml/*.xml'],
'installable': True,
}
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Online Proposals',
'category': 'Website',
'summary': 'Sales',
'website': 'https://www.odoo.com/page/quote-builder',
'version': '1.0',
'description': "",
'depends': ['website', 'sale_management', 'mail', 'payment', 'website_mail'],
'data': [
'data/website_quote_data.xml',
'report/sale_order_reports.xml',
'report/sale_order_templates.xml',
'report/website_quote_templates.xml',
'views/sale_order_views.xml',
'views/sale_quote_views.xml',
'views/website_quote_templates.xml',
'views/res_config_settings_views.xml',
'security/ir.model.access.csv',
],
'demo': [
'data/website_quote_demo.xml'
],
'qweb': ['static/src/xml/*.xml'],
'installable': True,
}
|
Revert "[FIX] website_quote: make 'Pay & Confirm' work without website_sale"
|
Revert "[FIX] website_quote: make 'Pay & Confirm' work without website_sale"
No dependency change in stable version
This reverts commit 65a589eb54a1421baa71074701bea2873a83c75f.
|
Python
|
agpl-3.0
|
ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Online Proposals',
'category': 'Website',
'summary': 'Sales',
'website': 'https://www.odoo.com/page/quote-builder',
'version': '1.0',
'description': "",
'depends': ['website', 'sale_management', 'mail', 'payment', 'website_mail', 'sale_payment'],
'data': [
'data/website_quote_data.xml',
'report/sale_order_reports.xml',
'report/sale_order_templates.xml',
'report/website_quote_templates.xml',
'views/sale_order_views.xml',
'views/sale_quote_views.xml',
'views/website_quote_templates.xml',
'views/res_config_settings_views.xml',
'security/ir.model.access.csv',
],
'demo': [
'data/website_quote_demo.xml'
],
'qweb': ['static/src/xml/*.xml'],
'installable': True,
}
Revert "[FIX] website_quote: make 'Pay & Confirm' work without website_sale"
No dependency change in stable version
This reverts commit 65a589eb54a1421baa71074701bea2873a83c75f.
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Online Proposals',
'category': 'Website',
'summary': 'Sales',
'website': 'https://www.odoo.com/page/quote-builder',
'version': '1.0',
'description': "",
'depends': ['website', 'sale_management', 'mail', 'payment', 'website_mail'],
'data': [
'data/website_quote_data.xml',
'report/sale_order_reports.xml',
'report/sale_order_templates.xml',
'report/website_quote_templates.xml',
'views/sale_order_views.xml',
'views/sale_quote_views.xml',
'views/website_quote_templates.xml',
'views/res_config_settings_views.xml',
'security/ir.model.access.csv',
],
'demo': [
'data/website_quote_demo.xml'
],
'qweb': ['static/src/xml/*.xml'],
'installable': True,
}
|
<commit_before># -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Online Proposals',
'category': 'Website',
'summary': 'Sales',
'website': 'https://www.odoo.com/page/quote-builder',
'version': '1.0',
'description': "",
'depends': ['website', 'sale_management', 'mail', 'payment', 'website_mail', 'sale_payment'],
'data': [
'data/website_quote_data.xml',
'report/sale_order_reports.xml',
'report/sale_order_templates.xml',
'report/website_quote_templates.xml',
'views/sale_order_views.xml',
'views/sale_quote_views.xml',
'views/website_quote_templates.xml',
'views/res_config_settings_views.xml',
'security/ir.model.access.csv',
],
'demo': [
'data/website_quote_demo.xml'
],
'qweb': ['static/src/xml/*.xml'],
'installable': True,
}
<commit_msg>Revert "[FIX] website_quote: make 'Pay & Confirm' work without website_sale"
No dependency change in stable version
This reverts commit 65a589eb54a1421baa71074701bea2873a83c75f.<commit_after>
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Online Proposals',
'category': 'Website',
'summary': 'Sales',
'website': 'https://www.odoo.com/page/quote-builder',
'version': '1.0',
'description': "",
'depends': ['website', 'sale_management', 'mail', 'payment', 'website_mail'],
'data': [
'data/website_quote_data.xml',
'report/sale_order_reports.xml',
'report/sale_order_templates.xml',
'report/website_quote_templates.xml',
'views/sale_order_views.xml',
'views/sale_quote_views.xml',
'views/website_quote_templates.xml',
'views/res_config_settings_views.xml',
'security/ir.model.access.csv',
],
'demo': [
'data/website_quote_demo.xml'
],
'qweb': ['static/src/xml/*.xml'],
'installable': True,
}
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Online Proposals',
'category': 'Website',
'summary': 'Sales',
'website': 'https://www.odoo.com/page/quote-builder',
'version': '1.0',
'description': "",
'depends': ['website', 'sale_management', 'mail', 'payment', 'website_mail', 'sale_payment'],
'data': [
'data/website_quote_data.xml',
'report/sale_order_reports.xml',
'report/sale_order_templates.xml',
'report/website_quote_templates.xml',
'views/sale_order_views.xml',
'views/sale_quote_views.xml',
'views/website_quote_templates.xml',
'views/res_config_settings_views.xml',
'security/ir.model.access.csv',
],
'demo': [
'data/website_quote_demo.xml'
],
'qweb': ['static/src/xml/*.xml'],
'installable': True,
}
Revert "[FIX] website_quote: make 'Pay & Confirm' work without website_sale"
No dependency change in stable version
This reverts commit 65a589eb54a1421baa71074701bea2873a83c75f.# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Online Proposals',
'category': 'Website',
'summary': 'Sales',
'website': 'https://www.odoo.com/page/quote-builder',
'version': '1.0',
'description': "",
'depends': ['website', 'sale_management', 'mail', 'payment', 'website_mail'],
'data': [
'data/website_quote_data.xml',
'report/sale_order_reports.xml',
'report/sale_order_templates.xml',
'report/website_quote_templates.xml',
'views/sale_order_views.xml',
'views/sale_quote_views.xml',
'views/website_quote_templates.xml',
'views/res_config_settings_views.xml',
'security/ir.model.access.csv',
],
'demo': [
'data/website_quote_demo.xml'
],
'qweb': ['static/src/xml/*.xml'],
'installable': True,
}
|
<commit_before># -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Online Proposals',
'category': 'Website',
'summary': 'Sales',
'website': 'https://www.odoo.com/page/quote-builder',
'version': '1.0',
'description': "",
'depends': ['website', 'sale_management', 'mail', 'payment', 'website_mail', 'sale_payment'],
'data': [
'data/website_quote_data.xml',
'report/sale_order_reports.xml',
'report/sale_order_templates.xml',
'report/website_quote_templates.xml',
'views/sale_order_views.xml',
'views/sale_quote_views.xml',
'views/website_quote_templates.xml',
'views/res_config_settings_views.xml',
'security/ir.model.access.csv',
],
'demo': [
'data/website_quote_demo.xml'
],
'qweb': ['static/src/xml/*.xml'],
'installable': True,
}
<commit_msg>Revert "[FIX] website_quote: make 'Pay & Confirm' work without website_sale"
No dependency change in stable version
This reverts commit 65a589eb54a1421baa71074701bea2873a83c75f.<commit_after># -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Online Proposals',
'category': 'Website',
'summary': 'Sales',
'website': 'https://www.odoo.com/page/quote-builder',
'version': '1.0',
'description': "",
'depends': ['website', 'sale_management', 'mail', 'payment', 'website_mail'],
'data': [
'data/website_quote_data.xml',
'report/sale_order_reports.xml',
'report/sale_order_templates.xml',
'report/website_quote_templates.xml',
'views/sale_order_views.xml',
'views/sale_quote_views.xml',
'views/website_quote_templates.xml',
'views/res_config_settings_views.xml',
'security/ir.model.access.csv',
],
'demo': [
'data/website_quote_demo.xml'
],
'qweb': ['static/src/xml/*.xml'],
'installable': True,
}
|
c295e3644eb6a49f953c6d7bb346000b5e673c89
|
badgekit_webhooks/views.py
|
badgekit_webhooks/views.py
|
from __future__ import unicode_literals
import datetime
from django.http import HttpResponse, HttpResponseBadRequest
from django.core.exceptions import ValidationError
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
import models
import json
def hello(request):
return HttpResponse("Hello, world. Badges!!!")
@require_POST
@csrf_exempt
def badge_issued_hook(request):
# TODO validate Authorization header
try:
data = json.loads(request.body.decode(request.encoding or 'utf-8'))
expected_keys = set(['action', 'uid', 'email', 'assertionUrl', 'issuedOn'])
if type(data) != dict or set(data.keys()) != expected_keys:
return HttpResponseBadRequest("Unexpected or Missing Fields")
data['issuedOn'] = datetime.datetime.fromtimestamp(data['issuedOn'])
del data['action']
obj = models.BadgeInstanceNotification.objects.create(**data)
obj.full_clean() # throws ValidationError if fields are bad.
obj.save()
except (ValueError, TypeError, ValidationError) as e:
return HttpResponseBadRequest("Bad JSON request: %s" % e.message)
return HttpResponse(json.dumps({"status": "ok"}), content_type="application/json")
|
from __future__ import unicode_literals
import datetime
from django.http import HttpResponse, HttpResponseBadRequest
from django.core.exceptions import ValidationError
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from . import models
import json
def hello(request):
return HttpResponse("Hello, world. Badges!!!")
@require_POST
@csrf_exempt
def badge_issued_hook(request):
# TODO validate Authorization header
try:
data = json.loads(request.body.decode(request.encoding or 'utf-8'))
expected_keys = set(['action', 'uid', 'email', 'assertionUrl', 'issuedOn'])
if type(data) != dict or set(data.keys()) != expected_keys:
return HttpResponseBadRequest("Unexpected or Missing Fields")
data['issuedOn'] = datetime.datetime.fromtimestamp(data['issuedOn'])
del data['action']
obj = models.BadgeInstanceNotification.objects.create(**data)
obj.full_clean() # throws ValidationError if fields are bad.
obj.save()
except (ValueError, TypeError, ValidationError) as e:
return HttpResponseBadRequest("Bad JSON request: %s" % e.message)
return HttpResponse(json.dumps({"status": "ok"}), content_type="application/json")
|
Fix relative import for py3
|
Fix relative import for py3
|
Python
|
mit
|
tgs/django-badgekit-webhooks
|
from __future__ import unicode_literals
import datetime
from django.http import HttpResponse, HttpResponseBadRequest
from django.core.exceptions import ValidationError
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
import models
import json
def hello(request):
return HttpResponse("Hello, world. Badges!!!")
@require_POST
@csrf_exempt
def badge_issued_hook(request):
# TODO validate Authorization header
try:
data = json.loads(request.body.decode(request.encoding or 'utf-8'))
expected_keys = set(['action', 'uid', 'email', 'assertionUrl', 'issuedOn'])
if type(data) != dict or set(data.keys()) != expected_keys:
return HttpResponseBadRequest("Unexpected or Missing Fields")
data['issuedOn'] = datetime.datetime.fromtimestamp(data['issuedOn'])
del data['action']
obj = models.BadgeInstanceNotification.objects.create(**data)
obj.full_clean() # throws ValidationError if fields are bad.
obj.save()
except (ValueError, TypeError, ValidationError) as e:
return HttpResponseBadRequest("Bad JSON request: %s" % e.message)
return HttpResponse(json.dumps({"status": "ok"}), content_type="application/json")
Fix relative import for py3
|
from __future__ import unicode_literals
import datetime
from django.http import HttpResponse, HttpResponseBadRequest
from django.core.exceptions import ValidationError
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from . import models
import json
def hello(request):
return HttpResponse("Hello, world. Badges!!!")
@require_POST
@csrf_exempt
def badge_issued_hook(request):
# TODO validate Authorization header
try:
data = json.loads(request.body.decode(request.encoding or 'utf-8'))
expected_keys = set(['action', 'uid', 'email', 'assertionUrl', 'issuedOn'])
if type(data) != dict or set(data.keys()) != expected_keys:
return HttpResponseBadRequest("Unexpected or Missing Fields")
data['issuedOn'] = datetime.datetime.fromtimestamp(data['issuedOn'])
del data['action']
obj = models.BadgeInstanceNotification.objects.create(**data)
obj.full_clean() # throws ValidationError if fields are bad.
obj.save()
except (ValueError, TypeError, ValidationError) as e:
return HttpResponseBadRequest("Bad JSON request: %s" % e.message)
return HttpResponse(json.dumps({"status": "ok"}), content_type="application/json")
|
<commit_before>from __future__ import unicode_literals
import datetime
from django.http import HttpResponse, HttpResponseBadRequest
from django.core.exceptions import ValidationError
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
import models
import json
def hello(request):
return HttpResponse("Hello, world. Badges!!!")
@require_POST
@csrf_exempt
def badge_issued_hook(request):
# TODO validate Authorization header
try:
data = json.loads(request.body.decode(request.encoding or 'utf-8'))
expected_keys = set(['action', 'uid', 'email', 'assertionUrl', 'issuedOn'])
if type(data) != dict or set(data.keys()) != expected_keys:
return HttpResponseBadRequest("Unexpected or Missing Fields")
data['issuedOn'] = datetime.datetime.fromtimestamp(data['issuedOn'])
del data['action']
obj = models.BadgeInstanceNotification.objects.create(**data)
obj.full_clean() # throws ValidationError if fields are bad.
obj.save()
except (ValueError, TypeError, ValidationError) as e:
return HttpResponseBadRequest("Bad JSON request: %s" % e.message)
return HttpResponse(json.dumps({"status": "ok"}), content_type="application/json")
<commit_msg>Fix relative import for py3<commit_after>
|
from __future__ import unicode_literals
import datetime
from django.http import HttpResponse, HttpResponseBadRequest
from django.core.exceptions import ValidationError
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from . import models
import json
def hello(request):
return HttpResponse("Hello, world. Badges!!!")
@require_POST
@csrf_exempt
def badge_issued_hook(request):
# TODO validate Authorization header
try:
data = json.loads(request.body.decode(request.encoding or 'utf-8'))
expected_keys = set(['action', 'uid', 'email', 'assertionUrl', 'issuedOn'])
if type(data) != dict or set(data.keys()) != expected_keys:
return HttpResponseBadRequest("Unexpected or Missing Fields")
data['issuedOn'] = datetime.datetime.fromtimestamp(data['issuedOn'])
del data['action']
obj = models.BadgeInstanceNotification.objects.create(**data)
obj.full_clean() # throws ValidationError if fields are bad.
obj.save()
except (ValueError, TypeError, ValidationError) as e:
return HttpResponseBadRequest("Bad JSON request: %s" % e.message)
return HttpResponse(json.dumps({"status": "ok"}), content_type="application/json")
|
from __future__ import unicode_literals
import datetime
from django.http import HttpResponse, HttpResponseBadRequest
from django.core.exceptions import ValidationError
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
import models
import json
def hello(request):
return HttpResponse("Hello, world. Badges!!!")
@require_POST
@csrf_exempt
def badge_issued_hook(request):
# TODO validate Authorization header
try:
data = json.loads(request.body.decode(request.encoding or 'utf-8'))
expected_keys = set(['action', 'uid', 'email', 'assertionUrl', 'issuedOn'])
if type(data) != dict or set(data.keys()) != expected_keys:
return HttpResponseBadRequest("Unexpected or Missing Fields")
data['issuedOn'] = datetime.datetime.fromtimestamp(data['issuedOn'])
del data['action']
obj = models.BadgeInstanceNotification.objects.create(**data)
obj.full_clean() # throws ValidationError if fields are bad.
obj.save()
except (ValueError, TypeError, ValidationError) as e:
return HttpResponseBadRequest("Bad JSON request: %s" % e.message)
return HttpResponse(json.dumps({"status": "ok"}), content_type="application/json")
Fix relative import for py3from __future__ import unicode_literals
import datetime
from django.http import HttpResponse, HttpResponseBadRequest
from django.core.exceptions import ValidationError
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from . import models
import json
def hello(request):
return HttpResponse("Hello, world. Badges!!!")
@require_POST
@csrf_exempt
def badge_issued_hook(request):
# TODO validate Authorization header
try:
data = json.loads(request.body.decode(request.encoding or 'utf-8'))
expected_keys = set(['action', 'uid', 'email', 'assertionUrl', 'issuedOn'])
if type(data) != dict or set(data.keys()) != expected_keys:
return HttpResponseBadRequest("Unexpected or Missing Fields")
data['issuedOn'] = datetime.datetime.fromtimestamp(data['issuedOn'])
del data['action']
obj = models.BadgeInstanceNotification.objects.create(**data)
obj.full_clean() # throws ValidationError if fields are bad.
obj.save()
except (ValueError, TypeError, ValidationError) as e:
return HttpResponseBadRequest("Bad JSON request: %s" % e.message)
return HttpResponse(json.dumps({"status": "ok"}), content_type="application/json")
|
<commit_before>from __future__ import unicode_literals
import datetime
from django.http import HttpResponse, HttpResponseBadRequest
from django.core.exceptions import ValidationError
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
import models
import json
def hello(request):
return HttpResponse("Hello, world. Badges!!!")
@require_POST
@csrf_exempt
def badge_issued_hook(request):
# TODO validate Authorization header
try:
data = json.loads(request.body.decode(request.encoding or 'utf-8'))
expected_keys = set(['action', 'uid', 'email', 'assertionUrl', 'issuedOn'])
if type(data) != dict or set(data.keys()) != expected_keys:
return HttpResponseBadRequest("Unexpected or Missing Fields")
data['issuedOn'] = datetime.datetime.fromtimestamp(data['issuedOn'])
del data['action']
obj = models.BadgeInstanceNotification.objects.create(**data)
obj.full_clean() # throws ValidationError if fields are bad.
obj.save()
except (ValueError, TypeError, ValidationError) as e:
return HttpResponseBadRequest("Bad JSON request: %s" % e.message)
return HttpResponse(json.dumps({"status": "ok"}), content_type="application/json")
<commit_msg>Fix relative import for py3<commit_after>from __future__ import unicode_literals
import datetime
from django.http import HttpResponse, HttpResponseBadRequest
from django.core.exceptions import ValidationError
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from . import models
import json
def hello(request):
return HttpResponse("Hello, world. Badges!!!")
@require_POST
@csrf_exempt
def badge_issued_hook(request):
# TODO validate Authorization header
try:
data = json.loads(request.body.decode(request.encoding or 'utf-8'))
expected_keys = set(['action', 'uid', 'email', 'assertionUrl', 'issuedOn'])
if type(data) != dict or set(data.keys()) != expected_keys:
return HttpResponseBadRequest("Unexpected or Missing Fields")
data['issuedOn'] = datetime.datetime.fromtimestamp(data['issuedOn'])
del data['action']
obj = models.BadgeInstanceNotification.objects.create(**data)
obj.full_clean() # throws ValidationError if fields are bad.
obj.save()
except (ValueError, TypeError, ValidationError) as e:
return HttpResponseBadRequest("Bad JSON request: %s" % e.message)
return HttpResponse(json.dumps({"status": "ok"}), content_type="application/json")
|
0985890e76596495ee83c67d7e4dfa5d6996cf06
|
test_bert_trainer.py
|
test_bert_trainer.py
|
import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
assertEqual(eval_acc1, eval_acc2)
if __name__ == '__main__':
unittest.main()
|
import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
self.assertEqual(eval_acc1, eval_acc2)
if __name__ == '__main__':
unittest.main()
|
Fix minor function call naming bug
|
Fix minor function call naming bug
|
Python
|
apache-2.0
|
googleinterns/smart-news-query-embeddings,googleinterns/smart-news-query-embeddings
|
import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
assertEqual(eval_acc1, eval_acc2)
if __name__ == '__main__':
unittest.main()
Fix minor function call naming bug
|
import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
self.assertEqual(eval_acc1, eval_acc2)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
assertEqual(eval_acc1, eval_acc2)
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix minor function call naming bug<commit_after>
|
import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
self.assertEqual(eval_acc1, eval_acc2)
if __name__ == '__main__':
unittest.main()
|
import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
assertEqual(eval_acc1, eval_acc2)
if __name__ == '__main__':
unittest.main()
Fix minor function call naming bugimport unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
self.assertEqual(eval_acc1, eval_acc2)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
assertEqual(eval_acc1, eval_acc2)
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix minor function call naming bug<commit_after>import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
self.assertEqual(eval_acc1, eval_acc2)
if __name__ == '__main__':
unittest.main()
|
e0ef570c072bbb170a21b460f8422a63293b9983
|
regressiontests/userapp/admin.py
|
regressiontests/userapp/admin.py
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from rollyourown.seo.admin import register_seo_admin, get_inline
from django.contrib import admin
from userapp.seo import Coverage, WithSites
register_seo_admin(admin.site, Coverage)
register_seo_admin(admin.site, WithSites)
from userapp.models import Product, Page, Category, Tag, NoPath
class WithMetadataAdmin(admin.ModelAdmin):
inlines = [get_inline(Coverage), get_inline(WithSites)]
admin.site.register(Product, admin.ModelAdmin)
admin.site.register(Page, admin.ModelAdmin)
admin.site.register(Tag, WithMetadataAdmin)
admin.site.register(NoPath, WithMetadataAdmin)
alternative_site = admin.AdminSite()
#from rollyourown.seo.admin import auto_register_inlines
#alternative_site.register(Tag)
#auto_register_inlines(Coverage, alternative_site)
#alternative_site.register(Page)
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from rollyourown.seo.admin import register_seo_admin, get_inline
from django.contrib import admin
from userapp.seo import Coverage, WithSites
register_seo_admin(admin.site, Coverage)
register_seo_admin(admin.site, WithSites)
from userapp.models import Product, Page, Category, Tag, NoPath
class WithMetadataAdmin(admin.ModelAdmin):
inlines = [get_inline(Coverage), get_inline(WithSites)]
admin.site.register(Product, admin.ModelAdmin)
admin.site.register(Page, admin.ModelAdmin)
admin.site.register(Tag, WithMetadataAdmin)
admin.site.register(NoPath, WithMetadataAdmin)
alternative_site = admin.AdminSite()
#from rollyourown.seo.admin import auto_register_inlines
alternative_site.register(Tag)
#auto_register_inlines(Coverage, alternative_site)
alternative_site.register(Page)
|
Test now fails properly without feature.
|
Test now fails properly without feature.
|
Python
|
bsd-3-clause
|
AlexLSB/django-seo,nikhila05/django-seo,nikhila05/django-seo,MicroPyramid/django-seo,nimoism/django-seo,annikaC/django-seo,willhardy/django-seo,romansalin/django-seo2,asfaltboy/django-seo,tangochin/django-seo,winzard/django-seo2,whyflyru/django-seo,vintasoftware/django-seo,winzard/django-seo,AlexLSB/django-seo,winzard/django-seo,wx-ast/django-seo,romansalin/django-seo,annikaC/django-seo,tangochin/django-seo,winzard/django-seo2,wx-ast/django-seo,whyflyru/django-seo,asfaltboy/django-seo,vintasoftware/django-seo,nimoism/django-seo,shirishagaddi/django-seo,romansalin/django-seo,sandow-digital/django-seo,romansalin/django-seo2,shirishagaddi/django-seo
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from rollyourown.seo.admin import register_seo_admin, get_inline
from django.contrib import admin
from userapp.seo import Coverage, WithSites
register_seo_admin(admin.site, Coverage)
register_seo_admin(admin.site, WithSites)
from userapp.models import Product, Page, Category, Tag, NoPath
class WithMetadataAdmin(admin.ModelAdmin):
inlines = [get_inline(Coverage), get_inline(WithSites)]
admin.site.register(Product, admin.ModelAdmin)
admin.site.register(Page, admin.ModelAdmin)
admin.site.register(Tag, WithMetadataAdmin)
admin.site.register(NoPath, WithMetadataAdmin)
alternative_site = admin.AdminSite()
#from rollyourown.seo.admin import auto_register_inlines
#alternative_site.register(Tag)
#auto_register_inlines(Coverage, alternative_site)
#alternative_site.register(Page)
Test now fails properly without feature.
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from rollyourown.seo.admin import register_seo_admin, get_inline
from django.contrib import admin
from userapp.seo import Coverage, WithSites
register_seo_admin(admin.site, Coverage)
register_seo_admin(admin.site, WithSites)
from userapp.models import Product, Page, Category, Tag, NoPath
class WithMetadataAdmin(admin.ModelAdmin):
inlines = [get_inline(Coverage), get_inline(WithSites)]
admin.site.register(Product, admin.ModelAdmin)
admin.site.register(Page, admin.ModelAdmin)
admin.site.register(Tag, WithMetadataAdmin)
admin.site.register(NoPath, WithMetadataAdmin)
alternative_site = admin.AdminSite()
#from rollyourown.seo.admin import auto_register_inlines
alternative_site.register(Tag)
#auto_register_inlines(Coverage, alternative_site)
alternative_site.register(Page)
|
<commit_before>#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from rollyourown.seo.admin import register_seo_admin, get_inline
from django.contrib import admin
from userapp.seo import Coverage, WithSites
register_seo_admin(admin.site, Coverage)
register_seo_admin(admin.site, WithSites)
from userapp.models import Product, Page, Category, Tag, NoPath
class WithMetadataAdmin(admin.ModelAdmin):
inlines = [get_inline(Coverage), get_inline(WithSites)]
admin.site.register(Product, admin.ModelAdmin)
admin.site.register(Page, admin.ModelAdmin)
admin.site.register(Tag, WithMetadataAdmin)
admin.site.register(NoPath, WithMetadataAdmin)
alternative_site = admin.AdminSite()
#from rollyourown.seo.admin import auto_register_inlines
#alternative_site.register(Tag)
#auto_register_inlines(Coverage, alternative_site)
#alternative_site.register(Page)
<commit_msg>Test now fails properly without feature.<commit_after>
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from rollyourown.seo.admin import register_seo_admin, get_inline
from django.contrib import admin
from userapp.seo import Coverage, WithSites
register_seo_admin(admin.site, Coverage)
register_seo_admin(admin.site, WithSites)
from userapp.models import Product, Page, Category, Tag, NoPath
class WithMetadataAdmin(admin.ModelAdmin):
inlines = [get_inline(Coverage), get_inline(WithSites)]
admin.site.register(Product, admin.ModelAdmin)
admin.site.register(Page, admin.ModelAdmin)
admin.site.register(Tag, WithMetadataAdmin)
admin.site.register(NoPath, WithMetadataAdmin)
alternative_site = admin.AdminSite()
#from rollyourown.seo.admin import auto_register_inlines
alternative_site.register(Tag)
#auto_register_inlines(Coverage, alternative_site)
alternative_site.register(Page)
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from rollyourown.seo.admin import register_seo_admin, get_inline
from django.contrib import admin
from userapp.seo import Coverage, WithSites
register_seo_admin(admin.site, Coverage)
register_seo_admin(admin.site, WithSites)
from userapp.models import Product, Page, Category, Tag, NoPath
class WithMetadataAdmin(admin.ModelAdmin):
inlines = [get_inline(Coverage), get_inline(WithSites)]
admin.site.register(Product, admin.ModelAdmin)
admin.site.register(Page, admin.ModelAdmin)
admin.site.register(Tag, WithMetadataAdmin)
admin.site.register(NoPath, WithMetadataAdmin)
alternative_site = admin.AdminSite()
#from rollyourown.seo.admin import auto_register_inlines
#alternative_site.register(Tag)
#auto_register_inlines(Coverage, alternative_site)
#alternative_site.register(Page)
Test now fails properly without feature.#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from rollyourown.seo.admin import register_seo_admin, get_inline
from django.contrib import admin
from userapp.seo import Coverage, WithSites
register_seo_admin(admin.site, Coverage)
register_seo_admin(admin.site, WithSites)
from userapp.models import Product, Page, Category, Tag, NoPath
class WithMetadataAdmin(admin.ModelAdmin):
inlines = [get_inline(Coverage), get_inline(WithSites)]
admin.site.register(Product, admin.ModelAdmin)
admin.site.register(Page, admin.ModelAdmin)
admin.site.register(Tag, WithMetadataAdmin)
admin.site.register(NoPath, WithMetadataAdmin)
alternative_site = admin.AdminSite()
#from rollyourown.seo.admin import auto_register_inlines
alternative_site.register(Tag)
#auto_register_inlines(Coverage, alternative_site)
alternative_site.register(Page)
|
<commit_before>#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from rollyourown.seo.admin import register_seo_admin, get_inline
from django.contrib import admin
from userapp.seo import Coverage, WithSites
register_seo_admin(admin.site, Coverage)
register_seo_admin(admin.site, WithSites)
from userapp.models import Product, Page, Category, Tag, NoPath
class WithMetadataAdmin(admin.ModelAdmin):
inlines = [get_inline(Coverage), get_inline(WithSites)]
admin.site.register(Product, admin.ModelAdmin)
admin.site.register(Page, admin.ModelAdmin)
admin.site.register(Tag, WithMetadataAdmin)
admin.site.register(NoPath, WithMetadataAdmin)
alternative_site = admin.AdminSite()
#from rollyourown.seo.admin import auto_register_inlines
#alternative_site.register(Tag)
#auto_register_inlines(Coverage, alternative_site)
#alternative_site.register(Page)
<commit_msg>Test now fails properly without feature.<commit_after>#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from rollyourown.seo.admin import register_seo_admin, get_inline
from django.contrib import admin
from userapp.seo import Coverage, WithSites
register_seo_admin(admin.site, Coverage)
register_seo_admin(admin.site, WithSites)
from userapp.models import Product, Page, Category, Tag, NoPath
class WithMetadataAdmin(admin.ModelAdmin):
inlines = [get_inline(Coverage), get_inline(WithSites)]
admin.site.register(Product, admin.ModelAdmin)
admin.site.register(Page, admin.ModelAdmin)
admin.site.register(Tag, WithMetadataAdmin)
admin.site.register(NoPath, WithMetadataAdmin)
alternative_site = admin.AdminSite()
#from rollyourown.seo.admin import auto_register_inlines
alternative_site.register(Tag)
#auto_register_inlines(Coverage, alternative_site)
alternative_site.register(Page)
|
8b8c7f851b96456e80201295af645066ab8f6fbb
|
contrib/internal/build-media.py
|
contrib/internal/build-media.py
|
#!/usr/bin/env python
from __future__ import unicode_literals
import os
import sys
scripts_dir = os.path.abspath(os.path.dirname(__file__))
# Source root directory
sys.path.insert(0, os.path.abspath(os.path.join(scripts_dir, '..', '..')))
# Script config directory
sys.path.insert(0, os.path.join(scripts_dir, 'conf'))
from reviewboard.dependencies import django_version
import __main__
__main__.__requires__ = ['Django' + django_version]
import pkg_resources
from django.core.management import call_command
if __name__ == '__main__':
os.putenv('FORCE_BUILD_MEDIA', '1')
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings')
ret = call_command('collectstatic', interactive=False, verbosity=2)
sys.exit(ret)
|
#!/usr/bin/env python
from __future__ import unicode_literals
import os
import sys
scripts_dir = os.path.abspath(os.path.dirname(__file__))
# Source root directory
sys.path.insert(0, os.path.abspath(os.path.join(scripts_dir, '..', '..')))
# Script config directory
sys.path.insert(0, os.path.join(scripts_dir, 'conf'))
from reviewboard.dependencies import django_version
import __main__
__main__.__requires__ = ['Django' + django_version]
import pkg_resources
from django.core.management import call_command
if __name__ == '__main__':
os.putenv('FORCE_BUILD_MEDIA', '1')
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings')
# This will raise a CommandError or call sys.exit(1) on failure.
call_command('collectstatic', interactive=False, verbosity=2)
|
Fix building static media on Django 1.11.
|
Fix building static media on Django 1.11.
Our wrapper script for building static media attempted to honor the
exit code of the `collectstatic` management command, passing it along to
`sys.exit()` so that we wouldn't have a failure show up as a successful
result.
However, exit codes are never returned. Instead, we were always getting
`None` back, which Python helpfully converts to an exit code of 0. Any
failure would have been an explicit `sys.exit(1)` or a raised exception.
So what we were doing was pointless.
On Django 1.11, though, we actually got a result back: The result of
`stdout`. We were then passing this to `sys.exit()`, which Python was
converting to an exit code of 1, resulting in the command always
failing.
We now just exit normally without trying to be clever and helpful,
letting Django do its own thing.
Testing Done:
Tested building static media on Django 1.6 and 1.11.
Reviewed at https://reviews.reviewboard.org/r/10605/
|
Python
|
mit
|
reviewboard/reviewboard,reviewboard/reviewboard,chipx86/reviewboard,reviewboard/reviewboard,chipx86/reviewboard,chipx86/reviewboard,reviewboard/reviewboard,chipx86/reviewboard
|
#!/usr/bin/env python
from __future__ import unicode_literals
import os
import sys
scripts_dir = os.path.abspath(os.path.dirname(__file__))
# Source root directory
sys.path.insert(0, os.path.abspath(os.path.join(scripts_dir, '..', '..')))
# Script config directory
sys.path.insert(0, os.path.join(scripts_dir, 'conf'))
from reviewboard.dependencies import django_version
import __main__
__main__.__requires__ = ['Django' + django_version]
import pkg_resources
from django.core.management import call_command
if __name__ == '__main__':
os.putenv('FORCE_BUILD_MEDIA', '1')
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings')
ret = call_command('collectstatic', interactive=False, verbosity=2)
sys.exit(ret)
Fix building static media on Django 1.11.
Our wrapper script for building static media attempted to honor the
exit code of the `collectstatic` management command, passing it along to
`sys.exit()` so that we wouldn't have a failure show up as a successful
result.
However, exit codes are never returned. Instead, we were always getting
`None` back, which Python helpfully converts to an exit code of 0. Any
failure would have been an explicit `sys.exit(1)` or a raised exception.
So what we were doing was pointless.
On Django 1.11, though, we actually got a result back: The result of
`stdout`. We were then passing this to `sys.exit()`, which Python was
converting to an exit code of 1, resulting in the command always
failing.
We now just exit normally without trying to be clever and helpful,
letting Django do its own thing.
Testing Done:
Tested building static media on Django 1.6 and 1.11.
Reviewed at https://reviews.reviewboard.org/r/10605/
|
#!/usr/bin/env python
from __future__ import unicode_literals
import os
import sys
scripts_dir = os.path.abspath(os.path.dirname(__file__))
# Source root directory
sys.path.insert(0, os.path.abspath(os.path.join(scripts_dir, '..', '..')))
# Script config directory
sys.path.insert(0, os.path.join(scripts_dir, 'conf'))
from reviewboard.dependencies import django_version
import __main__
__main__.__requires__ = ['Django' + django_version]
import pkg_resources
from django.core.management import call_command
if __name__ == '__main__':
os.putenv('FORCE_BUILD_MEDIA', '1')
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings')
# This will raise a CommandError or call sys.exit(1) on failure.
call_command('collectstatic', interactive=False, verbosity=2)
|
<commit_before>#!/usr/bin/env python
from __future__ import unicode_literals
import os
import sys
scripts_dir = os.path.abspath(os.path.dirname(__file__))
# Source root directory
sys.path.insert(0, os.path.abspath(os.path.join(scripts_dir, '..', '..')))
# Script config directory
sys.path.insert(0, os.path.join(scripts_dir, 'conf'))
from reviewboard.dependencies import django_version
import __main__
__main__.__requires__ = ['Django' + django_version]
import pkg_resources
from django.core.management import call_command
if __name__ == '__main__':
os.putenv('FORCE_BUILD_MEDIA', '1')
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings')
ret = call_command('collectstatic', interactive=False, verbosity=2)
sys.exit(ret)
<commit_msg>Fix building static media on Django 1.11.
Our wrapper script for building static media attempted to honor the
exit code of the `collectstatic` management command, passing it along to
`sys.exit()` so that we wouldn't have a failure show up as a successful
result.
However, exit codes are never returned. Instead, we were always getting
`None` back, which Python helpfully converts to an exit code of 0. Any
failure would have been an explicit `sys.exit(1)` or a raised exception.
So what we were doing was pointless.
On Django 1.11, though, we actually got a result back: The result of
`stdout`. We were then passing this to `sys.exit()`, which Python was
converting to an exit code of 1, resulting in the command always
failing.
We now just exit normally without trying to be clever and helpful,
letting Django do its own thing.
Testing Done:
Tested building static media on Django 1.6 and 1.11.
Reviewed at https://reviews.reviewboard.org/r/10605/<commit_after>
|
#!/usr/bin/env python
from __future__ import unicode_literals
import os
import sys
scripts_dir = os.path.abspath(os.path.dirname(__file__))
# Source root directory
sys.path.insert(0, os.path.abspath(os.path.join(scripts_dir, '..', '..')))
# Script config directory
sys.path.insert(0, os.path.join(scripts_dir, 'conf'))
from reviewboard.dependencies import django_version
import __main__
__main__.__requires__ = ['Django' + django_version]
import pkg_resources
from django.core.management import call_command
if __name__ == '__main__':
os.putenv('FORCE_BUILD_MEDIA', '1')
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings')
# This will raise a CommandError or call sys.exit(1) on failure.
call_command('collectstatic', interactive=False, verbosity=2)
|
#!/usr/bin/env python
from __future__ import unicode_literals
import os
import sys
scripts_dir = os.path.abspath(os.path.dirname(__file__))
# Source root directory
sys.path.insert(0, os.path.abspath(os.path.join(scripts_dir, '..', '..')))
# Script config directory
sys.path.insert(0, os.path.join(scripts_dir, 'conf'))
from reviewboard.dependencies import django_version
import __main__
__main__.__requires__ = ['Django' + django_version]
import pkg_resources
from django.core.management import call_command
if __name__ == '__main__':
os.putenv('FORCE_BUILD_MEDIA', '1')
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings')
ret = call_command('collectstatic', interactive=False, verbosity=2)
sys.exit(ret)
Fix building static media on Django 1.11.
Our wrapper script for building static media attempted to honor the
exit code of the `collectstatic` management command, passing it along to
`sys.exit()` so that we wouldn't have a failure show up as a successful
result.
However, exit codes are never returned. Instead, we were always getting
`None` back, which Python helpfully converts to an exit code of 0. Any
failure would have been an explicit `sys.exit(1)` or a raised exception.
So what we were doing was pointless.
On Django 1.11, though, we actually got a result back: The result of
`stdout`. We were then passing this to `sys.exit()`, which Python was
converting to an exit code of 1, resulting in the command always
failing.
We now just exit normally without trying to be clever and helpful,
letting Django do its own thing.
Testing Done:
Tested building static media on Django 1.6 and 1.11.
Reviewed at https://reviews.reviewboard.org/r/10605/#!/usr/bin/env python
from __future__ import unicode_literals
import os
import sys
scripts_dir = os.path.abspath(os.path.dirname(__file__))
# Source root directory
sys.path.insert(0, os.path.abspath(os.path.join(scripts_dir, '..', '..')))
# Script config directory
sys.path.insert(0, os.path.join(scripts_dir, 'conf'))
from reviewboard.dependencies import django_version
import __main__
__main__.__requires__ = ['Django' + django_version]
import pkg_resources
from django.core.management import call_command
if __name__ == '__main__':
os.putenv('FORCE_BUILD_MEDIA', '1')
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings')
# This will raise a CommandError or call sys.exit(1) on failure.
call_command('collectstatic', interactive=False, verbosity=2)
|
<commit_before>#!/usr/bin/env python
from __future__ import unicode_literals
import os
import sys
scripts_dir = os.path.abspath(os.path.dirname(__file__))
# Source root directory
sys.path.insert(0, os.path.abspath(os.path.join(scripts_dir, '..', '..')))
# Script config directory
sys.path.insert(0, os.path.join(scripts_dir, 'conf'))
from reviewboard.dependencies import django_version
import __main__
__main__.__requires__ = ['Django' + django_version]
import pkg_resources
from django.core.management import call_command
if __name__ == '__main__':
os.putenv('FORCE_BUILD_MEDIA', '1')
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings')
ret = call_command('collectstatic', interactive=False, verbosity=2)
sys.exit(ret)
<commit_msg>Fix building static media on Django 1.11.
Our wrapper script for building static media attempted to honor the
exit code of the `collectstatic` management command, passing it along to
`sys.exit()` so that we wouldn't have a failure show up as a successful
result.
However, exit codes are never returned. Instead, we were always getting
`None` back, which Python helpfully converts to an exit code of 0. Any
failure would have been an explicit `sys.exit(1)` or a raised exception.
So what we were doing was pointless.
On Django 1.11, though, we actually got a result back: The result of
`stdout`. We were then passing this to `sys.exit()`, which Python was
converting to an exit code of 1, resulting in the command always
failing.
We now just exit normally without trying to be clever and helpful,
letting Django do its own thing.
Testing Done:
Tested building static media on Django 1.6 and 1.11.
Reviewed at https://reviews.reviewboard.org/r/10605/<commit_after>#!/usr/bin/env python
from __future__ import unicode_literals
import os
import sys
scripts_dir = os.path.abspath(os.path.dirname(__file__))
# Source root directory
sys.path.insert(0, os.path.abspath(os.path.join(scripts_dir, '..', '..')))
# Script config directory
sys.path.insert(0, os.path.join(scripts_dir, 'conf'))
from reviewboard.dependencies import django_version
import __main__
__main__.__requires__ = ['Django' + django_version]
import pkg_resources
from django.core.management import call_command
if __name__ == '__main__':
os.putenv('FORCE_BUILD_MEDIA', '1')
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reviewboard.settings')
# This will raise a CommandError or call sys.exit(1) on failure.
call_command('collectstatic', interactive=False, verbosity=2)
|
9acf278a0a20262174e68829f9725731771e2601
|
example/app.py
|
example/app.py
|
from flask import Flask, render_template
from flask_gears import Gears
from gears_stylus import StylusCompiler
from gears_clean_css import CleanCSSCompressor
app = Flask(__name__)
gears = Gears(
compilers={'.styl': StylusCompiler.as_handler()},
compressors={'text/css': CleanCSSCompressor.as_handler()},
)
gears.init_app(app)
@app.route('/')
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run(debug=True)
|
from flask import Flask, render_template
from flask.ext.gears import Gears
from gears_stylus import StylusCompiler
from gears_clean_css import CleanCSSCompressor
app = Flask(__name__)
gears = Gears(
compilers={'.styl': StylusCompiler.as_handler()},
compressors={'text/css': CleanCSSCompressor.as_handler()},
)
gears.init_app(app)
@app.route('/')
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run(debug=True)
|
Use the preferred import style
|
Example: Use the preferred import style
[Using Flask Extensions](http://flask.readthedocs.org/en/latest/extensions/#using-extensions) suggests to import like ``from flask.ext import foo``.
|
Python
|
isc
|
gears/flask-gears
|
from flask import Flask, render_template
from flask_gears import Gears
from gears_stylus import StylusCompiler
from gears_clean_css import CleanCSSCompressor
app = Flask(__name__)
gears = Gears(
compilers={'.styl': StylusCompiler.as_handler()},
compressors={'text/css': CleanCSSCompressor.as_handler()},
)
gears.init_app(app)
@app.route('/')
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run(debug=True)
Example: Use the preferred import style
[Using Flask Extensions](http://flask.readthedocs.org/en/latest/extensions/#using-extensions) suggests to import like ``from flask.ext import foo``.
|
from flask import Flask, render_template
from flask.ext.gears import Gears
from gears_stylus import StylusCompiler
from gears_clean_css import CleanCSSCompressor
app = Flask(__name__)
gears = Gears(
compilers={'.styl': StylusCompiler.as_handler()},
compressors={'text/css': CleanCSSCompressor.as_handler()},
)
gears.init_app(app)
@app.route('/')
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run(debug=True)
|
<commit_before>from flask import Flask, render_template
from flask_gears import Gears
from gears_stylus import StylusCompiler
from gears_clean_css import CleanCSSCompressor
app = Flask(__name__)
gears = Gears(
compilers={'.styl': StylusCompiler.as_handler()},
compressors={'text/css': CleanCSSCompressor.as_handler()},
)
gears.init_app(app)
@app.route('/')
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run(debug=True)
<commit_msg>Example: Use the preferred import style
[Using Flask Extensions](http://flask.readthedocs.org/en/latest/extensions/#using-extensions) suggests to import like ``from flask.ext import foo``.<commit_after>
|
from flask import Flask, render_template
from flask.ext.gears import Gears
from gears_stylus import StylusCompiler
from gears_clean_css import CleanCSSCompressor
app = Flask(__name__)
gears = Gears(
compilers={'.styl': StylusCompiler.as_handler()},
compressors={'text/css': CleanCSSCompressor.as_handler()},
)
gears.init_app(app)
@app.route('/')
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run(debug=True)
|
from flask import Flask, render_template
from flask_gears import Gears
from gears_stylus import StylusCompiler
from gears_clean_css import CleanCSSCompressor
app = Flask(__name__)
gears = Gears(
compilers={'.styl': StylusCompiler.as_handler()},
compressors={'text/css': CleanCSSCompressor.as_handler()},
)
gears.init_app(app)
@app.route('/')
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run(debug=True)
Example: Use the preferred import style
[Using Flask Extensions](http://flask.readthedocs.org/en/latest/extensions/#using-extensions) suggests to import like ``from flask.ext import foo``.from flask import Flask, render_template
from flask.ext.gears import Gears
from gears_stylus import StylusCompiler
from gears_clean_css import CleanCSSCompressor
app = Flask(__name__)
gears = Gears(
compilers={'.styl': StylusCompiler.as_handler()},
compressors={'text/css': CleanCSSCompressor.as_handler()},
)
gears.init_app(app)
@app.route('/')
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run(debug=True)
|
<commit_before>from flask import Flask, render_template
from flask_gears import Gears
from gears_stylus import StylusCompiler
from gears_clean_css import CleanCSSCompressor
app = Flask(__name__)
gears = Gears(
compilers={'.styl': StylusCompiler.as_handler()},
compressors={'text/css': CleanCSSCompressor.as_handler()},
)
gears.init_app(app)
@app.route('/')
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run(debug=True)
<commit_msg>Example: Use the preferred import style
[Using Flask Extensions](http://flask.readthedocs.org/en/latest/extensions/#using-extensions) suggests to import like ``from flask.ext import foo``.<commit_after>from flask import Flask, render_template
from flask.ext.gears import Gears
from gears_stylus import StylusCompiler
from gears_clean_css import CleanCSSCompressor
app = Flask(__name__)
gears = Gears(
compilers={'.styl': StylusCompiler.as_handler()},
compressors={'text/css': CleanCSSCompressor.as_handler()},
)
gears.init_app(app)
@app.route('/')
def index():
return render_template('index.html')
if __name__ == '__main__':
app.run(debug=True)
|
a6eaf7d4b43e1bb3177e4eb0e3e288db2d419020
|
halo/_utils.py
|
halo/_utils.py
|
# -*- coding: utf-8 -*-
"""Utilities for Halo library.
"""
import platform
import six
import codecs
from colorama import init, Fore
from termcolor import colored
init(autoreset=True)
def is_supported():
"""Check whether operating system supports main symbols or not.
Returns
-------
boolean
Whether operating system supports main symbols or not
"""
os_arch = platform.system()
if os_arch != 'Windows':
return True
return False
def colored_frame(frame, color):
"""Color the frame with given color and returns.
Parameters
----------
frame : str
Frame to be colored
color : str
Color to be applied
Returns
-------
str
Colored frame
"""
return colored(frame, color, attrs=['bold'])
def is_text_type(text):
"""Check if given parameter is a string or not
Parameters
----------
text : *
Parameter to be checked for text type
Returns
-------
bool
Whether parameter is a string or not
"""
if isinstance(text, six.text_type) or isinstance(text, six.string_types):
return True
return False
def decode_utf_8_text(text):
"""Decode the text from utf-8 format
Parameters
----------
text : str
String to be decoded
Returns
-------
str
Decoded string
"""
try:
return codecs.decode(text, 'utf-8')
except:
return text
|
# -*- coding: utf-8 -*-
"""Utilities for Halo library.
"""
import platform
import six
import codecs
import shutil
from colorama import init, Fore
from termcolor import colored
init(autoreset=True)
def is_supported():
"""Check whether operating system supports main symbols or not.
Returns
-------
boolean
Whether operating system supports main symbols or not
"""
os_arch = platform.system()
if os_arch != 'Windows':
return True
return False
def colored_frame(frame, color):
"""Color the frame with given color and returns.
Parameters
----------
frame : str
Frame to be colored
color : str
Color to be applied
Returns
-------
str
Colored frame
"""
return colored(frame, color, attrs=['bold'])
def is_text_type(text):
"""Check if given parameter is a string or not
Parameters
----------
text : *
Parameter to be checked for text type
Returns
-------
bool
Whether parameter is a string or not
"""
if isinstance(text, six.text_type) or isinstance(text, six.string_types):
return True
return False
def decode_utf_8_text(text):
"""Decode the text from utf-8 format
Parameters
----------
text : str
String to be decoded
Returns
-------
str
Decoded string
"""
try:
return codecs.decode(text, 'utf-8')
except:
return text
def get_terminal_size():
return shutil.get_terminal_size()
|
Add shutil dependency to get the terminal size
|
Add shutil dependency to get the terminal size
|
Python
|
mit
|
manrajgrover/halo,ManrajGrover/halo
|
# -*- coding: utf-8 -*-
"""Utilities for Halo library.
"""
import platform
import six
import codecs
from colorama import init, Fore
from termcolor import colored
init(autoreset=True)
def is_supported():
"""Check whether operating system supports main symbols or not.
Returns
-------
boolean
Whether operating system supports main symbols or not
"""
os_arch = platform.system()
if os_arch != 'Windows':
return True
return False
def colored_frame(frame, color):
"""Color the frame with given color and returns.
Parameters
----------
frame : str
Frame to be colored
color : str
Color to be applied
Returns
-------
str
Colored frame
"""
return colored(frame, color, attrs=['bold'])
def is_text_type(text):
"""Check if given parameter is a string or not
Parameters
----------
text : *
Parameter to be checked for text type
Returns
-------
bool
Whether parameter is a string or not
"""
if isinstance(text, six.text_type) or isinstance(text, six.string_types):
return True
return False
def decode_utf_8_text(text):
"""Decode the text from utf-8 format
Parameters
----------
text : str
String to be decoded
Returns
-------
str
Decoded string
"""
try:
return codecs.decode(text, 'utf-8')
except:
return text
Add shutil dependency to get the terminal size
|
# -*- coding: utf-8 -*-
"""Utilities for Halo library.
"""
import platform
import six
import codecs
import shutil
from colorama import init, Fore
from termcolor import colored
init(autoreset=True)
def is_supported():
"""Check whether operating system supports main symbols or not.
Returns
-------
boolean
Whether operating system supports main symbols or not
"""
os_arch = platform.system()
if os_arch != 'Windows':
return True
return False
def colored_frame(frame, color):
"""Color the frame with given color and returns.
Parameters
----------
frame : str
Frame to be colored
color : str
Color to be applied
Returns
-------
str
Colored frame
"""
return colored(frame, color, attrs=['bold'])
def is_text_type(text):
"""Check if given parameter is a string or not
Parameters
----------
text : *
Parameter to be checked for text type
Returns
-------
bool
Whether parameter is a string or not
"""
if isinstance(text, six.text_type) or isinstance(text, six.string_types):
return True
return False
def decode_utf_8_text(text):
"""Decode the text from utf-8 format
Parameters
----------
text : str
String to be decoded
Returns
-------
str
Decoded string
"""
try:
return codecs.decode(text, 'utf-8')
except:
return text
def get_terminal_size():
return shutil.get_terminal_size()
|
<commit_before># -*- coding: utf-8 -*-
"""Utilities for Halo library.
"""
import platform
import six
import codecs
from colorama import init, Fore
from termcolor import colored
init(autoreset=True)
def is_supported():
"""Check whether operating system supports main symbols or not.
Returns
-------
boolean
Whether operating system supports main symbols or not
"""
os_arch = platform.system()
if os_arch != 'Windows':
return True
return False
def colored_frame(frame, color):
"""Color the frame with given color and returns.
Parameters
----------
frame : str
Frame to be colored
color : str
Color to be applied
Returns
-------
str
Colored frame
"""
return colored(frame, color, attrs=['bold'])
def is_text_type(text):
"""Check if given parameter is a string or not
Parameters
----------
text : *
Parameter to be checked for text type
Returns
-------
bool
Whether parameter is a string or not
"""
if isinstance(text, six.text_type) or isinstance(text, six.string_types):
return True
return False
def decode_utf_8_text(text):
"""Decode the text from utf-8 format
Parameters
----------
text : str
String to be decoded
Returns
-------
str
Decoded string
"""
try:
return codecs.decode(text, 'utf-8')
except:
return text
<commit_msg>Add shutil dependency to get the terminal size<commit_after>
|
# -*- coding: utf-8 -*-
"""Utilities for Halo library.
"""
import platform
import six
import codecs
import shutil
from colorama import init, Fore
from termcolor import colored
init(autoreset=True)
def is_supported():
"""Check whether operating system supports main symbols or not.
Returns
-------
boolean
Whether operating system supports main symbols or not
"""
os_arch = platform.system()
if os_arch != 'Windows':
return True
return False
def colored_frame(frame, color):
"""Color the frame with given color and returns.
Parameters
----------
frame : str
Frame to be colored
color : str
Color to be applied
Returns
-------
str
Colored frame
"""
return colored(frame, color, attrs=['bold'])
def is_text_type(text):
"""Check if given parameter is a string or not
Parameters
----------
text : *
Parameter to be checked for text type
Returns
-------
bool
Whether parameter is a string or not
"""
if isinstance(text, six.text_type) or isinstance(text, six.string_types):
return True
return False
def decode_utf_8_text(text):
"""Decode the text from utf-8 format
Parameters
----------
text : str
String to be decoded
Returns
-------
str
Decoded string
"""
try:
return codecs.decode(text, 'utf-8')
except:
return text
def get_terminal_size():
return shutil.get_terminal_size()
|
# -*- coding: utf-8 -*-
"""Utilities for Halo library.
"""
import platform
import six
import codecs
from colorama import init, Fore
from termcolor import colored
init(autoreset=True)
def is_supported():
"""Check whether operating system supports main symbols or not.
Returns
-------
boolean
Whether operating system supports main symbols or not
"""
os_arch = platform.system()
if os_arch != 'Windows':
return True
return False
def colored_frame(frame, color):
"""Color the frame with given color and returns.
Parameters
----------
frame : str
Frame to be colored
color : str
Color to be applied
Returns
-------
str
Colored frame
"""
return colored(frame, color, attrs=['bold'])
def is_text_type(text):
"""Check if given parameter is a string or not
Parameters
----------
text : *
Parameter to be checked for text type
Returns
-------
bool
Whether parameter is a string or not
"""
if isinstance(text, six.text_type) or isinstance(text, six.string_types):
return True
return False
def decode_utf_8_text(text):
"""Decode the text from utf-8 format
Parameters
----------
text : str
String to be decoded
Returns
-------
str
Decoded string
"""
try:
return codecs.decode(text, 'utf-8')
except:
return text
Add shutil dependency to get the terminal size# -*- coding: utf-8 -*-
"""Utilities for Halo library.
"""
import platform
import six
import codecs
import shutil
from colorama import init, Fore
from termcolor import colored
init(autoreset=True)
def is_supported():
"""Check whether operating system supports main symbols or not.
Returns
-------
boolean
Whether operating system supports main symbols or not
"""
os_arch = platform.system()
if os_arch != 'Windows':
return True
return False
def colored_frame(frame, color):
"""Color the frame with given color and returns.
Parameters
----------
frame : str
Frame to be colored
color : str
Color to be applied
Returns
-------
str
Colored frame
"""
return colored(frame, color, attrs=['bold'])
def is_text_type(text):
"""Check if given parameter is a string or not
Parameters
----------
text : *
Parameter to be checked for text type
Returns
-------
bool
Whether parameter is a string or not
"""
if isinstance(text, six.text_type) or isinstance(text, six.string_types):
return True
return False
def decode_utf_8_text(text):
"""Decode the text from utf-8 format
Parameters
----------
text : str
String to be decoded
Returns
-------
str
Decoded string
"""
try:
return codecs.decode(text, 'utf-8')
except:
return text
def get_terminal_size():
return shutil.get_terminal_size()
|
<commit_before># -*- coding: utf-8 -*-
"""Utilities for Halo library.
"""
import platform
import six
import codecs
from colorama import init, Fore
from termcolor import colored
init(autoreset=True)
def is_supported():
"""Check whether operating system supports main symbols or not.
Returns
-------
boolean
Whether operating system supports main symbols or not
"""
os_arch = platform.system()
if os_arch != 'Windows':
return True
return False
def colored_frame(frame, color):
"""Color the frame with given color and returns.
Parameters
----------
frame : str
Frame to be colored
color : str
Color to be applied
Returns
-------
str
Colored frame
"""
return colored(frame, color, attrs=['bold'])
def is_text_type(text):
"""Check if given parameter is a string or not
Parameters
----------
text : *
Parameter to be checked for text type
Returns
-------
bool
Whether parameter is a string or not
"""
if isinstance(text, six.text_type) or isinstance(text, six.string_types):
return True
return False
def decode_utf_8_text(text):
"""Decode the text from utf-8 format
Parameters
----------
text : str
String to be decoded
Returns
-------
str
Decoded string
"""
try:
return codecs.decode(text, 'utf-8')
except:
return text
<commit_msg>Add shutil dependency to get the terminal size<commit_after># -*- coding: utf-8 -*-
"""Utilities for Halo library.
"""
import platform
import six
import codecs
import shutil
from colorama import init, Fore
from termcolor import colored
init(autoreset=True)
def is_supported():
"""Check whether operating system supports main symbols or not.
Returns
-------
boolean
Whether operating system supports main symbols or not
"""
os_arch = platform.system()
if os_arch != 'Windows':
return True
return False
def colored_frame(frame, color):
"""Color the frame with given color and returns.
Parameters
----------
frame : str
Frame to be colored
color : str
Color to be applied
Returns
-------
str
Colored frame
"""
return colored(frame, color, attrs=['bold'])
def is_text_type(text):
"""Check if given parameter is a string or not
Parameters
----------
text : *
Parameter to be checked for text type
Returns
-------
bool
Whether parameter is a string or not
"""
if isinstance(text, six.text_type) or isinstance(text, six.string_types):
return True
return False
def decode_utf_8_text(text):
"""Decode the text from utf-8 format
Parameters
----------
text : str
String to be decoded
Returns
-------
str
Decoded string
"""
try:
return codecs.decode(text, 'utf-8')
except:
return text
def get_terminal_size():
return shutil.get_terminal_size()
|
9fa562a413900252acd27d6f1b90055df2e95fe2
|
tests/test_apply.py
|
tests/test_apply.py
|
import unittest
import cbs
class AttrSettings():
PROJECT_NAME = 'fancy_project'
class MethodSettings():
def PROJECT_NAME(self):
return 'fancy_project'
class TestApply(unittest.TestCase):
def test_apply_settings_attr(self):
g = {}
cbs.apply(AttrSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_method(self):
g = {}
cbs.apply(MethodSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_string_reference(self):
g = {}
cbs.apply(__name__ + '.AttrSettings', g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_invalid_string_reference(self):
self.assertRaises(ImportError, cbs.apply, 'invalid.Class', {})
|
import unittest
import cbs
class AttrSettings():
PROJECT_NAME = 'fancy_project'
class MethodSettings():
def PROJECT_NAME(self):
return 'fancy_project'
class TestApply(unittest.TestCase):
def test_apply_settings_attr(self):
g = {}
cbs.apply(AttrSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_method(self):
g = {}
cbs.apply(MethodSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_string_local(self):
g = {'AttrSettings': AttrSettings}
cbs.apply('AttrSettings', g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_string_reference(self):
g = {}
cbs.apply(__name__ + '.AttrSettings', g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_invalid_string_local(self):
self.assertRaises(ValueError, cbs.apply, 'LocalSettings', {})
def test_apply_settings_invalid_string_reference(self):
self.assertRaises(ImportError, cbs.apply, 'invalid.Class', {})
|
Test all the code paths
|
Test all the code paths
|
Python
|
bsd-2-clause
|
ar45/django-classy-settings,pombredanne/django-classy-settings,tysonclugg/django-classy-settings,funkybob/django-classy-settings
|
import unittest
import cbs
class AttrSettings():
PROJECT_NAME = 'fancy_project'
class MethodSettings():
def PROJECT_NAME(self):
return 'fancy_project'
class TestApply(unittest.TestCase):
def test_apply_settings_attr(self):
g = {}
cbs.apply(AttrSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_method(self):
g = {}
cbs.apply(MethodSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_string_reference(self):
g = {}
cbs.apply(__name__ + '.AttrSettings', g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_invalid_string_reference(self):
self.assertRaises(ImportError, cbs.apply, 'invalid.Class', {})
Test all the code paths
|
import unittest
import cbs
class AttrSettings():
PROJECT_NAME = 'fancy_project'
class MethodSettings():
def PROJECT_NAME(self):
return 'fancy_project'
class TestApply(unittest.TestCase):
def test_apply_settings_attr(self):
g = {}
cbs.apply(AttrSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_method(self):
g = {}
cbs.apply(MethodSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_string_local(self):
g = {'AttrSettings': AttrSettings}
cbs.apply('AttrSettings', g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_string_reference(self):
g = {}
cbs.apply(__name__ + '.AttrSettings', g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_invalid_string_local(self):
self.assertRaises(ValueError, cbs.apply, 'LocalSettings', {})
def test_apply_settings_invalid_string_reference(self):
self.assertRaises(ImportError, cbs.apply, 'invalid.Class', {})
|
<commit_before>import unittest
import cbs
class AttrSettings():
PROJECT_NAME = 'fancy_project'
class MethodSettings():
def PROJECT_NAME(self):
return 'fancy_project'
class TestApply(unittest.TestCase):
def test_apply_settings_attr(self):
g = {}
cbs.apply(AttrSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_method(self):
g = {}
cbs.apply(MethodSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_string_reference(self):
g = {}
cbs.apply(__name__ + '.AttrSettings', g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_invalid_string_reference(self):
self.assertRaises(ImportError, cbs.apply, 'invalid.Class', {})
<commit_msg>Test all the code paths<commit_after>
|
import unittest
import cbs
class AttrSettings():
PROJECT_NAME = 'fancy_project'
class MethodSettings():
def PROJECT_NAME(self):
return 'fancy_project'
class TestApply(unittest.TestCase):
def test_apply_settings_attr(self):
g = {}
cbs.apply(AttrSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_method(self):
g = {}
cbs.apply(MethodSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_string_local(self):
g = {'AttrSettings': AttrSettings}
cbs.apply('AttrSettings', g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_string_reference(self):
g = {}
cbs.apply(__name__ + '.AttrSettings', g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_invalid_string_local(self):
self.assertRaises(ValueError, cbs.apply, 'LocalSettings', {})
def test_apply_settings_invalid_string_reference(self):
self.assertRaises(ImportError, cbs.apply, 'invalid.Class', {})
|
import unittest
import cbs
class AttrSettings():
PROJECT_NAME = 'fancy_project'
class MethodSettings():
def PROJECT_NAME(self):
return 'fancy_project'
class TestApply(unittest.TestCase):
def test_apply_settings_attr(self):
g = {}
cbs.apply(AttrSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_method(self):
g = {}
cbs.apply(MethodSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_string_reference(self):
g = {}
cbs.apply(__name__ + '.AttrSettings', g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_invalid_string_reference(self):
self.assertRaises(ImportError, cbs.apply, 'invalid.Class', {})
Test all the code pathsimport unittest
import cbs
class AttrSettings():
PROJECT_NAME = 'fancy_project'
class MethodSettings():
def PROJECT_NAME(self):
return 'fancy_project'
class TestApply(unittest.TestCase):
def test_apply_settings_attr(self):
g = {}
cbs.apply(AttrSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_method(self):
g = {}
cbs.apply(MethodSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_string_local(self):
g = {'AttrSettings': AttrSettings}
cbs.apply('AttrSettings', g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_string_reference(self):
g = {}
cbs.apply(__name__ + '.AttrSettings', g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_invalid_string_local(self):
self.assertRaises(ValueError, cbs.apply, 'LocalSettings', {})
def test_apply_settings_invalid_string_reference(self):
self.assertRaises(ImportError, cbs.apply, 'invalid.Class', {})
|
<commit_before>import unittest
import cbs
class AttrSettings():
PROJECT_NAME = 'fancy_project'
class MethodSettings():
def PROJECT_NAME(self):
return 'fancy_project'
class TestApply(unittest.TestCase):
def test_apply_settings_attr(self):
g = {}
cbs.apply(AttrSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_method(self):
g = {}
cbs.apply(MethodSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_string_reference(self):
g = {}
cbs.apply(__name__ + '.AttrSettings', g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_invalid_string_reference(self):
self.assertRaises(ImportError, cbs.apply, 'invalid.Class', {})
<commit_msg>Test all the code paths<commit_after>import unittest
import cbs
class AttrSettings():
PROJECT_NAME = 'fancy_project'
class MethodSettings():
def PROJECT_NAME(self):
return 'fancy_project'
class TestApply(unittest.TestCase):
def test_apply_settings_attr(self):
g = {}
cbs.apply(AttrSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_method(self):
g = {}
cbs.apply(MethodSettings, g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_string_local(self):
g = {'AttrSettings': AttrSettings}
cbs.apply('AttrSettings', g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_string_reference(self):
g = {}
cbs.apply(__name__ + '.AttrSettings', g)
self.assertEqual(g['PROJECT_NAME'], 'fancy_project')
def test_apply_settings_invalid_string_local(self):
self.assertRaises(ValueError, cbs.apply, 'LocalSettings', {})
def test_apply_settings_invalid_string_reference(self):
self.assertRaises(ImportError, cbs.apply, 'invalid.Class', {})
|
6cc1e7ca79b8730cfd5e0db71dd19aae9848e3d2
|
mownfish/db/api.py
|
mownfish/db/api.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2012 Ethan Zhang<http://github.com/Ethan-Zhang>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tornado.options import define, options
import tornadoasyncmemcache
define("db_addr_list", type=list, default=['192.168.0.176:19803'])
class MemcachedClient(object):
@staticmethod
def instance():
if not hasattr(MemcachedClient, "_instance"):
MemcachedClient._instance = \
tornadoasyncmemcache.ClientPool(options.db_addr_list,
maxclients=100)
return MemcachedClient._instance
def get(key, callback):
MemcachedClient.instance().get(key, callback=callback)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2012 Ethan Zhang<http://github.com/Ethan-Zhang>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tornado.options import define, options
import tornadoasyncmemcache
define("db_addr_list", type=list, default=['192.168.0.176:19803'])
class MemcachedClient(object):
def __new__(cls):
if not hasattr(cls, '_instance'):
cls._instance = \
tornadoasyncmemcache.ClientPool(options.db_addr_list,
maxclients=100)
return cls._instance
def get_memcached(key, callback):
MemcachedClient.instance().get(key, callback=callback)
|
Modify db client to __new__
|
Modify db client to __new__
change db singlton from instance() staticmethod to __new__()
|
Python
|
apache-2.0
|
Ethan-Zhang/mownfish
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2012 Ethan Zhang<http://github.com/Ethan-Zhang>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tornado.options import define, options
import tornadoasyncmemcache
define("db_addr_list", type=list, default=['192.168.0.176:19803'])
class MemcachedClient(object):
@staticmethod
def instance():
if not hasattr(MemcachedClient, "_instance"):
MemcachedClient._instance = \
tornadoasyncmemcache.ClientPool(options.db_addr_list,
maxclients=100)
return MemcachedClient._instance
def get(key, callback):
MemcachedClient.instance().get(key, callback=callback)
Modify db client to __new__
change db singlton from instance() staticmethod to __new__()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2012 Ethan Zhang<http://github.com/Ethan-Zhang>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tornado.options import define, options
import tornadoasyncmemcache
define("db_addr_list", type=list, default=['192.168.0.176:19803'])
class MemcachedClient(object):
def __new__(cls):
if not hasattr(cls, '_instance'):
cls._instance = \
tornadoasyncmemcache.ClientPool(options.db_addr_list,
maxclients=100)
return cls._instance
def get_memcached(key, callback):
MemcachedClient.instance().get(key, callback=callback)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2012 Ethan Zhang<http://github.com/Ethan-Zhang>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tornado.options import define, options
import tornadoasyncmemcache
define("db_addr_list", type=list, default=['192.168.0.176:19803'])
class MemcachedClient(object):
@staticmethod
def instance():
if not hasattr(MemcachedClient, "_instance"):
MemcachedClient._instance = \
tornadoasyncmemcache.ClientPool(options.db_addr_list,
maxclients=100)
return MemcachedClient._instance
def get(key, callback):
MemcachedClient.instance().get(key, callback=callback)
<commit_msg>Modify db client to __new__
change db singlton from instance() staticmethod to __new__()<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2012 Ethan Zhang<http://github.com/Ethan-Zhang>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tornado.options import define, options
import tornadoasyncmemcache
define("db_addr_list", type=list, default=['192.168.0.176:19803'])
class MemcachedClient(object):
def __new__(cls):
if not hasattr(cls, '_instance'):
cls._instance = \
tornadoasyncmemcache.ClientPool(options.db_addr_list,
maxclients=100)
return cls._instance
def get_memcached(key, callback):
MemcachedClient.instance().get(key, callback=callback)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2012 Ethan Zhang<http://github.com/Ethan-Zhang>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tornado.options import define, options
import tornadoasyncmemcache
define("db_addr_list", type=list, default=['192.168.0.176:19803'])
class MemcachedClient(object):
@staticmethod
def instance():
if not hasattr(MemcachedClient, "_instance"):
MemcachedClient._instance = \
tornadoasyncmemcache.ClientPool(options.db_addr_list,
maxclients=100)
return MemcachedClient._instance
def get(key, callback):
MemcachedClient.instance().get(key, callback=callback)
Modify db client to __new__
change db singlton from instance() staticmethod to __new__()#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2012 Ethan Zhang<http://github.com/Ethan-Zhang>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tornado.options import define, options
import tornadoasyncmemcache
define("db_addr_list", type=list, default=['192.168.0.176:19803'])
class MemcachedClient(object):
def __new__(cls):
if not hasattr(cls, '_instance'):
cls._instance = \
tornadoasyncmemcache.ClientPool(options.db_addr_list,
maxclients=100)
return cls._instance
def get_memcached(key, callback):
MemcachedClient.instance().get(key, callback=callback)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2012 Ethan Zhang<http://github.com/Ethan-Zhang>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tornado.options import define, options
import tornadoasyncmemcache
define("db_addr_list", type=list, default=['192.168.0.176:19803'])
class MemcachedClient(object):
@staticmethod
def instance():
if not hasattr(MemcachedClient, "_instance"):
MemcachedClient._instance = \
tornadoasyncmemcache.ClientPool(options.db_addr_list,
maxclients=100)
return MemcachedClient._instance
def get(key, callback):
MemcachedClient.instance().get(key, callback=callback)
<commit_msg>Modify db client to __new__
change db singlton from instance() staticmethod to __new__()<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2012 Ethan Zhang<http://github.com/Ethan-Zhang>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tornado.options import define, options
import tornadoasyncmemcache
define("db_addr_list", type=list, default=['192.168.0.176:19803'])
class MemcachedClient(object):
def __new__(cls):
if not hasattr(cls, '_instance'):
cls._instance = \
tornadoasyncmemcache.ClientPool(options.db_addr_list,
maxclients=100)
return cls._instance
def get_memcached(key, callback):
MemcachedClient.instance().get(key, callback=callback)
|
158b37b1bd45eb1f554386e4866820296f8ea537
|
metal/label_model/lm_defaults.py
|
metal/label_model/lm_defaults.py
|
lm_default_config = {
### GENERAL
'seed': None,
'verbose': True,
'show_plots': True,
### TRAIN
'train_config': {
# Classifier
# Class balance (if learn_class_balance=False, fix to class_balance)
'learn_class_balance': False,
# Class balance initialization / prior
'class_balance_init': None, # (array) If None, assume uniform
# Model params initialization / priors
'mu_init': 0.4,
# L2 regularization (around prior values)
'l2': 0.01,
# Optimizer
'optimizer_config': {
'optimizer': 'sgd',
'optimizer_common': {
'lr': 0.01,
},
# Optimizer - SGD
'sgd_config': {
'momentum': 0.9,
},
},
# Scheduler
'scheduler_config': {
'scheduler': None,
},
# Checkpointer
'checkpoint': False,
# Train loop
'n_epochs': 100,
'print_every': 10,
},
}
|
lm_default_config = {
### GENERAL
'seed': None,
'verbose': True,
'show_plots': True,
### TRAIN
'train_config': {
# Classifier
# Class balance (if learn_class_balance=False, fix to class_balance)
'learn_class_balance': False,
# Class balance initialization / prior
'class_balance_init': None, # (array) If None, assume uniform
# Model params initialization / priors
'mu_init': 0.4,
# Optimizer
'optimizer_config': {
'optimizer': 'sgd',
'optimizer_common': {
'lr': 0.01,
},
# Optimizer - SGD
'sgd_config': {
'momentum': 0.9,
},
},
# Scheduler
'scheduler_config': {
'scheduler': None,
},
# Checkpointer
'checkpoint': False,
# Train loop
'n_epochs': 100,
'print_every': 10,
},
}
|
Remove l2 from lm_default_config since it is currently unused
|
Remove l2 from lm_default_config since it is currently unused
|
Python
|
apache-2.0
|
HazyResearch/metal,HazyResearch/metal
|
lm_default_config = {
### GENERAL
'seed': None,
'verbose': True,
'show_plots': True,
### TRAIN
'train_config': {
# Classifier
# Class balance (if learn_class_balance=False, fix to class_balance)
'learn_class_balance': False,
# Class balance initialization / prior
'class_balance_init': None, # (array) If None, assume uniform
# Model params initialization / priors
'mu_init': 0.4,
# L2 regularization (around prior values)
'l2': 0.01,
# Optimizer
'optimizer_config': {
'optimizer': 'sgd',
'optimizer_common': {
'lr': 0.01,
},
# Optimizer - SGD
'sgd_config': {
'momentum': 0.9,
},
},
# Scheduler
'scheduler_config': {
'scheduler': None,
},
# Checkpointer
'checkpoint': False,
# Train loop
'n_epochs': 100,
'print_every': 10,
},
}
Remove l2 from lm_default_config since it is currently unused
|
lm_default_config = {
### GENERAL
'seed': None,
'verbose': True,
'show_plots': True,
### TRAIN
'train_config': {
# Classifier
# Class balance (if learn_class_balance=False, fix to class_balance)
'learn_class_balance': False,
# Class balance initialization / prior
'class_balance_init': None, # (array) If None, assume uniform
# Model params initialization / priors
'mu_init': 0.4,
# Optimizer
'optimizer_config': {
'optimizer': 'sgd',
'optimizer_common': {
'lr': 0.01,
},
# Optimizer - SGD
'sgd_config': {
'momentum': 0.9,
},
},
# Scheduler
'scheduler_config': {
'scheduler': None,
},
# Checkpointer
'checkpoint': False,
# Train loop
'n_epochs': 100,
'print_every': 10,
},
}
|
<commit_before>lm_default_config = {
### GENERAL
'seed': None,
'verbose': True,
'show_plots': True,
### TRAIN
'train_config': {
# Classifier
# Class balance (if learn_class_balance=False, fix to class_balance)
'learn_class_balance': False,
# Class balance initialization / prior
'class_balance_init': None, # (array) If None, assume uniform
# Model params initialization / priors
'mu_init': 0.4,
# L2 regularization (around prior values)
'l2': 0.01,
# Optimizer
'optimizer_config': {
'optimizer': 'sgd',
'optimizer_common': {
'lr': 0.01,
},
# Optimizer - SGD
'sgd_config': {
'momentum': 0.9,
},
},
# Scheduler
'scheduler_config': {
'scheduler': None,
},
# Checkpointer
'checkpoint': False,
# Train loop
'n_epochs': 100,
'print_every': 10,
},
}
<commit_msg>Remove l2 from lm_default_config since it is currently unused<commit_after>
|
lm_default_config = {
### GENERAL
'seed': None,
'verbose': True,
'show_plots': True,
### TRAIN
'train_config': {
# Classifier
# Class balance (if learn_class_balance=False, fix to class_balance)
'learn_class_balance': False,
# Class balance initialization / prior
'class_balance_init': None, # (array) If None, assume uniform
# Model params initialization / priors
'mu_init': 0.4,
# Optimizer
'optimizer_config': {
'optimizer': 'sgd',
'optimizer_common': {
'lr': 0.01,
},
# Optimizer - SGD
'sgd_config': {
'momentum': 0.9,
},
},
# Scheduler
'scheduler_config': {
'scheduler': None,
},
# Checkpointer
'checkpoint': False,
# Train loop
'n_epochs': 100,
'print_every': 10,
},
}
|
lm_default_config = {
### GENERAL
'seed': None,
'verbose': True,
'show_plots': True,
### TRAIN
'train_config': {
# Classifier
# Class balance (if learn_class_balance=False, fix to class_balance)
'learn_class_balance': False,
# Class balance initialization / prior
'class_balance_init': None, # (array) If None, assume uniform
# Model params initialization / priors
'mu_init': 0.4,
# L2 regularization (around prior values)
'l2': 0.01,
# Optimizer
'optimizer_config': {
'optimizer': 'sgd',
'optimizer_common': {
'lr': 0.01,
},
# Optimizer - SGD
'sgd_config': {
'momentum': 0.9,
},
},
# Scheduler
'scheduler_config': {
'scheduler': None,
},
# Checkpointer
'checkpoint': False,
# Train loop
'n_epochs': 100,
'print_every': 10,
},
}
Remove l2 from lm_default_config since it is currently unusedlm_default_config = {
### GENERAL
'seed': None,
'verbose': True,
'show_plots': True,
### TRAIN
'train_config': {
# Classifier
# Class balance (if learn_class_balance=False, fix to class_balance)
'learn_class_balance': False,
# Class balance initialization / prior
'class_balance_init': None, # (array) If None, assume uniform
# Model params initialization / priors
'mu_init': 0.4,
# Optimizer
'optimizer_config': {
'optimizer': 'sgd',
'optimizer_common': {
'lr': 0.01,
},
# Optimizer - SGD
'sgd_config': {
'momentum': 0.9,
},
},
# Scheduler
'scheduler_config': {
'scheduler': None,
},
# Checkpointer
'checkpoint': False,
# Train loop
'n_epochs': 100,
'print_every': 10,
},
}
|
<commit_before>lm_default_config = {
### GENERAL
'seed': None,
'verbose': True,
'show_plots': True,
### TRAIN
'train_config': {
# Classifier
# Class balance (if learn_class_balance=False, fix to class_balance)
'learn_class_balance': False,
# Class balance initialization / prior
'class_balance_init': None, # (array) If None, assume uniform
# Model params initialization / priors
'mu_init': 0.4,
# L2 regularization (around prior values)
'l2': 0.01,
# Optimizer
'optimizer_config': {
'optimizer': 'sgd',
'optimizer_common': {
'lr': 0.01,
},
# Optimizer - SGD
'sgd_config': {
'momentum': 0.9,
},
},
# Scheduler
'scheduler_config': {
'scheduler': None,
},
# Checkpointer
'checkpoint': False,
# Train loop
'n_epochs': 100,
'print_every': 10,
},
}
<commit_msg>Remove l2 from lm_default_config since it is currently unused<commit_after>lm_default_config = {
### GENERAL
'seed': None,
'verbose': True,
'show_plots': True,
### TRAIN
'train_config': {
# Classifier
# Class balance (if learn_class_balance=False, fix to class_balance)
'learn_class_balance': False,
# Class balance initialization / prior
'class_balance_init': None, # (array) If None, assume uniform
# Model params initialization / priors
'mu_init': 0.4,
# Optimizer
'optimizer_config': {
'optimizer': 'sgd',
'optimizer_common': {
'lr': 0.01,
},
# Optimizer - SGD
'sgd_config': {
'momentum': 0.9,
},
},
# Scheduler
'scheduler_config': {
'scheduler': None,
},
# Checkpointer
'checkpoint': False,
# Train loop
'n_epochs': 100,
'print_every': 10,
},
}
|
d33d059821e391fcf34630cfb3ea8d67a0c6ec59
|
tests/test_views.py
|
tests/test_views.py
|
import unittest
from mongows import views
from tests import MongoWSTestCase
class ViewsTestCase(MongoWSTestCase):
def test_hello(self):
rv = self.app.get('/')
self.assertTrue('Hello World!' in rv.data)
|
import unittest
from mongows import views
from tests import MongoWSTestCase
class ViewsTestCase(MongoWSTestCase):
def test_hello(self):
rv = self.app.get('/')
self.assertTrue('Hello World!' in rv.data)
def test_create_mws_resource(self):
url = '/mws'
rv = self.app.post(url)
self.assertTrue('Not yet implemented' in rv.data)
def test_keep_mws_alive(self):
url = '/mws/res_id/keep-alive'
rv = self.app.post(url)
self.assertTrue('Not yet implemented' in rv.data)
def test_db_collection_find(self):
url = '/mws/res_id/db/collection_name/find'
rv = self.app.get(url)
self.assertTrue('Not yet implemented' in rv.data)
|
Add stub unit tests for stub views funcs.
|
Views: Add stub unit tests for stub views funcs.
|
Python
|
apache-2.0
|
ecbtln/mongo-web-shell,xl76/mongo-web-shell,ecbtln/mongo-web-shell,FuegoFro/mongo-web-shell,mongodb-labs/mongo-web-shell,10gen-labs/mongo-web-shell,pilliq/mongo-web-shell,xl76/mongo-web-shell,pilliq/mongo-web-shell,rcchan/mongo-web-shell,mongodb-labs/mongo-web-shell,rcchan/mongo-web-shell,mongodb-labs/mongo-web-shell,mcomella/mongo-web-shell,FuegoFro/mongo-web-shell,lnickers2004/mongo-web-shell,10gen-labs/mongo-web-shell,ecbtln/mongo-web-shell,pilliq/mongo-web-shell,lnickers2004/mongo-web-shell,FuegoFro/mongo-web-shell,lnickers2004/mongo-web-shell,rcchan/mongo-web-shell,10gen-labs/mongo-web-shell,mcomella/mongo-web-shell,mcomella/mongo-web-shell,lnickers2004/mongo-web-shell
|
import unittest
from mongows import views
from tests import MongoWSTestCase
class ViewsTestCase(MongoWSTestCase):
def test_hello(self):
rv = self.app.get('/')
self.assertTrue('Hello World!' in rv.data)
Views: Add stub unit tests for stub views funcs.
|
import unittest
from mongows import views
from tests import MongoWSTestCase
class ViewsTestCase(MongoWSTestCase):
def test_hello(self):
rv = self.app.get('/')
self.assertTrue('Hello World!' in rv.data)
def test_create_mws_resource(self):
url = '/mws'
rv = self.app.post(url)
self.assertTrue('Not yet implemented' in rv.data)
def test_keep_mws_alive(self):
url = '/mws/res_id/keep-alive'
rv = self.app.post(url)
self.assertTrue('Not yet implemented' in rv.data)
def test_db_collection_find(self):
url = '/mws/res_id/db/collection_name/find'
rv = self.app.get(url)
self.assertTrue('Not yet implemented' in rv.data)
|
<commit_before>import unittest
from mongows import views
from tests import MongoWSTestCase
class ViewsTestCase(MongoWSTestCase):
def test_hello(self):
rv = self.app.get('/')
self.assertTrue('Hello World!' in rv.data)
<commit_msg>Views: Add stub unit tests for stub views funcs.<commit_after>
|
import unittest
from mongows import views
from tests import MongoWSTestCase
class ViewsTestCase(MongoWSTestCase):
def test_hello(self):
rv = self.app.get('/')
self.assertTrue('Hello World!' in rv.data)
def test_create_mws_resource(self):
url = '/mws'
rv = self.app.post(url)
self.assertTrue('Not yet implemented' in rv.data)
def test_keep_mws_alive(self):
url = '/mws/res_id/keep-alive'
rv = self.app.post(url)
self.assertTrue('Not yet implemented' in rv.data)
def test_db_collection_find(self):
url = '/mws/res_id/db/collection_name/find'
rv = self.app.get(url)
self.assertTrue('Not yet implemented' in rv.data)
|
import unittest
from mongows import views
from tests import MongoWSTestCase
class ViewsTestCase(MongoWSTestCase):
def test_hello(self):
rv = self.app.get('/')
self.assertTrue('Hello World!' in rv.data)
Views: Add stub unit tests for stub views funcs.import unittest
from mongows import views
from tests import MongoWSTestCase
class ViewsTestCase(MongoWSTestCase):
def test_hello(self):
rv = self.app.get('/')
self.assertTrue('Hello World!' in rv.data)
def test_create_mws_resource(self):
url = '/mws'
rv = self.app.post(url)
self.assertTrue('Not yet implemented' in rv.data)
def test_keep_mws_alive(self):
url = '/mws/res_id/keep-alive'
rv = self.app.post(url)
self.assertTrue('Not yet implemented' in rv.data)
def test_db_collection_find(self):
url = '/mws/res_id/db/collection_name/find'
rv = self.app.get(url)
self.assertTrue('Not yet implemented' in rv.data)
|
<commit_before>import unittest
from mongows import views
from tests import MongoWSTestCase
class ViewsTestCase(MongoWSTestCase):
def test_hello(self):
rv = self.app.get('/')
self.assertTrue('Hello World!' in rv.data)
<commit_msg>Views: Add stub unit tests for stub views funcs.<commit_after>import unittest
from mongows import views
from tests import MongoWSTestCase
class ViewsTestCase(MongoWSTestCase):
def test_hello(self):
rv = self.app.get('/')
self.assertTrue('Hello World!' in rv.data)
def test_create_mws_resource(self):
url = '/mws'
rv = self.app.post(url)
self.assertTrue('Not yet implemented' in rv.data)
def test_keep_mws_alive(self):
url = '/mws/res_id/keep-alive'
rv = self.app.post(url)
self.assertTrue('Not yet implemented' in rv.data)
def test_db_collection_find(self):
url = '/mws/res_id/db/collection_name/find'
rv = self.app.get(url)
self.assertTrue('Not yet implemented' in rv.data)
|
ebbcbed26731a24e02be6e90751a21a04051bb4b
|
tests/test_write.py
|
tests/test_write.py
|
from __future__ import absolute_import
from ofxparse import OfxParser, OfxPrinter
from unittest import TestCase
from io import StringIO
from os import close, remove
from tempfile import mkstemp
import sys
sys.path.append('..')
from .support import open_file
class TestOfxWrite(TestCase):
def test_write(self):
with open_file('fidelity.ofx') as f:
ofx = OfxParser.parse(f)
self.assertEqual(str(ofx), "")
def test_using_ofx_printer(self):
with open_file('checking.ofx') as f:
ofx = OfxParser.parse(f)
fd, name = mkstemp()
close(fd)
printer = OfxPrinter(ofx=ofx, filename=name)
printer.write(tabs=1)
def test_using_ofx_printer_with_stringio(self):
with open_file('checking.ofx') as f:
ofx = OfxParser.parse(f)
output_buffer = StringIO()
printer = OfxPrinter(ofx=ofx, filename=None)
printer.writeToFile(output_buffer, tabs=1)
assert output_buffer.getvalue().startswith("OFXHEADER")
if __name__ == "__main__":
import unittest
unittest.main()
|
from __future__ import absolute_import
from ofxparse import OfxParser, OfxPrinter
from unittest import TestCase
from six import StringIO
from os import close, remove
from tempfile import mkstemp
import sys
sys.path.append('..')
from .support import open_file
class TestOfxWrite(TestCase):
def test_write(self):
with open_file('fidelity.ofx') as f:
ofx = OfxParser.parse(f)
self.assertEqual(str(ofx), "")
def test_using_ofx_printer(self):
with open_file('checking.ofx') as f:
ofx = OfxParser.parse(f)
fd, name = mkstemp()
close(fd)
printer = OfxPrinter(ofx=ofx, filename=name)
printer.write(tabs=1)
def test_using_ofx_printer_with_stringio(self):
with open_file('checking.ofx') as f:
ofx = OfxParser.parse(f)
output_buffer = StringIO()
printer = OfxPrinter(ofx=ofx, filename=None)
printer.writeToFile(output_buffer, tabs=1)
assert output_buffer.getvalue().startswith("OFXHEADER")
if __name__ == "__main__":
import unittest
unittest.main()
|
Fix test_using_ofx_printer_with_stringio for python 2.7
|
Fix test_using_ofx_printer_with_stringio for python 2.7
|
Python
|
mit
|
rdsteed/ofxparse,udibr/ofxparse,jseutter/ofxparse
|
from __future__ import absolute_import
from ofxparse import OfxParser, OfxPrinter
from unittest import TestCase
from io import StringIO
from os import close, remove
from tempfile import mkstemp
import sys
sys.path.append('..')
from .support import open_file
class TestOfxWrite(TestCase):
def test_write(self):
with open_file('fidelity.ofx') as f:
ofx = OfxParser.parse(f)
self.assertEqual(str(ofx), "")
def test_using_ofx_printer(self):
with open_file('checking.ofx') as f:
ofx = OfxParser.parse(f)
fd, name = mkstemp()
close(fd)
printer = OfxPrinter(ofx=ofx, filename=name)
printer.write(tabs=1)
def test_using_ofx_printer_with_stringio(self):
with open_file('checking.ofx') as f:
ofx = OfxParser.parse(f)
output_buffer = StringIO()
printer = OfxPrinter(ofx=ofx, filename=None)
printer.writeToFile(output_buffer, tabs=1)
assert output_buffer.getvalue().startswith("OFXHEADER")
if __name__ == "__main__":
import unittest
unittest.main()
Fix test_using_ofx_printer_with_stringio for python 2.7
|
from __future__ import absolute_import
from ofxparse import OfxParser, OfxPrinter
from unittest import TestCase
from six import StringIO
from os import close, remove
from tempfile import mkstemp
import sys
sys.path.append('..')
from .support import open_file
class TestOfxWrite(TestCase):
def test_write(self):
with open_file('fidelity.ofx') as f:
ofx = OfxParser.parse(f)
self.assertEqual(str(ofx), "")
def test_using_ofx_printer(self):
with open_file('checking.ofx') as f:
ofx = OfxParser.parse(f)
fd, name = mkstemp()
close(fd)
printer = OfxPrinter(ofx=ofx, filename=name)
printer.write(tabs=1)
def test_using_ofx_printer_with_stringio(self):
with open_file('checking.ofx') as f:
ofx = OfxParser.parse(f)
output_buffer = StringIO()
printer = OfxPrinter(ofx=ofx, filename=None)
printer.writeToFile(output_buffer, tabs=1)
assert output_buffer.getvalue().startswith("OFXHEADER")
if __name__ == "__main__":
import unittest
unittest.main()
|
<commit_before>from __future__ import absolute_import
from ofxparse import OfxParser, OfxPrinter
from unittest import TestCase
from io import StringIO
from os import close, remove
from tempfile import mkstemp
import sys
sys.path.append('..')
from .support import open_file
class TestOfxWrite(TestCase):
def test_write(self):
with open_file('fidelity.ofx') as f:
ofx = OfxParser.parse(f)
self.assertEqual(str(ofx), "")
def test_using_ofx_printer(self):
with open_file('checking.ofx') as f:
ofx = OfxParser.parse(f)
fd, name = mkstemp()
close(fd)
printer = OfxPrinter(ofx=ofx, filename=name)
printer.write(tabs=1)
def test_using_ofx_printer_with_stringio(self):
with open_file('checking.ofx') as f:
ofx = OfxParser.parse(f)
output_buffer = StringIO()
printer = OfxPrinter(ofx=ofx, filename=None)
printer.writeToFile(output_buffer, tabs=1)
assert output_buffer.getvalue().startswith("OFXHEADER")
if __name__ == "__main__":
import unittest
unittest.main()
<commit_msg>Fix test_using_ofx_printer_with_stringio for python 2.7<commit_after>
|
from __future__ import absolute_import
from ofxparse import OfxParser, OfxPrinter
from unittest import TestCase
from six import StringIO
from os import close, remove
from tempfile import mkstemp
import sys
sys.path.append('..')
from .support import open_file
class TestOfxWrite(TestCase):
def test_write(self):
with open_file('fidelity.ofx') as f:
ofx = OfxParser.parse(f)
self.assertEqual(str(ofx), "")
def test_using_ofx_printer(self):
with open_file('checking.ofx') as f:
ofx = OfxParser.parse(f)
fd, name = mkstemp()
close(fd)
printer = OfxPrinter(ofx=ofx, filename=name)
printer.write(tabs=1)
def test_using_ofx_printer_with_stringio(self):
with open_file('checking.ofx') as f:
ofx = OfxParser.parse(f)
output_buffer = StringIO()
printer = OfxPrinter(ofx=ofx, filename=None)
printer.writeToFile(output_buffer, tabs=1)
assert output_buffer.getvalue().startswith("OFXHEADER")
if __name__ == "__main__":
import unittest
unittest.main()
|
from __future__ import absolute_import
from ofxparse import OfxParser, OfxPrinter
from unittest import TestCase
from io import StringIO
from os import close, remove
from tempfile import mkstemp
import sys
sys.path.append('..')
from .support import open_file
class TestOfxWrite(TestCase):
def test_write(self):
with open_file('fidelity.ofx') as f:
ofx = OfxParser.parse(f)
self.assertEqual(str(ofx), "")
def test_using_ofx_printer(self):
with open_file('checking.ofx') as f:
ofx = OfxParser.parse(f)
fd, name = mkstemp()
close(fd)
printer = OfxPrinter(ofx=ofx, filename=name)
printer.write(tabs=1)
def test_using_ofx_printer_with_stringio(self):
with open_file('checking.ofx') as f:
ofx = OfxParser.parse(f)
output_buffer = StringIO()
printer = OfxPrinter(ofx=ofx, filename=None)
printer.writeToFile(output_buffer, tabs=1)
assert output_buffer.getvalue().startswith("OFXHEADER")
if __name__ == "__main__":
import unittest
unittest.main()
Fix test_using_ofx_printer_with_stringio for python 2.7from __future__ import absolute_import
from ofxparse import OfxParser, OfxPrinter
from unittest import TestCase
from six import StringIO
from os import close, remove
from tempfile import mkstemp
import sys
sys.path.append('..')
from .support import open_file
class TestOfxWrite(TestCase):
def test_write(self):
with open_file('fidelity.ofx') as f:
ofx = OfxParser.parse(f)
self.assertEqual(str(ofx), "")
def test_using_ofx_printer(self):
with open_file('checking.ofx') as f:
ofx = OfxParser.parse(f)
fd, name = mkstemp()
close(fd)
printer = OfxPrinter(ofx=ofx, filename=name)
printer.write(tabs=1)
def test_using_ofx_printer_with_stringio(self):
with open_file('checking.ofx') as f:
ofx = OfxParser.parse(f)
output_buffer = StringIO()
printer = OfxPrinter(ofx=ofx, filename=None)
printer.writeToFile(output_buffer, tabs=1)
assert output_buffer.getvalue().startswith("OFXHEADER")
if __name__ == "__main__":
import unittest
unittest.main()
|
<commit_before>from __future__ import absolute_import
from ofxparse import OfxParser, OfxPrinter
from unittest import TestCase
from io import StringIO
from os import close, remove
from tempfile import mkstemp
import sys
sys.path.append('..')
from .support import open_file
class TestOfxWrite(TestCase):
def test_write(self):
with open_file('fidelity.ofx') as f:
ofx = OfxParser.parse(f)
self.assertEqual(str(ofx), "")
def test_using_ofx_printer(self):
with open_file('checking.ofx') as f:
ofx = OfxParser.parse(f)
fd, name = mkstemp()
close(fd)
printer = OfxPrinter(ofx=ofx, filename=name)
printer.write(tabs=1)
def test_using_ofx_printer_with_stringio(self):
with open_file('checking.ofx') as f:
ofx = OfxParser.parse(f)
output_buffer = StringIO()
printer = OfxPrinter(ofx=ofx, filename=None)
printer.writeToFile(output_buffer, tabs=1)
assert output_buffer.getvalue().startswith("OFXHEADER")
if __name__ == "__main__":
import unittest
unittest.main()
<commit_msg>Fix test_using_ofx_printer_with_stringio for python 2.7<commit_after>from __future__ import absolute_import
from ofxparse import OfxParser, OfxPrinter
from unittest import TestCase
from six import StringIO
from os import close, remove
from tempfile import mkstemp
import sys
sys.path.append('..')
from .support import open_file
class TestOfxWrite(TestCase):
def test_write(self):
with open_file('fidelity.ofx') as f:
ofx = OfxParser.parse(f)
self.assertEqual(str(ofx), "")
def test_using_ofx_printer(self):
with open_file('checking.ofx') as f:
ofx = OfxParser.parse(f)
fd, name = mkstemp()
close(fd)
printer = OfxPrinter(ofx=ofx, filename=name)
printer.write(tabs=1)
def test_using_ofx_printer_with_stringio(self):
with open_file('checking.ofx') as f:
ofx = OfxParser.parse(f)
output_buffer = StringIO()
printer = OfxPrinter(ofx=ofx, filename=None)
printer.writeToFile(output_buffer, tabs=1)
assert output_buffer.getvalue().startswith("OFXHEADER")
if __name__ == "__main__":
import unittest
unittest.main()
|
925fefdcdaf32123a9ed4ed2b038bcb11269d77d
|
main/appengine_config.py
|
main/appengine_config.py
|
# coding: utf-8
import os
import sys
sys.path.insert(0, 'libx')
if os.environ.get('SERVER_SOFTWARE', '').startswith('Google App Engine'):
sys.path.insert(0, 'lib.zip')
else:
import re
from google.appengine.tools.devappserver2.python import stubs
re_ = stubs.FakeFile._skip_files.pattern.replace('|^lib/.*', '')
re_ = re.compile(re_)
stubs.FakeFile._skip_files = re_
sys.path.insert(0, 'lib')
sys.path.insert(0, 'libx')
|
# coding: utf-8
import os
import sys
if os.environ.get('SERVER_SOFTWARE', '').startswith('Google App Engine'):
sys.path.insert(0, 'lib.zip')
else:
import re
from google.appengine.tools.devappserver2.python import stubs
re_ = stubs.FakeFile._skip_files.pattern.replace('|^lib/.*', '')
re_ = re.compile(re_)
stubs.FakeFile._skip_files = re_
sys.path.insert(0, 'lib')
sys.path.insert(0, 'libx')
|
Remove duplicate libx path insertion
|
Remove duplicate libx path insertion
|
Python
|
mit
|
gae-init/gae-init-babel,lipis/life-line,lipis/life-line,mdxs/gae-init-babel,gae-init/gae-init-babel,gae-init/gae-init-babel,mdxs/gae-init-babel,gae-init/gae-init-babel,mdxs/gae-init-babel,lipis/life-line
|
# coding: utf-8
import os
import sys
sys.path.insert(0, 'libx')
if os.environ.get('SERVER_SOFTWARE', '').startswith('Google App Engine'):
sys.path.insert(0, 'lib.zip')
else:
import re
from google.appengine.tools.devappserver2.python import stubs
re_ = stubs.FakeFile._skip_files.pattern.replace('|^lib/.*', '')
re_ = re.compile(re_)
stubs.FakeFile._skip_files = re_
sys.path.insert(0, 'lib')
sys.path.insert(0, 'libx')
Remove duplicate libx path insertion
|
# coding: utf-8
import os
import sys
if os.environ.get('SERVER_SOFTWARE', '').startswith('Google App Engine'):
sys.path.insert(0, 'lib.zip')
else:
import re
from google.appengine.tools.devappserver2.python import stubs
re_ = stubs.FakeFile._skip_files.pattern.replace('|^lib/.*', '')
re_ = re.compile(re_)
stubs.FakeFile._skip_files = re_
sys.path.insert(0, 'lib')
sys.path.insert(0, 'libx')
|
<commit_before># coding: utf-8
import os
import sys
sys.path.insert(0, 'libx')
if os.environ.get('SERVER_SOFTWARE', '').startswith('Google App Engine'):
sys.path.insert(0, 'lib.zip')
else:
import re
from google.appengine.tools.devappserver2.python import stubs
re_ = stubs.FakeFile._skip_files.pattern.replace('|^lib/.*', '')
re_ = re.compile(re_)
stubs.FakeFile._skip_files = re_
sys.path.insert(0, 'lib')
sys.path.insert(0, 'libx')
<commit_msg>Remove duplicate libx path insertion<commit_after>
|
# coding: utf-8
import os
import sys
if os.environ.get('SERVER_SOFTWARE', '').startswith('Google App Engine'):
sys.path.insert(0, 'lib.zip')
else:
import re
from google.appengine.tools.devappserver2.python import stubs
re_ = stubs.FakeFile._skip_files.pattern.replace('|^lib/.*', '')
re_ = re.compile(re_)
stubs.FakeFile._skip_files = re_
sys.path.insert(0, 'lib')
sys.path.insert(0, 'libx')
|
# coding: utf-8
import os
import sys
sys.path.insert(0, 'libx')
if os.environ.get('SERVER_SOFTWARE', '').startswith('Google App Engine'):
sys.path.insert(0, 'lib.zip')
else:
import re
from google.appengine.tools.devappserver2.python import stubs
re_ = stubs.FakeFile._skip_files.pattern.replace('|^lib/.*', '')
re_ = re.compile(re_)
stubs.FakeFile._skip_files = re_
sys.path.insert(0, 'lib')
sys.path.insert(0, 'libx')
Remove duplicate libx path insertion# coding: utf-8
import os
import sys
if os.environ.get('SERVER_SOFTWARE', '').startswith('Google App Engine'):
sys.path.insert(0, 'lib.zip')
else:
import re
from google.appengine.tools.devappserver2.python import stubs
re_ = stubs.FakeFile._skip_files.pattern.replace('|^lib/.*', '')
re_ = re.compile(re_)
stubs.FakeFile._skip_files = re_
sys.path.insert(0, 'lib')
sys.path.insert(0, 'libx')
|
<commit_before># coding: utf-8
import os
import sys
sys.path.insert(0, 'libx')
if os.environ.get('SERVER_SOFTWARE', '').startswith('Google App Engine'):
sys.path.insert(0, 'lib.zip')
else:
import re
from google.appengine.tools.devappserver2.python import stubs
re_ = stubs.FakeFile._skip_files.pattern.replace('|^lib/.*', '')
re_ = re.compile(re_)
stubs.FakeFile._skip_files = re_
sys.path.insert(0, 'lib')
sys.path.insert(0, 'libx')
<commit_msg>Remove duplicate libx path insertion<commit_after># coding: utf-8
import os
import sys
if os.environ.get('SERVER_SOFTWARE', '').startswith('Google App Engine'):
sys.path.insert(0, 'lib.zip')
else:
import re
from google.appengine.tools.devappserver2.python import stubs
re_ = stubs.FakeFile._skip_files.pattern.replace('|^lib/.*', '')
re_ = re.compile(re_)
stubs.FakeFile._skip_files = re_
sys.path.insert(0, 'lib')
sys.path.insert(0, 'libx')
|
412f432cd09d39970171630666bab5fd2cc89924
|
o3d/installer/win/o3d_version.py
|
o3d/installer/win/o3d_version.py
|
#!/usr/bin/python2.4
# Copyright 2008-9 Google Inc. All Rights Reserved.
# version = (major, minor, trunk, patch)
plugin_version = (0, 1, 43, 1)
sdk_version = plugin_version
|
#!/usr/bin/python2.4
# Copyright 2008-9 Google Inc. All Rights Reserved.
# version = (major, minor, trunk, patch)
plugin_version = (0, 1, 43, 2)
sdk_version = plugin_version
|
Bump version to turn on SET_MAX_FPS.
|
Bump version to turn on SET_MAX_FPS.
Review URL: http://codereview.chromium.org/1875002
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@46239 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
gavinp/chromium,gavinp/chromium,adobe/chromium,Crystalnix/house-of-life-chromium,adobe/chromium,adobe/chromium,gavinp/chromium,gavinp/chromium,gavinp/chromium,Crystalnix/house-of-life-chromium,gavinp/chromium,adobe/chromium,Crystalnix/house-of-life-chromium,yitian134/chromium,ropik/chromium,yitian134/chromium,Crystalnix/house-of-life-chromium,adobe/chromium,ropik/chromium,ropik/chromium,Crystalnix/house-of-life-chromium,yitian134/chromium,adobe/chromium,gavinp/chromium,adobe/chromium,adobe/chromium,adobe/chromium,gavinp/chromium,gavinp/chromium,yitian134/chromium,ropik/chromium,yitian134/chromium,adobe/chromium,ropik/chromium,yitian134/chromium,Crystalnix/house-of-life-chromium,gavinp/chromium,ropik/chromium,adobe/chromium,Crystalnix/house-of-life-chromium,Crystalnix/house-of-life-chromium,Crystalnix/house-of-life-chromium,Crystalnix/house-of-life-chromium,yitian134/chromium,yitian134/chromium,ropik/chromium,ropik/chromium,ropik/chromium,yitian134/chromium,yitian134/chromium,Crystalnix/house-of-life-chromium
|
#!/usr/bin/python2.4
# Copyright 2008-9 Google Inc. All Rights Reserved.
# version = (major, minor, trunk, patch)
plugin_version = (0, 1, 43, 1)
sdk_version = plugin_version
Bump version to turn on SET_MAX_FPS.
Review URL: http://codereview.chromium.org/1875002
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@46239 0039d316-1c4b-4281-b951-d872f2087c98
|
#!/usr/bin/python2.4
# Copyright 2008-9 Google Inc. All Rights Reserved.
# version = (major, minor, trunk, patch)
plugin_version = (0, 1, 43, 2)
sdk_version = plugin_version
|
<commit_before>#!/usr/bin/python2.4
# Copyright 2008-9 Google Inc. All Rights Reserved.
# version = (major, minor, trunk, patch)
plugin_version = (0, 1, 43, 1)
sdk_version = plugin_version
<commit_msg>Bump version to turn on SET_MAX_FPS.
Review URL: http://codereview.chromium.org/1875002
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@46239 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
#!/usr/bin/python2.4
# Copyright 2008-9 Google Inc. All Rights Reserved.
# version = (major, minor, trunk, patch)
plugin_version = (0, 1, 43, 2)
sdk_version = plugin_version
|
#!/usr/bin/python2.4
# Copyright 2008-9 Google Inc. All Rights Reserved.
# version = (major, minor, trunk, patch)
plugin_version = (0, 1, 43, 1)
sdk_version = plugin_version
Bump version to turn on SET_MAX_FPS.
Review URL: http://codereview.chromium.org/1875002
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@46239 0039d316-1c4b-4281-b951-d872f2087c98#!/usr/bin/python2.4
# Copyright 2008-9 Google Inc. All Rights Reserved.
# version = (major, minor, trunk, patch)
plugin_version = (0, 1, 43, 2)
sdk_version = plugin_version
|
<commit_before>#!/usr/bin/python2.4
# Copyright 2008-9 Google Inc. All Rights Reserved.
# version = (major, minor, trunk, patch)
plugin_version = (0, 1, 43, 1)
sdk_version = plugin_version
<commit_msg>Bump version to turn on SET_MAX_FPS.
Review URL: http://codereview.chromium.org/1875002
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@46239 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>#!/usr/bin/python2.4
# Copyright 2008-9 Google Inc. All Rights Reserved.
# version = (major, minor, trunk, patch)
plugin_version = (0, 1, 43, 2)
sdk_version = plugin_version
|
62451e8c5b3d93409fa4bcc7ec29827be6253e88
|
website/registries/utils.py
|
website/registries/utils.py
|
REG_CAMPAIGNS = {
'prereg': 'Prereg Challenge',
'registered_report': 'Registered Report Protocol Preregistration',
}
def get_campaign_schema(campaign):
from osf.models import RegistrationSchema
if campaign not in REG_CAMPAIGNS:
raise ValueError('campaign must be one of: {}'.format(', '.join(REG_CAMPAIGNS.keys())))
schema_name = REG_CAMPAIGNS[campaign]
return RegistrationSchema.objects.get(name=schema_name, schema_version=2)
def drafts_for_user(user, campaign=None):
from osf.models import DraftRegistration, Node
from guardian.shortcuts import get_objects_for_user
if not user or user.is_anonymous:
return None
node_qs = get_objects_for_user(user, 'admin_node', Node, with_superuser=False).exclude(is_deleted=True)
if campaign:
drafts = DraftRegistration.objects.filter(
registration_schema=get_campaign_schema(campaign),
approval=None,
registered_node=None,
deleted__isnull=True,
branched_from__in=list(node_qs),
initiator=user
)
else:
drafts = DraftRegistration.objects.filter(
approval=None,
registered_node=None,
deleted__isnull=True,
branched_from__in=list(node_qs),
initiator=user
)
return drafts
|
REG_CAMPAIGNS = {
'prereg': 'Prereg Challenge',
'registered_report': 'Registered Report Protocol Preregistration',
}
def get_campaign_schema(campaign):
from osf.models import RegistrationSchema
if campaign not in REG_CAMPAIGNS:
raise ValueError('campaign must be one of: {}'.format(', '.join(REG_CAMPAIGNS.keys())))
schema_name = REG_CAMPAIGNS[campaign]
return RegistrationSchema.objects.get(name=schema_name, schema_version=2)
def drafts_for_user(user, campaign=None):
from osf.models import DraftRegistration, Node
from guardian.shortcuts import get_objects_for_user
if not user or user.is_anonymous:
return None
node_qs = get_objects_for_user(user, 'admin_node', Node, with_superuser=False).exclude(is_deleted=True).values_list('id', flat=True)
drafts = DraftRegistration.objects.filter(
approval=None,
registered_node=None,
deleted__isnull=True,
branched_from__in=node_qs,
initiator=user
)
if campaign:
drafts = drafts.filter(
registration_schema=get_campaign_schema(campaign),
)
return drafts
|
Speed up draft registrations query.
|
Speed up draft registrations query.
|
Python
|
apache-2.0
|
baylee-d/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,adlius/osf.io,mattclark/osf.io,felliott/osf.io,saradbowman/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,baylee-d/osf.io,mattclark/osf.io,brianjgeiger/osf.io,mattclark/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,brianjgeiger/osf.io,baylee-d/osf.io,adlius/osf.io,cslzchen/osf.io,aaxelb/osf.io,aaxelb/osf.io,felliott/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,mfraezz/osf.io,mfraezz/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,aaxelb/osf.io,mfraezz/osf.io,felliott/osf.io,saradbowman/osf.io,CenterForOpenScience/osf.io,adlius/osf.io
|
REG_CAMPAIGNS = {
'prereg': 'Prereg Challenge',
'registered_report': 'Registered Report Protocol Preregistration',
}
def get_campaign_schema(campaign):
from osf.models import RegistrationSchema
if campaign not in REG_CAMPAIGNS:
raise ValueError('campaign must be one of: {}'.format(', '.join(REG_CAMPAIGNS.keys())))
schema_name = REG_CAMPAIGNS[campaign]
return RegistrationSchema.objects.get(name=schema_name, schema_version=2)
def drafts_for_user(user, campaign=None):
from osf.models import DraftRegistration, Node
from guardian.shortcuts import get_objects_for_user
if not user or user.is_anonymous:
return None
node_qs = get_objects_for_user(user, 'admin_node', Node, with_superuser=False).exclude(is_deleted=True)
if campaign:
drafts = DraftRegistration.objects.filter(
registration_schema=get_campaign_schema(campaign),
approval=None,
registered_node=None,
deleted__isnull=True,
branched_from__in=list(node_qs),
initiator=user
)
else:
drafts = DraftRegistration.objects.filter(
approval=None,
registered_node=None,
deleted__isnull=True,
branched_from__in=list(node_qs),
initiator=user
)
return drafts
Speed up draft registrations query.
|
REG_CAMPAIGNS = {
'prereg': 'Prereg Challenge',
'registered_report': 'Registered Report Protocol Preregistration',
}
def get_campaign_schema(campaign):
from osf.models import RegistrationSchema
if campaign not in REG_CAMPAIGNS:
raise ValueError('campaign must be one of: {}'.format(', '.join(REG_CAMPAIGNS.keys())))
schema_name = REG_CAMPAIGNS[campaign]
return RegistrationSchema.objects.get(name=schema_name, schema_version=2)
def drafts_for_user(user, campaign=None):
from osf.models import DraftRegistration, Node
from guardian.shortcuts import get_objects_for_user
if not user or user.is_anonymous:
return None
node_qs = get_objects_for_user(user, 'admin_node', Node, with_superuser=False).exclude(is_deleted=True).values_list('id', flat=True)
drafts = DraftRegistration.objects.filter(
approval=None,
registered_node=None,
deleted__isnull=True,
branched_from__in=node_qs,
initiator=user
)
if campaign:
drafts = drafts.filter(
registration_schema=get_campaign_schema(campaign),
)
return drafts
|
<commit_before>REG_CAMPAIGNS = {
'prereg': 'Prereg Challenge',
'registered_report': 'Registered Report Protocol Preregistration',
}
def get_campaign_schema(campaign):
from osf.models import RegistrationSchema
if campaign not in REG_CAMPAIGNS:
raise ValueError('campaign must be one of: {}'.format(', '.join(REG_CAMPAIGNS.keys())))
schema_name = REG_CAMPAIGNS[campaign]
return RegistrationSchema.objects.get(name=schema_name, schema_version=2)
def drafts_for_user(user, campaign=None):
from osf.models import DraftRegistration, Node
from guardian.shortcuts import get_objects_for_user
if not user or user.is_anonymous:
return None
node_qs = get_objects_for_user(user, 'admin_node', Node, with_superuser=False).exclude(is_deleted=True)
if campaign:
drafts = DraftRegistration.objects.filter(
registration_schema=get_campaign_schema(campaign),
approval=None,
registered_node=None,
deleted__isnull=True,
branched_from__in=list(node_qs),
initiator=user
)
else:
drafts = DraftRegistration.objects.filter(
approval=None,
registered_node=None,
deleted__isnull=True,
branched_from__in=list(node_qs),
initiator=user
)
return drafts
<commit_msg>Speed up draft registrations query.<commit_after>
|
REG_CAMPAIGNS = {
'prereg': 'Prereg Challenge',
'registered_report': 'Registered Report Protocol Preregistration',
}
def get_campaign_schema(campaign):
from osf.models import RegistrationSchema
if campaign not in REG_CAMPAIGNS:
raise ValueError('campaign must be one of: {}'.format(', '.join(REG_CAMPAIGNS.keys())))
schema_name = REG_CAMPAIGNS[campaign]
return RegistrationSchema.objects.get(name=schema_name, schema_version=2)
def drafts_for_user(user, campaign=None):
from osf.models import DraftRegistration, Node
from guardian.shortcuts import get_objects_for_user
if not user or user.is_anonymous:
return None
node_qs = get_objects_for_user(user, 'admin_node', Node, with_superuser=False).exclude(is_deleted=True).values_list('id', flat=True)
drafts = DraftRegistration.objects.filter(
approval=None,
registered_node=None,
deleted__isnull=True,
branched_from__in=node_qs,
initiator=user
)
if campaign:
drafts = drafts.filter(
registration_schema=get_campaign_schema(campaign),
)
return drafts
|
REG_CAMPAIGNS = {
'prereg': 'Prereg Challenge',
'registered_report': 'Registered Report Protocol Preregistration',
}
def get_campaign_schema(campaign):
from osf.models import RegistrationSchema
if campaign not in REG_CAMPAIGNS:
raise ValueError('campaign must be one of: {}'.format(', '.join(REG_CAMPAIGNS.keys())))
schema_name = REG_CAMPAIGNS[campaign]
return RegistrationSchema.objects.get(name=schema_name, schema_version=2)
def drafts_for_user(user, campaign=None):
from osf.models import DraftRegistration, Node
from guardian.shortcuts import get_objects_for_user
if not user or user.is_anonymous:
return None
node_qs = get_objects_for_user(user, 'admin_node', Node, with_superuser=False).exclude(is_deleted=True)
if campaign:
drafts = DraftRegistration.objects.filter(
registration_schema=get_campaign_schema(campaign),
approval=None,
registered_node=None,
deleted__isnull=True,
branched_from__in=list(node_qs),
initiator=user
)
else:
drafts = DraftRegistration.objects.filter(
approval=None,
registered_node=None,
deleted__isnull=True,
branched_from__in=list(node_qs),
initiator=user
)
return drafts
Speed up draft registrations query.REG_CAMPAIGNS = {
'prereg': 'Prereg Challenge',
'registered_report': 'Registered Report Protocol Preregistration',
}
def get_campaign_schema(campaign):
from osf.models import RegistrationSchema
if campaign not in REG_CAMPAIGNS:
raise ValueError('campaign must be one of: {}'.format(', '.join(REG_CAMPAIGNS.keys())))
schema_name = REG_CAMPAIGNS[campaign]
return RegistrationSchema.objects.get(name=schema_name, schema_version=2)
def drafts_for_user(user, campaign=None):
from osf.models import DraftRegistration, Node
from guardian.shortcuts import get_objects_for_user
if not user or user.is_anonymous:
return None
node_qs = get_objects_for_user(user, 'admin_node', Node, with_superuser=False).exclude(is_deleted=True).values_list('id', flat=True)
drafts = DraftRegistration.objects.filter(
approval=None,
registered_node=None,
deleted__isnull=True,
branched_from__in=node_qs,
initiator=user
)
if campaign:
drafts = drafts.filter(
registration_schema=get_campaign_schema(campaign),
)
return drafts
|
<commit_before>REG_CAMPAIGNS = {
'prereg': 'Prereg Challenge',
'registered_report': 'Registered Report Protocol Preregistration',
}
def get_campaign_schema(campaign):
from osf.models import RegistrationSchema
if campaign not in REG_CAMPAIGNS:
raise ValueError('campaign must be one of: {}'.format(', '.join(REG_CAMPAIGNS.keys())))
schema_name = REG_CAMPAIGNS[campaign]
return RegistrationSchema.objects.get(name=schema_name, schema_version=2)
def drafts_for_user(user, campaign=None):
from osf.models import DraftRegistration, Node
from guardian.shortcuts import get_objects_for_user
if not user or user.is_anonymous:
return None
node_qs = get_objects_for_user(user, 'admin_node', Node, with_superuser=False).exclude(is_deleted=True)
if campaign:
drafts = DraftRegistration.objects.filter(
registration_schema=get_campaign_schema(campaign),
approval=None,
registered_node=None,
deleted__isnull=True,
branched_from__in=list(node_qs),
initiator=user
)
else:
drafts = DraftRegistration.objects.filter(
approval=None,
registered_node=None,
deleted__isnull=True,
branched_from__in=list(node_qs),
initiator=user
)
return drafts
<commit_msg>Speed up draft registrations query.<commit_after>REG_CAMPAIGNS = {
'prereg': 'Prereg Challenge',
'registered_report': 'Registered Report Protocol Preregistration',
}
def get_campaign_schema(campaign):
from osf.models import RegistrationSchema
if campaign not in REG_CAMPAIGNS:
raise ValueError('campaign must be one of: {}'.format(', '.join(REG_CAMPAIGNS.keys())))
schema_name = REG_CAMPAIGNS[campaign]
return RegistrationSchema.objects.get(name=schema_name, schema_version=2)
def drafts_for_user(user, campaign=None):
from osf.models import DraftRegistration, Node
from guardian.shortcuts import get_objects_for_user
if not user or user.is_anonymous:
return None
node_qs = get_objects_for_user(user, 'admin_node', Node, with_superuser=False).exclude(is_deleted=True).values_list('id', flat=True)
drafts = DraftRegistration.objects.filter(
approval=None,
registered_node=None,
deleted__isnull=True,
branched_from__in=node_qs,
initiator=user
)
if campaign:
drafts = drafts.filter(
registration_schema=get_campaign_schema(campaign),
)
return drafts
|
c02b09b4f9bf3ed3f8b78d5f65f699407d51d1a2
|
zou/app/models/task_type.py
|
zou/app/models/task_type.py
|
from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class TaskType(db.Model, BaseMixin, SerializerMixin):
"""
Categorize tasks in domain areas: modeling, animation, etc.
"""
name = db.Column(db.String(40), nullable=False)
short_name = db.Column(db.String(10))
color = db.Column(db.String(7), default="#FFFFFF")
priority = db.Column(db.Integer, default=1)
for_shots = db.Column(db.Boolean, default=False)
for_entity = db.Column(db.String(30), default="Asset")
shotgun_id = db.Column(db.Integer, index=True)
department_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("department.id")
)
__table_args__ = (
db.UniqueConstraint(
'name',
'for_entity',
'department_id',
name='task_type_uc'
),
)
|
from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class TaskType(db.Model, BaseMixin, SerializerMixin):
"""
Categorize tasks in domain areas: modeling, animation, etc.
"""
name = db.Column(db.String(40), nullable=False)
short_name = db.Column(db.String(20))
color = db.Column(db.String(7), default="#FFFFFF")
priority = db.Column(db.Integer, default=1)
for_shots = db.Column(db.Boolean, default=False)
for_entity = db.Column(db.String(30), default="Asset")
shotgun_id = db.Column(db.Integer, index=True)
department_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("department.id")
)
__table_args__ = (
db.UniqueConstraint(
'name',
'for_entity',
'department_id',
name='task_type_uc'
),
)
|
Make task type short field bigger
|
Make task type short field bigger
|
Python
|
agpl-3.0
|
cgwire/zou
|
from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class TaskType(db.Model, BaseMixin, SerializerMixin):
"""
Categorize tasks in domain areas: modeling, animation, etc.
"""
name = db.Column(db.String(40), nullable=False)
short_name = db.Column(db.String(10))
color = db.Column(db.String(7), default="#FFFFFF")
priority = db.Column(db.Integer, default=1)
for_shots = db.Column(db.Boolean, default=False)
for_entity = db.Column(db.String(30), default="Asset")
shotgun_id = db.Column(db.Integer, index=True)
department_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("department.id")
)
__table_args__ = (
db.UniqueConstraint(
'name',
'for_entity',
'department_id',
name='task_type_uc'
),
)
Make task type short field bigger
|
from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class TaskType(db.Model, BaseMixin, SerializerMixin):
"""
Categorize tasks in domain areas: modeling, animation, etc.
"""
name = db.Column(db.String(40), nullable=False)
short_name = db.Column(db.String(20))
color = db.Column(db.String(7), default="#FFFFFF")
priority = db.Column(db.Integer, default=1)
for_shots = db.Column(db.Boolean, default=False)
for_entity = db.Column(db.String(30), default="Asset")
shotgun_id = db.Column(db.Integer, index=True)
department_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("department.id")
)
__table_args__ = (
db.UniqueConstraint(
'name',
'for_entity',
'department_id',
name='task_type_uc'
),
)
|
<commit_before>from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class TaskType(db.Model, BaseMixin, SerializerMixin):
"""
Categorize tasks in domain areas: modeling, animation, etc.
"""
name = db.Column(db.String(40), nullable=False)
short_name = db.Column(db.String(10))
color = db.Column(db.String(7), default="#FFFFFF")
priority = db.Column(db.Integer, default=1)
for_shots = db.Column(db.Boolean, default=False)
for_entity = db.Column(db.String(30), default="Asset")
shotgun_id = db.Column(db.Integer, index=True)
department_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("department.id")
)
__table_args__ = (
db.UniqueConstraint(
'name',
'for_entity',
'department_id',
name='task_type_uc'
),
)
<commit_msg>Make task type short field bigger<commit_after>
|
from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class TaskType(db.Model, BaseMixin, SerializerMixin):
"""
Categorize tasks in domain areas: modeling, animation, etc.
"""
name = db.Column(db.String(40), nullable=False)
short_name = db.Column(db.String(20))
color = db.Column(db.String(7), default="#FFFFFF")
priority = db.Column(db.Integer, default=1)
for_shots = db.Column(db.Boolean, default=False)
for_entity = db.Column(db.String(30), default="Asset")
shotgun_id = db.Column(db.Integer, index=True)
department_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("department.id")
)
__table_args__ = (
db.UniqueConstraint(
'name',
'for_entity',
'department_id',
name='task_type_uc'
),
)
|
from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class TaskType(db.Model, BaseMixin, SerializerMixin):
"""
Categorize tasks in domain areas: modeling, animation, etc.
"""
name = db.Column(db.String(40), nullable=False)
short_name = db.Column(db.String(10))
color = db.Column(db.String(7), default="#FFFFFF")
priority = db.Column(db.Integer, default=1)
for_shots = db.Column(db.Boolean, default=False)
for_entity = db.Column(db.String(30), default="Asset")
shotgun_id = db.Column(db.Integer, index=True)
department_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("department.id")
)
__table_args__ = (
db.UniqueConstraint(
'name',
'for_entity',
'department_id',
name='task_type_uc'
),
)
Make task type short field biggerfrom sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class TaskType(db.Model, BaseMixin, SerializerMixin):
"""
Categorize tasks in domain areas: modeling, animation, etc.
"""
name = db.Column(db.String(40), nullable=False)
short_name = db.Column(db.String(20))
color = db.Column(db.String(7), default="#FFFFFF")
priority = db.Column(db.Integer, default=1)
for_shots = db.Column(db.Boolean, default=False)
for_entity = db.Column(db.String(30), default="Asset")
shotgun_id = db.Column(db.Integer, index=True)
department_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("department.id")
)
__table_args__ = (
db.UniqueConstraint(
'name',
'for_entity',
'department_id',
name='task_type_uc'
),
)
|
<commit_before>from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class TaskType(db.Model, BaseMixin, SerializerMixin):
"""
Categorize tasks in domain areas: modeling, animation, etc.
"""
name = db.Column(db.String(40), nullable=False)
short_name = db.Column(db.String(10))
color = db.Column(db.String(7), default="#FFFFFF")
priority = db.Column(db.Integer, default=1)
for_shots = db.Column(db.Boolean, default=False)
for_entity = db.Column(db.String(30), default="Asset")
shotgun_id = db.Column(db.Integer, index=True)
department_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("department.id")
)
__table_args__ = (
db.UniqueConstraint(
'name',
'for_entity',
'department_id',
name='task_type_uc'
),
)
<commit_msg>Make task type short field bigger<commit_after>from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class TaskType(db.Model, BaseMixin, SerializerMixin):
"""
Categorize tasks in domain areas: modeling, animation, etc.
"""
name = db.Column(db.String(40), nullable=False)
short_name = db.Column(db.String(20))
color = db.Column(db.String(7), default="#FFFFFF")
priority = db.Column(db.Integer, default=1)
for_shots = db.Column(db.Boolean, default=False)
for_entity = db.Column(db.String(30), default="Asset")
shotgun_id = db.Column(db.Integer, index=True)
department_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("department.id")
)
__table_args__ = (
db.UniqueConstraint(
'name',
'for_entity',
'department_id',
name='task_type_uc'
),
)
|
8afbd0fe7f4732d8484a2a41b91451ec220fc2f8
|
tools/perf/benchmarks/memory.py
|
tools/perf/benchmarks/memory.py
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from measurements import memory
class Memory(test.Test):
test = memory.Memory
page_set = 'page_sets/top_25.json'
class Reload(test.Test):
test = memory.Memory
page_set = 'page_sets/2012Q3.json'
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from measurements import memory
class MemoryTop25(test.Test):
test = memory.Memory
page_set = 'page_sets/top_25.json'
class Reload2012Q3(test.Test):
test = memory.Memory
page_set = 'page_sets/2012Q3.json'
|
Rename Memory benchmark to avoid conflict with Memory measurement.
|
[telemetry] Rename Memory benchmark to avoid conflict with Memory measurement.
Quick fix for now, but I may need to reconsider how run_measurement resolved name conflicts.
BUG=263511
TEST=None.
R=tonyg@chromium.org
Review URL: https://chromiumcodereview.appspot.com/19915008
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@213290 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,dednal/chromium.src,jaruba/chromium.src,mogoweb/chromium-crosswalk,jaruba/chromium.src,chuan9/chromium-crosswalk,Jonekee/chromium.src,mogoweb/chromium-crosswalk,M4sse/chromium.src,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,M4sse/chromium.src,littlstar/chromium.src,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,markYoungH/chromium.src,dushu1203/chromium.src,Just-D/chromium-1,markYoungH/chromium.src,Just-D/chromium-1,mogoweb/chromium-crosswalk,Fireblend/chromium-crosswalk,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,bright-sparks/chromium-spacewalk,Pluto-tv/chromium-crosswalk,ltilve/chromium,markYoungH/chromium.src,Fireblend/chromium-crosswalk,anirudhSK/chromium,ondra-novak/chromium.src,dushu1203/chromium.src,Jonekee/chromium.src,ChromiumWebApps/chromium,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,jaruba/chromium.src,bright-sparks/chromium-spacewalk,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,dednal/chromium.src,ChromiumWebApps/chromium,dednal/chromium.src,markYoungH/chromium.src,Chilledheart/chromium,anirudhSK/chromium,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,ltilve/chromium,mohamed--abdel-maksoud/chromium.src,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,dushu1203/chromium.src,hgl888/chromium-crosswalk-efl,anirudhSK/chrom
ium,hgl888/chromium-crosswalk-efl,patrickm/chromium.src,patrickm/chromium.src,Chilledheart/chromium,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,patrickm/chromium.src,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,ltilve/chromium,ondra-novak/chromium.src,dednal/chromium.src,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,ChromiumWebApps/chromium,crosswalk-project/chromium-crosswalk-efl,Chilledheart/chromium,anirudhSK/chromium,axinging/chromium-crosswalk,dednal/chromium.src,Just-D/chromium-1,dednal/chromium.src,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,ChromiumWebApps/chromium,ondra-novak/chromium.src,markYoungH/chromium.src,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,Pluto-tv/chromium-crosswalk,littlstar/chromium.src,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,anirudhSK/chromium,chuan9/chromium-crosswalk,Jonekee/chromium.src,Jonekee/chromium.src,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,dednal/chromium.src,Jonekee/chromium.src,Just-D/chromium-1,mogoweb/chromium-crosswalk,patrickm/chromium.src,hgl888/chromium-crosswalk,Just-D/chromium-1,M4sse/chromium.src,Just-D/chromium-1,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,patrickm/chromium.src,ltilve/chromium,bright-sparks/chromium-spacewalk,mogoweb/chromium-crosswalk,ltilve/chromium,bright-sparks/chromium-spacewalk,mogoweb/chromium-crosswalk,M4sse/chromium.src,dednal/chromium.src,ltilve/chromium,mogoweb/chromium-crosswalk,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,Fireblend/chromium-crosswalk,ltilve/chromium,bright-sparks/chromium-spacewalk,ChromiumWebApps/chromium,fujunwei/chromium-crosswalk,anirudhSK/chromium,markYoungH/chromium.src,mogoweb/chromium-crosswalk,M4sse/chromium.src,brigh
t-sparks/chromium-spacewalk,littlstar/chromium.src,patrickm/chromium.src,ltilve/chromium,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,ondra-novak/chromium.src,jaruba/chromium.src,jaruba/chromium.src,ChromiumWebApps/chromium,dushu1203/chromium.src,fujunwei/chromium-crosswalk,ChromiumWebApps/chromium,ondra-novak/chromium.src,hgl888/chromium-crosswalk,anirudhSK/chromium,ondra-novak/chromium.src,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,littlstar/chromium.src,Chilledheart/chromium,Just-D/chromium-1,chuan9/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,dushu1203/chromium.src,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,patrickm/chromium.src,ChromiumWebApps/chromium,dushu1203/chromium.src,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk-efl,anirudhSK/chromium,ChromiumWebApps/chromium,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,anirudhSK/chromium,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk,Chilledheart/chromium,ChromiumWebApps/chromium,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,axinging/chromium-crosswalk,Jonekee/chromium.src,M4sse/chromium.src,anirudhSK/chromium,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,ltilve/chrom
ium,anirudhSK/chromium,bright-sparks/chromium-spacewalk,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,M4sse/chromium.src,ChromiumWebApps/chromium,markYoungH/chromium.src,ChromiumWebApps/chromium,crosswalk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,jaruba/chromium.src,Just-D/chromium-1,dednal/chromium.src,littlstar/chromium.src,dednal/chromium.src,dushu1203/chromium.src,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,jaruba/chromium.src,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,M4sse/chromium.src,krieger-od/nwjs_chromium.src,Chilledheart/chromium,jaruba/chromium.src,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,patrickm/chromium.src,Just-D/chromium-1,littlstar/chromium.src,ondra-novak/chromium.src,M4sse/chromium.src,Jonekee/chromium.src,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,mogoweb/chromium-crosswalk,axinging/chromium-crosswalk,ondra-novak/chromium.src
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from measurements import memory
class Memory(test.Test):
test = memory.Memory
page_set = 'page_sets/top_25.json'
class Reload(test.Test):
test = memory.Memory
page_set = 'page_sets/2012Q3.json'
[telemetry] Rename Memory benchmark to avoid conflict with Memory measurement.
Quick fix for now, but I may need to reconsider how run_measurement resolved name conflicts.
BUG=263511
TEST=None.
R=tonyg@chromium.org
Review URL: https://chromiumcodereview.appspot.com/19915008
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@213290 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from measurements import memory
class MemoryTop25(test.Test):
test = memory.Memory
page_set = 'page_sets/top_25.json'
class Reload2012Q3(test.Test):
test = memory.Memory
page_set = 'page_sets/2012Q3.json'
|
<commit_before># Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from measurements import memory
class Memory(test.Test):
test = memory.Memory
page_set = 'page_sets/top_25.json'
class Reload(test.Test):
test = memory.Memory
page_set = 'page_sets/2012Q3.json'
<commit_msg>[telemetry] Rename Memory benchmark to avoid conflict with Memory measurement.
Quick fix for now, but I may need to reconsider how run_measurement resolved name conflicts.
BUG=263511
TEST=None.
R=tonyg@chromium.org
Review URL: https://chromiumcodereview.appspot.com/19915008
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@213290 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from measurements import memory
class MemoryTop25(test.Test):
test = memory.Memory
page_set = 'page_sets/top_25.json'
class Reload2012Q3(test.Test):
test = memory.Memory
page_set = 'page_sets/2012Q3.json'
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from measurements import memory
class Memory(test.Test):
test = memory.Memory
page_set = 'page_sets/top_25.json'
class Reload(test.Test):
test = memory.Memory
page_set = 'page_sets/2012Q3.json'
[telemetry] Rename Memory benchmark to avoid conflict with Memory measurement.
Quick fix for now, but I may need to reconsider how run_measurement resolved name conflicts.
BUG=263511
TEST=None.
R=tonyg@chromium.org
Review URL: https://chromiumcodereview.appspot.com/19915008
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@213290 0039d316-1c4b-4281-b951-d872f2087c98# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from measurements import memory
class MemoryTop25(test.Test):
test = memory.Memory
page_set = 'page_sets/top_25.json'
class Reload2012Q3(test.Test):
test = memory.Memory
page_set = 'page_sets/2012Q3.json'
|
<commit_before># Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from measurements import memory
class Memory(test.Test):
test = memory.Memory
page_set = 'page_sets/top_25.json'
class Reload(test.Test):
test = memory.Memory
page_set = 'page_sets/2012Q3.json'
<commit_msg>[telemetry] Rename Memory benchmark to avoid conflict with Memory measurement.
Quick fix for now, but I may need to reconsider how run_measurement resolved name conflicts.
BUG=263511
TEST=None.
R=tonyg@chromium.org
Review URL: https://chromiumcodereview.appspot.com/19915008
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@213290 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from measurements import memory
class MemoryTop25(test.Test):
test = memory.Memory
page_set = 'page_sets/top_25.json'
class Reload2012Q3(test.Test):
test = memory.Memory
page_set = 'page_sets/2012Q3.json'
|
39671789613e1811f2282d45a7c8970b0262e5ea
|
mopidy_jukebox/models.py
|
mopidy_jukebox/models.py
|
"""
Models for the Jukebox application
User - All users
Vote - Votes on songs
"""
import logging
from peewee import SqliteDatabase, Model, CharField, DateTimeField, ForeignKeyField
db = SqliteDatabase(None)
logger = logging.getLogger(__name__)
class User(Model):
name = CharField()
@staticmethod
def current():
return User.get(User.name == 'q')
class Meta:
database = db
class Vote(Model):
# references a Mopidy track
track_uri = CharField()
user = ForeignKeyField(User, related_name='voter')
timestamp = DateTimeField()
class Meta:
database = db
def init(db_file):
# Create db
db.init(db_file)
# Create tables
if not Vote.table_exists():
Vote.create_table()
if not User.table_exists():
User.create_table()
# create dummy user
User(name="q").save()
|
"""
Models for the Jukebox application
User - All users
Vote - Votes on songs
"""
import datetime
import logging
from peewee import SqliteDatabase, Model, CharField, DateTimeField, ForeignKeyField, UUIDField
db = SqliteDatabase(None)
logger = logging.getLogger(__name__)
class User(Model):
id = CharField(primary_key=True)
name = CharField()
picture = CharField()
email = CharField()
@staticmethod
def current():
return User.get(User.name == 'q')
class Meta:
database = db
class Vote(Model):
# references a Mopidy track
track_uri = CharField()
user = ForeignKeyField(User, related_name='voter')
timestamp = DateTimeField()
class Meta:
database = db
class Session(Model):
user = ForeignKeyField(User)
secret = UUIDField()
expires = DateTimeField(default=datetime.datetime.now() + datetime.timedelta(days=30)) # expires after 30 days
class Meta:
database = db
def init(db_file):
# Create db
db.init(db_file)
# Create tables
if not Vote.table_exists():
Vote.create_table()
if not User.table_exists():
User.create_table()
if not Session.table_exists():
Session.create_table()
|
Create model for session, add properties for user
|
Create model for session, add properties for user
|
Python
|
mit
|
qurben/mopidy-jukebox,qurben/mopidy-jukebox,qurben/mopidy-jukebox
|
"""
Models for the Jukebox application
User - All users
Vote - Votes on songs
"""
import logging
from peewee import SqliteDatabase, Model, CharField, DateTimeField, ForeignKeyField
db = SqliteDatabase(None)
logger = logging.getLogger(__name__)
class User(Model):
name = CharField()
@staticmethod
def current():
return User.get(User.name == 'q')
class Meta:
database = db
class Vote(Model):
# references a Mopidy track
track_uri = CharField()
user = ForeignKeyField(User, related_name='voter')
timestamp = DateTimeField()
class Meta:
database = db
def init(db_file):
# Create db
db.init(db_file)
# Create tables
if not Vote.table_exists():
Vote.create_table()
if not User.table_exists():
User.create_table()
# create dummy user
User(name="q").save()
Create model for session, add properties for user
|
"""
Models for the Jukebox application
User - All users
Vote - Votes on songs
"""
import datetime
import logging
from peewee import SqliteDatabase, Model, CharField, DateTimeField, ForeignKeyField, UUIDField
db = SqliteDatabase(None)
logger = logging.getLogger(__name__)
class User(Model):
id = CharField(primary_key=True)
name = CharField()
picture = CharField()
email = CharField()
@staticmethod
def current():
return User.get(User.name == 'q')
class Meta:
database = db
class Vote(Model):
# references a Mopidy track
track_uri = CharField()
user = ForeignKeyField(User, related_name='voter')
timestamp = DateTimeField()
class Meta:
database = db
class Session(Model):
user = ForeignKeyField(User)
secret = UUIDField()
expires = DateTimeField(default=datetime.datetime.now() + datetime.timedelta(days=30)) # expires after 30 days
class Meta:
database = db
def init(db_file):
# Create db
db.init(db_file)
# Create tables
if not Vote.table_exists():
Vote.create_table()
if not User.table_exists():
User.create_table()
if not Session.table_exists():
Session.create_table()
|
<commit_before>"""
Models for the Jukebox application
User - All users
Vote - Votes on songs
"""
import logging
from peewee import SqliteDatabase, Model, CharField, DateTimeField, ForeignKeyField
db = SqliteDatabase(None)
logger = logging.getLogger(__name__)
class User(Model):
name = CharField()
@staticmethod
def current():
return User.get(User.name == 'q')
class Meta:
database = db
class Vote(Model):
# references a Mopidy track
track_uri = CharField()
user = ForeignKeyField(User, related_name='voter')
timestamp = DateTimeField()
class Meta:
database = db
def init(db_file):
# Create db
db.init(db_file)
# Create tables
if not Vote.table_exists():
Vote.create_table()
if not User.table_exists():
User.create_table()
# create dummy user
User(name="q").save()
<commit_msg>Create model for session, add properties for user<commit_after>
|
"""
Models for the Jukebox application
User - All users
Vote - Votes on songs
"""
import datetime
import logging
from peewee import SqliteDatabase, Model, CharField, DateTimeField, ForeignKeyField, UUIDField
db = SqliteDatabase(None)
logger = logging.getLogger(__name__)
class User(Model):
id = CharField(primary_key=True)
name = CharField()
picture = CharField()
email = CharField()
@staticmethod
def current():
return User.get(User.name == 'q')
class Meta:
database = db
class Vote(Model):
# references a Mopidy track
track_uri = CharField()
user = ForeignKeyField(User, related_name='voter')
timestamp = DateTimeField()
class Meta:
database = db
class Session(Model):
user = ForeignKeyField(User)
secret = UUIDField()
expires = DateTimeField(default=datetime.datetime.now() + datetime.timedelta(days=30)) # expires after 30 days
class Meta:
database = db
def init(db_file):
# Create db
db.init(db_file)
# Create tables
if not Vote.table_exists():
Vote.create_table()
if not User.table_exists():
User.create_table()
if not Session.table_exists():
Session.create_table()
|
"""
Models for the Jukebox application
User - All users
Vote - Votes on songs
"""
import logging
from peewee import SqliteDatabase, Model, CharField, DateTimeField, ForeignKeyField
db = SqliteDatabase(None)
logger = logging.getLogger(__name__)
class User(Model):
name = CharField()
@staticmethod
def current():
return User.get(User.name == 'q')
class Meta:
database = db
class Vote(Model):
# references a Mopidy track
track_uri = CharField()
user = ForeignKeyField(User, related_name='voter')
timestamp = DateTimeField()
class Meta:
database = db
def init(db_file):
# Create db
db.init(db_file)
# Create tables
if not Vote.table_exists():
Vote.create_table()
if not User.table_exists():
User.create_table()
# create dummy user
User(name="q").save()
Create model for session, add properties for user"""
Models for the Jukebox application
User - All users
Vote - Votes on songs
"""
import datetime
import logging
from peewee import SqliteDatabase, Model, CharField, DateTimeField, ForeignKeyField, UUIDField
db = SqliteDatabase(None)
logger = logging.getLogger(__name__)
class User(Model):
id = CharField(primary_key=True)
name = CharField()
picture = CharField()
email = CharField()
@staticmethod
def current():
return User.get(User.name == 'q')
class Meta:
database = db
class Vote(Model):
# references a Mopidy track
track_uri = CharField()
user = ForeignKeyField(User, related_name='voter')
timestamp = DateTimeField()
class Meta:
database = db
class Session(Model):
user = ForeignKeyField(User)
secret = UUIDField()
expires = DateTimeField(default=datetime.datetime.now() + datetime.timedelta(days=30)) # expires after 30 days
class Meta:
database = db
def init(db_file):
# Create db
db.init(db_file)
# Create tables
if not Vote.table_exists():
Vote.create_table()
if not User.table_exists():
User.create_table()
if not Session.table_exists():
Session.create_table()
|
<commit_before>"""
Models for the Jukebox application
User - All users
Vote - Votes on songs
"""
import logging
from peewee import SqliteDatabase, Model, CharField, DateTimeField, ForeignKeyField
db = SqliteDatabase(None)
logger = logging.getLogger(__name__)
class User(Model):
name = CharField()
@staticmethod
def current():
return User.get(User.name == 'q')
class Meta:
database = db
class Vote(Model):
# references a Mopidy track
track_uri = CharField()
user = ForeignKeyField(User, related_name='voter')
timestamp = DateTimeField()
class Meta:
database = db
def init(db_file):
# Create db
db.init(db_file)
# Create tables
if not Vote.table_exists():
Vote.create_table()
if not User.table_exists():
User.create_table()
# create dummy user
User(name="q").save()
<commit_msg>Create model for session, add properties for user<commit_after>"""
Models for the Jukebox application
User - All users
Vote - Votes on songs
"""
import datetime
import logging
from peewee import SqliteDatabase, Model, CharField, DateTimeField, ForeignKeyField, UUIDField
db = SqliteDatabase(None)
logger = logging.getLogger(__name__)
class User(Model):
id = CharField(primary_key=True)
name = CharField()
picture = CharField()
email = CharField()
@staticmethod
def current():
return User.get(User.name == 'q')
class Meta:
database = db
class Vote(Model):
# references a Mopidy track
track_uri = CharField()
user = ForeignKeyField(User, related_name='voter')
timestamp = DateTimeField()
class Meta:
database = db
class Session(Model):
user = ForeignKeyField(User)
secret = UUIDField()
expires = DateTimeField(default=datetime.datetime.now() + datetime.timedelta(days=30)) # expires after 30 days
class Meta:
database = db
def init(db_file):
# Create db
db.init(db_file)
# Create tables
if not Vote.table_exists():
Vote.create_table()
if not User.table_exists():
User.create_table()
if not Session.table_exists():
Session.create_table()
|
c7650f69e1a1d7ff16e72a741b329b32636a5a3d
|
lizard_apps/views.py
|
lizard_apps/views.py
|
# -*- coding: utf-8 -*-
# (c) Nelen & Schuurmans, see LICENSE.rst.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from django.views.generic import TemplateView
from lizard_apps.models import Screen
class AppScreenView(TemplateView):
content_type = "application/javascript"
template_name = "lizard_apps/script.js"
def screen(self):
return Screen.objects.get(slug=self.kwargs['slug'])
|
# -*- coding: utf-8 -*-
# (c) Nelen & Schuurmans, see LICENSE.rst.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from django.views.generic import TemplateView
from django.shortcuts import get_object_or_404
from lizard_apps.models import Screen
class AppScreenView(TemplateView):
content_type = "application/javascript"
template_name = "lizard_apps/script.js"
def screen(self):
return get_object_or_404(Screen, slug=self.kwargs['slug'])
|
Return 404 instead of incorrect JS file when screen does not exist.
|
Return 404 instead of incorrect JS file when screen does not exist.
|
Python
|
mit
|
lizardsystem/lizard-apps,lizardsystem/lizard-apps
|
# -*- coding: utf-8 -*-
# (c) Nelen & Schuurmans, see LICENSE.rst.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from django.views.generic import TemplateView
from lizard_apps.models import Screen
class AppScreenView(TemplateView):
content_type = "application/javascript"
template_name = "lizard_apps/script.js"
def screen(self):
return Screen.objects.get(slug=self.kwargs['slug'])
Return 404 instead of incorrect JS file when screen does not exist.
|
# -*- coding: utf-8 -*-
# (c) Nelen & Schuurmans, see LICENSE.rst.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from django.views.generic import TemplateView
from django.shortcuts import get_object_or_404
from lizard_apps.models import Screen
class AppScreenView(TemplateView):
content_type = "application/javascript"
template_name = "lizard_apps/script.js"
def screen(self):
return get_object_or_404(Screen, slug=self.kwargs['slug'])
|
<commit_before># -*- coding: utf-8 -*-
# (c) Nelen & Schuurmans, see LICENSE.rst.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from django.views.generic import TemplateView
from lizard_apps.models import Screen
class AppScreenView(TemplateView):
content_type = "application/javascript"
template_name = "lizard_apps/script.js"
def screen(self):
return Screen.objects.get(slug=self.kwargs['slug'])
<commit_msg>Return 404 instead of incorrect JS file when screen does not exist.<commit_after>
|
# -*- coding: utf-8 -*-
# (c) Nelen & Schuurmans, see LICENSE.rst.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from django.views.generic import TemplateView
from django.shortcuts import get_object_or_404
from lizard_apps.models import Screen
class AppScreenView(TemplateView):
content_type = "application/javascript"
template_name = "lizard_apps/script.js"
def screen(self):
return get_object_or_404(Screen, slug=self.kwargs['slug'])
|
# -*- coding: utf-8 -*-
# (c) Nelen & Schuurmans, see LICENSE.rst.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from django.views.generic import TemplateView
from lizard_apps.models import Screen
class AppScreenView(TemplateView):
content_type = "application/javascript"
template_name = "lizard_apps/script.js"
def screen(self):
return Screen.objects.get(slug=self.kwargs['slug'])
Return 404 instead of incorrect JS file when screen does not exist.# -*- coding: utf-8 -*-
# (c) Nelen & Schuurmans, see LICENSE.rst.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from django.views.generic import TemplateView
from django.shortcuts import get_object_or_404
from lizard_apps.models import Screen
class AppScreenView(TemplateView):
content_type = "application/javascript"
template_name = "lizard_apps/script.js"
def screen(self):
return get_object_or_404(Screen, slug=self.kwargs['slug'])
|
<commit_before># -*- coding: utf-8 -*-
# (c) Nelen & Schuurmans, see LICENSE.rst.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from django.views.generic import TemplateView
from lizard_apps.models import Screen
class AppScreenView(TemplateView):
content_type = "application/javascript"
template_name = "lizard_apps/script.js"
def screen(self):
return Screen.objects.get(slug=self.kwargs['slug'])
<commit_msg>Return 404 instead of incorrect JS file when screen does not exist.<commit_after># -*- coding: utf-8 -*-
# (c) Nelen & Schuurmans, see LICENSE.rst.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from django.views.generic import TemplateView
from django.shortcuts import get_object_or_404
from lizard_apps.models import Screen
class AppScreenView(TemplateView):
content_type = "application/javascript"
template_name = "lizard_apps/script.js"
def screen(self):
return get_object_or_404(Screen, slug=self.kwargs['slug'])
|
360bdaa2df7673bc2090476df077c86c6f7c5633
|
utils/exceptions.py
|
utils/exceptions.py
|
class ResponseError(Exception):
"""For throwing in case of a non-200 response status."""
def __init__(self, *args, code=None, **kwargs):
self.code = code
super().__init__(*args, **kwargs)
|
class ResponseError(Exception):
"""For throwing in case of a non-200 response status."""
def __init__(self, code=None, *args):
self.code = code
super().__init__(code, *args)
|
Change constructor to be more appropriate
|
Change constructor to be more appropriate
|
Python
|
mit
|
BeatButton/beattie-bot,BeatButton/beattie
|
class ResponseError(Exception):
"""For throwing in case of a non-200 response status."""
def __init__(self, *args, code=None, **kwargs):
self.code = code
super().__init__(*args, **kwargs)
Change constructor to be more appropriate
|
class ResponseError(Exception):
"""For throwing in case of a non-200 response status."""
def __init__(self, code=None, *args):
self.code = code
super().__init__(code, *args)
|
<commit_before>class ResponseError(Exception):
"""For throwing in case of a non-200 response status."""
def __init__(self, *args, code=None, **kwargs):
self.code = code
super().__init__(*args, **kwargs)
<commit_msg>Change constructor to be more appropriate<commit_after>
|
class ResponseError(Exception):
"""For throwing in case of a non-200 response status."""
def __init__(self, code=None, *args):
self.code = code
super().__init__(code, *args)
|
class ResponseError(Exception):
"""For throwing in case of a non-200 response status."""
def __init__(self, *args, code=None, **kwargs):
self.code = code
super().__init__(*args, **kwargs)
Change constructor to be more appropriateclass ResponseError(Exception):
"""For throwing in case of a non-200 response status."""
def __init__(self, code=None, *args):
self.code = code
super().__init__(code, *args)
|
<commit_before>class ResponseError(Exception):
"""For throwing in case of a non-200 response status."""
def __init__(self, *args, code=None, **kwargs):
self.code = code
super().__init__(*args, **kwargs)
<commit_msg>Change constructor to be more appropriate<commit_after>class ResponseError(Exception):
"""For throwing in case of a non-200 response status."""
def __init__(self, code=None, *args):
self.code = code
super().__init__(code, *args)
|
597ad813ca91a2fe71f96c0843e2631059a88358
|
debug_toolbar/panels/request_vars.py
|
debug_toolbar/panels/request_vars.py
|
from django.template.loader import render_to_string
from debug_toolbar.panels import DebugPanel
class RequestVarsDebugPanel(DebugPanel):
"""
A panel to display request variables (POST/GET, session, cookies).
"""
name = 'RequestVars'
has_content = True
def nav_title(self):
return 'Request Vars'
def title(self):
return 'Request Vars'
def url(self):
return ''
def process_request(self, request):
self.request = request
def content(self):
context = {
'get': [(k, self.request.GET.getlist(k)) for k in self.request.GET.iterkeys()],
'post': [(k, self.request.POST.getlist(k)) for k in self.request.POST.iterkeys()],
'session': [(k, self.request.session.get(k)) for k in self.request.session.iterkeys()],
'cookies': [(k, self.request.COOKIES.get(k)) for k in self.request.COOKIES.iterkeys()],
}
return render_to_string('debug_toolbar/panels/request_vars.html', context)
|
from django.template.loader import render_to_string
from debug_toolbar.panels import DebugPanel
class RequestVarsDebugPanel(DebugPanel):
"""
A panel to display request variables (POST/GET, session, cookies).
"""
name = 'RequestVars'
has_content = True
def nav_title(self):
return 'Request Vars'
def title(self):
return 'Request Vars'
def url(self):
return ''
def process_request(self, request):
self.request = request
def content(self):
context = {
'get': [(k, self.request.GET.getlist(k)) for k in self.request.GET.iterkeys()],
'post': [(k, self.request.POST.getlist(k)) for k in self.request.POST.iterkeys()],
'cookies': [(k, self.request.COOKIES.get(k)) for k in self.request.COOKIES.iterkeys()],
}
if hasattr(self.request, 'session'):
context['session'] = [(k, self.request.session.get(k)) for k in self.request.session.iterkeys()],
return render_to_string('debug_toolbar/panels/request_vars.html', context)
|
Allow request vars to work even with disabled session middleware.
|
Allow request vars to work even with disabled session middleware.
|
Python
|
bsd-3-clause
|
alex/django-debug-toolbar,Endika/django-debug-toolbar,spookylukey/django-debug-toolbar,stored/django-debug-toolbar,ivelum/django-debug-toolbar,stored/django-debug-toolbar,peap/django-debug-toolbar,ivelum/django-debug-toolbar,ivelum/django-debug-toolbar,spookylukey/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,calvinpy/django-debug-toolbar,Endika/django-debug-toolbar,peap/django-debug-toolbar,lincolnloop/django-debug-logging,calvinpy/django-debug-toolbar,seperman/django-debug-toolbar,guilhermetavares/django-debug-toolbar,tim-schilling/django-debug-toolbar,ChristosChristofidis/django-debug-toolbar,sidja/django-debug-toolbar,lamby/pkg-python-django-debug-toolbar,jazzband/django-debug-toolbar,msaelices/django-debug-toolbar,msaelices/django-debug-toolbar,Endika/django-debug-toolbar,guilhermetavares/django-debug-toolbar,megcunningham/django-debug-toolbar,seperman/django-debug-toolbar,stored/django-debug-toolbar,megcunningham/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,binarydud/django-debug-toolbar,barseghyanartur/django-debug-toolbar,megcunningham/django-debug-toolbar,peap/django-debug-toolbar,jazzband/django-debug-toolbar,tim-schilling/django-debug-toolbar,pevzi/django-debug-toolbar,pevzi/django-debug-toolbar,binarydud/django-debug-toolbar,jazzband/django-debug-toolbar,lamby/pkg-python-django-debug-toolbar,spookylukey/django-debug-toolbar,sidja/django-debug-toolbar,ChristosChristofidis/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,sidja/django-debug-toolbar,seperman/django-debug-toolbar,tim-schilling/django-debug-toolbar,barseghyanartur/django-debug-toolbar,lamby/pkg-python-django-debug-toolbar,pevzi/django-debug-toolbar,ChristosChristofidis/django-debug-toolbar,calvinpy/django-debug-toolbar,alex/django-debug-toolbar,lincolnloop/django-debug-logging,barseghyanartur/django-debug-toolbar,guilhermetavares/django-debug-toolbar
|
from django.template.loader import render_to_string
from debug_toolbar.panels import DebugPanel
class RequestVarsDebugPanel(DebugPanel):
"""
A panel to display request variables (POST/GET, session, cookies).
"""
name = 'RequestVars'
has_content = True
def nav_title(self):
return 'Request Vars'
def title(self):
return 'Request Vars'
def url(self):
return ''
def process_request(self, request):
self.request = request
def content(self):
context = {
'get': [(k, self.request.GET.getlist(k)) for k in self.request.GET.iterkeys()],
'post': [(k, self.request.POST.getlist(k)) for k in self.request.POST.iterkeys()],
'session': [(k, self.request.session.get(k)) for k in self.request.session.iterkeys()],
'cookies': [(k, self.request.COOKIES.get(k)) for k in self.request.COOKIES.iterkeys()],
}
return render_to_string('debug_toolbar/panels/request_vars.html', context)Allow request vars to work even with disabled session middleware.
|
from django.template.loader import render_to_string
from debug_toolbar.panels import DebugPanel
class RequestVarsDebugPanel(DebugPanel):
"""
A panel to display request variables (POST/GET, session, cookies).
"""
name = 'RequestVars'
has_content = True
def nav_title(self):
return 'Request Vars'
def title(self):
return 'Request Vars'
def url(self):
return ''
def process_request(self, request):
self.request = request
def content(self):
context = {
'get': [(k, self.request.GET.getlist(k)) for k in self.request.GET.iterkeys()],
'post': [(k, self.request.POST.getlist(k)) for k in self.request.POST.iterkeys()],
'cookies': [(k, self.request.COOKIES.get(k)) for k in self.request.COOKIES.iterkeys()],
}
if hasattr(self.request, 'session'):
context['session'] = [(k, self.request.session.get(k)) for k in self.request.session.iterkeys()],
return render_to_string('debug_toolbar/panels/request_vars.html', context)
|
<commit_before>from django.template.loader import render_to_string
from debug_toolbar.panels import DebugPanel
class RequestVarsDebugPanel(DebugPanel):
"""
A panel to display request variables (POST/GET, session, cookies).
"""
name = 'RequestVars'
has_content = True
def nav_title(self):
return 'Request Vars'
def title(self):
return 'Request Vars'
def url(self):
return ''
def process_request(self, request):
self.request = request
def content(self):
context = {
'get': [(k, self.request.GET.getlist(k)) for k in self.request.GET.iterkeys()],
'post': [(k, self.request.POST.getlist(k)) for k in self.request.POST.iterkeys()],
'session': [(k, self.request.session.get(k)) for k in self.request.session.iterkeys()],
'cookies': [(k, self.request.COOKIES.get(k)) for k in self.request.COOKIES.iterkeys()],
}
return render_to_string('debug_toolbar/panels/request_vars.html', context)<commit_msg>Allow request vars to work even with disabled session middleware.<commit_after>
|
from django.template.loader import render_to_string
from debug_toolbar.panels import DebugPanel
class RequestVarsDebugPanel(DebugPanel):
"""
A panel to display request variables (POST/GET, session, cookies).
"""
name = 'RequestVars'
has_content = True
def nav_title(self):
return 'Request Vars'
def title(self):
return 'Request Vars'
def url(self):
return ''
def process_request(self, request):
self.request = request
def content(self):
context = {
'get': [(k, self.request.GET.getlist(k)) for k in self.request.GET.iterkeys()],
'post': [(k, self.request.POST.getlist(k)) for k in self.request.POST.iterkeys()],
'cookies': [(k, self.request.COOKIES.get(k)) for k in self.request.COOKIES.iterkeys()],
}
if hasattr(self.request, 'session'):
context['session'] = [(k, self.request.session.get(k)) for k in self.request.session.iterkeys()],
return render_to_string('debug_toolbar/panels/request_vars.html', context)
|
from django.template.loader import render_to_string
from debug_toolbar.panels import DebugPanel
class RequestVarsDebugPanel(DebugPanel):
"""
A panel to display request variables (POST/GET, session, cookies).
"""
name = 'RequestVars'
has_content = True
def nav_title(self):
return 'Request Vars'
def title(self):
return 'Request Vars'
def url(self):
return ''
def process_request(self, request):
self.request = request
def content(self):
context = {
'get': [(k, self.request.GET.getlist(k)) for k in self.request.GET.iterkeys()],
'post': [(k, self.request.POST.getlist(k)) for k in self.request.POST.iterkeys()],
'session': [(k, self.request.session.get(k)) for k in self.request.session.iterkeys()],
'cookies': [(k, self.request.COOKIES.get(k)) for k in self.request.COOKIES.iterkeys()],
}
return render_to_string('debug_toolbar/panels/request_vars.html', context)Allow request vars to work even with disabled session middleware.from django.template.loader import render_to_string
from debug_toolbar.panels import DebugPanel
class RequestVarsDebugPanel(DebugPanel):
"""
A panel to display request variables (POST/GET, session, cookies).
"""
name = 'RequestVars'
has_content = True
def nav_title(self):
return 'Request Vars'
def title(self):
return 'Request Vars'
def url(self):
return ''
def process_request(self, request):
self.request = request
def content(self):
context = {
'get': [(k, self.request.GET.getlist(k)) for k in self.request.GET.iterkeys()],
'post': [(k, self.request.POST.getlist(k)) for k in self.request.POST.iterkeys()],
'cookies': [(k, self.request.COOKIES.get(k)) for k in self.request.COOKIES.iterkeys()],
}
if hasattr(self.request, 'session'):
context['session'] = [(k, self.request.session.get(k)) for k in self.request.session.iterkeys()],
return render_to_string('debug_toolbar/panels/request_vars.html', context)
|
<commit_before>from django.template.loader import render_to_string
from debug_toolbar.panels import DebugPanel
class RequestVarsDebugPanel(DebugPanel):
"""
A panel to display request variables (POST/GET, session, cookies).
"""
name = 'RequestVars'
has_content = True
def nav_title(self):
return 'Request Vars'
def title(self):
return 'Request Vars'
def url(self):
return ''
def process_request(self, request):
self.request = request
def content(self):
context = {
'get': [(k, self.request.GET.getlist(k)) for k in self.request.GET.iterkeys()],
'post': [(k, self.request.POST.getlist(k)) for k in self.request.POST.iterkeys()],
'session': [(k, self.request.session.get(k)) for k in self.request.session.iterkeys()],
'cookies': [(k, self.request.COOKIES.get(k)) for k in self.request.COOKIES.iterkeys()],
}
return render_to_string('debug_toolbar/panels/request_vars.html', context)<commit_msg>Allow request vars to work even with disabled session middleware.<commit_after>from django.template.loader import render_to_string
from debug_toolbar.panels import DebugPanel
class RequestVarsDebugPanel(DebugPanel):
"""
A panel to display request variables (POST/GET, session, cookies).
"""
name = 'RequestVars'
has_content = True
def nav_title(self):
return 'Request Vars'
def title(self):
return 'Request Vars'
def url(self):
return ''
def process_request(self, request):
self.request = request
def content(self):
context = {
'get': [(k, self.request.GET.getlist(k)) for k in self.request.GET.iterkeys()],
'post': [(k, self.request.POST.getlist(k)) for k in self.request.POST.iterkeys()],
'cookies': [(k, self.request.COOKIES.get(k)) for k in self.request.COOKIES.iterkeys()],
}
if hasattr(self.request, 'session'):
context['session'] = [(k, self.request.session.get(k)) for k in self.request.session.iterkeys()],
return render_to_string('debug_toolbar/panels/request_vars.html', context)
|
b1f1c95ba28546d166568b5bb202dfa87e5edb3b
|
protractor/test.py
|
protractor/test.py
|
# -*- coding: utf-8 -*-
import os
import subprocess
class ProtractorTestCaseMixin(object):
protractor_conf = 'protractor.conf.js'
suite = None
specs = None
@classmethod
def setUpClass(cls):
super(ProtractorTestCaseMixin, cls).setUpClass()
with open(os.devnull, 'wb') as f:
subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f)
cls.webdriver = subprocess.Popen(
['webdriver-manager', 'start'], stdout=f, stderr=f)
@classmethod
def tearDownClass(cls):
cls.webdriver.kill()
super(ProtractorTestCaseMixin, cls).tearDownClass()
def get_protractor_params(self):
"""A hook for adding params that protractor will receive."""
return {
'live_server_url': self.live_server_url
}
def test_run(self):
protractor_command = 'protractor {}'.format(self.protractor_conf)
protractor_command += ' --baseUrl {}'.format(self.live_server_url)
if self.specs:
protractor_command += ' --specs {}'.format(','.join(self.specs))
if self.suite:
protractor_command += ' --suite {}'.format(self.suite)
for key, value in self.get_protractor_params().items():
protractor_command += ' --params.{key}={value}'.format(
key=key, value=value
)
return_code = subprocess.call(protractor_command.split())
self.assertEqual(return_code, 0)
|
# -*- coding: utf-8 -*-
import os
import subprocess
class ProtractorTestCaseMixin(object):
protractor_conf = 'protractor.conf.js'
suite = None
specs = None
@classmethod
def setUpClass(cls):
super(ProtractorTestCaseMixin, cls).setUpClass()
with open(os.devnull, 'wb') as f:
subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f)
cls.webdriver = subprocess.Popen(
['webdriver-manager', 'start'], stdout=f, stderr=f)
@classmethod
def tearDownClass(cls):
cls.webdriver.kill()
super(ProtractorTestCaseMixin, cls).tearDownClass()
def get_protractor_params(self):
"""A hook for adding params that protractor will receive."""
return {
'live_server_url': self.live_server_url
}
def test_run(self):
protractor_command = 'protractor {}'.format(self.protractor_conf)
protractor_command += ' --baseUrl {}'.format(self.live_server_url)
if self.specs:
protractor_command += ' --specs {}'.format(','.join(self.specs))
if self.suite:
protractor_command += ' --suite {}'.format(self.suite)
for key, value in self.get_protractor_params().items():
protractor_command += ' --params.{key}="{value}"'.format(
key=key, value=value
)
return_code = subprocess.call(protractor_command.split())
self.assertEqual(return_code, 0)
|
Add quotes so all params are strings
|
Add quotes so all params are strings
|
Python
|
mit
|
jpulec/django-protractor
|
# -*- coding: utf-8 -*-
import os
import subprocess
class ProtractorTestCaseMixin(object):
protractor_conf = 'protractor.conf.js'
suite = None
specs = None
@classmethod
def setUpClass(cls):
super(ProtractorTestCaseMixin, cls).setUpClass()
with open(os.devnull, 'wb') as f:
subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f)
cls.webdriver = subprocess.Popen(
['webdriver-manager', 'start'], stdout=f, stderr=f)
@classmethod
def tearDownClass(cls):
cls.webdriver.kill()
super(ProtractorTestCaseMixin, cls).tearDownClass()
def get_protractor_params(self):
"""A hook for adding params that protractor will receive."""
return {
'live_server_url': self.live_server_url
}
def test_run(self):
protractor_command = 'protractor {}'.format(self.protractor_conf)
protractor_command += ' --baseUrl {}'.format(self.live_server_url)
if self.specs:
protractor_command += ' --specs {}'.format(','.join(self.specs))
if self.suite:
protractor_command += ' --suite {}'.format(self.suite)
for key, value in self.get_protractor_params().items():
protractor_command += ' --params.{key}={value}'.format(
key=key, value=value
)
return_code = subprocess.call(protractor_command.split())
self.assertEqual(return_code, 0)
Add quotes so all params are strings
|
# -*- coding: utf-8 -*-
import os
import subprocess
class ProtractorTestCaseMixin(object):
protractor_conf = 'protractor.conf.js'
suite = None
specs = None
@classmethod
def setUpClass(cls):
super(ProtractorTestCaseMixin, cls).setUpClass()
with open(os.devnull, 'wb') as f:
subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f)
cls.webdriver = subprocess.Popen(
['webdriver-manager', 'start'], stdout=f, stderr=f)
@classmethod
def tearDownClass(cls):
cls.webdriver.kill()
super(ProtractorTestCaseMixin, cls).tearDownClass()
def get_protractor_params(self):
"""A hook for adding params that protractor will receive."""
return {
'live_server_url': self.live_server_url
}
def test_run(self):
protractor_command = 'protractor {}'.format(self.protractor_conf)
protractor_command += ' --baseUrl {}'.format(self.live_server_url)
if self.specs:
protractor_command += ' --specs {}'.format(','.join(self.specs))
if self.suite:
protractor_command += ' --suite {}'.format(self.suite)
for key, value in self.get_protractor_params().items():
protractor_command += ' --params.{key}="{value}"'.format(
key=key, value=value
)
return_code = subprocess.call(protractor_command.split())
self.assertEqual(return_code, 0)
|
<commit_before># -*- coding: utf-8 -*-
import os
import subprocess
class ProtractorTestCaseMixin(object):
protractor_conf = 'protractor.conf.js'
suite = None
specs = None
@classmethod
def setUpClass(cls):
super(ProtractorTestCaseMixin, cls).setUpClass()
with open(os.devnull, 'wb') as f:
subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f)
cls.webdriver = subprocess.Popen(
['webdriver-manager', 'start'], stdout=f, stderr=f)
@classmethod
def tearDownClass(cls):
cls.webdriver.kill()
super(ProtractorTestCaseMixin, cls).tearDownClass()
def get_protractor_params(self):
"""A hook for adding params that protractor will receive."""
return {
'live_server_url': self.live_server_url
}
def test_run(self):
protractor_command = 'protractor {}'.format(self.protractor_conf)
protractor_command += ' --baseUrl {}'.format(self.live_server_url)
if self.specs:
protractor_command += ' --specs {}'.format(','.join(self.specs))
if self.suite:
protractor_command += ' --suite {}'.format(self.suite)
for key, value in self.get_protractor_params().items():
protractor_command += ' --params.{key}={value}'.format(
key=key, value=value
)
return_code = subprocess.call(protractor_command.split())
self.assertEqual(return_code, 0)
<commit_msg>Add quotes so all params are strings<commit_after>
|
# -*- coding: utf-8 -*-
import os
import subprocess
class ProtractorTestCaseMixin(object):
protractor_conf = 'protractor.conf.js'
suite = None
specs = None
@classmethod
def setUpClass(cls):
super(ProtractorTestCaseMixin, cls).setUpClass()
with open(os.devnull, 'wb') as f:
subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f)
cls.webdriver = subprocess.Popen(
['webdriver-manager', 'start'], stdout=f, stderr=f)
@classmethod
def tearDownClass(cls):
cls.webdriver.kill()
super(ProtractorTestCaseMixin, cls).tearDownClass()
def get_protractor_params(self):
"""A hook for adding params that protractor will receive."""
return {
'live_server_url': self.live_server_url
}
def test_run(self):
protractor_command = 'protractor {}'.format(self.protractor_conf)
protractor_command += ' --baseUrl {}'.format(self.live_server_url)
if self.specs:
protractor_command += ' --specs {}'.format(','.join(self.specs))
if self.suite:
protractor_command += ' --suite {}'.format(self.suite)
for key, value in self.get_protractor_params().items():
protractor_command += ' --params.{key}="{value}"'.format(
key=key, value=value
)
return_code = subprocess.call(protractor_command.split())
self.assertEqual(return_code, 0)
|
# -*- coding: utf-8 -*-
import os
import subprocess
class ProtractorTestCaseMixin(object):
protractor_conf = 'protractor.conf.js'
suite = None
specs = None
@classmethod
def setUpClass(cls):
super(ProtractorTestCaseMixin, cls).setUpClass()
with open(os.devnull, 'wb') as f:
subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f)
cls.webdriver = subprocess.Popen(
['webdriver-manager', 'start'], stdout=f, stderr=f)
@classmethod
def tearDownClass(cls):
cls.webdriver.kill()
super(ProtractorTestCaseMixin, cls).tearDownClass()
def get_protractor_params(self):
"""A hook for adding params that protractor will receive."""
return {
'live_server_url': self.live_server_url
}
def test_run(self):
protractor_command = 'protractor {}'.format(self.protractor_conf)
protractor_command += ' --baseUrl {}'.format(self.live_server_url)
if self.specs:
protractor_command += ' --specs {}'.format(','.join(self.specs))
if self.suite:
protractor_command += ' --suite {}'.format(self.suite)
for key, value in self.get_protractor_params().items():
protractor_command += ' --params.{key}={value}'.format(
key=key, value=value
)
return_code = subprocess.call(protractor_command.split())
self.assertEqual(return_code, 0)
Add quotes so all params are strings# -*- coding: utf-8 -*-
import os
import subprocess
class ProtractorTestCaseMixin(object):
protractor_conf = 'protractor.conf.js'
suite = None
specs = None
@classmethod
def setUpClass(cls):
super(ProtractorTestCaseMixin, cls).setUpClass()
with open(os.devnull, 'wb') as f:
subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f)
cls.webdriver = subprocess.Popen(
['webdriver-manager', 'start'], stdout=f, stderr=f)
@classmethod
def tearDownClass(cls):
cls.webdriver.kill()
super(ProtractorTestCaseMixin, cls).tearDownClass()
def get_protractor_params(self):
"""A hook for adding params that protractor will receive."""
return {
'live_server_url': self.live_server_url
}
def test_run(self):
protractor_command = 'protractor {}'.format(self.protractor_conf)
protractor_command += ' --baseUrl {}'.format(self.live_server_url)
if self.specs:
protractor_command += ' --specs {}'.format(','.join(self.specs))
if self.suite:
protractor_command += ' --suite {}'.format(self.suite)
for key, value in self.get_protractor_params().items():
protractor_command += ' --params.{key}="{value}"'.format(
key=key, value=value
)
return_code = subprocess.call(protractor_command.split())
self.assertEqual(return_code, 0)
|
<commit_before># -*- coding: utf-8 -*-
import os
import subprocess
class ProtractorTestCaseMixin(object):
protractor_conf = 'protractor.conf.js'
suite = None
specs = None
@classmethod
def setUpClass(cls):
super(ProtractorTestCaseMixin, cls).setUpClass()
with open(os.devnull, 'wb') as f:
subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f)
cls.webdriver = subprocess.Popen(
['webdriver-manager', 'start'], stdout=f, stderr=f)
@classmethod
def tearDownClass(cls):
cls.webdriver.kill()
super(ProtractorTestCaseMixin, cls).tearDownClass()
def get_protractor_params(self):
"""A hook for adding params that protractor will receive."""
return {
'live_server_url': self.live_server_url
}
def test_run(self):
protractor_command = 'protractor {}'.format(self.protractor_conf)
protractor_command += ' --baseUrl {}'.format(self.live_server_url)
if self.specs:
protractor_command += ' --specs {}'.format(','.join(self.specs))
if self.suite:
protractor_command += ' --suite {}'.format(self.suite)
for key, value in self.get_protractor_params().items():
protractor_command += ' --params.{key}={value}'.format(
key=key, value=value
)
return_code = subprocess.call(protractor_command.split())
self.assertEqual(return_code, 0)
<commit_msg>Add quotes so all params are strings<commit_after># -*- coding: utf-8 -*-
import os
import subprocess
class ProtractorTestCaseMixin(object):
protractor_conf = 'protractor.conf.js'
suite = None
specs = None
@classmethod
def setUpClass(cls):
super(ProtractorTestCaseMixin, cls).setUpClass()
with open(os.devnull, 'wb') as f:
subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f)
cls.webdriver = subprocess.Popen(
['webdriver-manager', 'start'], stdout=f, stderr=f)
@classmethod
def tearDownClass(cls):
cls.webdriver.kill()
super(ProtractorTestCaseMixin, cls).tearDownClass()
def get_protractor_params(self):
"""A hook for adding params that protractor will receive."""
return {
'live_server_url': self.live_server_url
}
def test_run(self):
protractor_command = 'protractor {}'.format(self.protractor_conf)
protractor_command += ' --baseUrl {}'.format(self.live_server_url)
if self.specs:
protractor_command += ' --specs {}'.format(','.join(self.specs))
if self.suite:
protractor_command += ' --suite {}'.format(self.suite)
for key, value in self.get_protractor_params().items():
protractor_command += ' --params.{key}="{value}"'.format(
key=key, value=value
)
return_code = subprocess.call(protractor_command.split())
self.assertEqual(return_code, 0)
|
7740ff36679b13be9d63b333cff35f913e0066dc
|
python/tests/py3/test_asyncio.py
|
python/tests/py3/test_asyncio.py
|
import asyncio
import pytest
def test_hello_world(workspace):
workspace.src('main.py', r"""
import asyncio
async def main():
print('Hello, ', end='')
await asyncio.sleep(1)
print('World!')
# Python 3.7+
asyncio.run(main())
""")
r = workspace.run('python main.py')
assert r.out == 'Hello, World!'
@pytest.mark.asyncio
async def test_hello_world__pytest_asyncio(workspace):
result = await async_task('World')
assert result == 'Hello, World!'
async def async_task(input):
await asyncio.sleep(1)
return 'Hello, %s!' % input
def test_import_asyncio_not_needed_for_using_async_await_keywords(workspace):
workspace.src('main.py', r"""
async def main():
pass
print(type(main()))
""")
r = workspace.run('python main.py')
assert r.out == "<class 'coroutine'>"
assert r.err == "main.py:4: RuntimeWarning: coroutine 'main' was never awaited\n print(type(main()))"
|
import asyncio
import pytest
def test_hello_world(workspace):
workspace.src('main.py', r"""
import asyncio
async def do_something_else():
print('...', end='')
await asyncio.sleep(1)
print('!', end='')
async def say_hello_async(who):
print('Hello, ', end='')
await asyncio.sleep(1)
print(who, end='')
async def main():
await asyncio.gather(say_hello_async('World'), do_something_else())
asyncio.run(main())
""")
r = workspace.run('python main.py')
assert r.out == 'Hello, ...World!'
@pytest.mark.asyncio
async def test_hello_world__pytest_asyncio(capsys):
async def do_something_else():
print('...', end='')
await asyncio.sleep(1)
print('!', end='')
async def say_hello_async(who):
print('Hello, ', end='')
await asyncio.sleep(1)
print(who, end='')
await asyncio.gather(say_hello_async('World'), do_something_else())
out, _ = capsys.readouterr()
assert out == 'Hello, ...World!'
def test_import_asyncio_not_needed_for_using_async_await_keywords(workspace):
workspace.src('main.py', r"""
async def main():
pass
print(type(main()))
""")
r = workspace.run('python main.py')
assert r.out == "<class 'coroutine'>"
assert r.err == "main.py:4: RuntimeWarning: coroutine 'main' was never awaited\n print(type(main()))"
|
Make hello world (asyncio) more involved
|
[python] Make hello world (asyncio) more involved
|
Python
|
mit
|
imsardine/learning,imsardine/learning,imsardine/learning,imsardine/learning,imsardine/learning,imsardine/learning,imsardine/learning
|
import asyncio
import pytest
def test_hello_world(workspace):
workspace.src('main.py', r"""
import asyncio
async def main():
print('Hello, ', end='')
await asyncio.sleep(1)
print('World!')
# Python 3.7+
asyncio.run(main())
""")
r = workspace.run('python main.py')
assert r.out == 'Hello, World!'
@pytest.mark.asyncio
async def test_hello_world__pytest_asyncio(workspace):
result = await async_task('World')
assert result == 'Hello, World!'
async def async_task(input):
await asyncio.sleep(1)
return 'Hello, %s!' % input
def test_import_asyncio_not_needed_for_using_async_await_keywords(workspace):
workspace.src('main.py', r"""
async def main():
pass
print(type(main()))
""")
r = workspace.run('python main.py')
assert r.out == "<class 'coroutine'>"
assert r.err == "main.py:4: RuntimeWarning: coroutine 'main' was never awaited\n print(type(main()))"
[python] Make hello world (asyncio) more involved
|
import asyncio
import pytest
def test_hello_world(workspace):
workspace.src('main.py', r"""
import asyncio
async def do_something_else():
print('...', end='')
await asyncio.sleep(1)
print('!', end='')
async def say_hello_async(who):
print('Hello, ', end='')
await asyncio.sleep(1)
print(who, end='')
async def main():
await asyncio.gather(say_hello_async('World'), do_something_else())
asyncio.run(main())
""")
r = workspace.run('python main.py')
assert r.out == 'Hello, ...World!'
@pytest.mark.asyncio
async def test_hello_world__pytest_asyncio(capsys):
async def do_something_else():
print('...', end='')
await asyncio.sleep(1)
print('!', end='')
async def say_hello_async(who):
print('Hello, ', end='')
await asyncio.sleep(1)
print(who, end='')
await asyncio.gather(say_hello_async('World'), do_something_else())
out, _ = capsys.readouterr()
assert out == 'Hello, ...World!'
def test_import_asyncio_not_needed_for_using_async_await_keywords(workspace):
workspace.src('main.py', r"""
async def main():
pass
print(type(main()))
""")
r = workspace.run('python main.py')
assert r.out == "<class 'coroutine'>"
assert r.err == "main.py:4: RuntimeWarning: coroutine 'main' was never awaited\n print(type(main()))"
|
<commit_before>import asyncio
import pytest
def test_hello_world(workspace):
workspace.src('main.py', r"""
import asyncio
async def main():
print('Hello, ', end='')
await asyncio.sleep(1)
print('World!')
# Python 3.7+
asyncio.run(main())
""")
r = workspace.run('python main.py')
assert r.out == 'Hello, World!'
@pytest.mark.asyncio
async def test_hello_world__pytest_asyncio(workspace):
result = await async_task('World')
assert result == 'Hello, World!'
async def async_task(input):
await asyncio.sleep(1)
return 'Hello, %s!' % input
def test_import_asyncio_not_needed_for_using_async_await_keywords(workspace):
workspace.src('main.py', r"""
async def main():
pass
print(type(main()))
""")
r = workspace.run('python main.py')
assert r.out == "<class 'coroutine'>"
assert r.err == "main.py:4: RuntimeWarning: coroutine 'main' was never awaited\n print(type(main()))"
<commit_msg>[python] Make hello world (asyncio) more involved<commit_after>
|
import asyncio
import pytest
def test_hello_world(workspace):
workspace.src('main.py', r"""
import asyncio
async def do_something_else():
print('...', end='')
await asyncio.sleep(1)
print('!', end='')
async def say_hello_async(who):
print('Hello, ', end='')
await asyncio.sleep(1)
print(who, end='')
async def main():
await asyncio.gather(say_hello_async('World'), do_something_else())
asyncio.run(main())
""")
r = workspace.run('python main.py')
assert r.out == 'Hello, ...World!'
@pytest.mark.asyncio
async def test_hello_world__pytest_asyncio(capsys):
async def do_something_else():
print('...', end='')
await asyncio.sleep(1)
print('!', end='')
async def say_hello_async(who):
print('Hello, ', end='')
await asyncio.sleep(1)
print(who, end='')
await asyncio.gather(say_hello_async('World'), do_something_else())
out, _ = capsys.readouterr()
assert out == 'Hello, ...World!'
def test_import_asyncio_not_needed_for_using_async_await_keywords(workspace):
workspace.src('main.py', r"""
async def main():
pass
print(type(main()))
""")
r = workspace.run('python main.py')
assert r.out == "<class 'coroutine'>"
assert r.err == "main.py:4: RuntimeWarning: coroutine 'main' was never awaited\n print(type(main()))"
|
import asyncio
import pytest
def test_hello_world(workspace):
workspace.src('main.py', r"""
import asyncio
async def main():
print('Hello, ', end='')
await asyncio.sleep(1)
print('World!')
# Python 3.7+
asyncio.run(main())
""")
r = workspace.run('python main.py')
assert r.out == 'Hello, World!'
@pytest.mark.asyncio
async def test_hello_world__pytest_asyncio(workspace):
result = await async_task('World')
assert result == 'Hello, World!'
async def async_task(input):
await asyncio.sleep(1)
return 'Hello, %s!' % input
def test_import_asyncio_not_needed_for_using_async_await_keywords(workspace):
workspace.src('main.py', r"""
async def main():
pass
print(type(main()))
""")
r = workspace.run('python main.py')
assert r.out == "<class 'coroutine'>"
assert r.err == "main.py:4: RuntimeWarning: coroutine 'main' was never awaited\n print(type(main()))"
[python] Make hello world (asyncio) more involvedimport asyncio
import pytest
def test_hello_world(workspace):
workspace.src('main.py', r"""
import asyncio
async def do_something_else():
print('...', end='')
await asyncio.sleep(1)
print('!', end='')
async def say_hello_async(who):
print('Hello, ', end='')
await asyncio.sleep(1)
print(who, end='')
async def main():
await asyncio.gather(say_hello_async('World'), do_something_else())
asyncio.run(main())
""")
r = workspace.run('python main.py')
assert r.out == 'Hello, ...World!'
@pytest.mark.asyncio
async def test_hello_world__pytest_asyncio(capsys):
async def do_something_else():
print('...', end='')
await asyncio.sleep(1)
print('!', end='')
async def say_hello_async(who):
print('Hello, ', end='')
await asyncio.sleep(1)
print(who, end='')
await asyncio.gather(say_hello_async('World'), do_something_else())
out, _ = capsys.readouterr()
assert out == 'Hello, ...World!'
def test_import_asyncio_not_needed_for_using_async_await_keywords(workspace):
workspace.src('main.py', r"""
async def main():
pass
print(type(main()))
""")
r = workspace.run('python main.py')
assert r.out == "<class 'coroutine'>"
assert r.err == "main.py:4: RuntimeWarning: coroutine 'main' was never awaited\n print(type(main()))"
|
<commit_before>import asyncio
import pytest
def test_hello_world(workspace):
workspace.src('main.py', r"""
import asyncio
async def main():
print('Hello, ', end='')
await asyncio.sleep(1)
print('World!')
# Python 3.7+
asyncio.run(main())
""")
r = workspace.run('python main.py')
assert r.out == 'Hello, World!'
@pytest.mark.asyncio
async def test_hello_world__pytest_asyncio(workspace):
result = await async_task('World')
assert result == 'Hello, World!'
async def async_task(input):
await asyncio.sleep(1)
return 'Hello, %s!' % input
def test_import_asyncio_not_needed_for_using_async_await_keywords(workspace):
workspace.src('main.py', r"""
async def main():
pass
print(type(main()))
""")
r = workspace.run('python main.py')
assert r.out == "<class 'coroutine'>"
assert r.err == "main.py:4: RuntimeWarning: coroutine 'main' was never awaited\n print(type(main()))"
<commit_msg>[python] Make hello world (asyncio) more involved<commit_after>import asyncio
import pytest
def test_hello_world(workspace):
workspace.src('main.py', r"""
import asyncio
async def do_something_else():
print('...', end='')
await asyncio.sleep(1)
print('!', end='')
async def say_hello_async(who):
print('Hello, ', end='')
await asyncio.sleep(1)
print(who, end='')
async def main():
await asyncio.gather(say_hello_async('World'), do_something_else())
asyncio.run(main())
""")
r = workspace.run('python main.py')
assert r.out == 'Hello, ...World!'
@pytest.mark.asyncio
async def test_hello_world__pytest_asyncio(capsys):
async def do_something_else():
print('...', end='')
await asyncio.sleep(1)
print('!', end='')
async def say_hello_async(who):
print('Hello, ', end='')
await asyncio.sleep(1)
print(who, end='')
await asyncio.gather(say_hello_async('World'), do_something_else())
out, _ = capsys.readouterr()
assert out == 'Hello, ...World!'
def test_import_asyncio_not_needed_for_using_async_await_keywords(workspace):
workspace.src('main.py', r"""
async def main():
pass
print(type(main()))
""")
r = workspace.run('python main.py')
assert r.out == "<class 'coroutine'>"
assert r.err == "main.py:4: RuntimeWarning: coroutine 'main' was never awaited\n print(type(main()))"
|
30fa612a2ef5ebcc6b8d84aa9b57cca098b3d8ad
|
py2neo/__init__.py
|
py2neo/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011-2014, Nigel Small
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = "Nigel Small <nigel@nigelsmall.com>"
__copyright__ = "2011-2014, Nigel Small"
__email__ = "nigel@nigelsmall.com"
__license__ = "Apache License, Version 2.0"
__package__ = "py2neo"
__version__ = "2.0.beta" # TODO: update this before release
from py2neo.batch import *
from py2neo.core import *
from py2neo.cypher import *
from py2neo.error import *
from py2neo.legacy import *
from py2neo.packages.httpstream.watch import watch
node = Node.cast
rel = Relationship.cast
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011-2014, Nigel Small
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = "Nigel Small <nigel@nigelsmall.com>"
__copyright__ = "2011-2014, Nigel Small"
__email__ = "nigel@nigelsmall.com"
__license__ = "Apache License, Version 2.0"
__package__ = "py2neo"
__version__ = "2.0.beta" # TODO: update this before release
from py2neo.batch import *
from py2neo.core import *
from py2neo.cypher import *
from py2neo.error import *
from py2neo.lang import *
from py2neo.legacy import *
from py2neo.packages.httpstream.watch import watch
node = Node.cast
rel = Relationship.cast
|
Add lang to global export list.
|
Add lang to global export list.
|
Python
|
apache-2.0
|
nigelsmall/py2neo,nicolewhite/py2neo,technige/py2neo,fpieper/py2neo,fpieper/py2neo,technige/py2neo,technige/py2neo,fpieper/py2neo,nigelsmall/py2neo,nicolewhite/py2neo,nicolewhite/py2neo
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011-2014, Nigel Small
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = "Nigel Small <nigel@nigelsmall.com>"
__copyright__ = "2011-2014, Nigel Small"
__email__ = "nigel@nigelsmall.com"
__license__ = "Apache License, Version 2.0"
__package__ = "py2neo"
__version__ = "2.0.beta" # TODO: update this before release
from py2neo.batch import *
from py2neo.core import *
from py2neo.cypher import *
from py2neo.error import *
from py2neo.legacy import *
from py2neo.packages.httpstream.watch import watch
node = Node.cast
rel = Relationship.cast
Add lang to global export list.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011-2014, Nigel Small
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = "Nigel Small <nigel@nigelsmall.com>"
__copyright__ = "2011-2014, Nigel Small"
__email__ = "nigel@nigelsmall.com"
__license__ = "Apache License, Version 2.0"
__package__ = "py2neo"
__version__ = "2.0.beta" # TODO: update this before release
from py2neo.batch import *
from py2neo.core import *
from py2neo.cypher import *
from py2neo.error import *
from py2neo.lang import *
from py2neo.legacy import *
from py2neo.packages.httpstream.watch import watch
node = Node.cast
rel = Relationship.cast
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011-2014, Nigel Small
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = "Nigel Small <nigel@nigelsmall.com>"
__copyright__ = "2011-2014, Nigel Small"
__email__ = "nigel@nigelsmall.com"
__license__ = "Apache License, Version 2.0"
__package__ = "py2neo"
__version__ = "2.0.beta" # TODO: update this before release
from py2neo.batch import *
from py2neo.core import *
from py2neo.cypher import *
from py2neo.error import *
from py2neo.legacy import *
from py2neo.packages.httpstream.watch import watch
node = Node.cast
rel = Relationship.cast
<commit_msg>Add lang to global export list.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011-2014, Nigel Small
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = "Nigel Small <nigel@nigelsmall.com>"
__copyright__ = "2011-2014, Nigel Small"
__email__ = "nigel@nigelsmall.com"
__license__ = "Apache License, Version 2.0"
__package__ = "py2neo"
__version__ = "2.0.beta" # TODO: update this before release
from py2neo.batch import *
from py2neo.core import *
from py2neo.cypher import *
from py2neo.error import *
from py2neo.lang import *
from py2neo.legacy import *
from py2neo.packages.httpstream.watch import watch
node = Node.cast
rel = Relationship.cast
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011-2014, Nigel Small
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = "Nigel Small <nigel@nigelsmall.com>"
__copyright__ = "2011-2014, Nigel Small"
__email__ = "nigel@nigelsmall.com"
__license__ = "Apache License, Version 2.0"
__package__ = "py2neo"
__version__ = "2.0.beta" # TODO: update this before release
from py2neo.batch import *
from py2neo.core import *
from py2neo.cypher import *
from py2neo.error import *
from py2neo.legacy import *
from py2neo.packages.httpstream.watch import watch
node = Node.cast
rel = Relationship.cast
Add lang to global export list.#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011-2014, Nigel Small
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = "Nigel Small <nigel@nigelsmall.com>"
__copyright__ = "2011-2014, Nigel Small"
__email__ = "nigel@nigelsmall.com"
__license__ = "Apache License, Version 2.0"
__package__ = "py2neo"
__version__ = "2.0.beta" # TODO: update this before release
from py2neo.batch import *
from py2neo.core import *
from py2neo.cypher import *
from py2neo.error import *
from py2neo.lang import *
from py2neo.legacy import *
from py2neo.packages.httpstream.watch import watch
node = Node.cast
rel = Relationship.cast
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011-2014, Nigel Small
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = "Nigel Small <nigel@nigelsmall.com>"
__copyright__ = "2011-2014, Nigel Small"
__email__ = "nigel@nigelsmall.com"
__license__ = "Apache License, Version 2.0"
__package__ = "py2neo"
__version__ = "2.0.beta" # TODO: update this before release
from py2neo.batch import *
from py2neo.core import *
from py2neo.cypher import *
from py2neo.error import *
from py2neo.legacy import *
from py2neo.packages.httpstream.watch import watch
node = Node.cast
rel = Relationship.cast
<commit_msg>Add lang to global export list.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011-2014, Nigel Small
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = "Nigel Small <nigel@nigelsmall.com>"
__copyright__ = "2011-2014, Nigel Small"
__email__ = "nigel@nigelsmall.com"
__license__ = "Apache License, Version 2.0"
__package__ = "py2neo"
__version__ = "2.0.beta" # TODO: update this before release
from py2neo.batch import *
from py2neo.core import *
from py2neo.cypher import *
from py2neo.error import *
from py2neo.lang import *
from py2neo.legacy import *
from py2neo.packages.httpstream.watch import watch
node = Node.cast
rel = Relationship.cast
|
144280ff8c656fb589e92a3f0fe5cba7ce63d85d
|
tailor/listeners/mainlistener.py
|
tailor/listeners/mainlistener.py
|
from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
pass
def enterEnumCaseName(self, ctx):
pass
def enterStructName(self, ctx):
pass
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
|
from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum names should be in UpperCamelCase')
def enterEnumCaseName(self, ctx):
pass
def enterStructName(self, ctx):
pass
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
|
Implement UpperCamelCase name check for enums
|
Implement UpperCamelCase name check for enums
|
Python
|
mit
|
sleekbyte/tailor,sleekbyte/tailor,sleekbyte/tailor,sleekbyte/tailor,sleekbyte/tailor
|
from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
pass
def enterEnumCaseName(self, ctx):
pass
def enterStructName(self, ctx):
pass
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
Implement UpperCamelCase name check for enums
|
from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum names should be in UpperCamelCase')
def enterEnumCaseName(self, ctx):
pass
def enterStructName(self, ctx):
pass
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
|
<commit_before>from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
pass
def enterEnumCaseName(self, ctx):
pass
def enterStructName(self, ctx):
pass
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
<commit_msg>Implement UpperCamelCase name check for enums<commit_after>
|
from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum names should be in UpperCamelCase')
def enterEnumCaseName(self, ctx):
pass
def enterStructName(self, ctx):
pass
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
|
from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
pass
def enterEnumCaseName(self, ctx):
pass
def enterStructName(self, ctx):
pass
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
Implement UpperCamelCase name check for enumsfrom tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum names should be in UpperCamelCase')
def enterEnumCaseName(self, ctx):
pass
def enterStructName(self, ctx):
pass
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
|
<commit_before>from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
pass
def enterEnumCaseName(self, ctx):
pass
def enterStructName(self, ctx):
pass
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
<commit_msg>Implement UpperCamelCase name check for enums<commit_after>from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum names should be in UpperCamelCase')
def enterEnumCaseName(self, ctx):
pass
def enterStructName(self, ctx):
pass
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
|
55237ad67dddaf070a40ea1ab64f84799356edfa
|
requests/_oauth.py
|
requests/_oauth.py
|
# -*- coding: utf-8 -*-
"""
requests._oauth
~~~~~~~~~~~~~~~
This module comtains the path hack neccesary for oauthlib to be vendored into requests
while allowing upstream changes.
"""
import os
import sys
try:
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
except ImportError:
directory = os.path.dirname(__file__)
path = os.path.join(directory, 'packages')
sys.path.insert(0, path)
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
|
# -*- coding: utf-8 -*-
"""
requests._oauth
~~~~~~~~~~~~~~~
This module contains the path hack necessary for oauthlib to be vendored into
requests while allowing upstream changes.
"""
import os
import sys
try:
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
except ImportError:
directory = os.path.dirname(__file__)
path = os.path.join(directory, 'packages')
sys.path.insert(0, path)
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
|
Comment typo fix and move newline.
|
Comment typo fix and move newline.
|
Python
|
isc
|
revolunet/requests,Bluehorn/requests,revolunet/requests,psf/requests
|
# -*- coding: utf-8 -*-
"""
requests._oauth
~~~~~~~~~~~~~~~
This module comtains the path hack neccesary for oauthlib to be vendored into requests
while allowing upstream changes.
"""
import os
import sys
try:
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
except ImportError:
directory = os.path.dirname(__file__)
path = os.path.join(directory, 'packages')
sys.path.insert(0, path)
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
Comment typo fix and move newline.
|
# -*- coding: utf-8 -*-
"""
requests._oauth
~~~~~~~~~~~~~~~
This module contains the path hack necessary for oauthlib to be vendored into
requests while allowing upstream changes.
"""
import os
import sys
try:
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
except ImportError:
directory = os.path.dirname(__file__)
path = os.path.join(directory, 'packages')
sys.path.insert(0, path)
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
|
<commit_before># -*- coding: utf-8 -*-
"""
requests._oauth
~~~~~~~~~~~~~~~
This module comtains the path hack neccesary for oauthlib to be vendored into requests
while allowing upstream changes.
"""
import os
import sys
try:
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
except ImportError:
directory = os.path.dirname(__file__)
path = os.path.join(directory, 'packages')
sys.path.insert(0, path)
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
<commit_msg>Comment typo fix and move newline.<commit_after>
|
# -*- coding: utf-8 -*-
"""
requests._oauth
~~~~~~~~~~~~~~~
This module contains the path hack necessary for oauthlib to be vendored into
requests while allowing upstream changes.
"""
import os
import sys
try:
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
except ImportError:
directory = os.path.dirname(__file__)
path = os.path.join(directory, 'packages')
sys.path.insert(0, path)
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
|
# -*- coding: utf-8 -*-
"""
requests._oauth
~~~~~~~~~~~~~~~
This module comtains the path hack neccesary for oauthlib to be vendored into requests
while allowing upstream changes.
"""
import os
import sys
try:
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
except ImportError:
directory = os.path.dirname(__file__)
path = os.path.join(directory, 'packages')
sys.path.insert(0, path)
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
Comment typo fix and move newline.# -*- coding: utf-8 -*-
"""
requests._oauth
~~~~~~~~~~~~~~~
This module contains the path hack necessary for oauthlib to be vendored into
requests while allowing upstream changes.
"""
import os
import sys
try:
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
except ImportError:
directory = os.path.dirname(__file__)
path = os.path.join(directory, 'packages')
sys.path.insert(0, path)
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
|
<commit_before># -*- coding: utf-8 -*-
"""
requests._oauth
~~~~~~~~~~~~~~~
This module comtains the path hack neccesary for oauthlib to be vendored into requests
while allowing upstream changes.
"""
import os
import sys
try:
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
except ImportError:
directory = os.path.dirname(__file__)
path = os.path.join(directory, 'packages')
sys.path.insert(0, path)
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
<commit_msg>Comment typo fix and move newline.<commit_after># -*- coding: utf-8 -*-
"""
requests._oauth
~~~~~~~~~~~~~~~
This module contains the path hack necessary for oauthlib to be vendored into
requests while allowing upstream changes.
"""
import os
import sys
try:
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
except ImportError:
directory = os.path.dirname(__file__)
path = os.path.join(directory, 'packages')
sys.path.insert(0, path)
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
|
ef0d59781fbc9dcd89334843e5b6fc1461aed246
|
rollbar/contrib/asgi/__init__.py
|
rollbar/contrib/asgi/__init__.py
|
__all__ = ["ASGIMiddleware"]
import rollbar
try:
from starlette.types import ASGIApp, Receive, Scope, Send
except ImportError:
STARLETTE_INSTALLED = False
else:
STARLETTE_INSTALLED = True
# Optional class annotations must be statically declared because
# IDEs cannot infer type hinting for arbitrary dynamic code
def ASGIApp(cls):
async def _asgi_app(self, scope, receive, send):
try:
await self.app(scope, receive, send)
except Exception:
if scope["type"] == "http":
rollbar.report_exc_info()
raise
cls._asgi_app = _asgi_app
return cls
if STARLETTE_INSTALLED is True:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app: ASGIApp) -> None:
self.app = app
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
await self._asgi_app(scope, receive, send)
else:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app):
self.app = app
async def __call__(self, scope, receive, send):
await self._asgi_app(scope, receive, send)
def _hook(request, data):
data["framework"] = "asgi"
rollbar.BASE_DATA_HOOK = _hook
|
__all__ = ["ASGIMiddleware"]
import rollbar
try:
from starlette.types import ASGIApp as ASGIAppType, Receive, Scope, Send
except ImportError:
STARLETTE_INSTALLED = False
else:
STARLETTE_INSTALLED = True
# Optional class annotations must be statically declared because
# IDEs cannot infer type hinting for arbitrary dynamic code
def ASGIApp(cls):
async def _asgi_app(self, scope, receive, send):
try:
await self.app(scope, receive, send)
except Exception:
if scope["type"] == "http":
rollbar.report_exc_info()
raise
cls._asgi_app = _asgi_app
return cls
if STARLETTE_INSTALLED is True:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app: ASGIAppType) -> None:
self.app = app
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
await self._asgi_app(scope, receive, send)
else:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app):
self.app = app
async def __call__(self, scope, receive, send):
await self._asgi_app(scope, receive, send)
def _hook(request, data):
data["framework"] = "asgi"
rollbar.BASE_DATA_HOOK = _hook
|
Use unique identifier name for ASGIApp type
|
Use unique identifier name for ASGIApp type
Due to collision with ASGIApp class decorator
|
Python
|
mit
|
rollbar/pyrollbar
|
__all__ = ["ASGIMiddleware"]
import rollbar
try:
from starlette.types import ASGIApp, Receive, Scope, Send
except ImportError:
STARLETTE_INSTALLED = False
else:
STARLETTE_INSTALLED = True
# Optional class annotations must be statically declared because
# IDEs cannot infer type hinting for arbitrary dynamic code
def ASGIApp(cls):
async def _asgi_app(self, scope, receive, send):
try:
await self.app(scope, receive, send)
except Exception:
if scope["type"] == "http":
rollbar.report_exc_info()
raise
cls._asgi_app = _asgi_app
return cls
if STARLETTE_INSTALLED is True:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app: ASGIApp) -> None:
self.app = app
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
await self._asgi_app(scope, receive, send)
else:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app):
self.app = app
async def __call__(self, scope, receive, send):
await self._asgi_app(scope, receive, send)
def _hook(request, data):
data["framework"] = "asgi"
rollbar.BASE_DATA_HOOK = _hook
Use unique identifier name for ASGIApp type
Due to collision with ASGIApp class decorator
|
__all__ = ["ASGIMiddleware"]
import rollbar
try:
from starlette.types import ASGIApp as ASGIAppType, Receive, Scope, Send
except ImportError:
STARLETTE_INSTALLED = False
else:
STARLETTE_INSTALLED = True
# Optional class annotations must be statically declared because
# IDEs cannot infer type hinting for arbitrary dynamic code
def ASGIApp(cls):
async def _asgi_app(self, scope, receive, send):
try:
await self.app(scope, receive, send)
except Exception:
if scope["type"] == "http":
rollbar.report_exc_info()
raise
cls._asgi_app = _asgi_app
return cls
if STARLETTE_INSTALLED is True:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app: ASGIAppType) -> None:
self.app = app
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
await self._asgi_app(scope, receive, send)
else:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app):
self.app = app
async def __call__(self, scope, receive, send):
await self._asgi_app(scope, receive, send)
def _hook(request, data):
data["framework"] = "asgi"
rollbar.BASE_DATA_HOOK = _hook
|
<commit_before>__all__ = ["ASGIMiddleware"]
import rollbar
try:
from starlette.types import ASGIApp, Receive, Scope, Send
except ImportError:
STARLETTE_INSTALLED = False
else:
STARLETTE_INSTALLED = True
# Optional class annotations must be statically declared because
# IDEs cannot infer type hinting for arbitrary dynamic code
def ASGIApp(cls):
async def _asgi_app(self, scope, receive, send):
try:
await self.app(scope, receive, send)
except Exception:
if scope["type"] == "http":
rollbar.report_exc_info()
raise
cls._asgi_app = _asgi_app
return cls
if STARLETTE_INSTALLED is True:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app: ASGIApp) -> None:
self.app = app
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
await self._asgi_app(scope, receive, send)
else:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app):
self.app = app
async def __call__(self, scope, receive, send):
await self._asgi_app(scope, receive, send)
def _hook(request, data):
data["framework"] = "asgi"
rollbar.BASE_DATA_HOOK = _hook
<commit_msg>Use unique identifier name for ASGIApp type
Due to collision with ASGIApp class decorator<commit_after>
|
__all__ = ["ASGIMiddleware"]
import rollbar
try:
from starlette.types import ASGIApp as ASGIAppType, Receive, Scope, Send
except ImportError:
STARLETTE_INSTALLED = False
else:
STARLETTE_INSTALLED = True
# Optional class annotations must be statically declared because
# IDEs cannot infer type hinting for arbitrary dynamic code
def ASGIApp(cls):
async def _asgi_app(self, scope, receive, send):
try:
await self.app(scope, receive, send)
except Exception:
if scope["type"] == "http":
rollbar.report_exc_info()
raise
cls._asgi_app = _asgi_app
return cls
if STARLETTE_INSTALLED is True:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app: ASGIAppType) -> None:
self.app = app
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
await self._asgi_app(scope, receive, send)
else:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app):
self.app = app
async def __call__(self, scope, receive, send):
await self._asgi_app(scope, receive, send)
def _hook(request, data):
data["framework"] = "asgi"
rollbar.BASE_DATA_HOOK = _hook
|
__all__ = ["ASGIMiddleware"]
import rollbar
try:
from starlette.types import ASGIApp, Receive, Scope, Send
except ImportError:
STARLETTE_INSTALLED = False
else:
STARLETTE_INSTALLED = True
# Optional class annotations must be statically declared because
# IDEs cannot infer type hinting for arbitrary dynamic code
def ASGIApp(cls):
async def _asgi_app(self, scope, receive, send):
try:
await self.app(scope, receive, send)
except Exception:
if scope["type"] == "http":
rollbar.report_exc_info()
raise
cls._asgi_app = _asgi_app
return cls
if STARLETTE_INSTALLED is True:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app: ASGIApp) -> None:
self.app = app
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
await self._asgi_app(scope, receive, send)
else:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app):
self.app = app
async def __call__(self, scope, receive, send):
await self._asgi_app(scope, receive, send)
def _hook(request, data):
data["framework"] = "asgi"
rollbar.BASE_DATA_HOOK = _hook
Use unique identifier name for ASGIApp type
Due to collision with ASGIApp class decorator__all__ = ["ASGIMiddleware"]
import rollbar
try:
from starlette.types import ASGIApp as ASGIAppType, Receive, Scope, Send
except ImportError:
STARLETTE_INSTALLED = False
else:
STARLETTE_INSTALLED = True
# Optional class annotations must be statically declared because
# IDEs cannot infer type hinting for arbitrary dynamic code
def ASGIApp(cls):
async def _asgi_app(self, scope, receive, send):
try:
await self.app(scope, receive, send)
except Exception:
if scope["type"] == "http":
rollbar.report_exc_info()
raise
cls._asgi_app = _asgi_app
return cls
if STARLETTE_INSTALLED is True:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app: ASGIAppType) -> None:
self.app = app
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
await self._asgi_app(scope, receive, send)
else:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app):
self.app = app
async def __call__(self, scope, receive, send):
await self._asgi_app(scope, receive, send)
def _hook(request, data):
data["framework"] = "asgi"
rollbar.BASE_DATA_HOOK = _hook
|
<commit_before>__all__ = ["ASGIMiddleware"]
import rollbar
try:
from starlette.types import ASGIApp, Receive, Scope, Send
except ImportError:
STARLETTE_INSTALLED = False
else:
STARLETTE_INSTALLED = True
# Optional class annotations must be statically declared because
# IDEs cannot infer type hinting for arbitrary dynamic code
def ASGIApp(cls):
async def _asgi_app(self, scope, receive, send):
try:
await self.app(scope, receive, send)
except Exception:
if scope["type"] == "http":
rollbar.report_exc_info()
raise
cls._asgi_app = _asgi_app
return cls
if STARLETTE_INSTALLED is True:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app: ASGIApp) -> None:
self.app = app
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
await self._asgi_app(scope, receive, send)
else:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app):
self.app = app
async def __call__(self, scope, receive, send):
await self._asgi_app(scope, receive, send)
def _hook(request, data):
data["framework"] = "asgi"
rollbar.BASE_DATA_HOOK = _hook
<commit_msg>Use unique identifier name for ASGIApp type
Due to collision with ASGIApp class decorator<commit_after>__all__ = ["ASGIMiddleware"]
import rollbar
try:
from starlette.types import ASGIApp as ASGIAppType, Receive, Scope, Send
except ImportError:
STARLETTE_INSTALLED = False
else:
STARLETTE_INSTALLED = True
# Optional class annotations must be statically declared because
# IDEs cannot infer type hinting for arbitrary dynamic code
def ASGIApp(cls):
async def _asgi_app(self, scope, receive, send):
try:
await self.app(scope, receive, send)
except Exception:
if scope["type"] == "http":
rollbar.report_exc_info()
raise
cls._asgi_app = _asgi_app
return cls
if STARLETTE_INSTALLED is True:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app: ASGIAppType) -> None:
self.app = app
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
await self._asgi_app(scope, receive, send)
else:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app):
self.app = app
async def __call__(self, scope, receive, send):
await self._asgi_app(scope, receive, send)
def _hook(request, data):
data["framework"] = "asgi"
rollbar.BASE_DATA_HOOK = _hook
|
2b83d2dd0c3e0230968a5ab2bd55a647eee2eb3a
|
packs/aws/actions/run.py
|
packs/aws/actions/run.py
|
from lib import action
class ActionManager(action.BaseAction):
def run(self, **kwargs):
action = kwargs['action']
del kwargs['action']
module_path = kwargs['module_path']
del kwargs['module_path']
if action == 'run_instances':
kwargs['user_data'] = self.st2_user_data()
if action == 'create_tags':
kwargs['tags'] = self.split_tags(kwargs['tags'])
if 'cls' in kwargs.keys():
cls = kwargs['cls']
del kwargs['cls']
return self.do_method(module_path, cls, action, **kwargs)
else:
return self.do_function(module_path, action, **kwargs)
|
from lib import action
class ActionManager(action.BaseAction):
def run(self, **kwargs):
action = kwargs['action']
del kwargs['action']
module_path = kwargs['module_path']
del kwargs['module_path']
if action == 'run_instances':
kwargs['user_data'] = self.st2_user_data()
if action == 'create_tags':
kwargs['tags'] = self.split_tags(kwargs['tags'])
if action in ('add_a', 'update_a'):
kwargs['value'] = kwargs['value'].split(',')
if 'cls' in kwargs.keys():
cls = kwargs['cls']
del kwargs['cls']
return self.do_method(module_path, cls, action, **kwargs)
else:
return self.do_function(module_path, action, **kwargs)
|
Support DNS round-robin balancing through Route53
|
Support DNS round-robin balancing through Route53
Our codegen actions for adding/updating A records in Route53 only support a single IP as a value. Changing to accept a comma-separated list, which will add an unweighted round-robin A record.
Should also add WRR at some point probably, but I just don't care enough.
|
Python
|
apache-2.0
|
StackStorm/st2contrib,StackStorm/st2contrib,StackStorm/st2contrib
|
from lib import action
class ActionManager(action.BaseAction):
def run(self, **kwargs):
action = kwargs['action']
del kwargs['action']
module_path = kwargs['module_path']
del kwargs['module_path']
if action == 'run_instances':
kwargs['user_data'] = self.st2_user_data()
if action == 'create_tags':
kwargs['tags'] = self.split_tags(kwargs['tags'])
if 'cls' in kwargs.keys():
cls = kwargs['cls']
del kwargs['cls']
return self.do_method(module_path, cls, action, **kwargs)
else:
return self.do_function(module_path, action, **kwargs)
Support DNS round-robin balancing through Route53
Our codegen actions for adding/updating A records in Route53 only support a single IP as a value. Changing to accept a comma-separated list, which will add an unweighted round-robin A record.
Should also add WRR at some point probably, but I just don't care enough.
|
from lib import action
class ActionManager(action.BaseAction):
def run(self, **kwargs):
action = kwargs['action']
del kwargs['action']
module_path = kwargs['module_path']
del kwargs['module_path']
if action == 'run_instances':
kwargs['user_data'] = self.st2_user_data()
if action == 'create_tags':
kwargs['tags'] = self.split_tags(kwargs['tags'])
if action in ('add_a', 'update_a'):
kwargs['value'] = kwargs['value'].split(',')
if 'cls' in kwargs.keys():
cls = kwargs['cls']
del kwargs['cls']
return self.do_method(module_path, cls, action, **kwargs)
else:
return self.do_function(module_path, action, **kwargs)
|
<commit_before>from lib import action
class ActionManager(action.BaseAction):
def run(self, **kwargs):
action = kwargs['action']
del kwargs['action']
module_path = kwargs['module_path']
del kwargs['module_path']
if action == 'run_instances':
kwargs['user_data'] = self.st2_user_data()
if action == 'create_tags':
kwargs['tags'] = self.split_tags(kwargs['tags'])
if 'cls' in kwargs.keys():
cls = kwargs['cls']
del kwargs['cls']
return self.do_method(module_path, cls, action, **kwargs)
else:
return self.do_function(module_path, action, **kwargs)
<commit_msg>Support DNS round-robin balancing through Route53
Our codegen actions for adding/updating A records in Route53 only support a single IP as a value. Changing to accept a comma-separated list, which will add an unweighted round-robin A record.
Should also add WRR at some point probably, but I just don't care enough.<commit_after>
|
from lib import action
class ActionManager(action.BaseAction):
def run(self, **kwargs):
action = kwargs['action']
del kwargs['action']
module_path = kwargs['module_path']
del kwargs['module_path']
if action == 'run_instances':
kwargs['user_data'] = self.st2_user_data()
if action == 'create_tags':
kwargs['tags'] = self.split_tags(kwargs['tags'])
if action in ('add_a', 'update_a'):
kwargs['value'] = kwargs['value'].split(',')
if 'cls' in kwargs.keys():
cls = kwargs['cls']
del kwargs['cls']
return self.do_method(module_path, cls, action, **kwargs)
else:
return self.do_function(module_path, action, **kwargs)
|
from lib import action
class ActionManager(action.BaseAction):
def run(self, **kwargs):
action = kwargs['action']
del kwargs['action']
module_path = kwargs['module_path']
del kwargs['module_path']
if action == 'run_instances':
kwargs['user_data'] = self.st2_user_data()
if action == 'create_tags':
kwargs['tags'] = self.split_tags(kwargs['tags'])
if 'cls' in kwargs.keys():
cls = kwargs['cls']
del kwargs['cls']
return self.do_method(module_path, cls, action, **kwargs)
else:
return self.do_function(module_path, action, **kwargs)
Support DNS round-robin balancing through Route53
Our codegen actions for adding/updating A records in Route53 only support a single IP as a value. Changing to accept a comma-separated list, which will add an unweighted round-robin A record.
Should also add WRR at some point probably, but I just don't care enough.from lib import action
class ActionManager(action.BaseAction):
def run(self, **kwargs):
action = kwargs['action']
del kwargs['action']
module_path = kwargs['module_path']
del kwargs['module_path']
if action == 'run_instances':
kwargs['user_data'] = self.st2_user_data()
if action == 'create_tags':
kwargs['tags'] = self.split_tags(kwargs['tags'])
if action in ('add_a', 'update_a'):
kwargs['value'] = kwargs['value'].split(',')
if 'cls' in kwargs.keys():
cls = kwargs['cls']
del kwargs['cls']
return self.do_method(module_path, cls, action, **kwargs)
else:
return self.do_function(module_path, action, **kwargs)
|
<commit_before>from lib import action
class ActionManager(action.BaseAction):
def run(self, **kwargs):
action = kwargs['action']
del kwargs['action']
module_path = kwargs['module_path']
del kwargs['module_path']
if action == 'run_instances':
kwargs['user_data'] = self.st2_user_data()
if action == 'create_tags':
kwargs['tags'] = self.split_tags(kwargs['tags'])
if 'cls' in kwargs.keys():
cls = kwargs['cls']
del kwargs['cls']
return self.do_method(module_path, cls, action, **kwargs)
else:
return self.do_function(module_path, action, **kwargs)
<commit_msg>Support DNS round-robin balancing through Route53
Our codegen actions for adding/updating A records in Route53 only support a single IP as a value. Changing to accept a comma-separated list, which will add an unweighted round-robin A record.
Should also add WRR at some point probably, but I just don't care enough.<commit_after>from lib import action
class ActionManager(action.BaseAction):
def run(self, **kwargs):
action = kwargs['action']
del kwargs['action']
module_path = kwargs['module_path']
del kwargs['module_path']
if action == 'run_instances':
kwargs['user_data'] = self.st2_user_data()
if action == 'create_tags':
kwargs['tags'] = self.split_tags(kwargs['tags'])
if action in ('add_a', 'update_a'):
kwargs['value'] = kwargs['value'].split(',')
if 'cls' in kwargs.keys():
cls = kwargs['cls']
del kwargs['cls']
return self.do_method(module_path, cls, action, **kwargs)
else:
return self.do_function(module_path, action, **kwargs)
|
ee3634fbee7e0bd311337007743b30934aca73ba
|
pyfibot/modules/module_thetvdb.py
|
pyfibot/modules/module_thetvdb.py
|
#!/usr/bin/python
from datetime import datetime, timedelta
import tvdb_api
import tvdb_exceptions
def command_ep(bot, user, channel, args):
    """Announce when the next episode of a TV series airs.

    ``args`` is the series name, resolved through the ``tvdb_api`` library.
    NOTE(review): only the last season in ``series.keys()`` is inspected, so
    a future episode in an earlier season (or a special) is missed.  Also,
    ``series.keys()[-1]`` relies on ``keys()`` returning a subscriptable
    sequence (Python 2 dict semantics) -- confirm against the tvdb_api
    version in use.
    """
    t = tvdb_api.Tvdb()
    now = datetime.now()
    try:
        series = t[args]
    except tvdb_exceptions.tvdb_shownotfound:
        bot.say(channel, "Series '%s' not found" % args)
        return
    # Inspect only the most recently listed season.
    latest_season = series[series.keys()[-1]]
    for episode_no, episode in latest_season.items():
        firstaired = episode['firstaired']
        # An episode without an airdate terminates the scan entirely.
        if not firstaired:
            break
        airdate = datetime.strptime(firstaired, "%Y-%m-%d")
        td = airdate - now
        # find the next unaired episode
        if td > timedelta(0, 0, 0):
            msg = "Next episode of %s '%s' airs %s (%d days)" % (series.data['seriesname'], episode['episodename'], episode['firstaired'], td.days)
            bot.say(channel, msg.encode("UTF-8"))
            return
    msg = "No new episode airdates found for %s" % series.data['seriesname']
    bot.say(channel, msg.encode("UTF-8"))
|
#!/usr/bin/python
from datetime import datetime, timedelta
import tvdb_api
import tvdb_exceptions
def command_ep(bot, user, channel, args):
    """Announce when the next episode of a TV series airs.

    ``args`` is the series name, resolved through the ``tvdb_api`` library.
    Every season is scanned (episodes may be listed out of order, and
    specials live in their own season), all future episodes are collected,
    and the one airing soonest is reported.
    """
    t = tvdb_api.Tvdb()
    now = datetime.now()
    try:
        series = t[args]
    except tvdb_exceptions.tvdb_shownotfound:
        bot.say(channel, "Series '%s' not found" % args)
        return
    episodes = []
    # find all episodes with airdate > now
    for season_no, season in series.items():
        for episode_no, episode in season.items():
            firstaired = episode['firstaired']
            # An episode with no known airdate can never be "next".
            if not firstaired:
                continue
            airdate = datetime.strptime(firstaired, "%Y-%m-%d")
            td = airdate - now
            # keep every episode that has not aired yet
            if td > timedelta(0, 0, 0):
                episodes.append(episode)
    if episodes:
        # BUG FIX: the original called sorted(episodes, key=firstaired),
        # where ``firstaired`` is a leftover loop *string*, not a callable,
        # so the sort raised TypeError.  Pick the episode with the earliest
        # airdate instead (ISO YYYY-MM-DD dates compare correctly as text).
        episode = min(episodes, key=lambda ep: ep['firstaired'])
        td = datetime.strptime(episode['firstaired'], "%Y-%m-%d") - now
        msg = "Next episode of %s '%s' airs %s (%d days)" % (
            series.data['seriesname'], episode['episodename'],
            episode['firstaired'], td.days)
        bot.say(channel, msg.encode("UTF-8"))
    else:
        msg = "No new episode airdates found for %s" % series.data['seriesname']
        bot.say(channel, msg.encode("UTF-8"))
|
Fix episode finding logic to handle specials and cases where episodes are out of order in tvdb api result
|
Fix episode finding logic to handle specials and cases where episodes are out of order in tvdb api result
git-svn-id: 056f9092885898c4775d98c479d2d33d00273e45@374 dda364a1-ef19-0410-af65-756c83048fb2
|
Python
|
bsd-3-clause
|
rnyberg/pyfibot,rnyberg/pyfibot,aapa/pyfibot,huqa/pyfibot,lepinkainen/pyfibot,lepinkainen/pyfibot,EArmour/pyfibot,aapa/pyfibot,EArmour/pyfibot,huqa/pyfibot
|
#!/usr/bin/python
from datetime import datetime, timedelta
import tvdb_api
import tvdb_exceptions
def command_ep(bot, user, channel, args):
t = tvdb_api.Tvdb()
now = datetime.now()
try:
series = t[args]
except tvdb_exceptions.tvdb_shownotfound:
bot.say(channel, "Series '%s' not found" % args)
return
latest_season = series[series.keys()[-1]]
for episode_no, episode in latest_season.items():
firstaired = episode['firstaired']
if not firstaired:
break
airdate = datetime.strptime(firstaired, "%Y-%m-%d")
td = airdate - now
# find the next unaired episode
if td > timedelta(0, 0, 0):
msg = "Next episode of %s '%s' airs %s (%d days)" % (series.data['seriesname'], episode['episodename'], episode['firstaired'], td.days)
bot.say(channel, msg.encode("UTF-8"))
return
msg = "No new episode airdates found for %s" % series.data['seriesname']
bot.say(channel, msg.encode("UTF-8"))
Fix episode finding logic to handle specials and cases where episodes are out of order in tvdb api result
git-svn-id: 056f9092885898c4775d98c479d2d33d00273e45@374 dda364a1-ef19-0410-af65-756c83048fb2
|
#!/usr/bin/python
from datetime import datetime, timedelta
import tvdb_api
import tvdb_exceptions
def command_ep(bot, user, channel, args):
t = tvdb_api.Tvdb()
now = datetime.now()
try:
series = t[args]
except tvdb_exceptions.tvdb_shownotfound:
bot.say(channel, "Series '%s' not found" % args)
return
episodes = []
# find all episodes with airdate > now
for season_no, season in series.items():
for episode_no, episode in season.items():
firstaired = episode['firstaired']
if not firstaired:
continue
airdate = datetime.strptime(firstaired, "%Y-%m-%d")
td = airdate - now
# find the next unaired episode
if td > timedelta(0, 0, 0):
episodes.append(episode)
# if any episodes were found, find out the one with airdate closest to now
if episodes:
# sort the list just in case
episodes = sorted(episodes, key=firstaired)
episode = episodes[0]
td = datetime.strptime(episode['firstaired'], "%Y-%m-%d") - now
msg = "Next episode of %s '%s' airs %s (%d days)" % (series.data['seriesname'], episode['episodename'], episode['firstaired'], td.days)
bot.say(channel, msg.encode("UTF-8"))
else:
msg = "No new episode airdates found for %s" % series.data['seriesname']
bot.say(channel, msg.encode("UTF-8"))
|
<commit_before>#!/usr/bin/python
from datetime import datetime, timedelta
import tvdb_api
import tvdb_exceptions
def command_ep(bot, user, channel, args):
t = tvdb_api.Tvdb()
now = datetime.now()
try:
series = t[args]
except tvdb_exceptions.tvdb_shownotfound:
bot.say(channel, "Series '%s' not found" % args)
return
latest_season = series[series.keys()[-1]]
for episode_no, episode in latest_season.items():
firstaired = episode['firstaired']
if not firstaired:
break
airdate = datetime.strptime(firstaired, "%Y-%m-%d")
td = airdate - now
# find the next unaired episode
if td > timedelta(0, 0, 0):
msg = "Next episode of %s '%s' airs %s (%d days)" % (series.data['seriesname'], episode['episodename'], episode['firstaired'], td.days)
bot.say(channel, msg.encode("UTF-8"))
return
msg = "No new episode airdates found for %s" % series.data['seriesname']
bot.say(channel, msg.encode("UTF-8"))
<commit_msg>Fix episode finding logic to handle specials and cases where episodes are out of order in tvdb api result
git-svn-id: 056f9092885898c4775d98c479d2d33d00273e45@374 dda364a1-ef19-0410-af65-756c83048fb2<commit_after>
|
#!/usr/bin/python
from datetime import datetime, timedelta
import tvdb_api
import tvdb_exceptions
def command_ep(bot, user, channel, args):
t = tvdb_api.Tvdb()
now = datetime.now()
try:
series = t[args]
except tvdb_exceptions.tvdb_shownotfound:
bot.say(channel, "Series '%s' not found" % args)
return
episodes = []
# find all episodes with airdate > now
for season_no, season in series.items():
for episode_no, episode in season.items():
firstaired = episode['firstaired']
if not firstaired:
continue
airdate = datetime.strptime(firstaired, "%Y-%m-%d")
td = airdate - now
# find the next unaired episode
if td > timedelta(0, 0, 0):
episodes.append(episode)
# if any episodes were found, find out the one with airdate closest to now
if episodes:
# sort the list just in case
episodes = sorted(episodes, key=firstaired)
episode = episodes[0]
td = datetime.strptime(episode['firstaired'], "%Y-%m-%d") - now
msg = "Next episode of %s '%s' airs %s (%d days)" % (series.data['seriesname'], episode['episodename'], episode['firstaired'], td.days)
bot.say(channel, msg.encode("UTF-8"))
else:
msg = "No new episode airdates found for %s" % series.data['seriesname']
bot.say(channel, msg.encode("UTF-8"))
|
#!/usr/bin/python
from datetime import datetime, timedelta
import tvdb_api
import tvdb_exceptions
def command_ep(bot, user, channel, args):
t = tvdb_api.Tvdb()
now = datetime.now()
try:
series = t[args]
except tvdb_exceptions.tvdb_shownotfound:
bot.say(channel, "Series '%s' not found" % args)
return
latest_season = series[series.keys()[-1]]
for episode_no, episode in latest_season.items():
firstaired = episode['firstaired']
if not firstaired:
break
airdate = datetime.strptime(firstaired, "%Y-%m-%d")
td = airdate - now
# find the next unaired episode
if td > timedelta(0, 0, 0):
msg = "Next episode of %s '%s' airs %s (%d days)" % (series.data['seriesname'], episode['episodename'], episode['firstaired'], td.days)
bot.say(channel, msg.encode("UTF-8"))
return
msg = "No new episode airdates found for %s" % series.data['seriesname']
bot.say(channel, msg.encode("UTF-8"))
Fix episode finding logic to handle specials and cases where episodes are out of order in tvdb api result
git-svn-id: 056f9092885898c4775d98c479d2d33d00273e45@374 dda364a1-ef19-0410-af65-756c83048fb2#!/usr/bin/python
from datetime import datetime, timedelta
import tvdb_api
import tvdb_exceptions
def command_ep(bot, user, channel, args):
t = tvdb_api.Tvdb()
now = datetime.now()
try:
series = t[args]
except tvdb_exceptions.tvdb_shownotfound:
bot.say(channel, "Series '%s' not found" % args)
return
episodes = []
# find all episodes with airdate > now
for season_no, season in series.items():
for episode_no, episode in season.items():
firstaired = episode['firstaired']
if not firstaired:
continue
airdate = datetime.strptime(firstaired, "%Y-%m-%d")
td = airdate - now
# find the next unaired episode
if td > timedelta(0, 0, 0):
episodes.append(episode)
# if any episodes were found, find out the one with airdate closest to now
if episodes:
# sort the list just in case
episodes = sorted(episodes, key=firstaired)
episode = episodes[0]
td = datetime.strptime(episode['firstaired'], "%Y-%m-%d") - now
msg = "Next episode of %s '%s' airs %s (%d days)" % (series.data['seriesname'], episode['episodename'], episode['firstaired'], td.days)
bot.say(channel, msg.encode("UTF-8"))
else:
msg = "No new episode airdates found for %s" % series.data['seriesname']
bot.say(channel, msg.encode("UTF-8"))
|
<commit_before>#!/usr/bin/python
from datetime import datetime, timedelta
import tvdb_api
import tvdb_exceptions
def command_ep(bot, user, channel, args):
t = tvdb_api.Tvdb()
now = datetime.now()
try:
series = t[args]
except tvdb_exceptions.tvdb_shownotfound:
bot.say(channel, "Series '%s' not found" % args)
return
latest_season = series[series.keys()[-1]]
for episode_no, episode in latest_season.items():
firstaired = episode['firstaired']
if not firstaired:
break
airdate = datetime.strptime(firstaired, "%Y-%m-%d")
td = airdate - now
# find the next unaired episode
if td > timedelta(0, 0, 0):
msg = "Next episode of %s '%s' airs %s (%d days)" % (series.data['seriesname'], episode['episodename'], episode['firstaired'], td.days)
bot.say(channel, msg.encode("UTF-8"))
return
msg = "No new episode airdates found for %s" % series.data['seriesname']
bot.say(channel, msg.encode("UTF-8"))
<commit_msg>Fix episode finding logic to handle specials and cases where episodes are out of order in tvdb api result
git-svn-id: 056f9092885898c4775d98c479d2d33d00273e45@374 dda364a1-ef19-0410-af65-756c83048fb2<commit_after>#!/usr/bin/python
from datetime import datetime, timedelta
import tvdb_api
import tvdb_exceptions
def command_ep(bot, user, channel, args):
t = tvdb_api.Tvdb()
now = datetime.now()
try:
series = t[args]
except tvdb_exceptions.tvdb_shownotfound:
bot.say(channel, "Series '%s' not found" % args)
return
episodes = []
# find all episodes with airdate > now
for season_no, season in series.items():
for episode_no, episode in season.items():
firstaired = episode['firstaired']
if not firstaired:
continue
airdate = datetime.strptime(firstaired, "%Y-%m-%d")
td = airdate - now
# find the next unaired episode
if td > timedelta(0, 0, 0):
episodes.append(episode)
# if any episodes were found, find out the one with airdate closest to now
if episodes:
# sort the list just in case
episodes = sorted(episodes, key=firstaired)
episode = episodes[0]
td = datetime.strptime(episode['firstaired'], "%Y-%m-%d") - now
msg = "Next episode of %s '%s' airs %s (%d days)" % (series.data['seriesname'], episode['episodename'], episode['firstaired'], td.days)
bot.say(channel, msg.encode("UTF-8"))
else:
msg = "No new episode airdates found for %s" % series.data['seriesname']
bot.say(channel, msg.encode("UTF-8"))
|
5b194d658e85dee0415a087704acfc9bdb23dd00
|
server/__init__.py
|
server/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
try:
from .base import * # noqa
except ImportError as exc:
# If our import failed because either girder or a girder plugin is
# unavailable, log it and start anyway (we may be running in a girder-less
# environment).
import logging as logger
logger.getLogger().setLevel(logger.INFO)
logger.debug('Girder is unavailable. Run as a girder plugin for girder '
'access.')
girder = None
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# Importing girder (plus the explicitly required plugins) is how we detect
# whether we are running inside a girder server at all.
try:
    import girder
    # This should include all of the explicitly required Girder plugins.
    import girder.plugins.worker
except ImportError:
    # If our import failed because either girder or a girder plugin is
    # unavailable, log it and start anyway (we may be running in a girder-less
    # environment).
    import logging as logger
    logger.getLogger().setLevel(logger.INFO)
    # NOTE(review): the root logger level is set to INFO but this message is
    # emitted at DEBUG, so it will normally be filtered out -- confirm whether
    # this should be logger.info(...) or setLevel(logger.DEBUG).
    logger.debug('Girder is unavailable. Run as a girder plugin for girder '
                 'access.')
    girder = None
else:
    # if girder is available, and we fail to import anything else, girder will
    # show the failure
    from .base import load  # noqa
|
Allow girder to show load errors in more cases.
|
Allow girder to show load errors in more cases.
If girder is installed but requirements of large_image are not, the
plugin would report as enabled but not be functional. This checks if
girder is available, and, if so, allows girder to report errors.
Eventually, it will be nice to separate large_image without girder from
the necessary girder parts to make this easier to manage. For now, this
should make diagnosis easier.
As a simple test, when everything works under girder, uninstall a
required module (cachetools, for instance), and restart girder. Before,
no error was reported. With this change, the missing module is
reported.
|
Python
|
apache-2.0
|
girder/large_image,girder/large_image,DigitalSlideArchive/large_image,DigitalSlideArchive/large_image,girder/large_image,DigitalSlideArchive/large_image
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
try:
from .base import * # noqa
except ImportError as exc:
# If our import failed because either girder or a girder plugin is
# unavailable, log it and start anyway (we may be running in a girder-less
# environment).
import logging as logger
logger.getLogger().setLevel(logger.INFO)
logger.debug('Girder is unavailable. Run as a girder plugin for girder '
'access.')
girder = None
Allow girder to show load errors in more cases.
If girder is installed but requirements of large_image are not, the
plugin would report as enabled but not be functional. This checks if
girder is available, and, if so, allows girder to report errors.
Eventually, it will be nice to separate large_image without girder from
the necessary girder parts to make this easier to manage. For now, this
should make diagnosis easier.
As a simple test, when everything works under girder, uninstall a
required module (cachetools, for instance), and restart girder. Before,
no error was reported. With this change, the missing module is
reported.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
try:
import girder
# This should include all of the explicitly required Girder plugins.
import girder.plugins.worker
except ImportError:
# If our import failed because either girder or a girder plugin is
# unavailable, log it and start anyway (we may be running in a girder-less
# environment).
import logging as logger
logger.getLogger().setLevel(logger.INFO)
logger.debug('Girder is unavailable. Run as a girder plugin for girder '
'access.')
girder = None
else:
# if girder is available, and we fail to import anything else, girder will
# show the failure
from .base import load # noqa
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
try:
from .base import * # noqa
except ImportError as exc:
# If our import failed because either girder or a girder plugin is
# unavailable, log it and start anyway (we may be running in a girder-less
# environment).
import logging as logger
logger.getLogger().setLevel(logger.INFO)
logger.debug('Girder is unavailable. Run as a girder plugin for girder '
'access.')
girder = None
<commit_msg>Allow girder to show load errors in more cases.
If girder is installed but requirements of large_image are not, the
plugin would report as enabled but not be functional. This checks if
girder is available, and, if so, allows girder to report errors.
Eventually, it will be nice to separate large_image without girder from
the necessary girder parts to make this easier to manage. For now, this
should make diagnosis easier.
As a simple test, when everything works under girder, uninstall a
required module (cachetools, for instance), and restart girder. Before,
no error was reported. With this change, the missing module is
reported.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
try:
import girder
# This should include all of the explicitly required Girder plugins.
import girder.plugins.worker
except ImportError:
# If our import failed because either girder or a girder plugin is
# unavailable, log it and start anyway (we may be running in a girder-less
# environment).
import logging as logger
logger.getLogger().setLevel(logger.INFO)
logger.debug('Girder is unavailable. Run as a girder plugin for girder '
'access.')
girder = None
else:
# if girder is available, and we fail to import anything else, girder will
# show the failure
from .base import load # noqa
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
try:
from .base import * # noqa
except ImportError as exc:
# If our import failed because either girder or a girder plugin is
# unavailable, log it and start anyway (we may be running in a girder-less
# environment).
import logging as logger
logger.getLogger().setLevel(logger.INFO)
logger.debug('Girder is unavailable. Run as a girder plugin for girder '
'access.')
girder = None
Allow girder to show load errors in more cases.
If girder is installed but requirements of large_image are not, the
plugin would report as enabled but not be functional. This checks if
girder is available, and, if so, allows girder to report errors.
Eventually, it will be nice to separate large_image without girder from
the necessary girder parts to make this easier to manage. For now, this
should make diagnosis easier.
As a simple test, when everything works under girder, uninstall a
required module (cachetools, for instance), and restart girder. Before,
no error was reported. With this change, the missing module is
reported.#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
try:
import girder
# This should include all of the explicitly required Girder plugins.
import girder.plugins.worker
except ImportError:
# If our import failed because either girder or a girder plugin is
# unavailable, log it and start anyway (we may be running in a girder-less
# environment).
import logging as logger
logger.getLogger().setLevel(logger.INFO)
logger.debug('Girder is unavailable. Run as a girder plugin for girder '
'access.')
girder = None
else:
# if girder is available, and we fail to import anything else, girder will
# show the failure
from .base import load # noqa
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
try:
from .base import * # noqa
except ImportError as exc:
# If our import failed because either girder or a girder plugin is
# unavailable, log it and start anyway (we may be running in a girder-less
# environment).
import logging as logger
logger.getLogger().setLevel(logger.INFO)
logger.debug('Girder is unavailable. Run as a girder plugin for girder '
'access.')
girder = None
<commit_msg>Allow girder to show load errors in more cases.
If girder is installed but requirements of large_image are not, the
plugin would report as enabled but not be functional. This checks if
girder is available, and, if so, allows girder to report errors.
Eventually, it will be nice to separate large_image without girder from
the necessary girder parts to make this easier to manage. For now, this
should make diagnosis easier.
As a simple test, when everything works under girder, uninstall a
required module (cachetools, for instance), and restart girder. Before,
no error was reported. With this change, the missing module is
reported.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
try:
import girder
# This should include all of the explicitly required Girder plugins.
import girder.plugins.worker
except ImportError:
# If our import failed because either girder or a girder plugin is
# unavailable, log it and start anyway (we may be running in a girder-less
# environment).
import logging as logger
logger.getLogger().setLevel(logger.INFO)
logger.debug('Girder is unavailable. Run as a girder plugin for girder '
'access.')
girder = None
else:
# if girder is available, and we fail to import anything else, girder will
# show the failure
from .base import load # noqa
|
4aa6714284cb45a2747cea8e0f38e8fbcd8ec0bc
|
pymatgen/core/design_patterns.py
|
pymatgen/core/design_patterns.py
|
# coding: utf-8
"""
This module defines some useful design patterns.
"""

# FIX: the docstring must come first -- in the original it followed the
# ``__future__`` import, so it was a bare string expression and ``__doc__``
# stayed unset.  Only the docstring and comments may precede this import.
from __future__ import division, unicode_literals

__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2011, The Materials Project"
__version__ = "1.0"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__status__ = "Production"
__date__ = "Sep 23, 2011"
class Enum(set):
    """Creates an enum out of a set.

    Members are accessed as attributes and evaluate to their own name,
    e.g. ``Colors = Enum(["RED"])`` gives ``Colors.RED == "RED"``.
    """

    def __getattr__(self, name):
        # Only names that are members of the underlying set are valid
        # attributes; anything else is a normal attribute error.
        if name in self:
            return name
        # Include the missing name so the failure is debuggable (the
        # original raised a bare AttributeError with no message).
        raise AttributeError(name)
class NullFile(object):
    """A file object that is associated to /dev/null.

    Instantiating this class hands back an ordinary writable file opened on
    the platform's null device, so everything written to it is discarded.
    """

    def __new__(cls):
        import os
        # Return a real file object rather than a NullFile instance; as a
        # consequence, Python never invokes __init__ below.
        devnull_path = os.devnull
        return open(devnull_path, 'w')

    def __init__(self):
        """no-op"""
class NullStream(object):
    """A fake stream whose ``write`` accepts anything and does nothing."""

    def write(*args):
        """Discard all arguments; a no-op stand-in for a stream's write."""
|
# coding: utf-8
from __future__ import division, unicode_literals
"""
This module defines some useful design patterns.
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2011, The Materials Project"
__version__ = "1.0"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__status__ = "Production"
__date__ = "Sep 23, 2011"
class Enum(set):
"""
Creates an enum out of a set.
"""
def __getattr__(self, name):
if name in self:
return name
raise AttributeError
|
Move NullFile and NullStream to monty
|
Move NullFile and NullStream to monty
|
Python
|
mit
|
Bismarrck/pymatgen,Bismarrck/pymatgen,sonium0/pymatgen,rousseab/pymatgen,Dioptas/pymatgen,migueldiascosta/pymatgen,yanikou19/pymatgen,ctoher/pymatgen,migueldiascosta/pymatgen,yanikou19/pymatgen,rousseab/pymatgen,sonium0/pymatgen,ctoher/pymatgen,ctoher/pymatgen,rousseab/pymatgen,sonium0/pymatgen,Bismarrck/pymatgen,migueldiascosta/pymatgen,Bismarrck/pymatgen,Bismarrck/pymatgen,yanikou19/pymatgen,Dioptas/pymatgen
|
# coding: utf-8
from __future__ import division, unicode_literals
"""
This module defines some useful design patterns.
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2011, The Materials Project"
__version__ = "1.0"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__status__ = "Production"
__date__ = "Sep 23, 2011"
class Enum(set):
"""
Creates an enum out of a set.
"""
def __getattr__(self, name):
if name in self:
return name
raise AttributeError
class NullFile(object):
"""A file object that is associated to /dev/null."""
def __new__(cls):
import os
return open(os.devnull, 'w')
def __init__(self):
"""no-op"""
class NullStream(object):
"""A fake stream with a no-op write.."""
def write(*args):
"""no-op"""
Move NullFile and NullStream to monty
|
# coding: utf-8
from __future__ import division, unicode_literals
"""
This module defines some useful design patterns.
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2011, The Materials Project"
__version__ = "1.0"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__status__ = "Production"
__date__ = "Sep 23, 2011"
class Enum(set):
"""
Creates an enum out of a set.
"""
def __getattr__(self, name):
if name in self:
return name
raise AttributeError
|
<commit_before># coding: utf-8
from __future__ import division, unicode_literals
"""
This module defines some useful design patterns.
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2011, The Materials Project"
__version__ = "1.0"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__status__ = "Production"
__date__ = "Sep 23, 2011"
class Enum(set):
"""
Creates an enum out of a set.
"""
def __getattr__(self, name):
if name in self:
return name
raise AttributeError
class NullFile(object):
"""A file object that is associated to /dev/null."""
def __new__(cls):
import os
return open(os.devnull, 'w')
def __init__(self):
"""no-op"""
class NullStream(object):
"""A fake stream with a no-op write.."""
def write(*args):
"""no-op"""
<commit_msg>Move NullFile and NullStream to monty<commit_after>
|
# coding: utf-8
from __future__ import division, unicode_literals
"""
This module defines some useful design patterns.
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2011, The Materials Project"
__version__ = "1.0"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__status__ = "Production"
__date__ = "Sep 23, 2011"
class Enum(set):
"""
Creates an enum out of a set.
"""
def __getattr__(self, name):
if name in self:
return name
raise AttributeError
|
# coding: utf-8
from __future__ import division, unicode_literals
"""
This module defines some useful design patterns.
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2011, The Materials Project"
__version__ = "1.0"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__status__ = "Production"
__date__ = "Sep 23, 2011"
class Enum(set):
"""
Creates an enum out of a set.
"""
def __getattr__(self, name):
if name in self:
return name
raise AttributeError
class NullFile(object):
"""A file object that is associated to /dev/null."""
def __new__(cls):
import os
return open(os.devnull, 'w')
def __init__(self):
"""no-op"""
class NullStream(object):
"""A fake stream with a no-op write.."""
def write(*args):
"""no-op"""
Move NullFile and NullStream to monty# coding: utf-8
from __future__ import division, unicode_literals
"""
This module defines some useful design patterns.
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2011, The Materials Project"
__version__ = "1.0"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__status__ = "Production"
__date__ = "Sep 23, 2011"
class Enum(set):
"""
Creates an enum out of a set.
"""
def __getattr__(self, name):
if name in self:
return name
raise AttributeError
|
<commit_before># coding: utf-8
from __future__ import division, unicode_literals
"""
This module defines some useful design patterns.
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2011, The Materials Project"
__version__ = "1.0"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__status__ = "Production"
__date__ = "Sep 23, 2011"
class Enum(set):
"""
Creates an enum out of a set.
"""
def __getattr__(self, name):
if name in self:
return name
raise AttributeError
class NullFile(object):
"""A file object that is associated to /dev/null."""
def __new__(cls):
import os
return open(os.devnull, 'w')
def __init__(self):
"""no-op"""
class NullStream(object):
"""A fake stream with a no-op write.."""
def write(*args):
"""no-op"""
<commit_msg>Move NullFile and NullStream to monty<commit_after># coding: utf-8
from __future__ import division, unicode_literals
"""
This module defines some useful design patterns.
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2011, The Materials Project"
__version__ = "1.0"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__status__ = "Production"
__date__ = "Sep 23, 2011"
class Enum(set):
"""
Creates an enum out of a set.
"""
def __getattr__(self, name):
if name in self:
return name
raise AttributeError
|
b2fd89928f2462c4c8de8a6028c65996d69bfd31
|
motion_test.py
|
motion_test.py
|
from gpiozero import MotionSensor
##Quick script to check communication between motion sensor and Pi on GPIO 4.
pir = MotionSensor(4)
i = 0
while(i < 5):
if pir.motion_detected:
print i , 'motion detected'
i+=1;
|
from gpiozero import MotionSensor
##Send an email upon motion detecion
pir = MotionSensor(4)
i = 0
while(i < 1):
if pir.motion_detected:
print i , 'motion detected'
execfile("send_email.py")
i+=1;
|
Send email when motion is detected
|
Send email when motion is detected
|
Python
|
mit
|
efagerberg/PiCam
|
from gpiozero import MotionSensor
##Quick script to check communication between motion sensor and Pi on GPIO 4.
pir = MotionSensor(4)
i = 0
while(i < 5):
if pir.motion_detected:
print i , 'motion detected'
i+=1;Send email when motion is detected
|
from gpiozero import MotionSensor
##Send an email upon motion detecion
pir = MotionSensor(4)
i = 0
while(i < 1):
if pir.motion_detected:
print i , 'motion detected'
execfile("send_email.py")
i+=1;
|
<commit_before>from gpiozero import MotionSensor
##Quick script to check communication between motion sensor and Pi on GPIO 4.
pir = MotionSensor(4)
i = 0
while(i < 5):
if pir.motion_detected:
print i , 'motion detected'
i+=1;<commit_msg>Send email when motion is detected<commit_after>
|
from gpiozero import MotionSensor
##Send an email upon motion detecion
pir = MotionSensor(4)
i = 0
while(i < 1):
if pir.motion_detected:
print i , 'motion detected'
execfile("send_email.py")
i+=1;
|
from gpiozero import MotionSensor
##Quick script to check communication between motion sensor and Pi on GPIO 4.
pir = MotionSensor(4)
i = 0
while(i < 5):
if pir.motion_detected:
print i , 'motion detected'
i+=1;Send email when motion is detectedfrom gpiozero import MotionSensor
##Send an email upon motion detecion
pir = MotionSensor(4)
i = 0
while(i < 1):
if pir.motion_detected:
print i , 'motion detected'
execfile("send_email.py")
i+=1;
|
<commit_before>from gpiozero import MotionSensor
##Quick script to check communication between motion sensor and Pi on GPIO 4.
pir = MotionSensor(4)
i = 0
while(i < 5):
if pir.motion_detected:
print i , 'motion detected'
i+=1;<commit_msg>Send email when motion is detected<commit_after>from gpiozero import MotionSensor
##Send an email upon motion detecion
pir = MotionSensor(4)
i = 0
while(i < 1):
if pir.motion_detected:
print i , 'motion detected'
execfile("send_email.py")
i+=1;
|
4c96f2dc52810c10ef6d73732be0ecd8745c4567
|
moviePlayer.py
|
moviePlayer.py
|
import tkinter as tk
from time import sleep
from movie01 import reel
window = tk.Tk()
def main():
window.title("Tkinter Movie Player")
button = tk.Button(window, text = "Play", command = processPlay)
button.pack()
window.mainloop()
def processPlay():
TIME_STEP = 0.3
label = tk.Label(window, text = "")
label.pack()
count = 0
while count < len(reel):
s = ""
frame = reel[count]
for line in frame:
s += line + "\n"
count += 1
label["text"] = s
label.pack()
window.update()
sleep(TIME_STEP)
main()
|
import tkinter as tk
from time import sleep
from movie01 import reel
window = tk.Tk()
def main():
window.title("Tkinter Movie Player")
button = tk.Button(window, text = "Play", command = processPlay)
button.pack()
window.mainloop()
def processPlay():
TIME_STEP = 0.3
label = tk.Label(window, text = "", font = ("Courier"))
label.pack()
count = 0
while count < len(reel):
s = ""
frame = reel[count]
for line in frame:
s += line + "\n"
count += 1
label["text"] = s
label.pack()
window.update()
sleep(TIME_STEP)
main()
|
Change font of ASCII to Courier
|
Change font of ASCII to Courier
|
Python
|
apache-2.0
|
awhittle3/ASCII-Movie
|
import tkinter as tk
from time import sleep
from movie01 import reel
window = tk.Tk()
def main():
window.title("Tkinter Movie Player")
button = tk.Button(window, text = "Play", command = processPlay)
button.pack()
window.mainloop()
def processPlay():
TIME_STEP = 0.3
label = tk.Label(window, text = "")
label.pack()
count = 0
while count < len(reel):
s = ""
frame = reel[count]
for line in frame:
s += line + "\n"
count += 1
label["text"] = s
label.pack()
window.update()
sleep(TIME_STEP)
main()Change font of ASCII to Courier
|
import tkinter as tk
from time import sleep
from movie01 import reel
window = tk.Tk()
def main():
window.title("Tkinter Movie Player")
button = tk.Button(window, text = "Play", command = processPlay)
button.pack()
window.mainloop()
def processPlay():
TIME_STEP = 0.3
label = tk.Label(window, text = "", font = ("Courier"))
label.pack()
count = 0
while count < len(reel):
s = ""
frame = reel[count]
for line in frame:
s += line + "\n"
count += 1
label["text"] = s
label.pack()
window.update()
sleep(TIME_STEP)
main()
|
<commit_before>import tkinter as tk
from time import sleep
from movie01 import reel
window = tk.Tk()
def main():
window.title("Tkinter Movie Player")
button = tk.Button(window, text = "Play", command = processPlay)
button.pack()
window.mainloop()
def processPlay():
TIME_STEP = 0.3
label = tk.Label(window, text = "")
label.pack()
count = 0
while count < len(reel):
s = ""
frame = reel[count]
for line in frame:
s += line + "\n"
count += 1
label["text"] = s
label.pack()
window.update()
sleep(TIME_STEP)
main()<commit_msg>Change font of ASCII to Courier<commit_after>
|
import tkinter as tk
from time import sleep
from movie01 import reel
window = tk.Tk()
def main():
window.title("Tkinter Movie Player")
button = tk.Button(window, text = "Play", command = processPlay)
button.pack()
window.mainloop()
def processPlay():
TIME_STEP = 0.3
label = tk.Label(window, text = "", font = ("Courier"))
label.pack()
count = 0
while count < len(reel):
s = ""
frame = reel[count]
for line in frame:
s += line + "\n"
count += 1
label["text"] = s
label.pack()
window.update()
sleep(TIME_STEP)
main()
|
import tkinter as tk
from time import sleep
from movie01 import reel
window = tk.Tk()
def main():
window.title("Tkinter Movie Player")
button = tk.Button(window, text = "Play", command = processPlay)
button.pack()
window.mainloop()
def processPlay():
TIME_STEP = 0.3
label = tk.Label(window, text = "")
label.pack()
count = 0
while count < len(reel):
s = ""
frame = reel[count]
for line in frame:
s += line + "\n"
count += 1
label["text"] = s
label.pack()
window.update()
sleep(TIME_STEP)
main()Change font of ASCII to Courierimport tkinter as tk
from time import sleep
from movie01 import reel
window = tk.Tk()
def main():
window.title("Tkinter Movie Player")
button = tk.Button(window, text = "Play", command = processPlay)
button.pack()
window.mainloop()
def processPlay():
TIME_STEP = 0.3
label = tk.Label(window, text = "", font = ("Courier"))
label.pack()
count = 0
while count < len(reel):
s = ""
frame = reel[count]
for line in frame:
s += line + "\n"
count += 1
label["text"] = s
label.pack()
window.update()
sleep(TIME_STEP)
main()
|
<commit_before>import tkinter as tk
from time import sleep
from movie01 import reel
window = tk.Tk()
def main():
window.title("Tkinter Movie Player")
button = tk.Button(window, text = "Play", command = processPlay)
button.pack()
window.mainloop()
def processPlay():
TIME_STEP = 0.3
label = tk.Label(window, text = "")
label.pack()
count = 0
while count < len(reel):
s = ""
frame = reel[count]
for line in frame:
s += line + "\n"
count += 1
label["text"] = s
label.pack()
window.update()
sleep(TIME_STEP)
main()<commit_msg>Change font of ASCII to Courier<commit_after>import tkinter as tk
from time import sleep
from movie01 import reel
window = tk.Tk()
def main():
window.title("Tkinter Movie Player")
button = tk.Button(window, text = "Play", command = processPlay)
button.pack()
window.mainloop()
def processPlay():
TIME_STEP = 0.3
label = tk.Label(window, text = "", font = ("Courier"))
label.pack()
count = 0
while count < len(reel):
s = ""
frame = reel[count]
for line in frame:
s += line + "\n"
count += 1
label["text"] = s
label.pack()
window.update()
sleep(TIME_STEP)
main()
|
2bd5887a62d0f6bfd6f9290604effad322e8ab1e
|
myElsClient.py
|
myElsClient.py
|
import requests
class myElsClient:
"""A class that implements a Python interface to api.elsevier.com"""
# local variables
__base_url = "https://api.elsevier.com/"
# constructors
def __init__(self, apiKey):
"""Instantiates a client with a given API Key."""
self.apiKey = apiKey
# configuration functions
def setInstToken(self, instToken):
"""Sets an institutional token for customer authentication"""
self.instToken = instToken
# utility access functions
def getBaseURL(self):
"""Returns the base URL currently configured for Elsevier's APIs"""
return self.__base_url
# request/response execution functions
def execRequest(self,pathStr,queryStr):
"""Constructs and send the actual request"""
headers = {
"X-ELS-APIKey" : self.apiKey
}
r = requests.get(
self.__base_url + pathStr + queryStr,
headers = headers
)
return r
|
import requests
class myElsClient:
"""A class that implements a Python interface to api.elsevier.com"""
# local variables
__base_url = "https://api.elsevier.com/"
# constructors
def __init__(self, apiKey):
"""Instantiates a client with a given API Key."""
self.apiKey = apiKey
# configuration functions
def setInstToken(self, instToken):
"""Sets an institutional token for customer authentication"""
self.instToken = instToken
# utility access functions
def getBaseURL(self):
"""Returns the base URL currently configured for Elsevier's APIs"""
return self.__base_url
# request/response execution functions
def execRequest(self,pathStr,queryStr):
"""Constructs and sends the actual request; returns response."""
headers = {
"X-ELS-APIKey" : self.apiKey
}
r = requests.get(
self.__base_url + pathStr + queryStr,
headers = headers
)
if r.status_code == 200:
return r
else:
print "error"
|
Add basic HTTP error handling.
|
Add basic HTTP error handling.
|
Python
|
bsd-3-clause
|
ElsevierDev/elsapy
|
import requests
class myElsClient:
"""A class that implements a Python interface to api.elsevier.com"""
# local variables
__base_url = "https://api.elsevier.com/"
# constructors
def __init__(self, apiKey):
"""Instantiates a client with a given API Key."""
self.apiKey = apiKey
# configuration functions
def setInstToken(self, instToken):
"""Sets an institutional token for customer authentication"""
self.instToken = instToken
# utility access functions
def getBaseURL(self):
"""Returns the base URL currently configured for Elsevier's APIs"""
return self.__base_url
# request/response execution functions
def execRequest(self,pathStr,queryStr):
"""Constructs and send the actual request"""
headers = {
"X-ELS-APIKey" : self.apiKey
}
r = requests.get(
self.__base_url + pathStr + queryStr,
headers = headers
)
return r
Add basic HTTP error handling.
|
import requests
class myElsClient:
"""A class that implements a Python interface to api.elsevier.com"""
# local variables
__base_url = "https://api.elsevier.com/"
# constructors
def __init__(self, apiKey):
"""Instantiates a client with a given API Key."""
self.apiKey = apiKey
# configuration functions
def setInstToken(self, instToken):
"""Sets an institutional token for customer authentication"""
self.instToken = instToken
# utility access functions
def getBaseURL(self):
"""Returns the base URL currently configured for Elsevier's APIs"""
return self.__base_url
# request/response execution functions
def execRequest(self,pathStr,queryStr):
"""Constructs and sends the actual request; returns response."""
headers = {
"X-ELS-APIKey" : self.apiKey
}
r = requests.get(
self.__base_url + pathStr + queryStr,
headers = headers
)
if r.status_code == 200:
return r
else:
print "error"
|
<commit_before>import requests
class myElsClient:
"""A class that implements a Python interface to api.elsevier.com"""
# local variables
__base_url = "https://api.elsevier.com/"
# constructors
def __init__(self, apiKey):
"""Instantiates a client with a given API Key."""
self.apiKey = apiKey
# configuration functions
def setInstToken(self, instToken):
"""Sets an institutional token for customer authentication"""
self.instToken = instToken
# utility access functions
def getBaseURL(self):
"""Returns the base URL currently configured for Elsevier's APIs"""
return self.__base_url
# request/response execution functions
def execRequest(self,pathStr,queryStr):
"""Constructs and send the actual request"""
headers = {
"X-ELS-APIKey" : self.apiKey
}
r = requests.get(
self.__base_url + pathStr + queryStr,
headers = headers
)
return r
<commit_msg>Add basic HTTP error handling.<commit_after>
|
import requests
class myElsClient:
"""A class that implements a Python interface to api.elsevier.com"""
# local variables
__base_url = "https://api.elsevier.com/"
# constructors
def __init__(self, apiKey):
"""Instantiates a client with a given API Key."""
self.apiKey = apiKey
# configuration functions
def setInstToken(self, instToken):
"""Sets an institutional token for customer authentication"""
self.instToken = instToken
# utility access functions
def getBaseURL(self):
"""Returns the base URL currently configured for Elsevier's APIs"""
return self.__base_url
# request/response execution functions
def execRequest(self,pathStr,queryStr):
"""Constructs and sends the actual request; returns response."""
headers = {
"X-ELS-APIKey" : self.apiKey
}
r = requests.get(
self.__base_url + pathStr + queryStr,
headers = headers
)
if r.status_code == 200:
return r
else:
print "error"
|
import requests
class myElsClient:
"""A class that implements a Python interface to api.elsevier.com"""
# local variables
__base_url = "https://api.elsevier.com/"
# constructors
def __init__(self, apiKey):
"""Instantiates a client with a given API Key."""
self.apiKey = apiKey
# configuration functions
def setInstToken(self, instToken):
"""Sets an institutional token for customer authentication"""
self.instToken = instToken
# utility access functions
def getBaseURL(self):
"""Returns the base URL currently configured for Elsevier's APIs"""
return self.__base_url
# request/response execution functions
def execRequest(self,pathStr,queryStr):
"""Constructs and send the actual request"""
headers = {
"X-ELS-APIKey" : self.apiKey
}
r = requests.get(
self.__base_url + pathStr + queryStr,
headers = headers
)
return r
Add basic HTTP error handling.import requests
class myElsClient:
"""A class that implements a Python interface to api.elsevier.com"""
# local variables
__base_url = "https://api.elsevier.com/"
# constructors
def __init__(self, apiKey):
"""Instantiates a client with a given API Key."""
self.apiKey = apiKey
# configuration functions
def setInstToken(self, instToken):
"""Sets an institutional token for customer authentication"""
self.instToken = instToken
# utility access functions
def getBaseURL(self):
"""Returns the base URL currently configured for Elsevier's APIs"""
return self.__base_url
# request/response execution functions
def execRequest(self,pathStr,queryStr):
"""Constructs and sends the actual request; returns response."""
headers = {
"X-ELS-APIKey" : self.apiKey
}
r = requests.get(
self.__base_url + pathStr + queryStr,
headers = headers
)
if r.status_code == 200:
return r
else:
print "error"
|
<commit_before>import requests
class myElsClient:
"""A class that implements a Python interface to api.elsevier.com"""
# local variables
__base_url = "https://api.elsevier.com/"
# constructors
def __init__(self, apiKey):
"""Instantiates a client with a given API Key."""
self.apiKey = apiKey
# configuration functions
def setInstToken(self, instToken):
"""Sets an institutional token for customer authentication"""
self.instToken = instToken
# utility access functions
def getBaseURL(self):
"""Returns the base URL currently configured for Elsevier's APIs"""
return self.__base_url
# request/response execution functions
def execRequest(self,pathStr,queryStr):
"""Constructs and send the actual request"""
headers = {
"X-ELS-APIKey" : self.apiKey
}
r = requests.get(
self.__base_url + pathStr + queryStr,
headers = headers
)
return r
<commit_msg>Add basic HTTP error handling.<commit_after>import requests
class myElsClient:
"""A class that implements a Python interface to api.elsevier.com"""
# local variables
__base_url = "https://api.elsevier.com/"
# constructors
def __init__(self, apiKey):
"""Instantiates a client with a given API Key."""
self.apiKey = apiKey
# configuration functions
def setInstToken(self, instToken):
"""Sets an institutional token for customer authentication"""
self.instToken = instToken
# utility access functions
def getBaseURL(self):
"""Returns the base URL currently configured for Elsevier's APIs"""
return self.__base_url
# request/response execution functions
def execRequest(self,pathStr,queryStr):
"""Constructs and sends the actual request; returns response."""
headers = {
"X-ELS-APIKey" : self.apiKey
}
r = requests.get(
self.__base_url + pathStr + queryStr,
headers = headers
)
if r.status_code == 200:
return r
else:
print "error"
|
e7eb0697f9362cc5ec5b8a21b064873eda6ed329
|
apps/basaltApp/scripts/photoDataExport.py
|
apps/basaltApp/scripts/photoDataExport.py
|
#! /usr/bin/env python
import django
from datetime import datetime
import pytz
django.setup()
from basaltApp.models import BasaltImageSet, BasaltSingleImage
from geocamTrack.utils import getClosestPosition
hawaiiStandardTime = pytz.timezone('US/Hawaii')
startTime = datetime(2016, 11, 8, 0, 0, 0, tzinfo=hawaiiStandardTime)
endTime = datetime(2016, 11, 9, 0, 0, 0, tzinfo=hawaiiStandardTime)
imgList = BasaltImageSet.objects.filter(creation_time__gte=startTime).filter(creation_time__lte=endTime)
print "Found %d images." % imgList.count()
for img in imgList:
if img.flight:
print img.flight.name
singleImages = img.images
print " Images:"
for si in singleImages.all():
print " Thumb: %s (%s x %s)" % (si.thumbnail, si.width, si.height)
else:
print "<none>"
|
#! /usr/bin/env python
import django
from datetime import datetime
import pytz
django.setup()
from basaltApp.models import BasaltImageSet, BasaltSingleImage, BasaltResource
from geocamTrack.utils import getClosestPosition
hawaiiStandardTime = pytz.timezone('US/Hawaii')
startTime = datetime(2016, 11, 8, 0, 0, 0, tzinfo=hawaiiStandardTime)
endTime = datetime(2016, 11, 9, 0, 0, 0, tzinfo=hawaiiStandardTime)
ev1Resource = BasaltResource.objects.get(name="EV1")
print "Resource:", ev1Resource
imgList = BasaltImageSet.objects.filter(acquisition_time__gte=startTime).filter(acquisition_time__lte=endTime)
print "Found %d images." % imgList.count()
for img in imgList:
position = getClosestPosition(resource=ev1Resource, timestamp=img.acquisition_time)
if img.flight:
print "F: %s, N: %s, P: %s" % (img.flight.name, img.name, position)
singleImages = img.images
print " Images:"
for si in singleImages.all():
print " Thumb: %s (%s x %s)" % (si.thumbnail, si.width, si.height)
else:
position = getClosestPosition(resource=ev1Resource, timestamp=img.acquisition_time)
print "F: %s, N: %s, P: %s" % ("<none>", img.name, position)
singleImages = img.images
print " Images:"
for si in singleImages.all():
print " Thumb: %s (%s x %s)" % (si.thumbnail, si.width, si.height)
|
Add postion lookup stuff to script. Use acquistion_time for timestamp
|
Add postion lookup stuff to script. Use acquistion_time for timestamp
|
Python
|
apache-2.0
|
xgds/xgds_basalt,xgds/xgds_basalt,xgds/xgds_basalt,xgds/xgds_basalt
|
#! /usr/bin/env python
import django
from datetime import datetime
import pytz
django.setup()
from basaltApp.models import BasaltImageSet, BasaltSingleImage
from geocamTrack.utils import getClosestPosition
hawaiiStandardTime = pytz.timezone('US/Hawaii')
startTime = datetime(2016, 11, 8, 0, 0, 0, tzinfo=hawaiiStandardTime)
endTime = datetime(2016, 11, 9, 0, 0, 0, tzinfo=hawaiiStandardTime)
imgList = BasaltImageSet.objects.filter(creation_time__gte=startTime).filter(creation_time__lte=endTime)
print "Found %d images." % imgList.count()
for img in imgList:
if img.flight:
print img.flight.name
singleImages = img.images
print " Images:"
for si in singleImages.all():
print " Thumb: %s (%s x %s)" % (si.thumbnail, si.width, si.height)
else:
print "<none>"
Add postion lookup stuff to script. Use acquistion_time for timestamp
|
#! /usr/bin/env python
import django
from datetime import datetime
import pytz
django.setup()
from basaltApp.models import BasaltImageSet, BasaltSingleImage, BasaltResource
from geocamTrack.utils import getClosestPosition
hawaiiStandardTime = pytz.timezone('US/Hawaii')
startTime = datetime(2016, 11, 8, 0, 0, 0, tzinfo=hawaiiStandardTime)
endTime = datetime(2016, 11, 9, 0, 0, 0, tzinfo=hawaiiStandardTime)
ev1Resource = BasaltResource.objects.get(name="EV1")
print "Resource:", ev1Resource
imgList = BasaltImageSet.objects.filter(acquisition_time__gte=startTime).filter(acquisition_time__lte=endTime)
print "Found %d images." % imgList.count()
for img in imgList:
position = getClosestPosition(resource=ev1Resource, timestamp=img.acquisition_time)
if img.flight:
print "F: %s, N: %s, P: %s" % (img.flight.name, img.name, position)
singleImages = img.images
print " Images:"
for si in singleImages.all():
print " Thumb: %s (%s x %s)" % (si.thumbnail, si.width, si.height)
else:
position = getClosestPosition(resource=ev1Resource, timestamp=img.acquisition_time)
print "F: %s, N: %s, P: %s" % ("<none>", img.name, position)
singleImages = img.images
print " Images:"
for si in singleImages.all():
print " Thumb: %s (%s x %s)" % (si.thumbnail, si.width, si.height)
|
<commit_before>#! /usr/bin/env python
import django
from datetime import datetime
import pytz
django.setup()
from basaltApp.models import BasaltImageSet, BasaltSingleImage
from geocamTrack.utils import getClosestPosition
hawaiiStandardTime = pytz.timezone('US/Hawaii')
startTime = datetime(2016, 11, 8, 0, 0, 0, tzinfo=hawaiiStandardTime)
endTime = datetime(2016, 11, 9, 0, 0, 0, tzinfo=hawaiiStandardTime)
imgList = BasaltImageSet.objects.filter(creation_time__gte=startTime).filter(creation_time__lte=endTime)
print "Found %d images." % imgList.count()
for img in imgList:
if img.flight:
print img.flight.name
singleImages = img.images
print " Images:"
for si in singleImages.all():
print " Thumb: %s (%s x %s)" % (si.thumbnail, si.width, si.height)
else:
print "<none>"
<commit_msg>Add postion lookup stuff to script. Use acquistion_time for timestamp<commit_after>
|
#! /usr/bin/env python
import django
from datetime import datetime
import pytz
django.setup()
from basaltApp.models import BasaltImageSet, BasaltSingleImage, BasaltResource
from geocamTrack.utils import getClosestPosition
hawaiiStandardTime = pytz.timezone('US/Hawaii')
startTime = datetime(2016, 11, 8, 0, 0, 0, tzinfo=hawaiiStandardTime)
endTime = datetime(2016, 11, 9, 0, 0, 0, tzinfo=hawaiiStandardTime)
ev1Resource = BasaltResource.objects.get(name="EV1")
print "Resource:", ev1Resource
imgList = BasaltImageSet.objects.filter(acquisition_time__gte=startTime).filter(acquisition_time__lte=endTime)
print "Found %d images." % imgList.count()
for img in imgList:
position = getClosestPosition(resource=ev1Resource, timestamp=img.acquisition_time)
if img.flight:
print "F: %s, N: %s, P: %s" % (img.flight.name, img.name, position)
singleImages = img.images
print " Images:"
for si in singleImages.all():
print " Thumb: %s (%s x %s)" % (si.thumbnail, si.width, si.height)
else:
position = getClosestPosition(resource=ev1Resource, timestamp=img.acquisition_time)
print "F: %s, N: %s, P: %s" % ("<none>", img.name, position)
singleImages = img.images
print " Images:"
for si in singleImages.all():
print " Thumb: %s (%s x %s)" % (si.thumbnail, si.width, si.height)
|
#! /usr/bin/env python
import django
from datetime import datetime
import pytz
django.setup()
from basaltApp.models import BasaltImageSet, BasaltSingleImage
from geocamTrack.utils import getClosestPosition
hawaiiStandardTime = pytz.timezone('US/Hawaii')
startTime = datetime(2016, 11, 8, 0, 0, 0, tzinfo=hawaiiStandardTime)
endTime = datetime(2016, 11, 9, 0, 0, 0, tzinfo=hawaiiStandardTime)
imgList = BasaltImageSet.objects.filter(creation_time__gte=startTime).filter(creation_time__lte=endTime)
print "Found %d images." % imgList.count()
for img in imgList:
if img.flight:
print img.flight.name
singleImages = img.images
print " Images:"
for si in singleImages.all():
print " Thumb: %s (%s x %s)" % (si.thumbnail, si.width, si.height)
else:
print "<none>"
Add postion lookup stuff to script. Use acquistion_time for timestamp#! /usr/bin/env python
import django
from datetime import datetime
import pytz
django.setup()
from basaltApp.models import BasaltImageSet, BasaltSingleImage, BasaltResource
from geocamTrack.utils import getClosestPosition
hawaiiStandardTime = pytz.timezone('US/Hawaii')
startTime = datetime(2016, 11, 8, 0, 0, 0, tzinfo=hawaiiStandardTime)
endTime = datetime(2016, 11, 9, 0, 0, 0, tzinfo=hawaiiStandardTime)
ev1Resource = BasaltResource.objects.get(name="EV1")
print "Resource:", ev1Resource
imgList = BasaltImageSet.objects.filter(acquisition_time__gte=startTime).filter(acquisition_time__lte=endTime)
print "Found %d images." % imgList.count()
for img in imgList:
position = getClosestPosition(resource=ev1Resource, timestamp=img.acquisition_time)
if img.flight:
print "F: %s, N: %s, P: %s" % (img.flight.name, img.name, position)
singleImages = img.images
print " Images:"
for si in singleImages.all():
print " Thumb: %s (%s x %s)" % (si.thumbnail, si.width, si.height)
else:
position = getClosestPosition(resource=ev1Resource, timestamp=img.acquisition_time)
print "F: %s, N: %s, P: %s" % ("<none>", img.name, position)
singleImages = img.images
print " Images:"
for si in singleImages.all():
print " Thumb: %s (%s x %s)" % (si.thumbnail, si.width, si.height)
|
<commit_before>#! /usr/bin/env python
import django
from datetime import datetime
import pytz
django.setup()
from basaltApp.models import BasaltImageSet, BasaltSingleImage
from geocamTrack.utils import getClosestPosition
hawaiiStandardTime = pytz.timezone('US/Hawaii')
startTime = datetime(2016, 11, 8, 0, 0, 0, tzinfo=hawaiiStandardTime)
endTime = datetime(2016, 11, 9, 0, 0, 0, tzinfo=hawaiiStandardTime)
imgList = BasaltImageSet.objects.filter(creation_time__gte=startTime).filter(creation_time__lte=endTime)
print "Found %d images." % imgList.count()
for img in imgList:
if img.flight:
print img.flight.name
singleImages = img.images
print " Images:"
for si in singleImages.all():
print " Thumb: %s (%s x %s)" % (si.thumbnail, si.width, si.height)
else:
print "<none>"
<commit_msg>Add postion lookup stuff to script. Use acquistion_time for timestamp<commit_after>#! /usr/bin/env python
import django
from datetime import datetime
import pytz
django.setup()
from basaltApp.models import BasaltImageSet, BasaltSingleImage, BasaltResource
from geocamTrack.utils import getClosestPosition
hawaiiStandardTime = pytz.timezone('US/Hawaii')
startTime = datetime(2016, 11, 8, 0, 0, 0, tzinfo=hawaiiStandardTime)
endTime = datetime(2016, 11, 9, 0, 0, 0, tzinfo=hawaiiStandardTime)
ev1Resource = BasaltResource.objects.get(name="EV1")
print "Resource:", ev1Resource
imgList = BasaltImageSet.objects.filter(acquisition_time__gte=startTime).filter(acquisition_time__lte=endTime)
print "Found %d images." % imgList.count()
for img in imgList:
position = getClosestPosition(resource=ev1Resource, timestamp=img.acquisition_time)
if img.flight:
print "F: %s, N: %s, P: %s" % (img.flight.name, img.name, position)
singleImages = img.images
print " Images:"
for si in singleImages.all():
print " Thumb: %s (%s x %s)" % (si.thumbnail, si.width, si.height)
else:
position = getClosestPosition(resource=ev1Resource, timestamp=img.acquisition_time)
print "F: %s, N: %s, P: %s" % ("<none>", img.name, position)
singleImages = img.images
print " Images:"
for si in singleImages.all():
print " Thumb: %s (%s x %s)" % (si.thumbnail, si.width, si.height)
|
d12401b25c3ec8a2087601d5fb85731fba77be04
|
wellsfargo/tasks.py
|
wellsfargo/tasks.py
|
from __future__ import absolute_import
from celery import shared_task
from django.core.exceptions import ValidationError
from wellsfargo.connector import actions
from wellsfargo.models import AccountMetadata
import logging
logger = logging.getLogger(__name__)
@shared_task(bind=True, ignore_result=True)
def reconcile_accounts(self):
for m in AccountMetadata.objects.order_by('account_id').all():
account = m.account
logger.info('Reconciling account %s' % account)
try:
resp = actions.submit_inquiry(account)
resp.reconcile()
except ValidationError as e:
logging.error('Failed to reconcile account %s due to ValidationError[%s]' % (account, e.message))
|
from __future__ import absolute_import
from celery import shared_task
from django.core.exceptions import ValidationError
from wellsfargo.connector import actions
from wellsfargo.models import AccountMetadata
import logging
logger = logging.getLogger(__name__)
@shared_task(bind=True, ignore_result=True)
def reconcile_accounts(self):
for m in AccountMetadata.objects.order_by('account_id').all():
account = m.account
logger.info('Reconciling account %s' % account)
try:
resp = actions.submit_inquiry(account)
resp.reconcile()
except ValidationError as e:
logging.warning('Failed to reconcile account %s due to ValidationError[%s]' % (account, e.message))
|
Downgrade 'Failed to reconcile' from error to warning
|
Downgrade 'Failed to reconcile' from error to warning
|
Python
|
isc
|
thelabnyc/django-oscar-wfrs,thelabnyc/django-oscar-wfrs
|
from __future__ import absolute_import
from celery import shared_task
from django.core.exceptions import ValidationError
from wellsfargo.connector import actions
from wellsfargo.models import AccountMetadata
import logging
logger = logging.getLogger(__name__)
@shared_task(bind=True, ignore_result=True)
def reconcile_accounts(self):
for m in AccountMetadata.objects.order_by('account_id').all():
account = m.account
logger.info('Reconciling account %s' % account)
try:
resp = actions.submit_inquiry(account)
resp.reconcile()
except ValidationError as e:
logging.error('Failed to reconcile account %s due to ValidationError[%s]' % (account, e.message))
Downgrade 'Failed to reconcile' from error to warning
|
from __future__ import absolute_import
from celery import shared_task
from django.core.exceptions import ValidationError
from wellsfargo.connector import actions
from wellsfargo.models import AccountMetadata
import logging
logger = logging.getLogger(__name__)
@shared_task(bind=True, ignore_result=True)
def reconcile_accounts(self):
for m in AccountMetadata.objects.order_by('account_id').all():
account = m.account
logger.info('Reconciling account %s' % account)
try:
resp = actions.submit_inquiry(account)
resp.reconcile()
except ValidationError as e:
logging.warning('Failed to reconcile account %s due to ValidationError[%s]' % (account, e.message))
|
<commit_before>from __future__ import absolute_import
from celery import shared_task
from django.core.exceptions import ValidationError
from wellsfargo.connector import actions
from wellsfargo.models import AccountMetadata
import logging
logger = logging.getLogger(__name__)
@shared_task(bind=True, ignore_result=True)
def reconcile_accounts(self):
for m in AccountMetadata.objects.order_by('account_id').all():
account = m.account
logger.info('Reconciling account %s' % account)
try:
resp = actions.submit_inquiry(account)
resp.reconcile()
except ValidationError as e:
logging.error('Failed to reconcile account %s due to ValidationError[%s]' % (account, e.message))
<commit_msg>Downgrade 'Failed to reconcile' from error to warning<commit_after>
|
from __future__ import absolute_import
from celery import shared_task
from django.core.exceptions import ValidationError
from wellsfargo.connector import actions
from wellsfargo.models import AccountMetadata
import logging
logger = logging.getLogger(__name__)
@shared_task(bind=True, ignore_result=True)
def reconcile_accounts(self):
for m in AccountMetadata.objects.order_by('account_id').all():
account = m.account
logger.info('Reconciling account %s' % account)
try:
resp = actions.submit_inquiry(account)
resp.reconcile()
except ValidationError as e:
logging.warning('Failed to reconcile account %s due to ValidationError[%s]' % (account, e.message))
|
from __future__ import absolute_import
from celery import shared_task
from django.core.exceptions import ValidationError
from wellsfargo.connector import actions
from wellsfargo.models import AccountMetadata
import logging
logger = logging.getLogger(__name__)
@shared_task(bind=True, ignore_result=True)
def reconcile_accounts(self):
for m in AccountMetadata.objects.order_by('account_id').all():
account = m.account
logger.info('Reconciling account %s' % account)
try:
resp = actions.submit_inquiry(account)
resp.reconcile()
except ValidationError as e:
logging.error('Failed to reconcile account %s due to ValidationError[%s]' % (account, e.message))
Downgrade 'Failed to reconcile' from error to warningfrom __future__ import absolute_import
from celery import shared_task
from django.core.exceptions import ValidationError
from wellsfargo.connector import actions
from wellsfargo.models import AccountMetadata
import logging
logger = logging.getLogger(__name__)
@shared_task(bind=True, ignore_result=True)
def reconcile_accounts(self):
for m in AccountMetadata.objects.order_by('account_id').all():
account = m.account
logger.info('Reconciling account %s' % account)
try:
resp = actions.submit_inquiry(account)
resp.reconcile()
except ValidationError as e:
logging.warning('Failed to reconcile account %s due to ValidationError[%s]' % (account, e.message))
|
<commit_before>from __future__ import absolute_import
from celery import shared_task
from django.core.exceptions import ValidationError
from wellsfargo.connector import actions
from wellsfargo.models import AccountMetadata
import logging
logger = logging.getLogger(__name__)
@shared_task(bind=True, ignore_result=True)
def reconcile_accounts(self):
for m in AccountMetadata.objects.order_by('account_id').all():
account = m.account
logger.info('Reconciling account %s' % account)
try:
resp = actions.submit_inquiry(account)
resp.reconcile()
except ValidationError as e:
logging.error('Failed to reconcile account %s due to ValidationError[%s]' % (account, e.message))
<commit_msg>Downgrade 'Failed to reconcile' from error to warning<commit_after>from __future__ import absolute_import
from celery import shared_task
from django.core.exceptions import ValidationError
from wellsfargo.connector import actions
from wellsfargo.models import AccountMetadata
import logging
logger = logging.getLogger(__name__)
@shared_task(bind=True, ignore_result=True)
def reconcile_accounts(self):
for m in AccountMetadata.objects.order_by('account_id').all():
account = m.account
logger.info('Reconciling account %s' % account)
try:
resp = actions.submit_inquiry(account)
resp.reconcile()
except ValidationError as e:
logging.warning('Failed to reconcile account %s due to ValidationError[%s]' % (account, e.message))
|
d3a9657b7318327a59c3eee08a25f1e5c4ba4edf
|
django_casscache.py
|
django_casscache.py
|
"""
django_casscache
~~~~~~~~~~~~~~~~
:copyright: (c) 2013 by Matt Robenolt.
:license: BSD, see LICENSE for more details.
"""
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
|
"""
django_casscache
~~~~~~~~~~~~~~~~
:copyright: (c) 2013 by Matt Robenolt.
:license: BSD, see LICENSE for more details.
"""
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
def noop_make_key(key, *args, **kwargs):
"""
For use with KEY_FUNCTION, to not alter the key name at all.
"""
return key
|
Add a method to noop the make_key in Django
|
Add a method to noop the make_key in Django
|
Python
|
bsd-3-clause
|
mattrobenolt/django-casscache
|
"""
django_casscache
~~~~~~~~~~~~~~~~
:copyright: (c) 2013 by Matt Robenolt.
:license: BSD, see LICENSE for more details.
"""
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
Add a method to noop the make_key in Django
|
"""
django_casscache
~~~~~~~~~~~~~~~~
:copyright: (c) 2013 by Matt Robenolt.
:license: BSD, see LICENSE for more details.
"""
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
def noop_make_key(key, *args, **kwargs):
"""
For use with KEY_FUNCTION, to not alter the key name at all.
"""
return key
|
<commit_before>"""
django_casscache
~~~~~~~~~~~~~~~~
:copyright: (c) 2013 by Matt Robenolt.
:license: BSD, see LICENSE for more details.
"""
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
<commit_msg>Add a method to noop the make_key in Django<commit_after>
|
"""
django_casscache
~~~~~~~~~~~~~~~~
:copyright: (c) 2013 by Matt Robenolt.
:license: BSD, see LICENSE for more details.
"""
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
def noop_make_key(key, *args, **kwargs):
"""
For use with KEY_FUNCTION, to not alter the key name at all.
"""
return key
|
"""
django_casscache
~~~~~~~~~~~~~~~~
:copyright: (c) 2013 by Matt Robenolt.
:license: BSD, see LICENSE for more details.
"""
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
Add a method to noop the make_key in Django"""
django_casscache
~~~~~~~~~~~~~~~~
:copyright: (c) 2013 by Matt Robenolt.
:license: BSD, see LICENSE for more details.
"""
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
def noop_make_key(key, *args, **kwargs):
"""
For use with KEY_FUNCTION, to not alter the key name at all.
"""
return key
|
<commit_before>"""
django_casscache
~~~~~~~~~~~~~~~~
:copyright: (c) 2013 by Matt Robenolt.
:license: BSD, see LICENSE for more details.
"""
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
<commit_msg>Add a method to noop the make_key in Django<commit_after>"""
django_casscache
~~~~~~~~~~~~~~~~
:copyright: (c) 2013 by Matt Robenolt.
:license: BSD, see LICENSE for more details.
"""
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
def noop_make_key(key, *args, **kwargs):
"""
For use with KEY_FUNCTION, to not alter the key name at all.
"""
return key
|
8a25b5f76ffe5b32f6c1a8d691c3d78ce3fb07c8
|
fluent_contents/utils/search.py
|
fluent_contents/utils/search.py
|
"""
Internal utils for search.
"""
from django.utils.encoding import force_unicode
from django.utils.html import strip_tags
import six
def get_search_field_values(contentitem):
"""
Extract the search fields from the model.
"""
plugin = contentitem.plugin
values = []
for field_name in plugin.search_fields:
value = getattr(contentitem, field_name)
# Just assume all strings may contain HTML.
# Not checking for just the PluginHtmlField here.
if value and isinstance(value, six.string_types):
value = get_cleaned_string(value)
values.append(value)
return values
def get_search_text(contentitem):
bits = get_search_field_values(contentitem)
return clean_join(u" ", bits)
def get_cleaned_string(data):
"""
Cleanup a string/HTML output to consist of words only.
"""
return strip_tags(force_unicode(data))
def clean_join(separator, iterable):
"""
Filters out iterable to only join non empty items.
"""
return separator.join(filter(None, iterable))
#def get_cleaned_bits(data):
# return smart_split(get_cleaned_bits(data))
|
"""
Internal utils for search.
"""
from django.utils.encoding import force_text
from django.utils.html import strip_tags
import six
def get_search_field_values(contentitem):
"""
Extract the search fields from the model.
"""
plugin = contentitem.plugin
values = []
for field_name in plugin.search_fields:
value = getattr(contentitem, field_name)
# Just assume all strings may contain HTML.
# Not checking for just the PluginHtmlField here.
if value and isinstance(value, six.string_types):
value = get_cleaned_string(value)
values.append(value)
return values
def get_search_text(contentitem):
bits = get_search_field_values(contentitem)
return clean_join(u" ", bits)
def get_cleaned_string(data):
"""
Cleanup a string/HTML output to consist of words only.
"""
return strip_tags(force_text(data))
def clean_join(separator, iterable):
"""
Filters out iterable to only join non empty items.
"""
return separator.join(filter(None, iterable))
#def get_cleaned_bits(data):
# return smart_split(get_cleaned_bits(data))
|
Fix force_unicode for Python 3, use force_text()
|
Fix force_unicode for Python 3, use force_text()
|
Python
|
apache-2.0
|
django-fluent/django-fluent-contents,django-fluent/django-fluent-contents,edoburu/django-fluent-contents,edoburu/django-fluent-contents,django-fluent/django-fluent-contents,edoburu/django-fluent-contents
|
"""
Internal utils for search.
"""
from django.utils.encoding import force_unicode
from django.utils.html import strip_tags
import six
def get_search_field_values(contentitem):
"""
Extract the search fields from the model.
"""
plugin = contentitem.plugin
values = []
for field_name in plugin.search_fields:
value = getattr(contentitem, field_name)
# Just assume all strings may contain HTML.
# Not checking for just the PluginHtmlField here.
if value and isinstance(value, six.string_types):
value = get_cleaned_string(value)
values.append(value)
return values
def get_search_text(contentitem):
bits = get_search_field_values(contentitem)
return clean_join(u" ", bits)
def get_cleaned_string(data):
"""
Cleanup a string/HTML output to consist of words only.
"""
return strip_tags(force_unicode(data))
def clean_join(separator, iterable):
"""
Filters out iterable to only join non empty items.
"""
return separator.join(filter(None, iterable))
#def get_cleaned_bits(data):
# return smart_split(get_cleaned_bits(data))
Fix force_unicode for Python 3, use force_text()
|
"""
Internal utils for search.
"""
from django.utils.encoding import force_text
from django.utils.html import strip_tags
import six
def get_search_field_values(contentitem):
"""
Extract the search fields from the model.
"""
plugin = contentitem.plugin
values = []
for field_name in plugin.search_fields:
value = getattr(contentitem, field_name)
# Just assume all strings may contain HTML.
# Not checking for just the PluginHtmlField here.
if value and isinstance(value, six.string_types):
value = get_cleaned_string(value)
values.append(value)
return values
def get_search_text(contentitem):
bits = get_search_field_values(contentitem)
return clean_join(u" ", bits)
def get_cleaned_string(data):
"""
Cleanup a string/HTML output to consist of words only.
"""
return strip_tags(force_text(data))
def clean_join(separator, iterable):
"""
Filters out iterable to only join non empty items.
"""
return separator.join(filter(None, iterable))
#def get_cleaned_bits(data):
# return smart_split(get_cleaned_bits(data))
|
<commit_before>"""
Internal utils for search.
"""
from django.utils.encoding import force_unicode
from django.utils.html import strip_tags
import six
def get_search_field_values(contentitem):
"""
Extract the search fields from the model.
"""
plugin = contentitem.plugin
values = []
for field_name in plugin.search_fields:
value = getattr(contentitem, field_name)
# Just assume all strings may contain HTML.
# Not checking for just the PluginHtmlField here.
if value and isinstance(value, six.string_types):
value = get_cleaned_string(value)
values.append(value)
return values
def get_search_text(contentitem):
bits = get_search_field_values(contentitem)
return clean_join(u" ", bits)
def get_cleaned_string(data):
"""
Cleanup a string/HTML output to consist of words only.
"""
return strip_tags(force_unicode(data))
def clean_join(separator, iterable):
"""
Filters out iterable to only join non empty items.
"""
return separator.join(filter(None, iterable))
#def get_cleaned_bits(data):
# return smart_split(get_cleaned_bits(data))
<commit_msg>Fix force_unicode for Python 3, use force_text()<commit_after>
|
"""
Internal utils for search.
"""
from django.utils.encoding import force_text
from django.utils.html import strip_tags
import six
def get_search_field_values(contentitem):
"""
Extract the search fields from the model.
"""
plugin = contentitem.plugin
values = []
for field_name in plugin.search_fields:
value = getattr(contentitem, field_name)
# Just assume all strings may contain HTML.
# Not checking for just the PluginHtmlField here.
if value and isinstance(value, six.string_types):
value = get_cleaned_string(value)
values.append(value)
return values
def get_search_text(contentitem):
bits = get_search_field_values(contentitem)
return clean_join(u" ", bits)
def get_cleaned_string(data):
"""
Cleanup a string/HTML output to consist of words only.
"""
return strip_tags(force_text(data))
def clean_join(separator, iterable):
"""
Filters out iterable to only join non empty items.
"""
return separator.join(filter(None, iterable))
#def get_cleaned_bits(data):
# return smart_split(get_cleaned_bits(data))
|
"""
Internal utils for search.
"""
from django.utils.encoding import force_unicode
from django.utils.html import strip_tags
import six
def get_search_field_values(contentitem):
"""
Extract the search fields from the model.
"""
plugin = contentitem.plugin
values = []
for field_name in plugin.search_fields:
value = getattr(contentitem, field_name)
# Just assume all strings may contain HTML.
# Not checking for just the PluginHtmlField here.
if value and isinstance(value, six.string_types):
value = get_cleaned_string(value)
values.append(value)
return values
def get_search_text(contentitem):
bits = get_search_field_values(contentitem)
return clean_join(u" ", bits)
def get_cleaned_string(data):
"""
Cleanup a string/HTML output to consist of words only.
"""
return strip_tags(force_unicode(data))
def clean_join(separator, iterable):
"""
Filters out iterable to only join non empty items.
"""
return separator.join(filter(None, iterable))
#def get_cleaned_bits(data):
# return smart_split(get_cleaned_bits(data))
Fix force_unicode for Python 3, use force_text()"""
Internal utils for search.
"""
from django.utils.encoding import force_text
from django.utils.html import strip_tags
import six
def get_search_field_values(contentitem):
"""
Extract the search fields from the model.
"""
plugin = contentitem.plugin
values = []
for field_name in plugin.search_fields:
value = getattr(contentitem, field_name)
# Just assume all strings may contain HTML.
# Not checking for just the PluginHtmlField here.
if value and isinstance(value, six.string_types):
value = get_cleaned_string(value)
values.append(value)
return values
def get_search_text(contentitem):
bits = get_search_field_values(contentitem)
return clean_join(u" ", bits)
def get_cleaned_string(data):
"""
Cleanup a string/HTML output to consist of words only.
"""
return strip_tags(force_text(data))
def clean_join(separator, iterable):
"""
Filters out iterable to only join non empty items.
"""
return separator.join(filter(None, iterable))
#def get_cleaned_bits(data):
# return smart_split(get_cleaned_bits(data))
|
<commit_before>"""
Internal utils for search.
"""
from django.utils.encoding import force_unicode
from django.utils.html import strip_tags
import six
def get_search_field_values(contentitem):
"""
Extract the search fields from the model.
"""
plugin = contentitem.plugin
values = []
for field_name in plugin.search_fields:
value = getattr(contentitem, field_name)
# Just assume all strings may contain HTML.
# Not checking for just the PluginHtmlField here.
if value and isinstance(value, six.string_types):
value = get_cleaned_string(value)
values.append(value)
return values
def get_search_text(contentitem):
bits = get_search_field_values(contentitem)
return clean_join(u" ", bits)
def get_cleaned_string(data):
"""
Cleanup a string/HTML output to consist of words only.
"""
return strip_tags(force_unicode(data))
def clean_join(separator, iterable):
"""
Filters out iterable to only join non empty items.
"""
return separator.join(filter(None, iterable))
#def get_cleaned_bits(data):
# return smart_split(get_cleaned_bits(data))
<commit_msg>Fix force_unicode for Python 3, use force_text()<commit_after>"""
Internal utils for search.
"""
from django.utils.encoding import force_text
from django.utils.html import strip_tags
import six
def get_search_field_values(contentitem):
"""
Extract the search fields from the model.
"""
plugin = contentitem.plugin
values = []
for field_name in plugin.search_fields:
value = getattr(contentitem, field_name)
# Just assume all strings may contain HTML.
# Not checking for just the PluginHtmlField here.
if value and isinstance(value, six.string_types):
value = get_cleaned_string(value)
values.append(value)
return values
def get_search_text(contentitem):
bits = get_search_field_values(contentitem)
return clean_join(u" ", bits)
def get_cleaned_string(data):
"""
Cleanup a string/HTML output to consist of words only.
"""
return strip_tags(force_text(data))
def clean_join(separator, iterable):
"""
Filters out iterable to only join non empty items.
"""
return separator.join(filter(None, iterable))
#def get_cleaned_bits(data):
# return smart_split(get_cleaned_bits(data))
|
2685b94838c8ec7ce31da60bc6f28953152c788a
|
pixelmap/pixelmap.py
|
pixelmap/pixelmap.py
|
"""Pixelmap
Cool pixelmap of Pixels.
Last updated: March 7, 2017
"""
from pixel import Pixel
class Pixelmap:
def __init__(self, width, height):
"""Pixelmap constructor
:param width: Width of map in pixels.
:param height: Height of map in pixels.
"""
self.width = width
self.height = height
self.map_matrix = [[0]*self.width for _ in range(self.height)]
for row in range(self.height):
for col in range(self.width):
self.map_matrix[row][col] = Pixel()
def __str__(self):
"""Human readable pixelmap description.
Pretty much just the matrix.
:return: Description of pixelmap.
"""
return str('\n'.join([''.join(['{:4}'.format(item) for item in row]) for row in self.map_matrix]))
def __repr__(self):
"""Internal representation
Just use str for now.
"""
return self.__str__()
|
"""Pixelmap
Cool pixelmap of Pixels.
Last updated: March 11, 2017
"""
from .pixel import Pixel
class Pixelmap:
def __init__(self, cols, rows, default_val=None):
"""Pixelmap constructor
:param cols: Width of map in pixels.
:param rows: Height of map in pixels.
:param default_val: Default value for pixels.
"""
assert cols >= 0, 'Invalid Pixelmap width'
assert rows >= 0, 'Invalid Pixelmap height'
self.cols = cols
self.rows = rows
self.map_matrix = [[0]*self.cols for _ in range(self.rows)]
for row in range(self.rows):
for col in range(self.cols):
self.map_matrix[row][col] = Pixel(default_val)
def num_cols(self):
return self.cols
def num_rows(self):
return self.rows
def __str__(self):
"""Human readable pixelmap description.
Pretty much just the matrix.
:return: Description of pixelmap.
"""
return str('\n'.join([''.join(['{:6}'.format(str(item)) for item in row]) for row in self.map_matrix]))
|
Add default value for matrix and methods to get columns and rows.
|
Add default value for matrix and methods to get columns and rows.
|
Python
|
mit
|
yebra06/pixelmap
|
"""Pixelmap
Cool pixelmap of Pixels.
Last updated: March 7, 2017
"""
from pixel import Pixel
class Pixelmap:
def __init__(self, width, height):
"""Pixelmap constructor
:param width: Width of map in pixels.
:param height: Height of map in pixels.
"""
self.width = width
self.height = height
self.map_matrix = [[0]*self.width for _ in range(self.height)]
for row in range(self.height):
for col in range(self.width):
self.map_matrix[row][col] = Pixel()
def __str__(self):
"""Human readable pixelmap description.
Pretty much just the matrix.
:return: Description of pixelmap.
"""
return str('\n'.join([''.join(['{:4}'.format(item) for item in row]) for row in self.map_matrix]))
def __repr__(self):
"""Internal representation
Just use str for now.
"""
return self.__str__()
Add default value for matrix and methods to get columns and rows.
|
"""Pixelmap
Cool pixelmap of Pixels.
Last updated: March 11, 2017
"""
from .pixel import Pixel
class Pixelmap:
def __init__(self, cols, rows, default_val=None):
"""Pixelmap constructor
:param cols: Width of map in pixels.
:param rows: Height of map in pixels.
:param default_val: Default value for pixels.
"""
assert cols >= 0, 'Invalid Pixelmap width'
assert rows >= 0, 'Invalid Pixelmap height'
self.cols = cols
self.rows = rows
self.map_matrix = [[0]*self.cols for _ in range(self.rows)]
for row in range(self.rows):
for col in range(self.cols):
self.map_matrix[row][col] = Pixel(default_val)
def num_cols(self):
return self.cols
def num_rows(self):
return self.rows
def __str__(self):
"""Human readable pixelmap description.
Pretty much just the matrix.
:return: Description of pixelmap.
"""
return str('\n'.join([''.join(['{:6}'.format(str(item)) for item in row]) for row in self.map_matrix]))
|
<commit_before>"""Pixelmap
Cool pixelmap of Pixels.
Last updated: March 7, 2017
"""
from pixel import Pixel
class Pixelmap:
def __init__(self, width, height):
"""Pixelmap constructor
:param width: Width of map in pixels.
:param height: Height of map in pixels.
"""
self.width = width
self.height = height
self.map_matrix = [[0]*self.width for _ in range(self.height)]
for row in range(self.height):
for col in range(self.width):
self.map_matrix[row][col] = Pixel()
def __str__(self):
"""Human readable pixelmap description.
Pretty much just the matrix.
:return: Description of pixelmap.
"""
return str('\n'.join([''.join(['{:4}'.format(item) for item in row]) for row in self.map_matrix]))
def __repr__(self):
"""Internal representation
Just use str for now.
"""
return self.__str__()
<commit_msg>Add default value for matrix and methods to get columns and rows.<commit_after>
|
"""Pixelmap
Cool pixelmap of Pixels.
Last updated: March 11, 2017
"""
from .pixel import Pixel
class Pixelmap:
def __init__(self, cols, rows, default_val=None):
"""Pixelmap constructor
:param cols: Width of map in pixels.
:param rows: Height of map in pixels.
:param default_val: Default value for pixels.
"""
assert cols >= 0, 'Invalid Pixelmap width'
assert rows >= 0, 'Invalid Pixelmap height'
self.cols = cols
self.rows = rows
self.map_matrix = [[0]*self.cols for _ in range(self.rows)]
for row in range(self.rows):
for col in range(self.cols):
self.map_matrix[row][col] = Pixel(default_val)
def num_cols(self):
return self.cols
def num_rows(self):
return self.rows
def __str__(self):
"""Human readable pixelmap description.
Pretty much just the matrix.
:return: Description of pixelmap.
"""
return str('\n'.join([''.join(['{:6}'.format(str(item)) for item in row]) for row in self.map_matrix]))
|
"""Pixelmap
Cool pixelmap of Pixels.
Last updated: March 7, 2017
"""
from pixel import Pixel
class Pixelmap:
def __init__(self, width, height):
"""Pixelmap constructor
:param width: Width of map in pixels.
:param height: Height of map in pixels.
"""
self.width = width
self.height = height
self.map_matrix = [[0]*self.width for _ in range(self.height)]
for row in range(self.height):
for col in range(self.width):
self.map_matrix[row][col] = Pixel()
def __str__(self):
"""Human readable pixelmap description.
Pretty much just the matrix.
:return: Description of pixelmap.
"""
return str('\n'.join([''.join(['{:4}'.format(item) for item in row]) for row in self.map_matrix]))
def __repr__(self):
"""Internal representation
Just use str for now.
"""
return self.__str__()
Add default value for matrix and methods to get columns and rows."""Pixelmap
Cool pixelmap of Pixels.
Last updated: March 11, 2017
"""
from .pixel import Pixel
class Pixelmap:
def __init__(self, cols, rows, default_val=None):
"""Pixelmap constructor
:param cols: Width of map in pixels.
:param rows: Height of map in pixels.
:param default_val: Default value for pixels.
"""
assert cols >= 0, 'Invalid Pixelmap width'
assert rows >= 0, 'Invalid Pixelmap height'
self.cols = cols
self.rows = rows
self.map_matrix = [[0]*self.cols for _ in range(self.rows)]
for row in range(self.rows):
for col in range(self.cols):
self.map_matrix[row][col] = Pixel(default_val)
def num_cols(self):
return self.cols
def num_rows(self):
return self.rows
def __str__(self):
"""Human readable pixelmap description.
Pretty much just the matrix.
:return: Description of pixelmap.
"""
return str('\n'.join([''.join(['{:6}'.format(str(item)) for item in row]) for row in self.map_matrix]))
|
<commit_before>"""Pixelmap
Cool pixelmap of Pixels.
Last updated: March 7, 2017
"""
from pixel import Pixel
class Pixelmap:
def __init__(self, width, height):
"""Pixelmap constructor
:param width: Width of map in pixels.
:param height: Height of map in pixels.
"""
self.width = width
self.height = height
self.map_matrix = [[0]*self.width for _ in range(self.height)]
for row in range(self.height):
for col in range(self.width):
self.map_matrix[row][col] = Pixel()
def __str__(self):
"""Human readable pixelmap description.
Pretty much just the matrix.
:return: Description of pixelmap.
"""
return str('\n'.join([''.join(['{:4}'.format(item) for item in row]) for row in self.map_matrix]))
def __repr__(self):
"""Internal representation
Just use str for now.
"""
return self.__str__()
<commit_msg>Add default value for matrix and methods to get columns and rows.<commit_after>"""Pixelmap
Cool pixelmap of Pixels.
Last updated: March 11, 2017
"""
from .pixel import Pixel
class Pixelmap:
def __init__(self, cols, rows, default_val=None):
"""Pixelmap constructor
:param cols: Width of map in pixels.
:param rows: Height of map in pixels.
:param default_val: Default value for pixels.
"""
assert cols >= 0, 'Invalid Pixelmap width'
assert rows >= 0, 'Invalid Pixelmap height'
self.cols = cols
self.rows = rows
self.map_matrix = [[0]*self.cols for _ in range(self.rows)]
for row in range(self.rows):
for col in range(self.cols):
self.map_matrix[row][col] = Pixel(default_val)
def num_cols(self):
return self.cols
def num_rows(self):
return self.rows
def __str__(self):
"""Human readable pixelmap description.
Pretty much just the matrix.
:return: Description of pixelmap.
"""
return str('\n'.join([''.join(['{:6}'.format(str(item)) for item in row]) for row in self.map_matrix]))
|
96733510eeee4b06c3b509097e7c26fd143d687f
|
plugins/clue/clue.py
|
plugins/clue/clue.py
|
from __future__ import unicode_literals
import re
crontable = []
outputs = []
state = {}
class ClueState:
def __init__(self):
self.count = 0
self.clue = ''
def process_message(data):
channel = data['channel']
if channel not in state.keys():
state[channel] = ClueState()
st = state[channel]
# Count the number of messages we have seen in this channel since
# stony last repeated a clue.
st.count = st.count + 1
if re.search("\n?\s*>", data['text']):
st.clue = data['text']
st.count = 1
else:
if st.count % 10 == 0:
outputs.append([channel, st.clue])
|
from __future__ import unicode_literals
import re
crontable = []
outputs = []
state = {}
class ClueState:
def __init__(self):
self.count = 0
self.clue = ''
def process_message(data):
channel = data['channel']
if channel not in state.keys():
state[channel] = ClueState()
st = state[channel]
# Count the number of messages we have seen in this channel since
# stony last repeated a clue.
st.count = st.count + 1
if re.search("^\s*>", data['text'], re.MULTILINE):
st.clue = data['text']
st.count = 1
else:
if st.count % 10 == 0:
outputs.append([channel, st.clue])
|
Fix to only match '>' at the beginning of a line
|
Fix to only match '>' at the beginning of a line
Which was the intention with the '\n' in the pattern before, but I had
made it optional for the common case of the '>' being at the beginning
of the message, which of course had the side effect of allowing the
'>' to be matched anywhere (bleh).
Now, with a MULTLINE-mode regexp and '^' in the pattern, the '>' will
only be treated as significant at the beginning of a line, (preceded
by optional whitespace).
|
Python
|
mit
|
cworth-gh/stony
|
from __future__ import unicode_literals
import re
crontable = []
outputs = []
state = {}
class ClueState:
def __init__(self):
self.count = 0
self.clue = ''
def process_message(data):
channel = data['channel']
if channel not in state.keys():
state[channel] = ClueState()
st = state[channel]
# Count the number of messages we have seen in this channel since
# stony last repeated a clue.
st.count = st.count + 1
if re.search("\n?\s*>", data['text']):
st.clue = data['text']
st.count = 1
else:
if st.count % 10 == 0:
outputs.append([channel, st.clue])
Fix to only match '>' at the beginning of a line
Which was the intention with the '\n' in the pattern before, but I had
made it optional for the common case of the '>' being at the beginning
of the message, which of course had the side effect of allowing the
'>' to be matched anywhere (bleh).
Now, with a MULTLINE-mode regexp and '^' in the pattern, the '>' will
only be treated as significant at the beginning of a line, (preceded
by optional whitespace).
|
from __future__ import unicode_literals
import re
crontable = []
outputs = []
state = {}
class ClueState:
def __init__(self):
self.count = 0
self.clue = ''
def process_message(data):
channel = data['channel']
if channel not in state.keys():
state[channel] = ClueState()
st = state[channel]
# Count the number of messages we have seen in this channel since
# stony last repeated a clue.
st.count = st.count + 1
if re.search("^\s*>", data['text'], re.MULTILINE):
st.clue = data['text']
st.count = 1
else:
if st.count % 10 == 0:
outputs.append([channel, st.clue])
|
<commit_before>from __future__ import unicode_literals
import re
crontable = []
outputs = []
state = {}
class ClueState:
def __init__(self):
self.count = 0
self.clue = ''
def process_message(data):
channel = data['channel']
if channel not in state.keys():
state[channel] = ClueState()
st = state[channel]
# Count the number of messages we have seen in this channel since
# stony last repeated a clue.
st.count = st.count + 1
if re.search("\n?\s*>", data['text']):
st.clue = data['text']
st.count = 1
else:
if st.count % 10 == 0:
outputs.append([channel, st.clue])
<commit_msg>Fix to only match '>' at the beginning of a line
Which was the intention with the '\n' in the pattern before, but I had
made it optional for the common case of the '>' being at the beginning
of the message, which of course had the side effect of allowing the
'>' to be matched anywhere (bleh).
Now, with a MULTLINE-mode regexp and '^' in the pattern, the '>' will
only be treated as significant at the beginning of a line, (preceded
by optional whitespace).<commit_after>
|
from __future__ import unicode_literals
import re
crontable = []
outputs = []
state = {}
class ClueState:
def __init__(self):
self.count = 0
self.clue = ''
def process_message(data):
channel = data['channel']
if channel not in state.keys():
state[channel] = ClueState()
st = state[channel]
# Count the number of messages we have seen in this channel since
# stony last repeated a clue.
st.count = st.count + 1
if re.search("^\s*>", data['text'], re.MULTILINE):
st.clue = data['text']
st.count = 1
else:
if st.count % 10 == 0:
outputs.append([channel, st.clue])
|
from __future__ import unicode_literals
import re
crontable = []
outputs = []
state = {}
class ClueState:
def __init__(self):
self.count = 0
self.clue = ''
def process_message(data):
channel = data['channel']
if channel not in state.keys():
state[channel] = ClueState()
st = state[channel]
# Count the number of messages we have seen in this channel since
# stony last repeated a clue.
st.count = st.count + 1
if re.search("\n?\s*>", data['text']):
st.clue = data['text']
st.count = 1
else:
if st.count % 10 == 0:
outputs.append([channel, st.clue])
Fix to only match '>' at the beginning of a line
Which was the intention with the '\n' in the pattern before, but I had
made it optional for the common case of the '>' being at the beginning
of the message, which of course had the side effect of allowing the
'>' to be matched anywhere (bleh).
Now, with a MULTLINE-mode regexp and '^' in the pattern, the '>' will
only be treated as significant at the beginning of a line, (preceded
by optional whitespace).from __future__ import unicode_literals
import re
crontable = []
outputs = []
state = {}
class ClueState:
def __init__(self):
self.count = 0
self.clue = ''
def process_message(data):
channel = data['channel']
if channel not in state.keys():
state[channel] = ClueState()
st = state[channel]
# Count the number of messages we have seen in this channel since
# stony last repeated a clue.
st.count = st.count + 1
if re.search("^\s*>", data['text'], re.MULTILINE):
st.clue = data['text']
st.count = 1
else:
if st.count % 10 == 0:
outputs.append([channel, st.clue])
|
<commit_before>from __future__ import unicode_literals
import re
crontable = []
outputs = []
state = {}
class ClueState:
def __init__(self):
self.count = 0
self.clue = ''
def process_message(data):
channel = data['channel']
if channel not in state.keys():
state[channel] = ClueState()
st = state[channel]
# Count the number of messages we have seen in this channel since
# stony last repeated a clue.
st.count = st.count + 1
if re.search("\n?\s*>", data['text']):
st.clue = data['text']
st.count = 1
else:
if st.count % 10 == 0:
outputs.append([channel, st.clue])
<commit_msg>Fix to only match '>' at the beginning of a line
Which was the intention with the '\n' in the pattern before, but I had
made it optional for the common case of the '>' being at the beginning
of the message, which of course had the side effect of allowing the
'>' to be matched anywhere (bleh).
Now, with a MULTLINE-mode regexp and '^' in the pattern, the '>' will
only be treated as significant at the beginning of a line, (preceded
by optional whitespace).<commit_after>from __future__ import unicode_literals
import re
crontable = []
outputs = []
state = {}
class ClueState:
def __init__(self):
self.count = 0
self.clue = ''
def process_message(data):
channel = data['channel']
if channel not in state.keys():
state[channel] = ClueState()
st = state[channel]
# Count the number of messages we have seen in this channel since
# stony last repeated a clue.
st.count = st.count + 1
if re.search("^\s*>", data['text'], re.MULTILINE):
st.clue = data['text']
st.count = 1
else:
if st.count % 10 == 0:
outputs.append([channel, st.clue])
|
4a32bd6bdc91564276a4e46210fc9019dd1b8a89
|
statement_format.py
|
statement_format.py
|
import pandas as pd
def fn(row):
if row['Type'] == 'DIRECT DEBIT':
return 'DD'
if row['Type'] == 'DIRECT CREDIT' or row['Spending Category'] == 'INCOME':
return 'BP'
if row['Amount (GBP)'] < 0:
return 'SO'
raise Exception('Unintended state')
df = pd.read_csv('statement.csv')
output = df[['Date']]
output['Type'] = df.apply(fn, axis=1)
output['Description'] = df['Reference']
output['Paid Out'] = df['Amount (GBP)'].copy()
output['Paid In'] = df['Amount (GBP)'].copy()
output['Paid Out'] = output['Paid Out'] * -1
output['Paid Out'][output['Paid Out'] < 0] = None
output['Paid In'][output['Paid In'] < 0] = None
output['Balance'] = df['Balance (GBP)']
print(output)
output.to_csv('output.csv', index=False)
|
import json
import pandas as pd
def fn(row):
if row['Type'] == 'DIRECT DEBIT':
return 'DD'
if row['Type'] == 'DIRECT CREDIT' or row['Spending Category'] == 'INCOME':
return 'BP'
if row['Amount (GBP)'] < 0:
return 'SO'
raise Exception('Unintended state')
df = pd.read_csv('statement.csv')
conversions = json.load(open('description_conversion.json'))
output = df[['Date']]
output['Type'] = df.apply(fn, axis=1)
output['Description'] = (df['Counter Party'] + ' ' + df['Reference']).replace(conversions)
output['Paid Out'] = df['Amount (GBP)'].copy()
output['Paid In'] = df['Amount (GBP)'].copy()
output['Paid Out'] = output['Paid Out'] * -1
output['Paid Out'][output['Paid Out'] < 0] = None
output['Paid In'][output['Paid In'] < 0] = None
output['Balance'] = df['Balance (GBP)']
output.to_csv('output.csv', index=False)
|
Correct operation. Now to fix panda warnings
|
Correct operation. Now to fix panda warnings
|
Python
|
mit
|
noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit
|
import pandas as pd
def fn(row):
if row['Type'] == 'DIRECT DEBIT':
return 'DD'
if row['Type'] == 'DIRECT CREDIT' or row['Spending Category'] == 'INCOME':
return 'BP'
if row['Amount (GBP)'] < 0:
return 'SO'
raise Exception('Unintended state')
df = pd.read_csv('statement.csv')
output = df[['Date']]
output['Type'] = df.apply(fn, axis=1)
output['Description'] = df['Reference']
output['Paid Out'] = df['Amount (GBP)'].copy()
output['Paid In'] = df['Amount (GBP)'].copy()
output['Paid Out'] = output['Paid Out'] * -1
output['Paid Out'][output['Paid Out'] < 0] = None
output['Paid In'][output['Paid In'] < 0] = None
output['Balance'] = df['Balance (GBP)']
print(output)
output.to_csv('output.csv', index=False)
Correct operation. Now to fix panda warnings
|
import json
import pandas as pd
def fn(row):
if row['Type'] == 'DIRECT DEBIT':
return 'DD'
if row['Type'] == 'DIRECT CREDIT' or row['Spending Category'] == 'INCOME':
return 'BP'
if row['Amount (GBP)'] < 0:
return 'SO'
raise Exception('Unintended state')
df = pd.read_csv('statement.csv')
conversions = json.load(open('description_conversion.json'))
output = df[['Date']]
output['Type'] = df.apply(fn, axis=1)
output['Description'] = (df['Counter Party'] + ' ' + df['Reference']).replace(conversions)
output['Paid Out'] = df['Amount (GBP)'].copy()
output['Paid In'] = df['Amount (GBP)'].copy()
output['Paid Out'] = output['Paid Out'] * -1
output['Paid Out'][output['Paid Out'] < 0] = None
output['Paid In'][output['Paid In'] < 0] = None
output['Balance'] = df['Balance (GBP)']
output.to_csv('output.csv', index=False)
|
<commit_before>import pandas as pd
def fn(row):
if row['Type'] == 'DIRECT DEBIT':
return 'DD'
if row['Type'] == 'DIRECT CREDIT' or row['Spending Category'] == 'INCOME':
return 'BP'
if row['Amount (GBP)'] < 0:
return 'SO'
raise Exception('Unintended state')
df = pd.read_csv('statement.csv')
output = df[['Date']]
output['Type'] = df.apply(fn, axis=1)
output['Description'] = df['Reference']
output['Paid Out'] = df['Amount (GBP)'].copy()
output['Paid In'] = df['Amount (GBP)'].copy()
output['Paid Out'] = output['Paid Out'] * -1
output['Paid Out'][output['Paid Out'] < 0] = None
output['Paid In'][output['Paid In'] < 0] = None
output['Balance'] = df['Balance (GBP)']
print(output)
output.to_csv('output.csv', index=False)
<commit_msg>Correct operation. Now to fix panda warnings<commit_after>
|
import json
import pandas as pd
def fn(row):
if row['Type'] == 'DIRECT DEBIT':
return 'DD'
if row['Type'] == 'DIRECT CREDIT' or row['Spending Category'] == 'INCOME':
return 'BP'
if row['Amount (GBP)'] < 0:
return 'SO'
raise Exception('Unintended state')
df = pd.read_csv('statement.csv')
conversions = json.load(open('description_conversion.json'))
output = df[['Date']]
output['Type'] = df.apply(fn, axis=1)
output['Description'] = (df['Counter Party'] + ' ' + df['Reference']).replace(conversions)
output['Paid Out'] = df['Amount (GBP)'].copy()
output['Paid In'] = df['Amount (GBP)'].copy()
output['Paid Out'] = output['Paid Out'] * -1
output['Paid Out'][output['Paid Out'] < 0] = None
output['Paid In'][output['Paid In'] < 0] = None
output['Balance'] = df['Balance (GBP)']
output.to_csv('output.csv', index=False)
|
import pandas as pd
def fn(row):
if row['Type'] == 'DIRECT DEBIT':
return 'DD'
if row['Type'] == 'DIRECT CREDIT' or row['Spending Category'] == 'INCOME':
return 'BP'
if row['Amount (GBP)'] < 0:
return 'SO'
raise Exception('Unintended state')
df = pd.read_csv('statement.csv')
output = df[['Date']]
output['Type'] = df.apply(fn, axis=1)
output['Description'] = df['Reference']
output['Paid Out'] = df['Amount (GBP)'].copy()
output['Paid In'] = df['Amount (GBP)'].copy()
output['Paid Out'] = output['Paid Out'] * -1
output['Paid Out'][output['Paid Out'] < 0] = None
output['Paid In'][output['Paid In'] < 0] = None
output['Balance'] = df['Balance (GBP)']
print(output)
output.to_csv('output.csv', index=False)
Correct operation. Now to fix panda warningsimport json
import pandas as pd
def fn(row):
if row['Type'] == 'DIRECT DEBIT':
return 'DD'
if row['Type'] == 'DIRECT CREDIT' or row['Spending Category'] == 'INCOME':
return 'BP'
if row['Amount (GBP)'] < 0:
return 'SO'
raise Exception('Unintended state')
df = pd.read_csv('statement.csv')
conversions = json.load(open('description_conversion.json'))
output = df[['Date']]
output['Type'] = df.apply(fn, axis=1)
output['Description'] = (df['Counter Party'] + ' ' + df['Reference']).replace(conversions)
output['Paid Out'] = df['Amount (GBP)'].copy()
output['Paid In'] = df['Amount (GBP)'].copy()
output['Paid Out'] = output['Paid Out'] * -1
output['Paid Out'][output['Paid Out'] < 0] = None
output['Paid In'][output['Paid In'] < 0] = None
output['Balance'] = df['Balance (GBP)']
output.to_csv('output.csv', index=False)
|
<commit_before>import pandas as pd
def fn(row):
if row['Type'] == 'DIRECT DEBIT':
return 'DD'
if row['Type'] == 'DIRECT CREDIT' or row['Spending Category'] == 'INCOME':
return 'BP'
if row['Amount (GBP)'] < 0:
return 'SO'
raise Exception('Unintended state')
df = pd.read_csv('statement.csv')
output = df[['Date']]
output['Type'] = df.apply(fn, axis=1)
output['Description'] = df['Reference']
output['Paid Out'] = df['Amount (GBP)'].copy()
output['Paid In'] = df['Amount (GBP)'].copy()
output['Paid Out'] = output['Paid Out'] * -1
output['Paid Out'][output['Paid Out'] < 0] = None
output['Paid In'][output['Paid In'] < 0] = None
output['Balance'] = df['Balance (GBP)']
print(output)
output.to_csv('output.csv', index=False)
<commit_msg>Correct operation. Now to fix panda warnings<commit_after>import json
import pandas as pd
def fn(row):
if row['Type'] == 'DIRECT DEBIT':
return 'DD'
if row['Type'] == 'DIRECT CREDIT' or row['Spending Category'] == 'INCOME':
return 'BP'
if row['Amount (GBP)'] < 0:
return 'SO'
raise Exception('Unintended state')
df = pd.read_csv('statement.csv')
conversions = json.load(open('description_conversion.json'))
output = df[['Date']]
output['Type'] = df.apply(fn, axis=1)
output['Description'] = (df['Counter Party'] + ' ' + df['Reference']).replace(conversions)
output['Paid Out'] = df['Amount (GBP)'].copy()
output['Paid In'] = df['Amount (GBP)'].copy()
output['Paid Out'] = output['Paid Out'] * -1
output['Paid Out'][output['Paid Out'] < 0] = None
output['Paid In'][output['Paid In'] < 0] = None
output['Balance'] = df['Balance (GBP)']
output.to_csv('output.csv', index=False)
|
896f7b82eb1c84538a94e65d8ff55282e36c6818
|
squadron/exthandlers/__init__.py
|
squadron/exthandlers/__init__.py
|
from .dir import ext_dir
from .makegit import ext_git
from .download import ext_download
from .template import ext_template
extension_handles = {
'dir':ext_dir,
'git':ext_git,
'download':ext_download,
'virtualenv',ext_virtualenv,
'tpl':ext_template
}
|
from .dir import ext_dir
from .makegit import ext_git
from .download import ext_download
from .template import ext_template
from .virtualenv import ext_virtualenv
extension_handles = {
'dir':ext_dir,
'git':ext_git,
'download':ext_download,
'virtualenv':ext_virtualenv,
'tpl':ext_template
}
|
Fix broken tests because of virtualenv handler
|
Fix broken tests because of virtualenv handler
|
Python
|
mit
|
gosquadron/squadron,gosquadron/squadron
|
from .dir import ext_dir
from .makegit import ext_git
from .download import ext_download
from .template import ext_template
extension_handles = {
'dir':ext_dir,
'git':ext_git,
'download':ext_download,
'virtualenv',ext_virtualenv,
'tpl':ext_template
}
Fix broken tests because of virtualenv handler
|
from .dir import ext_dir
from .makegit import ext_git
from .download import ext_download
from .template import ext_template
from .virtualenv import ext_virtualenv
extension_handles = {
'dir':ext_dir,
'git':ext_git,
'download':ext_download,
'virtualenv':ext_virtualenv,
'tpl':ext_template
}
|
<commit_before>from .dir import ext_dir
from .makegit import ext_git
from .download import ext_download
from .template import ext_template
extension_handles = {
'dir':ext_dir,
'git':ext_git,
'download':ext_download,
'virtualenv',ext_virtualenv,
'tpl':ext_template
}
<commit_msg>Fix broken tests because of virtualenv handler<commit_after>
|
from .dir import ext_dir
from .makegit import ext_git
from .download import ext_download
from .template import ext_template
from .virtualenv import ext_virtualenv
extension_handles = {
'dir':ext_dir,
'git':ext_git,
'download':ext_download,
'virtualenv':ext_virtualenv,
'tpl':ext_template
}
|
from .dir import ext_dir
from .makegit import ext_git
from .download import ext_download
from .template import ext_template
extension_handles = {
'dir':ext_dir,
'git':ext_git,
'download':ext_download,
'virtualenv',ext_virtualenv,
'tpl':ext_template
}
Fix broken tests because of virtualenv handlerfrom .dir import ext_dir
from .makegit import ext_git
from .download import ext_download
from .template import ext_template
from .virtualenv import ext_virtualenv
extension_handles = {
'dir':ext_dir,
'git':ext_git,
'download':ext_download,
'virtualenv':ext_virtualenv,
'tpl':ext_template
}
|
<commit_before>from .dir import ext_dir
from .makegit import ext_git
from .download import ext_download
from .template import ext_template
extension_handles = {
'dir':ext_dir,
'git':ext_git,
'download':ext_download,
'virtualenv',ext_virtualenv,
'tpl':ext_template
}
<commit_msg>Fix broken tests because of virtualenv handler<commit_after>from .dir import ext_dir
from .makegit import ext_git
from .download import ext_download
from .template import ext_template
from .virtualenv import ext_virtualenv
extension_handles = {
'dir':ext_dir,
'git':ext_git,
'download':ext_download,
'virtualenv':ext_virtualenv,
'tpl':ext_template
}
|
9afa8829f0ded4c19f0467f1a5e2c8539f33ac31
|
profile_bs_xf03id/startup/52-suspenders.py
|
profile_bs_xf03id/startup/52-suspenders.py
|
from bluesky.suspenders import (SuspendFloor, SuspendBoolHigh, SuspendBoolLow)
from bluesky.global_state import get_gs
gs = get_gs()
RE = gs.RE
# Here are some conditions that will cause scans to pause automatically:
# - when the beam current goes below a certain threshold
susp_current = SuspendFloor(beamline_status.beam_current,
suspend_thresh=100.0,
resume_thresh=105.0,
# message='beam current too low',
)
# RE.install_suspender(susp_current)
# - when the shutter is closed
susp_shutter = SuspendBoolLow(beamline_status.shutter_status,
# message='shutter not open',
)
# RE.install_suspender(susp_shutter)
# - if the beamline isn't enabled
susp_enabled = SuspendBoolLow(beamline_status.beamline_enabled,
# message='beamline is not enabled',
)
# RE.install_suspender(susp_enabled)
|
from bluesky.suspenders import (SuspendFloor, SuspendBoolHigh, SuspendBoolLow)
from bluesky.global_state import get_gs
gs = get_gs()
RE = gs.RE
# Here are some conditions that will cause scans to pause automatically:
# - when the beam current goes below a certain threshold
susp_current = SuspendFloor(beamline_status.beam_current,
suspend_thresh=100.0,
resume_thresh=105.0,
tripped_message='beam current too low',
)
# - when the shutter is closed
susp_shutter = SuspendBoolLow(beamline_status.shutter_status,
tripped_message='shutter not open',
)
# - if the beamline isn't enabled
susp_enabled = SuspendBoolLow(beamline_status.beamline_enabled,
tripped_message='beamline is not enabled',
)
# Install all suspenders:
# RE.install_suspender(susp_current)
# RE.install_suspender(susp_shutter)
# RE.install_suspender(susp_enabled)
|
Add a tripped message to the suspenders, but disable for now
|
Add a tripped message to the suspenders, but disable for now
|
Python
|
bsd-2-clause
|
NSLS-II-HXN/ipython_ophyd,NSLS-II-HXN/ipython_ophyd
|
from bluesky.suspenders import (SuspendFloor, SuspendBoolHigh, SuspendBoolLow)
from bluesky.global_state import get_gs
gs = get_gs()
RE = gs.RE
# Here are some conditions that will cause scans to pause automatically:
# - when the beam current goes below a certain threshold
susp_current = SuspendFloor(beamline_status.beam_current,
suspend_thresh=100.0,
resume_thresh=105.0,
# message='beam current too low',
)
# RE.install_suspender(susp_current)
# - when the shutter is closed
susp_shutter = SuspendBoolLow(beamline_status.shutter_status,
# message='shutter not open',
)
# RE.install_suspender(susp_shutter)
# - if the beamline isn't enabled
susp_enabled = SuspendBoolLow(beamline_status.beamline_enabled,
# message='beamline is not enabled',
)
# RE.install_suspender(susp_enabled)
Add a tripped message to the suspenders, but disable for now
|
from bluesky.suspenders import (SuspendFloor, SuspendBoolHigh, SuspendBoolLow)
from bluesky.global_state import get_gs
gs = get_gs()
RE = gs.RE
# Here are some conditions that will cause scans to pause automatically:
# - when the beam current goes below a certain threshold
susp_current = SuspendFloor(beamline_status.beam_current,
suspend_thresh=100.0,
resume_thresh=105.0,
tripped_message='beam current too low',
)
# - when the shutter is closed
susp_shutter = SuspendBoolLow(beamline_status.shutter_status,
tripped_message='shutter not open',
)
# - if the beamline isn't enabled
susp_enabled = SuspendBoolLow(beamline_status.beamline_enabled,
tripped_message='beamline is not enabled',
)
# Install all suspenders:
# RE.install_suspender(susp_current)
# RE.install_suspender(susp_shutter)
# RE.install_suspender(susp_enabled)
|
<commit_before>from bluesky.suspenders import (SuspendFloor, SuspendBoolHigh, SuspendBoolLow)
from bluesky.global_state import get_gs
gs = get_gs()
RE = gs.RE
# Here are some conditions that will cause scans to pause automatically:
# - when the beam current goes below a certain threshold
susp_current = SuspendFloor(beamline_status.beam_current,
suspend_thresh=100.0,
resume_thresh=105.0,
# message='beam current too low',
)
# RE.install_suspender(susp_current)
# - when the shutter is closed
susp_shutter = SuspendBoolLow(beamline_status.shutter_status,
# message='shutter not open',
)
# RE.install_suspender(susp_shutter)
# - if the beamline isn't enabled
susp_enabled = SuspendBoolLow(beamline_status.beamline_enabled,
# message='beamline is not enabled',
)
# RE.install_suspender(susp_enabled)
<commit_msg>Add a tripped message to the suspenders, but disable for now<commit_after>
|
from bluesky.suspenders import (SuspendFloor, SuspendBoolHigh, SuspendBoolLow)
from bluesky.global_state import get_gs
gs = get_gs()
RE = gs.RE
# Here are some conditions that will cause scans to pause automatically:
# - when the beam current goes below a certain threshold
susp_current = SuspendFloor(beamline_status.beam_current,
suspend_thresh=100.0,
resume_thresh=105.0,
tripped_message='beam current too low',
)
# - when the shutter is closed
susp_shutter = SuspendBoolLow(beamline_status.shutter_status,
tripped_message='shutter not open',
)
# - if the beamline isn't enabled
susp_enabled = SuspendBoolLow(beamline_status.beamline_enabled,
tripped_message='beamline is not enabled',
)
# Install all suspenders:
# RE.install_suspender(susp_current)
# RE.install_suspender(susp_shutter)
# RE.install_suspender(susp_enabled)
|
from bluesky.suspenders import (SuspendFloor, SuspendBoolHigh, SuspendBoolLow)
from bluesky.global_state import get_gs
gs = get_gs()
RE = gs.RE
# Here are some conditions that will cause scans to pause automatically:
# - when the beam current goes below a certain threshold
susp_current = SuspendFloor(beamline_status.beam_current,
suspend_thresh=100.0,
resume_thresh=105.0,
# message='beam current too low',
)
# RE.install_suspender(susp_current)
# - when the shutter is closed
susp_shutter = SuspendBoolLow(beamline_status.shutter_status,
# message='shutter not open',
)
# RE.install_suspender(susp_shutter)
# - if the beamline isn't enabled
susp_enabled = SuspendBoolLow(beamline_status.beamline_enabled,
# message='beamline is not enabled',
)
# RE.install_suspender(susp_enabled)
Add a tripped message to the suspenders, but disable for nowfrom bluesky.suspenders import (SuspendFloor, SuspendBoolHigh, SuspendBoolLow)
from bluesky.global_state import get_gs
gs = get_gs()
RE = gs.RE
# Here are some conditions that will cause scans to pause automatically:
# - when the beam current goes below a certain threshold
susp_current = SuspendFloor(beamline_status.beam_current,
suspend_thresh=100.0,
resume_thresh=105.0,
tripped_message='beam current too low',
)
# - when the shutter is closed
susp_shutter = SuspendBoolLow(beamline_status.shutter_status,
tripped_message='shutter not open',
)
# - if the beamline isn't enabled
susp_enabled = SuspendBoolLow(beamline_status.beamline_enabled,
tripped_message='beamline is not enabled',
)
# Install all suspenders:
# RE.install_suspender(susp_current)
# RE.install_suspender(susp_shutter)
# RE.install_suspender(susp_enabled)
|
<commit_before>from bluesky.suspenders import (SuspendFloor, SuspendBoolHigh, SuspendBoolLow)
from bluesky.global_state import get_gs
gs = get_gs()
RE = gs.RE
# Here are some conditions that will cause scans to pause automatically:
# - when the beam current goes below a certain threshold
susp_current = SuspendFloor(beamline_status.beam_current,
suspend_thresh=100.0,
resume_thresh=105.0,
# message='beam current too low',
)
# RE.install_suspender(susp_current)
# - when the shutter is closed
susp_shutter = SuspendBoolLow(beamline_status.shutter_status,
# message='shutter not open',
)
# RE.install_suspender(susp_shutter)
# - if the beamline isn't enabled
susp_enabled = SuspendBoolLow(beamline_status.beamline_enabled,
# message='beamline is not enabled',
)
# RE.install_suspender(susp_enabled)
<commit_msg>Add a tripped message to the suspenders, but disable for now<commit_after>from bluesky.suspenders import (SuspendFloor, SuspendBoolHigh, SuspendBoolLow)
from bluesky.global_state import get_gs
gs = get_gs()
RE = gs.RE
# Here are some conditions that will cause scans to pause automatically:
# - when the beam current goes below a certain threshold
susp_current = SuspendFloor(beamline_status.beam_current,
suspend_thresh=100.0,
resume_thresh=105.0,
tripped_message='beam current too low',
)
# - when the shutter is closed
susp_shutter = SuspendBoolLow(beamline_status.shutter_status,
tripped_message='shutter not open',
)
# - if the beamline isn't enabled
susp_enabled = SuspendBoolLow(beamline_status.beamline_enabled,
tripped_message='beamline is not enabled',
)
# Install all suspenders:
# RE.install_suspender(susp_current)
# RE.install_suspender(susp_shutter)
# RE.install_suspender(susp_enabled)
|
0641de52396a97f109d02d2ee05967eb31a56a39
|
swiftly/__init__.py
|
swiftly/__init__.py
|
"""
Client for Swift
Copyright 2012 Gregory Holt
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__all__ = ['VERSION']
#: Version str for Swiftly: "major.minor".
#: If the second number is even, it's an official release.
#: If the second number is odd, it's a development release.
VERSION = '1.6'
|
"""
Client for Swift
Copyright 2012 Gregory Holt
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__all__ = ['VERSION']
#: Version str for Swiftly: "major.minor".
#: If the second number is even, it's an official release.
#: If the second number is odd, it's a development release.
VERSION = '1.7'
|
Work on master is now 1.7 dev work
|
Work on master is now 1.7 dev work
|
Python
|
apache-2.0
|
dpgoetz/swiftly,gholt/swiftly,rackerlabs/swiftly
|
"""
Client for Swift
Copyright 2012 Gregory Holt
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__all__ = ['VERSION']
#: Version str for Swiftly: "major.minor".
#: If the second number is even, it's an official release.
#: If the second number is odd, it's a development release.
VERSION = '1.6'
Work on master is now 1.7 dev work
|
"""
Client for Swift
Copyright 2012 Gregory Holt
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__all__ = ['VERSION']
#: Version str for Swiftly: "major.minor".
#: If the second number is even, it's an official release.
#: If the second number is odd, it's a development release.
VERSION = '1.7'
|
<commit_before>"""
Client for Swift
Copyright 2012 Gregory Holt
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__all__ = ['VERSION']
#: Version str for Swiftly: "major.minor".
#: If the second number is even, it's an official release.
#: If the second number is odd, it's a development release.
VERSION = '1.6'
<commit_msg>Work on master is now 1.7 dev work<commit_after>
|
"""
Client for Swift
Copyright 2012 Gregory Holt
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__all__ = ['VERSION']
#: Version str for Swiftly: "major.minor".
#: If the second number is even, it's an official release.
#: If the second number is odd, it's a development release.
VERSION = '1.7'
|
"""
Client for Swift
Copyright 2012 Gregory Holt
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__all__ = ['VERSION']
#: Version str for Swiftly: "major.minor".
#: If the second number is even, it's an official release.
#: If the second number is odd, it's a development release.
VERSION = '1.6'
Work on master is now 1.7 dev work"""
Client for Swift
Copyright 2012 Gregory Holt
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__all__ = ['VERSION']
#: Version str for Swiftly: "major.minor".
#: If the second number is even, it's an official release.
#: If the second number is odd, it's a development release.
VERSION = '1.7'
|
<commit_before>"""
Client for Swift
Copyright 2012 Gregory Holt
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__all__ = ['VERSION']
#: Version str for Swiftly: "major.minor".
#: If the second number is even, it's an official release.
#: If the second number is odd, it's a development release.
VERSION = '1.6'
<commit_msg>Work on master is now 1.7 dev work<commit_after>"""
Client for Swift
Copyright 2012 Gregory Holt
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__all__ = ['VERSION']
#: Version str for Swiftly: "major.minor".
#: If the second number is even, it's an official release.
#: If the second number is odd, it's a development release.
VERSION = '1.7'
|
48fd99751ddd000bb179214c69ee65ac7f70d2a2
|
scripts/remove-all-annotations.py
|
scripts/remove-all-annotations.py
|
#!/usr/bin/python
# This is a small helper script to remove all annotations from a
# project.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
import sys
import psycopg2
import os
from common import db_connection, conf
if len(sys.argv) != 1:
print >> sys.stderr, "Usage:", sys.argv[0]
sys.exit(1)
c = db_connection.cursor()
print """Warning: this script removes all annotations from all projects
in the database '%s'""" % (conf['database'],)
print "To continue, type 'Yes' followed by Enter."
reply = raw_input()
if reply != 'Yes':
sys.exit(2)
tables_to_truncate = (
'treenode_class_instance',
'connector_class_instance',
'treenode_connector',
'class_instance_class_instance',
'class_instance',
'treenode',
'connector'
)
c.execute('TRUNCATE '+', '.join(tables_to_truncate))
db_connection.commit()
c.close()
db_connection.close()
|
#!/usr/bin/python
# This is a small helper script to remove all annotations from a
# project.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
import sys
import psycopg2
import os
from common import db_connection, conf
if len(sys.argv) != 1:
print >> sys.stderr, "Usage:", sys.argv[0]
sys.exit(1)
c = db_connection.cursor()
print """Warning: this script removes all annotations from all projects
in the database '%s'""" % (conf['database'],)
print "To continue, type 'Yes' followed by Enter."
reply = raw_input()
if reply != 'Yes':
sys.exit(2)
tables_to_truncate = (
'treenode_class_instance',
'connector_class_instance',
'treenode_connector',
'class_instance_class_instance',
'class_instance',
'treenode',
'connector',
'class_class',
'class',
'relation'
)
c.execute('TRUNCATE '+', '.join(tables_to_truncate))
db_connection.commit()
c.close()
db_connection.close()
|
Make remove all annotation also remove classes and relations
|
Make remove all annotation also remove classes and relations
|
Python
|
agpl-3.0
|
fzadow/CATMAID,fzadow/CATMAID,htem/CATMAID,htem/CATMAID,htem/CATMAID,htem/CATMAID,fzadow/CATMAID,fzadow/CATMAID
|
#!/usr/bin/python
# This is a small helper script to remove all annotations from a
# project.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
import sys
import psycopg2
import os
from common import db_connection, conf
if len(sys.argv) != 1:
print >> sys.stderr, "Usage:", sys.argv[0]
sys.exit(1)
c = db_connection.cursor()
print """Warning: this script removes all annotations from all projects
in the database '%s'""" % (conf['database'],)
print "To continue, type 'Yes' followed by Enter."
reply = raw_input()
if reply != 'Yes':
sys.exit(2)
tables_to_truncate = (
'treenode_class_instance',
'connector_class_instance',
'treenode_connector',
'class_instance_class_instance',
'class_instance',
'treenode',
'connector'
)
c.execute('TRUNCATE '+', '.join(tables_to_truncate))
db_connection.commit()
c.close()
db_connection.close()
Make remove all annotation also remove classes and relations
|
#!/usr/bin/python
# This is a small helper script to remove all annotations from a
# project.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
import sys
import psycopg2
import os
from common import db_connection, conf
if len(sys.argv) != 1:
print >> sys.stderr, "Usage:", sys.argv[0]
sys.exit(1)
c = db_connection.cursor()
print """Warning: this script removes all annotations from all projects
in the database '%s'""" % (conf['database'],)
print "To continue, type 'Yes' followed by Enter."
reply = raw_input()
if reply != 'Yes':
sys.exit(2)
tables_to_truncate = (
'treenode_class_instance',
'connector_class_instance',
'treenode_connector',
'class_instance_class_instance',
'class_instance',
'treenode',
'connector',
'class_class',
'class',
'relation'
)
c.execute('TRUNCATE '+', '.join(tables_to_truncate))
db_connection.commit()
c.close()
db_connection.close()
|
<commit_before>#!/usr/bin/python
# This is a small helper script to remove all annotations from a
# project.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
import sys
import psycopg2
import os
from common import db_connection, conf
if len(sys.argv) != 1:
print >> sys.stderr, "Usage:", sys.argv[0]
sys.exit(1)
c = db_connection.cursor()
print """Warning: this script removes all annotations from all projects
in the database '%s'""" % (conf['database'],)
print "To continue, type 'Yes' followed by Enter."
reply = raw_input()
if reply != 'Yes':
sys.exit(2)
tables_to_truncate = (
'treenode_class_instance',
'connector_class_instance',
'treenode_connector',
'class_instance_class_instance',
'class_instance',
'treenode',
'connector'
)
c.execute('TRUNCATE '+', '.join(tables_to_truncate))
db_connection.commit()
c.close()
db_connection.close()
<commit_msg>Make remove all annotation also remove classes and relations<commit_after>
|
#!/usr/bin/python
# This is a small helper script to remove all annotations from a
# project.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
import sys
import psycopg2
import os
from common import db_connection, conf
if len(sys.argv) != 1:
print >> sys.stderr, "Usage:", sys.argv[0]
sys.exit(1)
c = db_connection.cursor()
print """Warning: this script removes all annotations from all projects
in the database '%s'""" % (conf['database'],)
print "To continue, type 'Yes' followed by Enter."
reply = raw_input()
if reply != 'Yes':
sys.exit(2)
tables_to_truncate = (
'treenode_class_instance',
'connector_class_instance',
'treenode_connector',
'class_instance_class_instance',
'class_instance',
'treenode',
'connector',
'class_class',
'class',
'relation'
)
c.execute('TRUNCATE '+', '.join(tables_to_truncate))
db_connection.commit()
c.close()
db_connection.close()
|
#!/usr/bin/python
# This is a small helper script to remove all annotations from a
# project.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
import sys
import psycopg2
import os
from common import db_connection, conf
if len(sys.argv) != 1:
print >> sys.stderr, "Usage:", sys.argv[0]
sys.exit(1)
c = db_connection.cursor()
print """Warning: this script removes all annotations from all projects
in the database '%s'""" % (conf['database'],)
print "To continue, type 'Yes' followed by Enter."
reply = raw_input()
if reply != 'Yes':
sys.exit(2)
tables_to_truncate = (
'treenode_class_instance',
'connector_class_instance',
'treenode_connector',
'class_instance_class_instance',
'class_instance',
'treenode',
'connector'
)
c.execute('TRUNCATE '+', '.join(tables_to_truncate))
db_connection.commit()
c.close()
db_connection.close()
Make remove all annotation also remove classes and relations#!/usr/bin/python
# This is a small helper script to remove all annotations from a
# project.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
import sys
import psycopg2
import os
from common import db_connection, conf
if len(sys.argv) != 1:
print >> sys.stderr, "Usage:", sys.argv[0]
sys.exit(1)
c = db_connection.cursor()
print """Warning: this script removes all annotations from all projects
in the database '%s'""" % (conf['database'],)
print "To continue, type 'Yes' followed by Enter."
reply = raw_input()
if reply != 'Yes':
sys.exit(2)
tables_to_truncate = (
'treenode_class_instance',
'connector_class_instance',
'treenode_connector',
'class_instance_class_instance',
'class_instance',
'treenode',
'connector',
'class_class',
'class',
'relation'
)
c.execute('TRUNCATE '+', '.join(tables_to_truncate))
db_connection.commit()
c.close()
db_connection.close()
|
<commit_before>#!/usr/bin/python
# This is a small helper script to remove all annotations from a
# project.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
import sys
import psycopg2
import os
from common import db_connection, conf
if len(sys.argv) != 1:
print >> sys.stderr, "Usage:", sys.argv[0]
sys.exit(1)
c = db_connection.cursor()
print """Warning: this script removes all annotations from all projects
in the database '%s'""" % (conf['database'],)
print "To continue, type 'Yes' followed by Enter."
reply = raw_input()
if reply != 'Yes':
sys.exit(2)
tables_to_truncate = (
'treenode_class_instance',
'connector_class_instance',
'treenode_connector',
'class_instance_class_instance',
'class_instance',
'treenode',
'connector'
)
c.execute('TRUNCATE '+', '.join(tables_to_truncate))
db_connection.commit()
c.close()
db_connection.close()
<commit_msg>Make remove all annotation also remove classes and relations<commit_after>#!/usr/bin/python
# This is a small helper script to remove all annotations from a
# project.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
import sys
import psycopg2
import os
from common import db_connection, conf
if len(sys.argv) != 1:
print >> sys.stderr, "Usage:", sys.argv[0]
sys.exit(1)
c = db_connection.cursor()
print """Warning: this script removes all annotations from all projects
in the database '%s'""" % (conf['database'],)
print "To continue, type 'Yes' followed by Enter."
reply = raw_input()
if reply != 'Yes':
sys.exit(2)
tables_to_truncate = (
'treenode_class_instance',
'connector_class_instance',
'treenode_connector',
'class_instance_class_instance',
'class_instance',
'treenode',
'connector',
'class_class',
'class',
'relation'
)
c.execute('TRUNCATE '+', '.join(tables_to_truncate))
db_connection.commit()
c.close()
db_connection.close()
|
426d3fd0572d0b648ceb7d5394b555f4a7c65a1e
|
source/cytoplasm/configuration.py
|
source/cytoplasm/configuration.py
|
# This module contains the user's configurations, to be accessed like:
# `print cytoplasm.configuration.build_dir`
import os, imp
# If the user has a file called _config.py, import that.
# The user's _config.py should "from cytoplasm.defaults import *" if they want to use
# some of the defaults.
if os.path.exists("_config.py"):
imp.load_source("_config", "_config.py")
from _config import *
|
# This module contains the user's configurations, to be accessed like:
# `print cytoplasm.configuration.build_dir`
import os, imp
from .errors import CytoplasmError
# If the user has a file called _config.py, import that.
# The user's _config.py should "from cytoplasm.defaults import *" if they want to use
# some of the defaults.
if os.path.exists("_config.py"):
imp.load_source("_config", "_config.py")
from _config import *
else:
raise CytoplasmError("You don't seem to have a configuration file at '_config.py'.")
|
Raise an error if the user doesn't have a _config.py
|
Raise an error if the user doesn't have a _config.py
This raises a less cryptic error.
|
Python
|
mit
|
startling/cytoplasm
|
# This module contains the user's configurations, to be accessed like:
# `print cytoplasm.configuration.build_dir`
import os, imp
# If the user has a file called _config.py, import that.
# The user's _config.py should "from cytoplasm.defaults import *" if they want to use
# some of the defaults.
if os.path.exists("_config.py"):
imp.load_source("_config", "_config.py")
from _config import *
Raise an error if the user doesn't have a _config.py
This raises a less cryptic error.
|
# This module contains the user's configurations, to be accessed like:
# `print cytoplasm.configuration.build_dir`
import os, imp
from .errors import CytoplasmError
# If the user has a file called _config.py, import that.
# The user's _config.py should "from cytoplasm.defaults import *" if they want to use
# some of the defaults.
if os.path.exists("_config.py"):
imp.load_source("_config", "_config.py")
from _config import *
else:
raise CytoplasmError("You don't seem to have a configuration file at '_config.py'.")
|
<commit_before># This module contains the user's configurations, to be accessed like:
# `print cytoplasm.configuration.build_dir`
import os, imp
# If the user has a file called _config.py, import that.
# The user's _config.py should "from cytoplasm.defaults import *" if they want to use
# some of the defaults.
if os.path.exists("_config.py"):
imp.load_source("_config", "_config.py")
from _config import *
<commit_msg>Raise an error if the user doesn't have a _config.py
This raises a less cryptic error.<commit_after>
|
# This module contains the user's configurations, to be accessed like:
# `print cytoplasm.configuration.build_dir`
import os, imp
from .errors import CytoplasmError
# If the user has a file called _config.py, import that.
# The user's _config.py should "from cytoplasm.defaults import *" if they want to use
# some of the defaults.
if os.path.exists("_config.py"):
imp.load_source("_config", "_config.py")
from _config import *
else:
raise CytoplasmError("You don't seem to have a configuration file at '_config.py'.")
|
# This module contains the user's configurations, to be accessed like:
# `print cytoplasm.configuration.build_dir`
import os, imp
# If the user has a file called _config.py, import that.
# The user's _config.py should "from cytoplasm.defaults import *" if they want to use
# some of the defaults.
if os.path.exists("_config.py"):
imp.load_source("_config", "_config.py")
from _config import *
Raise an error if the user doesn't have a _config.py
This raises a less cryptic error.# This module contains the user's configurations, to be accessed like:
# `print cytoplasm.configuration.build_dir`
import os, imp
from .errors import CytoplasmError
# If the user has a file called _config.py, import that.
# The user's _config.py should "from cytoplasm.defaults import *" if they want to use
# some of the defaults.
if os.path.exists("_config.py"):
imp.load_source("_config", "_config.py")
from _config import *
else:
raise CytoplasmError("You don't seem to have a configuration file at '_config.py'.")
|
<commit_before># This module contains the user's configurations, to be accessed like:
# `print cytoplasm.configuration.build_dir`
import os, imp
# If the user has a file called _config.py, import that.
# The user's _config.py should "from cytoplasm.defaults import *" if they want to use
# some of the defaults.
if os.path.exists("_config.py"):
imp.load_source("_config", "_config.py")
from _config import *
<commit_msg>Raise an error if the user doesn't have a _config.py
This raises a less cryptic error.<commit_after># This module contains the user's configurations, to be accessed like:
# `print cytoplasm.configuration.build_dir`
import os, imp
from .errors import CytoplasmError
# If the user has a file called _config.py, import that.
# The user's _config.py should "from cytoplasm.defaults import *" if they want to use
# some of the defaults.
if os.path.exists("_config.py"):
imp.load_source("_config", "_config.py")
from _config import *
else:
raise CytoplasmError("You don't seem to have a configuration file at '_config.py'.")
|
c5bc66351870ce369b0d06161f07a1943dfeed93
|
plugin_handler.py
|
plugin_handler.py
|
# -*- coding: utf-8 -*-
# Execute this file to see what plugins will be loaded.
# Implementation leans to Lex Toumbourou's example:
# https://lextoumbourou.com/blog/posts/dynamically-loading-modules-and-classes-in-python/
import os
import pkgutil
import sys
def load_venue_plugins():
"""
Read plugin directory and load found plugins.
Variable "blacklisted" can be used to exclude loading certain plugins.
"""
blacklisted = ["plugin_tiketti", "plugin_ontherocks"]
foundblacklisted = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path = [pluginspathabs]):
if plugname in sys.modules:
continue
if plugname in blacklisted:
foundblacklisted.append(plugname.lstrip("plugin_"))
continue
plugpath = "venues.%s" % (plugname)
loadplug = __import__(plugpath, fromlist = [plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.getVenueName()}")
print("Blacklisted plugins: {}.\n".format(", ".join(foundblacklisted[1:])))
return loadedplugins
if __name__ == '__main__':
load_venue_plugins()
|
# -*- coding: utf-8 -*-
# Execute this file to see what plugins will be loaded.
# Implementation leans to Lex Toumbourou's example:
# https://lextoumbourou.com/blog/posts/dynamically-loading-modules-and-classes-in-python/
import os
import pkgutil
import sys
def load_venue_plugins():
"""
Read plugin directory and load found plugins.
Variable "blacklisted" can be used to exclude loading certain plugins.
"""
blacklisted = ["plugin_tiketti"]
foundblacklisted = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path = [pluginspathabs]):
if plugname in sys.modules:
continue
if plugname in blacklisted:
foundblacklisted.append(plugname.lstrip("plugin_"))
continue
plugpath = "venues.%s" % (plugname)
loadplug = __import__(plugpath, fromlist = [plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.getVenueName()}")
print("Blacklisted plugins: {}.\n".format(", ".join(foundblacklisted[1:])))
return loadedplugins
if __name__ == '__main__':
load_venue_plugins()
|
Enable On the rocks plugin
|
Enable On the rocks plugin
|
Python
|
isc
|
weezel/BandEventNotifier
|
# -*- coding: utf-8 -*-
# Execute this file to see what plugins will be loaded.
# Implementation leans to Lex Toumbourou's example:
# https://lextoumbourou.com/blog/posts/dynamically-loading-modules-and-classes-in-python/
import os
import pkgutil
import sys
def load_venue_plugins():
"""
Read plugin directory and load found plugins.
Variable "blacklisted" can be used to exclude loading certain plugins.
"""
blacklisted = ["plugin_tiketti", "plugin_ontherocks"]
foundblacklisted = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path = [pluginspathabs]):
if plugname in sys.modules:
continue
if plugname in blacklisted:
foundblacklisted.append(plugname.lstrip("plugin_"))
continue
plugpath = "venues.%s" % (plugname)
loadplug = __import__(plugpath, fromlist = [plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.getVenueName()}")
print("Blacklisted plugins: {}.\n".format(", ".join(foundblacklisted[1:])))
return loadedplugins
if __name__ == '__main__':
load_venue_plugins()
Enable On the rocks plugin
|
# -*- coding: utf-8 -*-
# Execute this file to see what plugins will be loaded.
# Implementation leans to Lex Toumbourou's example:
# https://lextoumbourou.com/blog/posts/dynamically-loading-modules-and-classes-in-python/
import os
import pkgutil
import sys
def load_venue_plugins():
"""
Read plugin directory and load found plugins.
Variable "blacklisted" can be used to exclude loading certain plugins.
"""
blacklisted = ["plugin_tiketti"]
foundblacklisted = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path = [pluginspathabs]):
if plugname in sys.modules:
continue
if plugname in blacklisted:
foundblacklisted.append(plugname.lstrip("plugin_"))
continue
plugpath = "venues.%s" % (plugname)
loadplug = __import__(plugpath, fromlist = [plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.getVenueName()}")
print("Blacklisted plugins: {}.\n".format(", ".join(foundblacklisted[1:])))
return loadedplugins
if __name__ == '__main__':
load_venue_plugins()
|
<commit_before># -*- coding: utf-8 -*-
# Execute this file to see what plugins will be loaded.
# Implementation leans to Lex Toumbourou's example:
# https://lextoumbourou.com/blog/posts/dynamically-loading-modules-and-classes-in-python/
import os
import pkgutil
import sys
def load_venue_plugins():
"""
Read plugin directory and load found plugins.
Variable "blacklisted" can be used to exclude loading certain plugins.
"""
blacklisted = ["plugin_tiketti", "plugin_ontherocks"]
foundblacklisted = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path = [pluginspathabs]):
if plugname in sys.modules:
continue
if plugname in blacklisted:
foundblacklisted.append(plugname.lstrip("plugin_"))
continue
plugpath = "venues.%s" % (plugname)
loadplug = __import__(plugpath, fromlist = [plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.getVenueName()}")
print("Blacklisted plugins: {}.\n".format(", ".join(foundblacklisted[1:])))
return loadedplugins
if __name__ == '__main__':
load_venue_plugins()
<commit_msg>Enable On the rocks plugin<commit_after>
|
# -*- coding: utf-8 -*-
# Execute this file to see what plugins will be loaded.
# Implementation leans to Lex Toumbourou's example:
# https://lextoumbourou.com/blog/posts/dynamically-loading-modules-and-classes-in-python/
import os
import pkgutil
import sys
def load_venue_plugins():
"""
Read plugin directory and load found plugins.
Variable "blacklisted" can be used to exclude loading certain plugins.
"""
blacklisted = ["plugin_tiketti"]
foundblacklisted = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path = [pluginspathabs]):
if plugname in sys.modules:
continue
if plugname in blacklisted:
foundblacklisted.append(plugname.lstrip("plugin_"))
continue
plugpath = "venues.%s" % (plugname)
loadplug = __import__(plugpath, fromlist = [plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.getVenueName()}")
print("Blacklisted plugins: {}.\n".format(", ".join(foundblacklisted[1:])))
return loadedplugins
if __name__ == '__main__':
load_venue_plugins()
|
# -*- coding: utf-8 -*-
# Execute this file to see what plugins will be loaded.
# Implementation leans to Lex Toumbourou's example:
# https://lextoumbourou.com/blog/posts/dynamically-loading-modules-and-classes-in-python/
import os
import pkgutil
import sys
def load_venue_plugins():
"""
Read plugin directory and load found plugins.
Variable "blacklisted" can be used to exclude loading certain plugins.
"""
blacklisted = ["plugin_tiketti", "plugin_ontherocks"]
foundblacklisted = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path = [pluginspathabs]):
if plugname in sys.modules:
continue
if plugname in blacklisted:
foundblacklisted.append(plugname.lstrip("plugin_"))
continue
plugpath = "venues.%s" % (plugname)
loadplug = __import__(plugpath, fromlist = [plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.getVenueName()}")
print("Blacklisted plugins: {}.\n".format(", ".join(foundblacklisted[1:])))
return loadedplugins
if __name__ == '__main__':
load_venue_plugins()
Enable On the rocks plugin# -*- coding: utf-8 -*-
# Execute this file to see what plugins will be loaded.
# Implementation leans to Lex Toumbourou's example:
# https://lextoumbourou.com/blog/posts/dynamically-loading-modules-and-classes-in-python/
import os
import pkgutil
import sys
def load_venue_plugins():
"""
Read plugin directory and load found plugins.
Variable "blacklisted" can be used to exclude loading certain plugins.
"""
blacklisted = ["plugin_tiketti"]
foundblacklisted = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path = [pluginspathabs]):
if plugname in sys.modules:
continue
if plugname in blacklisted:
foundblacklisted.append(plugname.lstrip("plugin_"))
continue
plugpath = "venues.%s" % (plugname)
loadplug = __import__(plugpath, fromlist = [plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.getVenueName()}")
print("Blacklisted plugins: {}.\n".format(", ".join(foundblacklisted[1:])))
return loadedplugins
if __name__ == '__main__':
load_venue_plugins()
|
<commit_before># -*- coding: utf-8 -*-
# Execute this file to see what plugins will be loaded.
# Implementation leans to Lex Toumbourou's example:
# https://lextoumbourou.com/blog/posts/dynamically-loading-modules-and-classes-in-python/
import os
import pkgutil
import sys
def load_venue_plugins():
"""
Read plugin directory and load found plugins.
Variable "blacklisted" can be used to exclude loading certain plugins.
"""
blacklisted = ["plugin_tiketti", "plugin_ontherocks"]
foundblacklisted = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path = [pluginspathabs]):
if plugname in sys.modules:
continue
if plugname in blacklisted:
foundblacklisted.append(plugname.lstrip("plugin_"))
continue
plugpath = "venues.%s" % (plugname)
loadplug = __import__(plugpath, fromlist = [plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.getVenueName()}")
print("Blacklisted plugins: {}.\n".format(", ".join(foundblacklisted[1:])))
return loadedplugins
if __name__ == '__main__':
load_venue_plugins()
<commit_msg>Enable On the rocks plugin<commit_after># -*- coding: utf-8 -*-
# Execute this file to see what plugins will be loaded.
# Implementation leans to Lex Toumbourou's example:
# https://lextoumbourou.com/blog/posts/dynamically-loading-modules-and-classes-in-python/
import os
import pkgutil
import sys
def load_venue_plugins():
"""
Read plugin directory and load found plugins.
Variable "blacklisted" can be used to exclude loading certain plugins.
"""
blacklisted = ["plugin_tiketti"]
foundblacklisted = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path = [pluginspathabs]):
if plugname in sys.modules:
continue
if plugname in blacklisted:
foundblacklisted.append(plugname.lstrip("plugin_"))
continue
plugpath = "venues.%s" % (plugname)
loadplug = __import__(plugpath, fromlist = [plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.getVenueName()}")
print("Blacklisted plugins: {}.\n".format(", ".join(foundblacklisted[1:])))
return loadedplugins
if __name__ == '__main__':
load_venue_plugins()
|
b9b21cbcc04ce5d24a82817002c99eb5b5d70cf5
|
molly/apps/service_status/views.py
|
molly/apps/service_status/views.py
|
import logging
from django.utils.translation import ugettext as _
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import *
logger = logging.getLogger("molly.apps.service_status.views")
class IndexView(BaseView):
"""
View to display service status information
"""
# TODO Remove specific references to OUCS and OLIS
def get_metadata(self, request):
return {
'title': _('Service Status', )
'additional': _('Check whether OUCS and OLIS services are available'),
}
@BreadcrumbFactory
def breadcrumb(self, request, context):
return Breadcrumb('service_status', None,
_('Service Status'),
lazy_reverse('index'))
def handle_GET(self, request, context):
services = []
for provider in self.conf.providers:
try:
status = provider.get_status()
except Exception, e:
logger.warn("Failed to load service status", exc_info=True)
else:
services.append((
provider.slug, provider.name,
status['lastBuildDate'], status['services'],
provider.get_announcements(),
))
context['services'] = services
return self.render(request, context, 'service_status/index')
|
import logging
from django.utils.translation import ugettext as _
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import *
logger = logging.getLogger("molly.apps.service_status.views")
class IndexView(BaseView):
    """
    View to display service status information
    """

    # TODO Remove specific references to OUCS and OLIS
    def get_metadata(self, request):
        """Return the page title and short description for this view."""
        return {
            'title': _('Service Status'),
            'additional': _('Check whether OUCS and OLIS services are available'),
        }

    @BreadcrumbFactory
    def breadcrumb(self, request, context):
        """Breadcrumb entry pointing at the service-status index page."""
        return Breadcrumb('service_status', None,
                          _('Service Status'),
                          lazy_reverse('index'))

    def handle_GET(self, request, context):
        """Collect status from each configured provider and render the page.

        A provider that raises is logged (with traceback) and skipped, so
        one broken provider does not take down the whole status page.
        """
        services = []
        for provider in self.conf.providers:
            try:
                status = provider.get_status()
            # Bug fix: 'except Exception, e' is Python 2-only syntax (a
            # SyntaxError on Python 3); 'e' was unused anyway since
            # exc_info=True captures the traceback.
            except Exception:
                # logger.warn is a deprecated alias of logger.warning.
                logger.warning("Failed to load service status", exc_info=True)
            else:
                services.append((
                    provider.slug, provider.name,
                    status['lastBuildDate'], status['services'],
                    provider.get_announcements(),
                ))
        context['services'] = services
        return self.render(request, context, 'service_status/index')
|
Fix syntax error in service_status
|
Fix syntax error in service_status
|
Python
|
apache-2.0
|
mollyproject/mollyproject,mollyproject/mollyproject,mollyproject/mollyproject
|
import logging
from django.utils.translation import ugettext as _
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import *
logger = logging.getLogger("molly.apps.service_status.views")
class IndexView(BaseView):
"""
View to display service status information
"""
# TODO Remove specific references to OUCS and OLIS
def get_metadata(self, request):
return {
'title': _('Service Status', )
'additional': _('Check whether OUCS and OLIS services are available'),
}
@BreadcrumbFactory
def breadcrumb(self, request, context):
return Breadcrumb('service_status', None,
_('Service Status'),
lazy_reverse('index'))
def handle_GET(self, request, context):
services = []
for provider in self.conf.providers:
try:
status = provider.get_status()
except Exception, e:
logger.warn("Failed to load service status", exc_info=True)
else:
services.append((
provider.slug, provider.name,
status['lastBuildDate'], status['services'],
provider.get_announcements(),
))
context['services'] = services
return self.render(request, context, 'service_status/index')
Fix syntax error in service_status
|
import logging
from django.utils.translation import ugettext as _
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import *
logger = logging.getLogger("molly.apps.service_status.views")
class IndexView(BaseView):
"""
View to display service status information
"""
# TODO Remove specific references to OUCS and OLIS
def get_metadata(self, request):
return {
'title': _('Service Status'),
'additional': _('Check whether OUCS and OLIS services are available'),
}
@BreadcrumbFactory
def breadcrumb(self, request, context):
return Breadcrumb('service_status', None,
_('Service Status'),
lazy_reverse('index'))
def handle_GET(self, request, context):
services = []
for provider in self.conf.providers:
try:
status = provider.get_status()
except Exception, e:
logger.warn("Failed to load service status", exc_info=True)
else:
services.append((
provider.slug, provider.name,
status['lastBuildDate'], status['services'],
provider.get_announcements(),
))
context['services'] = services
return self.render(request, context, 'service_status/index')
|
<commit_before>import logging
from django.utils.translation import ugettext as _
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import *
logger = logging.getLogger("molly.apps.service_status.views")
class IndexView(BaseView):
"""
View to display service status information
"""
# TODO Remove specific references to OUCS and OLIS
def get_metadata(self, request):
return {
'title': _('Service Status', )
'additional': _('Check whether OUCS and OLIS services are available'),
}
@BreadcrumbFactory
def breadcrumb(self, request, context):
return Breadcrumb('service_status', None,
_('Service Status'),
lazy_reverse('index'))
def handle_GET(self, request, context):
services = []
for provider in self.conf.providers:
try:
status = provider.get_status()
except Exception, e:
logger.warn("Failed to load service status", exc_info=True)
else:
services.append((
provider.slug, provider.name,
status['lastBuildDate'], status['services'],
provider.get_announcements(),
))
context['services'] = services
return self.render(request, context, 'service_status/index')
<commit_msg>Fix syntax error in service_status<commit_after>
|
import logging
from django.utils.translation import ugettext as _
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import *
logger = logging.getLogger("molly.apps.service_status.views")
class IndexView(BaseView):
"""
View to display service status information
"""
# TODO Remove specific references to OUCS and OLIS
def get_metadata(self, request):
return {
'title': _('Service Status'),
'additional': _('Check whether OUCS and OLIS services are available'),
}
@BreadcrumbFactory
def breadcrumb(self, request, context):
return Breadcrumb('service_status', None,
_('Service Status'),
lazy_reverse('index'))
def handle_GET(self, request, context):
services = []
for provider in self.conf.providers:
try:
status = provider.get_status()
except Exception, e:
logger.warn("Failed to load service status", exc_info=True)
else:
services.append((
provider.slug, provider.name,
status['lastBuildDate'], status['services'],
provider.get_announcements(),
))
context['services'] = services
return self.render(request, context, 'service_status/index')
|
import logging
from django.utils.translation import ugettext as _
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import *
logger = logging.getLogger("molly.apps.service_status.views")
class IndexView(BaseView):
"""
View to display service status information
"""
# TODO Remove specific references to OUCS and OLIS
def get_metadata(self, request):
return {
'title': _('Service Status', )
'additional': _('Check whether OUCS and OLIS services are available'),
}
@BreadcrumbFactory
def breadcrumb(self, request, context):
return Breadcrumb('service_status', None,
_('Service Status'),
lazy_reverse('index'))
def handle_GET(self, request, context):
services = []
for provider in self.conf.providers:
try:
status = provider.get_status()
except Exception, e:
logger.warn("Failed to load service status", exc_info=True)
else:
services.append((
provider.slug, provider.name,
status['lastBuildDate'], status['services'],
provider.get_announcements(),
))
context['services'] = services
return self.render(request, context, 'service_status/index')
Fix syntax error in service_statusimport logging
from django.utils.translation import ugettext as _
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import *
logger = logging.getLogger("molly.apps.service_status.views")
class IndexView(BaseView):
"""
View to display service status information
"""
# TODO Remove specific references to OUCS and OLIS
def get_metadata(self, request):
return {
'title': _('Service Status'),
'additional': _('Check whether OUCS and OLIS services are available'),
}
@BreadcrumbFactory
def breadcrumb(self, request, context):
return Breadcrumb('service_status', None,
_('Service Status'),
lazy_reverse('index'))
def handle_GET(self, request, context):
services = []
for provider in self.conf.providers:
try:
status = provider.get_status()
except Exception, e:
logger.warn("Failed to load service status", exc_info=True)
else:
services.append((
provider.slug, provider.name,
status['lastBuildDate'], status['services'],
provider.get_announcements(),
))
context['services'] = services
return self.render(request, context, 'service_status/index')
|
<commit_before>import logging
from django.utils.translation import ugettext as _
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import *
logger = logging.getLogger("molly.apps.service_status.views")
class IndexView(BaseView):
"""
View to display service status information
"""
# TODO Remove specific references to OUCS and OLIS
def get_metadata(self, request):
return {
'title': _('Service Status', )
'additional': _('Check whether OUCS and OLIS services are available'),
}
@BreadcrumbFactory
def breadcrumb(self, request, context):
return Breadcrumb('service_status', None,
_('Service Status'),
lazy_reverse('index'))
def handle_GET(self, request, context):
services = []
for provider in self.conf.providers:
try:
status = provider.get_status()
except Exception, e:
logger.warn("Failed to load service status", exc_info=True)
else:
services.append((
provider.slug, provider.name,
status['lastBuildDate'], status['services'],
provider.get_announcements(),
))
context['services'] = services
return self.render(request, context, 'service_status/index')
<commit_msg>Fix syntax error in service_status<commit_after>import logging
from django.utils.translation import ugettext as _
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import *
logger = logging.getLogger("molly.apps.service_status.views")
class IndexView(BaseView):
"""
View to display service status information
"""
# TODO Remove specific references to OUCS and OLIS
def get_metadata(self, request):
return {
'title': _('Service Status'),
'additional': _('Check whether OUCS and OLIS services are available'),
}
@BreadcrumbFactory
def breadcrumb(self, request, context):
return Breadcrumb('service_status', None,
_('Service Status'),
lazy_reverse('index'))
def handle_GET(self, request, context):
services = []
for provider in self.conf.providers:
try:
status = provider.get_status()
except Exception, e:
logger.warn("Failed to load service status", exc_info=True)
else:
services.append((
provider.slug, provider.name,
status['lastBuildDate'], status['services'],
provider.get_announcements(),
))
context['services'] = services
return self.render(request, context, 'service_status/index')
|
90e8f58c24608c503697e9d491ff77b5b46972ba
|
pi_director/controllers/controllers.py
|
pi_director/controllers/controllers.py
|
from pi_director.models.models import (
DBSession,
MyModel,
)
def get_pis():
PiList=DBSession.query(MyModel).filter(MyModel.uuid!="default").all()
return PiList
|
from pi_director.models.models import (
DBSession,
MyModel,
)
def get_pis():
    """Return all registered Pis except the 'default' placeholder.

    Results are ordered by ``lastseen`` ascending, so Pis that have not
    checked in recently sort to the top of the list.
    """
    query = DBSession.query(MyModel).filter(MyModel.uuid != "default")
    return query.order_by(MyModel.lastseen).all()
|
Order Pis in list by lastseen, showing non-communicating pis at the top
|
Order Pis in list by lastseen, showing non-communicating pis at the top
|
Python
|
mit
|
PeterGrace/pi_director,PeterGrace/pi_director,PeterGrace/pi_director,selfcommit/pi_director,selfcommit/pi_director,selfcommit/pi_director
|
from pi_director.models.models import (
DBSession,
MyModel,
)
def get_pis():
PiList=DBSession.query(MyModel).filter(MyModel.uuid!="default").all()
return PiList
Order Pis in list by lastseen, showing non-communicating pis at the top
|
from pi_director.models.models import (
DBSession,
MyModel,
)
def get_pis():
PiList=DBSession.query(MyModel).filter(MyModel.uuid!="default").order_by(MyModel.lastseen).all()
return PiList
|
<commit_before>from pi_director.models.models import (
DBSession,
MyModel,
)
def get_pis():
PiList=DBSession.query(MyModel).filter(MyModel.uuid!="default").all()
return PiList
<commit_msg>Order Pis in list by lastseen, showing non-communicating pis at the top<commit_after>
|
from pi_director.models.models import (
DBSession,
MyModel,
)
def get_pis():
PiList=DBSession.query(MyModel).filter(MyModel.uuid!="default").order_by(MyModel.lastseen).all()
return PiList
|
from pi_director.models.models import (
DBSession,
MyModel,
)
def get_pis():
PiList=DBSession.query(MyModel).filter(MyModel.uuid!="default").all()
return PiList
Order Pis in list by lastseen, showing non-communicating pis at the topfrom pi_director.models.models import (
DBSession,
MyModel,
)
def get_pis():
PiList=DBSession.query(MyModel).filter(MyModel.uuid!="default").order_by(MyModel.lastseen).all()
return PiList
|
<commit_before>from pi_director.models.models import (
DBSession,
MyModel,
)
def get_pis():
PiList=DBSession.query(MyModel).filter(MyModel.uuid!="default").all()
return PiList
<commit_msg>Order Pis in list by lastseen, showing non-communicating pis at the top<commit_after>from pi_director.models.models import (
DBSession,
MyModel,
)
def get_pis():
PiList=DBSession.query(MyModel).filter(MyModel.uuid!="default").order_by(MyModel.lastseen).all()
return PiList
|
1046157fa2e062f12123e110c82851c2484216be
|
gallery_plugins/plugin_gfycat.py
|
gallery_plugins/plugin_gfycat.py
|
import re
try:
import urllib.request as urllib
except:
import urllib # Python 2
def title(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
link = 'https://gfycat.com/cajax/get/' + gfyId
respond = urllib.urlopen(link).read()
username = re.findall(r'\"userName\":\"(.+?)\",' ,respond)[0]
return username if username != "anonymous" else "gfycat " + gfyId
def redirect(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
respond = urllib.urlopen('https://gfycat.com/cajax/get/' + gfyId).read()
webmurl = re.findall(r'\"webmUrl\":\"(.+?)\",' ,respond)[0]
# delete escape characters
webmurl = webmurl.replace("\\","")
# for some reason we can not connect via https
webmurl = webmurl.replace("https", "http")
return webmurl
same_filename = True
|
import re
try:
import urllib.request as urllib
except:
import urllib # Python 2
def title(source):
    """Return a gallery title for the gfycat link found in *source* HTML.

    Takes the gfycat id from the last gfycat.com anchor in the page,
    queries gfycat's cajax JSON endpoint for metadata, and returns the
    uploader's username — or "gfycat <id>" when the upload is anonymous.
    """
    gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
    link = 'https://gfycat.com/cajax/get/' + gfyId
    # decode: urlopen().read() returns bytes on Python 3; re needs str here.
    respond = urllib.urlopen(link).read().decode("utf8")
    username = re.findall(r'\"userName\":\"(.+?)\",' ,respond)[0]
    return username if username != "anonymous" else "gfycat " + gfyId
def redirect(source):
    """Return the direct webm download URL for the gfycat link in *source*.

    Looks up the same cajax metadata as title() and extracts the
    JSON-escaped "webmUrl" value, normalising it to a plain http URL.
    """
    gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
    respond = urllib.urlopen('https://gfycat.com/cajax/get/' + gfyId).read().decode("utf8")
    webmurl = re.findall(r'\"webmUrl\":\"(.+?)\",' ,respond)[0]
    # delete escape characters (the URL is JSON-escaped, e.g. "\/" for "/")
    webmurl = webmurl.replace("\\","")
    # for some reason we can not connect via https
    webmurl = webmurl.replace("https", "http")
    return webmurl
same_filename = True
|
Update gfycat plugin for python3 support
|
Update gfycat plugin for python3 support
|
Python
|
mit
|
regosen/gallery_get
|
import re
try:
import urllib.request as urllib
except:
import urllib # Python 2
def title(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
link = 'https://gfycat.com/cajax/get/' + gfyId
respond = urllib.urlopen(link).read()
username = re.findall(r'\"userName\":\"(.+?)\",' ,respond)[0]
return username if username != "anonymous" else "gfycat " + gfyId
def redirect(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
respond = urllib.urlopen('https://gfycat.com/cajax/get/' + gfyId).read()
webmurl = re.findall(r'\"webmUrl\":\"(.+?)\",' ,respond)[0]
# delete escape characters
webmurl = webmurl.replace("\\","")
# for some reason we can not connect via https
webmurl = webmurl.replace("https", "http")
return webmurl
same_filename = True
Update gfycat plugin for python3 support
|
import re
try:
import urllib.request as urllib
except:
import urllib # Python 2
def title(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
link = 'https://gfycat.com/cajax/get/' + gfyId
respond = urllib.urlopen(link).read().decode("utf8")
username = re.findall(r'\"userName\":\"(.+?)\",' ,respond)[0]
return username if username != "anonymous" else "gfycat " + gfyId
def redirect(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
respond = urllib.urlopen('https://gfycat.com/cajax/get/' + gfyId).read().decode("utf8")
webmurl = re.findall(r'\"webmUrl\":\"(.+?)\",' ,respond)[0]
# delete escape characters
webmurl = webmurl.replace("\\","")
# for some reason we can not connect via https
webmurl = webmurl.replace("https", "http")
return webmurl
same_filename = True
|
<commit_before>import re
try:
import urllib.request as urllib
except:
import urllib # Python 2
def title(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
link = 'https://gfycat.com/cajax/get/' + gfyId
respond = urllib.urlopen(link).read()
username = re.findall(r'\"userName\":\"(.+?)\",' ,respond)[0]
return username if username != "anonymous" else "gfycat " + gfyId
def redirect(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
respond = urllib.urlopen('https://gfycat.com/cajax/get/' + gfyId).read()
webmurl = re.findall(r'\"webmUrl\":\"(.+?)\",' ,respond)[0]
# delete escape characters
webmurl = webmurl.replace("\\","")
# for some reason we can not connect via https
webmurl = webmurl.replace("https", "http")
return webmurl
same_filename = True
<commit_msg>Update gfycat plugin for python3 support<commit_after>
|
import re
try:
import urllib.request as urllib
except:
import urllib # Python 2
def title(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
link = 'https://gfycat.com/cajax/get/' + gfyId
respond = urllib.urlopen(link).read().decode("utf8")
username = re.findall(r'\"userName\":\"(.+?)\",' ,respond)[0]
return username if username != "anonymous" else "gfycat " + gfyId
def redirect(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
respond = urllib.urlopen('https://gfycat.com/cajax/get/' + gfyId).read().decode("utf8")
webmurl = re.findall(r'\"webmUrl\":\"(.+?)\",' ,respond)[0]
# delete escape characters
webmurl = webmurl.replace("\\","")
# for some reason we can not connect via https
webmurl = webmurl.replace("https", "http")
return webmurl
same_filename = True
|
import re
try:
import urllib.request as urllib
except:
import urllib # Python 2
def title(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
link = 'https://gfycat.com/cajax/get/' + gfyId
respond = urllib.urlopen(link).read()
username = re.findall(r'\"userName\":\"(.+?)\",' ,respond)[0]
return username if username != "anonymous" else "gfycat " + gfyId
def redirect(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
respond = urllib.urlopen('https://gfycat.com/cajax/get/' + gfyId).read()
webmurl = re.findall(r'\"webmUrl\":\"(.+?)\",' ,respond)[0]
# delete escape characters
webmurl = webmurl.replace("\\","")
# for some reason we can not connect via https
webmurl = webmurl.replace("https", "http")
return webmurl
same_filename = True
Update gfycat plugin for python3 supportimport re
try:
import urllib.request as urllib
except:
import urllib # Python 2
def title(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
link = 'https://gfycat.com/cajax/get/' + gfyId
respond = urllib.urlopen(link).read().decode("utf8")
username = re.findall(r'\"userName\":\"(.+?)\",' ,respond)[0]
return username if username != "anonymous" else "gfycat " + gfyId
def redirect(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
respond = urllib.urlopen('https://gfycat.com/cajax/get/' + gfyId).read().decode("utf8")
webmurl = re.findall(r'\"webmUrl\":\"(.+?)\",' ,respond)[0]
# delete escape characters
webmurl = webmurl.replace("\\","")
# for some reason we can not connect via https
webmurl = webmurl.replace("https", "http")
return webmurl
same_filename = True
|
<commit_before>import re
try:
import urllib.request as urllib
except:
import urllib # Python 2
def title(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
link = 'https://gfycat.com/cajax/get/' + gfyId
respond = urllib.urlopen(link).read()
username = re.findall(r'\"userName\":\"(.+?)\",' ,respond)[0]
return username if username != "anonymous" else "gfycat " + gfyId
def redirect(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
respond = urllib.urlopen('https://gfycat.com/cajax/get/' + gfyId).read()
webmurl = re.findall(r'\"webmUrl\":\"(.+?)\",' ,respond)[0]
# delete escape characters
webmurl = webmurl.replace("\\","")
# for some reason we can not connect via https
webmurl = webmurl.replace("https", "http")
return webmurl
same_filename = True
<commit_msg>Update gfycat plugin for python3 support<commit_after>import re
try:
import urllib.request as urllib
except:
import urllib # Python 2
def title(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
link = 'https://gfycat.com/cajax/get/' + gfyId
respond = urllib.urlopen(link).read().decode("utf8")
username = re.findall(r'\"userName\":\"(.+?)\",' ,respond)[0]
return username if username != "anonymous" else "gfycat " + gfyId
def redirect(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
respond = urllib.urlopen('https://gfycat.com/cajax/get/' + gfyId).read().decode("utf8")
webmurl = re.findall(r'\"webmUrl\":\"(.+?)\",' ,respond)[0]
# delete escape characters
webmurl = webmurl.replace("\\","")
# for some reason we can not connect via https
webmurl = webmurl.replace("https", "http")
return webmurl
same_filename = True
|
55c60d059a4f6a6ae6633318420a7c0cd22c2513
|
product/models.py
|
product/models.py
|
from django.db import models
from amadaa.models import AmadaaModel
from django.urls import reverse
from ckeditor.fields import RichTextField
# Create your models here.
class ProductCategory(AmadaaModel):
name = models.CharField(max_length=100, unique=True)
def get_absolute_url(self):
return reverse('product-category-detail', kwargs={'pk': self.pk})
def __str__(self):
return "{}".format(self.name)
class ProductType(AmadaaModel):
name = models.CharField(max_length=100, unique=True)
def get_absolute_url(self):
return reverse('product-type-list')
def __str__(self):
return "{}".format(self.name)
class UnitOfMeasurement(AmadaaModel):
unit = models.CharField(max_length=30, unique=True)
def get_absolute_url(self):
return reverse('uom-list')
def __str__(self):
return "%(self.unit)s"
class Product(AmadaaModel):
name = models.CharField(max_length=100)
internal_ref = models.CharField(max_length=100, default='')
product_type = models.ForeignKey(ProductType, default=0)
category = models.ForeignKey(ProductCategory)
description = RichTextField(blank=True, default='')
def get_absolute_url(self):
return reverse('product-list')
def __str__(self):
return "%(self.name)s"
|
from django.db import models
from amadaa.models import AmadaaModel
from django.urls import reverse
from ckeditor.fields import RichTextField
# Create your models here.
class ProductCategory(AmadaaModel):
name = models.CharField(max_length=100, unique=True)
def get_absolute_url(self):
return reverse('product-category-detail', kwargs={'pk': self.pk})
def __str__(self):
return "{}".format(self.name)
class ProductType(AmadaaModel):
name = models.CharField(max_length=100, unique=True)
def get_absolute_url(self):
return reverse('product-type-list')
def __str__(self):
return "{}".format(self.name)
class UnitOfMeasurement(AmadaaModel):
    """A unit in which product quantities are measured (e.g. kg, litre)."""

    unit = models.CharField(max_length=30, unique=True)

    def get_absolute_url(self):
        # Redirect target after create/update: the unit list page.
        return reverse('uom-list')

    def __str__(self):
        # Bug fix: the old body returned the literal string "%(self.unit)s"
        # (no % mapping was ever applied), so every instance rendered
        # identically.  Format the actual value, matching the style of
        # ProductCategory.__str__ and ProductType.__str__.
        return "{}".format(self.unit)
class Product(AmadaaModel):
    """A sellable/stockable product with a type, category and rich description."""

    # Display name; not declared unique, unlike category/type names.
    name = models.CharField(max_length=100)
    # Internal reference code; defaults to empty rather than NULL.
    internal_ref = models.CharField(max_length=100, default='')
    # NOTE(review): default=0 assumes a ProductType row with pk 0 exists —
    # confirm, otherwise saving without an explicit type will fail.
    product_type = models.ForeignKey(ProductType, default=0)
    category = models.ForeignKey(ProductCategory)
    description = RichTextField(blank=True, default='')

    def get_absolute_url(self):
        # Redirect target after create/update: the product list page.
        return reverse('product-list')

    def __str__(self):
        return "{}".format(self.name)
|
Fix in string representation of product model.
|
Fix in string representation of product model.
|
Python
|
mit
|
borderitsolutions/amadaa,borderitsolutions/amadaa,borderitsolutions/amadaa
|
from django.db import models
from amadaa.models import AmadaaModel
from django.urls import reverse
from ckeditor.fields import RichTextField
# Create your models here.
class ProductCategory(AmadaaModel):
name = models.CharField(max_length=100, unique=True)
def get_absolute_url(self):
return reverse('product-category-detail', kwargs={'pk': self.pk})
def __str__(self):
return "{}".format(self.name)
class ProductType(AmadaaModel):
name = models.CharField(max_length=100, unique=True)
def get_absolute_url(self):
return reverse('product-type-list')
def __str__(self):
return "{}".format(self.name)
class UnitOfMeasurement(AmadaaModel):
unit = models.CharField(max_length=30, unique=True)
def get_absolute_url(self):
return reverse('uom-list')
def __str__(self):
return "%(self.unit)s"
class Product(AmadaaModel):
name = models.CharField(max_length=100)
internal_ref = models.CharField(max_length=100, default='')
product_type = models.ForeignKey(ProductType, default=0)
category = models.ForeignKey(ProductCategory)
description = RichTextField(blank=True, default='')
def get_absolute_url(self):
return reverse('product-list')
def __str__(self):
return "%(self.name)s"
Fix in string representation of product model.
|
from django.db import models
from amadaa.models import AmadaaModel
from django.urls import reverse
from ckeditor.fields import RichTextField
# Create your models here.
class ProductCategory(AmadaaModel):
name = models.CharField(max_length=100, unique=True)
def get_absolute_url(self):
return reverse('product-category-detail', kwargs={'pk': self.pk})
def __str__(self):
return "{}".format(self.name)
class ProductType(AmadaaModel):
name = models.CharField(max_length=100, unique=True)
def get_absolute_url(self):
return reverse('product-type-list')
def __str__(self):
return "{}".format(self.name)
class UnitOfMeasurement(AmadaaModel):
unit = models.CharField(max_length=30, unique=True)
def get_absolute_url(self):
return reverse('uom-list')
def __str__(self):
return "%(self.unit)s"
class Product(AmadaaModel):
name = models.CharField(max_length=100)
internal_ref = models.CharField(max_length=100, default='')
product_type = models.ForeignKey(ProductType, default=0)
category = models.ForeignKey(ProductCategory)
description = RichTextField(blank=True, default='')
def get_absolute_url(self):
return reverse('product-list')
def __str__(self):
return "{}".format(self.name)
|
<commit_before>from django.db import models
from amadaa.models import AmadaaModel
from django.urls import reverse
from ckeditor.fields import RichTextField
# Create your models here.
class ProductCategory(AmadaaModel):
name = models.CharField(max_length=100, unique=True)
def get_absolute_url(self):
return reverse('product-category-detail', kwargs={'pk': self.pk})
def __str__(self):
return "{}".format(self.name)
class ProductType(AmadaaModel):
name = models.CharField(max_length=100, unique=True)
def get_absolute_url(self):
return reverse('product-type-list')
def __str__(self):
return "{}".format(self.name)
class UnitOfMeasurement(AmadaaModel):
unit = models.CharField(max_length=30, unique=True)
def get_absolute_url(self):
return reverse('uom-list')
def __str__(self):
return "%(self.unit)s"
class Product(AmadaaModel):
name = models.CharField(max_length=100)
internal_ref = models.CharField(max_length=100, default='')
product_type = models.ForeignKey(ProductType, default=0)
category = models.ForeignKey(ProductCategory)
description = RichTextField(blank=True, default='')
def get_absolute_url(self):
return reverse('product-list')
def __str__(self):
return "%(self.name)s"
<commit_msg>Fix in string representation of product model.<commit_after>
|
from django.db import models
from amadaa.models import AmadaaModel
from django.urls import reverse
from ckeditor.fields import RichTextField
# Create your models here.
class ProductCategory(AmadaaModel):
name = models.CharField(max_length=100, unique=True)
def get_absolute_url(self):
return reverse('product-category-detail', kwargs={'pk': self.pk})
def __str__(self):
return "{}".format(self.name)
class ProductType(AmadaaModel):
name = models.CharField(max_length=100, unique=True)
def get_absolute_url(self):
return reverse('product-type-list')
def __str__(self):
return "{}".format(self.name)
class UnitOfMeasurement(AmadaaModel):
unit = models.CharField(max_length=30, unique=True)
def get_absolute_url(self):
return reverse('uom-list')
def __str__(self):
return "%(self.unit)s"
class Product(AmadaaModel):
name = models.CharField(max_length=100)
internal_ref = models.CharField(max_length=100, default='')
product_type = models.ForeignKey(ProductType, default=0)
category = models.ForeignKey(ProductCategory)
description = RichTextField(blank=True, default='')
def get_absolute_url(self):
return reverse('product-list')
def __str__(self):
return "{}".format(self.name)
|
from django.db import models
from amadaa.models import AmadaaModel
from django.urls import reverse
from ckeditor.fields import RichTextField
# Create your models here.
class ProductCategory(AmadaaModel):
name = models.CharField(max_length=100, unique=True)
def get_absolute_url(self):
return reverse('product-category-detail', kwargs={'pk': self.pk})
def __str__(self):
return "{}".format(self.name)
class ProductType(AmadaaModel):
name = models.CharField(max_length=100, unique=True)
def get_absolute_url(self):
return reverse('product-type-list')
def __str__(self):
return "{}".format(self.name)
class UnitOfMeasurement(AmadaaModel):
unit = models.CharField(max_length=30, unique=True)
def get_absolute_url(self):
return reverse('uom-list')
def __str__(self):
return "%(self.unit)s"
class Product(AmadaaModel):
name = models.CharField(max_length=100)
internal_ref = models.CharField(max_length=100, default='')
product_type = models.ForeignKey(ProductType, default=0)
category = models.ForeignKey(ProductCategory)
description = RichTextField(blank=True, default='')
def get_absolute_url(self):
return reverse('product-list')
def __str__(self):
return "%(self.name)s"
Fix in string representation of product model.from django.db import models
from amadaa.models import AmadaaModel
from django.urls import reverse
from ckeditor.fields import RichTextField
# Create your models here.
class ProductCategory(AmadaaModel):
name = models.CharField(max_length=100, unique=True)
def get_absolute_url(self):
return reverse('product-category-detail', kwargs={'pk': self.pk})
def __str__(self):
return "{}".format(self.name)
class ProductType(AmadaaModel):
name = models.CharField(max_length=100, unique=True)
def get_absolute_url(self):
return reverse('product-type-list')
def __str__(self):
return "{}".format(self.name)
class UnitOfMeasurement(AmadaaModel):
unit = models.CharField(max_length=30, unique=True)
def get_absolute_url(self):
return reverse('uom-list')
def __str__(self):
return "%(self.unit)s"
class Product(AmadaaModel):
name = models.CharField(max_length=100)
internal_ref = models.CharField(max_length=100, default='')
product_type = models.ForeignKey(ProductType, default=0)
category = models.ForeignKey(ProductCategory)
description = RichTextField(blank=True, default='')
def get_absolute_url(self):
return reverse('product-list')
def __str__(self):
return "{}".format(self.name)
|
<commit_before>from django.db import models
from amadaa.models import AmadaaModel
from django.urls import reverse
from ckeditor.fields import RichTextField
# Create your models here.
class ProductCategory(AmadaaModel):
name = models.CharField(max_length=100, unique=True)
def get_absolute_url(self):
return reverse('product-category-detail', kwargs={'pk': self.pk})
def __str__(self):
return "{}".format(self.name)
class ProductType(AmadaaModel):
name = models.CharField(max_length=100, unique=True)
def get_absolute_url(self):
return reverse('product-type-list')
def __str__(self):
return "{}".format(self.name)
class UnitOfMeasurement(AmadaaModel):
unit = models.CharField(max_length=30, unique=True)
def get_absolute_url(self):
return reverse('uom-list')
def __str__(self):
return "%(self.unit)s"
class Product(AmadaaModel):
name = models.CharField(max_length=100)
internal_ref = models.CharField(max_length=100, default='')
product_type = models.ForeignKey(ProductType, default=0)
category = models.ForeignKey(ProductCategory)
description = RichTextField(blank=True, default='')
def get_absolute_url(self):
return reverse('product-list')
def __str__(self):
return "%(self.name)s"
<commit_msg>Fix in string representation of product model.<commit_after>from django.db import models
from amadaa.models import AmadaaModel
from django.urls import reverse
from ckeditor.fields import RichTextField
# Create your models here.
class ProductCategory(AmadaaModel):
name = models.CharField(max_length=100, unique=True)
def get_absolute_url(self):
return reverse('product-category-detail', kwargs={'pk': self.pk})
def __str__(self):
return "{}".format(self.name)
class ProductType(AmadaaModel):
name = models.CharField(max_length=100, unique=True)
def get_absolute_url(self):
return reverse('product-type-list')
def __str__(self):
return "{}".format(self.name)
class UnitOfMeasurement(AmadaaModel):
unit = models.CharField(max_length=30, unique=True)
def get_absolute_url(self):
return reverse('uom-list')
def __str__(self):
return "%(self.unit)s"
class Product(AmadaaModel):
name = models.CharField(max_length=100)
internal_ref = models.CharField(max_length=100, default='')
product_type = models.ForeignKey(ProductType, default=0)
category = models.ForeignKey(ProductCategory)
description = RichTextField(blank=True, default='')
def get_absolute_url(self):
return reverse('product-list')
def __str__(self):
return "{}".format(self.name)
|
df5040b728ec59f9f548c7bd032d9e8b7ab0c2e0
|
database/queries/update_queries.py
|
database/queries/update_queries.py
|
UPDATE_MOVIE = '''
UPDATE MOVIE
SET column1=?, RATING=?
WHERE MOVIE.ID=?;
'''
UPDATE_PROJECTION = '''
UPDATE PROJECTION
SET MOVIE_ID=?, TYPE=?, DATE=?
WHERE PROJECTION.ID=?;
'''
|
UPDATE_MOVIE = '''
UPDATE MOVIE
SET column1=?, RATING=?
WHERE MOVIE.ID=?;
'''
UPDATE_PROJECTION = '''
UPDATE PROJECTION
SET MOVIE_ID=?, TYPE=?, DATE=?
WHERE PROJECTION.ID=?;
'''
DELETE_RESERVATION = '''
UPDATE RESERVATION
SET USER_ID=?, PROJECTION_ID=?, ROW=?, COL=?
WHERE RESERVATION.ID=?;
'''
|
Add update queries for reservation
|
Add update queries for reservation
|
Python
|
mit
|
BrickText/JHROM
|
UPDATE_MOVIE = '''
UPDATE MOVIE
SET column1=?, RATING=?
WHERE MOVIE.ID=?;
'''
UPDATE_PROJECTION = '''
UPDATE PROJECTION
SET MOVIE_ID=?, TYPE=?, DATE=?
WHERE PROJECTION.ID=?;
'''
Add update queries for reservation
|
UPDATE_MOVIE = '''
UPDATE MOVIE
SET column1=?, RATING=?
WHERE MOVIE.ID=?;
'''
UPDATE_PROJECTION = '''
UPDATE PROJECTION
SET MOVIE_ID=?, TYPE=?, DATE=?
WHERE PROJECTION.ID=?;
'''
DELETE_RESERVATION = '''
UPDATE RESERVATION
SET USER_ID=?, PROJECTION_ID=?, ROW=?, COL=?
WHERE RESERVATION.ID=?;
'''
|
<commit_before>UPDATE_MOVIE = '''
UPDATE MOVIE
SET column1=?, RATING=?
WHERE MOVIE.ID=?;
'''
UPDATE_PROJECTION = '''
UPDATE PROJECTION
SET MOVIE_ID=?, TYPE=?, DATE=?
WHERE PROJECTION.ID=?;
'''
<commit_msg>Add update queries for reservation<commit_after>
|
UPDATE_MOVIE = '''
UPDATE MOVIE
SET column1=?, RATING=?
WHERE MOVIE.ID=?;
'''
UPDATE_PROJECTION = '''
UPDATE PROJECTION
SET MOVIE_ID=?, TYPE=?, DATE=?
WHERE PROJECTION.ID=?;
'''
DELETE_RESERVATION = '''
UPDATE RESERVATION
SET USER_ID=?, PROJECTION_ID=?, ROW=?, COL=?
WHERE RESERVATION.ID=?;
'''
|
UPDATE_MOVIE = '''
UPDATE MOVIE
SET column1=?, RATING=?
WHERE MOVIE.ID=?;
'''
UPDATE_PROJECTION = '''
UPDATE PROJECTION
SET MOVIE_ID=?, TYPE=?, DATE=?
WHERE PROJECTION.ID=?;
'''
Add update queries for reservationUPDATE_MOVIE = '''
UPDATE MOVIE
SET column1=?, RATING=?
WHERE MOVIE.ID=?;
'''
UPDATE_PROJECTION = '''
UPDATE PROJECTION
SET MOVIE_ID=?, TYPE=?, DATE=?
WHERE PROJECTION.ID=?;
'''
DELETE_RESERVATION = '''
UPDATE RESERVATION
SET USER_ID=?, PROJECTION_ID=?, ROW=?, COL=?
WHERE RESERVATION.ID=?;
'''
|
<commit_before>UPDATE_MOVIE = '''
UPDATE MOVIE
SET column1=?, RATING=?
WHERE MOVIE.ID=?;
'''
UPDATE_PROJECTION = '''
UPDATE PROJECTION
SET MOVIE_ID=?, TYPE=?, DATE=?
WHERE PROJECTION.ID=?;
'''
<commit_msg>Add update queries for reservation<commit_after>UPDATE_MOVIE = '''
UPDATE MOVIE
SET column1=?, RATING=?
WHERE MOVIE.ID=?;
'''
UPDATE_PROJECTION = '''
UPDATE PROJECTION
SET MOVIE_ID=?, TYPE=?, DATE=?
WHERE PROJECTION.ID=?;
'''
DELETE_RESERVATION = '''
UPDATE RESERVATION
SET USER_ID=?, PROJECTION_ID=?, ROW=?, COL=?
WHERE RESERVATION.ID=?;
'''
|
1d355a2143daf438b5a2f5185a7f60268ad7c686
|
tests/local_test.py
|
tests/local_test.py
|
from nose.tools import istest, assert_equal
from spur import LocalShell
shell = LocalShell()
@istest
def output_of_run_is_stored():
result = shell.run(["echo", "hello"])
assert_equal("hello\n", result.output)
|
from nose.tools import istest, assert_equal
from spur import LocalShell
shell = LocalShell()
@istest
def output_of_run_is_stored():
result = shell.run(["echo", "hello"])
assert_equal("hello\n", result.output)
@istest
def cwd_of_run_can_be_set():
result = shell.run(["pwd"], cwd="/")
assert_equal("/\n", result.output)
|
Add test for setting cwd in LocalShell.run
|
Add test for setting cwd in LocalShell.run
|
Python
|
bsd-2-clause
|
mwilliamson/spur.py
|
from nose.tools import istest, assert_equal
from spur import LocalShell
shell = LocalShell()
@istest
def output_of_run_is_stored():
result = shell.run(["echo", "hello"])
assert_equal("hello\n", result.output)
Add test for setting cwd in LocalShell.run
|
from nose.tools import istest, assert_equal
from spur import LocalShell
shell = LocalShell()
@istest
def output_of_run_is_stored():
result = shell.run(["echo", "hello"])
assert_equal("hello\n", result.output)
@istest
def cwd_of_run_can_be_set():
result = shell.run(["pwd"], cwd="/")
assert_equal("/\n", result.output)
|
<commit_before>from nose.tools import istest, assert_equal
from spur import LocalShell
shell = LocalShell()
@istest
def output_of_run_is_stored():
result = shell.run(["echo", "hello"])
assert_equal("hello\n", result.output)
<commit_msg>Add test for setting cwd in LocalShell.run<commit_after>
|
from nose.tools import istest, assert_equal
from spur import LocalShell
shell = LocalShell()
@istest
def output_of_run_is_stored():
result = shell.run(["echo", "hello"])
assert_equal("hello\n", result.output)
@istest
def cwd_of_run_can_be_set():
result = shell.run(["pwd"], cwd="/")
assert_equal("/\n", result.output)
|
from nose.tools import istest, assert_equal
from spur import LocalShell
shell = LocalShell()
@istest
def output_of_run_is_stored():
result = shell.run(["echo", "hello"])
assert_equal("hello\n", result.output)
Add test for setting cwd in LocalShell.runfrom nose.tools import istest, assert_equal
from spur import LocalShell
shell = LocalShell()
@istest
def output_of_run_is_stored():
result = shell.run(["echo", "hello"])
assert_equal("hello\n", result.output)
@istest
def cwd_of_run_can_be_set():
result = shell.run(["pwd"], cwd="/")
assert_equal("/\n", result.output)
|
<commit_before>from nose.tools import istest, assert_equal
from spur import LocalShell
shell = LocalShell()
@istest
def output_of_run_is_stored():
result = shell.run(["echo", "hello"])
assert_equal("hello\n", result.output)
<commit_msg>Add test for setting cwd in LocalShell.run<commit_after>from nose.tools import istest, assert_equal
from spur import LocalShell
shell = LocalShell()
@istest
def output_of_run_is_stored():
result = shell.run(["echo", "hello"])
assert_equal("hello\n", result.output)
@istest
def cwd_of_run_can_be_set():
result = shell.run(["pwd"], cwd="/")
assert_equal("/\n", result.output)
|
2f02960607b75e74a757ded1e2472a5fb8585d4f
|
tests/pyb/extint.py
|
tests/pyb/extint.py
|
import pyb
ext = pyb.ExtInt('X1', pyb.ExtInt.IRQ_RISING, pyb.Pin.PULL_DOWN, lambda l:print('line:', l))
ext.disable()
ext.enable()
print(ext.line())
ext.swint()
ext.disable()
|
import pyb
# test basic functionality
ext = pyb.ExtInt('X1', pyb.ExtInt.IRQ_RISING, pyb.Pin.PULL_DOWN, lambda l:print('line:', l))
ext.disable()
ext.enable()
print(ext.line())
ext.swint()
# test swint while disabled, then again after re-enabled
ext.disable()
ext.swint()
ext.enable()
ext.swint()
# disable now that the test is finished
ext.disable()
|
Add test for ExtInt when doing swint while disabled.
|
tests/pyb: Add test for ExtInt when doing swint while disabled.
|
Python
|
mit
|
infinnovation/micropython,adafruit/circuitpython,turbinenreiter/micropython,dxxb/micropython,pfalcon/micropython,infinnovation/micropython,Timmenem/micropython,oopy/micropython,mhoffma/micropython,pfalcon/micropython,selste/micropython,adafruit/circuitpython,Peetz0r/micropython-esp32,oopy/micropython,torwag/micropython,tobbad/micropython,tralamazza/micropython,selste/micropython,HenrikSolver/micropython,AriZuu/micropython,puuu/micropython,alex-march/micropython,lowRISC/micropython,deshipu/micropython,TDAbboud/micropython,chrisdearman/micropython,jmarcelino/pycom-micropython,SHA2017-badge/micropython-esp32,puuu/micropython,trezor/micropython,turbinenreiter/micropython,HenrikSolver/micropython,toolmacher/micropython,pozetroninc/micropython,selste/micropython,pozetroninc/micropython,tobbad/micropython,MrSurly/micropython,cwyark/micropython,blazewicz/micropython,MrSurly/micropython,kerneltask/micropython,alex-robbins/micropython,ryannathans/micropython,mhoffma/micropython,toolmacher/micropython,bvernoux/micropython,lowRISC/micropython,blazewicz/micropython,matthewelse/micropython,kerneltask/micropython,PappaPeppar/micropython,AriZuu/micropython,MrSurly/micropython-esp32,pramasoul/micropython,adafruit/micropython,Peetz0r/micropython-esp32,bvernoux/micropython,henriknelson/micropython,swegener/micropython,kerneltask/micropython,alex-march/micropython,infinnovation/micropython,matthewelse/micropython,pramasoul/micropython,hiway/micropython,MrSurly/micropython-esp32,alex-robbins/micropython,Peetz0r/micropython-esp32,PappaPeppar/micropython,MrSurly/micropython-esp32,jmarcelino/pycom-micropython,HenrikSolver/micropython,tuc-osg/micropython,alex-robbins/micropython,HenrikSolver/micropython,pramasoul/micropython,TDAbboud/micropython,puuu/micropython,blazewicz/micropython,AriZuu/micropython,deshipu/micropython,lowRISC/micropython,chrisdearman/micropython,hiway/micropython,hiway/micropython,pfalcon/micropython,puuu/micropython,trezor/micropython,bvernoux/micropython,adafruit/micr
opython,infinnovation/micropython,Timmenem/micropython,deshipu/micropython,ryannathans/micropython,micropython/micropython-esp32,MrSurly/micropython-esp32,matthewelse/micropython,MrSurly/micropython,matthewelse/micropython,tralamazza/micropython,swegener/micropython,oopy/micropython,pozetroninc/micropython,PappaPeppar/micropython,AriZuu/micropython,pramasoul/micropython,micropython/micropython-esp32,Timmenem/micropython,tuc-osg/micropython,puuu/micropython,henriknelson/micropython,deshipu/micropython,ryannathans/micropython,Timmenem/micropython,oopy/micropython,cwyark/micropython,dxxb/micropython,dmazzella/micropython,mhoffma/micropython,toolmacher/micropython,hosaka/micropython,tobbad/micropython,mhoffma/micropython,MrSurly/micropython,hosaka/micropython,micropython/micropython-esp32,bvernoux/micropython,chrisdearman/micropython,toolmacher/micropython,TDAbboud/micropython,Peetz0r/micropython-esp32,PappaPeppar/micropython,swegener/micropython,chrisdearman/micropython,tuc-osg/micropython,turbinenreiter/micropython,dmazzella/micropython,hosaka/micropython,bvernoux/micropython,Timmenem/micropython,tobbad/micropython,hiway/micropython,cwyark/micropython,MrSurly/micropython-esp32,tobbad/micropython,adafruit/micropython,blazewicz/micropython,henriknelson/micropython,dmazzella/micropython,kerneltask/micropython,pozetroninc/micropython,ryannathans/micropython,alex-march/micropython,alex-march/micropython,micropython/micropython-esp32,turbinenreiter/micropython,SHA2017-badge/micropython-esp32,torwag/micropython,PappaPeppar/micropython,Peetz0r/micropython-esp32,blazewicz/micropython,AriZuu/micropython,TDAbboud/micropython,hosaka/micropython,SHA2017-badge/micropython-esp32,cwyark/micropython,cwyark/micropython,tralamazza/micropython,alex-robbins/micropython,deshipu/micropython,alex-robbins/micropython,torwag/micropython,turbinenreiter/micropython,SHA2017-badge/micropython-esp32,swegener/micropython,MrSurly/micropython,alex-march/micropython,trezor/micropython,torwag/micropytho
n,pfalcon/micropython,henriknelson/micropython,jmarcelino/pycom-micropython,adafruit/micropython,dxxb/micropython,pramasoul/micropython,trezor/micropython,dmazzella/micropython,adafruit/circuitpython,lowRISC/micropython,dxxb/micropython,hiway/micropython,pfalcon/micropython,swegener/micropython,infinnovation/micropython,adafruit/circuitpython,HenrikSolver/micropython,matthewelse/micropython,adafruit/circuitpython,henriknelson/micropython,SHA2017-badge/micropython-esp32,oopy/micropython,jmarcelino/pycom-micropython,adafruit/micropython,dxxb/micropython,chrisdearman/micropython,trezor/micropython,lowRISC/micropython,jmarcelino/pycom-micropython,hosaka/micropython,toolmacher/micropython,adafruit/circuitpython,tuc-osg/micropython,pozetroninc/micropython,matthewelse/micropython,torwag/micropython,selste/micropython,selste/micropython,tuc-osg/micropython,kerneltask/micropython,micropython/micropython-esp32,tralamazza/micropython,mhoffma/micropython,TDAbboud/micropython,ryannathans/micropython
|
import pyb
ext = pyb.ExtInt('X1', pyb.ExtInt.IRQ_RISING, pyb.Pin.PULL_DOWN, lambda l:print('line:', l))
ext.disable()
ext.enable()
print(ext.line())
ext.swint()
ext.disable()
tests/pyb: Add test for ExtInt when doing swint while disabled.
|
import pyb
# test basic functionality
ext = pyb.ExtInt('X1', pyb.ExtInt.IRQ_RISING, pyb.Pin.PULL_DOWN, lambda l:print('line:', l))
ext.disable()
ext.enable()
print(ext.line())
ext.swint()
# test swint while disabled, then again after re-enabled
ext.disable()
ext.swint()
ext.enable()
ext.swint()
# disable now that the test is finished
ext.disable()
|
<commit_before>import pyb
ext = pyb.ExtInt('X1', pyb.ExtInt.IRQ_RISING, pyb.Pin.PULL_DOWN, lambda l:print('line:', l))
ext.disable()
ext.enable()
print(ext.line())
ext.swint()
ext.disable()
<commit_msg>tests/pyb: Add test for ExtInt when doing swint while disabled.<commit_after>
|
import pyb
# test basic functionality
ext = pyb.ExtInt('X1', pyb.ExtInt.IRQ_RISING, pyb.Pin.PULL_DOWN, lambda l:print('line:', l))
ext.disable()
ext.enable()
print(ext.line())
ext.swint()
# test swint while disabled, then again after re-enabled
ext.disable()
ext.swint()
ext.enable()
ext.swint()
# disable now that the test is finished
ext.disable()
|
import pyb
ext = pyb.ExtInt('X1', pyb.ExtInt.IRQ_RISING, pyb.Pin.PULL_DOWN, lambda l:print('line:', l))
ext.disable()
ext.enable()
print(ext.line())
ext.swint()
ext.disable()
tests/pyb: Add test for ExtInt when doing swint while disabled.import pyb
# test basic functionality
ext = pyb.ExtInt('X1', pyb.ExtInt.IRQ_RISING, pyb.Pin.PULL_DOWN, lambda l:print('line:', l))
ext.disable()
ext.enable()
print(ext.line())
ext.swint()
# test swint while disabled, then again after re-enabled
ext.disable()
ext.swint()
ext.enable()
ext.swint()
# disable now that the test is finished
ext.disable()
|
<commit_before>import pyb
ext = pyb.ExtInt('X1', pyb.ExtInt.IRQ_RISING, pyb.Pin.PULL_DOWN, lambda l:print('line:', l))
ext.disable()
ext.enable()
print(ext.line())
ext.swint()
ext.disable()
<commit_msg>tests/pyb: Add test for ExtInt when doing swint while disabled.<commit_after>import pyb
# test basic functionality
ext = pyb.ExtInt('X1', pyb.ExtInt.IRQ_RISING, pyb.Pin.PULL_DOWN, lambda l:print('line:', l))
ext.disable()
ext.enable()
print(ext.line())
ext.swint()
# test swint while disabled, then again after re-enabled
ext.disable()
ext.swint()
ext.enable()
ext.swint()
# disable now that the test is finished
ext.disable()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.