commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
bc31bb2ddbf5b7f2e5d375a8b0d6e01f631d0aef
|
txircd/modules/extra/snotice_links.py
|
txircd/modules/extra/snotice_links.py
|
from twisted.plugin import IPlugin
from txircd.modbase import IModuleData, ModuleData
from zope.interface import implements
class SnoLinks(ModuleData):
implements(IPlugin, IModuleData)
name = "ServerNoticeLinks"
def actions(self):
return [ ("serverconnect", 1, self.announceConnect),
("serverquit", 1, self.announceQuit),
("servernoticetype", 1, self.checkSnoType) ]
def announceConnect(self, server):
message = "Server {} ({}) connected (to {})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name)
self.ircd.runActionStandard("sendservernotice", "links", message)
def announceQuit(self, server, reason):
message = "Server {} ({}) disconnected (from {}) ({})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name, reason)
self.ircd.runActionStandard("sendservernotice", "links", message)
def checkSnoType(self, user, typename):
if typename == "links":
return True
return False
snoLinks = SnoLinks()
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class SnoLinks(ModuleData):
implements(IPlugin, IModuleData)
name = "ServerNoticeLinks"
def actions(self):
return [ ("serverconnect", 1, self.announceConnect),
("serverquit", 1, self.announceQuit),
("servernoticetype", 1, self.checkSnoType) ]
def announceConnect(self, server):
message = "Server {} ({}) connected (to {})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name)
self.ircd.runActionStandard("sendservernotice", "links", message)
def announceQuit(self, server, reason):
message = "Server {} ({}) disconnected (from {}) ({})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name, reason)
self.ircd.runActionStandard("sendservernotice", "links", message)
def checkSnoType(self, user, typename):
if typename == "links":
return True
return False
snoLinks = SnoLinks()
|
Fix module data import on links server notice
|
Fix module data import on links server notice
|
Python
|
bsd-3-clause
|
Heufneutje/txircd
|
from twisted.plugin import IPlugin
from txircd.modbase import IModuleData, ModuleData
from zope.interface import implements
class SnoLinks(ModuleData):
implements(IPlugin, IModuleData)
name = "ServerNoticeLinks"
def actions(self):
return [ ("serverconnect", 1, self.announceConnect),
("serverquit", 1, self.announceQuit),
("servernoticetype", 1, self.checkSnoType) ]
def announceConnect(self, server):
message = "Server {} ({}) connected (to {})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name)
self.ircd.runActionStandard("sendservernotice", "links", message)
def announceQuit(self, server, reason):
message = "Server {} ({}) disconnected (from {}) ({})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name, reason)
self.ircd.runActionStandard("sendservernotice", "links", message)
def checkSnoType(self, user, typename):
if typename == "links":
return True
return False
snoLinks = SnoLinks()Fix module data import on links server notice
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class SnoLinks(ModuleData):
implements(IPlugin, IModuleData)
name = "ServerNoticeLinks"
def actions(self):
return [ ("serverconnect", 1, self.announceConnect),
("serverquit", 1, self.announceQuit),
("servernoticetype", 1, self.checkSnoType) ]
def announceConnect(self, server):
message = "Server {} ({}) connected (to {})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name)
self.ircd.runActionStandard("sendservernotice", "links", message)
def announceQuit(self, server, reason):
message = "Server {} ({}) disconnected (from {}) ({})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name, reason)
self.ircd.runActionStandard("sendservernotice", "links", message)
def checkSnoType(self, user, typename):
if typename == "links":
return True
return False
snoLinks = SnoLinks()
|
<commit_before>from twisted.plugin import IPlugin
from txircd.modbase import IModuleData, ModuleData
from zope.interface import implements
class SnoLinks(ModuleData):
implements(IPlugin, IModuleData)
name = "ServerNoticeLinks"
def actions(self):
return [ ("serverconnect", 1, self.announceConnect),
("serverquit", 1, self.announceQuit),
("servernoticetype", 1, self.checkSnoType) ]
def announceConnect(self, server):
message = "Server {} ({}) connected (to {})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name)
self.ircd.runActionStandard("sendservernotice", "links", message)
def announceQuit(self, server, reason):
message = "Server {} ({}) disconnected (from {}) ({})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name, reason)
self.ircd.runActionStandard("sendservernotice", "links", message)
def checkSnoType(self, user, typename):
if typename == "links":
return True
return False
snoLinks = SnoLinks()<commit_msg>Fix module data import on links server notice<commit_after>
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class SnoLinks(ModuleData):
implements(IPlugin, IModuleData)
name = "ServerNoticeLinks"
def actions(self):
return [ ("serverconnect", 1, self.announceConnect),
("serverquit", 1, self.announceQuit),
("servernoticetype", 1, self.checkSnoType) ]
def announceConnect(self, server):
message = "Server {} ({}) connected (to {})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name)
self.ircd.runActionStandard("sendservernotice", "links", message)
def announceQuit(self, server, reason):
message = "Server {} ({}) disconnected (from {}) ({})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name, reason)
self.ircd.runActionStandard("sendservernotice", "links", message)
def checkSnoType(self, user, typename):
if typename == "links":
return True
return False
snoLinks = SnoLinks()
|
from twisted.plugin import IPlugin
from txircd.modbase import IModuleData, ModuleData
from zope.interface import implements
class SnoLinks(ModuleData):
implements(IPlugin, IModuleData)
name = "ServerNoticeLinks"
def actions(self):
return [ ("serverconnect", 1, self.announceConnect),
("serverquit", 1, self.announceQuit),
("servernoticetype", 1, self.checkSnoType) ]
def announceConnect(self, server):
message = "Server {} ({}) connected (to {})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name)
self.ircd.runActionStandard("sendservernotice", "links", message)
def announceQuit(self, server, reason):
message = "Server {} ({}) disconnected (from {}) ({})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name, reason)
self.ircd.runActionStandard("sendservernotice", "links", message)
def checkSnoType(self, user, typename):
if typename == "links":
return True
return False
snoLinks = SnoLinks()Fix module data import on links server noticefrom twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class SnoLinks(ModuleData):
implements(IPlugin, IModuleData)
name = "ServerNoticeLinks"
def actions(self):
return [ ("serverconnect", 1, self.announceConnect),
("serverquit", 1, self.announceQuit),
("servernoticetype", 1, self.checkSnoType) ]
def announceConnect(self, server):
message = "Server {} ({}) connected (to {})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name)
self.ircd.runActionStandard("sendservernotice", "links", message)
def announceQuit(self, server, reason):
message = "Server {} ({}) disconnected (from {}) ({})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name, reason)
self.ircd.runActionStandard("sendservernotice", "links", message)
def checkSnoType(self, user, typename):
if typename == "links":
return True
return False
snoLinks = SnoLinks()
|
<commit_before>from twisted.plugin import IPlugin
from txircd.modbase import IModuleData, ModuleData
from zope.interface import implements
class SnoLinks(ModuleData):
implements(IPlugin, IModuleData)
name = "ServerNoticeLinks"
def actions(self):
return [ ("serverconnect", 1, self.announceConnect),
("serverquit", 1, self.announceQuit),
("servernoticetype", 1, self.checkSnoType) ]
def announceConnect(self, server):
message = "Server {} ({}) connected (to {})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name)
self.ircd.runActionStandard("sendservernotice", "links", message)
def announceQuit(self, server, reason):
message = "Server {} ({}) disconnected (from {}) ({})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name, reason)
self.ircd.runActionStandard("sendservernotice", "links", message)
def checkSnoType(self, user, typename):
if typename == "links":
return True
return False
snoLinks = SnoLinks()<commit_msg>Fix module data import on links server notice<commit_after>from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class SnoLinks(ModuleData):
implements(IPlugin, IModuleData)
name = "ServerNoticeLinks"
def actions(self):
return [ ("serverconnect", 1, self.announceConnect),
("serverquit", 1, self.announceQuit),
("servernoticetype", 1, self.checkSnoType) ]
def announceConnect(self, server):
message = "Server {} ({}) connected (to {})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name)
self.ircd.runActionStandard("sendservernotice", "links", message)
def announceQuit(self, server, reason):
message = "Server {} ({}) disconnected (from {}) ({})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name, reason)
self.ircd.runActionStandard("sendservernotice", "links", message)
def checkSnoType(self, user, typename):
if typename == "links":
return True
return False
snoLinks = SnoLinks()
|
076a11626b2e5567f216a0593e46b30df3588545
|
paystackapi/trecipient.py
|
paystackapi/trecipient.py
|
"""Script used to define the paystack Transfer Recipient class."""
from paystackapi.base import PayStackBase
class Invoice(PayStackBase):
"""docstring for Transfer Recipient."""
@classmethod
def create(cls, **kwargs):
"""
Method defined to create transfer recipient.
Args:
type: Recipient Type (Only nuban at this time)
name: A name for the recipient
account_number: Required if type is nuban
bank_code: Required if type is nuban.
You can get the list of Bank Codes by calling the List Banks endpoint.
**kwargs
Returns:
Json data from paystack API.
"""
return cls().requests.post('transferrecipient', data=kwargs,)
@classmethod
def list(cls, **kwargs):
"""
Method defined to create transfer recipient.
Args:
perPage: records you want to retrieve per page (Integer)
page: what page you want to retrieve (Integer)
Returns:
Json data from paystack API.
"""
return cls().requests.get('transferrecipient', qs=kwargs,)
|
"""Script used to define the paystack Transfer Recipient class."""
from paystackapi.base import PayStackBase
class Invoice(PayStackBase):
"""docstring for Transfer Recipient."""
@classmethod
def create(cls, **kwargs):
"""
Method defined to create transfer recipient.
Args:
type: Recipient Type (Only nuban at this time)
name: A name for the recipient
account_number: Required if type is nuban
bank_code: Required if type is nuban.
You can get the list of Bank Codes by calling the List Banks endpoint.
**kwargs
Returns:
Json data from paystack API.
"""
return cls().requests.post('transferrecipient', data=kwargs,)
@classmethod
def list(cls, **kwargs):
"""
Method defined to list transfer recipient.
Args:
perPage: records you want to retrieve per page (Integer)
page: what page you want to retrieve (Integer)
Returns:
Json data from paystack API.
"""
return cls().requests.get('transferrecipient', qs=kwargs,)
|
Update list for transfer recipient
|
Update list for transfer recipient
|
Python
|
mit
|
andela-sjames/paystack-python
|
"""Script used to define the paystack Transfer Recipient class."""
from paystackapi.base import PayStackBase
class Invoice(PayStackBase):
"""docstring for Transfer Recipient."""
@classmethod
def create(cls, **kwargs):
"""
Method defined to create transfer recipient.
Args:
type: Recipient Type (Only nuban at this time)
name: A name for the recipient
account_number: Required if type is nuban
bank_code: Required if type is nuban.
You can get the list of Bank Codes by calling the List Banks endpoint.
**kwargs
Returns:
Json data from paystack API.
"""
return cls().requests.post('transferrecipient', data=kwargs,)
@classmethod
def list(cls, **kwargs):
"""
Method defined to create transfer recipient.
Args:
perPage: records you want to retrieve per page (Integer)
page: what page you want to retrieve (Integer)
Returns:
Json data from paystack API.
"""
return cls().requests.get('transferrecipient', qs=kwargs,)
Update list for transfer recipient
|
"""Script used to define the paystack Transfer Recipient class."""
from paystackapi.base import PayStackBase
class Invoice(PayStackBase):
"""docstring for Transfer Recipient."""
@classmethod
def create(cls, **kwargs):
"""
Method defined to create transfer recipient.
Args:
type: Recipient Type (Only nuban at this time)
name: A name for the recipient
account_number: Required if type is nuban
bank_code: Required if type is nuban.
You can get the list of Bank Codes by calling the List Banks endpoint.
**kwargs
Returns:
Json data from paystack API.
"""
return cls().requests.post('transferrecipient', data=kwargs,)
@classmethod
def list(cls, **kwargs):
"""
Method defined to list transfer recipient.
Args:
perPage: records you want to retrieve per page (Integer)
page: what page you want to retrieve (Integer)
Returns:
Json data from paystack API.
"""
return cls().requests.get('transferrecipient', qs=kwargs,)
|
<commit_before>"""Script used to define the paystack Transfer Recipient class."""
from paystackapi.base import PayStackBase
class Invoice(PayStackBase):
"""docstring for Transfer Recipient."""
@classmethod
def create(cls, **kwargs):
"""
Method defined to create transfer recipient.
Args:
type: Recipient Type (Only nuban at this time)
name: A name for the recipient
account_number: Required if type is nuban
bank_code: Required if type is nuban.
You can get the list of Bank Codes by calling the List Banks endpoint.
**kwargs
Returns:
Json data from paystack API.
"""
return cls().requests.post('transferrecipient', data=kwargs,)
@classmethod
def list(cls, **kwargs):
"""
Method defined to create transfer recipient.
Args:
perPage: records you want to retrieve per page (Integer)
page: what page you want to retrieve (Integer)
Returns:
Json data from paystack API.
"""
return cls().requests.get('transferrecipient', qs=kwargs,)
<commit_msg>Update list for transfer recipient<commit_after>
|
"""Script used to define the paystack Transfer Recipient class."""
from paystackapi.base import PayStackBase
class Invoice(PayStackBase):
"""docstring for Transfer Recipient."""
@classmethod
def create(cls, **kwargs):
"""
Method defined to create transfer recipient.
Args:
type: Recipient Type (Only nuban at this time)
name: A name for the recipient
account_number: Required if type is nuban
bank_code: Required if type is nuban.
You can get the list of Bank Codes by calling the List Banks endpoint.
**kwargs
Returns:
Json data from paystack API.
"""
return cls().requests.post('transferrecipient', data=kwargs,)
@classmethod
def list(cls, **kwargs):
"""
Method defined to list transfer recipient.
Args:
perPage: records you want to retrieve per page (Integer)
page: what page you want to retrieve (Integer)
Returns:
Json data from paystack API.
"""
return cls().requests.get('transferrecipient', qs=kwargs,)
|
"""Script used to define the paystack Transfer Recipient class."""
from paystackapi.base import PayStackBase
class Invoice(PayStackBase):
"""docstring for Transfer Recipient."""
@classmethod
def create(cls, **kwargs):
"""
Method defined to create transfer recipient.
Args:
type: Recipient Type (Only nuban at this time)
name: A name for the recipient
account_number: Required if type is nuban
bank_code: Required if type is nuban.
You can get the list of Bank Codes by calling the List Banks endpoint.
**kwargs
Returns:
Json data from paystack API.
"""
return cls().requests.post('transferrecipient', data=kwargs,)
@classmethod
def list(cls, **kwargs):
"""
Method defined to create transfer recipient.
Args:
perPage: records you want to retrieve per page (Integer)
page: what page you want to retrieve (Integer)
Returns:
Json data from paystack API.
"""
return cls().requests.get('transferrecipient', qs=kwargs,)
Update list for transfer recipient"""Script used to define the paystack Transfer Recipient class."""
from paystackapi.base import PayStackBase
class Invoice(PayStackBase):
"""docstring for Transfer Recipient."""
@classmethod
def create(cls, **kwargs):
"""
Method defined to create transfer recipient.
Args:
type: Recipient Type (Only nuban at this time)
name: A name for the recipient
account_number: Required if type is nuban
bank_code: Required if type is nuban.
You can get the list of Bank Codes by calling the List Banks endpoint.
**kwargs
Returns:
Json data from paystack API.
"""
return cls().requests.post('transferrecipient', data=kwargs,)
@classmethod
def list(cls, **kwargs):
"""
Method defined to list transfer recipient.
Args:
perPage: records you want to retrieve per page (Integer)
page: what page you want to retrieve (Integer)
Returns:
Json data from paystack API.
"""
return cls().requests.get('transferrecipient', qs=kwargs,)
|
<commit_before>"""Script used to define the paystack Transfer Recipient class."""
from paystackapi.base import PayStackBase
class Invoice(PayStackBase):
"""docstring for Transfer Recipient."""
@classmethod
def create(cls, **kwargs):
"""
Method defined to create transfer recipient.
Args:
type: Recipient Type (Only nuban at this time)
name: A name for the recipient
account_number: Required if type is nuban
bank_code: Required if type is nuban.
You can get the list of Bank Codes by calling the List Banks endpoint.
**kwargs
Returns:
Json data from paystack API.
"""
return cls().requests.post('transferrecipient', data=kwargs,)
@classmethod
def list(cls, **kwargs):
"""
Method defined to create transfer recipient.
Args:
perPage: records you want to retrieve per page (Integer)
page: what page you want to retrieve (Integer)
Returns:
Json data from paystack API.
"""
return cls().requests.get('transferrecipient', qs=kwargs,)
<commit_msg>Update list for transfer recipient<commit_after>"""Script used to define the paystack Transfer Recipient class."""
from paystackapi.base import PayStackBase
class Invoice(PayStackBase):
"""docstring for Transfer Recipient."""
@classmethod
def create(cls, **kwargs):
"""
Method defined to create transfer recipient.
Args:
type: Recipient Type (Only nuban at this time)
name: A name for the recipient
account_number: Required if type is nuban
bank_code: Required if type is nuban.
You can get the list of Bank Codes by calling the List Banks endpoint.
**kwargs
Returns:
Json data from paystack API.
"""
return cls().requests.post('transferrecipient', data=kwargs,)
@classmethod
def list(cls, **kwargs):
"""
Method defined to list transfer recipient.
Args:
perPage: records you want to retrieve per page (Integer)
page: what page you want to retrieve (Integer)
Returns:
Json data from paystack API.
"""
return cls().requests.get('transferrecipient', qs=kwargs,)
|
96f933bcfef90ba984e43947b46f9557e760e838
|
project/category/views.py
|
project/category/views.py
|
from flask import render_template, Blueprint, url_for, \
redirect, flash, request
from project.models import Category, Webinar
from .helpers import slugify
category_blueprint = Blueprint('category', __name__,)
|
from flask import render_template, Blueprint, url_for, \
redirect, flash, request
from project.models import Category, Webinar
from .helpers import slugify
category_blueprint = Blueprint('category', __name__,)
@category_blueprint.route('/categories')
def index():
categories = Category.query.all()
return render_template('category/categories.html', categories=categories)
|
Create simple categories page view
|
Create simple categories page view
|
Python
|
mit
|
dylanshine/streamschool,dylanshine/streamschool
|
from flask import render_template, Blueprint, url_for, \
redirect, flash, request
from project.models import Category, Webinar
from .helpers import slugify
category_blueprint = Blueprint('category', __name__,)
Create simple categories page view
|
from flask import render_template, Blueprint, url_for, \
redirect, flash, request
from project.models import Category, Webinar
from .helpers import slugify
category_blueprint = Blueprint('category', __name__,)
@category_blueprint.route('/categories')
def index():
categories = Category.query.all()
return render_template('category/categories.html', categories=categories)
|
<commit_before>from flask import render_template, Blueprint, url_for, \
redirect, flash, request
from project.models import Category, Webinar
from .helpers import slugify
category_blueprint = Blueprint('category', __name__,)
<commit_msg>Create simple categories page view<commit_after>
|
from flask import render_template, Blueprint, url_for, \
redirect, flash, request
from project.models import Category, Webinar
from .helpers import slugify
category_blueprint = Blueprint('category', __name__,)
@category_blueprint.route('/categories')
def index():
categories = Category.query.all()
return render_template('category/categories.html', categories=categories)
|
from flask import render_template, Blueprint, url_for, \
redirect, flash, request
from project.models import Category, Webinar
from .helpers import slugify
category_blueprint = Blueprint('category', __name__,)
Create simple categories page viewfrom flask import render_template, Blueprint, url_for, \
redirect, flash, request
from project.models import Category, Webinar
from .helpers import slugify
category_blueprint = Blueprint('category', __name__,)
@category_blueprint.route('/categories')
def index():
categories = Category.query.all()
return render_template('category/categories.html', categories=categories)
|
<commit_before>from flask import render_template, Blueprint, url_for, \
redirect, flash, request
from project.models import Category, Webinar
from .helpers import slugify
category_blueprint = Blueprint('category', __name__,)
<commit_msg>Create simple categories page view<commit_after>from flask import render_template, Blueprint, url_for, \
redirect, flash, request
from project.models import Category, Webinar
from .helpers import slugify
category_blueprint = Blueprint('category', __name__,)
@category_blueprint.route('/categories')
def index():
categories = Category.query.all()
return render_template('category/categories.html', categories=categories)
|
a736355efe592d4a6418740f791f3526db2fc67a
|
protocols/no_reconnect.py
|
protocols/no_reconnect.py
|
try:
from .. import api, shared as G
from ... import editor
from ..exc_fmt import str_e
from ..protocols import floo_proto
except (ImportError, ValueError):
from floo import editor
from floo.common import api, shared as G
from floo.common.exc_fmt import str_e
from floo.common.protocols import floo_proto
PORT_BLOCK_MSG = '''The Floobits plugin can't work because outbound traffic on TCP port 3448 is being blocked.
See https://%s/help/network'''
class NoReconnectProto(floo_proto.FlooProtocol):
def reconnect(self):
try:
api.get_workspace(self.host, 'Floobits', 'doesnotexist')
except Exception as e:
print(str_e(e))
editor.error_message('Something went wrong. See https://%s/help/floorc to complete the installation.' % self.host)
else:
if not G.OUTBOUND_FILTERING:
G.OUTBOUND_FILTERING = True
return super(NoReconnectProto, self).reconnect()
editor.error_message('Something went wrong. See https://%s/help/floorc to complete the installation.' % self.host)
self.stop()
|
try:
from .. import api, shared as G
from ... import editor
from ..exc_fmt import str_e
from ..protocols import floo_proto
except (ImportError, ValueError):
from floo import editor
from floo.common import api, shared as G
from floo.common.exc_fmt import str_e
from floo.common.protocols import floo_proto
PORT_BLOCK_MSG = '''The Floobits plugin can't work because outbound traffic on TCP port 3448 is being blocked.
See https://%s/help/network'''
class NoReconnectProto(floo_proto.FlooProtocol):
def reconnect(self):
try:
api.get_workspace(self.host, 'Floobits', 'doesnotexist')
except Exception as e:
print(str_e(e))
editor.error_message('Something went wrong. See https://%s/help/floorc to complete the installation.' % self.host)
else:
if not G.OUTBOUND_FILTERING:
G.OUTBOUND_FILTERING = True
return self.connect()
editor.error_message('Something went wrong. See https://%s/help/floorc to complete the installation.' % self.host)
self.stop()
|
Call connect instead of reconnect.
|
Call connect instead of reconnect.
|
Python
|
apache-2.0
|
Floobits/plugin-common-python
|
try:
from .. import api, shared as G
from ... import editor
from ..exc_fmt import str_e
from ..protocols import floo_proto
except (ImportError, ValueError):
from floo import editor
from floo.common import api, shared as G
from floo.common.exc_fmt import str_e
from floo.common.protocols import floo_proto
PORT_BLOCK_MSG = '''The Floobits plugin can't work because outbound traffic on TCP port 3448 is being blocked.
See https://%s/help/network'''
class NoReconnectProto(floo_proto.FlooProtocol):
def reconnect(self):
try:
api.get_workspace(self.host, 'Floobits', 'doesnotexist')
except Exception as e:
print(str_e(e))
editor.error_message('Something went wrong. See https://%s/help/floorc to complete the installation.' % self.host)
else:
if not G.OUTBOUND_FILTERING:
G.OUTBOUND_FILTERING = True
return super(NoReconnectProto, self).reconnect()
editor.error_message('Something went wrong. See https://%s/help/floorc to complete the installation.' % self.host)
self.stop()
Call connect instead of reconnect.
|
try:
from .. import api, shared as G
from ... import editor
from ..exc_fmt import str_e
from ..protocols import floo_proto
except (ImportError, ValueError):
from floo import editor
from floo.common import api, shared as G
from floo.common.exc_fmt import str_e
from floo.common.protocols import floo_proto
PORT_BLOCK_MSG = '''The Floobits plugin can't work because outbound traffic on TCP port 3448 is being blocked.
See https://%s/help/network'''
class NoReconnectProto(floo_proto.FlooProtocol):
def reconnect(self):
try:
api.get_workspace(self.host, 'Floobits', 'doesnotexist')
except Exception as e:
print(str_e(e))
editor.error_message('Something went wrong. See https://%s/help/floorc to complete the installation.' % self.host)
else:
if not G.OUTBOUND_FILTERING:
G.OUTBOUND_FILTERING = True
return self.connect()
editor.error_message('Something went wrong. See https://%s/help/floorc to complete the installation.' % self.host)
self.stop()
|
<commit_before>try:
from .. import api, shared as G
from ... import editor
from ..exc_fmt import str_e
from ..protocols import floo_proto
except (ImportError, ValueError):
from floo import editor
from floo.common import api, shared as G
from floo.common.exc_fmt import str_e
from floo.common.protocols import floo_proto
PORT_BLOCK_MSG = '''The Floobits plugin can't work because outbound traffic on TCP port 3448 is being blocked.
See https://%s/help/network'''
class NoReconnectProto(floo_proto.FlooProtocol):
def reconnect(self):
try:
api.get_workspace(self.host, 'Floobits', 'doesnotexist')
except Exception as e:
print(str_e(e))
editor.error_message('Something went wrong. See https://%s/help/floorc to complete the installation.' % self.host)
else:
if not G.OUTBOUND_FILTERING:
G.OUTBOUND_FILTERING = True
return super(NoReconnectProto, self).reconnect()
editor.error_message('Something went wrong. See https://%s/help/floorc to complete the installation.' % self.host)
self.stop()
<commit_msg>Call connect instead of reconnect.<commit_after>
|
try:
from .. import api, shared as G
from ... import editor
from ..exc_fmt import str_e
from ..protocols import floo_proto
except (ImportError, ValueError):
from floo import editor
from floo.common import api, shared as G
from floo.common.exc_fmt import str_e
from floo.common.protocols import floo_proto
PORT_BLOCK_MSG = '''The Floobits plugin can't work because outbound traffic on TCP port 3448 is being blocked.
See https://%s/help/network'''
class NoReconnectProto(floo_proto.FlooProtocol):
def reconnect(self):
try:
api.get_workspace(self.host, 'Floobits', 'doesnotexist')
except Exception as e:
print(str_e(e))
editor.error_message('Something went wrong. See https://%s/help/floorc to complete the installation.' % self.host)
else:
if not G.OUTBOUND_FILTERING:
G.OUTBOUND_FILTERING = True
return self.connect()
editor.error_message('Something went wrong. See https://%s/help/floorc to complete the installation.' % self.host)
self.stop()
|
try:
from .. import api, shared as G
from ... import editor
from ..exc_fmt import str_e
from ..protocols import floo_proto
except (ImportError, ValueError):
from floo import editor
from floo.common import api, shared as G
from floo.common.exc_fmt import str_e
from floo.common.protocols import floo_proto
PORT_BLOCK_MSG = '''The Floobits plugin can't work because outbound traffic on TCP port 3448 is being blocked.
See https://%s/help/network'''
class NoReconnectProto(floo_proto.FlooProtocol):
def reconnect(self):
try:
api.get_workspace(self.host, 'Floobits', 'doesnotexist')
except Exception as e:
print(str_e(e))
editor.error_message('Something went wrong. See https://%s/help/floorc to complete the installation.' % self.host)
else:
if not G.OUTBOUND_FILTERING:
G.OUTBOUND_FILTERING = True
return super(NoReconnectProto, self).reconnect()
editor.error_message('Something went wrong. See https://%s/help/floorc to complete the installation.' % self.host)
self.stop()
Call connect instead of reconnect.try:
from .. import api, shared as G
from ... import editor
from ..exc_fmt import str_e
from ..protocols import floo_proto
except (ImportError, ValueError):
from floo import editor
from floo.common import api, shared as G
from floo.common.exc_fmt import str_e
from floo.common.protocols import floo_proto
PORT_BLOCK_MSG = '''The Floobits plugin can't work because outbound traffic on TCP port 3448 is being blocked.
See https://%s/help/network'''
class NoReconnectProto(floo_proto.FlooProtocol):
def reconnect(self):
try:
api.get_workspace(self.host, 'Floobits', 'doesnotexist')
except Exception as e:
print(str_e(e))
editor.error_message('Something went wrong. See https://%s/help/floorc to complete the installation.' % self.host)
else:
if not G.OUTBOUND_FILTERING:
G.OUTBOUND_FILTERING = True
return self.connect()
editor.error_message('Something went wrong. See https://%s/help/floorc to complete the installation.' % self.host)
self.stop()
|
<commit_before>try:
from .. import api, shared as G
from ... import editor
from ..exc_fmt import str_e
from ..protocols import floo_proto
except (ImportError, ValueError):
from floo import editor
from floo.common import api, shared as G
from floo.common.exc_fmt import str_e
from floo.common.protocols import floo_proto
PORT_BLOCK_MSG = '''The Floobits plugin can't work because outbound traffic on TCP port 3448 is being blocked.
See https://%s/help/network'''
class NoReconnectProto(floo_proto.FlooProtocol):
def reconnect(self):
try:
api.get_workspace(self.host, 'Floobits', 'doesnotexist')
except Exception as e:
print(str_e(e))
editor.error_message('Something went wrong. See https://%s/help/floorc to complete the installation.' % self.host)
else:
if not G.OUTBOUND_FILTERING:
G.OUTBOUND_FILTERING = True
return super(NoReconnectProto, self).reconnect()
editor.error_message('Something went wrong. See https://%s/help/floorc to complete the installation.' % self.host)
self.stop()
<commit_msg>Call connect instead of reconnect.<commit_after>try:
from .. import api, shared as G
from ... import editor
from ..exc_fmt import str_e
from ..protocols import floo_proto
except (ImportError, ValueError):
from floo import editor
from floo.common import api, shared as G
from floo.common.exc_fmt import str_e
from floo.common.protocols import floo_proto
PORT_BLOCK_MSG = '''The Floobits plugin can't work because outbound traffic on TCP port 3448 is being blocked.
See https://%s/help/network'''
class NoReconnectProto(floo_proto.FlooProtocol):
def reconnect(self):
try:
api.get_workspace(self.host, 'Floobits', 'doesnotexist')
except Exception as e:
print(str_e(e))
editor.error_message('Something went wrong. See https://%s/help/floorc to complete the installation.' % self.host)
else:
if not G.OUTBOUND_FILTERING:
G.OUTBOUND_FILTERING = True
return self.connect()
editor.error_message('Something went wrong. See https://%s/help/floorc to complete the installation.' % self.host)
self.stop()
|
7aa74665e69aa7117ebae24e7aa12baa07d2119a
|
tests/test__compat.py
|
tests/test__compat.py
|
# -*- coding: utf-8 -*-
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pytest
import numpy as np
import dask
import dask.array as da
import dask.array.utils as dau
import dask_distance._compat
@pytest.mark.parametrize("x", [
list(range(5)),
np.random.randint(10, size=(15, 16)),
da.random.randint(10, size=(15, 16), chunks=(5, 5)),
])
def test_asarray(x):
d = dask_distance._compat._asarray(x)
assert isinstance(d, da.Array)
if not isinstance(x, (np.ndarray, da.Array)):
x = np.asarray(x)
dau.assert_eq(d, x)
|
Include some tests for _asarray
|
Include some tests for _asarray
Make sure that it correctly converts everything to a Dask Array. Try
using a Python list, NumPy array, and Dask Array. Also make sure the
final array has the same contents as the original. Borrowed from
`dask-ndmeasure`.
|
Python
|
bsd-3-clause
|
jakirkham/dask-distance
|
# -*- coding: utf-8 -*-
Include some tests for _asarray
Make sure that it correctly converts everything to a Dask Array. Try
using a Python list, NumPy array, and Dask Array. Also make sure the
final array has the same contents as the original. Borrowed from
`dask-ndmeasure`.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pytest
import numpy as np
import dask
import dask.array as da
import dask.array.utils as dau
import dask_distance._compat
@pytest.mark.parametrize("x", [
list(range(5)),
np.random.randint(10, size=(15, 16)),
da.random.randint(10, size=(15, 16), chunks=(5, 5)),
])
def test_asarray(x):
d = dask_distance._compat._asarray(x)
assert isinstance(d, da.Array)
if not isinstance(x, (np.ndarray, da.Array)):
x = np.asarray(x)
dau.assert_eq(d, x)
|
<commit_before># -*- coding: utf-8 -*-
<commit_msg>Include some tests for _asarray
Make sure that it correctly converts everything to a Dask Array. Try
using a Python list, NumPy array, and Dask Array. Also make sure the
final array has the same contents as the original. Borrowed from
`dask-ndmeasure`.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pytest
import numpy as np
import dask
import dask.array as da
import dask.array.utils as dau
import dask_distance._compat
@pytest.mark.parametrize("x", [
list(range(5)),
np.random.randint(10, size=(15, 16)),
da.random.randint(10, size=(15, 16), chunks=(5, 5)),
])
def test_asarray(x):
d = dask_distance._compat._asarray(x)
assert isinstance(d, da.Array)
if not isinstance(x, (np.ndarray, da.Array)):
x = np.asarray(x)
dau.assert_eq(d, x)
|
# -*- coding: utf-8 -*-
Include some tests for _asarray
Make sure that it correctly converts everything to a Dask Array. Try
using a Python list, NumPy array, and Dask Array. Also make sure the
final array has the same contents as the original. Borrowed from
`dask-ndmeasure`.#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pytest
import numpy as np
import dask
import dask.array as da
import dask.array.utils as dau
import dask_distance._compat
@pytest.mark.parametrize("x", [
list(range(5)),
np.random.randint(10, size=(15, 16)),
da.random.randint(10, size=(15, 16), chunks=(5, 5)),
])
def test_asarray(x):
d = dask_distance._compat._asarray(x)
assert isinstance(d, da.Array)
if not isinstance(x, (np.ndarray, da.Array)):
x = np.asarray(x)
dau.assert_eq(d, x)
|
<commit_before># -*- coding: utf-8 -*-
<commit_msg>Include some tests for _asarray
Make sure that it correctly converts everything to a Dask Array. Try
using a Python list, NumPy array, and Dask Array. Also make sure the
final array has the same contents as the original. Borrowed from
`dask-ndmeasure`.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pytest
import numpy as np
import dask
import dask.array as da
import dask.array.utils as dau
import dask_distance._compat
@pytest.mark.parametrize("x", [
list(range(5)),
np.random.randint(10, size=(15, 16)),
da.random.randint(10, size=(15, 16), chunks=(5, 5)),
])
def test_asarray(x):
d = dask_distance._compat._asarray(x)
assert isinstance(d, da.Array)
if not isinstance(x, (np.ndarray, da.Array)):
x = np.asarray(x)
dau.assert_eq(d, x)
|
44fe60f561abd98df1a1a39f3fbf96c06267c3ec
|
tests/test_wheeler.py
|
tests/test_wheeler.py
|
# coding=utf-8
import os.path as path
import unittest
from devpi_builder import wheeler
class WheelTest(unittest.TestCase):
def test_build(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertRegexpMatches(wheel_file, '\.whl$')
self.assert_(path.exists(wheel_file))
def test_cleans_up_created_files(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertFalse(path.exists(wheel_file))
def test_provides_file_that_is_already_a_wheel(self):
with wheeler.Builder() as builder:
wheel_file = builder('wheel', '0.24')
self.assert_(path.exists(wheel_file))
def test_throws_custom_on_build_failure(self):
with wheeler.Builder() as builder:
with self.assertRaises(wheeler.BuildError):
builder('package_that_hopefully_does_not_exist', '99.999')
def test_look_for_non_existing_wheel(self):
with wheeler.Builder() as builder:
with self.assertRaises(wheeler.BuildError):
builder('nothing_can_be_found', '1.1')
if __name__ == '__main__':
unittest.main()
|
# coding=utf-8
import os.path as path
import unittest
from devpi_builder import wheeler
class WheelTest(unittest.TestCase):
def test_build(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertRegexpMatches(wheel_file, '\.whl$')
self.assert_(path.exists(wheel_file))
def test_cleans_up_created_files(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertFalse(path.exists(wheel_file))
def test_provides_file_that_is_already_a_wheel(self):
with wheeler.Builder() as builder:
wheel_file = builder('wheel', '0.24')
self.assert_(path.exists(wheel_file))
def test_throws_custom_on_build_failure(self):
with wheeler.Builder() as builder:
with self.assertRaises(wheeler.BuildError):
builder('package_that_hopefully_does_not_exist', '99.999')
def test_look_for_non_existing_wheel(self):
builder = wheeler.Builder()
with builder:
with self.assertRaises(wheeler.BuildError):
builder._find_wheel('nothing_can_be_found', '1.1')
if __name__ == '__main__':
unittest.main()
|
Fix test covering pip 1.5.2 error handling.
|
Fix test covering pip 1.5.2 error handling.
|
Python
|
bsd-3-clause
|
tylerdave/devpi-builder
|
# coding=utf-8
import os.path as path
import unittest
from devpi_builder import wheeler
class WheelTest(unittest.TestCase):
def test_build(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertRegexpMatches(wheel_file, '\.whl$')
self.assert_(path.exists(wheel_file))
def test_cleans_up_created_files(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertFalse(path.exists(wheel_file))
def test_provides_file_that_is_already_a_wheel(self):
with wheeler.Builder() as builder:
wheel_file = builder('wheel', '0.24')
self.assert_(path.exists(wheel_file))
def test_throws_custom_on_build_failure(self):
with wheeler.Builder() as builder:
with self.assertRaises(wheeler.BuildError):
builder('package_that_hopefully_does_not_exist', '99.999')
def test_look_for_non_existing_wheel(self):
with wheeler.Builder() as builder:
with self.assertRaises(wheeler.BuildError):
builder('nothing_can_be_found', '1.1')
if __name__ == '__main__':
unittest.main()
Fix test covering pip 1.5.2 error handling.
|
# coding=utf-8
import os.path as path
import unittest
from devpi_builder import wheeler
class WheelTest(unittest.TestCase):
def test_build(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertRegexpMatches(wheel_file, '\.whl$')
self.assert_(path.exists(wheel_file))
def test_cleans_up_created_files(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertFalse(path.exists(wheel_file))
def test_provides_file_that_is_already_a_wheel(self):
with wheeler.Builder() as builder:
wheel_file = builder('wheel', '0.24')
self.assert_(path.exists(wheel_file))
def test_throws_custom_on_build_failure(self):
with wheeler.Builder() as builder:
with self.assertRaises(wheeler.BuildError):
builder('package_that_hopefully_does_not_exist', '99.999')
def test_look_for_non_existing_wheel(self):
builder = wheeler.Builder()
with builder:
with self.assertRaises(wheeler.BuildError):
builder._find_wheel('nothing_can_be_found', '1.1')
if __name__ == '__main__':
unittest.main()
|
<commit_before># coding=utf-8
import os.path as path
import unittest
from devpi_builder import wheeler
class WheelTest(unittest.TestCase):
def test_build(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertRegexpMatches(wheel_file, '\.whl$')
self.assert_(path.exists(wheel_file))
def test_cleans_up_created_files(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertFalse(path.exists(wheel_file))
def test_provides_file_that_is_already_a_wheel(self):
with wheeler.Builder() as builder:
wheel_file = builder('wheel', '0.24')
self.assert_(path.exists(wheel_file))
def test_throws_custom_on_build_failure(self):
with wheeler.Builder() as builder:
with self.assertRaises(wheeler.BuildError):
builder('package_that_hopefully_does_not_exist', '99.999')
def test_look_for_non_existing_wheel(self):
with wheeler.Builder() as builder:
with self.assertRaises(wheeler.BuildError):
builder('nothing_can_be_found', '1.1')
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix test covering pip 1.5.2 error handling.<commit_after>
|
# coding=utf-8
import os.path as path
import unittest
from devpi_builder import wheeler
class WheelTest(unittest.TestCase):
def test_build(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertRegexpMatches(wheel_file, '\.whl$')
self.assert_(path.exists(wheel_file))
def test_cleans_up_created_files(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertFalse(path.exists(wheel_file))
def test_provides_file_that_is_already_a_wheel(self):
with wheeler.Builder() as builder:
wheel_file = builder('wheel', '0.24')
self.assert_(path.exists(wheel_file))
def test_throws_custom_on_build_failure(self):
with wheeler.Builder() as builder:
with self.assertRaises(wheeler.BuildError):
builder('package_that_hopefully_does_not_exist', '99.999')
def test_look_for_non_existing_wheel(self):
builder = wheeler.Builder()
with builder:
with self.assertRaises(wheeler.BuildError):
builder._find_wheel('nothing_can_be_found', '1.1')
if __name__ == '__main__':
unittest.main()
|
# coding=utf-8
import os.path as path
import unittest
from devpi_builder import wheeler
class WheelTest(unittest.TestCase):
def test_build(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertRegexpMatches(wheel_file, '\.whl$')
self.assert_(path.exists(wheel_file))
def test_cleans_up_created_files(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertFalse(path.exists(wheel_file))
def test_provides_file_that_is_already_a_wheel(self):
with wheeler.Builder() as builder:
wheel_file = builder('wheel', '0.24')
self.assert_(path.exists(wheel_file))
def test_throws_custom_on_build_failure(self):
with wheeler.Builder() as builder:
with self.assertRaises(wheeler.BuildError):
builder('package_that_hopefully_does_not_exist', '99.999')
def test_look_for_non_existing_wheel(self):
with wheeler.Builder() as builder:
with self.assertRaises(wheeler.BuildError):
builder('nothing_can_be_found', '1.1')
if __name__ == '__main__':
unittest.main()
Fix test covering pip 1.5.2 error handling.# coding=utf-8
import os.path as path
import unittest
from devpi_builder import wheeler
class WheelTest(unittest.TestCase):
def test_build(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertRegexpMatches(wheel_file, '\.whl$')
self.assert_(path.exists(wheel_file))
def test_cleans_up_created_files(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertFalse(path.exists(wheel_file))
def test_provides_file_that_is_already_a_wheel(self):
with wheeler.Builder() as builder:
wheel_file = builder('wheel', '0.24')
self.assert_(path.exists(wheel_file))
def test_throws_custom_on_build_failure(self):
with wheeler.Builder() as builder:
with self.assertRaises(wheeler.BuildError):
builder('package_that_hopefully_does_not_exist', '99.999')
def test_look_for_non_existing_wheel(self):
builder = wheeler.Builder()
with builder:
with self.assertRaises(wheeler.BuildError):
builder._find_wheel('nothing_can_be_found', '1.1')
if __name__ == '__main__':
unittest.main()
|
<commit_before># coding=utf-8
import os.path as path
import unittest
from devpi_builder import wheeler
class WheelTest(unittest.TestCase):
def test_build(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertRegexpMatches(wheel_file, '\.whl$')
self.assert_(path.exists(wheel_file))
def test_cleans_up_created_files(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertFalse(path.exists(wheel_file))
def test_provides_file_that_is_already_a_wheel(self):
with wheeler.Builder() as builder:
wheel_file = builder('wheel', '0.24')
self.assert_(path.exists(wheel_file))
def test_throws_custom_on_build_failure(self):
with wheeler.Builder() as builder:
with self.assertRaises(wheeler.BuildError):
builder('package_that_hopefully_does_not_exist', '99.999')
def test_look_for_non_existing_wheel(self):
with wheeler.Builder() as builder:
with self.assertRaises(wheeler.BuildError):
builder('nothing_can_be_found', '1.1')
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix test covering pip 1.5.2 error handling.<commit_after># coding=utf-8
import os.path as path
import unittest
from devpi_builder import wheeler
class WheelTest(unittest.TestCase):
def test_build(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertRegexpMatches(wheel_file, '\.whl$')
self.assert_(path.exists(wheel_file))
def test_cleans_up_created_files(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertFalse(path.exists(wheel_file))
def test_provides_file_that_is_already_a_wheel(self):
with wheeler.Builder() as builder:
wheel_file = builder('wheel', '0.24')
self.assert_(path.exists(wheel_file))
def test_throws_custom_on_build_failure(self):
with wheeler.Builder() as builder:
with self.assertRaises(wheeler.BuildError):
builder('package_that_hopefully_does_not_exist', '99.999')
def test_look_for_non_existing_wheel(self):
builder = wheeler.Builder()
with builder:
with self.assertRaises(wheeler.BuildError):
builder._find_wheel('nothing_can_be_found', '1.1')
if __name__ == '__main__':
unittest.main()
|
8646f5af48dc011799a5c7ab9d89b7e6a09ed95b
|
editor/views.py
|
editor/views.py
|
# thesquirrel.org
#
# Copyright (C) 2015 Flying Squirrel Community Space
#
# thesquirrel.org is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
# thesquirrel.org is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
# License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with thesquirrel.org. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from django.contrib.auth.decorators import login_required
from django.http import JsonResponse
from django.shortcuts import render
from .models import EditorImage
from . import formatting
@login_required
def upload_image(request):
if 'file' not in request.FILES:
return JsonResponse({'error': 'no file given'})
else:
image = EditorImage.objects.create_from_file(request.FILES['file'])
return JsonResponse({'imageId': image.id})
def formatting_help(request):
return render(request, 'editor/formatting-help.html')
@login_required
def preview(request):
body = request.GET.get('body', '')
return JsonResponse({'body': formatting.render(body)})
|
# thesquirrel.org
#
# Copyright (C) 2015 Flying Squirrel Community Space
#
# thesquirrel.org is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
# thesquirrel.org is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
# License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with thesquirrel.org. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from django.contrib.auth.decorators import login_required
from django.http import JsonResponse
from django.shortcuts import render
from .models import EditorImage
from . import formatting
@login_required
def upload_image(request):
if 'file' not in request.FILES:
return JsonResponse({'error': 'no file given'})
else:
try:
image = EditorImage.objects.create_from_file(request.FILES['file'])
except Exception, e:
return JsonResponse({'error': str(e)})
return JsonResponse({'imageId': image.id})
def formatting_help(request):
return render(request, 'editor/formatting-help.html')
@login_required
def preview(request):
body = request.GET.get('body', '')
return JsonResponse({'body': formatting.render(body)})
|
Return exception when we have an issue reading an image file
|
Return exception when we have an issue reading an image file
|
Python
|
agpl-3.0
|
bendk/thesquirrel,bendk/thesquirrel,bendk/thesquirrel,bendk/thesquirrel
|
# thesquirrel.org
#
# Copyright (C) 2015 Flying Squirrel Community Space
#
# thesquirrel.org is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
# thesquirrel.org is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
# License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with thesquirrel.org. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from django.contrib.auth.decorators import login_required
from django.http import JsonResponse
from django.shortcuts import render
from .models import EditorImage
from . import formatting
@login_required
def upload_image(request):
if 'file' not in request.FILES:
return JsonResponse({'error': 'no file given'})
else:
image = EditorImage.objects.create_from_file(request.FILES['file'])
return JsonResponse({'imageId': image.id})
def formatting_help(request):
return render(request, 'editor/formatting-help.html')
@login_required
def preview(request):
body = request.GET.get('body', '')
return JsonResponse({'body': formatting.render(body)})
Return exception when we have an issue reading an image file
|
# thesquirrel.org
#
# Copyright (C) 2015 Flying Squirrel Community Space
#
# thesquirrel.org is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
# thesquirrel.org is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
# License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with thesquirrel.org. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from django.contrib.auth.decorators import login_required
from django.http import JsonResponse
from django.shortcuts import render
from .models import EditorImage
from . import formatting
@login_required
def upload_image(request):
if 'file' not in request.FILES:
return JsonResponse({'error': 'no file given'})
else:
try:
image = EditorImage.objects.create_from_file(request.FILES['file'])
except Exception, e:
return JsonResponse({'error': str(e)})
return JsonResponse({'imageId': image.id})
def formatting_help(request):
return render(request, 'editor/formatting-help.html')
@login_required
def preview(request):
body = request.GET.get('body', '')
return JsonResponse({'body': formatting.render(body)})
|
<commit_before># thesquirrel.org
#
# Copyright (C) 2015 Flying Squirrel Community Space
#
# thesquirrel.org is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
# thesquirrel.org is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
# License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with thesquirrel.org. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from django.contrib.auth.decorators import login_required
from django.http import JsonResponse
from django.shortcuts import render
from .models import EditorImage
from . import formatting
@login_required
def upload_image(request):
if 'file' not in request.FILES:
return JsonResponse({'error': 'no file given'})
else:
image = EditorImage.objects.create_from_file(request.FILES['file'])
return JsonResponse({'imageId': image.id})
def formatting_help(request):
return render(request, 'editor/formatting-help.html')
@login_required
def preview(request):
body = request.GET.get('body', '')
return JsonResponse({'body': formatting.render(body)})
<commit_msg>Return exception when we have an issue reading an image file<commit_after>
|
# thesquirrel.org
#
# Copyright (C) 2015 Flying Squirrel Community Space
#
# thesquirrel.org is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
# thesquirrel.org is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
# License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with thesquirrel.org. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from django.contrib.auth.decorators import login_required
from django.http import JsonResponse
from django.shortcuts import render
from .models import EditorImage
from . import formatting
@login_required
def upload_image(request):
if 'file' not in request.FILES:
return JsonResponse({'error': 'no file given'})
else:
try:
image = EditorImage.objects.create_from_file(request.FILES['file'])
except Exception, e:
return JsonResponse({'error': str(e)})
return JsonResponse({'imageId': image.id})
def formatting_help(request):
return render(request, 'editor/formatting-help.html')
@login_required
def preview(request):
body = request.GET.get('body', '')
return JsonResponse({'body': formatting.render(body)})
|
# thesquirrel.org
#
# Copyright (C) 2015 Flying Squirrel Community Space
#
# thesquirrel.org is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
# thesquirrel.org is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
# License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with thesquirrel.org. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from django.contrib.auth.decorators import login_required
from django.http import JsonResponse
from django.shortcuts import render
from .models import EditorImage
from . import formatting
@login_required
def upload_image(request):
if 'file' not in request.FILES:
return JsonResponse({'error': 'no file given'})
else:
image = EditorImage.objects.create_from_file(request.FILES['file'])
return JsonResponse({'imageId': image.id})
def formatting_help(request):
return render(request, 'editor/formatting-help.html')
@login_required
def preview(request):
body = request.GET.get('body', '')
return JsonResponse({'body': formatting.render(body)})
Return exception when we have an issue reading an image file# thesquirrel.org
#
# Copyright (C) 2015 Flying Squirrel Community Space
#
# thesquirrel.org is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
# thesquirrel.org is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
# License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with thesquirrel.org. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from django.contrib.auth.decorators import login_required
from django.http import JsonResponse
from django.shortcuts import render
from .models import EditorImage
from . import formatting
@login_required
def upload_image(request):
if 'file' not in request.FILES:
return JsonResponse({'error': 'no file given'})
else:
try:
image = EditorImage.objects.create_from_file(request.FILES['file'])
except Exception, e:
return JsonResponse({'error': str(e)})
return JsonResponse({'imageId': image.id})
def formatting_help(request):
return render(request, 'editor/formatting-help.html')
@login_required
def preview(request):
body = request.GET.get('body', '')
return JsonResponse({'body': formatting.render(body)})
|
<commit_before># thesquirrel.org
#
# Copyright (C) 2015 Flying Squirrel Community Space
#
# thesquirrel.org is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
# thesquirrel.org is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
# License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with thesquirrel.org. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from django.contrib.auth.decorators import login_required
from django.http import JsonResponse
from django.shortcuts import render
from .models import EditorImage
from . import formatting
@login_required
def upload_image(request):
if 'file' not in request.FILES:
return JsonResponse({'error': 'no file given'})
else:
image = EditorImage.objects.create_from_file(request.FILES['file'])
return JsonResponse({'imageId': image.id})
def formatting_help(request):
return render(request, 'editor/formatting-help.html')
@login_required
def preview(request):
body = request.GET.get('body', '')
return JsonResponse({'body': formatting.render(body)})
<commit_msg>Return exception when we have an issue reading an image file<commit_after># thesquirrel.org
#
# Copyright (C) 2015 Flying Squirrel Community Space
#
# thesquirrel.org is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
# thesquirrel.org is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
# License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with thesquirrel.org. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from django.contrib.auth.decorators import login_required
from django.http import JsonResponse
from django.shortcuts import render
from .models import EditorImage
from . import formatting
@login_required
def upload_image(request):
    """Handle an AJAX image upload and return JSON with the new image id.

    Responds with ``{'error': ...}`` when no file was supplied or the
    upload cannot be processed, and ``{'imageId': ...}`` on success.
    """
    if 'file' not in request.FILES:
        return JsonResponse({'error': 'no file given'})
    try:
        image = EditorImage.objects.create_from_file(request.FILES['file'])
    except Exception as e:  # `except Exception, e:` is Python 2-only syntax
        return JsonResponse({'error': str(e)})
    return JsonResponse({'imageId': image.id})
def formatting_help(request):
    """Render the static formatting-help page."""
    return render(request, 'editor/formatting-help.html')
@login_required
def preview(request):
    """Return the rendered HTML for a draft body as JSON."""
    raw_body = request.GET.get('body', '')
    return JsonResponse({'body': formatting.render(raw_body)})
|
9cd72406d63d1ce3a6cd75a65131c8bde3df95ba
|
push_plugin.py
|
push_plugin.py
|
import requests
class PushClient:
def __init__(self, app):
self.app = app
def handle_new_or_edit(self, post):
data = { 'hub.mode' : 'publish',
'hub.url' : 'http://kylewm.com/all.atom' }
response = requests.post('https://pubsubhubbub.appspot.com/', data)
if response.status_code == 204:
self.app.logger.info('successfully sent PuSH notification')
|
import requests
class PushClient:
def __init__(self, app):
self.app = app
def publish(self, url):
data = { 'hub.mode' : 'publish', 'hub.url' : url }
response = requests.post('https://pubsubhubbub.appspot.com/', data)
if response.status_code == 204:
self.app.logger.info('successfully sent PuSH notification')
else:
self.app.logger.warn('unexpected response from PuSH hub %s', response)
def handle_new_or_edit(self, post):
self.publish('http://kylewm.com/all.atom')
if post.post_type=='article':
self.publish('http://kylewm.com/articles.atom')
elif post.post_type=='note':
self.publish('http://kylewm.com/notes.atom')
|
Send the all.atom feed and the articles/notes feeds to PuSH
|
Send the all.atom feed and the articles/notes feeds to PuSH
|
Python
|
bsd-2-clause
|
thedod/redwind,Lancey6/redwind,thedod/redwind,Lancey6/redwind,Lancey6/redwind
|
import requests
class PushClient:
def __init__(self, app):
self.app = app
def handle_new_or_edit(self, post):
data = { 'hub.mode' : 'publish',
'hub.url' : 'http://kylewm.com/all.atom' }
response = requests.post('https://pubsubhubbub.appspot.com/', data)
if response.status_code == 204:
self.app.logger.info('successfully sent PuSH notification')
Send the all.atom feed and the articles/notes feeds to PuSH
|
import requests
class PushClient:
def __init__(self, app):
self.app = app
def publish(self, url):
data = { 'hub.mode' : 'publish', 'hub.url' : url }
response = requests.post('https://pubsubhubbub.appspot.com/', data)
if response.status_code == 204:
self.app.logger.info('successfully sent PuSH notification')
else:
self.app.logger.warn('unexpected response from PuSH hub %s', response)
def handle_new_or_edit(self, post):
self.publish('http://kylewm.com/all.atom')
if post.post_type=='article':
self.publish('http://kylewm.com/articles.atom')
elif post.post_type=='note':
self.publish('http://kylewm.com/notes.atom')
|
<commit_before>import requests
class PushClient:
def __init__(self, app):
self.app = app
def handle_new_or_edit(self, post):
data = { 'hub.mode' : 'publish',
'hub.url' : 'http://kylewm.com/all.atom' }
response = requests.post('https://pubsubhubbub.appspot.com/', data)
if response.status_code == 204:
self.app.logger.info('successfully sent PuSH notification')
<commit_msg>Send the all.atom feed and the articles/notes feeds to PuSH<commit_after>
|
import requests
class PushClient:
def __init__(self, app):
self.app = app
def publish(self, url):
data = { 'hub.mode' : 'publish', 'hub.url' : url }
response = requests.post('https://pubsubhubbub.appspot.com/', data)
if response.status_code == 204:
self.app.logger.info('successfully sent PuSH notification')
else:
self.app.logger.warn('unexpected response from PuSH hub %s', response)
def handle_new_or_edit(self, post):
self.publish('http://kylewm.com/all.atom')
if post.post_type=='article':
self.publish('http://kylewm.com/articles.atom')
elif post.post_type=='note':
self.publish('http://kylewm.com/notes.atom')
|
import requests
class PushClient:
def __init__(self, app):
self.app = app
def handle_new_or_edit(self, post):
data = { 'hub.mode' : 'publish',
'hub.url' : 'http://kylewm.com/all.atom' }
response = requests.post('https://pubsubhubbub.appspot.com/', data)
if response.status_code == 204:
self.app.logger.info('successfully sent PuSH notification')
Send the all.atom feed and the articles/notes feeds to PuSHimport requests
class PushClient:
def __init__(self, app):
self.app = app
def publish(self, url):
data = { 'hub.mode' : 'publish', 'hub.url' : url }
response = requests.post('https://pubsubhubbub.appspot.com/', data)
if response.status_code == 204:
self.app.logger.info('successfully sent PuSH notification')
else:
self.app.logger.warn('unexpected response from PuSH hub %s', response)
def handle_new_or_edit(self, post):
self.publish('http://kylewm.com/all.atom')
if post.post_type=='article':
self.publish('http://kylewm.com/articles.atom')
elif post.post_type=='note':
self.publish('http://kylewm.com/notes.atom')
|
<commit_before>import requests
class PushClient:
def __init__(self, app):
self.app = app
def handle_new_or_edit(self, post):
data = { 'hub.mode' : 'publish',
'hub.url' : 'http://kylewm.com/all.atom' }
response = requests.post('https://pubsubhubbub.appspot.com/', data)
if response.status_code == 204:
self.app.logger.info('successfully sent PuSH notification')
<commit_msg>Send the all.atom feed and the articles/notes feeds to PuSH<commit_after>import requests
class PushClient:
def __init__(self, app):
self.app = app
def publish(self, url):
data = { 'hub.mode' : 'publish', 'hub.url' : url }
response = requests.post('https://pubsubhubbub.appspot.com/', data)
if response.status_code == 204:
self.app.logger.info('successfully sent PuSH notification')
else:
self.app.logger.warn('unexpected response from PuSH hub %s', response)
def handle_new_or_edit(self, post):
self.publish('http://kylewm.com/all.atom')
if post.post_type=='article':
self.publish('http://kylewm.com/articles.atom')
elif post.post_type=='note':
self.publish('http://kylewm.com/notes.atom')
|
a06dc82df053ea47f8a39b46d938f52679b2cff5
|
grow/preprocessors/blogger_test.py
|
grow/preprocessors/blogger_test.py
|
from . import google_drive
from grow.pods import pods
from grow.pods import storage
from grow.testing import testing
import cStringIO
import csv
import json
import unittest
import yaml
class BloggerTestCase(testing.TestCase):
def test_run(self):
pod = testing.create_pod()
fields = {
'preprocessors': [{
'name': 'blogger',
'kind': 'blogger',
'blog_id': '10861780', # Official Google blog.
'collection': '/content/posts/',
'markdown': True,
'authenticated': False,
'inject': True,
}],
}
pod.write_yaml('/podspec.yaml', fields)
fields = {
'$path': '/{date}/{slug}/',
'$view': '/views/base.html',
}
pod.write_yaml('/content/posts/_blueprint.yaml', fields)
content = '{{doc.html|safe}}'
pod.write_file('/views/base.html', content)
# Weak test to verify preprocessor runs.
pod.preprocess(['blogger'])
# Verify inject.
collection = pod.get_collection('/content/posts')
doc = collection.docs()[0]
preprocessor = pod.list_preprocessors()[0]
preprocessor.inject(doc)
if __name__ == '__main__':
unittest.main()
|
from . import google_drive
from grow.pods import pods
from grow.pods import storage
from grow.testing import testing
import cStringIO
import csv
import json
import unittest
import yaml
class BloggerTestCase(testing.TestCase):
def test_run(self):
pod = testing.create_pod()
fields = {
'preprocessors': [{
'name': 'blogger',
'kind': 'blogger',
'blog_id': '4154157974596966834',
'collection': '/content/posts/',
'markdown': True,
'authenticated': False,
'inject': True,
}],
}
pod.write_yaml('/podspec.yaml', fields)
fields = {
'$path': '/{date}/{slug}/',
'$view': '/views/base.html',
}
pod.write_yaml('/content/posts/_blueprint.yaml', fields)
content = '{{doc.html|safe}}'
pod.write_file('/views/base.html', content)
# Weak test to verify preprocessor runs.
pod.preprocess(['blogger'])
# Verify inject.
collection = pod.get_collection('/content/posts')
doc = collection.docs()[0]
preprocessor = pod.list_preprocessors()[0]
preprocessor.inject(doc)
if __name__ == '__main__':
unittest.main()
|
Use different blog for test data.
|
Use different blog for test data.
|
Python
|
mit
|
grow/pygrow,denmojo/pygrow,denmojo/pygrow,denmojo/pygrow,grow/grow,grow/pygrow,grow/grow,grow/grow,grow/grow,grow/pygrow,denmojo/pygrow
|
from . import google_drive
from grow.pods import pods
from grow.pods import storage
from grow.testing import testing
import cStringIO
import csv
import json
import unittest
import yaml
class BloggerTestCase(testing.TestCase):
def test_run(self):
pod = testing.create_pod()
fields = {
'preprocessors': [{
'name': 'blogger',
'kind': 'blogger',
'blog_id': '10861780', # Official Google blog.
'collection': '/content/posts/',
'markdown': True,
'authenticated': False,
'inject': True,
}],
}
pod.write_yaml('/podspec.yaml', fields)
fields = {
'$path': '/{date}/{slug}/',
'$view': '/views/base.html',
}
pod.write_yaml('/content/posts/_blueprint.yaml', fields)
content = '{{doc.html|safe}}'
pod.write_file('/views/base.html', content)
# Weak test to verify preprocessor runs.
pod.preprocess(['blogger'])
# Verify inject.
collection = pod.get_collection('/content/posts')
doc = collection.docs()[0]
preprocessor = pod.list_preprocessors()[0]
preprocessor.inject(doc)
if __name__ == '__main__':
unittest.main()
Use different blog for test data.
|
from . import google_drive
from grow.pods import pods
from grow.pods import storage
from grow.testing import testing
import cStringIO
import csv
import json
import unittest
import yaml
class BloggerTestCase(testing.TestCase):
def test_run(self):
pod = testing.create_pod()
fields = {
'preprocessors': [{
'name': 'blogger',
'kind': 'blogger',
'blog_id': '4154157974596966834',
'collection': '/content/posts/',
'markdown': True,
'authenticated': False,
'inject': True,
}],
}
pod.write_yaml('/podspec.yaml', fields)
fields = {
'$path': '/{date}/{slug}/',
'$view': '/views/base.html',
}
pod.write_yaml('/content/posts/_blueprint.yaml', fields)
content = '{{doc.html|safe}}'
pod.write_file('/views/base.html', content)
# Weak test to verify preprocessor runs.
pod.preprocess(['blogger'])
# Verify inject.
collection = pod.get_collection('/content/posts')
doc = collection.docs()[0]
preprocessor = pod.list_preprocessors()[0]
preprocessor.inject(doc)
if __name__ == '__main__':
unittest.main()
|
<commit_before>from . import google_drive
from grow.pods import pods
from grow.pods import storage
from grow.testing import testing
import cStringIO
import csv
import json
import unittest
import yaml
class BloggerTestCase(testing.TestCase):
def test_run(self):
pod = testing.create_pod()
fields = {
'preprocessors': [{
'name': 'blogger',
'kind': 'blogger',
'blog_id': '10861780', # Official Google blog.
'collection': '/content/posts/',
'markdown': True,
'authenticated': False,
'inject': True,
}],
}
pod.write_yaml('/podspec.yaml', fields)
fields = {
'$path': '/{date}/{slug}/',
'$view': '/views/base.html',
}
pod.write_yaml('/content/posts/_blueprint.yaml', fields)
content = '{{doc.html|safe}}'
pod.write_file('/views/base.html', content)
# Weak test to verify preprocessor runs.
pod.preprocess(['blogger'])
# Verify inject.
collection = pod.get_collection('/content/posts')
doc = collection.docs()[0]
preprocessor = pod.list_preprocessors()[0]
preprocessor.inject(doc)
if __name__ == '__main__':
unittest.main()
<commit_msg>Use different blog for test data.<commit_after>
|
from . import google_drive
from grow.pods import pods
from grow.pods import storage
from grow.testing import testing
import cStringIO
import csv
import json
import unittest
import yaml
class BloggerTestCase(testing.TestCase):
def test_run(self):
pod = testing.create_pod()
fields = {
'preprocessors': [{
'name': 'blogger',
'kind': 'blogger',
'blog_id': '4154157974596966834',
'collection': '/content/posts/',
'markdown': True,
'authenticated': False,
'inject': True,
}],
}
pod.write_yaml('/podspec.yaml', fields)
fields = {
'$path': '/{date}/{slug}/',
'$view': '/views/base.html',
}
pod.write_yaml('/content/posts/_blueprint.yaml', fields)
content = '{{doc.html|safe}}'
pod.write_file('/views/base.html', content)
# Weak test to verify preprocessor runs.
pod.preprocess(['blogger'])
# Verify inject.
collection = pod.get_collection('/content/posts')
doc = collection.docs()[0]
preprocessor = pod.list_preprocessors()[0]
preprocessor.inject(doc)
if __name__ == '__main__':
unittest.main()
|
from . import google_drive
from grow.pods import pods
from grow.pods import storage
from grow.testing import testing
import cStringIO
import csv
import json
import unittest
import yaml
class BloggerTestCase(testing.TestCase):
def test_run(self):
pod = testing.create_pod()
fields = {
'preprocessors': [{
'name': 'blogger',
'kind': 'blogger',
'blog_id': '10861780', # Official Google blog.
'collection': '/content/posts/',
'markdown': True,
'authenticated': False,
'inject': True,
}],
}
pod.write_yaml('/podspec.yaml', fields)
fields = {
'$path': '/{date}/{slug}/',
'$view': '/views/base.html',
}
pod.write_yaml('/content/posts/_blueprint.yaml', fields)
content = '{{doc.html|safe}}'
pod.write_file('/views/base.html', content)
# Weak test to verify preprocessor runs.
pod.preprocess(['blogger'])
# Verify inject.
collection = pod.get_collection('/content/posts')
doc = collection.docs()[0]
preprocessor = pod.list_preprocessors()[0]
preprocessor.inject(doc)
if __name__ == '__main__':
unittest.main()
Use different blog for test data.from . import google_drive
from grow.pods import pods
from grow.pods import storage
from grow.testing import testing
import cStringIO
import csv
import json
import unittest
import yaml
class BloggerTestCase(testing.TestCase):
def test_run(self):
pod = testing.create_pod()
fields = {
'preprocessors': [{
'name': 'blogger',
'kind': 'blogger',
'blog_id': '4154157974596966834',
'collection': '/content/posts/',
'markdown': True,
'authenticated': False,
'inject': True,
}],
}
pod.write_yaml('/podspec.yaml', fields)
fields = {
'$path': '/{date}/{slug}/',
'$view': '/views/base.html',
}
pod.write_yaml('/content/posts/_blueprint.yaml', fields)
content = '{{doc.html|safe}}'
pod.write_file('/views/base.html', content)
# Weak test to verify preprocessor runs.
pod.preprocess(['blogger'])
# Verify inject.
collection = pod.get_collection('/content/posts')
doc = collection.docs()[0]
preprocessor = pod.list_preprocessors()[0]
preprocessor.inject(doc)
if __name__ == '__main__':
unittest.main()
|
<commit_before>from . import google_drive
from grow.pods import pods
from grow.pods import storage
from grow.testing import testing
import cStringIO
import csv
import json
import unittest
import yaml
class BloggerTestCase(testing.TestCase):
def test_run(self):
pod = testing.create_pod()
fields = {
'preprocessors': [{
'name': 'blogger',
'kind': 'blogger',
'blog_id': '10861780', # Official Google blog.
'collection': '/content/posts/',
'markdown': True,
'authenticated': False,
'inject': True,
}],
}
pod.write_yaml('/podspec.yaml', fields)
fields = {
'$path': '/{date}/{slug}/',
'$view': '/views/base.html',
}
pod.write_yaml('/content/posts/_blueprint.yaml', fields)
content = '{{doc.html|safe}}'
pod.write_file('/views/base.html', content)
# Weak test to verify preprocessor runs.
pod.preprocess(['blogger'])
# Verify inject.
collection = pod.get_collection('/content/posts')
doc = collection.docs()[0]
preprocessor = pod.list_preprocessors()[0]
preprocessor.inject(doc)
if __name__ == '__main__':
unittest.main()
<commit_msg>Use different blog for test data.<commit_after>from . import google_drive
from grow.pods import pods
from grow.pods import storage
from grow.testing import testing
import cStringIO
import csv
import json
import unittest
import yaml
class BloggerTestCase(testing.TestCase):

    def test_run(self):
        """Smoke-test the blogger preprocessor, then its inject hook."""
        pod = testing.create_pod()
        preprocessor_config = {
            'name': 'blogger',
            'kind': 'blogger',
            'blog_id': '4154157974596966834',
            'collection': '/content/posts/',
            'markdown': True,
            'authenticated': False,
            'inject': True,
        }
        pod.write_yaml('/podspec.yaml',
                       {'preprocessors': [preprocessor_config]})
        blueprint = {
            '$path': '/{date}/{slug}/',
            '$view': '/views/base.html',
        }
        pod.write_yaml('/content/posts/_blueprint.yaml', blueprint)
        pod.write_file('/views/base.html', '{{doc.html|safe}}')
        # Weak test: just verify the preprocessor runs without raising.
        pod.preprocess(['blogger'])
        # Verify the inject path against the first imported document.
        collection = pod.get_collection('/content/posts')
        doc = collection.docs()[0]
        preprocessor = pod.list_preprocessors()[0]
        preprocessor.inject(doc)
if __name__ == '__main__':
unittest.main()
|
783766b4f4d65dfb4b41e6386edd8ea2df32d727
|
tests/test_creation.py
|
tests/test_creation.py
|
import generic as g
class CreationTest(g.unittest.TestCase):
def test_soup(self):
count = 100
mesh = g.trimesh.creation.random_soup(face_count=count)
self.assertTrue(len(mesh.faces) == count)
self.assertTrue(len(mesh.face_adjacency) == 0)
self.assertTrue(len(mesh.split(only_watertight=True)) == 0)
self.assertTrue(len(mesh.split(only_watertight=False)) == count)
def test_uv(self):
sphere = g.trimesh.creation.uv_sphere()
self.assertTrue(sphere.is_watertight)
self.assertTrue(sphere.is_winding_consistent)
if __name__ == '__main__':
g.trimesh.util.attach_to_log()
g.unittest.main()
|
import generic as g
class CreationTest(g.unittest.TestCase):
def test_soup(self):
count = 100
mesh = g.trimesh.creation.random_soup(face_count=count)
self.assertTrue(len(mesh.faces) == count)
self.assertTrue(len(mesh.face_adjacency) == 0)
self.assertTrue(len(mesh.split(only_watertight=True)) == 0)
self.assertTrue(len(mesh.split(only_watertight=False)) == count)
def test_uv(self):
sphere = g.trimesh.creation.uv_sphere()
self.assertTrue(sphere.is_watertight)
self.assertTrue(sphere.is_winding_consistent)
def test_path_extrude(self):
# Create base polygon
vec = g.np.array([0,1])*0.2
n_comps = 100
angle = g.np.pi * 2.0 / n_comps
rotmat = g.np.array([
[g.np.cos(angle), -g.np.sin(angle)],
[g.np.sin(angle), g.np.cos(angle)]
])
perim = []
for i in range(n_comps):
perim.append(vec)
vec = g.np.dot(rotmat, vec)
poly = g.Polygon(perim)
# Create 3D path
angles = g.np.linspace(0, 8*g.np.pi, 1000)
x = angles / 10.0
y = g.np.cos(angles)
z = g.np.sin(angles)
path = g.np.c_[x,y,z]
# Extrude
mesh = g.trimesh.creation.extrude_polygon_along_path(poly, path)
self.assertTrue(mesh.is_volume)
if __name__ == '__main__':
g.trimesh.util.attach_to_log()
g.unittest.main()
|
Add integration test for extrusion
|
Add integration test for extrusion
|
Python
|
mit
|
mikedh/trimesh,mikedh/trimesh,mikedh/trimesh,dajusc/trimesh,mikedh/trimesh,dajusc/trimesh
|
import generic as g
class CreationTest(g.unittest.TestCase):
def test_soup(self):
count = 100
mesh = g.trimesh.creation.random_soup(face_count=count)
self.assertTrue(len(mesh.faces) == count)
self.assertTrue(len(mesh.face_adjacency) == 0)
self.assertTrue(len(mesh.split(only_watertight=True)) == 0)
self.assertTrue(len(mesh.split(only_watertight=False)) == count)
def test_uv(self):
sphere = g.trimesh.creation.uv_sphere()
self.assertTrue(sphere.is_watertight)
self.assertTrue(sphere.is_winding_consistent)
if __name__ == '__main__':
g.trimesh.util.attach_to_log()
g.unittest.main()
Add integration test for extrusion
|
import generic as g
class CreationTest(g.unittest.TestCase):
def test_soup(self):
count = 100
mesh = g.trimesh.creation.random_soup(face_count=count)
self.assertTrue(len(mesh.faces) == count)
self.assertTrue(len(mesh.face_adjacency) == 0)
self.assertTrue(len(mesh.split(only_watertight=True)) == 0)
self.assertTrue(len(mesh.split(only_watertight=False)) == count)
def test_uv(self):
sphere = g.trimesh.creation.uv_sphere()
self.assertTrue(sphere.is_watertight)
self.assertTrue(sphere.is_winding_consistent)
def test_path_extrude(self):
# Create base polygon
vec = g.np.array([0,1])*0.2
n_comps = 100
angle = g.np.pi * 2.0 / n_comps
rotmat = g.np.array([
[g.np.cos(angle), -g.np.sin(angle)],
[g.np.sin(angle), g.np.cos(angle)]
])
perim = []
for i in range(n_comps):
perim.append(vec)
vec = g.np.dot(rotmat, vec)
poly = g.Polygon(perim)
# Create 3D path
angles = g.np.linspace(0, 8*g.np.pi, 1000)
x = angles / 10.0
y = g.np.cos(angles)
z = g.np.sin(angles)
path = g.np.c_[x,y,z]
# Extrude
mesh = g.trimesh.creation.extrude_polygon_along_path(poly, path)
self.assertTrue(mesh.is_volume)
if __name__ == '__main__':
g.trimesh.util.attach_to_log()
g.unittest.main()
|
<commit_before>import generic as g
class CreationTest(g.unittest.TestCase):
def test_soup(self):
count = 100
mesh = g.trimesh.creation.random_soup(face_count=count)
self.assertTrue(len(mesh.faces) == count)
self.assertTrue(len(mesh.face_adjacency) == 0)
self.assertTrue(len(mesh.split(only_watertight=True)) == 0)
self.assertTrue(len(mesh.split(only_watertight=False)) == count)
def test_uv(self):
sphere = g.trimesh.creation.uv_sphere()
self.assertTrue(sphere.is_watertight)
self.assertTrue(sphere.is_winding_consistent)
if __name__ == '__main__':
g.trimesh.util.attach_to_log()
g.unittest.main()
<commit_msg>Add integration test for extrusion<commit_after>
|
import generic as g
class CreationTest(g.unittest.TestCase):
def test_soup(self):
count = 100
mesh = g.trimesh.creation.random_soup(face_count=count)
self.assertTrue(len(mesh.faces) == count)
self.assertTrue(len(mesh.face_adjacency) == 0)
self.assertTrue(len(mesh.split(only_watertight=True)) == 0)
self.assertTrue(len(mesh.split(only_watertight=False)) == count)
def test_uv(self):
sphere = g.trimesh.creation.uv_sphere()
self.assertTrue(sphere.is_watertight)
self.assertTrue(sphere.is_winding_consistent)
def test_path_extrude(self):
# Create base polygon
vec = g.np.array([0,1])*0.2
n_comps = 100
angle = g.np.pi * 2.0 / n_comps
rotmat = g.np.array([
[g.np.cos(angle), -g.np.sin(angle)],
[g.np.sin(angle), g.np.cos(angle)]
])
perim = []
for i in range(n_comps):
perim.append(vec)
vec = g.np.dot(rotmat, vec)
poly = g.Polygon(perim)
# Create 3D path
angles = g.np.linspace(0, 8*g.np.pi, 1000)
x = angles / 10.0
y = g.np.cos(angles)
z = g.np.sin(angles)
path = g.np.c_[x,y,z]
# Extrude
mesh = g.trimesh.creation.extrude_polygon_along_path(poly, path)
self.assertTrue(mesh.is_volume)
if __name__ == '__main__':
g.trimesh.util.attach_to_log()
g.unittest.main()
|
import generic as g
class CreationTest(g.unittest.TestCase):
def test_soup(self):
count = 100
mesh = g.trimesh.creation.random_soup(face_count=count)
self.assertTrue(len(mesh.faces) == count)
self.assertTrue(len(mesh.face_adjacency) == 0)
self.assertTrue(len(mesh.split(only_watertight=True)) == 0)
self.assertTrue(len(mesh.split(only_watertight=False)) == count)
def test_uv(self):
sphere = g.trimesh.creation.uv_sphere()
self.assertTrue(sphere.is_watertight)
self.assertTrue(sphere.is_winding_consistent)
if __name__ == '__main__':
g.trimesh.util.attach_to_log()
g.unittest.main()
Add integration test for extrusionimport generic as g
class CreationTest(g.unittest.TestCase):
def test_soup(self):
count = 100
mesh = g.trimesh.creation.random_soup(face_count=count)
self.assertTrue(len(mesh.faces) == count)
self.assertTrue(len(mesh.face_adjacency) == 0)
self.assertTrue(len(mesh.split(only_watertight=True)) == 0)
self.assertTrue(len(mesh.split(only_watertight=False)) == count)
def test_uv(self):
sphere = g.trimesh.creation.uv_sphere()
self.assertTrue(sphere.is_watertight)
self.assertTrue(sphere.is_winding_consistent)
def test_path_extrude(self):
# Create base polygon
vec = g.np.array([0,1])*0.2
n_comps = 100
angle = g.np.pi * 2.0 / n_comps
rotmat = g.np.array([
[g.np.cos(angle), -g.np.sin(angle)],
[g.np.sin(angle), g.np.cos(angle)]
])
perim = []
for i in range(n_comps):
perim.append(vec)
vec = g.np.dot(rotmat, vec)
poly = g.Polygon(perim)
# Create 3D path
angles = g.np.linspace(0, 8*g.np.pi, 1000)
x = angles / 10.0
y = g.np.cos(angles)
z = g.np.sin(angles)
path = g.np.c_[x,y,z]
# Extrude
mesh = g.trimesh.creation.extrude_polygon_along_path(poly, path)
self.assertTrue(mesh.is_volume)
if __name__ == '__main__':
g.trimesh.util.attach_to_log()
g.unittest.main()
|
<commit_before>import generic as g
class CreationTest(g.unittest.TestCase):
def test_soup(self):
count = 100
mesh = g.trimesh.creation.random_soup(face_count=count)
self.assertTrue(len(mesh.faces) == count)
self.assertTrue(len(mesh.face_adjacency) == 0)
self.assertTrue(len(mesh.split(only_watertight=True)) == 0)
self.assertTrue(len(mesh.split(only_watertight=False)) == count)
def test_uv(self):
sphere = g.trimesh.creation.uv_sphere()
self.assertTrue(sphere.is_watertight)
self.assertTrue(sphere.is_winding_consistent)
if __name__ == '__main__':
g.trimesh.util.attach_to_log()
g.unittest.main()
<commit_msg>Add integration test for extrusion<commit_after>import generic as g
class CreationTest(g.unittest.TestCase):

    def test_soup(self):
        """A random triangle soup has no adjacency and no watertight parts."""
        face_count = 100
        soup = g.trimesh.creation.random_soup(face_count=face_count)
        self.assertTrue(len(soup.faces) == face_count)
        self.assertTrue(len(soup.face_adjacency) == 0)
        self.assertTrue(len(soup.split(only_watertight=True)) == 0)
        self.assertTrue(len(soup.split(only_watertight=False)) == face_count)

    def test_uv(self):
        """UV spheres should be watertight with consistent winding."""
        sphere = g.trimesh.creation.uv_sphere()
        self.assertTrue(sphere.is_watertight)
        self.assertTrue(sphere.is_winding_consistent)

    def test_path_extrude(self):
        """Extruding a polygon along a helical path yields a valid volume."""
        # Base polygon: a regular n-gon built by rotating a single vertex.
        segment_count = 100
        step = g.np.pi * 2.0 / segment_count
        rotation = g.np.array([
            [g.np.cos(step), -g.np.sin(step)],
            [g.np.sin(step), g.np.cos(step)]
        ])
        vertex = g.np.array([0, 1]) * 0.2
        outline = []
        for _ in range(segment_count):
            outline.append(vertex)
            vertex = g.np.dot(rotation, vertex)
        polygon = g.Polygon(outline)
        # 3D path: a helix winding four times around, stretched along x.
        theta = g.np.linspace(0, 8 * g.np.pi, 1000)
        path = g.np.c_[theta / 10.0, g.np.cos(theta), g.np.sin(theta)]
        # Extrude and check that the result encloses a volume.
        extruded = g.trimesh.creation.extrude_polygon_along_path(polygon, path)
        self.assertTrue(extruded.is_volume)
if __name__ == '__main__':
g.trimesh.util.attach_to_log()
g.unittest.main()
|
f7a201f61382593baa6e8ebadfedea68563f1fef
|
examples/repeat.py
|
examples/repeat.py
|
#!/usr/bin/env python
import sys, os, json, logging
sys.path.append(os.path.abspath("."))
import gevent
import msgflo
class Repeat(msgflo.Participant):
def __init__(self, role):
d = {
'component': 'PythonRepeat',
'label': 'Repeat input data without change',
}
msgflo.Participant.__init__(self, d, role)
def process(self, inport, msg):
self.send('out', msg.data)
self.ack(msg)
def main():
waiter = gevent.event.AsyncResult()
repeater = Repeat('repeat')
engine = msgflo.run(repeater, done_cb=waiter.set)
print "Repeat running on %s" % (engine.broker_url)
sys.stdout.flush()
waiter.wait()
print "Shutdown"
sys.stdout.flush()
if __name__ == '__main__':
logging.basicConfig()
main()
|
#!/usr/bin/env python
import sys, os, json, logging
sys.path.append(os.path.abspath("."))
import gevent
import msgflo
class Repeat(msgflo.Participant):
def __init__(self, role):
d = {
'component': 'PythonRepeat',
'label': 'Repeat input data without change',
}
msgflo.Participant.__init__(self, d, role)
def process(self, inport, msg):
self.send('out', msg.data)
self.ack(msg)
def main():
waiter = gevent.event.AsyncResult()
role = sys.argv[1] if len(sys.argv) > 1 else 'repeat'
repeater = Repeat(role)
engine = msgflo.run(repeater, done_cb=waiter.set)
print "Repeat running on %s" % (engine.broker_url)
sys.stdout.flush()
waiter.wait()
print "Shutdown"
sys.stdout.flush()
if __name__ == '__main__':
logging.basicConfig()
main()
|
Allow to specify role name on commandline
|
examples: Allow to specify role name on commandline
|
Python
|
mit
|
msgflo/msgflo-python
|
#!/usr/bin/env python
import sys, os, json, logging
sys.path.append(os.path.abspath("."))
import gevent
import msgflo
class Repeat(msgflo.Participant):
def __init__(self, role):
d = {
'component': 'PythonRepeat',
'label': 'Repeat input data without change',
}
msgflo.Participant.__init__(self, d, role)
def process(self, inport, msg):
self.send('out', msg.data)
self.ack(msg)
def main():
waiter = gevent.event.AsyncResult()
repeater = Repeat('repeat')
engine = msgflo.run(repeater, done_cb=waiter.set)
print "Repeat running on %s" % (engine.broker_url)
sys.stdout.flush()
waiter.wait()
print "Shutdown"
sys.stdout.flush()
if __name__ == '__main__':
logging.basicConfig()
main()
examples: Allow to specify role name on commandline
|
#!/usr/bin/env python
import sys, os, json, logging
sys.path.append(os.path.abspath("."))
import gevent
import msgflo
class Repeat(msgflo.Participant):
def __init__(self, role):
d = {
'component': 'PythonRepeat',
'label': 'Repeat input data without change',
}
msgflo.Participant.__init__(self, d, role)
def process(self, inport, msg):
self.send('out', msg.data)
self.ack(msg)
def main():
waiter = gevent.event.AsyncResult()
role = sys.argv[1] if len(sys.argv) > 1 else 'repeat'
repeater = Repeat(role)
engine = msgflo.run(repeater, done_cb=waiter.set)
print "Repeat running on %s" % (engine.broker_url)
sys.stdout.flush()
waiter.wait()
print "Shutdown"
sys.stdout.flush()
if __name__ == '__main__':
logging.basicConfig()
main()
|
<commit_before>#!/usr/bin/env python
import sys, os, json, logging
sys.path.append(os.path.abspath("."))
import gevent
import msgflo
class Repeat(msgflo.Participant):
def __init__(self, role):
d = {
'component': 'PythonRepeat',
'label': 'Repeat input data without change',
}
msgflo.Participant.__init__(self, d, role)
def process(self, inport, msg):
self.send('out', msg.data)
self.ack(msg)
def main():
waiter = gevent.event.AsyncResult()
repeater = Repeat('repeat')
engine = msgflo.run(repeater, done_cb=waiter.set)
print "Repeat running on %s" % (engine.broker_url)
sys.stdout.flush()
waiter.wait()
print "Shutdown"
sys.stdout.flush()
if __name__ == '__main__':
logging.basicConfig()
main()
<commit_msg>examples: Allow to specify role name on commandline<commit_after>
|
#!/usr/bin/env python
import sys, os, json, logging
sys.path.append(os.path.abspath("."))
import gevent
import msgflo
class Repeat(msgflo.Participant):
def __init__(self, role):
d = {
'component': 'PythonRepeat',
'label': 'Repeat input data without change',
}
msgflo.Participant.__init__(self, d, role)
def process(self, inport, msg):
self.send('out', msg.data)
self.ack(msg)
def main():
waiter = gevent.event.AsyncResult()
role = sys.argv[1] if len(sys.argv) > 1 else 'repeat'
repeater = Repeat(role)
engine = msgflo.run(repeater, done_cb=waiter.set)
print "Repeat running on %s" % (engine.broker_url)
sys.stdout.flush()
waiter.wait()
print "Shutdown"
sys.stdout.flush()
if __name__ == '__main__':
logging.basicConfig()
main()
|
#!/usr/bin/env python
import sys, os, json, logging
sys.path.append(os.path.abspath("."))
import gevent
import msgflo
class Repeat(msgflo.Participant):
def __init__(self, role):
d = {
'component': 'PythonRepeat',
'label': 'Repeat input data without change',
}
msgflo.Participant.__init__(self, d, role)
def process(self, inport, msg):
self.send('out', msg.data)
self.ack(msg)
def main():
waiter = gevent.event.AsyncResult()
repeater = Repeat('repeat')
engine = msgflo.run(repeater, done_cb=waiter.set)
print "Repeat running on %s" % (engine.broker_url)
sys.stdout.flush()
waiter.wait()
print "Shutdown"
sys.stdout.flush()
if __name__ == '__main__':
logging.basicConfig()
main()
examples: Allow to specify role name on commandline#!/usr/bin/env python
import sys, os, json, logging
sys.path.append(os.path.abspath("."))
import gevent
import msgflo
class Repeat(msgflo.Participant):
def __init__(self, role):
d = {
'component': 'PythonRepeat',
'label': 'Repeat input data without change',
}
msgflo.Participant.__init__(self, d, role)
def process(self, inport, msg):
self.send('out', msg.data)
self.ack(msg)
def main():
waiter = gevent.event.AsyncResult()
role = sys.argv[1] if len(sys.argv) > 1 else 'repeat'
repeater = Repeat(role)
engine = msgflo.run(repeater, done_cb=waiter.set)
print "Repeat running on %s" % (engine.broker_url)
sys.stdout.flush()
waiter.wait()
print "Shutdown"
sys.stdout.flush()
if __name__ == '__main__':
logging.basicConfig()
main()
|
<commit_before>#!/usr/bin/env python
import sys, os, json, logging
sys.path.append(os.path.abspath("."))
import gevent
import msgflo
class Repeat(msgflo.Participant):
def __init__(self, role):
d = {
'component': 'PythonRepeat',
'label': 'Repeat input data without change',
}
msgflo.Participant.__init__(self, d, role)
def process(self, inport, msg):
self.send('out', msg.data)
self.ack(msg)
def main():
waiter = gevent.event.AsyncResult()
repeater = Repeat('repeat')
engine = msgflo.run(repeater, done_cb=waiter.set)
print "Repeat running on %s" % (engine.broker_url)
sys.stdout.flush()
waiter.wait()
print "Shutdown"
sys.stdout.flush()
if __name__ == '__main__':
logging.basicConfig()
main()
<commit_msg>examples: Allow to specify role name on commandline<commit_after>#!/usr/bin/env python
import sys, os, json, logging
sys.path.append(os.path.abspath("."))
import gevent
import msgflo
class Repeat(msgflo.Participant):
def __init__(self, role):
d = {
'component': 'PythonRepeat',
'label': 'Repeat input data without change',
}
msgflo.Participant.__init__(self, d, role)
def process(self, inport, msg):
self.send('out', msg.data)
self.ack(msg)
def main():
waiter = gevent.event.AsyncResult()
role = sys.argv[1] if len(sys.argv) > 1 else 'repeat'
repeater = Repeat(role)
engine = msgflo.run(repeater, done_cb=waiter.set)
print "Repeat running on %s" % (engine.broker_url)
sys.stdout.flush()
waiter.wait()
print "Shutdown"
sys.stdout.flush()
if __name__ == '__main__':
logging.basicConfig()
main()
|
97c26c367c2c4597842356e677064a012ea19cb6
|
events/forms.py
|
events/forms.py
|
from django import forms
from events.models import Event, City
class EventForm(forms.ModelForm):
city = forms.ModelChoiceField(City.objects.all(), empty_label=None, label="Ville")
class Meta:
model = Event
exclude = ('submission_time', 'updated_time', 'decision_time',
'moderator', 'moderated', 'latitude', 'longitude')
|
# -*- encoding:utf-8 -*-
from django import forms
from events.models import Event, City
from django.forms.util import ErrorList
from datetime import datetime
class EventForm(forms.ModelForm):
city = forms.ModelChoiceField(City.objects.all(), empty_label=None, label="Ville")
class Meta:
model = Event
exclude = ('submission_time', 'updated_time', 'decision_time',
'moderator', 'moderated', 'latitude', 'longitude')
def clean(self):
cleaned_data = self.cleaned_data
start_time = cleaned_data.get("start_time")
end_time = cleaned_data.get("end_time")
if start_time >= end_time:
msg = u"L'évènement ne peut se terminer avant son début"
self._errors["start_time"] = ErrorList([msg])
self._errors["end_time"] = ErrorList([msg])
del cleaned_data["start_time"]
del cleaned_data["end_time"]
elif start_time < datetime.today():
msg = u"Seul les évènements à venir sont acceptés"
self._errors["start_time"] = ErrorList([msg])
del cleaned_data["start_time"]
return cleaned_data
|
Validate entered dates in Event form
|
Validate entered dates in Event form
|
Python
|
agpl-3.0
|
vcorreze/agendaEteAccoord,mlhamel/agendadulibre,vcorreze/agendaEteAccoord,mlhamel/agendadulibre,mlhamel/agendadulibre,vcorreze/agendaEteAccoord
|
from django import forms
from events.models import Event, City
class EventForm(forms.ModelForm):
city = forms.ModelChoiceField(City.objects.all(), empty_label=None, label="Ville")
class Meta:
model = Event
exclude = ('submission_time', 'updated_time', 'decision_time',
'moderator', 'moderated', 'latitude', 'longitude')
Validate entered dates in Event form
|
# -*- encoding:utf-8 -*-
from django import forms
from events.models import Event, City
from django.forms.util import ErrorList
from datetime import datetime
class EventForm(forms.ModelForm):
city = forms.ModelChoiceField(City.objects.all(), empty_label=None, label="Ville")
class Meta:
model = Event
exclude = ('submission_time', 'updated_time', 'decision_time',
'moderator', 'moderated', 'latitude', 'longitude')
def clean(self):
cleaned_data = self.cleaned_data
start_time = cleaned_data.get("start_time")
end_time = cleaned_data.get("end_time")
if start_time >= end_time:
msg = u"L'évènement ne peut se terminer avant son début"
self._errors["start_time"] = ErrorList([msg])
self._errors["end_time"] = ErrorList([msg])
del cleaned_data["start_time"]
del cleaned_data["end_time"]
elif start_time < datetime.today():
msg = u"Seul les évènements à venir sont acceptés"
self._errors["start_time"] = ErrorList([msg])
del cleaned_data["start_time"]
return cleaned_data
|
<commit_before>from django import forms
from events.models import Event, City
class EventForm(forms.ModelForm):
city = forms.ModelChoiceField(City.objects.all(), empty_label=None, label="Ville")
class Meta:
model = Event
exclude = ('submission_time', 'updated_time', 'decision_time',
'moderator', 'moderated', 'latitude', 'longitude')
<commit_msg>Validate entered dates in Event form<commit_after>
|
# -*- encoding:utf-8 -*-
from django import forms
from events.models import Event, City
from django.forms.util import ErrorList
from datetime import datetime
class EventForm(forms.ModelForm):
city = forms.ModelChoiceField(City.objects.all(), empty_label=None, label="Ville")
class Meta:
model = Event
exclude = ('submission_time', 'updated_time', 'decision_time',
'moderator', 'moderated', 'latitude', 'longitude')
def clean(self):
cleaned_data = self.cleaned_data
start_time = cleaned_data.get("start_time")
end_time = cleaned_data.get("end_time")
if start_time >= end_time:
msg = u"L'évènement ne peut se terminer avant son début"
self._errors["start_time"] = ErrorList([msg])
self._errors["end_time"] = ErrorList([msg])
del cleaned_data["start_time"]
del cleaned_data["end_time"]
elif start_time < datetime.today():
msg = u"Seul les évènements à venir sont acceptés"
self._errors["start_time"] = ErrorList([msg])
del cleaned_data["start_time"]
return cleaned_data
|
from django import forms
from events.models import Event, City
class EventForm(forms.ModelForm):
city = forms.ModelChoiceField(City.objects.all(), empty_label=None, label="Ville")
class Meta:
model = Event
exclude = ('submission_time', 'updated_time', 'decision_time',
'moderator', 'moderated', 'latitude', 'longitude')
Validate entered dates in Event form# -*- encoding:utf-8 -*-
from django import forms
from events.models import Event, City
from django.forms.util import ErrorList
from datetime import datetime
class EventForm(forms.ModelForm):
city = forms.ModelChoiceField(City.objects.all(), empty_label=None, label="Ville")
class Meta:
model = Event
exclude = ('submission_time', 'updated_time', 'decision_time',
'moderator', 'moderated', 'latitude', 'longitude')
def clean(self):
cleaned_data = self.cleaned_data
start_time = cleaned_data.get("start_time")
end_time = cleaned_data.get("end_time")
if start_time >= end_time:
msg = u"L'évènement ne peut se terminer avant son début"
self._errors["start_time"] = ErrorList([msg])
self._errors["end_time"] = ErrorList([msg])
del cleaned_data["start_time"]
del cleaned_data["end_time"]
elif start_time < datetime.today():
msg = u"Seul les évènements à venir sont acceptés"
self._errors["start_time"] = ErrorList([msg])
del cleaned_data["start_time"]
return cleaned_data
|
<commit_before>from django import forms
from events.models import Event, City
class EventForm(forms.ModelForm):
city = forms.ModelChoiceField(City.objects.all(), empty_label=None, label="Ville")
class Meta:
model = Event
exclude = ('submission_time', 'updated_time', 'decision_time',
'moderator', 'moderated', 'latitude', 'longitude')
<commit_msg>Validate entered dates in Event form<commit_after># -*- encoding:utf-8 -*-
from django import forms
from events.models import Event, City
from django.forms.util import ErrorList
from datetime import datetime
class EventForm(forms.ModelForm):
city = forms.ModelChoiceField(City.objects.all(), empty_label=None, label="Ville")
class Meta:
model = Event
exclude = ('submission_time', 'updated_time', 'decision_time',
'moderator', 'moderated', 'latitude', 'longitude')
def clean(self):
cleaned_data = self.cleaned_data
start_time = cleaned_data.get("start_time")
end_time = cleaned_data.get("end_time")
if start_time >= end_time:
msg = u"L'évènement ne peut se terminer avant son début"
self._errors["start_time"] = ErrorList([msg])
self._errors["end_time"] = ErrorList([msg])
del cleaned_data["start_time"]
del cleaned_data["end_time"]
elif start_time < datetime.today():
msg = u"Seul les évènements à venir sont acceptés"
self._errors["start_time"] = ErrorList([msg])
del cleaned_data["start_time"]
return cleaned_data
|
c39a64c5dc83d55632ffc19a96196aef07474114
|
pylab/accounts/tests/test_settings.py
|
pylab/accounts/tests/test_settings.py
|
import django_webtest
import django.contrib.auth.models as auth_models
import pylab.accounts.models as accounts_models
class SettingsTests(django_webtest.WebTest):
def setUp(self):
super().setUp()
auth_models.User.objects.create_user('u1')
def test_settings(self):
resp = self.app.get('/accounts/settings/', user='u1')
resp.form['first_name'] = 'My'
resp.form['last_name'] = 'Name'
resp.form['email'] = ''
resp.form['language'] = 'en'
resp = resp.form.submit()
self.assertEqual(resp.status_int, 302)
self.assertEqual(list(auth_models.User.objects.values_list('first_name', 'last_name', 'email')), [
('My', 'Name', ''),
])
self.assertEqual(list(accounts_models.UserProfile.objects.values_list('language')), [('en',),])
|
import django_webtest
import django.contrib.auth.models as auth_models
import pylab.accounts.models as accounts_models
class SettingsTests(django_webtest.WebTest):
def setUp(self):
super().setUp()
auth_models.User.objects.create_user('u1')
def test_user_settings(self):
resp = self.app.get('/accounts/settings/', user='u1')
resp.form['first_name'] = 'My'
resp.form['last_name'] = 'Name'
resp.form['email'] = ''
resp = resp.form.submit()
self.assertEqual(resp.status_int, 302)
self.assertEqual(list(auth_models.User.objects.values_list('first_name', 'last_name', 'email')), [
('My', 'Name', ''),
])
def test_userprofile_settings(self):
resp = self.app.get('/accounts/settings/', user='u1')
resp.form['language'] = 'en'
resp = resp.form.submit()
self.assertEqual(resp.status_int, 302)
self.assertEqual(list(accounts_models.UserProfile.objects.values_list('language')), [('en',),])
|
Split user settings test into two tests
|
Split user settings test into two tests
|
Python
|
agpl-3.0
|
python-dirbtuves/website,python-dirbtuves/website,python-dirbtuves/website
|
import django_webtest
import django.contrib.auth.models as auth_models
import pylab.accounts.models as accounts_models
class SettingsTests(django_webtest.WebTest):
def setUp(self):
super().setUp()
auth_models.User.objects.create_user('u1')
def test_settings(self):
resp = self.app.get('/accounts/settings/', user='u1')
resp.form['first_name'] = 'My'
resp.form['last_name'] = 'Name'
resp.form['email'] = ''
resp.form['language'] = 'en'
resp = resp.form.submit()
self.assertEqual(resp.status_int, 302)
self.assertEqual(list(auth_models.User.objects.values_list('first_name', 'last_name', 'email')), [
('My', 'Name', ''),
])
self.assertEqual(list(accounts_models.UserProfile.objects.values_list('language')), [('en',),])
Split user settings test into two tests
|
import django_webtest
import django.contrib.auth.models as auth_models
import pylab.accounts.models as accounts_models
class SettingsTests(django_webtest.WebTest):
def setUp(self):
super().setUp()
auth_models.User.objects.create_user('u1')
def test_user_settings(self):
resp = self.app.get('/accounts/settings/', user='u1')
resp.form['first_name'] = 'My'
resp.form['last_name'] = 'Name'
resp.form['email'] = ''
resp = resp.form.submit()
self.assertEqual(resp.status_int, 302)
self.assertEqual(list(auth_models.User.objects.values_list('first_name', 'last_name', 'email')), [
('My', 'Name', ''),
])
def test_userprofile_settings(self):
resp = self.app.get('/accounts/settings/', user='u1')
resp.form['language'] = 'en'
resp = resp.form.submit()
self.assertEqual(resp.status_int, 302)
self.assertEqual(list(accounts_models.UserProfile.objects.values_list('language')), [('en',),])
|
<commit_before>import django_webtest
import django.contrib.auth.models as auth_models
import pylab.accounts.models as accounts_models
class SettingsTests(django_webtest.WebTest):
def setUp(self):
super().setUp()
auth_models.User.objects.create_user('u1')
def test_settings(self):
resp = self.app.get('/accounts/settings/', user='u1')
resp.form['first_name'] = 'My'
resp.form['last_name'] = 'Name'
resp.form['email'] = ''
resp.form['language'] = 'en'
resp = resp.form.submit()
self.assertEqual(resp.status_int, 302)
self.assertEqual(list(auth_models.User.objects.values_list('first_name', 'last_name', 'email')), [
('My', 'Name', ''),
])
self.assertEqual(list(accounts_models.UserProfile.objects.values_list('language')), [('en',),])
<commit_msg>Split user settings test into two tests<commit_after>
|
import django_webtest
import django.contrib.auth.models as auth_models
import pylab.accounts.models as accounts_models
class SettingsTests(django_webtest.WebTest):
def setUp(self):
super().setUp()
auth_models.User.objects.create_user('u1')
def test_user_settings(self):
resp = self.app.get('/accounts/settings/', user='u1')
resp.form['first_name'] = 'My'
resp.form['last_name'] = 'Name'
resp.form['email'] = ''
resp = resp.form.submit()
self.assertEqual(resp.status_int, 302)
self.assertEqual(list(auth_models.User.objects.values_list('first_name', 'last_name', 'email')), [
('My', 'Name', ''),
])
def test_userprofile_settings(self):
resp = self.app.get('/accounts/settings/', user='u1')
resp.form['language'] = 'en'
resp = resp.form.submit()
self.assertEqual(resp.status_int, 302)
self.assertEqual(list(accounts_models.UserProfile.objects.values_list('language')), [('en',),])
|
import django_webtest
import django.contrib.auth.models as auth_models
import pylab.accounts.models as accounts_models
class SettingsTests(django_webtest.WebTest):
def setUp(self):
super().setUp()
auth_models.User.objects.create_user('u1')
def test_settings(self):
resp = self.app.get('/accounts/settings/', user='u1')
resp.form['first_name'] = 'My'
resp.form['last_name'] = 'Name'
resp.form['email'] = ''
resp.form['language'] = 'en'
resp = resp.form.submit()
self.assertEqual(resp.status_int, 302)
self.assertEqual(list(auth_models.User.objects.values_list('first_name', 'last_name', 'email')), [
('My', 'Name', ''),
])
self.assertEqual(list(accounts_models.UserProfile.objects.values_list('language')), [('en',),])
Split user settings test into two testsimport django_webtest
import django.contrib.auth.models as auth_models
import pylab.accounts.models as accounts_models
class SettingsTests(django_webtest.WebTest):
def setUp(self):
super().setUp()
auth_models.User.objects.create_user('u1')
def test_user_settings(self):
resp = self.app.get('/accounts/settings/', user='u1')
resp.form['first_name'] = 'My'
resp.form['last_name'] = 'Name'
resp.form['email'] = ''
resp = resp.form.submit()
self.assertEqual(resp.status_int, 302)
self.assertEqual(list(auth_models.User.objects.values_list('first_name', 'last_name', 'email')), [
('My', 'Name', ''),
])
def test_userprofile_settings(self):
resp = self.app.get('/accounts/settings/', user='u1')
resp.form['language'] = 'en'
resp = resp.form.submit()
self.assertEqual(resp.status_int, 302)
self.assertEqual(list(accounts_models.UserProfile.objects.values_list('language')), [('en',),])
|
<commit_before>import django_webtest
import django.contrib.auth.models as auth_models
import pylab.accounts.models as accounts_models
class SettingsTests(django_webtest.WebTest):
def setUp(self):
super().setUp()
auth_models.User.objects.create_user('u1')
def test_settings(self):
resp = self.app.get('/accounts/settings/', user='u1')
resp.form['first_name'] = 'My'
resp.form['last_name'] = 'Name'
resp.form['email'] = ''
resp.form['language'] = 'en'
resp = resp.form.submit()
self.assertEqual(resp.status_int, 302)
self.assertEqual(list(auth_models.User.objects.values_list('first_name', 'last_name', 'email')), [
('My', 'Name', ''),
])
self.assertEqual(list(accounts_models.UserProfile.objects.values_list('language')), [('en',),])
<commit_msg>Split user settings test into two tests<commit_after>import django_webtest
import django.contrib.auth.models as auth_models
import pylab.accounts.models as accounts_models
class SettingsTests(django_webtest.WebTest):
def setUp(self):
super().setUp()
auth_models.User.objects.create_user('u1')
def test_user_settings(self):
resp = self.app.get('/accounts/settings/', user='u1')
resp.form['first_name'] = 'My'
resp.form['last_name'] = 'Name'
resp.form['email'] = ''
resp = resp.form.submit()
self.assertEqual(resp.status_int, 302)
self.assertEqual(list(auth_models.User.objects.values_list('first_name', 'last_name', 'email')), [
('My', 'Name', ''),
])
def test_userprofile_settings(self):
resp = self.app.get('/accounts/settings/', user='u1')
resp.form['language'] = 'en'
resp = resp.form.submit()
self.assertEqual(resp.status_int, 302)
self.assertEqual(list(accounts_models.UserProfile.objects.values_list('language')), [('en',),])
|
090bcbf8bbc32a2a8da5f0ab2be097e5a6716c3d
|
src/adhocracy_frontend/adhocracy_frontend/tests/integration/test_jasmine.py
|
src/adhocracy_frontend/adhocracy_frontend/tests/integration/test_jasmine.py
|
"""This is structurally equivalent to ../unit/test_jasmine.py.
The difference is that it runs igtest.html instead of test.html.
also, it is located next to acceptance tests, because it has to
be allowed to import components other than adhocracy, like
adhocracy_core.
"""
from pytest import fixture
from pytest import mark
from adhocracy_frontend.testing import Browser
from adhocracy_frontend.testing import browser_test_helper
from adhocracy_frontend.tests.unit.console import Parser
from adhocracy_frontend.tests.unit.console import Formatter
pytestmark = mark.jasmine
class TestJasmine:
def test_all(self, browser_igtest):
data = browser_igtest.evaluate_script('jsApiReporter.specs()')
formatter = Formatter([])
parser = Parser()
results = parser.parse(data)
formatter.results = results
print(formatter.format())
num_failures = len(list(results.failed()))
assert num_failures == 0
|
"""This is structurally equivalent to ../unit/test_jasmine.py.
The difference is that it runs igtest.html instead of test.html.
also, it is located next to acceptance tests, because it has to
be allowed to import components other than adhocracy, like
adhocracy_core.
"""
from pytest import fixture
from pytest import mark
from adhocracy_frontend.testing import Browser
from adhocracy_frontend.testing import browser_test_helper
from adhocracy_frontend.tests.unit.console import Parser
from adhocracy_frontend.tests.unit.console import Formatter
pytestmark = mark.jasmine
class TestJasmine:
@mark.xfail
def test_all(self, browser_igtest):
data = browser_igtest.evaluate_script('jsApiReporter.specs()')
formatter = Formatter([])
parser = Parser()
results = parser.parse(data)
formatter.results = results
print(formatter.format())
num_failures = len(list(results.failed()))
assert num_failures == 0
|
Mark integration tests as xfail
|
Mark integration tests as xfail
|
Python
|
agpl-3.0
|
fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,fhartwig/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,liqd/adhocracy3.mercator
|
"""This is structurally equivalent to ../unit/test_jasmine.py.
The difference is that it runs igtest.html instead of test.html.
also, it is located next to acceptance tests, because it has to
be allowed to import components other than adhocracy, like
adhocracy_core.
"""
from pytest import fixture
from pytest import mark
from adhocracy_frontend.testing import Browser
from adhocracy_frontend.testing import browser_test_helper
from adhocracy_frontend.tests.unit.console import Parser
from adhocracy_frontend.tests.unit.console import Formatter
pytestmark = mark.jasmine
class TestJasmine:
def test_all(self, browser_igtest):
data = browser_igtest.evaluate_script('jsApiReporter.specs()')
formatter = Formatter([])
parser = Parser()
results = parser.parse(data)
formatter.results = results
print(formatter.format())
num_failures = len(list(results.failed()))
assert num_failures == 0
Mark integration tests as xfail
|
"""This is structurally equivalent to ../unit/test_jasmine.py.
The difference is that it runs igtest.html instead of test.html.
also, it is located next to acceptance tests, because it has to
be allowed to import components other than adhocracy, like
adhocracy_core.
"""
from pytest import fixture
from pytest import mark
from adhocracy_frontend.testing import Browser
from adhocracy_frontend.testing import browser_test_helper
from adhocracy_frontend.tests.unit.console import Parser
from adhocracy_frontend.tests.unit.console import Formatter
pytestmark = mark.jasmine
class TestJasmine:
@mark.xfail
def test_all(self, browser_igtest):
data = browser_igtest.evaluate_script('jsApiReporter.specs()')
formatter = Formatter([])
parser = Parser()
results = parser.parse(data)
formatter.results = results
print(formatter.format())
num_failures = len(list(results.failed()))
assert num_failures == 0
|
<commit_before>"""This is structurally equivalent to ../unit/test_jasmine.py.
The difference is that it runs igtest.html instead of test.html.
also, it is located next to acceptance tests, because it has to
be allowed to import components other than adhocracy, like
adhocracy_core.
"""
from pytest import fixture
from pytest import mark
from adhocracy_frontend.testing import Browser
from adhocracy_frontend.testing import browser_test_helper
from adhocracy_frontend.tests.unit.console import Parser
from adhocracy_frontend.tests.unit.console import Formatter
pytestmark = mark.jasmine
class TestJasmine:
def test_all(self, browser_igtest):
data = browser_igtest.evaluate_script('jsApiReporter.specs()')
formatter = Formatter([])
parser = Parser()
results = parser.parse(data)
formatter.results = results
print(formatter.format())
num_failures = len(list(results.failed()))
assert num_failures == 0
<commit_msg>Mark integration tests as xfail<commit_after>
|
"""This is structurally equivalent to ../unit/test_jasmine.py.
The difference is that it runs igtest.html instead of test.html.
also, it is located next to acceptance tests, because it has to
be allowed to import components other than adhocracy, like
adhocracy_core.
"""
from pytest import fixture
from pytest import mark
from adhocracy_frontend.testing import Browser
from adhocracy_frontend.testing import browser_test_helper
from adhocracy_frontend.tests.unit.console import Parser
from adhocracy_frontend.tests.unit.console import Formatter
pytestmark = mark.jasmine
class TestJasmine:
@mark.xfail
def test_all(self, browser_igtest):
data = browser_igtest.evaluate_script('jsApiReporter.specs()')
formatter = Formatter([])
parser = Parser()
results = parser.parse(data)
formatter.results = results
print(formatter.format())
num_failures = len(list(results.failed()))
assert num_failures == 0
|
"""This is structurally equivalent to ../unit/test_jasmine.py.
The difference is that it runs igtest.html instead of test.html.
also, it is located next to acceptance tests, because it has to
be allowed to import components other than adhocracy, like
adhocracy_core.
"""
from pytest import fixture
from pytest import mark
from adhocracy_frontend.testing import Browser
from adhocracy_frontend.testing import browser_test_helper
from adhocracy_frontend.tests.unit.console import Parser
from adhocracy_frontend.tests.unit.console import Formatter
pytestmark = mark.jasmine
class TestJasmine:
def test_all(self, browser_igtest):
data = browser_igtest.evaluate_script('jsApiReporter.specs()')
formatter = Formatter([])
parser = Parser()
results = parser.parse(data)
formatter.results = results
print(formatter.format())
num_failures = len(list(results.failed()))
assert num_failures == 0
Mark integration tests as xfail"""This is structurally equivalent to ../unit/test_jasmine.py.
The difference is that it runs igtest.html instead of test.html.
also, it is located next to acceptance tests, because it has to
be allowed to import components other than adhocracy, like
adhocracy_core.
"""
from pytest import fixture
from pytest import mark
from adhocracy_frontend.testing import Browser
from adhocracy_frontend.testing import browser_test_helper
from adhocracy_frontend.tests.unit.console import Parser
from adhocracy_frontend.tests.unit.console import Formatter
pytestmark = mark.jasmine
class TestJasmine:
@mark.xfail
def test_all(self, browser_igtest):
data = browser_igtest.evaluate_script('jsApiReporter.specs()')
formatter = Formatter([])
parser = Parser()
results = parser.parse(data)
formatter.results = results
print(formatter.format())
num_failures = len(list(results.failed()))
assert num_failures == 0
|
<commit_before>"""This is structurally equivalent to ../unit/test_jasmine.py.
The difference is that it runs igtest.html instead of test.html.
also, it is located next to acceptance tests, because it has to
be allowed to import components other than adhocracy, like
adhocracy_core.
"""
from pytest import fixture
from pytest import mark
from adhocracy_frontend.testing import Browser
from adhocracy_frontend.testing import browser_test_helper
from adhocracy_frontend.tests.unit.console import Parser
from adhocracy_frontend.tests.unit.console import Formatter
pytestmark = mark.jasmine
class TestJasmine:
def test_all(self, browser_igtest):
data = browser_igtest.evaluate_script('jsApiReporter.specs()')
formatter = Formatter([])
parser = Parser()
results = parser.parse(data)
formatter.results = results
print(formatter.format())
num_failures = len(list(results.failed()))
assert num_failures == 0
<commit_msg>Mark integration tests as xfail<commit_after>"""This is structurally equivalent to ../unit/test_jasmine.py.
The difference is that it runs igtest.html instead of test.html.
also, it is located next to acceptance tests, because it has to
be allowed to import components other than adhocracy, like
adhocracy_core.
"""
from pytest import fixture
from pytest import mark
from adhocracy_frontend.testing import Browser
from adhocracy_frontend.testing import browser_test_helper
from adhocracy_frontend.tests.unit.console import Parser
from adhocracy_frontend.tests.unit.console import Formatter
pytestmark = mark.jasmine
class TestJasmine:
@mark.xfail
def test_all(self, browser_igtest):
data = browser_igtest.evaluate_script('jsApiReporter.specs()')
formatter = Formatter([])
parser = Parser()
results = parser.parse(data)
formatter.results = results
print(formatter.format())
num_failures = len(list(results.failed()))
assert num_failures == 0
|
d9a266ccd3c4873478f0524afa6a3068858bbea6
|
django_seo_js/middleware/useragent.py
|
django_seo_js/middleware/useragent.py
|
import re
from django_seo_js import settings
from django_seo_js.backends import SelectedBackend
from django_seo_js.helpers import request_should_be_ignored
import logging
logger = logging.getLogger(__name__)
class UserAgentMiddleware(SelectedBackend):
def __init__(self, *args, **kwargs):
super(UserAgentMiddleware, self).__init__(*args, **kwargs)
regex_str = "|".join(settings.USER_AGENTS)
regex_str = ".*?(%s)" % regex_str
self.USER_AGENT_REGEX = re.compile(regex_str, re.IGNORECASE)
def process_request(self, request):
if not request.ENABLED:
return
if request_should_be_ignored(request):
return
if "HTTP_USER_AGENT" not in request.META:
return
if not self.USER_AGENT_REGEX.match(request.META["HTTP_USER_AGENT"]):
return
url = request.build_absolute_uri()
try:
return self.backend.get_response_for_url(url)
except Exception as e:
logger.exception(e)
|
import re
from django_seo_js import settings
from django_seo_js.backends import SelectedBackend
from django_seo_js.helpers import request_should_be_ignored
import logging
logger = logging.getLogger(__name__)
class UserAgentMiddleware(SelectedBackend):
def __init__(self, *args, **kwargs):
super(UserAgentMiddleware, self).__init__(*args, **kwargs)
regex_str = "|".join(settings.USER_AGENTS)
regex_str = ".*?(%s)" % regex_str
self.USER_AGENT_REGEX = re.compile(regex_str, re.IGNORECASE)
def process_request(self, request):
if not hasattr(request, 'ENABLED') or not request.ENABLED:
return
if request_should_be_ignored(request):
return
if "HTTP_USER_AGENT" not in request.META:
return
if not self.USER_AGENT_REGEX.match(request.META["HTTP_USER_AGENT"]):
return
url = request.build_absolute_uri()
try:
return self.backend.get_response_for_url(url)
except Exception as e:
logger.exception(e)
|
Fix issue where ENABLED is not defined
|
Fix issue where ENABLED is not defined
|
Python
|
mit
|
skoczen/django-seo-js
|
import re
from django_seo_js import settings
from django_seo_js.backends import SelectedBackend
from django_seo_js.helpers import request_should_be_ignored
import logging
logger = logging.getLogger(__name__)
class UserAgentMiddleware(SelectedBackend):
def __init__(self, *args, **kwargs):
super(UserAgentMiddleware, self).__init__(*args, **kwargs)
regex_str = "|".join(settings.USER_AGENTS)
regex_str = ".*?(%s)" % regex_str
self.USER_AGENT_REGEX = re.compile(regex_str, re.IGNORECASE)
def process_request(self, request):
if not request.ENABLED:
return
if request_should_be_ignored(request):
return
if "HTTP_USER_AGENT" not in request.META:
return
if not self.USER_AGENT_REGEX.match(request.META["HTTP_USER_AGENT"]):
return
url = request.build_absolute_uri()
try:
return self.backend.get_response_for_url(url)
except Exception as e:
logger.exception(e)
Fix issue where ENABLED is not defined
|
import re
from django_seo_js import settings
from django_seo_js.backends import SelectedBackend
from django_seo_js.helpers import request_should_be_ignored
import logging
logger = logging.getLogger(__name__)
class UserAgentMiddleware(SelectedBackend):
def __init__(self, *args, **kwargs):
super(UserAgentMiddleware, self).__init__(*args, **kwargs)
regex_str = "|".join(settings.USER_AGENTS)
regex_str = ".*?(%s)" % regex_str
self.USER_AGENT_REGEX = re.compile(regex_str, re.IGNORECASE)
def process_request(self, request):
if not hasattr(request, 'ENABLED') or not request.ENABLED:
return
if request_should_be_ignored(request):
return
if "HTTP_USER_AGENT" not in request.META:
return
if not self.USER_AGENT_REGEX.match(request.META["HTTP_USER_AGENT"]):
return
url = request.build_absolute_uri()
try:
return self.backend.get_response_for_url(url)
except Exception as e:
logger.exception(e)
|
<commit_before>import re
from django_seo_js import settings
from django_seo_js.backends import SelectedBackend
from django_seo_js.helpers import request_should_be_ignored
import logging
logger = logging.getLogger(__name__)
class UserAgentMiddleware(SelectedBackend):
def __init__(self, *args, **kwargs):
super(UserAgentMiddleware, self).__init__(*args, **kwargs)
regex_str = "|".join(settings.USER_AGENTS)
regex_str = ".*?(%s)" % regex_str
self.USER_AGENT_REGEX = re.compile(regex_str, re.IGNORECASE)
def process_request(self, request):
if not request.ENABLED:
return
if request_should_be_ignored(request):
return
if "HTTP_USER_AGENT" not in request.META:
return
if not self.USER_AGENT_REGEX.match(request.META["HTTP_USER_AGENT"]):
return
url = request.build_absolute_uri()
try:
return self.backend.get_response_for_url(url)
except Exception as e:
logger.exception(e)
<commit_msg>Fix issue where ENABLED is not defined<commit_after>
|
import re
from django_seo_js import settings
from django_seo_js.backends import SelectedBackend
from django_seo_js.helpers import request_should_be_ignored
import logging
logger = logging.getLogger(__name__)
class UserAgentMiddleware(SelectedBackend):
def __init__(self, *args, **kwargs):
super(UserAgentMiddleware, self).__init__(*args, **kwargs)
regex_str = "|".join(settings.USER_AGENTS)
regex_str = ".*?(%s)" % regex_str
self.USER_AGENT_REGEX = re.compile(regex_str, re.IGNORECASE)
def process_request(self, request):
if not hasattr(request, 'ENABLED') or not request.ENABLED:
return
if request_should_be_ignored(request):
return
if "HTTP_USER_AGENT" not in request.META:
return
if not self.USER_AGENT_REGEX.match(request.META["HTTP_USER_AGENT"]):
return
url = request.build_absolute_uri()
try:
return self.backend.get_response_for_url(url)
except Exception as e:
logger.exception(e)
|
import re
from django_seo_js import settings
from django_seo_js.backends import SelectedBackend
from django_seo_js.helpers import request_should_be_ignored
import logging
logger = logging.getLogger(__name__)
class UserAgentMiddleware(SelectedBackend):
def __init__(self, *args, **kwargs):
super(UserAgentMiddleware, self).__init__(*args, **kwargs)
regex_str = "|".join(settings.USER_AGENTS)
regex_str = ".*?(%s)" % regex_str
self.USER_AGENT_REGEX = re.compile(regex_str, re.IGNORECASE)
def process_request(self, request):
if not request.ENABLED:
return
if request_should_be_ignored(request):
return
if "HTTP_USER_AGENT" not in request.META:
return
if not self.USER_AGENT_REGEX.match(request.META["HTTP_USER_AGENT"]):
return
url = request.build_absolute_uri()
try:
return self.backend.get_response_for_url(url)
except Exception as e:
logger.exception(e)
Fix issue where ENABLED is not definedimport re
from django_seo_js import settings
from django_seo_js.backends import SelectedBackend
from django_seo_js.helpers import request_should_be_ignored
import logging
logger = logging.getLogger(__name__)
class UserAgentMiddleware(SelectedBackend):
def __init__(self, *args, **kwargs):
super(UserAgentMiddleware, self).__init__(*args, **kwargs)
regex_str = "|".join(settings.USER_AGENTS)
regex_str = ".*?(%s)" % regex_str
self.USER_AGENT_REGEX = re.compile(regex_str, re.IGNORECASE)
def process_request(self, request):
if not hasattr(request, 'ENABLED') or not request.ENABLED:
return
if request_should_be_ignored(request):
return
if "HTTP_USER_AGENT" not in request.META:
return
if not self.USER_AGENT_REGEX.match(request.META["HTTP_USER_AGENT"]):
return
url = request.build_absolute_uri()
try:
return self.backend.get_response_for_url(url)
except Exception as e:
logger.exception(e)
|
<commit_before>import re
from django_seo_js import settings
from django_seo_js.backends import SelectedBackend
from django_seo_js.helpers import request_should_be_ignored
import logging
logger = logging.getLogger(__name__)
class UserAgentMiddleware(SelectedBackend):
def __init__(self, *args, **kwargs):
super(UserAgentMiddleware, self).__init__(*args, **kwargs)
regex_str = "|".join(settings.USER_AGENTS)
regex_str = ".*?(%s)" % regex_str
self.USER_AGENT_REGEX = re.compile(regex_str, re.IGNORECASE)
def process_request(self, request):
if not request.ENABLED:
return
if request_should_be_ignored(request):
return
if "HTTP_USER_AGENT" not in request.META:
return
if not self.USER_AGENT_REGEX.match(request.META["HTTP_USER_AGENT"]):
return
url = request.build_absolute_uri()
try:
return self.backend.get_response_for_url(url)
except Exception as e:
logger.exception(e)
<commit_msg>Fix issue where ENABLED is not defined<commit_after>import re
from django_seo_js import settings
from django_seo_js.backends import SelectedBackend
from django_seo_js.helpers import request_should_be_ignored
import logging
logger = logging.getLogger(__name__)
class UserAgentMiddleware(SelectedBackend):
def __init__(self, *args, **kwargs):
super(UserAgentMiddleware, self).__init__(*args, **kwargs)
regex_str = "|".join(settings.USER_AGENTS)
regex_str = ".*?(%s)" % regex_str
self.USER_AGENT_REGEX = re.compile(regex_str, re.IGNORECASE)
def process_request(self, request):
if not hasattr(request, 'ENABLED') or not request.ENABLED:
return
if request_should_be_ignored(request):
return
if "HTTP_USER_AGENT" not in request.META:
return
if not self.USER_AGENT_REGEX.match(request.META["HTTP_USER_AGENT"]):
return
url = request.build_absolute_uri()
try:
return self.backend.get_response_for_url(url)
except Exception as e:
logger.exception(e)
|
d76398b40844e969439d495d4ea3604e5b2011b4
|
mock-recipe-server/test_mock_server.py
|
mock-recipe-server/test_mock_server.py
|
"""
Tests for the mock-server itself.
"""
from utils import APIPath
def test_testcase_difference(root_path):
"""Ensure that different testcases output different data."""
recipes = set()
testcase_paths = (
APIPath(path, 'http://example.com')
for path in root_path.path.iterdir() if path.is_dir()
)
for testcase_path in testcase_paths:
recipe_path = testcase_path.add('api', 'v1', 'recipe')
recipe_data = recipe_path.read()
assert recipe_data not in recipes
recipes.add(recipe_data)
# This asserts both that testcases have differing signed data
# and that a single testcase does not have the same data for
# signed and unsigned endpoints.
signed_recipe_data = recipe_path.add('signed').read()
assert signed_recipe_data not in recipes
recipes.add(signed_recipe_data)
|
"""
Tests for the mock-server itself.
"""
from utils import APIPath
def test_testcase_difference(root_path):
"""Ensure that different testcases output different data."""
recipes = set()
testcase_paths = (
APIPath(path, 'http://example.com')
for path in root_path.path.iterdir() if path.is_dir()
)
for testcase_path in testcase_paths:
recipe_path = testcase_path.add('api', 'v1', 'recipe')
try:
recipe_data = recipe_path.read()
signed_recipe_data = recipe_path.add('signed').read()
except FileNotFoundError:
# Some error testcases are purposefully missing files,
# so we just skip checking those.
continue
assert recipe_data not in recipes
recipes.add(recipe_data)
# This asserts both that testcases have differing signed data
# and that a single testcase does not have the same data for
# signed and unsigned endpoints.
assert signed_recipe_data not in recipes
recipes.add(signed_recipe_data)
|
Handle error testcases in mock-server tests.
|
Handle error testcases in mock-server tests.
|
Python
|
mpl-2.0
|
Osmose/normandy,Osmose/normandy,mozilla/normandy,mozilla/normandy,mozilla/normandy,Osmose/normandy,Osmose/normandy,mozilla/normandy
|
"""
Tests for the mock-server itself.
"""
from utils import APIPath
def test_testcase_difference(root_path):
"""Ensure that different testcases output different data."""
recipes = set()
testcase_paths = (
APIPath(path, 'http://example.com')
for path in root_path.path.iterdir() if path.is_dir()
)
for testcase_path in testcase_paths:
recipe_path = testcase_path.add('api', 'v1', 'recipe')
recipe_data = recipe_path.read()
assert recipe_data not in recipes
recipes.add(recipe_data)
# This asserts both that testcases have differing signed data
# and that a single testcase does not have the same data for
# signed and unsigned endpoints.
signed_recipe_data = recipe_path.add('signed').read()
assert signed_recipe_data not in recipes
recipes.add(signed_recipe_data)
Handle error testcases in mock-server tests.
|
"""
Tests for the mock-server itself.
"""
from utils import APIPath
def test_testcase_difference(root_path):
"""Ensure that different testcases output different data."""
recipes = set()
testcase_paths = (
APIPath(path, 'http://example.com')
for path in root_path.path.iterdir() if path.is_dir()
)
for testcase_path in testcase_paths:
recipe_path = testcase_path.add('api', 'v1', 'recipe')
try:
recipe_data = recipe_path.read()
signed_recipe_data = recipe_path.add('signed').read()
except FileNotFoundError:
# Some error testcases are purposefully missing files,
# so we just skip checking those.
continue
assert recipe_data not in recipes
recipes.add(recipe_data)
# This asserts both that testcases have differing signed data
# and that a single testcase does not have the same data for
# signed and unsigned endpoints.
assert signed_recipe_data not in recipes
recipes.add(signed_recipe_data)
|
<commit_before>"""
Tests for the mock-server itself.
"""
from utils import APIPath
def test_testcase_difference(root_path):
"""Ensure that different testcases output different data."""
recipes = set()
testcase_paths = (
APIPath(path, 'http://example.com')
for path in root_path.path.iterdir() if path.is_dir()
)
for testcase_path in testcase_paths:
recipe_path = testcase_path.add('api', 'v1', 'recipe')
recipe_data = recipe_path.read()
assert recipe_data not in recipes
recipes.add(recipe_data)
# This asserts both that testcases have differing signed data
# and that a single testcase does not have the same data for
# signed and unsigned endpoints.
signed_recipe_data = recipe_path.add('signed').read()
assert signed_recipe_data not in recipes
recipes.add(signed_recipe_data)
<commit_msg>Handle error testcases in mock-server tests.<commit_after>
|
"""
Tests for the mock-server itself.
"""
from utils import APIPath
def test_testcase_difference(root_path):
"""Ensure that different testcases output different data."""
recipes = set()
testcase_paths = (
APIPath(path, 'http://example.com')
for path in root_path.path.iterdir() if path.is_dir()
)
for testcase_path in testcase_paths:
recipe_path = testcase_path.add('api', 'v1', 'recipe')
try:
recipe_data = recipe_path.read()
signed_recipe_data = recipe_path.add('signed').read()
except FileNotFoundError:
# Some error testcases are purposefully missing files,
# so we just skip checking those.
continue
assert recipe_data not in recipes
recipes.add(recipe_data)
# This asserts both that testcases have differing signed data
# and that a single testcase does not have the same data for
# signed and unsigned endpoints.
assert signed_recipe_data not in recipes
recipes.add(signed_recipe_data)
|
"""
Tests for the mock-server itself.
"""
from utils import APIPath
def test_testcase_difference(root_path):
"""Ensure that different testcases output different data."""
recipes = set()
testcase_paths = (
APIPath(path, 'http://example.com')
for path in root_path.path.iterdir() if path.is_dir()
)
for testcase_path in testcase_paths:
recipe_path = testcase_path.add('api', 'v1', 'recipe')
recipe_data = recipe_path.read()
assert recipe_data not in recipes
recipes.add(recipe_data)
# This asserts both that testcases have differing signed data
# and that a single testcase does not have the same data for
# signed and unsigned endpoints.
signed_recipe_data = recipe_path.add('signed').read()
assert signed_recipe_data not in recipes
recipes.add(signed_recipe_data)
Handle error testcases in mock-server tests."""
Tests for the mock-server itself.
"""
from utils import APIPath
def test_testcase_difference(root_path):
"""Ensure that different testcases output different data."""
recipes = set()
testcase_paths = (
APIPath(path, 'http://example.com')
for path in root_path.path.iterdir() if path.is_dir()
)
for testcase_path in testcase_paths:
recipe_path = testcase_path.add('api', 'v1', 'recipe')
try:
recipe_data = recipe_path.read()
signed_recipe_data = recipe_path.add('signed').read()
except FileNotFoundError:
# Some error testcases are purposefully missing files,
# so we just skip checking those.
continue
assert recipe_data not in recipes
recipes.add(recipe_data)
# This asserts both that testcases have differing signed data
# and that a single testcase does not have the same data for
# signed and unsigned endpoints.
assert signed_recipe_data not in recipes
recipes.add(signed_recipe_data)
|
<commit_before>"""
Tests for the mock-server itself.
"""
from utils import APIPath
def test_testcase_difference(root_path):
"""Ensure that different testcases output different data."""
recipes = set()
testcase_paths = (
APIPath(path, 'http://example.com')
for path in root_path.path.iterdir() if path.is_dir()
)
for testcase_path in testcase_paths:
recipe_path = testcase_path.add('api', 'v1', 'recipe')
recipe_data = recipe_path.read()
assert recipe_data not in recipes
recipes.add(recipe_data)
# This asserts both that testcases have differing signed data
# and that a single testcase does not have the same data for
# signed and unsigned endpoints.
signed_recipe_data = recipe_path.add('signed').read()
assert signed_recipe_data not in recipes
recipes.add(signed_recipe_data)
<commit_msg>Handle error testcases in mock-server tests.<commit_after>"""
Tests for the mock-server itself.
"""
from utils import APIPath
def test_testcase_difference(root_path):
"""Ensure that different testcases output different data."""
recipes = set()
testcase_paths = (
APIPath(path, 'http://example.com')
for path in root_path.path.iterdir() if path.is_dir()
)
for testcase_path in testcase_paths:
recipe_path = testcase_path.add('api', 'v1', 'recipe')
try:
recipe_data = recipe_path.read()
signed_recipe_data = recipe_path.add('signed').read()
except FileNotFoundError:
# Some error testcases are purposefully missing files,
# so we just skip checking those.
continue
assert recipe_data not in recipes
recipes.add(recipe_data)
# This asserts both that testcases have differing signed data
# and that a single testcase does not have the same data for
# signed and unsigned endpoints.
assert signed_recipe_data not in recipes
recipes.add(signed_recipe_data)
|
1293fccb88c129772ca9e8d11e6017740dcd609f
|
dsub/_dsub_version.py
|
dsub/_dsub_version.py
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.2'
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.3.dev0'
|
Update dsub version to 0.3.3.dev0
|
Update dsub version to 0.3.3.dev0
PiperOrigin-RevId: 253060658
|
Python
|
apache-2.0
|
DataBiosphere/dsub,DataBiosphere/dsub
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.2'
Update dsub version to 0.3.3.dev0
PiperOrigin-RevId: 253060658
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.3.dev0'
|
<commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.2'
<commit_msg>Update dsub version to 0.3.3.dev0
PiperOrigin-RevId: 253060658<commit_after>
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.3.dev0'
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.2'
Update dsub version to 0.3.3.dev0
PiperOrigin-RevId: 253060658# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.3.dev0'
|
<commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.2'
<commit_msg>Update dsub version to 0.3.3.dev0
PiperOrigin-RevId: 253060658<commit_after># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.3.dev0'
|
9ea4164f739b06752719ad4e68f5af85b18f9f1c
|
tests/scripts/constants.py
|
tests/scripts/constants.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# For better print formatting
from __future__ import print_function
# Imports
import os
############################################
# CONSTANTS
############################################
DEFAULT_SKIP = True
DEFAULT_NUM_RETRIES = 3
DEFAULT_FAIL_FAST = False
DEFAULT_FAMILIES = ["autoparallel",
"c",
"cloud",
"java",
"performance",
"pscos",
"python",
"tools",
"fault_tolerance"]
DEFAULT_TESTS = []
DEFAULT_CFG_FILE = "NIO.cfg"
DEFAULT_CFG_EXTENSION = ".cfg"
DEFAULT_COMPSS_HOME = "/opt/COMPSs/"
DEFAULT_COMM = "es.bsc.compss.nio.master.NIOAdaptor"
DEFAULT_EXECUTION_ENVS = ["python2", "python3", "python2_mpi", "python3_mpi"]
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
TESTS_DIR = os.path.join(SCRIPT_DIR, "../sources")
CONFIGURATIONS_DIR = os.path.join(SCRIPT_DIR, "../configurations")
RUNCOMPSS_REL_PATH = "Runtime/scripts/user/runcompss"
CLEAN_PROCS_REL_PATH = "Runtime/scripts/utils/compss_clean_procs"
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# For better print formatting
from __future__ import print_function
# Imports
import os
############################################
# CONSTANTS
############################################
DEFAULT_SKIP = True
DEFAULT_NUM_RETRIES = 3
DEFAULT_FAIL_FAST = False
DEFAULT_FAMILIES = [
"agents",
"autoparallel",
"c",
"cloud",
"java",
"performance",
"pscos",
"python",
"tools",
"fault_tolerance"]
DEFAULT_TESTS = []
DEFAULT_CFG_FILE = "NIO.cfg"
DEFAULT_CFG_EXTENSION = ".cfg"
DEFAULT_COMPSS_HOME = "/opt/COMPSs/"
DEFAULT_COMM = "es.bsc.compss.nio.master.NIOAdaptor"
DEFAULT_EXECUTION_ENVS = ["python2", "python3", "python2_mpi", "python3_mpi"]
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
TESTS_DIR = os.path.join(SCRIPT_DIR, "../sources")
CONFIGURATIONS_DIR = os.path.join(SCRIPT_DIR, "../configurations")
RUNCOMPSS_REL_PATH = "Runtime/scripts/user/runcompss"
CLEAN_PROCS_REL_PATH = "Runtime/scripts/utils/compss_clean_procs"
|
Include agents as a default test family
|
Include agents as a default test family
|
Python
|
apache-2.0
|
mF2C/COMPSs,mF2C/COMPSs,mF2C/COMPSs,mF2C/COMPSs,mF2C/COMPSs,mF2C/COMPSs
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# For better print formatting
from __future__ import print_function
# Imports
import os
############################################
# CONSTANTS
############################################
DEFAULT_SKIP = True
DEFAULT_NUM_RETRIES = 3
DEFAULT_FAIL_FAST = False
DEFAULT_FAMILIES = ["autoparallel",
"c",
"cloud",
"java",
"performance",
"pscos",
"python",
"tools",
"fault_tolerance"]
DEFAULT_TESTS = []
DEFAULT_CFG_FILE = "NIO.cfg"
DEFAULT_CFG_EXTENSION = ".cfg"
DEFAULT_COMPSS_HOME = "/opt/COMPSs/"
DEFAULT_COMM = "es.bsc.compss.nio.master.NIOAdaptor"
DEFAULT_EXECUTION_ENVS = ["python2", "python3", "python2_mpi", "python3_mpi"]
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
TESTS_DIR = os.path.join(SCRIPT_DIR, "../sources")
CONFIGURATIONS_DIR = os.path.join(SCRIPT_DIR, "../configurations")
RUNCOMPSS_REL_PATH = "Runtime/scripts/user/runcompss"
CLEAN_PROCS_REL_PATH = "Runtime/scripts/utils/compss_clean_procs"
Include agents as a default test family
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# For better print formatting
from __future__ import print_function
# Imports
import os
############################################
# CONSTANTS
############################################
DEFAULT_SKIP = True
DEFAULT_NUM_RETRIES = 3
DEFAULT_FAIL_FAST = False
DEFAULT_FAMILIES = [
"agents",
"autoparallel",
"c",
"cloud",
"java",
"performance",
"pscos",
"python",
"tools",
"fault_tolerance"]
DEFAULT_TESTS = []
DEFAULT_CFG_FILE = "NIO.cfg"
DEFAULT_CFG_EXTENSION = ".cfg"
DEFAULT_COMPSS_HOME = "/opt/COMPSs/"
DEFAULT_COMM = "es.bsc.compss.nio.master.NIOAdaptor"
DEFAULT_EXECUTION_ENVS = ["python2", "python3", "python2_mpi", "python3_mpi"]
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
TESTS_DIR = os.path.join(SCRIPT_DIR, "../sources")
CONFIGURATIONS_DIR = os.path.join(SCRIPT_DIR, "../configurations")
RUNCOMPSS_REL_PATH = "Runtime/scripts/user/runcompss"
CLEAN_PROCS_REL_PATH = "Runtime/scripts/utils/compss_clean_procs"
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
# For better print formatting
from __future__ import print_function
# Imports
import os
############################################
# CONSTANTS
############################################
DEFAULT_SKIP = True
DEFAULT_NUM_RETRIES = 3
DEFAULT_FAIL_FAST = False
DEFAULT_FAMILIES = ["autoparallel",
"c",
"cloud",
"java",
"performance",
"pscos",
"python",
"tools",
"fault_tolerance"]
DEFAULT_TESTS = []
DEFAULT_CFG_FILE = "NIO.cfg"
DEFAULT_CFG_EXTENSION = ".cfg"
DEFAULT_COMPSS_HOME = "/opt/COMPSs/"
DEFAULT_COMM = "es.bsc.compss.nio.master.NIOAdaptor"
DEFAULT_EXECUTION_ENVS = ["python2", "python3", "python2_mpi", "python3_mpi"]
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
TESTS_DIR = os.path.join(SCRIPT_DIR, "../sources")
CONFIGURATIONS_DIR = os.path.join(SCRIPT_DIR, "../configurations")
RUNCOMPSS_REL_PATH = "Runtime/scripts/user/runcompss"
CLEAN_PROCS_REL_PATH = "Runtime/scripts/utils/compss_clean_procs"
<commit_msg>Include agents as a default test family<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# For better print formatting
from __future__ import print_function
# Imports
import os
############################################
# CONSTANTS
############################################
DEFAULT_SKIP = True
DEFAULT_NUM_RETRIES = 3
DEFAULT_FAIL_FAST = False
DEFAULT_FAMILIES = [
"agents",
"autoparallel",
"c",
"cloud",
"java",
"performance",
"pscos",
"python",
"tools",
"fault_tolerance"]
DEFAULT_TESTS = []
DEFAULT_CFG_FILE = "NIO.cfg"
DEFAULT_CFG_EXTENSION = ".cfg"
DEFAULT_COMPSS_HOME = "/opt/COMPSs/"
DEFAULT_COMM = "es.bsc.compss.nio.master.NIOAdaptor"
DEFAULT_EXECUTION_ENVS = ["python2", "python3", "python2_mpi", "python3_mpi"]
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
TESTS_DIR = os.path.join(SCRIPT_DIR, "../sources")
CONFIGURATIONS_DIR = os.path.join(SCRIPT_DIR, "../configurations")
RUNCOMPSS_REL_PATH = "Runtime/scripts/user/runcompss"
CLEAN_PROCS_REL_PATH = "Runtime/scripts/utils/compss_clean_procs"
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# For better print formatting
from __future__ import print_function
# Imports
import os
############################################
# CONSTANTS
############################################
DEFAULT_SKIP = True
DEFAULT_NUM_RETRIES = 3
DEFAULT_FAIL_FAST = False
DEFAULT_FAMILIES = ["autoparallel",
"c",
"cloud",
"java",
"performance",
"pscos",
"python",
"tools",
"fault_tolerance"]
DEFAULT_TESTS = []
DEFAULT_CFG_FILE = "NIO.cfg"
DEFAULT_CFG_EXTENSION = ".cfg"
DEFAULT_COMPSS_HOME = "/opt/COMPSs/"
DEFAULT_COMM = "es.bsc.compss.nio.master.NIOAdaptor"
DEFAULT_EXECUTION_ENVS = ["python2", "python3", "python2_mpi", "python3_mpi"]
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
TESTS_DIR = os.path.join(SCRIPT_DIR, "../sources")
CONFIGURATIONS_DIR = os.path.join(SCRIPT_DIR, "../configurations")
RUNCOMPSS_REL_PATH = "Runtime/scripts/user/runcompss"
CLEAN_PROCS_REL_PATH = "Runtime/scripts/utils/compss_clean_procs"
Include agents as a default test family#!/usr/bin/python
# -*- coding: utf-8 -*-
# For better print formatting
from __future__ import print_function
# Imports
import os
############################################
# CONSTANTS
############################################
DEFAULT_SKIP = True
DEFAULT_NUM_RETRIES = 3
DEFAULT_FAIL_FAST = False
DEFAULT_FAMILIES = [
"agents",
"autoparallel",
"c",
"cloud",
"java",
"performance",
"pscos",
"python",
"tools",
"fault_tolerance"]
DEFAULT_TESTS = []
DEFAULT_CFG_FILE = "NIO.cfg"
DEFAULT_CFG_EXTENSION = ".cfg"
DEFAULT_COMPSS_HOME = "/opt/COMPSs/"
DEFAULT_COMM = "es.bsc.compss.nio.master.NIOAdaptor"
DEFAULT_EXECUTION_ENVS = ["python2", "python3", "python2_mpi", "python3_mpi"]
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
TESTS_DIR = os.path.join(SCRIPT_DIR, "../sources")
CONFIGURATIONS_DIR = os.path.join(SCRIPT_DIR, "../configurations")
RUNCOMPSS_REL_PATH = "Runtime/scripts/user/runcompss"
CLEAN_PROCS_REL_PATH = "Runtime/scripts/utils/compss_clean_procs"
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
# For better print formatting
from __future__ import print_function
# Imports
import os
############################################
# CONSTANTS
############################################
DEFAULT_SKIP = True
DEFAULT_NUM_RETRIES = 3
DEFAULT_FAIL_FAST = False
DEFAULT_FAMILIES = ["autoparallel",
"c",
"cloud",
"java",
"performance",
"pscos",
"python",
"tools",
"fault_tolerance"]
DEFAULT_TESTS = []
DEFAULT_CFG_FILE = "NIO.cfg"
DEFAULT_CFG_EXTENSION = ".cfg"
DEFAULT_COMPSS_HOME = "/opt/COMPSs/"
DEFAULT_COMM = "es.bsc.compss.nio.master.NIOAdaptor"
DEFAULT_EXECUTION_ENVS = ["python2", "python3", "python2_mpi", "python3_mpi"]
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
TESTS_DIR = os.path.join(SCRIPT_DIR, "../sources")
CONFIGURATIONS_DIR = os.path.join(SCRIPT_DIR, "../configurations")
RUNCOMPSS_REL_PATH = "Runtime/scripts/user/runcompss"
CLEAN_PROCS_REL_PATH = "Runtime/scripts/utils/compss_clean_procs"
<commit_msg>Include agents as a default test family<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
# For better print formatting
from __future__ import print_function
# Imports
import os
############################################
# CONSTANTS
############################################
DEFAULT_SKIP = True
DEFAULT_NUM_RETRIES = 3
DEFAULT_FAIL_FAST = False
DEFAULT_FAMILIES = [
"agents",
"autoparallel",
"c",
"cloud",
"java",
"performance",
"pscos",
"python",
"tools",
"fault_tolerance"]
DEFAULT_TESTS = []
DEFAULT_CFG_FILE = "NIO.cfg"
DEFAULT_CFG_EXTENSION = ".cfg"
DEFAULT_COMPSS_HOME = "/opt/COMPSs/"
DEFAULT_COMM = "es.bsc.compss.nio.master.NIOAdaptor"
DEFAULT_EXECUTION_ENVS = ["python2", "python3", "python2_mpi", "python3_mpi"]
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
TESTS_DIR = os.path.join(SCRIPT_DIR, "../sources")
CONFIGURATIONS_DIR = os.path.join(SCRIPT_DIR, "../configurations")
RUNCOMPSS_REL_PATH = "Runtime/scripts/user/runcompss"
CLEAN_PROCS_REL_PATH = "Runtime/scripts/utils/compss_clean_procs"
|
cc96c599cb7e83034f13f8277399dea59a6226ec
|
mooc_aggregator_restful_api/udacity.py
|
mooc_aggregator_restful_api/udacity.py
|
'''
This module retrieves the course catalog and overviews of the Udacity API
Link to Documentation:
https://s3.amazonaws.com/content.udacity-data.com/techdocs/UdacityCourseCatalogAPIDocumentation-v0.pdf
'''
import json
import requests
class UdacityAPI(object):
'''
This class defines attributes and methods for Udaciy API
'''
UDACITY_API_ENDPOINT = 'https://udacity.com/public-api/v0/courses'
def __init__(self):
self.response = requests.get(UDACITY_API_ENDPOINT)
self.courses = self.response.json()['courses']
self.tracks = self.response.json()['tracks']
def status_code(self):
return self.response.status_code
def get_courses(self):
return self.courses
def get_tracks(self):
return self.tracks
if __name__ == '__main__':
udacity_object = UdacityAPI()
print len(udacity_object.get_courses())
print udacity_object.get_courses()[0].keys()
|
'''
This module retrieves the course catalog and overviews of the Udacity API
Link to Documentation:
https://s3.amazonaws.com/content.udacity-data.com/techdocs/UdacityCourseCatalogAPIDocumentation-v0.pdf
'''
import json
import requests
class UdacityAPI(object):
'''
This class defines attributes and methods for Udaciy API
'''
UDACITY_API_ENDPOINT = 'https://udacity.com/public-api/v0/courses'
def __init__(self):
self.response = requests.get(UDACITY_API_ENDPOINT)
self.courses = self.response.json()['courses']
self.tracks = self.response.json()['tracks']
def status_code(self):
'''
Return status code of response object
'''
return self.response.status_code
def get_courses(self):
'''
Return list of course objects for all courses offered by Udacity
'''
return self.courses
def get_tracks(self):
'''
Return list of tracks offered by Udacity
'''
return self.tracks
if __name__ == '__main__':
udacity_object = UdacityAPI()
print len(udacity_object.get_courses())
print udacity_object.get_courses()[0].keys()
|
Add docstring to instance methods
|
Add docstring to instance methods
|
Python
|
mit
|
ueg1990/mooc_aggregator_restful_api
|
'''
This module retrieves the course catalog and overviews of the Udacity API
Link to Documentation:
https://s3.amazonaws.com/content.udacity-data.com/techdocs/UdacityCourseCatalogAPIDocumentation-v0.pdf
'''
import json
import requests
class UdacityAPI(object):
'''
This class defines attributes and methods for Udaciy API
'''
UDACITY_API_ENDPOINT = 'https://udacity.com/public-api/v0/courses'
def __init__(self):
self.response = requests.get(UDACITY_API_ENDPOINT)
self.courses = self.response.json()['courses']
self.tracks = self.response.json()['tracks']
def status_code(self):
return self.response.status_code
def get_courses(self):
return self.courses
def get_tracks(self):
return self.tracks
if __name__ == '__main__':
udacity_object = UdacityAPI()
print len(udacity_object.get_courses())
print udacity_object.get_courses()[0].keys()
Add docstring to instance methods
|
'''
This module retrieves the course catalog and overviews of the Udacity API
Link to Documentation:
https://s3.amazonaws.com/content.udacity-data.com/techdocs/UdacityCourseCatalogAPIDocumentation-v0.pdf
'''
import json
import requests
class UdacityAPI(object):
'''
This class defines attributes and methods for Udaciy API
'''
UDACITY_API_ENDPOINT = 'https://udacity.com/public-api/v0/courses'
def __init__(self):
self.response = requests.get(UDACITY_API_ENDPOINT)
self.courses = self.response.json()['courses']
self.tracks = self.response.json()['tracks']
def status_code(self):
'''
Return status code of response object
'''
return self.response.status_code
def get_courses(self):
'''
Return list of course objects for all courses offered by Udacity
'''
return self.courses
def get_tracks(self):
'''
Return list of tracks offered by Udacity
'''
return self.tracks
if __name__ == '__main__':
udacity_object = UdacityAPI()
print len(udacity_object.get_courses())
print udacity_object.get_courses()[0].keys()
|
<commit_before>'''
This module retrieves the course catalog and overviews of the Udacity API
Link to Documentation:
https://s3.amazonaws.com/content.udacity-data.com/techdocs/UdacityCourseCatalogAPIDocumentation-v0.pdf
'''
import json
import requests
class UdacityAPI(object):
'''
This class defines attributes and methods for Udaciy API
'''
UDACITY_API_ENDPOINT = 'https://udacity.com/public-api/v0/courses'
def __init__(self):
self.response = requests.get(UDACITY_API_ENDPOINT)
self.courses = self.response.json()['courses']
self.tracks = self.response.json()['tracks']
def status_code(self):
return self.response.status_code
def get_courses(self):
return self.courses
def get_tracks(self):
return self.tracks
if __name__ == '__main__':
udacity_object = UdacityAPI()
print len(udacity_object.get_courses())
print udacity_object.get_courses()[0].keys()
<commit_msg>Add docstring to instance methods<commit_after>
|
'''
This module retrieves the course catalog and overviews of the Udacity API
Link to Documentation:
https://s3.amazonaws.com/content.udacity-data.com/techdocs/UdacityCourseCatalogAPIDocumentation-v0.pdf
'''
import json
import requests
class UdacityAPI(object):
'''
This class defines attributes and methods for Udaciy API
'''
UDACITY_API_ENDPOINT = 'https://udacity.com/public-api/v0/courses'
def __init__(self):
self.response = requests.get(UDACITY_API_ENDPOINT)
self.courses = self.response.json()['courses']
self.tracks = self.response.json()['tracks']
def status_code(self):
'''
Return status code of response object
'''
return self.response.status_code
def get_courses(self):
'''
Return list of course objects for all courses offered by Udacity
'''
return self.courses
def get_tracks(self):
'''
Return list of tracks offered by Udacity
'''
return self.tracks
if __name__ == '__main__':
udacity_object = UdacityAPI()
print len(udacity_object.get_courses())
print udacity_object.get_courses()[0].keys()
|
'''
This module retrieves the course catalog and overviews of the Udacity API
Link to Documentation:
https://s3.amazonaws.com/content.udacity-data.com/techdocs/UdacityCourseCatalogAPIDocumentation-v0.pdf
'''
import json
import requests
class UdacityAPI(object):
'''
This class defines attributes and methods for Udaciy API
'''
UDACITY_API_ENDPOINT = 'https://udacity.com/public-api/v0/courses'
def __init__(self):
self.response = requests.get(UDACITY_API_ENDPOINT)
self.courses = self.response.json()['courses']
self.tracks = self.response.json()['tracks']
def status_code(self):
return self.response.status_code
def get_courses(self):
return self.courses
def get_tracks(self):
return self.tracks
if __name__ == '__main__':
udacity_object = UdacityAPI()
print len(udacity_object.get_courses())
print udacity_object.get_courses()[0].keys()
Add docstring to instance methods'''
This module retrieves the course catalog and overviews of the Udacity API
Link to Documentation:
https://s3.amazonaws.com/content.udacity-data.com/techdocs/UdacityCourseCatalogAPIDocumentation-v0.pdf
'''
import json
import requests
class UdacityAPI(object):
'''
This class defines attributes and methods for Udaciy API
'''
UDACITY_API_ENDPOINT = 'https://udacity.com/public-api/v0/courses'
def __init__(self):
self.response = requests.get(UDACITY_API_ENDPOINT)
self.courses = self.response.json()['courses']
self.tracks = self.response.json()['tracks']
def status_code(self):
'''
Return status code of response object
'''
return self.response.status_code
def get_courses(self):
'''
Return list of course objects for all courses offered by Udacity
'''
return self.courses
def get_tracks(self):
'''
Return list of tracks offered by Udacity
'''
return self.tracks
if __name__ == '__main__':
udacity_object = UdacityAPI()
print len(udacity_object.get_courses())
print udacity_object.get_courses()[0].keys()
|
<commit_before>'''
This module retrieves the course catalog and overviews of the Udacity API
Link to Documentation:
https://s3.amazonaws.com/content.udacity-data.com/techdocs/UdacityCourseCatalogAPIDocumentation-v0.pdf
'''
import json
import requests
class UdacityAPI(object):
'''
This class defines attributes and methods for Udaciy API
'''
UDACITY_API_ENDPOINT = 'https://udacity.com/public-api/v0/courses'
def __init__(self):
self.response = requests.get(UDACITY_API_ENDPOINT)
self.courses = self.response.json()['courses']
self.tracks = self.response.json()['tracks']
def status_code(self):
return self.response.status_code
def get_courses(self):
return self.courses
def get_tracks(self):
return self.tracks
if __name__ == '__main__':
udacity_object = UdacityAPI()
print len(udacity_object.get_courses())
print udacity_object.get_courses()[0].keys()
<commit_msg>Add docstring to instance methods<commit_after>'''
This module retrieves the course catalog and overviews of the Udacity API
Link to Documentation:
https://s3.amazonaws.com/content.udacity-data.com/techdocs/UdacityCourseCatalogAPIDocumentation-v0.pdf
'''
import json
import requests
class UdacityAPI(object):
'''
This class defines attributes and methods for Udaciy API
'''
UDACITY_API_ENDPOINT = 'https://udacity.com/public-api/v0/courses'
def __init__(self):
self.response = requests.get(UDACITY_API_ENDPOINT)
self.courses = self.response.json()['courses']
self.tracks = self.response.json()['tracks']
def status_code(self):
'''
Return status code of response object
'''
return self.response.status_code
def get_courses(self):
'''
Return list of course objects for all courses offered by Udacity
'''
return self.courses
def get_tracks(self):
'''
Return list of tracks offered by Udacity
'''
return self.tracks
if __name__ == '__main__':
udacity_object = UdacityAPI()
print len(udacity_object.get_courses())
print udacity_object.get_courses()[0].keys()
|
4696efdee643bb3d86995fea4c35f7947535111d
|
foundation/offices/tests/factories.py
|
foundation/offices/tests/factories.py
|
from __future__ import absolute_import
from .. import models
import factory
class OfficeFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'office-/{0}/'.format(n))
jst = factory.SubFactory('foundation.teryt.tests.factories.JSTFactory')
created_by = factory.SubFactory('foundation.users.tests.factories.UserFactory')
verified = True
state = 'created'
class Meta:
model = models.Office
class EmailFactory(factory.django.DjangoModelFactory):
email = factory.Sequence(lambda n: 'user-{0}@example.com'.format(n))
office = factory.SubFactory(OfficeFactory)
class Meta:
model = models.Email
|
from __future__ import absolute_import
from .. import models
import factory
class OfficeFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'office-/{0}/'.format(n))
jst = factory.SubFactory('foundation.teryt.tests.factories.JSTFactory')
created_by = factory.SubFactory('foundation.users.tests.factories.UserFactory')
verified = True
state = 'created'
class Meta:
model = models.Office
class EmailFactory(factory.django.DjangoModelFactory):
email = factory.Sequence(lambda n: 'user-{0}@example.com'.format(n))
office = factory.SubFactory(OfficeFactory)
created_by = factory.SubFactory('foundation.users.tests.factories.UserFactory')
class Meta:
model = models.Email
|
Fix EmailFactory by missing created_by
|
Fix EmailFactory by missing created_by
|
Python
|
bsd-3-clause
|
ad-m/foundation-manager,ad-m/foundation-manager,pilnujemy/pytamy,pilnujemy/pytamy,ad-m/foundation-manager,ad-m/foundation-manager,pilnujemy/pytamy,pilnujemy/pytamy
|
from __future__ import absolute_import
from .. import models
import factory
class OfficeFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'office-/{0}/'.format(n))
jst = factory.SubFactory('foundation.teryt.tests.factories.JSTFactory')
created_by = factory.SubFactory('foundation.users.tests.factories.UserFactory')
verified = True
state = 'created'
class Meta:
model = models.Office
class EmailFactory(factory.django.DjangoModelFactory):
email = factory.Sequence(lambda n: 'user-{0}@example.com'.format(n))
office = factory.SubFactory(OfficeFactory)
class Meta:
model = models.Email
Fix EmailFactory by missing created_by
|
from __future__ import absolute_import
from .. import models
import factory
class OfficeFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'office-/{0}/'.format(n))
jst = factory.SubFactory('foundation.teryt.tests.factories.JSTFactory')
created_by = factory.SubFactory('foundation.users.tests.factories.UserFactory')
verified = True
state = 'created'
class Meta:
model = models.Office
class EmailFactory(factory.django.DjangoModelFactory):
email = factory.Sequence(lambda n: 'user-{0}@example.com'.format(n))
office = factory.SubFactory(OfficeFactory)
created_by = factory.SubFactory('foundation.users.tests.factories.UserFactory')
class Meta:
model = models.Email
|
<commit_before>from __future__ import absolute_import
from .. import models
import factory
class OfficeFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'office-/{0}/'.format(n))
jst = factory.SubFactory('foundation.teryt.tests.factories.JSTFactory')
created_by = factory.SubFactory('foundation.users.tests.factories.UserFactory')
verified = True
state = 'created'
class Meta:
model = models.Office
class EmailFactory(factory.django.DjangoModelFactory):
email = factory.Sequence(lambda n: 'user-{0}@example.com'.format(n))
office = factory.SubFactory(OfficeFactory)
class Meta:
model = models.Email
<commit_msg>Fix EmailFactory by missing created_by<commit_after>
|
from __future__ import absolute_import
from .. import models
import factory
class OfficeFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'office-/{0}/'.format(n))
jst = factory.SubFactory('foundation.teryt.tests.factories.JSTFactory')
created_by = factory.SubFactory('foundation.users.tests.factories.UserFactory')
verified = True
state = 'created'
class Meta:
model = models.Office
class EmailFactory(factory.django.DjangoModelFactory):
email = factory.Sequence(lambda n: 'user-{0}@example.com'.format(n))
office = factory.SubFactory(OfficeFactory)
created_by = factory.SubFactory('foundation.users.tests.factories.UserFactory')
class Meta:
model = models.Email
|
from __future__ import absolute_import
from .. import models
import factory
class OfficeFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'office-/{0}/'.format(n))
jst = factory.SubFactory('foundation.teryt.tests.factories.JSTFactory')
created_by = factory.SubFactory('foundation.users.tests.factories.UserFactory')
verified = True
state = 'created'
class Meta:
model = models.Office
class EmailFactory(factory.django.DjangoModelFactory):
email = factory.Sequence(lambda n: 'user-{0}@example.com'.format(n))
office = factory.SubFactory(OfficeFactory)
class Meta:
model = models.Email
Fix EmailFactory by missing created_byfrom __future__ import absolute_import
from .. import models
import factory
class OfficeFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'office-/{0}/'.format(n))
jst = factory.SubFactory('foundation.teryt.tests.factories.JSTFactory')
created_by = factory.SubFactory('foundation.users.tests.factories.UserFactory')
verified = True
state = 'created'
class Meta:
model = models.Office
class EmailFactory(factory.django.DjangoModelFactory):
email = factory.Sequence(lambda n: 'user-{0}@example.com'.format(n))
office = factory.SubFactory(OfficeFactory)
created_by = factory.SubFactory('foundation.users.tests.factories.UserFactory')
class Meta:
model = models.Email
|
<commit_before>from __future__ import absolute_import
from .. import models
import factory
class OfficeFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'office-/{0}/'.format(n))
jst = factory.SubFactory('foundation.teryt.tests.factories.JSTFactory')
created_by = factory.SubFactory('foundation.users.tests.factories.UserFactory')
verified = True
state = 'created'
class Meta:
model = models.Office
class EmailFactory(factory.django.DjangoModelFactory):
email = factory.Sequence(lambda n: 'user-{0}@example.com'.format(n))
office = factory.SubFactory(OfficeFactory)
class Meta:
model = models.Email
<commit_msg>Fix EmailFactory by missing created_by<commit_after>from __future__ import absolute_import
from .. import models
import factory
class OfficeFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'office-/{0}/'.format(n))
jst = factory.SubFactory('foundation.teryt.tests.factories.JSTFactory')
created_by = factory.SubFactory('foundation.users.tests.factories.UserFactory')
verified = True
state = 'created'
class Meta:
model = models.Office
class EmailFactory(factory.django.DjangoModelFactory):
email = factory.Sequence(lambda n: 'user-{0}@example.com'.format(n))
office = factory.SubFactory(OfficeFactory)
created_by = factory.SubFactory('foundation.users.tests.factories.UserFactory')
class Meta:
model = models.Email
|
1639200e5700b1170a9d2312a32c7991ed5198b4
|
tests/basics/boundmeth1.py
|
tests/basics/boundmeth1.py
|
# tests basics of bound methods
# uPy and CPython differ when printing a bound method, so just print the type
print(type(repr([].append)))
class A:
def f(self):
return 0
def g(self, a):
return a
def h(self, a, b, c, d, e, f):
return a + b + c + d + e + f
# bound method with no extra args
m = A().f
print(m())
# bound method with 1 extra arg
m = A().g
print(m(1))
# bound method with lots of extra args
m = A().h
print(m(1, 2, 3, 4, 5, 6))
|
# tests basics of bound methods
# uPy and CPython differ when printing a bound method, so just print the type
print(type(repr([].append)))
class A:
def f(self):
return 0
def g(self, a):
return a
def h(self, a, b, c, d, e, f):
return a + b + c + d + e + f
# bound method with no extra args
m = A().f
print(m())
# bound method with 1 extra arg
m = A().g
print(m(1))
# bound method with lots of extra args
m = A().h
print(m(1, 2, 3, 4, 5, 6))
# can't assign attributes to a bound method
try:
A().f.x = 1
except AttributeError:
print('AttributeError')
|
Add test for assignment of attribute to bound method.
|
tests/basics: Add test for assignment of attribute to bound method.
|
Python
|
mit
|
ryannathans/micropython,bvernoux/micropython,HenrikSolver/micropython,dmazzella/micropython,lowRISC/micropython,toolmacher/micropython,ryannathans/micropython,cwyark/micropython,deshipu/micropython,mhoffma/micropython,HenrikSolver/micropython,Peetz0r/micropython-esp32,Timmenem/micropython,MrSurly/micropython,tralamazza/micropython,alex-robbins/micropython,chrisdearman/micropython,adafruit/circuitpython,trezor/micropython,deshipu/micropython,adafruit/circuitpython,tobbad/micropython,dmazzella/micropython,PappaPeppar/micropython,puuu/micropython,MrSurly/micropython-esp32,blazewicz/micropython,MrSurly/micropython,selste/micropython,swegener/micropython,tralamazza/micropython,mhoffma/micropython,AriZuu/micropython,PappaPeppar/micropython,lowRISC/micropython,henriknelson/micropython,torwag/micropython,puuu/micropython,toolmacher/micropython,toolmacher/micropython,kerneltask/micropython,mhoffma/micropython,deshipu/micropython,HenrikSolver/micropython,Peetz0r/micropython-esp32,mhoffma/micropython,tobbad/micropython,pozetroninc/micropython,toolmacher/micropython,AriZuu/micropython,ryannathans/micropython,hiway/micropython,pozetroninc/micropython,mhoffma/micropython,swegener/micropython,swegener/micropython,blazewicz/micropython,trezor/micropython,selste/micropython,HenrikSolver/micropython,adafruit/micropython,SHA2017-badge/micropython-esp32,henriknelson/micropython,alex-robbins/micropython,PappaPeppar/micropython,oopy/micropython,MrSurly/micropython-esp32,adafruit/micropython,TDAbboud/micropython,adafruit/circuitpython,tobbad/micropython,infinnovation/micropython,infinnovation/micropython,alex-robbins/micropython,henriknelson/micropython,pfalcon/micropython,ryannathans/micropython,adafruit/circuitpython,adafruit/circuitpython,ryannathans/micropython,MrSurly/micropython-esp32,infinnovation/micropython,TDAbboud/micropython,pozetroninc/micropython,lowRISC/micropython,adafruit/micropython,MrSurly/micropython,hiway/micropython,Peetz0r/micropython-esp32,tobbad/micropython,MrSurl
y/micropython,micropython/micropython-esp32,tralamazza/micropython,pramasoul/micropython,Timmenem/micropython,pfalcon/micropython,micropython/micropython-esp32,bvernoux/micropython,henriknelson/micropython,chrisdearman/micropython,adafruit/circuitpython,Timmenem/micropython,torwag/micropython,micropython/micropython-esp32,AriZuu/micropython,dmazzella/micropython,pramasoul/micropython,selste/micropython,tralamazza/micropython,lowRISC/micropython,puuu/micropython,AriZuu/micropython,SHA2017-badge/micropython-esp32,alex-robbins/micropython,adafruit/micropython,HenrikSolver/micropython,pramasoul/micropython,kerneltask/micropython,cwyark/micropython,trezor/micropython,henriknelson/micropython,tobbad/micropython,hiway/micropython,Peetz0r/micropython-esp32,PappaPeppar/micropython,AriZuu/micropython,bvernoux/micropython,TDAbboud/micropython,swegener/micropython,adafruit/micropython,selste/micropython,SHA2017-badge/micropython-esp32,Timmenem/micropython,hiway/micropython,MrSurly/micropython,infinnovation/micropython,deshipu/micropython,pozetroninc/micropython,deshipu/micropython,MrSurly/micropython-esp32,micropython/micropython-esp32,MrSurly/micropython-esp32,trezor/micropython,trezor/micropython,SHA2017-badge/micropython-esp32,torwag/micropython,pramasoul/micropython,chrisdearman/micropython,infinnovation/micropython,blazewicz/micropython,TDAbboud/micropython,pramasoul/micropython,oopy/micropython,Peetz0r/micropython-esp32,micropython/micropython-esp32,pfalcon/micropython,kerneltask/micropython,TDAbboud/micropython,chrisdearman/micropython,torwag/micropython,blazewicz/micropython,pozetroninc/micropython,cwyark/micropython,pfalcon/micropython,kerneltask/micropython,cwyark/micropython,oopy/micropython,Timmenem/micropython,toolmacher/micropython,pfalcon/micropython,puuu/micropython,SHA2017-badge/micropython-esp32,puuu/micropython,bvernoux/micropython,blazewicz/micropython,oopy/micropython,chrisdearman/micropython,kerneltask/micropython,swegener/micropython,oopy/micropython,torw
ag/micropython,PappaPeppar/micropython,bvernoux/micropython,alex-robbins/micropython,hiway/micropython,cwyark/micropython,selste/micropython,lowRISC/micropython,dmazzella/micropython
|
# tests basics of bound methods
# uPy and CPython differ when printing a bound method, so just print the type
print(type(repr([].append)))
class A:
def f(self):
return 0
def g(self, a):
return a
def h(self, a, b, c, d, e, f):
return a + b + c + d + e + f
# bound method with no extra args
m = A().f
print(m())
# bound method with 1 extra arg
m = A().g
print(m(1))
# bound method with lots of extra args
m = A().h
print(m(1, 2, 3, 4, 5, 6))
tests/basics: Add test for assignment of attribute to bound method.
|
# tests basics of bound methods
# uPy and CPython differ when printing a bound method, so just print the type
print(type(repr([].append)))
class A:
def f(self):
return 0
def g(self, a):
return a
def h(self, a, b, c, d, e, f):
return a + b + c + d + e + f
# bound method with no extra args
m = A().f
print(m())
# bound method with 1 extra arg
m = A().g
print(m(1))
# bound method with lots of extra args
m = A().h
print(m(1, 2, 3, 4, 5, 6))
# can't assign attributes to a bound method
try:
A().f.x = 1
except AttributeError:
print('AttributeError')
|
<commit_before># tests basics of bound methods
# uPy and CPython differ when printing a bound method, so just print the type
print(type(repr([].append)))
class A:
def f(self):
return 0
def g(self, a):
return a
def h(self, a, b, c, d, e, f):
return a + b + c + d + e + f
# bound method with no extra args
m = A().f
print(m())
# bound method with 1 extra arg
m = A().g
print(m(1))
# bound method with lots of extra args
m = A().h
print(m(1, 2, 3, 4, 5, 6))
<commit_msg>tests/basics: Add test for assignment of attribute to bound method.<commit_after>
|
# tests basics of bound methods
# uPy and CPython differ when printing a bound method, so just print the type
print(type(repr([].append)))
class A:
def f(self):
return 0
def g(self, a):
return a
def h(self, a, b, c, d, e, f):
return a + b + c + d + e + f
# bound method with no extra args
m = A().f
print(m())
# bound method with 1 extra arg
m = A().g
print(m(1))
# bound method with lots of extra args
m = A().h
print(m(1, 2, 3, 4, 5, 6))
# can't assign attributes to a bound method
try:
A().f.x = 1
except AttributeError:
print('AttributeError')
|
# tests basics of bound methods
# uPy and CPython differ when printing a bound method, so just print the type
print(type(repr([].append)))
class A:
def f(self):
return 0
def g(self, a):
return a
def h(self, a, b, c, d, e, f):
return a + b + c + d + e + f
# bound method with no extra args
m = A().f
print(m())
# bound method with 1 extra arg
m = A().g
print(m(1))
# bound method with lots of extra args
m = A().h
print(m(1, 2, 3, 4, 5, 6))
tests/basics: Add test for assignment of attribute to bound method.# tests basics of bound methods
# uPy and CPython differ when printing a bound method, so just print the type
print(type(repr([].append)))
class A:
def f(self):
return 0
def g(self, a):
return a
def h(self, a, b, c, d, e, f):
return a + b + c + d + e + f
# bound method with no extra args
m = A().f
print(m())
# bound method with 1 extra arg
m = A().g
print(m(1))
# bound method with lots of extra args
m = A().h
print(m(1, 2, 3, 4, 5, 6))
# can't assign attributes to a bound method
try:
A().f.x = 1
except AttributeError:
print('AttributeError')
|
<commit_before># tests basics of bound methods
# uPy and CPython differ when printing a bound method, so just print the type
print(type(repr([].append)))
class A:
def f(self):
return 0
def g(self, a):
return a
def h(self, a, b, c, d, e, f):
return a + b + c + d + e + f
# bound method with no extra args
m = A().f
print(m())
# bound method with 1 extra arg
m = A().g
print(m(1))
# bound method with lots of extra args
m = A().h
print(m(1, 2, 3, 4, 5, 6))
<commit_msg>tests/basics: Add test for assignment of attribute to bound method.<commit_after># tests basics of bound methods
# uPy and CPython differ when printing a bound method, so just print the type
print(type(repr([].append)))
class A:
def f(self):
return 0
def g(self, a):
return a
def h(self, a, b, c, d, e, f):
return a + b + c + d + e + f
# bound method with no extra args
m = A().f
print(m())
# bound method with 1 extra arg
m = A().g
print(m(1))
# bound method with lots of extra args
m = A().h
print(m(1, 2, 3, 4, 5, 6))
# can't assign attributes to a bound method
try:
A().f.x = 1
except AttributeError:
print('AttributeError')
|
0224877de121cb7cb850ef51a75c1c3f8c1cb105
|
kala.py
|
kala.py
|
#!/usr/bin/python
import json
import bottle
from bottle_mongo import MongoPlugin
app = bottle.Bottle()
app.config.load_config('settings.ini')
app.install(MongoPlugin(
uri=app.config['mongodb.uri'],
db=app.config['mongodb.db'],
json_mongo=True))
def _get_json(name):
result = bottle.request.query.get(name)
return json.loads(result) if result else None
@app.route('/<collection>')
def get(mongodb, collection):
filter_ = _get_json('filter')
projection = _get_json('projection')
skip = int(bottle.request.query.get('skip', 0))
limit = int(bottle.request.query.get('limit', 100))
sort = _get_json('sort')
cursor = mongodb[collection].find(
filter=filter_, projection=projection, skip=skip, limit=limit,
sort=sort
)
return {'results': [document for document in cursor]}
if __name__ == '__main__':
app.run()
|
#!/usr/bin/python
import json
import bottle
from bottle_mongo import MongoPlugin
app = bottle.Bottle()
app.config.load_config('settings.ini')
app.install(MongoPlugin(
uri=app.config['mongodb.uri'],
db=app.config['mongodb.db'],
json_mongo=True))
def _get_json(name):
result = bottle.request.query.get(name)
return json.loads(result) if result else None
@app.route('/<collection>')
def get(mongodb, collection):
filter_ = _get_json('filter')
projection = _get_json('projection')
skip = int(bottle.request.query.get('skip', 0))
limit = int(bottle.request.query.get('limit', 100))
sort = _get_json('sort')
# Turns a JSON array of arrays to a list of tuples.
sort = [tuple(field) for field in sort] if sort else None
cursor = mongodb[collection].find(
filter=filter_, projection=projection, skip=skip, limit=limit,
sort=sort
)
return {'results': [document for document in cursor]}
if __name__ == '__main__':
app.run()
|
Sort needs to be an array of arrays in the query string but a list of tuples in python.
|
Bugfix: Sort needs to be an array of arrays in the query string but a list of tuples in python.
|
Python
|
mit
|
damoxc/kala,cheng93/kala,cloudbuy/kala
|
#!/usr/bin/python
import json
import bottle
from bottle_mongo import MongoPlugin
app = bottle.Bottle()
app.config.load_config('settings.ini')
app.install(MongoPlugin(
uri=app.config['mongodb.uri'],
db=app.config['mongodb.db'],
json_mongo=True))
def _get_json(name):
result = bottle.request.query.get(name)
return json.loads(result) if result else None
@app.route('/<collection>')
def get(mongodb, collection):
filter_ = _get_json('filter')
projection = _get_json('projection')
skip = int(bottle.request.query.get('skip', 0))
limit = int(bottle.request.query.get('limit', 100))
sort = _get_json('sort')
cursor = mongodb[collection].find(
filter=filter_, projection=projection, skip=skip, limit=limit,
sort=sort
)
return {'results': [document for document in cursor]}
if __name__ == '__main__':
app.run()
Bugfix: Sort needs to be an array of arrays in the query string but a list of tuples in python.
|
#!/usr/bin/python
import json
import bottle
from bottle_mongo import MongoPlugin
app = bottle.Bottle()
app.config.load_config('settings.ini')
app.install(MongoPlugin(
uri=app.config['mongodb.uri'],
db=app.config['mongodb.db'],
json_mongo=True))
def _get_json(name):
result = bottle.request.query.get(name)
return json.loads(result) if result else None
@app.route('/<collection>')
def get(mongodb, collection):
filter_ = _get_json('filter')
projection = _get_json('projection')
skip = int(bottle.request.query.get('skip', 0))
limit = int(bottle.request.query.get('limit', 100))
sort = _get_json('sort')
# Turns a JSON array of arrays to a list of tuples.
sort = [tuple(field) for field in sort] if sort else None
cursor = mongodb[collection].find(
filter=filter_, projection=projection, skip=skip, limit=limit,
sort=sort
)
return {'results': [document for document in cursor]}
if __name__ == '__main__':
app.run()
|
<commit_before>#!/usr/bin/python
import json
import bottle
from bottle_mongo import MongoPlugin
app = bottle.Bottle()
app.config.load_config('settings.ini')
app.install(MongoPlugin(
uri=app.config['mongodb.uri'],
db=app.config['mongodb.db'],
json_mongo=True))
def _get_json(name):
result = bottle.request.query.get(name)
return json.loads(result) if result else None
@app.route('/<collection>')
def get(mongodb, collection):
filter_ = _get_json('filter')
projection = _get_json('projection')
skip = int(bottle.request.query.get('skip', 0))
limit = int(bottle.request.query.get('limit', 100))
sort = _get_json('sort')
cursor = mongodb[collection].find(
filter=filter_, projection=projection, skip=skip, limit=limit,
sort=sort
)
return {'results': [document for document in cursor]}
if __name__ == '__main__':
app.run()
<commit_msg>Bugfix: Sort needs to be an array of arrays in the query string but a list of tuples in python.<commit_after>
|
#!/usr/bin/python
import json
import bottle
from bottle_mongo import MongoPlugin
app = bottle.Bottle()
app.config.load_config('settings.ini')
app.install(MongoPlugin(
uri=app.config['mongodb.uri'],
db=app.config['mongodb.db'],
json_mongo=True))
def _get_json(name):
result = bottle.request.query.get(name)
return json.loads(result) if result else None
@app.route('/<collection>')
def get(mongodb, collection):
filter_ = _get_json('filter')
projection = _get_json('projection')
skip = int(bottle.request.query.get('skip', 0))
limit = int(bottle.request.query.get('limit', 100))
sort = _get_json('sort')
# Turns a JSON array of arrays to a list of tuples.
sort = [tuple(field) for field in sort] if sort else None
cursor = mongodb[collection].find(
filter=filter_, projection=projection, skip=skip, limit=limit,
sort=sort
)
return {'results': [document for document in cursor]}
if __name__ == '__main__':
app.run()
|
#!/usr/bin/python
import json
import bottle
from bottle_mongo import MongoPlugin
app = bottle.Bottle()
app.config.load_config('settings.ini')
app.install(MongoPlugin(
uri=app.config['mongodb.uri'],
db=app.config['mongodb.db'],
json_mongo=True))
def _get_json(name):
result = bottle.request.query.get(name)
return json.loads(result) if result else None
@app.route('/<collection>')
def get(mongodb, collection):
filter_ = _get_json('filter')
projection = _get_json('projection')
skip = int(bottle.request.query.get('skip', 0))
limit = int(bottle.request.query.get('limit', 100))
sort = _get_json('sort')
cursor = mongodb[collection].find(
filter=filter_, projection=projection, skip=skip, limit=limit,
sort=sort
)
return {'results': [document for document in cursor]}
if __name__ == '__main__':
app.run()
Bugfix: Sort needs to be an array of arrays in the query string but a list of tuples in python.#!/usr/bin/python
import json
import bottle
from bottle_mongo import MongoPlugin
app = bottle.Bottle()
app.config.load_config('settings.ini')
app.install(MongoPlugin(
uri=app.config['mongodb.uri'],
db=app.config['mongodb.db'],
json_mongo=True))
def _get_json(name):
result = bottle.request.query.get(name)
return json.loads(result) if result else None
@app.route('/<collection>')
def get(mongodb, collection):
filter_ = _get_json('filter')
projection = _get_json('projection')
skip = int(bottle.request.query.get('skip', 0))
limit = int(bottle.request.query.get('limit', 100))
sort = _get_json('sort')
# Turns a JSON array of arrays to a list of tuples.
sort = [tuple(field) for field in sort] if sort else None
cursor = mongodb[collection].find(
filter=filter_, projection=projection, skip=skip, limit=limit,
sort=sort
)
return {'results': [document for document in cursor]}
if __name__ == '__main__':
app.run()
|
<commit_before>#!/usr/bin/python
import json
import bottle
from bottle_mongo import MongoPlugin
app = bottle.Bottle()
app.config.load_config('settings.ini')
app.install(MongoPlugin(
uri=app.config['mongodb.uri'],
db=app.config['mongodb.db'],
json_mongo=True))
def _get_json(name):
result = bottle.request.query.get(name)
return json.loads(result) if result else None
@app.route('/<collection>')
def get(mongodb, collection):
filter_ = _get_json('filter')
projection = _get_json('projection')
skip = int(bottle.request.query.get('skip', 0))
limit = int(bottle.request.query.get('limit', 100))
sort = _get_json('sort')
cursor = mongodb[collection].find(
filter=filter_, projection=projection, skip=skip, limit=limit,
sort=sort
)
return {'results': [document for document in cursor]}
if __name__ == '__main__':
app.run()
<commit_msg>Bugfix: Sort needs to be an array of arrays in the query string but a list of tuples in python.<commit_after>#!/usr/bin/python
import json
import bottle
from bottle_mongo import MongoPlugin
app = bottle.Bottle()
app.config.load_config('settings.ini')
app.install(MongoPlugin(
uri=app.config['mongodb.uri'],
db=app.config['mongodb.db'],
json_mongo=True))
def _get_json(name):
result = bottle.request.query.get(name)
return json.loads(result) if result else None
@app.route('/<collection>')
def get(mongodb, collection):
filter_ = _get_json('filter')
projection = _get_json('projection')
skip = int(bottle.request.query.get('skip', 0))
limit = int(bottle.request.query.get('limit', 100))
sort = _get_json('sort')
# Turns a JSON array of arrays to a list of tuples.
sort = [tuple(field) for field in sort] if sort else None
cursor = mongodb[collection].find(
filter=filter_, projection=projection, skip=skip, limit=limit,
sort=sort
)
return {'results': [document for document in cursor]}
if __name__ == '__main__':
app.run()
|
366da021d86466c8aa8389f6cfe80386172c3b8f
|
data_structures/tree/tree_node.py
|
data_structures/tree/tree_node.py
|
class TreeNode(object):
def __init__(self, key=None, payload=None):
self.key = key
self.payload = payload
self.left = None
self.right = None
self.parent = None
def __str__(self):
s = str(self.key)
if self.payload:
s += ": " + str(self.payload)
return s
|
class TreeNode(object):
def __init__(self, key=None, payload=None):
self.key = key
self.payload = payload
self.left = None
self.right = None
self.parent = None
def __str__(self):
s = str(self.key)
if self.payload:
s += ": " + str(self.payload)
return s
def set_children(self, **kwargs):
"""
This funciton used to set node's children, and also update its parent
Usage:
node1.set_children(left=node2)
node1.set_children(right=node3)
node1.set_children(left=node2, right=node3)
"""
old_children = (self.left, self.right)
if "left" in kwargs:
self.left = kwargs["left"]
if self.left:
self.left.parent = self
if "right" in kwargs:
self.right = kwargs["right"]
if self.right:
self.right.parent = self
return old_children
|
Add set_children function in TreeNode
|
Add set_children function in TreeNode
|
Python
|
mit
|
hongta/practice-python,hongta/practice-python
|
class TreeNode(object):
def __init__(self, key=None, payload=None):
self.key = key
self.payload = payload
self.left = None
self.right = None
self.parent = None
def __str__(self):
s = str(self.key)
if self.payload:
s += ": " + str(self.payload)
return s
Add set_children function in TreeNode
|
class TreeNode(object):
def __init__(self, key=None, payload=None):
self.key = key
self.payload = payload
self.left = None
self.right = None
self.parent = None
def __str__(self):
s = str(self.key)
if self.payload:
s += ": " + str(self.payload)
return s
def set_children(self, **kwargs):
"""
This funciton used to set node's children, and also update its parent
Usage:
node1.set_children(left=node2)
node1.set_children(right=node3)
node1.set_children(left=node2, right=node3)
"""
old_children = (self.left, self.right)
if "left" in kwargs:
self.left = kwargs["left"]
if self.left:
self.left.parent = self
if "right" in kwargs:
self.right = kwargs["right"]
if self.right:
self.right.parent = self
return old_children
|
<commit_before>
class TreeNode(object):
def __init__(self, key=None, payload=None):
self.key = key
self.payload = payload
self.left = None
self.right = None
self.parent = None
def __str__(self):
s = str(self.key)
if self.payload:
s += ": " + str(self.payload)
return s
<commit_msg>Add set_children function in TreeNode<commit_after>
|
class TreeNode(object):
def __init__(self, key=None, payload=None):
self.key = key
self.payload = payload
self.left = None
self.right = None
self.parent = None
def __str__(self):
s = str(self.key)
if self.payload:
s += ": " + str(self.payload)
return s
def set_children(self, **kwargs):
"""
This funciton used to set node's children, and also update its parent
Usage:
node1.set_children(left=node2)
node1.set_children(right=node3)
node1.set_children(left=node2, right=node3)
"""
old_children = (self.left, self.right)
if "left" in kwargs:
self.left = kwargs["left"]
if self.left:
self.left.parent = self
if "right" in kwargs:
self.right = kwargs["right"]
if self.right:
self.right.parent = self
return old_children
|
class TreeNode(object):
def __init__(self, key=None, payload=None):
self.key = key
self.payload = payload
self.left = None
self.right = None
self.parent = None
def __str__(self):
s = str(self.key)
if self.payload:
s += ": " + str(self.payload)
return s
Add set_children function in TreeNode
class TreeNode(object):
def __init__(self, key=None, payload=None):
self.key = key
self.payload = payload
self.left = None
self.right = None
self.parent = None
def __str__(self):
s = str(self.key)
if self.payload:
s += ": " + str(self.payload)
return s
def set_children(self, **kwargs):
"""
This funciton used to set node's children, and also update its parent
Usage:
node1.set_children(left=node2)
node1.set_children(right=node3)
node1.set_children(left=node2, right=node3)
"""
old_children = (self.left, self.right)
if "left" in kwargs:
self.left = kwargs["left"]
if self.left:
self.left.parent = self
if "right" in kwargs:
self.right = kwargs["right"]
if self.right:
self.right.parent = self
return old_children
|
<commit_before>
class TreeNode(object):
def __init__(self, key=None, payload=None):
self.key = key
self.payload = payload
self.left = None
self.right = None
self.parent = None
def __str__(self):
s = str(self.key)
if self.payload:
s += ": " + str(self.payload)
return s
<commit_msg>Add set_children function in TreeNode<commit_after>
class TreeNode(object):
def __init__(self, key=None, payload=None):
self.key = key
self.payload = payload
self.left = None
self.right = None
self.parent = None
def __str__(self):
s = str(self.key)
if self.payload:
s += ": " + str(self.payload)
return s
def set_children(self, **kwargs):
"""
This funciton used to set node's children, and also update its parent
Usage:
node1.set_children(left=node2)
node1.set_children(right=node3)
node1.set_children(left=node2, right=node3)
"""
old_children = (self.left, self.right)
if "left" in kwargs:
self.left = kwargs["left"]
if self.left:
self.left.parent = self
if "right" in kwargs:
self.right = kwargs["right"]
if self.right:
self.right.parent = self
return old_children
|
89dff1818183fa6c3e7f9c6f5a802842e4e3e797
|
demo/texture.py
|
demo/texture.py
|
#!/usr/bin/env python
from VisionEgg.Core import *
from VisionEgg.Textures import *
from VisionEgg.AppHelper import *
filename = "orig.bmp"
if len(sys.argv) > 1:
filename = sys.argv[1]
try:
texture = TextureFromFile(filename)
except:
print "Could not open image file '%s', generating texture."%filename
texture = Texture(size=(256,256))
screen = get_default_screen()
# Set the projection so that eye coordinates are window coordinates
projection = OrthographicProjection(right=screen.size[0],top=screen.size[1])
# Create the instance of TextureStimulus
stimulus = TextureStimulus(texture=texture,projection=projection)
# Set the stimulus to have 1:1 scaling (requires projection as set above)
# This may result in clipping if texture is bigger than screen
stimulus.parameters.upper_right = ( texture.orig.size[0], texture.orig.size[1] )
stimulus.init_gl()
# Because the stimulus has a projection, we don't care what default is that the viewport uses.
lower_y = screen.size[1]/2 - texture.orig.size[1]/2
viewport = Viewport(screen,(0,lower_y),screen.size)
viewport.add_stimulus(stimulus)
p = Presentation(duration=(5.0,'seconds'),viewports=[viewport])
p.go()
|
#!/usr/bin/env python
from VisionEgg.Core import *
from VisionEgg.Textures import *
from VisionEgg.AppHelper import *
filename = "orig.bmp"
if len(sys.argv) > 1:
filename = sys.argv[1]
try:
texture = TextureFromFile(filename)
except:
print "Could not open image file '%s', generating texture."%filename
texture = Texture(size=(256,256))
screen = get_default_screen()
# Set the projection so that eye coordinates are window coordinates
projection = OrthographicProjection(right=screen.size[0],top=screen.size[1])
# Create the instance of TextureStimulus
stimulus = TextureStimulus(texture=texture,projection=projection)
# Set the stimulus to have 1:1 scaling (requires projection as set above)
# This may result in clipping if texture is bigger than screen
stimulus.parameters.right = texture.orig.size[0]
stimulus.parameters.top = texture.orig.size[1]
stimulus.init_gl()
# Because the stimulus has a projection, we don't care what the
# default is that the viewport uses.
lower_y = screen.size[1]/2 - texture.orig.size[1]/2
viewport = Viewport(screen,(0,lower_y),screen.size)
viewport.add_stimulus(stimulus)
p = Presentation(duration=(5.0,'seconds'),viewports=[viewport])
p.go()
|
Use new TextureStimulus parameter definitions.
|
Use new TextureStimulus parameter definitions.
git-svn-id: 033d166fe8e629f6cbcd3c0e2b9ad0cffc79b88b@238 3a63a0ee-37fe-0310-a504-e92b6e0a3ba7
|
Python
|
lgpl-2.1
|
visionegg/visionegg,visionegg/visionegg,visionegg/visionegg,visionegg/visionegg,visionegg/visionegg
|
#!/usr/bin/env python
from VisionEgg.Core import *
from VisionEgg.Textures import *
from VisionEgg.AppHelper import *
filename = "orig.bmp"
if len(sys.argv) > 1:
filename = sys.argv[1]
try:
texture = TextureFromFile(filename)
except:
print "Could not open image file '%s', generating texture."%filename
texture = Texture(size=(256,256))
screen = get_default_screen()
# Set the projection so that eye coordinates are window coordinates
projection = OrthographicProjection(right=screen.size[0],top=screen.size[1])
# Create the instance of TextureStimulus
stimulus = TextureStimulus(texture=texture,projection=projection)
# Set the stimulus to have 1:1 scaling (requires projection as set above)
# This may result in clipping if texture is bigger than screen
stimulus.parameters.upper_right = ( texture.orig.size[0], texture.orig.size[1] )
stimulus.init_gl()
# Because the stimulus has a projection, we don't care what default is that the viewport uses.
lower_y = screen.size[1]/2 - texture.orig.size[1]/2
viewport = Viewport(screen,(0,lower_y),screen.size)
viewport.add_stimulus(stimulus)
p = Presentation(duration=(5.0,'seconds'),viewports=[viewport])
p.go()
Use new TextureStimulus parameter definitions.
git-svn-id: 033d166fe8e629f6cbcd3c0e2b9ad0cffc79b88b@238 3a63a0ee-37fe-0310-a504-e92b6e0a3ba7
|
#!/usr/bin/env python
from VisionEgg.Core import *
from VisionEgg.Textures import *
from VisionEgg.AppHelper import *
filename = "orig.bmp"
if len(sys.argv) > 1:
filename = sys.argv[1]
try:
texture = TextureFromFile(filename)
except:
print "Could not open image file '%s', generating texture."%filename
texture = Texture(size=(256,256))
screen = get_default_screen()
# Set the projection so that eye coordinates are window coordinates
projection = OrthographicProjection(right=screen.size[0],top=screen.size[1])
# Create the instance of TextureStimulus
stimulus = TextureStimulus(texture=texture,projection=projection)
# Set the stimulus to have 1:1 scaling (requires projection as set above)
# This may result in clipping if texture is bigger than screen
stimulus.parameters.right = texture.orig.size[0]
stimulus.parameters.top = texture.orig.size[1]
stimulus.init_gl()
# Because the stimulus has a projection, we don't care what the
# default is that the viewport uses.
lower_y = screen.size[1]/2 - texture.orig.size[1]/2
viewport = Viewport(screen,(0,lower_y),screen.size)
viewport.add_stimulus(stimulus)
p = Presentation(duration=(5.0,'seconds'),viewports=[viewport])
p.go()
|
<commit_before>#!/usr/bin/env python
from VisionEgg.Core import *
from VisionEgg.Textures import *
from VisionEgg.AppHelper import *
filename = "orig.bmp"
if len(sys.argv) > 1:
filename = sys.argv[1]
try:
texture = TextureFromFile(filename)
except:
print "Could not open image file '%s', generating texture."%filename
texture = Texture(size=(256,256))
screen = get_default_screen()
# Set the projection so that eye coordinates are window coordinates
projection = OrthographicProjection(right=screen.size[0],top=screen.size[1])
# Create the instance of TextureStimulus
stimulus = TextureStimulus(texture=texture,projection=projection)
# Set the stimulus to have 1:1 scaling (requires projection as set above)
# This may result in clipping if texture is bigger than screen
stimulus.parameters.upper_right = ( texture.orig.size[0], texture.orig.size[1] )
stimulus.init_gl()
# Because the stimulus has a projection, we don't care what default is that the viewport uses.
lower_y = screen.size[1]/2 - texture.orig.size[1]/2
viewport = Viewport(screen,(0,lower_y),screen.size)
viewport.add_stimulus(stimulus)
p = Presentation(duration=(5.0,'seconds'),viewports=[viewport])
p.go()
<commit_msg>Use new TextureStimulus parameter definitions.
git-svn-id: 033d166fe8e629f6cbcd3c0e2b9ad0cffc79b88b@238 3a63a0ee-37fe-0310-a504-e92b6e0a3ba7<commit_after>
|
#!/usr/bin/env python
from VisionEgg.Core import *
from VisionEgg.Textures import *
from VisionEgg.AppHelper import *
filename = "orig.bmp"
if len(sys.argv) > 1:
filename = sys.argv[1]
try:
texture = TextureFromFile(filename)
except:
print "Could not open image file '%s', generating texture."%filename
texture = Texture(size=(256,256))
screen = get_default_screen()
# Set the projection so that eye coordinates are window coordinates
projection = OrthographicProjection(right=screen.size[0],top=screen.size[1])
# Create the instance of TextureStimulus
stimulus = TextureStimulus(texture=texture,projection=projection)
# Set the stimulus to have 1:1 scaling (requires projection as set above)
# This may result in clipping if texture is bigger than screen
stimulus.parameters.right = texture.orig.size[0]
stimulus.parameters.top = texture.orig.size[1]
stimulus.init_gl()
# Because the stimulus has a projection, we don't care what the
# default is that the viewport uses.
lower_y = screen.size[1]/2 - texture.orig.size[1]/2
viewport = Viewport(screen,(0,lower_y),screen.size)
viewport.add_stimulus(stimulus)
p = Presentation(duration=(5.0,'seconds'),viewports=[viewport])
p.go()
|
#!/usr/bin/env python
from VisionEgg.Core import *
from VisionEgg.Textures import *
from VisionEgg.AppHelper import *
filename = "orig.bmp"
if len(sys.argv) > 1:
filename = sys.argv[1]
try:
texture = TextureFromFile(filename)
except:
print "Could not open image file '%s', generating texture."%filename
texture = Texture(size=(256,256))
screen = get_default_screen()
# Set the projection so that eye coordinates are window coordinates
projection = OrthographicProjection(right=screen.size[0],top=screen.size[1])
# Create the instance of TextureStimulus
stimulus = TextureStimulus(texture=texture,projection=projection)
# Set the stimulus to have 1:1 scaling (requires projection as set above)
# This may result in clipping if texture is bigger than screen
stimulus.parameters.upper_right = ( texture.orig.size[0], texture.orig.size[1] )
stimulus.init_gl()
# Because the stimulus has a projection, we don't care what default is that the viewport uses.
lower_y = screen.size[1]/2 - texture.orig.size[1]/2
viewport = Viewport(screen,(0,lower_y),screen.size)
viewport.add_stimulus(stimulus)
p = Presentation(duration=(5.0,'seconds'),viewports=[viewport])
p.go()
Use new TextureStimulus parameter definitions.
git-svn-id: 033d166fe8e629f6cbcd3c0e2b9ad0cffc79b88b@238 3a63a0ee-37fe-0310-a504-e92b6e0a3ba7#!/usr/bin/env python
from VisionEgg.Core import *
from VisionEgg.Textures import *
from VisionEgg.AppHelper import *
filename = "orig.bmp"
if len(sys.argv) > 1:
filename = sys.argv[1]
try:
texture = TextureFromFile(filename)
except:
print "Could not open image file '%s', generating texture."%filename
texture = Texture(size=(256,256))
screen = get_default_screen()
# Set the projection so that eye coordinates are window coordinates
projection = OrthographicProjection(right=screen.size[0],top=screen.size[1])
# Create the instance of TextureStimulus
stimulus = TextureStimulus(texture=texture,projection=projection)
# Set the stimulus to have 1:1 scaling (requires projection as set above)
# This may result in clipping if texture is bigger than screen
stimulus.parameters.right = texture.orig.size[0]
stimulus.parameters.top = texture.orig.size[1]
stimulus.init_gl()
# Because the stimulus has a projection, we don't care what the
# default is that the viewport uses.
lower_y = screen.size[1]/2 - texture.orig.size[1]/2
viewport = Viewport(screen,(0,lower_y),screen.size)
viewport.add_stimulus(stimulus)
p = Presentation(duration=(5.0,'seconds'),viewports=[viewport])
p.go()
|
<commit_before>#!/usr/bin/env python
from VisionEgg.Core import *
from VisionEgg.Textures import *
from VisionEgg.AppHelper import *
filename = "orig.bmp"
if len(sys.argv) > 1:
filename = sys.argv[1]
try:
texture = TextureFromFile(filename)
except:
print "Could not open image file '%s', generating texture."%filename
texture = Texture(size=(256,256))
screen = get_default_screen()
# Set the projection so that eye coordinates are window coordinates
projection = OrthographicProjection(right=screen.size[0],top=screen.size[1])
# Create the instance of TextureStimulus
stimulus = TextureStimulus(texture=texture,projection=projection)
# Set the stimulus to have 1:1 scaling (requires projection as set above)
# This may result in clipping if texture is bigger than screen
stimulus.parameters.upper_right = ( texture.orig.size[0], texture.orig.size[1] )
stimulus.init_gl()
# Because the stimulus has a projection, we don't care what default is that the viewport uses.
lower_y = screen.size[1]/2 - texture.orig.size[1]/2
viewport = Viewport(screen,(0,lower_y),screen.size)
viewport.add_stimulus(stimulus)
p = Presentation(duration=(5.0,'seconds'),viewports=[viewport])
p.go()
<commit_msg>Use new TextureStimulus parameter definitions.
git-svn-id: 033d166fe8e629f6cbcd3c0e2b9ad0cffc79b88b@238 3a63a0ee-37fe-0310-a504-e92b6e0a3ba7<commit_after>#!/usr/bin/env python
from VisionEgg.Core import *
from VisionEgg.Textures import *
from VisionEgg.AppHelper import *
filename = "orig.bmp"
if len(sys.argv) > 1:
filename = sys.argv[1]
try:
texture = TextureFromFile(filename)
except:
print "Could not open image file '%s', generating texture."%filename
texture = Texture(size=(256,256))
screen = get_default_screen()
# Set the projection so that eye coordinates are window coordinates
projection = OrthographicProjection(right=screen.size[0],top=screen.size[1])
# Create the instance of TextureStimulus
stimulus = TextureStimulus(texture=texture,projection=projection)
# Set the stimulus to have 1:1 scaling (requires projection as set above)
# This may result in clipping if texture is bigger than screen
stimulus.parameters.right = texture.orig.size[0]
stimulus.parameters.top = texture.orig.size[1]
stimulus.init_gl()
# Because the stimulus has a projection, we don't care what the
# default is that the viewport uses.
lower_y = screen.size[1]/2 - texture.orig.size[1]/2
viewport = Viewport(screen,(0,lower_y),screen.size)
viewport.add_stimulus(stimulus)
p = Presentation(duration=(5.0,'seconds'),viewports=[viewport])
p.go()
|
91f250485b86339b13f5a073e5879292525f9015
|
nbparameterise/code_drivers/python3.py
|
nbparameterise/code_drivers/python3.py
|
import ast
import astcheck
import astsearch
from ..code import Parameter
__all__ = ['extract_definitions', 'build_definitions']
def check_fillable_node(node, path):
if isinstance(node, (ast.Num, ast.Str, ast.List)):
return
elif isinstance(node, ast.NameConstant) and (node.value in (True, False)):
return
raise astcheck.ASTMismatch(path, node, 'number, string or boolean')
definition_pattern = ast.Assign(targets=[ast.Name()], value=check_fillable_node)
def type_and_value(node):
if isinstance(node, ast.Num):
# int or float
return type(node.n), node.n
elif isinstance(node, ast.Str):
return str, node.s
elif isisntance(node, ast.List):
return list, node.s
return (bool, node.value)
def extract_definitions(cell):
cell_ast = ast.parse(cell)
for assign in astsearch.ASTPatternFinder(definition_pattern).scan_ast(cell_ast):
yield Parameter(assign.targets[0].id, *type_and_value(assign.value))
def build_definitions(inputs):
return "\n".join("{0.name} = {0.value!r}".format(i) for i in inputs)
|
import ast
import astcheck
import astsearch
from ..code import Parameter
__all__ = ['extract_definitions', 'build_definitions']
def check_list(node):
def bool_check(node):
return isinstance(node, ast.NameConstant) and (node.value in (True, False))
return all([(isinstance(n, (ast.Num, ast.Str))
or bool_check(n)) for n in node.elts])
def check_fillable_node(node, path):
if isinstance(node, (ast.Num, ast.Str)):
return
elif (isinstance(node, ast.List)
and isinstance(node.ctx, ast.Load) and check_list(node)):
return
elif isinstance(node, ast.NameConstant) and (node.value in (True, False)):
return
raise astcheck.ASTMismatch(path, node, 'number, string, list or boolean')
definition_pattern = ast.Assign(targets=[ast.Name()], value=check_fillable_node)
def type_and_value(node):
if isinstance(node, ast.Num):
# int or float
return type(node.n), node.n
elif isinstance(node, ast.Str):
return str, node.s
elif isinstance(node, ast.List):
return list, [type_and_value(n)[1] for n in node.elts]
return (bool, node.value)
def extract_definitions(cell):
cell_ast = ast.parse(cell)
for assign in astsearch.ASTPatternFinder(definition_pattern).scan_ast(cell_ast):
yield Parameter(assign.targets[0].id, *type_and_value(assign.value))
def build_definitions(inputs):
return "\n".join("{0.name} = {0.value!r}".format(i) for i in inputs)
|
Add lists as valid parameters
|
Add lists as valid parameters
|
Python
|
mit
|
takluyver/nbparameterise
|
import ast
import astcheck
import astsearch
from ..code import Parameter
__all__ = ['extract_definitions', 'build_definitions']
def check_fillable_node(node, path):
if isinstance(node, (ast.Num, ast.Str, ast.List)):
return
elif isinstance(node, ast.NameConstant) and (node.value in (True, False)):
return
raise astcheck.ASTMismatch(path, node, 'number, string or boolean')
definition_pattern = ast.Assign(targets=[ast.Name()], value=check_fillable_node)
def type_and_value(node):
if isinstance(node, ast.Num):
# int or float
return type(node.n), node.n
elif isinstance(node, ast.Str):
return str, node.s
elif isisntance(node, ast.List):
return list, node.s
return (bool, node.value)
def extract_definitions(cell):
cell_ast = ast.parse(cell)
for assign in astsearch.ASTPatternFinder(definition_pattern).scan_ast(cell_ast):
yield Parameter(assign.targets[0].id, *type_and_value(assign.value))
def build_definitions(inputs):
return "\n".join("{0.name} = {0.value!r}".format(i) for i in inputs)Add lists as valid parameters
|
import ast
import astcheck
import astsearch
from ..code import Parameter
__all__ = ['extract_definitions', 'build_definitions']
def check_list(node):
def bool_check(node):
return isinstance(node, ast.NameConstant) and (node.value in (True, False))
return all([(isinstance(n, (ast.Num, ast.Str))
or bool_check(n)) for n in node.elts])
def check_fillable_node(node, path):
if isinstance(node, (ast.Num, ast.Str)):
return
elif (isinstance(node, ast.List)
and isinstance(node.ctx, ast.Load) and check_list(node)):
return
elif isinstance(node, ast.NameConstant) and (node.value in (True, False)):
return
raise astcheck.ASTMismatch(path, node, 'number, string, list or boolean')
definition_pattern = ast.Assign(targets=[ast.Name()], value=check_fillable_node)
def type_and_value(node):
if isinstance(node, ast.Num):
# int or float
return type(node.n), node.n
elif isinstance(node, ast.Str):
return str, node.s
elif isinstance(node, ast.List):
return list, [type_and_value(n)[1] for n in node.elts]
return (bool, node.value)
def extract_definitions(cell):
cell_ast = ast.parse(cell)
for assign in astsearch.ASTPatternFinder(definition_pattern).scan_ast(cell_ast):
yield Parameter(assign.targets[0].id, *type_and_value(assign.value))
def build_definitions(inputs):
return "\n".join("{0.name} = {0.value!r}".format(i) for i in inputs)
|
<commit_before>import ast
import astcheck
import astsearch
from ..code import Parameter
__all__ = ['extract_definitions', 'build_definitions']
def check_fillable_node(node, path):
if isinstance(node, (ast.Num, ast.Str, ast.List)):
return
elif isinstance(node, ast.NameConstant) and (node.value in (True, False)):
return
raise astcheck.ASTMismatch(path, node, 'number, string or boolean')
definition_pattern = ast.Assign(targets=[ast.Name()], value=check_fillable_node)
def type_and_value(node):
if isinstance(node, ast.Num):
# int or float
return type(node.n), node.n
elif isinstance(node, ast.Str):
return str, node.s
elif isisntance(node, ast.List):
return list, node.s
return (bool, node.value)
def extract_definitions(cell):
cell_ast = ast.parse(cell)
for assign in astsearch.ASTPatternFinder(definition_pattern).scan_ast(cell_ast):
yield Parameter(assign.targets[0].id, *type_and_value(assign.value))
def build_definitions(inputs):
return "\n".join("{0.name} = {0.value!r}".format(i) for i in inputs)<commit_msg>Add lists as valid parameters<commit_after>
|
import ast
import astcheck
import astsearch
from ..code import Parameter
__all__ = ['extract_definitions', 'build_definitions']
def check_list(node):
def bool_check(node):
return isinstance(node, ast.NameConstant) and (node.value in (True, False))
return all([(isinstance(n, (ast.Num, ast.Str))
or bool_check(n)) for n in node.elts])
def check_fillable_node(node, path):
if isinstance(node, (ast.Num, ast.Str)):
return
elif (isinstance(node, ast.List)
and isinstance(node.ctx, ast.Load) and check_list(node)):
return
elif isinstance(node, ast.NameConstant) and (node.value in (True, False)):
return
raise astcheck.ASTMismatch(path, node, 'number, string, list or boolean')
definition_pattern = ast.Assign(targets=[ast.Name()], value=check_fillable_node)
def type_and_value(node):
if isinstance(node, ast.Num):
# int or float
return type(node.n), node.n
elif isinstance(node, ast.Str):
return str, node.s
elif isinstance(node, ast.List):
return list, [type_and_value(n)[1] for n in node.elts]
return (bool, node.value)
def extract_definitions(cell):
cell_ast = ast.parse(cell)
for assign in astsearch.ASTPatternFinder(definition_pattern).scan_ast(cell_ast):
yield Parameter(assign.targets[0].id, *type_and_value(assign.value))
def build_definitions(inputs):
return "\n".join("{0.name} = {0.value!r}".format(i) for i in inputs)
|
import ast
import astcheck
import astsearch
from ..code import Parameter
__all__ = ['extract_definitions', 'build_definitions']
def check_fillable_node(node, path):
if isinstance(node, (ast.Num, ast.Str, ast.List)):
return
elif isinstance(node, ast.NameConstant) and (node.value in (True, False)):
return
raise astcheck.ASTMismatch(path, node, 'number, string or boolean')
definition_pattern = ast.Assign(targets=[ast.Name()], value=check_fillable_node)
def type_and_value(node):
if isinstance(node, ast.Num):
# int or float
return type(node.n), node.n
elif isinstance(node, ast.Str):
return str, node.s
elif isisntance(node, ast.List):
return list, node.s
return (bool, node.value)
def extract_definitions(cell):
cell_ast = ast.parse(cell)
for assign in astsearch.ASTPatternFinder(definition_pattern).scan_ast(cell_ast):
yield Parameter(assign.targets[0].id, *type_and_value(assign.value))
def build_definitions(inputs):
return "\n".join("{0.name} = {0.value!r}".format(i) for i in inputs)Add lists as valid parametersimport ast
import astcheck
import astsearch
from ..code import Parameter
__all__ = ['extract_definitions', 'build_definitions']
def check_list(node):
def bool_check(node):
return isinstance(node, ast.NameConstant) and (node.value in (True, False))
return all([(isinstance(n, (ast.Num, ast.Str))
or bool_check(n)) for n in node.elts])
def check_fillable_node(node, path):
if isinstance(node, (ast.Num, ast.Str)):
return
elif (isinstance(node, ast.List)
and isinstance(node.ctx, ast.Load) and check_list(node)):
return
elif isinstance(node, ast.NameConstant) and (node.value in (True, False)):
return
raise astcheck.ASTMismatch(path, node, 'number, string, list or boolean')
definition_pattern = ast.Assign(targets=[ast.Name()], value=check_fillable_node)
def type_and_value(node):
if isinstance(node, ast.Num):
# int or float
return type(node.n), node.n
elif isinstance(node, ast.Str):
return str, node.s
elif isinstance(node, ast.List):
return list, [type_and_value(n)[1] for n in node.elts]
return (bool, node.value)
def extract_definitions(cell):
cell_ast = ast.parse(cell)
for assign in astsearch.ASTPatternFinder(definition_pattern).scan_ast(cell_ast):
yield Parameter(assign.targets[0].id, *type_and_value(assign.value))
def build_definitions(inputs):
return "\n".join("{0.name} = {0.value!r}".format(i) for i in inputs)
|
<commit_before>import ast
import astcheck
import astsearch
from ..code import Parameter
__all__ = ['extract_definitions', 'build_definitions']
def check_fillable_node(node, path):
if isinstance(node, (ast.Num, ast.Str, ast.List)):
return
elif isinstance(node, ast.NameConstant) and (node.value in (True, False)):
return
raise astcheck.ASTMismatch(path, node, 'number, string or boolean')
definition_pattern = ast.Assign(targets=[ast.Name()], value=check_fillable_node)
def type_and_value(node):
if isinstance(node, ast.Num):
# int or float
return type(node.n), node.n
elif isinstance(node, ast.Str):
return str, node.s
elif isisntance(node, ast.List):
return list, node.s
return (bool, node.value)
def extract_definitions(cell):
cell_ast = ast.parse(cell)
for assign in astsearch.ASTPatternFinder(definition_pattern).scan_ast(cell_ast):
yield Parameter(assign.targets[0].id, *type_and_value(assign.value))
def build_definitions(inputs):
return "\n".join("{0.name} = {0.value!r}".format(i) for i in inputs)<commit_msg>Add lists as valid parameters<commit_after>import ast
import astcheck
import astsearch
from ..code import Parameter
__all__ = ['extract_definitions', 'build_definitions']
def check_list(node):
def bool_check(node):
return isinstance(node, ast.NameConstant) and (node.value in (True, False))
return all([(isinstance(n, (ast.Num, ast.Str))
or bool_check(n)) for n in node.elts])
def check_fillable_node(node, path):
if isinstance(node, (ast.Num, ast.Str)):
return
elif (isinstance(node, ast.List)
and isinstance(node.ctx, ast.Load) and check_list(node)):
return
elif isinstance(node, ast.NameConstant) and (node.value in (True, False)):
return
raise astcheck.ASTMismatch(path, node, 'number, string, list or boolean')
definition_pattern = ast.Assign(targets=[ast.Name()], value=check_fillable_node)
def type_and_value(node):
if isinstance(node, ast.Num):
# int or float
return type(node.n), node.n
elif isinstance(node, ast.Str):
return str, node.s
elif isinstance(node, ast.List):
return list, [type_and_value(n)[1] for n in node.elts]
return (bool, node.value)
def extract_definitions(cell):
cell_ast = ast.parse(cell)
for assign in astsearch.ASTPatternFinder(definition_pattern).scan_ast(cell_ast):
yield Parameter(assign.targets[0].id, *type_and_value(assign.value))
def build_definitions(inputs):
return "\n".join("{0.name} = {0.value!r}".format(i) for i in inputs)
|
c6a65af70acfed68036914b983856e1cbe26a235
|
session2/translate_all.py
|
session2/translate_all.py
|
import argparse, logging, codecs
from translation_model import TranslationModel
def setup_args():
parser = argparse.ArgumentParser()
parser.add_argument('model', help='trained model')
parser.add_argument('input', help='input sentences')
parser.add_argument('out', help='translated sentences')
args = parser.parse_args()
return args
def main():
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
args = setup_args()
logging.info(args)
tm = TranslationModel(args.model)
fw_out = codecs.open(args.out, 'w', 'utf-8')
for input_line in codecs.open(args.input, 'r', 'utf-8'):
results = tm.translate(input_line.strip())
fw_out.write(results[0][1] + '\n')
fw_out.close()
if __name__ == '__main__':
main()
|
import argparse, logging, codecs
from translation_model import TranslationModel
from nltk.translate.bleu_score import sentence_bleu as bleu
def setup_args():
parser = argparse.ArgumentParser()
parser.add_argument('model', help='trained model')
parser.add_argument('input', help='input sentences')
parser.add_argument('out', help='translated sentences')
parser.add_argument('--all', dest='all', action='store_true', help='Check all translations')
args = parser.parse_args()
return args
def find_best_translation(input_line, results):
best_bleu_score = 0.0
best_index = 0
for index, result in enumerate(results):
if len(result.split()) == 0:
continue
bleu_score = bleu([input_line.split()], result.split(), weights=(1.0,))
if bleu_score > best_bleu_score:
best_bleu_score = bleu_score
best_index = index
return best_index
def main():
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
args = setup_args()
logging.info(args)
tm = TranslationModel(args.model)
fw_out = codecs.open(args.out, 'w', 'utf-8')
for input_line in codecs.open(args.input, 'r', 'utf-8'):
results = tm.translate(input_line.strip(), k = 20)
if args.all:
index = find_best_translation(input_line, results)
else:
index = 0
fw_out.write(results[0][index] + '\n')
fw_out.close()
if __name__ == '__main__':
main()
|
Add option to check among 20 translations
|
Add option to check among 20 translations
|
Python
|
bsd-3-clause
|
vineetm/dl4mt-material,vineetm/dl4mt-material,vineetm/dl4mt-material,vineetm/dl4mt-material,vineetm/dl4mt-material
|
import argparse, logging, codecs
from translation_model import TranslationModel
def setup_args():
parser = argparse.ArgumentParser()
parser.add_argument('model', help='trained model')
parser.add_argument('input', help='input sentences')
parser.add_argument('out', help='translated sentences')
args = parser.parse_args()
return args
def main():
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
args = setup_args()
logging.info(args)
tm = TranslationModel(args.model)
fw_out = codecs.open(args.out, 'w', 'utf-8')
for input_line in codecs.open(args.input, 'r', 'utf-8'):
results = tm.translate(input_line.strip())
fw_out.write(results[0][1] + '\n')
fw_out.close()
if __name__ == '__main__':
main()Add option to check among 20 translations
|
import argparse, logging, codecs
from translation_model import TranslationModel
from nltk.translate.bleu_score import sentence_bleu as bleu
def setup_args():
parser = argparse.ArgumentParser()
parser.add_argument('model', help='trained model')
parser.add_argument('input', help='input sentences')
parser.add_argument('out', help='translated sentences')
parser.add_argument('--all', dest='all', action='store_true', help='Check all translations')
args = parser.parse_args()
return args
def find_best_translation(input_line, results):
best_bleu_score = 0.0
best_index = 0
for index, result in enumerate(results):
if len(result.split()) == 0:
continue
bleu_score = bleu([input_line.split()], result.split(), weights=(1.0,))
if bleu_score > best_bleu_score:
best_bleu_score = bleu_score
best_index = index
return best_index
def main():
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
args = setup_args()
logging.info(args)
tm = TranslationModel(args.model)
fw_out = codecs.open(args.out, 'w', 'utf-8')
for input_line in codecs.open(args.input, 'r', 'utf-8'):
results = tm.translate(input_line.strip(), k = 20)
if args.all:
index = find_best_translation(input_line, results)
else:
index = 0
fw_out.write(results[0][index] + '\n')
fw_out.close()
if __name__ == '__main__':
main()
|
<commit_before>import argparse, logging, codecs
from translation_model import TranslationModel
def setup_args():
parser = argparse.ArgumentParser()
parser.add_argument('model', help='trained model')
parser.add_argument('input', help='input sentences')
parser.add_argument('out', help='translated sentences')
args = parser.parse_args()
return args
def main():
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
args = setup_args()
logging.info(args)
tm = TranslationModel(args.model)
fw_out = codecs.open(args.out, 'w', 'utf-8')
for input_line in codecs.open(args.input, 'r', 'utf-8'):
results = tm.translate(input_line.strip())
fw_out.write(results[0][1] + '\n')
fw_out.close()
if __name__ == '__main__':
main()<commit_msg>Add option to check among 20 translations<commit_after>
|
import argparse, logging, codecs
from translation_model import TranslationModel
from nltk.translate.bleu_score import sentence_bleu as bleu
def setup_args():
parser = argparse.ArgumentParser()
parser.add_argument('model', help='trained model')
parser.add_argument('input', help='input sentences')
parser.add_argument('out', help='translated sentences')
parser.add_argument('--all', dest='all', action='store_true', help='Check all translations')
args = parser.parse_args()
return args
def find_best_translation(input_line, results):
best_bleu_score = 0.0
best_index = 0
for index, result in enumerate(results):
if len(result.split()) == 0:
continue
bleu_score = bleu([input_line.split()], result.split(), weights=(1.0,))
if bleu_score > best_bleu_score:
best_bleu_score = bleu_score
best_index = index
return best_index
def main():
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
args = setup_args()
logging.info(args)
tm = TranslationModel(args.model)
fw_out = codecs.open(args.out, 'w', 'utf-8')
for input_line in codecs.open(args.input, 'r', 'utf-8'):
results = tm.translate(input_line.strip(), k = 20)
if args.all:
index = find_best_translation(input_line, results)
else:
index = 0
fw_out.write(results[0][index] + '\n')
fw_out.close()
if __name__ == '__main__':
main()
|
import argparse, logging, codecs
from translation_model import TranslationModel
def setup_args():
parser = argparse.ArgumentParser()
parser.add_argument('model', help='trained model')
parser.add_argument('input', help='input sentences')
parser.add_argument('out', help='translated sentences')
args = parser.parse_args()
return args
def main():
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
args = setup_args()
logging.info(args)
tm = TranslationModel(args.model)
fw_out = codecs.open(args.out, 'w', 'utf-8')
for input_line in codecs.open(args.input, 'r', 'utf-8'):
results = tm.translate(input_line.strip())
fw_out.write(results[0][1] + '\n')
fw_out.close()
if __name__ == '__main__':
main()Add option to check among 20 translationsimport argparse, logging, codecs
from translation_model import TranslationModel
from nltk.translate.bleu_score import sentence_bleu as bleu
def setup_args():
parser = argparse.ArgumentParser()
parser.add_argument('model', help='trained model')
parser.add_argument('input', help='input sentences')
parser.add_argument('out', help='translated sentences')
parser.add_argument('--all', dest='all', action='store_true', help='Check all translations')
args = parser.parse_args()
return args
def find_best_translation(input_line, results):
best_bleu_score = 0.0
best_index = 0
for index, result in enumerate(results):
if len(result.split()) == 0:
continue
bleu_score = bleu([input_line.split()], result.split(), weights=(1.0,))
if bleu_score > best_bleu_score:
best_bleu_score = bleu_score
best_index = index
return best_index
def main():
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
args = setup_args()
logging.info(args)
tm = TranslationModel(args.model)
fw_out = codecs.open(args.out, 'w', 'utf-8')
for input_line in codecs.open(args.input, 'r', 'utf-8'):
results = tm.translate(input_line.strip(), k = 20)
if args.all:
index = find_best_translation(input_line, results)
else:
index = 0
fw_out.write(results[0][index] + '\n')
fw_out.close()
if __name__ == '__main__':
main()
|
<commit_before>import argparse, logging, codecs
from translation_model import TranslationModel
def setup_args():
parser = argparse.ArgumentParser()
parser.add_argument('model', help='trained model')
parser.add_argument('input', help='input sentences')
parser.add_argument('out', help='translated sentences')
args = parser.parse_args()
return args
def main():
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
args = setup_args()
logging.info(args)
tm = TranslationModel(args.model)
fw_out = codecs.open(args.out, 'w', 'utf-8')
for input_line in codecs.open(args.input, 'r', 'utf-8'):
results = tm.translate(input_line.strip())
fw_out.write(results[0][1] + '\n')
fw_out.close()
if __name__ == '__main__':
main()<commit_msg>Add option to check among 20 translations<commit_after>import argparse, logging, codecs
from translation_model import TranslationModel
from nltk.translate.bleu_score import sentence_bleu as bleu
def setup_args():
parser = argparse.ArgumentParser()
parser.add_argument('model', help='trained model')
parser.add_argument('input', help='input sentences')
parser.add_argument('out', help='translated sentences')
parser.add_argument('--all', dest='all', action='store_true', help='Check all translations')
args = parser.parse_args()
return args
def find_best_translation(input_line, results):
best_bleu_score = 0.0
best_index = 0
for index, result in enumerate(results):
if len(result.split()) == 0:
continue
bleu_score = bleu([input_line.split()], result.split(), weights=(1.0,))
if bleu_score > best_bleu_score:
best_bleu_score = bleu_score
best_index = index
return best_index
def main():
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
args = setup_args()
logging.info(args)
tm = TranslationModel(args.model)
fw_out = codecs.open(args.out, 'w', 'utf-8')
for input_line in codecs.open(args.input, 'r', 'utf-8'):
results = tm.translate(input_line.strip(), k = 20)
if args.all:
index = find_best_translation(input_line, results)
else:
index = 0
fw_out.write(results[0][index] + '\n')
fw_out.close()
if __name__ == '__main__':
main()
|
4fd0225ad318d05379d95c2184c4a78ed7fadcd8
|
recipe-server/normandy/recipes/migrations/0045_update_action_hashes.py
|
recipe-server/normandy/recipes/migrations/0045_update_action_hashes.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import hashlib
from base64 import b64encode, urlsafe_b64encode
from django.db import migrations
def make_hashes_urlsafe_sri(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
digest = hashlib.sha384(data).digest()
data_hash = urlsafe_b64encode(digest)
action.implementation_hash = 'sha384-' + data_hash.decode()
action.save()
def make_hashes_sha1(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
data_hash = hashlib.sha1(data).hexdigest()
action.implementation_hash = data_hash
action.save()
class Migration(migrations.Migration):
dependencies = [
('recipes', '0044_auto_20170801_0010'),
]
operations = [
migrations.RunPython(make_hashes_urlsafe_sri, make_hashes_sha1),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import hashlib
from base64 import urlsafe_b64encode
from django.db import migrations
def make_hashes_urlsafe_sri(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
digest = hashlib.sha384(data).digest()
data_hash = urlsafe_b64encode(digest)
action.implementation_hash = 'sha384-' + data_hash.decode()
action.save()
def make_hashes_sha1(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
data_hash = hashlib.sha1(data).hexdigest()
action.implementation_hash = data_hash
action.save()
class Migration(migrations.Migration):
dependencies = [
('recipes', '0044_auto_20170801_0010'),
]
operations = [
migrations.RunPython(make_hashes_urlsafe_sri, make_hashes_sha1),
]
|
Fix lint checks in migration recipes/0045.
|
Fix lint checks in migration recipes/0045.
|
Python
|
mpl-2.0
|
mozilla/normandy,mozilla/normandy,mozilla/normandy,mozilla/normandy
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import hashlib
from base64 import b64encode, urlsafe_b64encode
from django.db import migrations
def make_hashes_urlsafe_sri(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
digest = hashlib.sha384(data).digest()
data_hash = urlsafe_b64encode(digest)
action.implementation_hash = 'sha384-' + data_hash.decode()
action.save()
def make_hashes_sha1(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
data_hash = hashlib.sha1(data).hexdigest()
action.implementation_hash = data_hash
action.save()
class Migration(migrations.Migration):
dependencies = [
('recipes', '0044_auto_20170801_0010'),
]
operations = [
migrations.RunPython(make_hashes_urlsafe_sri, make_hashes_sha1),
]
Fix lint checks in migration recipes/0045.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import hashlib
from base64 import urlsafe_b64encode
from django.db import migrations
def make_hashes_urlsafe_sri(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
digest = hashlib.sha384(data).digest()
data_hash = urlsafe_b64encode(digest)
action.implementation_hash = 'sha384-' + data_hash.decode()
action.save()
def make_hashes_sha1(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
data_hash = hashlib.sha1(data).hexdigest()
action.implementation_hash = data_hash
action.save()
class Migration(migrations.Migration):
dependencies = [
('recipes', '0044_auto_20170801_0010'),
]
operations = [
migrations.RunPython(make_hashes_urlsafe_sri, make_hashes_sha1),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import hashlib
from base64 import b64encode, urlsafe_b64encode
from django.db import migrations
def make_hashes_urlsafe_sri(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
digest = hashlib.sha384(data).digest()
data_hash = urlsafe_b64encode(digest)
action.implementation_hash = 'sha384-' + data_hash.decode()
action.save()
def make_hashes_sha1(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
data_hash = hashlib.sha1(data).hexdigest()
action.implementation_hash = data_hash
action.save()
class Migration(migrations.Migration):
dependencies = [
('recipes', '0044_auto_20170801_0010'),
]
operations = [
migrations.RunPython(make_hashes_urlsafe_sri, make_hashes_sha1),
]
<commit_msg>Fix lint checks in migration recipes/0045.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import hashlib
from base64 import urlsafe_b64encode
from django.db import migrations
def make_hashes_urlsafe_sri(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
digest = hashlib.sha384(data).digest()
data_hash = urlsafe_b64encode(digest)
action.implementation_hash = 'sha384-' + data_hash.decode()
action.save()
def make_hashes_sha1(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
data_hash = hashlib.sha1(data).hexdigest()
action.implementation_hash = data_hash
action.save()
class Migration(migrations.Migration):
dependencies = [
('recipes', '0044_auto_20170801_0010'),
]
operations = [
migrations.RunPython(make_hashes_urlsafe_sri, make_hashes_sha1),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import hashlib
from base64 import b64encode, urlsafe_b64encode
from django.db import migrations
def make_hashes_urlsafe_sri(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
digest = hashlib.sha384(data).digest()
data_hash = urlsafe_b64encode(digest)
action.implementation_hash = 'sha384-' + data_hash.decode()
action.save()
def make_hashes_sha1(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
data_hash = hashlib.sha1(data).hexdigest()
action.implementation_hash = data_hash
action.save()
class Migration(migrations.Migration):
dependencies = [
('recipes', '0044_auto_20170801_0010'),
]
operations = [
migrations.RunPython(make_hashes_urlsafe_sri, make_hashes_sha1),
]
Fix lint checks in migration recipes/0045.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import hashlib
from base64 import urlsafe_b64encode
from django.db import migrations
def make_hashes_urlsafe_sri(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
digest = hashlib.sha384(data).digest()
data_hash = urlsafe_b64encode(digest)
action.implementation_hash = 'sha384-' + data_hash.decode()
action.save()
def make_hashes_sha1(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
data_hash = hashlib.sha1(data).hexdigest()
action.implementation_hash = data_hash
action.save()
class Migration(migrations.Migration):
dependencies = [
('recipes', '0044_auto_20170801_0010'),
]
operations = [
migrations.RunPython(make_hashes_urlsafe_sri, make_hashes_sha1),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import hashlib
from base64 import b64encode, urlsafe_b64encode
from django.db import migrations
def make_hashes_urlsafe_sri(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
digest = hashlib.sha384(data).digest()
data_hash = urlsafe_b64encode(digest)
action.implementation_hash = 'sha384-' + data_hash.decode()
action.save()
def make_hashes_sha1(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
data_hash = hashlib.sha1(data).hexdigest()
action.implementation_hash = data_hash
action.save()
class Migration(migrations.Migration):
dependencies = [
('recipes', '0044_auto_20170801_0010'),
]
operations = [
migrations.RunPython(make_hashes_urlsafe_sri, make_hashes_sha1),
]
<commit_msg>Fix lint checks in migration recipes/0045.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import hashlib
from base64 import urlsafe_b64encode
from django.db import migrations
def make_hashes_urlsafe_sri(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
digest = hashlib.sha384(data).digest()
data_hash = urlsafe_b64encode(digest)
action.implementation_hash = 'sha384-' + data_hash.decode()
action.save()
def make_hashes_sha1(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
data_hash = hashlib.sha1(data).hexdigest()
action.implementation_hash = data_hash
action.save()
class Migration(migrations.Migration):
dependencies = [
('recipes', '0044_auto_20170801_0010'),
]
operations = [
migrations.RunPython(make_hashes_urlsafe_sri, make_hashes_sha1),
]
|
2febb2e53a7f0b1a0a6952e4ea31c077f45b89f8
|
hooks/post_gen_project.py
|
hooks/post_gen_project.py
|
import os
import subprocess
project_dir = '{{cookiecutter.repo_name}}'
hooks_dir = os.path.join(project_dir, '.git/hooks')
src = os.path.join(project_dir, 'src/utils/prepare-commit-msg.py')
dst = os.path.join(hooks_dir, 'prepare-commit-msg')
process = subprocess.call(['git', 'init', project_dir])
os.mkdir('{{cookiecutter.repo_name}}/.git/hooks')
os.symlink(src, dst)
|
import os
import subprocess
project_dir = '{{cookiecutter.repo_name}}'
src = os.path.join(project_dir, 'src/utils/prepare-commit-msg.py')
dst = os.path.join(project_dir, '.git/hooks/prepare-commit-msg')
process = subprocess.call(['git', 'init', project_dir])
os.symlink(src, dst)
|
Remove creation of hooks directory
|
Remove creation of hooks directory
|
Python
|
mit
|
Empiria/matador-cookiecutter
|
import os
import subprocess
project_dir = '{{cookiecutter.repo_name}}'
hooks_dir = os.path.join(project_dir, '.git/hooks')
src = os.path.join(project_dir, 'src/utils/prepare-commit-msg.py')
dst = os.path.join(hooks_dir, 'prepare-commit-msg')
process = subprocess.call(['git', 'init', project_dir])
os.mkdir('{{cookiecutter.repo_name}}/.git/hooks')
os.symlink(src, dst)
Remove creation of hooks directory
|
import os
import subprocess
project_dir = '{{cookiecutter.repo_name}}'
src = os.path.join(project_dir, 'src/utils/prepare-commit-msg.py')
dst = os.path.join(project_dir, '.git/hooks/prepare-commit-msg')
process = subprocess.call(['git', 'init', project_dir])
os.symlink(src, dst)
|
<commit_before>import os
import subprocess
project_dir = '{{cookiecutter.repo_name}}'
hooks_dir = os.path.join(project_dir, '.git/hooks')
src = os.path.join(project_dir, 'src/utils/prepare-commit-msg.py')
dst = os.path.join(hooks_dir, 'prepare-commit-msg')
process = subprocess.call(['git', 'init', project_dir])
os.mkdir('{{cookiecutter.repo_name}}/.git/hooks')
os.symlink(src, dst)
<commit_msg>Remove creation of hooks directory<commit_after>
|
import os
import subprocess
project_dir = '{{cookiecutter.repo_name}}'
src = os.path.join(project_dir, 'src/utils/prepare-commit-msg.py')
dst = os.path.join(project_dir, '.git/hooks/prepare-commit-msg')
process = subprocess.call(['git', 'init', project_dir])
os.symlink(src, dst)
|
import os
import subprocess
project_dir = '{{cookiecutter.repo_name}}'
hooks_dir = os.path.join(project_dir, '.git/hooks')
src = os.path.join(project_dir, 'src/utils/prepare-commit-msg.py')
dst = os.path.join(hooks_dir, 'prepare-commit-msg')
process = subprocess.call(['git', 'init', project_dir])
os.mkdir('{{cookiecutter.repo_name}}/.git/hooks')
os.symlink(src, dst)
Remove creation of hooks directoryimport os
import subprocess
project_dir = '{{cookiecutter.repo_name}}'
src = os.path.join(project_dir, 'src/utils/prepare-commit-msg.py')
dst = os.path.join(project_dir, '.git/hooks/prepare-commit-msg')
process = subprocess.call(['git', 'init', project_dir])
os.symlink(src, dst)
|
<commit_before>import os
import subprocess
project_dir = '{{cookiecutter.repo_name}}'
hooks_dir = os.path.join(project_dir, '.git/hooks')
src = os.path.join(project_dir, 'src/utils/prepare-commit-msg.py')
dst = os.path.join(hooks_dir, 'prepare-commit-msg')
process = subprocess.call(['git', 'init', project_dir])
os.mkdir('{{cookiecutter.repo_name}}/.git/hooks')
os.symlink(src, dst)
<commit_msg>Remove creation of hooks directory<commit_after>import os
import subprocess
project_dir = '{{cookiecutter.repo_name}}'
src = os.path.join(project_dir, 'src/utils/prepare-commit-msg.py')
dst = os.path.join(project_dir, '.git/hooks/prepare-commit-msg')
process = subprocess.call(['git', 'init', project_dir])
os.symlink(src, dst)
|
e075bd4b2ebbbfaf794a5f120605bb52238d5890
|
heufybot/modules/commands/commands.py
|
heufybot/modules/commands/commands.py
|
from twisted.plugin import IPlugin
from heufybot.moduleinterface import IBotModule
from heufybot.modules.commandinterface import BotCommand
from zope.interface import implements
class CommandsCommand(BotCommand):
implements(IPlugin, IBotModule)
name = "Commands"
def triggers(self):
return ["commands"]
def load(self):
self.help = "Commands: commands | Lists all bot commands from the modules tht are enabled for this server."
self.commandHelp = {}
def execute(self, server, source, command, params, data):
commandsList = []
for moduleName, module in self.bot.moduleHandler.loadedModules.iteritems():
if self.bot.moduleHandler.useModuleOnServer(moduleName, server) and isinstance(module, BotCommand):
commandsList += module.triggers()
msg = "Available commands: {}".format(", ".join(sorted(commandsList)))
self.bot.servers[server].outputHandler.cmdPRIVMSG(source, msg)
commandsCommand = CommandsCommand()
|
from twisted.plugin import IPlugin
from heufybot.moduleinterface import IBotModule
from heufybot.modules.commandinterface import BotCommand
from zope.interface import implements
class CommandsCommand(BotCommand):
implements(IPlugin, IBotModule)
name = "Commands"
def triggers(self):
return ["commands"]
def load(self):
self.help = "Commands: commands | Lists all bot commands from the modules that are enabled for this server."
self.commandHelp = {}
def execute(self, server, source, command, params, data):
commandsList = []
for moduleName, module in self.bot.moduleHandler.loadedModules.iteritems():
if self.bot.moduleHandler.useModuleOnServer(moduleName, server) and isinstance(module, BotCommand):
commandsList += module.triggers()
msg = "Available commands: {}".format(", ".join(sorted(commandsList)))
self.bot.servers[server].outputHandler.cmdPRIVMSG(source, msg)
commandsCommand = CommandsCommand()
|
Fix a typo in the Commands module help text
|
Fix a typo in the Commands module help text
|
Python
|
mit
|
Heufneutje/PyHeufyBot,Heufneutje/PyHeufyBot
|
from twisted.plugin import IPlugin
from heufybot.moduleinterface import IBotModule
from heufybot.modules.commandinterface import BotCommand
from zope.interface import implements
class CommandsCommand(BotCommand):
implements(IPlugin, IBotModule)
name = "Commands"
def triggers(self):
return ["commands"]
def load(self):
self.help = "Commands: commands | Lists all bot commands from the modules tht are enabled for this server."
self.commandHelp = {}
def execute(self, server, source, command, params, data):
commandsList = []
for moduleName, module in self.bot.moduleHandler.loadedModules.iteritems():
if self.bot.moduleHandler.useModuleOnServer(moduleName, server) and isinstance(module, BotCommand):
commandsList += module.triggers()
msg = "Available commands: {}".format(", ".join(sorted(commandsList)))
self.bot.servers[server].outputHandler.cmdPRIVMSG(source, msg)
commandsCommand = CommandsCommand()
Fix a typo in the Commands module help text
|
from twisted.plugin import IPlugin
from heufybot.moduleinterface import IBotModule
from heufybot.modules.commandinterface import BotCommand
from zope.interface import implements
class CommandsCommand(BotCommand):
implements(IPlugin, IBotModule)
name = "Commands"
def triggers(self):
return ["commands"]
def load(self):
self.help = "Commands: commands | Lists all bot commands from the modules that are enabled for this server."
self.commandHelp = {}
def execute(self, server, source, command, params, data):
commandsList = []
for moduleName, module in self.bot.moduleHandler.loadedModules.iteritems():
if self.bot.moduleHandler.useModuleOnServer(moduleName, server) and isinstance(module, BotCommand):
commandsList += module.triggers()
msg = "Available commands: {}".format(", ".join(sorted(commandsList)))
self.bot.servers[server].outputHandler.cmdPRIVMSG(source, msg)
commandsCommand = CommandsCommand()
|
<commit_before>from twisted.plugin import IPlugin
from heufybot.moduleinterface import IBotModule
from heufybot.modules.commandinterface import BotCommand
from zope.interface import implements
class CommandsCommand(BotCommand):
implements(IPlugin, IBotModule)
name = "Commands"
def triggers(self):
return ["commands"]
def load(self):
self.help = "Commands: commands | Lists all bot commands from the modules tht are enabled for this server."
self.commandHelp = {}
def execute(self, server, source, command, params, data):
commandsList = []
for moduleName, module in self.bot.moduleHandler.loadedModules.iteritems():
if self.bot.moduleHandler.useModuleOnServer(moduleName, server) and isinstance(module, BotCommand):
commandsList += module.triggers()
msg = "Available commands: {}".format(", ".join(sorted(commandsList)))
self.bot.servers[server].outputHandler.cmdPRIVMSG(source, msg)
commandsCommand = CommandsCommand()
<commit_msg>Fix a typo in the Commands module help text<commit_after>
|
from twisted.plugin import IPlugin
from heufybot.moduleinterface import IBotModule
from heufybot.modules.commandinterface import BotCommand
from zope.interface import implements
class CommandsCommand(BotCommand):
implements(IPlugin, IBotModule)
name = "Commands"
def triggers(self):
return ["commands"]
def load(self):
self.help = "Commands: commands | Lists all bot commands from the modules that are enabled for this server."
self.commandHelp = {}
def execute(self, server, source, command, params, data):
commandsList = []
for moduleName, module in self.bot.moduleHandler.loadedModules.iteritems():
if self.bot.moduleHandler.useModuleOnServer(moduleName, server) and isinstance(module, BotCommand):
commandsList += module.triggers()
msg = "Available commands: {}".format(", ".join(sorted(commandsList)))
self.bot.servers[server].outputHandler.cmdPRIVMSG(source, msg)
commandsCommand = CommandsCommand()
|
from twisted.plugin import IPlugin
from heufybot.moduleinterface import IBotModule
from heufybot.modules.commandinterface import BotCommand
from zope.interface import implements
class CommandsCommand(BotCommand):
implements(IPlugin, IBotModule)
name = "Commands"
def triggers(self):
return ["commands"]
def load(self):
self.help = "Commands: commands | Lists all bot commands from the modules tht are enabled for this server."
self.commandHelp = {}
def execute(self, server, source, command, params, data):
commandsList = []
for moduleName, module in self.bot.moduleHandler.loadedModules.iteritems():
if self.bot.moduleHandler.useModuleOnServer(moduleName, server) and isinstance(module, BotCommand):
commandsList += module.triggers()
msg = "Available commands: {}".format(", ".join(sorted(commandsList)))
self.bot.servers[server].outputHandler.cmdPRIVMSG(source, msg)
commandsCommand = CommandsCommand()
Fix a typo in the Commands module help textfrom twisted.plugin import IPlugin
from heufybot.moduleinterface import IBotModule
from heufybot.modules.commandinterface import BotCommand
from zope.interface import implements
class CommandsCommand(BotCommand):
implements(IPlugin, IBotModule)
name = "Commands"
def triggers(self):
return ["commands"]
def load(self):
self.help = "Commands: commands | Lists all bot commands from the modules that are enabled for this server."
self.commandHelp = {}
def execute(self, server, source, command, params, data):
commandsList = []
for moduleName, module in self.bot.moduleHandler.loadedModules.iteritems():
if self.bot.moduleHandler.useModuleOnServer(moduleName, server) and isinstance(module, BotCommand):
commandsList += module.triggers()
msg = "Available commands: {}".format(", ".join(sorted(commandsList)))
self.bot.servers[server].outputHandler.cmdPRIVMSG(source, msg)
commandsCommand = CommandsCommand()
|
<commit_before>from twisted.plugin import IPlugin
from heufybot.moduleinterface import IBotModule
from heufybot.modules.commandinterface import BotCommand
from zope.interface import implements
class CommandsCommand(BotCommand):
implements(IPlugin, IBotModule)
name = "Commands"
def triggers(self):
return ["commands"]
def load(self):
self.help = "Commands: commands | Lists all bot commands from the modules tht are enabled for this server."
self.commandHelp = {}
def execute(self, server, source, command, params, data):
commandsList = []
for moduleName, module in self.bot.moduleHandler.loadedModules.iteritems():
if self.bot.moduleHandler.useModuleOnServer(moduleName, server) and isinstance(module, BotCommand):
commandsList += module.triggers()
msg = "Available commands: {}".format(", ".join(sorted(commandsList)))
self.bot.servers[server].outputHandler.cmdPRIVMSG(source, msg)
commandsCommand = CommandsCommand()
<commit_msg>Fix a typo in the Commands module help text<commit_after>from twisted.plugin import IPlugin
from heufybot.moduleinterface import IBotModule
from heufybot.modules.commandinterface import BotCommand
from zope.interface import implements
class CommandsCommand(BotCommand):
implements(IPlugin, IBotModule)
name = "Commands"
def triggers(self):
return ["commands"]
def load(self):
self.help = "Commands: commands | Lists all bot commands from the modules that are enabled for this server."
self.commandHelp = {}
def execute(self, server, source, command, params, data):
commandsList = []
for moduleName, module in self.bot.moduleHandler.loadedModules.iteritems():
if self.bot.moduleHandler.useModuleOnServer(moduleName, server) and isinstance(module, BotCommand):
commandsList += module.triggers()
msg = "Available commands: {}".format(", ".join(sorted(commandsList)))
self.bot.servers[server].outputHandler.cmdPRIVMSG(source, msg)
commandsCommand = CommandsCommand()
|
df2d43e8ae84af605b845b3cbbb9c318f300e4e9
|
server.py
|
server.py
|
#!/usr/bin/python3
import subprocess
import sys
from flask import Flask, request
app = Flask(__name__)
SPASS=sys.argv[1] if len(sys.argv) > 1 else '/usr/local/bin/spass'
def call_spass(name, master):
p = subprocess.Popen([
SPASS,
'get',
name],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=True)
try:
out, err = p.communicate(bytes('%s\n' % master, 'utf8'), timeout=5)
except subprocess.TimeoutExpired:
p.kill()
return 'Error: spass process timedout'
return out if p.returncode == 0 else err
@app.route('/getpwd', methods=['POST'])
def getpwd():
return call_spass(request.form['name'], request.form['master'])
@app.route('/')
def index():
return app.send_static_file('index.html')
if __name__ == '__main__':
app.run()
|
#!/usr/bin/python3
import logging
import subprocess
import sys
from flask import Flask, request
app = Flask(__name__)
SPASS=sys.argv[1] if len(sys.argv) > 1 else '/usr/local/bin/spass'
@app.before_first_request
def setup():
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
def call_spass(name, master):
p = subprocess.Popen([
SPASS,
'get',
name],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=True)
try:
out, err = p.communicate(bytes('%s\n' % master, 'utf8'), timeout=5)
except subprocess.TimeoutExpired:
p.kill()
return 'Error: spass process timedout'
return (out if p.returncode == 0 else err), p.returncode
@app.route('/getpwd', methods=['POST'])
def getpwd():
val, code = call_spass(request.form['name'], request.form['master'])
app.logger.info('%s %s %d', request.remote_addr, request.form['name'], code)
return val
@app.route('/')
def index():
return app.send_static_file('index.html')
if __name__ == '__main__':
app.run()
|
Add logging for requested password and result
|
Add logging for requested password and result
|
Python
|
mit
|
iburinoc/spass-server,iburinoc/spass-server
|
#!/usr/bin/python3
import subprocess
import sys
from flask import Flask, request
app = Flask(__name__)
SPASS=sys.argv[1] if len(sys.argv) > 1 else '/usr/local/bin/spass'
def call_spass(name, master):
p = subprocess.Popen([
SPASS,
'get',
name],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=True)
try:
out, err = p.communicate(bytes('%s\n' % master, 'utf8'), timeout=5)
except subprocess.TimeoutExpired:
p.kill()
return 'Error: spass process timedout'
return out if p.returncode == 0 else err
@app.route('/getpwd', methods=['POST'])
def getpwd():
return call_spass(request.form['name'], request.form['master'])
@app.route('/')
def index():
return app.send_static_file('index.html')
if __name__ == '__main__':
app.run()
Add logging for requested password and result
|
#!/usr/bin/python3
import logging
import subprocess
import sys
from flask import Flask, request
app = Flask(__name__)
SPASS=sys.argv[1] if len(sys.argv) > 1 else '/usr/local/bin/spass'
@app.before_first_request
def setup():
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
def call_spass(name, master):
p = subprocess.Popen([
SPASS,
'get',
name],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=True)
try:
out, err = p.communicate(bytes('%s\n' % master, 'utf8'), timeout=5)
except subprocess.TimeoutExpired:
p.kill()
return 'Error: spass process timedout'
return (out if p.returncode == 0 else err), p.returncode
@app.route('/getpwd', methods=['POST'])
def getpwd():
val, code = call_spass(request.form['name'], request.form['master'])
app.logger.info('%s %s %d', request.remote_addr, request.form['name'], code)
return val
@app.route('/')
def index():
return app.send_static_file('index.html')
if __name__ == '__main__':
app.run()
|
<commit_before>#!/usr/bin/python3
import subprocess
import sys
from flask import Flask, request
app = Flask(__name__)
SPASS=sys.argv[1] if len(sys.argv) > 1 else '/usr/local/bin/spass'
def call_spass(name, master):
p = subprocess.Popen([
SPASS,
'get',
name],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=True)
try:
out, err = p.communicate(bytes('%s\n' % master, 'utf8'), timeout=5)
except subprocess.TimeoutExpired:
p.kill()
return 'Error: spass process timedout'
return out if p.returncode == 0 else err
@app.route('/getpwd', methods=['POST'])
def getpwd():
return call_spass(request.form['name'], request.form['master'])
@app.route('/')
def index():
return app.send_static_file('index.html')
if __name__ == '__main__':
app.run()
<commit_msg>Add logging for requested password and result<commit_after>
|
#!/usr/bin/python3
import logging
import subprocess
import sys
from flask import Flask, request
app = Flask(__name__)
SPASS=sys.argv[1] if len(sys.argv) > 1 else '/usr/local/bin/spass'
@app.before_first_request
def setup():
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
def call_spass(name, master):
p = subprocess.Popen([
SPASS,
'get',
name],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=True)
try:
out, err = p.communicate(bytes('%s\n' % master, 'utf8'), timeout=5)
except subprocess.TimeoutExpired:
p.kill()
return 'Error: spass process timedout'
return (out if p.returncode == 0 else err), p.returncode
@app.route('/getpwd', methods=['POST'])
def getpwd():
val, code = call_spass(request.form['name'], request.form['master'])
app.logger.info('%s %s %d', request.remote_addr, request.form['name'], code)
return val
@app.route('/')
def index():
return app.send_static_file('index.html')
if __name__ == '__main__':
app.run()
|
#!/usr/bin/python3
import subprocess
import sys
from flask import Flask, request
app = Flask(__name__)
SPASS=sys.argv[1] if len(sys.argv) > 1 else '/usr/local/bin/spass'
def call_spass(name, master):
p = subprocess.Popen([
SPASS,
'get',
name],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=True)
try:
out, err = p.communicate(bytes('%s\n' % master, 'utf8'), timeout=5)
except subprocess.TimeoutExpired:
p.kill()
return 'Error: spass process timedout'
return out if p.returncode == 0 else err
@app.route('/getpwd', methods=['POST'])
def getpwd():
return call_spass(request.form['name'], request.form['master'])
@app.route('/')
def index():
return app.send_static_file('index.html')
if __name__ == '__main__':
app.run()
Add logging for requested password and result#!/usr/bin/python3
import logging
import subprocess
import sys
from flask import Flask, request
app = Flask(__name__)
SPASS=sys.argv[1] if len(sys.argv) > 1 else '/usr/local/bin/spass'
@app.before_first_request
def setup():
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
def call_spass(name, master):
p = subprocess.Popen([
SPASS,
'get',
name],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=True)
try:
out, err = p.communicate(bytes('%s\n' % master, 'utf8'), timeout=5)
except subprocess.TimeoutExpired:
p.kill()
return 'Error: spass process timedout'
return (out if p.returncode == 0 else err), p.returncode
@app.route('/getpwd', methods=['POST'])
def getpwd():
val, code = call_spass(request.form['name'], request.form['master'])
app.logger.info('%s %s %d', request.remote_addr, request.form['name'], code)
return val
@app.route('/')
def index():
return app.send_static_file('index.html')
if __name__ == '__main__':
app.run()
|
<commit_before>#!/usr/bin/python3
import subprocess
import sys
from flask import Flask, request
app = Flask(__name__)
SPASS=sys.argv[1] if len(sys.argv) > 1 else '/usr/local/bin/spass'
def call_spass(name, master):
p = subprocess.Popen([
SPASS,
'get',
name],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=True)
try:
out, err = p.communicate(bytes('%s\n' % master, 'utf8'), timeout=5)
except subprocess.TimeoutExpired:
p.kill()
return 'Error: spass process timedout'
return out if p.returncode == 0 else err
@app.route('/getpwd', methods=['POST'])
def getpwd():
return call_spass(request.form['name'], request.form['master'])
@app.route('/')
def index():
return app.send_static_file('index.html')
if __name__ == '__main__':
app.run()
<commit_msg>Add logging for requested password and result<commit_after>#!/usr/bin/python3
import logging
import subprocess
import sys
from flask import Flask, request
app = Flask(__name__)
SPASS=sys.argv[1] if len(sys.argv) > 1 else '/usr/local/bin/spass'
@app.before_first_request
def setup():
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
def call_spass(name, master):
p = subprocess.Popen([
SPASS,
'get',
name],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=True)
try:
out, err = p.communicate(bytes('%s\n' % master, 'utf8'), timeout=5)
except subprocess.TimeoutExpired:
p.kill()
return 'Error: spass process timedout'
return (out if p.returncode == 0 else err), p.returncode
@app.route('/getpwd', methods=['POST'])
def getpwd():
val, code = call_spass(request.form['name'], request.form['master'])
app.logger.info('%s %s %d', request.remote_addr, request.form['name'], code)
return val
@app.route('/')
def index():
return app.send_static_file('index.html')
if __name__ == '__main__':
app.run()
|
467b9c6b32ea48aac1786148be53f3da59182592
|
lyli.py
|
lyli.py
|
#!flask/bin/python
from app import app
app.run(port=3004, debug=False)
|
#!flask/bin/python
import logging
from os import fork
import werkzeug.serving
from app import app
pid = fork()
if pid > 0:
print('PID: %d' % pid)
exit(0)
elif pid < 0:
print('Could not fork: %d' % pid)
exit(1)
# we are behind a proxy. log the ip of the end-user, not the proxy.
# this will also work without the proxy
werkzeug.serving.WSGIRequestHandler.address_string = lambda self: self.headers.get('x-real-ip', self.client_address[0])
# log to a file (access.log), not stderr
logging.basicConfig(filename='access.log', level=logging.DEBUG, format='%(message)s')
app.run(port=3004, debug=False, use_reloader=False)
|
Add fork and log real ip
|
Add fork and log real ip
|
Python
|
artistic-2.0
|
felixbade/lyli,felixbade/lyli,felixbade/lyli
|
#!flask/bin/python
from app import app
app.run(port=3004, debug=False)
Add fork and log real ip
|
#!flask/bin/python
import logging
from os import fork
import werkzeug.serving
from app import app
pid = fork()
if pid > 0:
print('PID: %d' % pid)
exit(0)
elif pid < 0:
print('Could not fork: %d' % pid)
exit(1)
# we are behind a proxy. log the ip of the end-user, not the proxy.
# this will also work without the proxy
werkzeug.serving.WSGIRequestHandler.address_string = lambda self: self.headers.get('x-real-ip', self.client_address[0])
# log to a file (access.log), not stderr
logging.basicConfig(filename='access.log', level=logging.DEBUG, format='%(message)s')
app.run(port=3004, debug=False, use_reloader=False)
|
<commit_before>#!flask/bin/python
from app import app
app.run(port=3004, debug=False)
<commit_msg>Add fork and log real ip<commit_after>
|
#!flask/bin/python
import logging
from os import fork
import werkzeug.serving
from app import app
pid = fork()
if pid > 0:
print('PID: %d' % pid)
exit(0)
elif pid < 0:
print('Could not fork: %d' % pid)
exit(1)
# we are behind a proxy. log the ip of the end-user, not the proxy.
# this will also work without the proxy
werkzeug.serving.WSGIRequestHandler.address_string = lambda self: self.headers.get('x-real-ip', self.client_address[0])
# log to a file (access.log), not stderr
logging.basicConfig(filename='access.log', level=logging.DEBUG, format='%(message)s')
app.run(port=3004, debug=False, use_reloader=False)
|
#!flask/bin/python
from app import app
app.run(port=3004, debug=False)
Add fork and log real ip#!flask/bin/python
import logging
from os import fork
import werkzeug.serving
from app import app
pid = fork()
if pid > 0:
print('PID: %d' % pid)
exit(0)
elif pid < 0:
print('Could not fork: %d' % pid)
exit(1)
# we are behind a proxy. log the ip of the end-user, not the proxy.
# this will also work without the proxy
werkzeug.serving.WSGIRequestHandler.address_string = lambda self: self.headers.get('x-real-ip', self.client_address[0])
# log to a file (access.log), not stderr
logging.basicConfig(filename='access.log', level=logging.DEBUG, format='%(message)s')
app.run(port=3004, debug=False, use_reloader=False)
|
<commit_before>#!flask/bin/python
from app import app
app.run(port=3004, debug=False)
<commit_msg>Add fork and log real ip<commit_after>#!flask/bin/python
import logging
from os import fork
import werkzeug.serving
from app import app
pid = fork()
if pid > 0:
print('PID: %d' % pid)
exit(0)
elif pid < 0:
print('Could not fork: %d' % pid)
exit(1)
# we are behind a proxy. log the ip of the end-user, not the proxy.
# this will also work without the proxy
werkzeug.serving.WSGIRequestHandler.address_string = lambda self: self.headers.get('x-real-ip', self.client_address[0])
# log to a file (access.log), not stderr
logging.basicConfig(filename='access.log', level=logging.DEBUG, format='%(message)s')
app.run(port=3004, debug=False, use_reloader=False)
|
d59aef4c883b9343ac1aa396ee39a0308b1207db
|
src/config.py
|
src/config.py
|
from os import environ
# the token you get from botfather
BOT_TOKEN = environ['BOT_TOKEN']
# the chat_id of the maintainer, used to alert about possible errors.
# leave empty to disable this feature.
# note that leaving empty also disables the /sendto command
MAINTAINER_ID = environ['MAINTAINER_ID']
APP_NAME = environ['APP_NAME']
PORT = int(environ.get('PORT', 5000))
WEBHOOK_URL = 'https://' + APP_NAME + '.herokuapp.com/' + BOT_TOKEN
AUTO_MSG_TIME = (22, 5)
|
from os import environ
# the token you get from botfather
BOT_TOKEN = environ['BOT_TOKEN']
# the chat_id of the maintainer, used to alert about possible errors.
# leave empty to disable this feature.
# note that leaving empty also disables the /sendto command
MAINTAINER_ID = environ['MAINTAINER_ID']
APP_NAME = environ['APP_NAME']
PORT = int(environ.get('PORT', 5000))
WEBHOOK_URL = 'https://' + APP_NAME + '.herokuapp.com/' + BOT_TOKEN
AUTO_MSG_TIME = (10, 40)
|
Set auto message time to correct time
|
Set auto message time to correct time
|
Python
|
mit
|
caiopo/quibe-bot
|
from os import environ
# the token you get from botfather
BOT_TOKEN = environ['BOT_TOKEN']
# the chat_id of the maintainer, used to alert about possible errors.
# leave empty to disable this feature.
# note that leaving empty also disables the /sendto command
MAINTAINER_ID = environ['MAINTAINER_ID']
APP_NAME = environ['APP_NAME']
PORT = int(environ.get('PORT', 5000))
WEBHOOK_URL = 'https://' + APP_NAME + '.herokuapp.com/' + BOT_TOKEN
AUTO_MSG_TIME = (22, 5)
Set auto message time to correct time
|
from os import environ
# the token you get from botfather
BOT_TOKEN = environ['BOT_TOKEN']
# the chat_id of the maintainer, used to alert about possible errors.
# leave empty to disable this feature.
# note that leaving empty also disables the /sendto command
MAINTAINER_ID = environ['MAINTAINER_ID']
APP_NAME = environ['APP_NAME']
PORT = int(environ.get('PORT', 5000))
WEBHOOK_URL = 'https://' + APP_NAME + '.herokuapp.com/' + BOT_TOKEN
AUTO_MSG_TIME = (10, 40)
|
<commit_before>from os import environ
# the token you get from botfather
BOT_TOKEN = environ['BOT_TOKEN']
# the chat_id of the maintainer, used to alert about possible errors.
# leave empty to disable this feature.
# note that leaving empty also disables the /sendto command
MAINTAINER_ID = environ['MAINTAINER_ID']
APP_NAME = environ['APP_NAME']
PORT = int(environ.get('PORT', 5000))
WEBHOOK_URL = 'https://' + APP_NAME + '.herokuapp.com/' + BOT_TOKEN
AUTO_MSG_TIME = (22, 5)
<commit_msg>Set auto message time to correct time<commit_after>
|
from os import environ
# the token you get from botfather
BOT_TOKEN = environ['BOT_TOKEN']
# the chat_id of the maintainer, used to alert about possible errors.
# leave empty to disable this feature.
# note that leaving empty also disables the /sendto command
MAINTAINER_ID = environ['MAINTAINER_ID']
APP_NAME = environ['APP_NAME']
PORT = int(environ.get('PORT', 5000))
WEBHOOK_URL = 'https://' + APP_NAME + '.herokuapp.com/' + BOT_TOKEN
AUTO_MSG_TIME = (10, 40)
|
from os import environ
# the token you get from botfather
BOT_TOKEN = environ['BOT_TOKEN']
# the chat_id of the maintainer, used to alert about possible errors.
# leave empty to disable this feature.
# note that leaving empty also disables the /sendto command
MAINTAINER_ID = environ['MAINTAINER_ID']
APP_NAME = environ['APP_NAME']
PORT = int(environ.get('PORT', 5000))
WEBHOOK_URL = 'https://' + APP_NAME + '.herokuapp.com/' + BOT_TOKEN
AUTO_MSG_TIME = (22, 5)
Set auto message time to correct timefrom os import environ
# the token you get from botfather
BOT_TOKEN = environ['BOT_TOKEN']
# the chat_id of the maintainer, used to alert about possible errors.
# leave empty to disable this feature.
# note that leaving empty also disables the /sendto command
MAINTAINER_ID = environ['MAINTAINER_ID']
APP_NAME = environ['APP_NAME']
PORT = int(environ.get('PORT', 5000))
WEBHOOK_URL = 'https://' + APP_NAME + '.herokuapp.com/' + BOT_TOKEN
AUTO_MSG_TIME = (10, 40)
|
<commit_before>from os import environ
# the token you get from botfather
BOT_TOKEN = environ['BOT_TOKEN']
# the chat_id of the maintainer, used to alert about possible errors.
# leave empty to disable this feature.
# note that leaving empty also disables the /sendto command
MAINTAINER_ID = environ['MAINTAINER_ID']
APP_NAME = environ['APP_NAME']
PORT = int(environ.get('PORT', 5000))
WEBHOOK_URL = 'https://' + APP_NAME + '.herokuapp.com/' + BOT_TOKEN
AUTO_MSG_TIME = (22, 5)
<commit_msg>Set auto message time to correct time<commit_after>from os import environ
# the token you get from botfather
BOT_TOKEN = environ['BOT_TOKEN']
# the chat_id of the maintainer, used to alert about possible errors.
# leave empty to disable this feature.
# note that leaving empty also disables the /sendto command
MAINTAINER_ID = environ['MAINTAINER_ID']
APP_NAME = environ['APP_NAME']
PORT = int(environ.get('PORT', 5000))
WEBHOOK_URL = 'https://' + APP_NAME + '.herokuapp.com/' + BOT_TOKEN
AUTO_MSG_TIME = (10, 40)
|
75b82feac8ebc12450a44c5927579e30c604f973
|
ibmcnx/doc/DataSources.py
|
ibmcnx/doc/DataSources.py
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dsidlist = []
# remove unwanted databases
for db in dbs:
dbname = db.split('(')
n = 0
for i in dbname:
# i is only the name of the DataSource, db is DataSource ID!
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dsidlist.append(str(db).replace('"',''))
n += 1
dsidlist.sort()
for dsid in dsidlist:
propertySet = AdminConfig.showAttribute(dsid,"propertySet")
propertyList = AdminConfig.list("J2EEResourceProperty", propertySet).splitlines()
print propertyList
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dsidlist = []
# remove unwanted databases
for db in dbs:
dbname = db.split('(')
n = 0
for i in dbname:
# i is only the name of the DataSource, db is DataSource ID!
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dsidlist.append(str(db).replace('"',''))
n += 1
dsidlist.sort()
for dsid in dsidlist:
propertySet = AdminConfig.showAttribute(dsid,"propertySet")
print propertySet
propertyList = AdminConfig.list("J2EEResourceProperty", propertySet).splitlines()
print propertyList
|
Create documentation of DataSource Settings
|
8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8
|
Python
|
apache-2.0
|
stoeps13/ibmcnx2,stoeps13/ibmcnx2
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dsidlist = []
# remove unwanted databases
for db in dbs:
dbname = db.split('(')
n = 0
for i in dbname:
# i is only the name of the DataSource, db is DataSource ID!
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dsidlist.append(str(db).replace('"',''))
n += 1
dsidlist.sort()
for dsid in dsidlist:
propertySet = AdminConfig.showAttribute(dsid,"propertySet")
propertyList = AdminConfig.list("J2EEResourceProperty", propertySet).splitlines()
print propertyList8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dsidlist = []
# remove unwanted databases
for db in dbs:
dbname = db.split('(')
n = 0
for i in dbname:
# i is only the name of the DataSource, db is DataSource ID!
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dsidlist.append(str(db).replace('"',''))
n += 1
dsidlist.sort()
for dsid in dsidlist:
propertySet = AdminConfig.showAttribute(dsid,"propertySet")
print propertySet
propertyList = AdminConfig.list("J2EEResourceProperty", propertySet).splitlines()
print propertyList
|
<commit_before>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dsidlist = []
# remove unwanted databases
for db in dbs:
dbname = db.split('(')
n = 0
for i in dbname:
# i is only the name of the DataSource, db is DataSource ID!
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dsidlist.append(str(db).replace('"',''))
n += 1
dsidlist.sort()
for dsid in dsidlist:
propertySet = AdminConfig.showAttribute(dsid,"propertySet")
propertyList = AdminConfig.list("J2EEResourceProperty", propertySet).splitlines()
print propertyList<commit_msg>8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8<commit_after>
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dsidlist = []
# remove unwanted databases
for db in dbs:
dbname = db.split('(')
n = 0
for i in dbname:
# i is only the name of the DataSource, db is DataSource ID!
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dsidlist.append(str(db).replace('"',''))
n += 1
dsidlist.sort()
for dsid in dsidlist:
propertySet = AdminConfig.showAttribute(dsid,"propertySet")
print propertySet
propertyList = AdminConfig.list("J2EEResourceProperty", propertySet).splitlines()
print propertyList
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dsidlist = []
# remove unwanted databases
for db in dbs:
dbname = db.split('(')
n = 0
for i in dbname:
# i is only the name of the DataSource, db is DataSource ID!
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dsidlist.append(str(db).replace('"',''))
n += 1
dsidlist.sort()
for dsid in dsidlist:
propertySet = AdminConfig.showAttribute(dsid,"propertySet")
propertyList = AdminConfig.list("J2EEResourceProperty", propertySet).splitlines()
print propertyList8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dsidlist = []
# remove unwanted databases
for db in dbs:
dbname = db.split('(')
n = 0
for i in dbname:
# i is only the name of the DataSource, db is DataSource ID!
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dsidlist.append(str(db).replace('"',''))
n += 1
dsidlist.sort()
for dsid in dsidlist:
propertySet = AdminConfig.showAttribute(dsid,"propertySet")
print propertySet
propertyList = AdminConfig.list("J2EEResourceProperty", propertySet).splitlines()
print propertyList
|
<commit_before>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dsidlist = []
# remove unwanted databases
for db in dbs:
dbname = db.split('(')
n = 0
for i in dbname:
# i is only the name of the DataSource, db is DataSource ID!
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dsidlist.append(str(db).replace('"',''))
n += 1
dsidlist.sort()
for dsid in dsidlist:
propertySet = AdminConfig.showAttribute(dsid,"propertySet")
propertyList = AdminConfig.list("J2EEResourceProperty", propertySet).splitlines()
print propertyList<commit_msg>8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8<commit_after>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dsidlist = []
# remove unwanted databases
for db in dbs:
dbname = db.split('(')
n = 0
for i in dbname:
# i is only the name of the DataSource, db is DataSource ID!
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dsidlist.append(str(db).replace('"',''))
n += 1
dsidlist.sort()
for dsid in dsidlist:
propertySet = AdminConfig.showAttribute(dsid,"propertySet")
print propertySet
propertyList = AdminConfig.list("J2EEResourceProperty", propertySet).splitlines()
print propertyList
|
b47bbb9a995fd7be2e7a49513094ad9eb065aa46
|
main.py
|
main.py
|
import praw
import humanize
from datetime import datetime
from flask import Flask
from flask import request, render_template
from prawoauth2 import PrawOAuth2Mini
from settings import (app_key, app_secret, access_token, refresh_token,
user_agent, scopes)
reddit_client = praw.Reddit(user_agent=user_agent)
oauth_helper = PrawOAuth2Mini(reddit_client, app_key=app_key,
app_secret=app_secret,
access_token=access_token,
refresh_token=refresh_token, scopes=scopes)
app = Flask(__name__)
def get_cake_day(username):
redditor = reddit_client.get_redditor(username)
try:
created_on = datetime.utcfromtimestamp(redditor.created_utc)
except praw.errors.NotFound:
return False
oauth_helper.refresh()
return(humanize.naturalday(created_on))
@app.route('/')
def index():
username = request.values.get('username')
if not username:
return render_template('index.html')
cakeday = get_cake_day(username)
return render_template('result.html', redditor=username, cakeday=cakeday)
if __name__ == '__main__':
app.run(debug=True)
|
import praw
import humanize
from datetime import datetime
from flask import Flask
from flask import request, render_template
from prawoauth2 import PrawOAuth2Mini
from settings import (app_key, app_secret, access_token, refresh_token,
user_agent, scopes)
reddit_client = praw.Reddit(user_agent=user_agent)
oauth_helper = PrawOAuth2Mini(reddit_client, app_key=app_key,
app_secret=app_secret,
access_token=access_token,
refresh_token=refresh_token, scopes=scopes)
app = Flask(__name__)
def get_cake_day(username):
redditor = reddit_client.get_redditor(username)
try:
created_on = datetime.utcfromtimestamp(redditor.created_utc)
except praw.errors.NotFound:
return False
oauth_helper.refresh()
return(humanize.naturalday(created_on))
@app.route('/')
def index():
error_message = 'Redditor does not exist or Shadowbanned'
username = request.values.get('username')
if not username:
return render_template('index.html')
cakeday = get_cake_day(username)
if cakeday:
return render_template('result.html', redditor=username,
cakeday=cakeday)
return render_template('index.html', error_message=error_message)
if __name__ == '__main__':
app.run(debug=True)
|
Update app to add an error message if user does not exist or shadowbanned
|
Update app to add an error message if user does not exist or shadowbanned
|
Python
|
mit
|
avinassh/kekday,avinassh/kekday
|
import praw
import humanize
from datetime import datetime
from flask import Flask
from flask import request, render_template
from prawoauth2 import PrawOAuth2Mini
from settings import (app_key, app_secret, access_token, refresh_token,
user_agent, scopes)
reddit_client = praw.Reddit(user_agent=user_agent)
oauth_helper = PrawOAuth2Mini(reddit_client, app_key=app_key,
app_secret=app_secret,
access_token=access_token,
refresh_token=refresh_token, scopes=scopes)
app = Flask(__name__)
def get_cake_day(username):
redditor = reddit_client.get_redditor(username)
try:
created_on = datetime.utcfromtimestamp(redditor.created_utc)
except praw.errors.NotFound:
return False
oauth_helper.refresh()
return(humanize.naturalday(created_on))
@app.route('/')
def index():
username = request.values.get('username')
if not username:
return render_template('index.html')
cakeday = get_cake_day(username)
return render_template('result.html', redditor=username, cakeday=cakeday)
if __name__ == '__main__':
app.run(debug=True)
Update app to add an error message if user does not exist or shadowbanned
|
import praw
import humanize
from datetime import datetime
from flask import Flask
from flask import request, render_template
from prawoauth2 import PrawOAuth2Mini
from settings import (app_key, app_secret, access_token, refresh_token,
user_agent, scopes)
reddit_client = praw.Reddit(user_agent=user_agent)
oauth_helper = PrawOAuth2Mini(reddit_client, app_key=app_key,
app_secret=app_secret,
access_token=access_token,
refresh_token=refresh_token, scopes=scopes)
app = Flask(__name__)
def get_cake_day(username):
redditor = reddit_client.get_redditor(username)
try:
created_on = datetime.utcfromtimestamp(redditor.created_utc)
except praw.errors.NotFound:
return False
oauth_helper.refresh()
return(humanize.naturalday(created_on))
@app.route('/')
def index():
error_message = 'Redditor does not exist or Shadowbanned'
username = request.values.get('username')
if not username:
return render_template('index.html')
cakeday = get_cake_day(username)
if cakeday:
return render_template('result.html', redditor=username,
cakeday=cakeday)
return render_template('index.html', error_message=error_message)
if __name__ == '__main__':
app.run(debug=True)
|
<commit_before>import praw
import humanize
from datetime import datetime
from flask import Flask
from flask import request, render_template
from prawoauth2 import PrawOAuth2Mini
from settings import (app_key, app_secret, access_token, refresh_token,
user_agent, scopes)
reddit_client = praw.Reddit(user_agent=user_agent)
oauth_helper = PrawOAuth2Mini(reddit_client, app_key=app_key,
app_secret=app_secret,
access_token=access_token,
refresh_token=refresh_token, scopes=scopes)
app = Flask(__name__)
def get_cake_day(username):
redditor = reddit_client.get_redditor(username)
try:
created_on = datetime.utcfromtimestamp(redditor.created_utc)
except praw.errors.NotFound:
return False
oauth_helper.refresh()
return(humanize.naturalday(created_on))
@app.route('/')
def index():
username = request.values.get('username')
if not username:
return render_template('index.html')
cakeday = get_cake_day(username)
return render_template('result.html', redditor=username, cakeday=cakeday)
if __name__ == '__main__':
app.run(debug=True)
<commit_msg>Update app to add an error message if user does not exist or shadowbanned<commit_after>
|
import praw
import humanize
from datetime import datetime
from flask import Flask
from flask import request, render_template
from prawoauth2 import PrawOAuth2Mini
from settings import (app_key, app_secret, access_token, refresh_token,
user_agent, scopes)
reddit_client = praw.Reddit(user_agent=user_agent)
oauth_helper = PrawOAuth2Mini(reddit_client, app_key=app_key,
app_secret=app_secret,
access_token=access_token,
refresh_token=refresh_token, scopes=scopes)
app = Flask(__name__)
def get_cake_day(username):
redditor = reddit_client.get_redditor(username)
try:
created_on = datetime.utcfromtimestamp(redditor.created_utc)
except praw.errors.NotFound:
return False
oauth_helper.refresh()
return(humanize.naturalday(created_on))
@app.route('/')
def index():
error_message = 'Redditor does not exist or Shadowbanned'
username = request.values.get('username')
if not username:
return render_template('index.html')
cakeday = get_cake_day(username)
if cakeday:
return render_template('result.html', redditor=username,
cakeday=cakeday)
return render_template('index.html', error_message=error_message)
if __name__ == '__main__':
app.run(debug=True)
|
import praw
import humanize
from datetime import datetime
from flask import Flask
from flask import request, render_template
from prawoauth2 import PrawOAuth2Mini
from settings import (app_key, app_secret, access_token, refresh_token,
user_agent, scopes)
reddit_client = praw.Reddit(user_agent=user_agent)
oauth_helper = PrawOAuth2Mini(reddit_client, app_key=app_key,
app_secret=app_secret,
access_token=access_token,
refresh_token=refresh_token, scopes=scopes)
app = Flask(__name__)
def get_cake_day(username):
redditor = reddit_client.get_redditor(username)
try:
created_on = datetime.utcfromtimestamp(redditor.created_utc)
except praw.errors.NotFound:
return False
oauth_helper.refresh()
return(humanize.naturalday(created_on))
@app.route('/')
def index():
username = request.values.get('username')
if not username:
return render_template('index.html')
cakeday = get_cake_day(username)
return render_template('result.html', redditor=username, cakeday=cakeday)
if __name__ == '__main__':
app.run(debug=True)
Update app to add an error message if user does not exist or shadowbannedimport praw
import humanize
from datetime import datetime
from flask import Flask
from flask import request, render_template
from prawoauth2 import PrawOAuth2Mini
from settings import (app_key, app_secret, access_token, refresh_token,
user_agent, scopes)
reddit_client = praw.Reddit(user_agent=user_agent)
oauth_helper = PrawOAuth2Mini(reddit_client, app_key=app_key,
app_secret=app_secret,
access_token=access_token,
refresh_token=refresh_token, scopes=scopes)
app = Flask(__name__)
def get_cake_day(username):
redditor = reddit_client.get_redditor(username)
try:
created_on = datetime.utcfromtimestamp(redditor.created_utc)
except praw.errors.NotFound:
return False
oauth_helper.refresh()
return(humanize.naturalday(created_on))
@app.route('/')
def index():
error_message = 'Redditor does not exist or Shadowbanned'
username = request.values.get('username')
if not username:
return render_template('index.html')
cakeday = get_cake_day(username)
if cakeday:
return render_template('result.html', redditor=username,
cakeday=cakeday)
return render_template('index.html', error_message=error_message)
if __name__ == '__main__':
app.run(debug=True)
|
<commit_before>import praw
import humanize
from datetime import datetime
from flask import Flask
from flask import request, render_template
from prawoauth2 import PrawOAuth2Mini
from settings import (app_key, app_secret, access_token, refresh_token,
user_agent, scopes)
reddit_client = praw.Reddit(user_agent=user_agent)
oauth_helper = PrawOAuth2Mini(reddit_client, app_key=app_key,
app_secret=app_secret,
access_token=access_token,
refresh_token=refresh_token, scopes=scopes)
app = Flask(__name__)
def get_cake_day(username):
redditor = reddit_client.get_redditor(username)
try:
created_on = datetime.utcfromtimestamp(redditor.created_utc)
except praw.errors.NotFound:
return False
oauth_helper.refresh()
return(humanize.naturalday(created_on))
@app.route('/')
def index():
username = request.values.get('username')
if not username:
return render_template('index.html')
cakeday = get_cake_day(username)
return render_template('result.html', redditor=username, cakeday=cakeday)
if __name__ == '__main__':
app.run(debug=True)
<commit_msg>Update app to add an error message if user does not exist or shadowbanned<commit_after>import praw
import humanize
from datetime import datetime
from flask import Flask
from flask import request, render_template
from prawoauth2 import PrawOAuth2Mini
from settings import (app_key, app_secret, access_token, refresh_token,
user_agent, scopes)
reddit_client = praw.Reddit(user_agent=user_agent)
oauth_helper = PrawOAuth2Mini(reddit_client, app_key=app_key,
app_secret=app_secret,
access_token=access_token,
refresh_token=refresh_token, scopes=scopes)
app = Flask(__name__)
def get_cake_day(username):
redditor = reddit_client.get_redditor(username)
try:
created_on = datetime.utcfromtimestamp(redditor.created_utc)
except praw.errors.NotFound:
return False
oauth_helper.refresh()
return(humanize.naturalday(created_on))
@app.route('/')
def index():
error_message = 'Redditor does not exist or Shadowbanned'
username = request.values.get('username')
if not username:
return render_template('index.html')
cakeday = get_cake_day(username)
if cakeday:
return render_template('result.html', redditor=username,
cakeday=cakeday)
return render_template('index.html', error_message=error_message)
if __name__ == '__main__':
app.run(debug=True)
|
69f19c4678b548f452f92ec29db0a3800c3c633d
|
cgi-bin/github_hook.py
|
cgi-bin/github_hook.py
|
#!/usr/bin/python
from hashlib import sha1
import hmac
import json
import os
import subprocess
import sys
def verify_signature(payload_body):
x_hub_signature = os.getenv("HTTP_X_HUB_SIGNATURE")
if not x_hub_signature:
return False
sha_name, signature = x_hub_signature.split('=')
if sha_name != 'sha1':
return False
# Never hardcode the token into real product, but now we are finishing
# homework.
SECRET_TOKEN = 'nQLr1TFpNvheiPPw9FnsUYD8vSeEV79L'
mac = hmac.new(SECRET_TOKEN, msg=payload_body, digestmod=sha1)
return hmac.compare_digest(mac.hexdigest(), signature)
print 'Content-Type: application/json\n\n'
result = {}
event = os.getenv('HTTP_X_GITHUB_EVENT')
if event != 'push':
result['success'] = False
else:
payload_body = sys.stdin.read()
result['success'] = verify_signature(payload_body)
if result['success']:
process = subprocess.Popen('git pull && git submodule update',
cwd=PATH,
shell=True)
print json.dumps(result)
|
#!/usr/bin/python
from hashlib import sha1
import hmac
import json
import os
import subprocess
import sys
import cgitb; cgitb.enable()
def verify_signature(payload_body):
x_hub_signature = os.getenv("HTTP_X_HUB_SIGNATURE")
if not x_hub_signature:
return False
sha_name, signature = x_hub_signature.split('=')
if sha_name != 'sha1':
return False
# Never hardcode the token into real product, but now we are finishing
# homework.
SECRET_TOKEN = 'nQLr1TFpNvheiPPw9FnsUYD8vSeEV79L'
mac = hmac.new(SECRET_TOKEN, msg=payload_body, digestmod=sha1)
return hmac.compare_digest(mac.hexdigest(), signature)
print 'Content-Type: application/json\n\n'
result = {}
event = os.getenv('HTTP_X_GITHUB_EVENT')
if event != 'push':
result['success'] = False
else:
payload_body = sys.stdin.read()
result['success'] = verify_signature(payload_body)
if result['success']:
path = os.path.dirname(os.path.realpath(__file__))
process = subprocess.Popen('git pull && git submodule update',
cwd=path,
shell=True)
print json.dumps(result)
|
Fix bug of error cwd when pulling.
|
Fix bug of error cwd when pulling.
|
Python
|
mit
|
zhchbin/Yagra,zhchbin/Yagra,zhchbin/Yagra
|
#!/usr/bin/python
from hashlib import sha1
import hmac
import json
import os
import subprocess
import sys
def verify_signature(payload_body):
x_hub_signature = os.getenv("HTTP_X_HUB_SIGNATURE")
if not x_hub_signature:
return False
sha_name, signature = x_hub_signature.split('=')
if sha_name != 'sha1':
return False
# Never hardcode the token into real product, but now we are finishing
# homework.
SECRET_TOKEN = 'nQLr1TFpNvheiPPw9FnsUYD8vSeEV79L'
mac = hmac.new(SECRET_TOKEN, msg=payload_body, digestmod=sha1)
return hmac.compare_digest(mac.hexdigest(), signature)
print 'Content-Type: application/json\n\n'
result = {}
event = os.getenv('HTTP_X_GITHUB_EVENT')
if event != 'push':
result['success'] = False
else:
payload_body = sys.stdin.read()
result['success'] = verify_signature(payload_body)
if result['success']:
process = subprocess.Popen('git pull && git submodule update',
cwd=PATH,
shell=True)
print json.dumps(result)
Fix bug of error cwd when pulling.
|
#!/usr/bin/python
from hashlib import sha1
import hmac
import json
import os
import subprocess
import sys
import cgitb; cgitb.enable()
def verify_signature(payload_body):
x_hub_signature = os.getenv("HTTP_X_HUB_SIGNATURE")
if not x_hub_signature:
return False
sha_name, signature = x_hub_signature.split('=')
if sha_name != 'sha1':
return False
# Never hardcode the token into real product, but now we are finishing
# homework.
SECRET_TOKEN = 'nQLr1TFpNvheiPPw9FnsUYD8vSeEV79L'
mac = hmac.new(SECRET_TOKEN, msg=payload_body, digestmod=sha1)
return hmac.compare_digest(mac.hexdigest(), signature)
print 'Content-Type: application/json\n\n'
result = {}
event = os.getenv('HTTP_X_GITHUB_EVENT')
if event != 'push':
result['success'] = False
else:
payload_body = sys.stdin.read()
result['success'] = verify_signature(payload_body)
if result['success']:
path = os.path.dirname(os.path.realpath(__file__))
process = subprocess.Popen('git pull && git submodule update',
cwd=path,
shell=True)
print json.dumps(result)
|
<commit_before>#!/usr/bin/python
from hashlib import sha1
import hmac
import json
import os
import subprocess
import sys
def verify_signature(payload_body):
x_hub_signature = os.getenv("HTTP_X_HUB_SIGNATURE")
if not x_hub_signature:
return False
sha_name, signature = x_hub_signature.split('=')
if sha_name != 'sha1':
return False
# Never hardcode the token into real product, but now we are finishing
# homework.
SECRET_TOKEN = 'nQLr1TFpNvheiPPw9FnsUYD8vSeEV79L'
mac = hmac.new(SECRET_TOKEN, msg=payload_body, digestmod=sha1)
return hmac.compare_digest(mac.hexdigest(), signature)
print 'Content-Type: application/json\n\n'
result = {}
event = os.getenv('HTTP_X_GITHUB_EVENT')
if event != 'push':
result['success'] = False
else:
payload_body = sys.stdin.read()
result['success'] = verify_signature(payload_body)
if result['success']:
process = subprocess.Popen('git pull && git submodule update',
cwd=PATH,
shell=True)
print json.dumps(result)
<commit_msg>Fix bug of error cwd when pulling.<commit_after>
|
#!/usr/bin/python
from hashlib import sha1
import hmac
import json
import os
import subprocess
import sys
import cgitb; cgitb.enable()
def verify_signature(payload_body):
x_hub_signature = os.getenv("HTTP_X_HUB_SIGNATURE")
if not x_hub_signature:
return False
sha_name, signature = x_hub_signature.split('=')
if sha_name != 'sha1':
return False
# Never hardcode the token into real product, but now we are finishing
# homework.
SECRET_TOKEN = 'nQLr1TFpNvheiPPw9FnsUYD8vSeEV79L'
mac = hmac.new(SECRET_TOKEN, msg=payload_body, digestmod=sha1)
return hmac.compare_digest(mac.hexdigest(), signature)
print 'Content-Type: application/json\n\n'
result = {}
event = os.getenv('HTTP_X_GITHUB_EVENT')
if event != 'push':
result['success'] = False
else:
payload_body = sys.stdin.read()
result['success'] = verify_signature(payload_body)
if result['success']:
path = os.path.dirname(os.path.realpath(__file__))
process = subprocess.Popen('git pull && git submodule update',
cwd=path,
shell=True)
print json.dumps(result)
|
#!/usr/bin/python
from hashlib import sha1
import hmac
import json
import os
import subprocess
import sys
def verify_signature(payload_body):
x_hub_signature = os.getenv("HTTP_X_HUB_SIGNATURE")
if not x_hub_signature:
return False
sha_name, signature = x_hub_signature.split('=')
if sha_name != 'sha1':
return False
# Never hardcode the token into real product, but now we are finishing
# homework.
SECRET_TOKEN = 'nQLr1TFpNvheiPPw9FnsUYD8vSeEV79L'
mac = hmac.new(SECRET_TOKEN, msg=payload_body, digestmod=sha1)
return hmac.compare_digest(mac.hexdigest(), signature)
print 'Content-Type: application/json\n\n'
result = {}
event = os.getenv('HTTP_X_GITHUB_EVENT')
if event != 'push':
result['success'] = False
else:
payload_body = sys.stdin.read()
result['success'] = verify_signature(payload_body)
if result['success']:
process = subprocess.Popen('git pull && git submodule update',
cwd=PATH,
shell=True)
print json.dumps(result)
Fix bug of error cwd when pulling.#!/usr/bin/python
from hashlib import sha1
import hmac
import json
import os
import subprocess
import sys
import cgitb; cgitb.enable()
def verify_signature(payload_body):
x_hub_signature = os.getenv("HTTP_X_HUB_SIGNATURE")
if not x_hub_signature:
return False
sha_name, signature = x_hub_signature.split('=')
if sha_name != 'sha1':
return False
# Never hardcode the token into real product, but now we are finishing
# homework.
SECRET_TOKEN = 'nQLr1TFpNvheiPPw9FnsUYD8vSeEV79L'
mac = hmac.new(SECRET_TOKEN, msg=payload_body, digestmod=sha1)
return hmac.compare_digest(mac.hexdigest(), signature)
print 'Content-Type: application/json\n\n'
result = {}
event = os.getenv('HTTP_X_GITHUB_EVENT')
if event != 'push':
result['success'] = False
else:
payload_body = sys.stdin.read()
result['success'] = verify_signature(payload_body)
if result['success']:
path = os.path.dirname(os.path.realpath(__file__))
process = subprocess.Popen('git pull && git submodule update',
cwd=path,
shell=True)
print json.dumps(result)
|
<commit_before>#!/usr/bin/python
from hashlib import sha1
import hmac
import json
import os
import subprocess
import sys
def verify_signature(payload_body):
x_hub_signature = os.getenv("HTTP_X_HUB_SIGNATURE")
if not x_hub_signature:
return False
sha_name, signature = x_hub_signature.split('=')
if sha_name != 'sha1':
return False
# Never hardcode the token into real product, but now we are finishing
# homework.
SECRET_TOKEN = 'nQLr1TFpNvheiPPw9FnsUYD8vSeEV79L'
mac = hmac.new(SECRET_TOKEN, msg=payload_body, digestmod=sha1)
return hmac.compare_digest(mac.hexdigest(), signature)
print 'Content-Type: application/json\n\n'
result = {}
event = os.getenv('HTTP_X_GITHUB_EVENT')
if event != 'push':
result['success'] = False
else:
payload_body = sys.stdin.read()
result['success'] = verify_signature(payload_body)
if result['success']:
process = subprocess.Popen('git pull && git submodule update',
cwd=PATH,
shell=True)
print json.dumps(result)
<commit_msg>Fix bug of error cwd when pulling.<commit_after>#!/usr/bin/python
from hashlib import sha1
import hmac
import json
import os
import subprocess
import sys
import cgitb; cgitb.enable()
def verify_signature(payload_body):
x_hub_signature = os.getenv("HTTP_X_HUB_SIGNATURE")
if not x_hub_signature:
return False
sha_name, signature = x_hub_signature.split('=')
if sha_name != 'sha1':
return False
# Never hardcode the token into real product, but now we are finishing
# homework.
SECRET_TOKEN = 'nQLr1TFpNvheiPPw9FnsUYD8vSeEV79L'
mac = hmac.new(SECRET_TOKEN, msg=payload_body, digestmod=sha1)
return hmac.compare_digest(mac.hexdigest(), signature)
print 'Content-Type: application/json\n\n'
result = {}
event = os.getenv('HTTP_X_GITHUB_EVENT')
if event != 'push':
result['success'] = False
else:
payload_body = sys.stdin.read()
result['success'] = verify_signature(payload_body)
if result['success']:
path = os.path.dirname(os.path.realpath(__file__))
process = subprocess.Popen('git pull && git submodule update',
cwd=path,
shell=True)
print json.dumps(result)
|
0bdd2df16823f129b39549a0e41adf1b29470d88
|
challenges/__init__.py
|
challenges/__init__.py
|
from os.path import dirname, basename, isfile
import glob
import sys
modules = glob.glob(dirname(__file__)+"/c*[0-9].py")
sys.path.append(dirname(__file__))
# Load all of the modules containing the challenge classes
modules = [basename(path)[:-3] for path in modules]
modules.sort() # Ensure that modules are in c1-c* order
modules = [__import__(mod) for mod in modules]
# Extract the challenge class from each module
challengeClasses = []
for i in range(1, len(modules)+1):
challengeClasses.append(getattr(modules[i-1], 'c' + str(i)))
|
from os.path import dirname, basename, isfile
import glob
import sys
modules = glob.glob(dirname(__file__)+"/c*[0-9].py")
sys.path.append(dirname(__file__))
# Load all of the modules containing the challenge classes
modules = [basename(path)[:-3] for path in modules]
modules.sort() # Ensure that modules are in c1-c* order
modules = [__import__(mod) for mod in modules]
# Extract the challenge class from each module
challengeClasses = []
for i in range(1, len(modules)+1):
try:
challengeClasses.append(getattr(modules[i-1], 'c' + str(i)))
except:
continue
|
Fix bug in loading of c* modules
|
Fix bug in loading of c* modules
|
Python
|
mit
|
GunshipPenguin/billionaire_challenge,GunshipPenguin/billionaire_challenge
|
from os.path import dirname, basename, isfile
import glob
import sys
modules = glob.glob(dirname(__file__)+"/c*[0-9].py")
sys.path.append(dirname(__file__))
# Load all of the modules containing the challenge classes
modules = [basename(path)[:-3] for path in modules]
modules.sort() # Ensure that modules are in c1-c* order
modules = [__import__(mod) for mod in modules]
# Extract the challenge class from each module
challengeClasses = []
for i in range(1, len(modules)+1):
challengeClasses.append(getattr(modules[i-1], 'c' + str(i)))
Fix bug in loading of c* modules
|
from os.path import dirname, basename, isfile
import glob
import sys
modules = glob.glob(dirname(__file__)+"/c*[0-9].py")
sys.path.append(dirname(__file__))
# Load all of the modules containing the challenge classes
modules = [basename(path)[:-3] for path in modules]
modules.sort() # Ensure that modules are in c1-c* order
modules = [__import__(mod) for mod in modules]
# Extract the challenge class from each module
challengeClasses = []
for i in range(1, len(modules)+1):
try:
challengeClasses.append(getattr(modules[i-1], 'c' + str(i)))
except:
continue
|
<commit_before>from os.path import dirname, basename, isfile
import glob
import sys
modules = glob.glob(dirname(__file__)+"/c*[0-9].py")
sys.path.append(dirname(__file__))
# Load all of the modules containing the challenge classes
modules = [basename(path)[:-3] for path in modules]
modules.sort() # Ensure that modules are in c1-c* order
modules = [__import__(mod) for mod in modules]
# Extract the challenge class from each module
challengeClasses = []
for i in range(1, len(modules)+1):
challengeClasses.append(getattr(modules[i-1], 'c' + str(i)))
<commit_msg>Fix bug in loading of c* modules<commit_after>
|
from os.path import dirname, basename, isfile
import glob
import sys
modules = glob.glob(dirname(__file__)+"/c*[0-9].py")
sys.path.append(dirname(__file__))
# Load all of the modules containing the challenge classes
modules = [basename(path)[:-3] for path in modules]
modules.sort() # Ensure that modules are in c1-c* order
modules = [__import__(mod) for mod in modules]
# Extract the challenge class from each module
challengeClasses = []
for i in range(1, len(modules)+1):
try:
challengeClasses.append(getattr(modules[i-1], 'c' + str(i)))
except:
continue
|
from os.path import dirname, basename, isfile
import glob
import sys
modules = glob.glob(dirname(__file__)+"/c*[0-9].py")
sys.path.append(dirname(__file__))
# Load all of the modules containing the challenge classes
modules = [basename(path)[:-3] for path in modules]
modules.sort() # Ensure that modules are in c1-c* order
modules = [__import__(mod) for mod in modules]
# Extract the challenge class from each module
challengeClasses = []
for i in range(1, len(modules)+1):
challengeClasses.append(getattr(modules[i-1], 'c' + str(i)))
Fix bug in loading of c* modulesfrom os.path import dirname, basename, isfile
import glob
import sys
modules = glob.glob(dirname(__file__)+"/c*[0-9].py")
sys.path.append(dirname(__file__))
# Load all of the modules containing the challenge classes
modules = [basename(path)[:-3] for path in modules]
modules.sort() # Ensure that modules are in c1-c* order
modules = [__import__(mod) for mod in modules]
# Extract the challenge class from each module
challengeClasses = []
for i in range(1, len(modules)+1):
try:
challengeClasses.append(getattr(modules[i-1], 'c' + str(i)))
except:
continue
|
<commit_before>from os.path import dirname, basename, isfile
import glob
import sys
modules = glob.glob(dirname(__file__)+"/c*[0-9].py")
sys.path.append(dirname(__file__))
# Load all of the modules containing the challenge classes
modules = [basename(path)[:-3] for path in modules]
modules.sort() # Ensure that modules are in c1-c* order
modules = [__import__(mod) for mod in modules]
# Extract the challenge class from each module
challengeClasses = []
for i in range(1, len(modules)+1):
challengeClasses.append(getattr(modules[i-1], 'c' + str(i)))
<commit_msg>Fix bug in loading of c* modules<commit_after>from os.path import dirname, basename, isfile
import glob
import sys
modules = glob.glob(dirname(__file__)+"/c*[0-9].py")
sys.path.append(dirname(__file__))
# Load all of the modules containing the challenge classes
modules = [basename(path)[:-3] for path in modules]
modules.sort() # Ensure that modules are in c1-c* order
modules = [__import__(mod) for mod in modules]
# Extract the challenge class from each module
challengeClasses = []
for i in range(1, len(modules)+1):
try:
challengeClasses.append(getattr(modules[i-1], 'c' + str(i)))
except:
continue
|
5ed364189b630d862ad9b9381e91f0e0e7268015
|
flask_mongorest/exceptions.py
|
flask_mongorest/exceptions.py
|
class MongoRestException(Exception):
def __init__(self, message):
self._message = message
def _get_message(self):
return self._message
def _set_message(self, message):
self._message = message
message = property(_get_message, _set_message)
class OperatorNotAllowed(MongoRestException):
def __init__(self, operator_name):
self.op_name = operator_name
def __unicode__(self):
return u'"'+self.op_name+'" is not a valid operator name.'
class InvalidFilter(MongoRestException):
pass
class ValidationError(MongoRestException):
pass
class UnknownFieldError(Exception):
pass
|
class MongoRestException(Exception):
pass
class OperatorNotAllowed(MongoRestException):
def __init__(self, operator_name):
self.op_name = operator_name
def __unicode__(self):
return u'"'+self.op_name+'" is not a valid operator name.'
class InvalidFilter(MongoRestException):
pass
class ValidationError(MongoRestException):
pass
class UnknownFieldError(Exception):
pass
|
Reduce MongoRestException to pass for py3 compat
|
Reduce MongoRestException to pass for py3 compat
|
Python
|
bsd-3-clause
|
elasticsales/flask-mongorest,elasticsales/flask-mongorest
|
class MongoRestException(Exception):
def __init__(self, message):
self._message = message
def _get_message(self):
return self._message
def _set_message(self, message):
self._message = message
message = property(_get_message, _set_message)
class OperatorNotAllowed(MongoRestException):
def __init__(self, operator_name):
self.op_name = operator_name
def __unicode__(self):
return u'"'+self.op_name+'" is not a valid operator name.'
class InvalidFilter(MongoRestException):
pass
class ValidationError(MongoRestException):
pass
class UnknownFieldError(Exception):
pass
Reduce MongoRestException to pass for py3 compat
|
class MongoRestException(Exception):
pass
class OperatorNotAllowed(MongoRestException):
def __init__(self, operator_name):
self.op_name = operator_name
def __unicode__(self):
return u'"'+self.op_name+'" is not a valid operator name.'
class InvalidFilter(MongoRestException):
pass
class ValidationError(MongoRestException):
pass
class UnknownFieldError(Exception):
pass
|
<commit_before>
class MongoRestException(Exception):
def __init__(self, message):
self._message = message
def _get_message(self):
return self._message
def _set_message(self, message):
self._message = message
message = property(_get_message, _set_message)
class OperatorNotAllowed(MongoRestException):
def __init__(self, operator_name):
self.op_name = operator_name
def __unicode__(self):
return u'"'+self.op_name+'" is not a valid operator name.'
class InvalidFilter(MongoRestException):
pass
class ValidationError(MongoRestException):
pass
class UnknownFieldError(Exception):
pass
<commit_msg>Reduce MongoRestException to pass for py3 compat<commit_after>
|
class MongoRestException(Exception):
pass
class OperatorNotAllowed(MongoRestException):
def __init__(self, operator_name):
self.op_name = operator_name
def __unicode__(self):
return u'"'+self.op_name+'" is not a valid operator name.'
class InvalidFilter(MongoRestException):
pass
class ValidationError(MongoRestException):
pass
class UnknownFieldError(Exception):
pass
|
class MongoRestException(Exception):
def __init__(self, message):
self._message = message
def _get_message(self):
return self._message
def _set_message(self, message):
self._message = message
message = property(_get_message, _set_message)
class OperatorNotAllowed(MongoRestException):
def __init__(self, operator_name):
self.op_name = operator_name
def __unicode__(self):
return u'"'+self.op_name+'" is not a valid operator name.'
class InvalidFilter(MongoRestException):
pass
class ValidationError(MongoRestException):
pass
class UnknownFieldError(Exception):
pass
Reduce MongoRestException to pass for py3 compat
class MongoRestException(Exception):
pass
class OperatorNotAllowed(MongoRestException):
def __init__(self, operator_name):
self.op_name = operator_name
def __unicode__(self):
return u'"'+self.op_name+'" is not a valid operator name.'
class InvalidFilter(MongoRestException):
pass
class ValidationError(MongoRestException):
pass
class UnknownFieldError(Exception):
pass
|
<commit_before>
class MongoRestException(Exception):
def __init__(self, message):
self._message = message
def _get_message(self):
return self._message
def _set_message(self, message):
self._message = message
message = property(_get_message, _set_message)
class OperatorNotAllowed(MongoRestException):
def __init__(self, operator_name):
self.op_name = operator_name
def __unicode__(self):
return u'"'+self.op_name+'" is not a valid operator name.'
class InvalidFilter(MongoRestException):
pass
class ValidationError(MongoRestException):
pass
class UnknownFieldError(Exception):
pass
<commit_msg>Reduce MongoRestException to pass for py3 compat<commit_after>
class MongoRestException(Exception):
pass
class OperatorNotAllowed(MongoRestException):
def __init__(self, operator_name):
self.op_name = operator_name
def __unicode__(self):
return u'"'+self.op_name+'" is not a valid operator name.'
class InvalidFilter(MongoRestException):
pass
class ValidationError(MongoRestException):
pass
class UnknownFieldError(Exception):
pass
|
482721b13f40e4c763c9ff861897c18c3ca9179a
|
fluentcms_googlemaps/views.py
|
fluentcms_googlemaps/views.py
|
from __future__ import unicode_literals
import json
from django.core.exceptions import ObjectDoesNotExist
from django.http import Http404, HttpResponse
from django.views.generic.detail import BaseDetailView
from .models import Marker
class MarkerDetailView(BaseDetailView):
"""
Simple view for fetching marker details.
"""
# TODO: support different object types. Perhaps through django-polymorphic?
model = Marker
pk_url_kwarg = 'id'
def get_object(self, queryset=None):
"""
Returns the object the view is displaying.
"""
if queryset is None:
queryset = self.get_queryset()
# Take a GET parameter instead of URLConf variable.
try:
pk = long(self.request.GET[self.pk_url_kwarg])
except (KeyError, ValueError):
raise Http404("Invalid Parameters")
queryset = queryset.filter(pk=pk)
try:
# Get the single item from the filtered queryset
obj = queryset.get()
except ObjectDoesNotExist as e:
raise Http404(e)
return obj
def render_to_response(self, context):
return HttpResponse(json.dumps(self.get_json_data(context)), content_type='application/json; charset=utf-8')
def get_json_data(self, context):
"""
Generate the JSON data to send back to the client.
:rtype: dict
"""
return self.object.to_dict(detailed=True)
|
from __future__ import unicode_literals
import sys
import json
from django.core.exceptions import ObjectDoesNotExist
from django.http import Http404, HttpResponse
from django.views.generic.detail import BaseDetailView
from .models import Marker
if sys.version_info[0] >= 3:
long = int
class MarkerDetailView(BaseDetailView):
"""
Simple view for fetching marker details.
"""
# TODO: support different object types. Perhaps through django-polymorphic?
model = Marker
pk_url_kwarg = 'id'
def get_object(self, queryset=None):
"""
Returns the object the view is displaying.
"""
if queryset is None:
queryset = self.get_queryset()
# Take a GET parameter instead of URLConf variable.
try:
pk = long(self.request.GET[self.pk_url_kwarg])
except (KeyError, ValueError):
raise Http404("Invalid Parameters")
queryset = queryset.filter(pk=pk)
try:
# Get the single item from the filtered queryset
obj = queryset.get()
except ObjectDoesNotExist as e:
raise Http404(e)
return obj
def render_to_response(self, context):
return HttpResponse(json.dumps(self.get_json_data(context)), content_type='application/json; charset=utf-8')
def get_json_data(self, context):
"""
Generate the JSON data to send back to the client.
:rtype: dict
"""
return self.object.to_dict(detailed=True)
|
Fix Python 3 issue with long()
|
Fix Python 3 issue with long()
|
Python
|
apache-2.0
|
edoburu/fluentcms-googlemaps,edoburu/fluentcms-googlemaps,edoburu/fluentcms-googlemaps
|
from __future__ import unicode_literals
import json
from django.core.exceptions import ObjectDoesNotExist
from django.http import Http404, HttpResponse
from django.views.generic.detail import BaseDetailView
from .models import Marker
class MarkerDetailView(BaseDetailView):
"""
Simple view for fetching marker details.
"""
# TODO: support different object types. Perhaps through django-polymorphic?
model = Marker
pk_url_kwarg = 'id'
def get_object(self, queryset=None):
"""
Returns the object the view is displaying.
"""
if queryset is None:
queryset = self.get_queryset()
# Take a GET parameter instead of URLConf variable.
try:
pk = long(self.request.GET[self.pk_url_kwarg])
except (KeyError, ValueError):
raise Http404("Invalid Parameters")
queryset = queryset.filter(pk=pk)
try:
# Get the single item from the filtered queryset
obj = queryset.get()
except ObjectDoesNotExist as e:
raise Http404(e)
return obj
def render_to_response(self, context):
return HttpResponse(json.dumps(self.get_json_data(context)), content_type='application/json; charset=utf-8')
def get_json_data(self, context):
"""
Generate the JSON data to send back to the client.
:rtype: dict
"""
return self.object.to_dict(detailed=True)
Fix Python 3 issue with long()
|
from __future__ import unicode_literals
import sys
import json
from django.core.exceptions import ObjectDoesNotExist
from django.http import Http404, HttpResponse
from django.views.generic.detail import BaseDetailView
from .models import Marker
if sys.version_info[0] >= 3:
long = int
class MarkerDetailView(BaseDetailView):
"""
Simple view for fetching marker details.
"""
# TODO: support different object types. Perhaps through django-polymorphic?
model = Marker
pk_url_kwarg = 'id'
def get_object(self, queryset=None):
"""
Returns the object the view is displaying.
"""
if queryset is None:
queryset = self.get_queryset()
# Take a GET parameter instead of URLConf variable.
try:
pk = long(self.request.GET[self.pk_url_kwarg])
except (KeyError, ValueError):
raise Http404("Invalid Parameters")
queryset = queryset.filter(pk=pk)
try:
# Get the single item from the filtered queryset
obj = queryset.get()
except ObjectDoesNotExist as e:
raise Http404(e)
return obj
def render_to_response(self, context):
return HttpResponse(json.dumps(self.get_json_data(context)), content_type='application/json; charset=utf-8')
def get_json_data(self, context):
"""
Generate the JSON data to send back to the client.
:rtype: dict
"""
return self.object.to_dict(detailed=True)
|
<commit_before>from __future__ import unicode_literals
import json
from django.core.exceptions import ObjectDoesNotExist
from django.http import Http404, HttpResponse
from django.views.generic.detail import BaseDetailView
from .models import Marker
class MarkerDetailView(BaseDetailView):
"""
Simple view for fetching marker details.
"""
# TODO: support different object types. Perhaps through django-polymorphic?
model = Marker
pk_url_kwarg = 'id'
def get_object(self, queryset=None):
"""
Returns the object the view is displaying.
"""
if queryset is None:
queryset = self.get_queryset()
# Take a GET parameter instead of URLConf variable.
try:
pk = long(self.request.GET[self.pk_url_kwarg])
except (KeyError, ValueError):
raise Http404("Invalid Parameters")
queryset = queryset.filter(pk=pk)
try:
# Get the single item from the filtered queryset
obj = queryset.get()
except ObjectDoesNotExist as e:
raise Http404(e)
return obj
def render_to_response(self, context):
return HttpResponse(json.dumps(self.get_json_data(context)), content_type='application/json; charset=utf-8')
def get_json_data(self, context):
"""
Generate the JSON data to send back to the client.
:rtype: dict
"""
return self.object.to_dict(detailed=True)
<commit_msg>Fix Python 3 issue with long()<commit_after>
|
from __future__ import unicode_literals
import sys
import json
from django.core.exceptions import ObjectDoesNotExist
from django.http import Http404, HttpResponse
from django.views.generic.detail import BaseDetailView
from .models import Marker
if sys.version_info[0] >= 3:
long = int
class MarkerDetailView(BaseDetailView):
"""
Simple view for fetching marker details.
"""
# TODO: support different object types. Perhaps through django-polymorphic?
model = Marker
pk_url_kwarg = 'id'
def get_object(self, queryset=None):
"""
Returns the object the view is displaying.
"""
if queryset is None:
queryset = self.get_queryset()
# Take a GET parameter instead of URLConf variable.
try:
pk = long(self.request.GET[self.pk_url_kwarg])
except (KeyError, ValueError):
raise Http404("Invalid Parameters")
queryset = queryset.filter(pk=pk)
try:
# Get the single item from the filtered queryset
obj = queryset.get()
except ObjectDoesNotExist as e:
raise Http404(e)
return obj
def render_to_response(self, context):
return HttpResponse(json.dumps(self.get_json_data(context)), content_type='application/json; charset=utf-8')
def get_json_data(self, context):
"""
Generate the JSON data to send back to the client.
:rtype: dict
"""
return self.object.to_dict(detailed=True)
|
from __future__ import unicode_literals
import json
from django.core.exceptions import ObjectDoesNotExist
from django.http import Http404, HttpResponse
from django.views.generic.detail import BaseDetailView
from .models import Marker
class MarkerDetailView(BaseDetailView):
"""
Simple view for fetching marker details.
"""
# TODO: support different object types. Perhaps through django-polymorphic?
model = Marker
pk_url_kwarg = 'id'
def get_object(self, queryset=None):
"""
Returns the object the view is displaying.
"""
if queryset is None:
queryset = self.get_queryset()
# Take a GET parameter instead of URLConf variable.
try:
pk = long(self.request.GET[self.pk_url_kwarg])
except (KeyError, ValueError):
raise Http404("Invalid Parameters")
queryset = queryset.filter(pk=pk)
try:
# Get the single item from the filtered queryset
obj = queryset.get()
except ObjectDoesNotExist as e:
raise Http404(e)
return obj
def render_to_response(self, context):
return HttpResponse(json.dumps(self.get_json_data(context)), content_type='application/json; charset=utf-8')
def get_json_data(self, context):
"""
Generate the JSON data to send back to the client.
:rtype: dict
"""
return self.object.to_dict(detailed=True)
Fix Python 3 issue with long()from __future__ import unicode_literals
import sys
import json
from django.core.exceptions import ObjectDoesNotExist
from django.http import Http404, HttpResponse
from django.views.generic.detail import BaseDetailView
from .models import Marker
if sys.version_info[0] >= 3:
long = int
class MarkerDetailView(BaseDetailView):
"""
Simple view for fetching marker details.
"""
# TODO: support different object types. Perhaps through django-polymorphic?
model = Marker
pk_url_kwarg = 'id'
def get_object(self, queryset=None):
"""
Returns the object the view is displaying.
"""
if queryset is None:
queryset = self.get_queryset()
# Take a GET parameter instead of URLConf variable.
try:
pk = long(self.request.GET[self.pk_url_kwarg])
except (KeyError, ValueError):
raise Http404("Invalid Parameters")
queryset = queryset.filter(pk=pk)
try:
# Get the single item from the filtered queryset
obj = queryset.get()
except ObjectDoesNotExist as e:
raise Http404(e)
return obj
def render_to_response(self, context):
return HttpResponse(json.dumps(self.get_json_data(context)), content_type='application/json; charset=utf-8')
def get_json_data(self, context):
"""
Generate the JSON data to send back to the client.
:rtype: dict
"""
return self.object.to_dict(detailed=True)
|
<commit_before>from __future__ import unicode_literals
import json
from django.core.exceptions import ObjectDoesNotExist
from django.http import Http404, HttpResponse
from django.views.generic.detail import BaseDetailView
from .models import Marker
class MarkerDetailView(BaseDetailView):
"""
Simple view for fetching marker details.
"""
# TODO: support different object types. Perhaps through django-polymorphic?
model = Marker
pk_url_kwarg = 'id'
def get_object(self, queryset=None):
"""
Returns the object the view is displaying.
"""
if queryset is None:
queryset = self.get_queryset()
# Take a GET parameter instead of URLConf variable.
try:
pk = long(self.request.GET[self.pk_url_kwarg])
except (KeyError, ValueError):
raise Http404("Invalid Parameters")
queryset = queryset.filter(pk=pk)
try:
# Get the single item from the filtered queryset
obj = queryset.get()
except ObjectDoesNotExist as e:
raise Http404(e)
return obj
def render_to_response(self, context):
return HttpResponse(json.dumps(self.get_json_data(context)), content_type='application/json; charset=utf-8')
def get_json_data(self, context):
"""
Generate the JSON data to send back to the client.
:rtype: dict
"""
return self.object.to_dict(detailed=True)
<commit_msg>Fix Python 3 issue with long()<commit_after>from __future__ import unicode_literals
import sys
import json
from django.core.exceptions import ObjectDoesNotExist
from django.http import Http404, HttpResponse
from django.views.generic.detail import BaseDetailView
from .models import Marker
if sys.version_info[0] >= 3:
long = int
class MarkerDetailView(BaseDetailView):
"""
Simple view for fetching marker details.
"""
# TODO: support different object types. Perhaps through django-polymorphic?
model = Marker
pk_url_kwarg = 'id'
def get_object(self, queryset=None):
"""
Returns the object the view is displaying.
"""
if queryset is None:
queryset = self.get_queryset()
# Take a GET parameter instead of URLConf variable.
try:
pk = long(self.request.GET[self.pk_url_kwarg])
except (KeyError, ValueError):
raise Http404("Invalid Parameters")
queryset = queryset.filter(pk=pk)
try:
# Get the single item from the filtered queryset
obj = queryset.get()
except ObjectDoesNotExist as e:
raise Http404(e)
return obj
def render_to_response(self, context):
return HttpResponse(json.dumps(self.get_json_data(context)), content_type='application/json; charset=utf-8')
def get_json_data(self, context):
"""
Generate the JSON data to send back to the client.
:rtype: dict
"""
return self.object.to_dict(detailed=True)
|
06aba23adbb281ff64a82059a07ffde95c361e6d
|
tests/app/test_cloudfoundry_config.py
|
tests/app/test_cloudfoundry_config.py
|
import json
import os
import pytest
from app.cloudfoundry_config import (
extract_cloudfoundry_config,
set_config_env_vars,
)
@pytest.fixture
def postgres_config():
return [
{
'credentials': {
'uri': 'postgres uri'
}
}
]
@pytest.fixture
def cloudfoundry_config(postgres_config):
return {
'postgres': postgres_config,
'user-provided': []
}
@pytest.fixture
def cloudfoundry_environ(os_environ, cloudfoundry_config):
os.environ['VCAP_SERVICES'] = json.dumps(cloudfoundry_config)
os.environ['VCAP_APPLICATION'] = '{"space_name": "🚀🌌"}'
def test_extract_cloudfoundry_config_populates_other_vars(cloudfoundry_environ):
extract_cloudfoundry_config()
assert os.environ['SQLALCHEMY_DATABASE_URI'] == 'postgresql uri'
assert os.environ['NOTIFY_ENVIRONMENT'] == '🚀🌌'
assert os.environ['NOTIFY_LOG_PATH'] == '/home/vcap/logs/app.log'
def test_set_config_env_vars_ignores_unknown_configs(cloudfoundry_config, cloudfoundry_environ):
cloudfoundry_config['foo'] = {'credentials': {'foo': 'foo'}}
cloudfoundry_config['user-provided'].append({
'name': 'bar', 'credentials': {'bar': 'bar'}
})
set_config_env_vars(cloudfoundry_config)
assert 'foo' not in os.environ
assert 'bar' not in os.environ
|
import json
import os
import pytest
from app.cloudfoundry_config import (
extract_cloudfoundry_config,
set_config_env_vars,
)
@pytest.fixture
def cloudfoundry_config():
return {
'postgres': [{
'credentials': {
'uri': 'postgres uri'
}
}],
'user-provided': []
}
@pytest.fixture
def cloudfoundry_environ(os_environ, cloudfoundry_config):
os.environ['VCAP_SERVICES'] = json.dumps(cloudfoundry_config)
os.environ['VCAP_APPLICATION'] = '{"space_name": "🚀🌌"}'
def test_extract_cloudfoundry_config_populates_other_vars(cloudfoundry_environ):
extract_cloudfoundry_config()
assert os.environ['SQLALCHEMY_DATABASE_URI'] == 'postgresql uri'
assert os.environ['NOTIFY_ENVIRONMENT'] == '🚀🌌'
assert os.environ['NOTIFY_LOG_PATH'] == '/home/vcap/logs/app.log'
def test_set_config_env_vars_ignores_unknown_configs(cloudfoundry_config, cloudfoundry_environ):
cloudfoundry_config['foo'] = {'credentials': {'foo': 'foo'}}
cloudfoundry_config['user-provided'].append({
'name': 'bar', 'credentials': {'bar': 'bar'}
})
set_config_env_vars(cloudfoundry_config)
assert 'foo' not in os.environ
assert 'bar' not in os.environ
|
Remove redundant postgres CloudFoundry fixture
|
Remove redundant postgres CloudFoundry fixture
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
import json
import os
import pytest
from app.cloudfoundry_config import (
extract_cloudfoundry_config,
set_config_env_vars,
)
@pytest.fixture
def postgres_config():
return [
{
'credentials': {
'uri': 'postgres uri'
}
}
]
@pytest.fixture
def cloudfoundry_config(postgres_config):
return {
'postgres': postgres_config,
'user-provided': []
}
@pytest.fixture
def cloudfoundry_environ(os_environ, cloudfoundry_config):
os.environ['VCAP_SERVICES'] = json.dumps(cloudfoundry_config)
os.environ['VCAP_APPLICATION'] = '{"space_name": "🚀🌌"}'
def test_extract_cloudfoundry_config_populates_other_vars(cloudfoundry_environ):
extract_cloudfoundry_config()
assert os.environ['SQLALCHEMY_DATABASE_URI'] == 'postgresql uri'
assert os.environ['NOTIFY_ENVIRONMENT'] == '🚀🌌'
assert os.environ['NOTIFY_LOG_PATH'] == '/home/vcap/logs/app.log'
def test_set_config_env_vars_ignores_unknown_configs(cloudfoundry_config, cloudfoundry_environ):
cloudfoundry_config['foo'] = {'credentials': {'foo': 'foo'}}
cloudfoundry_config['user-provided'].append({
'name': 'bar', 'credentials': {'bar': 'bar'}
})
set_config_env_vars(cloudfoundry_config)
assert 'foo' not in os.environ
assert 'bar' not in os.environ
Remove redundant postgres CloudFoundry fixture
|
import json
import os
import pytest
from app.cloudfoundry_config import (
extract_cloudfoundry_config,
set_config_env_vars,
)
@pytest.fixture
def cloudfoundry_config():
return {
'postgres': [{
'credentials': {
'uri': 'postgres uri'
}
}],
'user-provided': []
}
@pytest.fixture
def cloudfoundry_environ(os_environ, cloudfoundry_config):
os.environ['VCAP_SERVICES'] = json.dumps(cloudfoundry_config)
os.environ['VCAP_APPLICATION'] = '{"space_name": "🚀🌌"}'
def test_extract_cloudfoundry_config_populates_other_vars(cloudfoundry_environ):
extract_cloudfoundry_config()
assert os.environ['SQLALCHEMY_DATABASE_URI'] == 'postgresql uri'
assert os.environ['NOTIFY_ENVIRONMENT'] == '🚀🌌'
assert os.environ['NOTIFY_LOG_PATH'] == '/home/vcap/logs/app.log'
def test_set_config_env_vars_ignores_unknown_configs(cloudfoundry_config, cloudfoundry_environ):
cloudfoundry_config['foo'] = {'credentials': {'foo': 'foo'}}
cloudfoundry_config['user-provided'].append({
'name': 'bar', 'credentials': {'bar': 'bar'}
})
set_config_env_vars(cloudfoundry_config)
assert 'foo' not in os.environ
assert 'bar' not in os.environ
|
<commit_before>import json
import os
import pytest
from app.cloudfoundry_config import (
extract_cloudfoundry_config,
set_config_env_vars,
)
@pytest.fixture
def postgres_config():
return [
{
'credentials': {
'uri': 'postgres uri'
}
}
]
@pytest.fixture
def cloudfoundry_config(postgres_config):
return {
'postgres': postgres_config,
'user-provided': []
}
@pytest.fixture
def cloudfoundry_environ(os_environ, cloudfoundry_config):
os.environ['VCAP_SERVICES'] = json.dumps(cloudfoundry_config)
os.environ['VCAP_APPLICATION'] = '{"space_name": "🚀🌌"}'
def test_extract_cloudfoundry_config_populates_other_vars(cloudfoundry_environ):
extract_cloudfoundry_config()
assert os.environ['SQLALCHEMY_DATABASE_URI'] == 'postgresql uri'
assert os.environ['NOTIFY_ENVIRONMENT'] == '🚀🌌'
assert os.environ['NOTIFY_LOG_PATH'] == '/home/vcap/logs/app.log'
def test_set_config_env_vars_ignores_unknown_configs(cloudfoundry_config, cloudfoundry_environ):
cloudfoundry_config['foo'] = {'credentials': {'foo': 'foo'}}
cloudfoundry_config['user-provided'].append({
'name': 'bar', 'credentials': {'bar': 'bar'}
})
set_config_env_vars(cloudfoundry_config)
assert 'foo' not in os.environ
assert 'bar' not in os.environ
<commit_msg>Remove redundant postgres CloudFoundry fixture<commit_after>
|
import json
import os
import pytest
from app.cloudfoundry_config import (
extract_cloudfoundry_config,
set_config_env_vars,
)
@pytest.fixture
def cloudfoundry_config():
return {
'postgres': [{
'credentials': {
'uri': 'postgres uri'
}
}],
'user-provided': []
}
@pytest.fixture
def cloudfoundry_environ(os_environ, cloudfoundry_config):
os.environ['VCAP_SERVICES'] = json.dumps(cloudfoundry_config)
os.environ['VCAP_APPLICATION'] = '{"space_name": "🚀🌌"}'
def test_extract_cloudfoundry_config_populates_other_vars(cloudfoundry_environ):
extract_cloudfoundry_config()
assert os.environ['SQLALCHEMY_DATABASE_URI'] == 'postgresql uri'
assert os.environ['NOTIFY_ENVIRONMENT'] == '🚀🌌'
assert os.environ['NOTIFY_LOG_PATH'] == '/home/vcap/logs/app.log'
def test_set_config_env_vars_ignores_unknown_configs(cloudfoundry_config, cloudfoundry_environ):
cloudfoundry_config['foo'] = {'credentials': {'foo': 'foo'}}
cloudfoundry_config['user-provided'].append({
'name': 'bar', 'credentials': {'bar': 'bar'}
})
set_config_env_vars(cloudfoundry_config)
assert 'foo' not in os.environ
assert 'bar' not in os.environ
|
import json
import os
import pytest
from app.cloudfoundry_config import (
extract_cloudfoundry_config,
set_config_env_vars,
)
@pytest.fixture
def postgres_config():
return [
{
'credentials': {
'uri': 'postgres uri'
}
}
]
@pytest.fixture
def cloudfoundry_config(postgres_config):
return {
'postgres': postgres_config,
'user-provided': []
}
@pytest.fixture
def cloudfoundry_environ(os_environ, cloudfoundry_config):
os.environ['VCAP_SERVICES'] = json.dumps(cloudfoundry_config)
os.environ['VCAP_APPLICATION'] = '{"space_name": "🚀🌌"}'
def test_extract_cloudfoundry_config_populates_other_vars(cloudfoundry_environ):
extract_cloudfoundry_config()
assert os.environ['SQLALCHEMY_DATABASE_URI'] == 'postgresql uri'
assert os.environ['NOTIFY_ENVIRONMENT'] == '🚀🌌'
assert os.environ['NOTIFY_LOG_PATH'] == '/home/vcap/logs/app.log'
def test_set_config_env_vars_ignores_unknown_configs(cloudfoundry_config, cloudfoundry_environ):
cloudfoundry_config['foo'] = {'credentials': {'foo': 'foo'}}
cloudfoundry_config['user-provided'].append({
'name': 'bar', 'credentials': {'bar': 'bar'}
})
set_config_env_vars(cloudfoundry_config)
assert 'foo' not in os.environ
assert 'bar' not in os.environ
Remove redundant postgres CloudFoundry fixtureimport json
import os
import pytest
from app.cloudfoundry_config import (
extract_cloudfoundry_config,
set_config_env_vars,
)
@pytest.fixture
def cloudfoundry_config():
return {
'postgres': [{
'credentials': {
'uri': 'postgres uri'
}
}],
'user-provided': []
}
@pytest.fixture
def cloudfoundry_environ(os_environ, cloudfoundry_config):
os.environ['VCAP_SERVICES'] = json.dumps(cloudfoundry_config)
os.environ['VCAP_APPLICATION'] = '{"space_name": "🚀🌌"}'
def test_extract_cloudfoundry_config_populates_other_vars(cloudfoundry_environ):
extract_cloudfoundry_config()
assert os.environ['SQLALCHEMY_DATABASE_URI'] == 'postgresql uri'
assert os.environ['NOTIFY_ENVIRONMENT'] == '🚀🌌'
assert os.environ['NOTIFY_LOG_PATH'] == '/home/vcap/logs/app.log'
def test_set_config_env_vars_ignores_unknown_configs(cloudfoundry_config, cloudfoundry_environ):
cloudfoundry_config['foo'] = {'credentials': {'foo': 'foo'}}
cloudfoundry_config['user-provided'].append({
'name': 'bar', 'credentials': {'bar': 'bar'}
})
set_config_env_vars(cloudfoundry_config)
assert 'foo' not in os.environ
assert 'bar' not in os.environ
|
<commit_before>import json
import os
import pytest
from app.cloudfoundry_config import (
extract_cloudfoundry_config,
set_config_env_vars,
)
@pytest.fixture
def postgres_config():
return [
{
'credentials': {
'uri': 'postgres uri'
}
}
]
@pytest.fixture
def cloudfoundry_config(postgres_config):
return {
'postgres': postgres_config,
'user-provided': []
}
@pytest.fixture
def cloudfoundry_environ(os_environ, cloudfoundry_config):
os.environ['VCAP_SERVICES'] = json.dumps(cloudfoundry_config)
os.environ['VCAP_APPLICATION'] = '{"space_name": "🚀🌌"}'
def test_extract_cloudfoundry_config_populates_other_vars(cloudfoundry_environ):
extract_cloudfoundry_config()
assert os.environ['SQLALCHEMY_DATABASE_URI'] == 'postgresql uri'
assert os.environ['NOTIFY_ENVIRONMENT'] == '🚀🌌'
assert os.environ['NOTIFY_LOG_PATH'] == '/home/vcap/logs/app.log'
def test_set_config_env_vars_ignores_unknown_configs(cloudfoundry_config, cloudfoundry_environ):
cloudfoundry_config['foo'] = {'credentials': {'foo': 'foo'}}
cloudfoundry_config['user-provided'].append({
'name': 'bar', 'credentials': {'bar': 'bar'}
})
set_config_env_vars(cloudfoundry_config)
assert 'foo' not in os.environ
assert 'bar' not in os.environ
<commit_msg>Remove redundant postgres CloudFoundry fixture<commit_after>import json
import os
import pytest
from app.cloudfoundry_config import (
extract_cloudfoundry_config,
set_config_env_vars,
)
@pytest.fixture
def cloudfoundry_config():
return {
'postgres': [{
'credentials': {
'uri': 'postgres uri'
}
}],
'user-provided': []
}
@pytest.fixture
def cloudfoundry_environ(os_environ, cloudfoundry_config):
os.environ['VCAP_SERVICES'] = json.dumps(cloudfoundry_config)
os.environ['VCAP_APPLICATION'] = '{"space_name": "🚀🌌"}'
def test_extract_cloudfoundry_config_populates_other_vars(cloudfoundry_environ):
extract_cloudfoundry_config()
assert os.environ['SQLALCHEMY_DATABASE_URI'] == 'postgresql uri'
assert os.environ['NOTIFY_ENVIRONMENT'] == '🚀🌌'
assert os.environ['NOTIFY_LOG_PATH'] == '/home/vcap/logs/app.log'
def test_set_config_env_vars_ignores_unknown_configs(cloudfoundry_config, cloudfoundry_environ):
cloudfoundry_config['foo'] = {'credentials': {'foo': 'foo'}}
cloudfoundry_config['user-provided'].append({
'name': 'bar', 'credentials': {'bar': 'bar'}
})
set_config_env_vars(cloudfoundry_config)
assert 'foo' not in os.environ
assert 'bar' not in os.environ
|
e14ceda6370b506b80f65d45abd36c9f728e5699
|
pitchfork/manage_globals/forms.py
|
pitchfork/manage_globals/forms.py
|
# Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask.ext.wtf import Form
from wtforms import TextField, SelectField, IntegerField, BooleanField,\
PasswordField, TextAreaField, SubmitField, HiddenField, RadioField
from wtforms import validators
class VerbSet(Form):
name = TextField('Verb:', validators=[validators.required()])
active = BooleanField('Active:')
submit = SubmitField('Submit')
class DCSet(Form):
name = TextField('Name:', validators=[validators.required()])
abbreviation = TextField(
'Abbreviation:',
validators=[validators.required()]
)
submit = SubmitField('Submit')
|
# Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask.ext.wtf import Form
from wtforms import fields, validators
class VerbSet(Form):
name = fields.TextField('Verb:', validators=[validators.required()])
active = fields.BooleanField('Active:')
submit = fields.SubmitField('Submit')
class DCSet(Form):
name = fields.TextField('Name:', validators=[validators.required()])
abbreviation = fields.TextField(
'Abbreviation:',
validators=[validators.required()]
)
submit = fields.SubmitField('Submit')
|
Rework imports so not having to specify every type of field. Alter field definitions to reflect change
|
Rework imports so not having to specify every type of field. Alter field definitions to reflect change
|
Python
|
apache-2.0
|
rackerlabs/pitchfork,oldarmyc/pitchfork,rackerlabs/pitchfork,oldarmyc/pitchfork,rackerlabs/pitchfork,oldarmyc/pitchfork
|
# Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask.ext.wtf import Form
from wtforms import TextField, SelectField, IntegerField, BooleanField,\
PasswordField, TextAreaField, SubmitField, HiddenField, RadioField
from wtforms import validators
class VerbSet(Form):
name = TextField('Verb:', validators=[validators.required()])
active = BooleanField('Active:')
submit = SubmitField('Submit')
class DCSet(Form):
name = TextField('Name:', validators=[validators.required()])
abbreviation = TextField(
'Abbreviation:',
validators=[validators.required()]
)
submit = SubmitField('Submit')
Rework imports so not having to specify every type of field. Alter field definitions to reflect change
|
# Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask.ext.wtf import Form
from wtforms import fields, validators
class VerbSet(Form):
name = fields.TextField('Verb:', validators=[validators.required()])
active = fields.BooleanField('Active:')
submit = fields.SubmitField('Submit')
class DCSet(Form):
name = fields.TextField('Name:', validators=[validators.required()])
abbreviation = fields.TextField(
'Abbreviation:',
validators=[validators.required()]
)
submit = fields.SubmitField('Submit')
|
<commit_before># Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask.ext.wtf import Form
from wtforms import TextField, SelectField, IntegerField, BooleanField,\
PasswordField, TextAreaField, SubmitField, HiddenField, RadioField
from wtforms import validators
class VerbSet(Form):
name = TextField('Verb:', validators=[validators.required()])
active = BooleanField('Active:')
submit = SubmitField('Submit')
class DCSet(Form):
name = TextField('Name:', validators=[validators.required()])
abbreviation = TextField(
'Abbreviation:',
validators=[validators.required()]
)
submit = SubmitField('Submit')
<commit_msg>Rework imports so not having to specify every type of field. Alter field definitions to reflect change<commit_after>
|
# Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask.ext.wtf import Form
from wtforms import fields, validators
class VerbSet(Form):
name = fields.TextField('Verb:', validators=[validators.required()])
active = fields.BooleanField('Active:')
submit = fields.SubmitField('Submit')
class DCSet(Form):
name = fields.TextField('Name:', validators=[validators.required()])
abbreviation = fields.TextField(
'Abbreviation:',
validators=[validators.required()]
)
submit = fields.SubmitField('Submit')
|
# Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask.ext.wtf import Form
from wtforms import TextField, SelectField, IntegerField, BooleanField,\
PasswordField, TextAreaField, SubmitField, HiddenField, RadioField
from wtforms import validators
class VerbSet(Form):
name = TextField('Verb:', validators=[validators.required()])
active = BooleanField('Active:')
submit = SubmitField('Submit')
class DCSet(Form):
name = TextField('Name:', validators=[validators.required()])
abbreviation = TextField(
'Abbreviation:',
validators=[validators.required()]
)
submit = SubmitField('Submit')
Rework imports so not having to specify every type of field. Alter field definitions to reflect change# Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask.ext.wtf import Form
from wtforms import fields, validators
class VerbSet(Form):
name = fields.TextField('Verb:', validators=[validators.required()])
active = fields.BooleanField('Active:')
submit = fields.SubmitField('Submit')
class DCSet(Form):
name = fields.TextField('Name:', validators=[validators.required()])
abbreviation = fields.TextField(
'Abbreviation:',
validators=[validators.required()]
)
submit = fields.SubmitField('Submit')
|
<commit_before># Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask.ext.wtf import Form
from wtforms import TextField, SelectField, IntegerField, BooleanField,\
PasswordField, TextAreaField, SubmitField, HiddenField, RadioField
from wtforms import validators
class VerbSet(Form):
name = TextField('Verb:', validators=[validators.required()])
active = BooleanField('Active:')
submit = SubmitField('Submit')
class DCSet(Form):
name = TextField('Name:', validators=[validators.required()])
abbreviation = TextField(
'Abbreviation:',
validators=[validators.required()]
)
submit = SubmitField('Submit')
<commit_msg>Rework imports so not having to specify every type of field. Alter field definitions to reflect change<commit_after># Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask.ext.wtf import Form
from wtforms import fields, validators
class VerbSet(Form):
name = fields.TextField('Verb:', validators=[validators.required()])
active = fields.BooleanField('Active:')
submit = fields.SubmitField('Submit')
class DCSet(Form):
name = fields.TextField('Name:', validators=[validators.required()])
abbreviation = fields.TextField(
'Abbreviation:',
validators=[validators.required()]
)
submit = fields.SubmitField('Submit')
|
4b9e76c1db2f6ab8ddcd7a8eb0cc60c71af32a09
|
panoptes_client/workflow.py
|
panoptes_client/workflow.py
|
from panoptes_client.panoptes import PanoptesObject, LinkResolver
from panoptes_client.subject import Subject
class Workflow(PanoptesObject):
_api_slug = 'workflows'
_link_slug = 'workflows'
_edit_attributes = []
def retire_subjects(self, subjects, reason='other'):
if type(subjects) not in (list, tuple):
subjects = [ subjects ]
subjects = [ s.id if isinstance(s, Subject) else s for s in subjects ]
return Workflow.post(
'{}/retired_subjects'.format(self.id),
json={
'subjects': subjects,
'retirement_reason': reason
}
)
LinkResolver.register(Workflow)
|
from panoptes_client.panoptes import PanoptesObject, LinkResolver
from panoptes_client.subject import Subject
class Workflow(PanoptesObject):
_api_slug = 'workflows'
_link_slug = 'workflows'
_edit_attributes = []
def retire_subjects(self, subjects, reason='other'):
if type(subjects) not in (list, tuple):
subjects = [ subjects ]
subjects = [ s.id if isinstance(s, Subject) else s for s in subjects ]
return Workflow.post(
'{}/retired_subjects'.format(self.id),
json={
'subject_ids': subjects,
'retirement_reason': reason
}
)
LinkResolver.register(Workflow)
|
Change subjects -> subject_ids when retiring subjects
|
Change subjects -> subject_ids when retiring subjects
|
Python
|
apache-2.0
|
zooniverse/panoptes-python-client
|
from panoptes_client.panoptes import PanoptesObject, LinkResolver
from panoptes_client.subject import Subject
class Workflow(PanoptesObject):
_api_slug = 'workflows'
_link_slug = 'workflows'
_edit_attributes = []
def retire_subjects(self, subjects, reason='other'):
if type(subjects) not in (list, tuple):
subjects = [ subjects ]
subjects = [ s.id if isinstance(s, Subject) else s for s in subjects ]
return Workflow.post(
'{}/retired_subjects'.format(self.id),
json={
'subjects': subjects,
'retirement_reason': reason
}
)
LinkResolver.register(Workflow)
Change subjects -> subject_ids when retiring subjects
|
from panoptes_client.panoptes import PanoptesObject, LinkResolver
from panoptes_client.subject import Subject
class Workflow(PanoptesObject):
_api_slug = 'workflows'
_link_slug = 'workflows'
_edit_attributes = []
def retire_subjects(self, subjects, reason='other'):
if type(subjects) not in (list, tuple):
subjects = [ subjects ]
subjects = [ s.id if isinstance(s, Subject) else s for s in subjects ]
return Workflow.post(
'{}/retired_subjects'.format(self.id),
json={
'subject_ids': subjects,
'retirement_reason': reason
}
)
LinkResolver.register(Workflow)
|
<commit_before>from panoptes_client.panoptes import PanoptesObject, LinkResolver
from panoptes_client.subject import Subject
class Workflow(PanoptesObject):
_api_slug = 'workflows'
_link_slug = 'workflows'
_edit_attributes = []
def retire_subjects(self, subjects, reason='other'):
if type(subjects) not in (list, tuple):
subjects = [ subjects ]
subjects = [ s.id if isinstance(s, Subject) else s for s in subjects ]
return Workflow.post(
'{}/retired_subjects'.format(self.id),
json={
'subjects': subjects,
'retirement_reason': reason
}
)
LinkResolver.register(Workflow)
<commit_msg>Change subjects -> subject_ids when retiring subjects<commit_after>
|
from panoptes_client.panoptes import PanoptesObject, LinkResolver
from panoptes_client.subject import Subject
class Workflow(PanoptesObject):
_api_slug = 'workflows'
_link_slug = 'workflows'
_edit_attributes = []
def retire_subjects(self, subjects, reason='other'):
if type(subjects) not in (list, tuple):
subjects = [ subjects ]
subjects = [ s.id if isinstance(s, Subject) else s for s in subjects ]
return Workflow.post(
'{}/retired_subjects'.format(self.id),
json={
'subject_ids': subjects,
'retirement_reason': reason
}
)
LinkResolver.register(Workflow)
|
from panoptes_client.panoptes import PanoptesObject, LinkResolver
from panoptes_client.subject import Subject
class Workflow(PanoptesObject):
_api_slug = 'workflows'
_link_slug = 'workflows'
_edit_attributes = []
def retire_subjects(self, subjects, reason='other'):
if type(subjects) not in (list, tuple):
subjects = [ subjects ]
subjects = [ s.id if isinstance(s, Subject) else s for s in subjects ]
return Workflow.post(
'{}/retired_subjects'.format(self.id),
json={
'subjects': subjects,
'retirement_reason': reason
}
)
LinkResolver.register(Workflow)
Change subjects -> subject_ids when retiring subjectsfrom panoptes_client.panoptes import PanoptesObject, LinkResolver
from panoptes_client.subject import Subject
class Workflow(PanoptesObject):
_api_slug = 'workflows'
_link_slug = 'workflows'
_edit_attributes = []
def retire_subjects(self, subjects, reason='other'):
if type(subjects) not in (list, tuple):
subjects = [ subjects ]
subjects = [ s.id if isinstance(s, Subject) else s for s in subjects ]
return Workflow.post(
'{}/retired_subjects'.format(self.id),
json={
'subject_ids': subjects,
'retirement_reason': reason
}
)
LinkResolver.register(Workflow)
|
<commit_before>from panoptes_client.panoptes import PanoptesObject, LinkResolver
from panoptes_client.subject import Subject
class Workflow(PanoptesObject):
_api_slug = 'workflows'
_link_slug = 'workflows'
_edit_attributes = []
def retire_subjects(self, subjects, reason='other'):
if type(subjects) not in (list, tuple):
subjects = [ subjects ]
subjects = [ s.id if isinstance(s, Subject) else s for s in subjects ]
return Workflow.post(
'{}/retired_subjects'.format(self.id),
json={
'subjects': subjects,
'retirement_reason': reason
}
)
LinkResolver.register(Workflow)
<commit_msg>Change subjects -> subject_ids when retiring subjects<commit_after>from panoptes_client.panoptes import PanoptesObject, LinkResolver
from panoptes_client.subject import Subject
class Workflow(PanoptesObject):
_api_slug = 'workflows'
_link_slug = 'workflows'
_edit_attributes = []
def retire_subjects(self, subjects, reason='other'):
if type(subjects) not in (list, tuple):
subjects = [ subjects ]
subjects = [ s.id if isinstance(s, Subject) else s for s in subjects ]
return Workflow.post(
'{}/retired_subjects'.format(self.id),
json={
'subject_ids': subjects,
'retirement_reason': reason
}
)
LinkResolver.register(Workflow)
|
233ce96d96caff3070f24d9d3dff3ed85be81fee
|
halaqat/settings/shaha.py
|
halaqat/settings/shaha.py
|
from .base_settings import *
import dj_database_url
import os
ALLOWED_HOSTS = ['0.0.0.0']
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_URL = '/static/'
# Extra places for collectstatic to find static files.
STATICFILES_DIRS = (
os.path.join(PROJECT_ROOT, 'static'),
)
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
|
from .base_settings import *
import dj_database_url
import os
ALLOWED_HOSTS = ['0.0.0.0']
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_URL = '/static/'
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
|
Fix The STATICFILES_DIRS setting should not contain the STATIC_ROOT setting
|
Fix The STATICFILES_DIRS setting should not contain the STATIC_ROOT setting
|
Python
|
mit
|
EmadMokhtar/halaqat,EmadMokhtar/halaqat,EmadMokhtar/halaqat
|
from .base_settings import *
import dj_database_url
import os
ALLOWED_HOSTS = ['0.0.0.0']
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_URL = '/static/'
# Extra places for collectstatic to find static files.
STATICFILES_DIRS = (
os.path.join(PROJECT_ROOT, 'static'),
)
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
Fix The STATICFILES_DIRS setting should not contain the STATIC_ROOT setting
|
from .base_settings import *
import dj_database_url
import os
ALLOWED_HOSTS = ['0.0.0.0']
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_URL = '/static/'
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
|
<commit_before>from .base_settings import *
import dj_database_url
import os
ALLOWED_HOSTS = ['0.0.0.0']
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_URL = '/static/'
# Extra places for collectstatic to find static files.
STATICFILES_DIRS = (
os.path.join(PROJECT_ROOT, 'static'),
)
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
<commit_msg>Fix The STATICFILES_DIRS setting should not contain the STATIC_ROOT setting<commit_after>
|
from .base_settings import *
import dj_database_url
import os
ALLOWED_HOSTS = ['0.0.0.0']
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_URL = '/static/'
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
|
from .base_settings import *
import dj_database_url
import os
ALLOWED_HOSTS = ['0.0.0.0']
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_URL = '/static/'
# Extra places for collectstatic to find static files.
STATICFILES_DIRS = (
os.path.join(PROJECT_ROOT, 'static'),
)
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
Fix The STATICFILES_DIRS setting should not contain the STATIC_ROOT settingfrom .base_settings import *
import dj_database_url
import os
ALLOWED_HOSTS = ['0.0.0.0']
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_URL = '/static/'
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
|
<commit_before>from .base_settings import *
import dj_database_url
import os
ALLOWED_HOSTS = ['0.0.0.0']
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_URL = '/static/'
# Extra places for collectstatic to find static files.
STATICFILES_DIRS = (
os.path.join(PROJECT_ROOT, 'static'),
)
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
<commit_msg>Fix The STATICFILES_DIRS setting should not contain the STATIC_ROOT setting<commit_after>from .base_settings import *
import dj_database_url
import os
ALLOWED_HOSTS = ['0.0.0.0']
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_URL = '/static/'
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
|
d99dd477d8b0849815bd5255d1eaedb2879294bb
|
general_itests/environment.py
|
general_itests/environment.py
|
# Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import shutil
from behave_pytest.hook import install_pytest_asserts
from paasta_tools.utils import get_docker_client
def before_all(context):
install_pytest_asserts()
def after_scenario(context, scenario):
if getattr(context, "tmpdir", None):
shutil.rmtree(context.tmpdir)
if getattr(context, "running_container_id", None):
docker_client = get_docker_client()
docker_client.stop(container=context.running_container_id)
docker_client.remove_container(container=context.running_container_id)
if getattr(context, "fake_http_server", None):
context.fake_http_server.shutdown()
context.fake_http_server = None
|
# Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import shutil
from paasta_tools.utils import get_docker_client
def after_scenario(context, scenario):
if getattr(context, "tmpdir", None):
shutil.rmtree(context.tmpdir)
if getattr(context, "running_container_id", None):
docker_client = get_docker_client()
docker_client.stop(container=context.running_container_id)
docker_client.remove_container(container=context.running_container_id)
if getattr(context, "fake_http_server", None):
context.fake_http_server.shutdown()
context.fake_http_server = None
|
Remove behave_pytest from general_itests too
|
Remove behave_pytest from general_itests too
|
Python
|
apache-2.0
|
somic/paasta,somic/paasta,Yelp/paasta,Yelp/paasta
|
# Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import shutil
from behave_pytest.hook import install_pytest_asserts
from paasta_tools.utils import get_docker_client
def before_all(context):
install_pytest_asserts()
def after_scenario(context, scenario):
if getattr(context, "tmpdir", None):
shutil.rmtree(context.tmpdir)
if getattr(context, "running_container_id", None):
docker_client = get_docker_client()
docker_client.stop(container=context.running_container_id)
docker_client.remove_container(container=context.running_container_id)
if getattr(context, "fake_http_server", None):
context.fake_http_server.shutdown()
context.fake_http_server = None
Remove behave_pytest from general_itests too
|
# Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import shutil
from paasta_tools.utils import get_docker_client
def after_scenario(context, scenario):
if getattr(context, "tmpdir", None):
shutil.rmtree(context.tmpdir)
if getattr(context, "running_container_id", None):
docker_client = get_docker_client()
docker_client.stop(container=context.running_container_id)
docker_client.remove_container(container=context.running_container_id)
if getattr(context, "fake_http_server", None):
context.fake_http_server.shutdown()
context.fake_http_server = None
|
<commit_before># Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import shutil
from behave_pytest.hook import install_pytest_asserts
from paasta_tools.utils import get_docker_client
def before_all(context):
install_pytest_asserts()
def after_scenario(context, scenario):
if getattr(context, "tmpdir", None):
shutil.rmtree(context.tmpdir)
if getattr(context, "running_container_id", None):
docker_client = get_docker_client()
docker_client.stop(container=context.running_container_id)
docker_client.remove_container(container=context.running_container_id)
if getattr(context, "fake_http_server", None):
context.fake_http_server.shutdown()
context.fake_http_server = None
<commit_msg>Remove behave_pytest from general_itests too<commit_after>
|
# Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import shutil
from paasta_tools.utils import get_docker_client
def after_scenario(context, scenario):
if getattr(context, "tmpdir", None):
shutil.rmtree(context.tmpdir)
if getattr(context, "running_container_id", None):
docker_client = get_docker_client()
docker_client.stop(container=context.running_container_id)
docker_client.remove_container(container=context.running_container_id)
if getattr(context, "fake_http_server", None):
context.fake_http_server.shutdown()
context.fake_http_server = None
|
# Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import shutil
from behave_pytest.hook import install_pytest_asserts
from paasta_tools.utils import get_docker_client
def before_all(context):
install_pytest_asserts()
def after_scenario(context, scenario):
if getattr(context, "tmpdir", None):
shutil.rmtree(context.tmpdir)
if getattr(context, "running_container_id", None):
docker_client = get_docker_client()
docker_client.stop(container=context.running_container_id)
docker_client.remove_container(container=context.running_container_id)
if getattr(context, "fake_http_server", None):
context.fake_http_server.shutdown()
context.fake_http_server = None
Remove behave_pytest from general_itests too# Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import shutil
from paasta_tools.utils import get_docker_client
def after_scenario(context, scenario):
if getattr(context, "tmpdir", None):
shutil.rmtree(context.tmpdir)
if getattr(context, "running_container_id", None):
docker_client = get_docker_client()
docker_client.stop(container=context.running_container_id)
docker_client.remove_container(container=context.running_container_id)
if getattr(context, "fake_http_server", None):
context.fake_http_server.shutdown()
context.fake_http_server = None
|
<commit_before># Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import shutil
from behave_pytest.hook import install_pytest_asserts
from paasta_tools.utils import get_docker_client
def before_all(context):
install_pytest_asserts()
def after_scenario(context, scenario):
if getattr(context, "tmpdir", None):
shutil.rmtree(context.tmpdir)
if getattr(context, "running_container_id", None):
docker_client = get_docker_client()
docker_client.stop(container=context.running_container_id)
docker_client.remove_container(container=context.running_container_id)
if getattr(context, "fake_http_server", None):
context.fake_http_server.shutdown()
context.fake_http_server = None
<commit_msg>Remove behave_pytest from general_itests too<commit_after># Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import shutil
from paasta_tools.utils import get_docker_client
def after_scenario(context, scenario):
if getattr(context, "tmpdir", None):
shutil.rmtree(context.tmpdir)
if getattr(context, "running_container_id", None):
docker_client = get_docker_client()
docker_client.stop(container=context.running_container_id)
docker_client.remove_container(container=context.running_container_id)
if getattr(context, "fake_http_server", None):
context.fake_http_server.shutdown()
context.fake_http_server = None
|
fcc913c1f714fb823e4405a2af821d7a63b68591
|
integration_tests/emulator/beacon_parser/imtq_state_telemetry_parser.py
|
integration_tests/emulator/beacon_parser/imtq_state_telemetry_parser.py
|
from parser import Parser
class ImtqStateTelemetryParser(Parser):
def __init__(self, tree_control):
Parser.__init__(self, tree_control, 'Imtq State')
def get_bit_count(self):
return 8 + 2 + 2 + 1 + 32
def parse(self, address, bits):
self.append_byte(address, bits, "Status")
self.append_byte(address, bits, "Mode", 2)
self.append_byte(address, bits, "Error during previous iteration", 2)
self.append_byte(address, bits, "Configuration changed", 1)
self.append_dword(address, bits, "Uptime")
|
from parser import Parser
class ImtqStateTelemetryParser(Parser):
def __init__(self, tree_control):
Parser.__init__(self, tree_control, 'Imtq State')
def get_bit_count(self):
return 8 + 2 + 2 + 1 + 32
def parse(self, address, bits):
self.append_byte(address, bits, "Status")
self.append_byte(address, bits, "Mode", 2)
self.append_byte(address, bits, "Error during previous iteration", 8)
self.append_byte(address, bits, "Configuration changed", 1)
self.append_dword(address, bits, "Uptime")
|
Fix imtq state telemetry parser.
|
[telemetry][imtq] Fix imtq state telemetry parser.
|
Python
|
agpl-3.0
|
PW-Sat2/PWSat2OBC,PW-Sat2/PWSat2OBC,PW-Sat2/PWSat2OBC,PW-Sat2/PWSat2OBC
|
from parser import Parser
class ImtqStateTelemetryParser(Parser):
def __init__(self, tree_control):
Parser.__init__(self, tree_control, 'Imtq State')
def get_bit_count(self):
return 8 + 2 + 2 + 1 + 32
def parse(self, address, bits):
self.append_byte(address, bits, "Status")
self.append_byte(address, bits, "Mode", 2)
self.append_byte(address, bits, "Error during previous iteration", 2)
self.append_byte(address, bits, "Configuration changed", 1)
self.append_dword(address, bits, "Uptime")
[telemetry][imtq] Fix imtq state telemetry parser.
|
from parser import Parser
class ImtqStateTelemetryParser(Parser):
def __init__(self, tree_control):
Parser.__init__(self, tree_control, 'Imtq State')
def get_bit_count(self):
return 8 + 2 + 2 + 1 + 32
def parse(self, address, bits):
self.append_byte(address, bits, "Status")
self.append_byte(address, bits, "Mode", 2)
self.append_byte(address, bits, "Error during previous iteration", 8)
self.append_byte(address, bits, "Configuration changed", 1)
self.append_dword(address, bits, "Uptime")
|
<commit_before>from parser import Parser
class ImtqStateTelemetryParser(Parser):
def __init__(self, tree_control):
Parser.__init__(self, tree_control, 'Imtq State')
def get_bit_count(self):
return 8 + 2 + 2 + 1 + 32
def parse(self, address, bits):
self.append_byte(address, bits, "Status")
self.append_byte(address, bits, "Mode", 2)
self.append_byte(address, bits, "Error during previous iteration", 2)
self.append_byte(address, bits, "Configuration changed", 1)
self.append_dword(address, bits, "Uptime")
<commit_msg>[telemetry][imtq] Fix imtq state telemetry parser.<commit_after>
|
from parser import Parser
class ImtqStateTelemetryParser(Parser):
def __init__(self, tree_control):
Parser.__init__(self, tree_control, 'Imtq State')
def get_bit_count(self):
return 8 + 2 + 2 + 1 + 32
def parse(self, address, bits):
self.append_byte(address, bits, "Status")
self.append_byte(address, bits, "Mode", 2)
self.append_byte(address, bits, "Error during previous iteration", 8)
self.append_byte(address, bits, "Configuration changed", 1)
self.append_dword(address, bits, "Uptime")
|
from parser import Parser
class ImtqStateTelemetryParser(Parser):
def __init__(self, tree_control):
Parser.__init__(self, tree_control, 'Imtq State')
def get_bit_count(self):
return 8 + 2 + 2 + 1 + 32
def parse(self, address, bits):
self.append_byte(address, bits, "Status")
self.append_byte(address, bits, "Mode", 2)
self.append_byte(address, bits, "Error during previous iteration", 2)
self.append_byte(address, bits, "Configuration changed", 1)
self.append_dword(address, bits, "Uptime")
[telemetry][imtq] Fix imtq state telemetry parser.from parser import Parser
class ImtqStateTelemetryParser(Parser):
def __init__(self, tree_control):
Parser.__init__(self, tree_control, 'Imtq State')
def get_bit_count(self):
return 8 + 2 + 2 + 1 + 32
def parse(self, address, bits):
self.append_byte(address, bits, "Status")
self.append_byte(address, bits, "Mode", 2)
self.append_byte(address, bits, "Error during previous iteration", 8)
self.append_byte(address, bits, "Configuration changed", 1)
self.append_dword(address, bits, "Uptime")
|
<commit_before>from parser import Parser
class ImtqStateTelemetryParser(Parser):
def __init__(self, tree_control):
Parser.__init__(self, tree_control, 'Imtq State')
def get_bit_count(self):
return 8 + 2 + 2 + 1 + 32
def parse(self, address, bits):
self.append_byte(address, bits, "Status")
self.append_byte(address, bits, "Mode", 2)
self.append_byte(address, bits, "Error during previous iteration", 2)
self.append_byte(address, bits, "Configuration changed", 1)
self.append_dword(address, bits, "Uptime")
<commit_msg>[telemetry][imtq] Fix imtq state telemetry parser.<commit_after>from parser import Parser
class ImtqStateTelemetryParser(Parser):
def __init__(self, tree_control):
Parser.__init__(self, tree_control, 'Imtq State')
def get_bit_count(self):
return 8 + 2 + 2 + 1 + 32
def parse(self, address, bits):
self.append_byte(address, bits, "Status")
self.append_byte(address, bits, "Mode", 2)
self.append_byte(address, bits, "Error during previous iteration", 8)
self.append_byte(address, bits, "Configuration changed", 1)
self.append_dword(address, bits, "Uptime")
|
db0253a228b3253e23bb5190fba9930a2f313d66
|
basictracer/context.py
|
basictracer/context.py
|
from __future__ import absolute_import
import opentracing
class SpanContext(opentracing.SpanContext):
"""SpanContext satisfies the opentracing.SpanContext contract.
trace_id and span_id are uint64's, so their range is [0, 2^64).
"""
def __init__(
self,
trace_id=None,
span_id=None,
baggage=None,
sampled=True):
self.trace_id = trace_id
self.span_id = span_id
self.sampled = sampled
self._baggage = baggage or opentracing.SpanContext.EMPTY_BAGGAGE
@property
def baggage(self):
return self._baggage or opentracing.SpanContext.EMPTY_BAGGAGE
def with_baggage_item(self, key, value):
new_baggage = self._baggage.copy()
new_baggage[key] = value
return SpanContext(
trace_id=self.trace_id,
span_id=self.span_id,
sampled=self.sampled,
baggage=new_baggage)
|
from __future__ import absolute_import
import opentracing
class SpanContext(opentracing.SpanContext):
"""SpanContext satisfies the opentracing.SpanContext contract.
trace_id and span_id are uint64's, so their range is [0, 2^64).
"""
def __init__(
self,
trace_id=None,
span_id=None,
baggage=None,
sampled=True):
self.trace_id = trace_id
self.span_id = span_id
self.sampled = sampled
self._baggage = baggage or opentracing.SpanContext.EMPTY_BAGGAGE
@property
def baggage(self):
return self._baggage
def with_baggage_item(self, key, value):
new_baggage = self._baggage.copy()
new_baggage[key] = value
return SpanContext(
trace_id=self.trace_id,
span_id=self.span_id,
sampled=self.sampled,
baggage=new_baggage)
|
Remove superfluous check for None baggage
|
Remove superfluous check for None baggage
|
Python
|
apache-2.0
|
opentracing/basictracer-python
|
from __future__ import absolute_import
import opentracing
class SpanContext(opentracing.SpanContext):
"""SpanContext satisfies the opentracing.SpanContext contract.
trace_id and span_id are uint64's, so their range is [0, 2^64).
"""
def __init__(
self,
trace_id=None,
span_id=None,
baggage=None,
sampled=True):
self.trace_id = trace_id
self.span_id = span_id
self.sampled = sampled
self._baggage = baggage or opentracing.SpanContext.EMPTY_BAGGAGE
@property
def baggage(self):
return self._baggage or opentracing.SpanContext.EMPTY_BAGGAGE
def with_baggage_item(self, key, value):
new_baggage = self._baggage.copy()
new_baggage[key] = value
return SpanContext(
trace_id=self.trace_id,
span_id=self.span_id,
sampled=self.sampled,
baggage=new_baggage)
Remove superfluous check for None baggage
|
from __future__ import absolute_import
import opentracing
class SpanContext(opentracing.SpanContext):
"""SpanContext satisfies the opentracing.SpanContext contract.
trace_id and span_id are uint64's, so their range is [0, 2^64).
"""
def __init__(
self,
trace_id=None,
span_id=None,
baggage=None,
sampled=True):
self.trace_id = trace_id
self.span_id = span_id
self.sampled = sampled
self._baggage = baggage or opentracing.SpanContext.EMPTY_BAGGAGE
@property
def baggage(self):
return self._baggage
def with_baggage_item(self, key, value):
new_baggage = self._baggage.copy()
new_baggage[key] = value
return SpanContext(
trace_id=self.trace_id,
span_id=self.span_id,
sampled=self.sampled,
baggage=new_baggage)
|
<commit_before>from __future__ import absolute_import
import opentracing
class SpanContext(opentracing.SpanContext):
"""SpanContext satisfies the opentracing.SpanContext contract.
trace_id and span_id are uint64's, so their range is [0, 2^64).
"""
def __init__(
self,
trace_id=None,
span_id=None,
baggage=None,
sampled=True):
self.trace_id = trace_id
self.span_id = span_id
self.sampled = sampled
self._baggage = baggage or opentracing.SpanContext.EMPTY_BAGGAGE
@property
def baggage(self):
return self._baggage or opentracing.SpanContext.EMPTY_BAGGAGE
def with_baggage_item(self, key, value):
new_baggage = self._baggage.copy()
new_baggage[key] = value
return SpanContext(
trace_id=self.trace_id,
span_id=self.span_id,
sampled=self.sampled,
baggage=new_baggage)
<commit_msg>Remove superfluous check for None baggage<commit_after>
|
from __future__ import absolute_import
import opentracing
class SpanContext(opentracing.SpanContext):
"""SpanContext satisfies the opentracing.SpanContext contract.
trace_id and span_id are uint64's, so their range is [0, 2^64).
"""
def __init__(
self,
trace_id=None,
span_id=None,
baggage=None,
sampled=True):
self.trace_id = trace_id
self.span_id = span_id
self.sampled = sampled
self._baggage = baggage or opentracing.SpanContext.EMPTY_BAGGAGE
@property
def baggage(self):
return self._baggage
def with_baggage_item(self, key, value):
new_baggage = self._baggage.copy()
new_baggage[key] = value
return SpanContext(
trace_id=self.trace_id,
span_id=self.span_id,
sampled=self.sampled,
baggage=new_baggage)
|
from __future__ import absolute_import
import opentracing
class SpanContext(opentracing.SpanContext):
"""SpanContext satisfies the opentracing.SpanContext contract.
trace_id and span_id are uint64's, so their range is [0, 2^64).
"""
def __init__(
self,
trace_id=None,
span_id=None,
baggage=None,
sampled=True):
self.trace_id = trace_id
self.span_id = span_id
self.sampled = sampled
self._baggage = baggage or opentracing.SpanContext.EMPTY_BAGGAGE
@property
def baggage(self):
return self._baggage or opentracing.SpanContext.EMPTY_BAGGAGE
def with_baggage_item(self, key, value):
new_baggage = self._baggage.copy()
new_baggage[key] = value
return SpanContext(
trace_id=self.trace_id,
span_id=self.span_id,
sampled=self.sampled,
baggage=new_baggage)
Remove superfluous check for None baggagefrom __future__ import absolute_import
import opentracing
class SpanContext(opentracing.SpanContext):
"""SpanContext satisfies the opentracing.SpanContext contract.
trace_id and span_id are uint64's, so their range is [0, 2^64).
"""
def __init__(
self,
trace_id=None,
span_id=None,
baggage=None,
sampled=True):
self.trace_id = trace_id
self.span_id = span_id
self.sampled = sampled
self._baggage = baggage or opentracing.SpanContext.EMPTY_BAGGAGE
@property
def baggage(self):
return self._baggage
def with_baggage_item(self, key, value):
new_baggage = self._baggage.copy()
new_baggage[key] = value
return SpanContext(
trace_id=self.trace_id,
span_id=self.span_id,
sampled=self.sampled,
baggage=new_baggage)
|
<commit_before>from __future__ import absolute_import
import opentracing
class SpanContext(opentracing.SpanContext):
"""SpanContext satisfies the opentracing.SpanContext contract.
trace_id and span_id are uint64's, so their range is [0, 2^64).
"""
def __init__(
self,
trace_id=None,
span_id=None,
baggage=None,
sampled=True):
self.trace_id = trace_id
self.span_id = span_id
self.sampled = sampled
self._baggage = baggage or opentracing.SpanContext.EMPTY_BAGGAGE
@property
def baggage(self):
return self._baggage or opentracing.SpanContext.EMPTY_BAGGAGE
def with_baggage_item(self, key, value):
new_baggage = self._baggage.copy()
new_baggage[key] = value
return SpanContext(
trace_id=self.trace_id,
span_id=self.span_id,
sampled=self.sampled,
baggage=new_baggage)
<commit_msg>Remove superfluous check for None baggage<commit_after>from __future__ import absolute_import
import opentracing
class SpanContext(opentracing.SpanContext):
"""SpanContext satisfies the opentracing.SpanContext contract.
trace_id and span_id are uint64's, so their range is [0, 2^64).
"""
def __init__(
self,
trace_id=None,
span_id=None,
baggage=None,
sampled=True):
self.trace_id = trace_id
self.span_id = span_id
self.sampled = sampled
self._baggage = baggage or opentracing.SpanContext.EMPTY_BAGGAGE
@property
def baggage(self):
return self._baggage
def with_baggage_item(self, key, value):
new_baggage = self._baggage.copy()
new_baggage[key] = value
return SpanContext(
trace_id=self.trace_id,
span_id=self.span_id,
sampled=self.sampled,
baggage=new_baggage)
|
5c91e99ab733020e123e7cac9f6b2a39713bcee0
|
src/c3nav/api/__init__.py
|
src/c3nav/api/__init__.py
|
from functools import wraps
from rest_framework.renderers import JSONRenderer
from c3nav.mapdata.utils import json_encoder_reindent
orig_render = JSONRenderer.render
@wraps(JSONRenderer.render)
def nicer_renderer(self, data, accepted_media_type=None, renderer_context=None):
if self.get_indent(accepted_media_type, renderer_context) is None:
return orig_render(self, data, accepted_media_type, renderer_context)
return json_encoder_reindent(lambda d: orig_render(self, d, accepted_media_type, renderer_context), data)
# Monkey patch for nicer indentation in the django rest framework
# JSONRenderer.render = nicer_renderer
|
from functools import wraps
from rest_framework.renderers import JSONRenderer
from c3nav.mapdata.utils import json_encoder_reindent
orig_render = JSONRenderer.render
@wraps(JSONRenderer.render)
def nicer_renderer(self, data, accepted_media_type=None, renderer_context=None):
if self.get_indent(accepted_media_type, renderer_context) is None:
return orig_render(self, data, accepted_media_type, renderer_context)
return json_encoder_reindent(lambda d: orig_render(self, d, accepted_media_type, renderer_context), data)
# Monkey patch for nicer indentation in the django rest framework
JSONRenderer.render = nicer_renderer
|
Revert "disabling json indenter for testing"
|
Revert "disabling json indenter for testing"
This reverts commit e12882c1ee11fbfb3d1f131e4d9ed2d1348907df.
|
Python
|
apache-2.0
|
c3nav/c3nav,c3nav/c3nav,c3nav/c3nav,c3nav/c3nav
|
from functools import wraps
from rest_framework.renderers import JSONRenderer
from c3nav.mapdata.utils import json_encoder_reindent
orig_render = JSONRenderer.render
@wraps(JSONRenderer.render)
def nicer_renderer(self, data, accepted_media_type=None, renderer_context=None):
if self.get_indent(accepted_media_type, renderer_context) is None:
return orig_render(self, data, accepted_media_type, renderer_context)
return json_encoder_reindent(lambda d: orig_render(self, d, accepted_media_type, renderer_context), data)
# Monkey patch for nicer indentation in the django rest framework
# JSONRenderer.render = nicer_renderer
Revert "disabling json indenter for testing"
This reverts commit e12882c1ee11fbfb3d1f131e4d9ed2d1348907df.
|
from functools import wraps
from rest_framework.renderers import JSONRenderer
from c3nav.mapdata.utils import json_encoder_reindent
orig_render = JSONRenderer.render
@wraps(JSONRenderer.render)
def nicer_renderer(self, data, accepted_media_type=None, renderer_context=None):
if self.get_indent(accepted_media_type, renderer_context) is None:
return orig_render(self, data, accepted_media_type, renderer_context)
return json_encoder_reindent(lambda d: orig_render(self, d, accepted_media_type, renderer_context), data)
# Monkey patch for nicer indentation in the django rest framework
JSONRenderer.render = nicer_renderer
|
<commit_before>from functools import wraps
from rest_framework.renderers import JSONRenderer
from c3nav.mapdata.utils import json_encoder_reindent
orig_render = JSONRenderer.render
@wraps(JSONRenderer.render)
def nicer_renderer(self, data, accepted_media_type=None, renderer_context=None):
if self.get_indent(accepted_media_type, renderer_context) is None:
return orig_render(self, data, accepted_media_type, renderer_context)
return json_encoder_reindent(lambda d: orig_render(self, d, accepted_media_type, renderer_context), data)
# Monkey patch for nicer indentation in the django rest framework
# JSONRenderer.render = nicer_renderer
<commit_msg>Revert "disabling json indenter for testing"
This reverts commit e12882c1ee11fbfb3d1f131e4d9ed2d1348907df.<commit_after>
|
from functools import wraps
from rest_framework.renderers import JSONRenderer
from c3nav.mapdata.utils import json_encoder_reindent
orig_render = JSONRenderer.render
@wraps(JSONRenderer.render)
def nicer_renderer(self, data, accepted_media_type=None, renderer_context=None):
if self.get_indent(accepted_media_type, renderer_context) is None:
return orig_render(self, data, accepted_media_type, renderer_context)
return json_encoder_reindent(lambda d: orig_render(self, d, accepted_media_type, renderer_context), data)
# Monkey patch for nicer indentation in the django rest framework
JSONRenderer.render = nicer_renderer
|
from functools import wraps
from rest_framework.renderers import JSONRenderer
from c3nav.mapdata.utils import json_encoder_reindent
orig_render = JSONRenderer.render
@wraps(JSONRenderer.render)
def nicer_renderer(self, data, accepted_media_type=None, renderer_context=None):
if self.get_indent(accepted_media_type, renderer_context) is None:
return orig_render(self, data, accepted_media_type, renderer_context)
return json_encoder_reindent(lambda d: orig_render(self, d, accepted_media_type, renderer_context), data)
# Monkey patch for nicer indentation in the django rest framework
# JSONRenderer.render = nicer_renderer
Revert "disabling json indenter for testing"
This reverts commit e12882c1ee11fbfb3d1f131e4d9ed2d1348907df.from functools import wraps
from rest_framework.renderers import JSONRenderer
from c3nav.mapdata.utils import json_encoder_reindent
orig_render = JSONRenderer.render
@wraps(JSONRenderer.render)
def nicer_renderer(self, data, accepted_media_type=None, renderer_context=None):
if self.get_indent(accepted_media_type, renderer_context) is None:
return orig_render(self, data, accepted_media_type, renderer_context)
return json_encoder_reindent(lambda d: orig_render(self, d, accepted_media_type, renderer_context), data)
# Monkey patch for nicer indentation in the django rest framework
JSONRenderer.render = nicer_renderer
|
<commit_before>from functools import wraps
from rest_framework.renderers import JSONRenderer
from c3nav.mapdata.utils import json_encoder_reindent
orig_render = JSONRenderer.render
@wraps(JSONRenderer.render)
def nicer_renderer(self, data, accepted_media_type=None, renderer_context=None):
if self.get_indent(accepted_media_type, renderer_context) is None:
return orig_render(self, data, accepted_media_type, renderer_context)
return json_encoder_reindent(lambda d: orig_render(self, d, accepted_media_type, renderer_context), data)
# Monkey patch for nicer indentation in the django rest framework
# JSONRenderer.render = nicer_renderer
<commit_msg>Revert "disabling json indenter for testing"
This reverts commit e12882c1ee11fbfb3d1f131e4d9ed2d1348907df.<commit_after>from functools import wraps
from rest_framework.renderers import JSONRenderer
from c3nav.mapdata.utils import json_encoder_reindent
orig_render = JSONRenderer.render
@wraps(JSONRenderer.render)
def nicer_renderer(self, data, accepted_media_type=None, renderer_context=None):
if self.get_indent(accepted_media_type, renderer_context) is None:
return orig_render(self, data, accepted_media_type, renderer_context)
return json_encoder_reindent(lambda d: orig_render(self, d, accepted_media_type, renderer_context), data)
# Monkey patch for nicer indentation in the django rest framework
JSONRenderer.render = nicer_renderer
|
efd1c945cafda82e48077e75e3231cac95d6e077
|
evesrp/util/fields.py
|
evesrp/util/fields.py
|
from __future__ import absolute_import
import wtforms
import wtforms.widgets
import wtforms.fields
class ImageInput(wtforms.widgets.Input):
"""WTForms widget for image inputs (<input type="image">)
"""
input_type = u'image'
def __init__(self, src='', alt=''):
self.src = src
self.alt = alt
def __call__(self, field, **kwargs):
kwargs['src'] = self.src
kwargs['alt'] = self.alt
return super(ImageInput, self).__call__(field, **kwargs)
class ImageField(wtforms.fields.BooleanField):
"""WTForms field for image fields.
"""
def __init__(self, src, alt='', **kwargs):
widget = ImageInput(src, alt)
super(wtforms.fields.BooleanField, self).__init__(widget=widget,
**kwargs)
|
from __future__ import absolute_import
import wtforms
import wtforms.widgets
import wtforms.fields
from wtforms.utils import unset_value
class ImageInput(wtforms.widgets.Input):
"""WTForms widget for image inputs (<input type="image">)
"""
input_type = u'image'
def __init__(self, src='', alt=''):
super(ImageInput, self).__init__()
self.src = src
self.alt = alt
def __call__(self, field, **kwargs):
kwargs['src'] = self.src
kwargs['alt'] = self.alt
return super(ImageInput, self).__call__(field, **kwargs)
class ImageField(wtforms.fields.BooleanField):
"""WTForms field for image fields.
"""
def __init__(self, src, alt='', **kwargs):
widget = ImageInput(src, alt)
super(wtforms.fields.BooleanField, self).__init__(widget=widget,
**kwargs)
def process(self, formdata, data=unset_value):
if formdata:
for key in formdata:
if key.startswith(self.name):
self.data = True
break
else:
self.data = False
else:
self.data = False
|
Update customs ImageField to work with IE
|
Update customs ImageField to work with IE
As opposed to Chrome, IE (and maybe other browsers) just returns the coordinates of where the click occurred.
|
Python
|
bsd-2-clause
|
paxswill/evesrp,paxswill/evesrp,paxswill/evesrp
|
from __future__ import absolute_import
import wtforms
import wtforms.widgets
import wtforms.fields
class ImageInput(wtforms.widgets.Input):
"""WTForms widget for image inputs (<input type="image">)
"""
input_type = u'image'
def __init__(self, src='', alt=''):
self.src = src
self.alt = alt
def __call__(self, field, **kwargs):
kwargs['src'] = self.src
kwargs['alt'] = self.alt
return super(ImageInput, self).__call__(field, **kwargs)
class ImageField(wtforms.fields.BooleanField):
"""WTForms field for image fields.
"""
def __init__(self, src, alt='', **kwargs):
widget = ImageInput(src, alt)
super(wtforms.fields.BooleanField, self).__init__(widget=widget,
**kwargs)
Update customs ImageField to work with IE
As opposed to Chrome, IE (and maybe other browsers) just returns the coordinates of where the click occurred.
|
from __future__ import absolute_import
import wtforms
import wtforms.widgets
import wtforms.fields
from wtforms.utils import unset_value
class ImageInput(wtforms.widgets.Input):
"""WTForms widget for image inputs (<input type="image">)
"""
input_type = u'image'
def __init__(self, src='', alt=''):
super(ImageInput, self).__init__()
self.src = src
self.alt = alt
def __call__(self, field, **kwargs):
kwargs['src'] = self.src
kwargs['alt'] = self.alt
return super(ImageInput, self).__call__(field, **kwargs)
class ImageField(wtforms.fields.BooleanField):
"""WTForms field for image fields.
"""
def __init__(self, src, alt='', **kwargs):
widget = ImageInput(src, alt)
super(wtforms.fields.BooleanField, self).__init__(widget=widget,
**kwargs)
def process(self, formdata, data=unset_value):
if formdata:
for key in formdata:
if key.startswith(self.name):
self.data = True
break
else:
self.data = False
else:
self.data = False
|
<commit_before>from __future__ import absolute_import
import wtforms
import wtforms.widgets
import wtforms.fields
class ImageInput(wtforms.widgets.Input):
"""WTForms widget for image inputs (<input type="image">)
"""
input_type = u'image'
def __init__(self, src='', alt=''):
self.src = src
self.alt = alt
def __call__(self, field, **kwargs):
kwargs['src'] = self.src
kwargs['alt'] = self.alt
return super(ImageInput, self).__call__(field, **kwargs)
class ImageField(wtforms.fields.BooleanField):
"""WTForms field for image fields.
"""
def __init__(self, src, alt='', **kwargs):
widget = ImageInput(src, alt)
super(wtforms.fields.BooleanField, self).__init__(widget=widget,
**kwargs)
<commit_msg>Update customs ImageField to work with IE
As opposed to Chrome, IE (and maybe other browsers) just returns the coordinates of where the click occurred.<commit_after>
|
from __future__ import absolute_import
import wtforms
import wtforms.widgets
import wtforms.fields
from wtforms.utils import unset_value
class ImageInput(wtforms.widgets.Input):
"""WTForms widget for image inputs (<input type="image">)
"""
input_type = u'image'
def __init__(self, src='', alt=''):
super(ImageInput, self).__init__()
self.src = src
self.alt = alt
def __call__(self, field, **kwargs):
kwargs['src'] = self.src
kwargs['alt'] = self.alt
return super(ImageInput, self).__call__(field, **kwargs)
class ImageField(wtforms.fields.BooleanField):
"""WTForms field for image fields.
"""
def __init__(self, src, alt='', **kwargs):
widget = ImageInput(src, alt)
super(wtforms.fields.BooleanField, self).__init__(widget=widget,
**kwargs)
def process(self, formdata, data=unset_value):
if formdata:
for key in formdata:
if key.startswith(self.name):
self.data = True
break
else:
self.data = False
else:
self.data = False
|
from __future__ import absolute_import
import wtforms
import wtforms.widgets
import wtforms.fields
class ImageInput(wtforms.widgets.Input):
"""WTForms widget for image inputs (<input type="image">)
"""
input_type = u'image'
def __init__(self, src='', alt=''):
self.src = src
self.alt = alt
def __call__(self, field, **kwargs):
kwargs['src'] = self.src
kwargs['alt'] = self.alt
return super(ImageInput, self).__call__(field, **kwargs)
class ImageField(wtforms.fields.BooleanField):
"""WTForms field for image fields.
"""
def __init__(self, src, alt='', **kwargs):
widget = ImageInput(src, alt)
super(wtforms.fields.BooleanField, self).__init__(widget=widget,
**kwargs)
Update customs ImageField to work with IE
As opposed to Chrome, IE (and maybe other browsers) just returns the coordinates of where the click occurred.from __future__ import absolute_import
import wtforms
import wtforms.widgets
import wtforms.fields
from wtforms.utils import unset_value
class ImageInput(wtforms.widgets.Input):
"""WTForms widget for image inputs (<input type="image">)
"""
input_type = u'image'
def __init__(self, src='', alt=''):
super(ImageInput, self).__init__()
self.src = src
self.alt = alt
def __call__(self, field, **kwargs):
kwargs['src'] = self.src
kwargs['alt'] = self.alt
return super(ImageInput, self).__call__(field, **kwargs)
class ImageField(wtforms.fields.BooleanField):
"""WTForms field for image fields.
"""
def __init__(self, src, alt='', **kwargs):
widget = ImageInput(src, alt)
super(wtforms.fields.BooleanField, self).__init__(widget=widget,
**kwargs)
def process(self, formdata, data=unset_value):
if formdata:
for key in formdata:
if key.startswith(self.name):
self.data = True
break
else:
self.data = False
else:
self.data = False
|
<commit_before>from __future__ import absolute_import
import wtforms
import wtforms.widgets
import wtforms.fields
class ImageInput(wtforms.widgets.Input):
"""WTForms widget for image inputs (<input type="image">)
"""
input_type = u'image'
def __init__(self, src='', alt=''):
self.src = src
self.alt = alt
def __call__(self, field, **kwargs):
kwargs['src'] = self.src
kwargs['alt'] = self.alt
return super(ImageInput, self).__call__(field, **kwargs)
class ImageField(wtforms.fields.BooleanField):
"""WTForms field for image fields.
"""
def __init__(self, src, alt='', **kwargs):
widget = ImageInput(src, alt)
super(wtforms.fields.BooleanField, self).__init__(widget=widget,
**kwargs)
<commit_msg>Update customs ImageField to work with IE
As opposed to Chrome, IE (and maybe other browsers) just returns the coordinates of where the click occurred.<commit_after>from __future__ import absolute_import
import wtforms
import wtforms.widgets
import wtforms.fields
from wtforms.utils import unset_value
class ImageInput(wtforms.widgets.Input):
"""WTForms widget for image inputs (<input type="image">)
"""
input_type = u'image'
def __init__(self, src='', alt=''):
super(ImageInput, self).__init__()
self.src = src
self.alt = alt
def __call__(self, field, **kwargs):
kwargs['src'] = self.src
kwargs['alt'] = self.alt
return super(ImageInput, self).__call__(field, **kwargs)
class ImageField(wtforms.fields.BooleanField):
    """WTForms field rendered as an image input (<input type="image">).

    ``data`` is True when any submitted key starts with this field's name;
    IE submits only ``<name>.x`` / ``<name>.y`` click coordinates, hence
    the prefix test instead of an exact key lookup.
    """
    def __init__(self, src, alt='', **kwargs):
        widget = ImageInput(src, alt)
        # Fix: super() must target this class, not the parent class --
        # super(BooleanField, self) skips BooleanField.__init__ in the MRO.
        super(ImageField, self).__init__(widget=widget,
                                         **kwargs)

    def process(self, formdata, data=unset_value):
        """Derive ``self.data`` from the raw form data (prefix match)."""
        if formdata:
            for key in formdata:
                if key.startswith(self.name):
                    self.data = True
                    break
            else:
                # Loop completed without a match: button was not clicked.
                self.data = False
        else:
            self.data = False
|
e92339046fb47fc2275f432dfe3d998f702e40b2
|
pycalphad/tests/test_tdb.py
|
pycalphad/tests/test_tdb.py
|
"""
This module tests the functionality of the TDB file parser.
"""
import nose.tools
from pycalphad import Database
from sympy import SympifyError
@nose.tools.raises(SympifyError)
def test_tdb_popen_exploit():
    "Prevent execution of arbitrary code using Popen."
    # Malicious TDB input: the parameter expression tries to reach a
    # subprocess class via object.__subclasses__() and run '/bin/ls'.
    # Parsing must raise (SympifyError) rather than evaluate the payload.
    tdb_exploit_string = \
    """
    PARAMETER G(L12_FCC,AL,CR,NI:NI;0)
    298.15 [].__class__.__base__.__subclasses__()[158]('/bin/ls'); 6000 N !
    """
    Database(tdb_exploit_string)
|
"""
This module tests the functionality of the TDB file parser.
"""
import nose.tools
from pycalphad import Database
@nose.tools.raises(ValueError, TypeError)
def test_tdb_popen_exploit():
    "Prevent execution of arbitrary code using Popen."
    # Malicious TDB input: the parameter expression tries to reach a
    # subprocess class via object.__subclasses__() and run '/bin/ls'.
    # Parsing must raise (ValueError/TypeError, depending on the Python
    # version) rather than evaluate the payload.
    tdb_exploit_string = \
    """
    PARAMETER G(L12_FCC,AL,CR,NI:NI;0)
    298.15 [].__class__.__base__.__subclasses__()[158]('/bin/ls'); 6000 N !
    """
    Database(tdb_exploit_string)
|
Fix unit test for py26
|
Fix unit test for py26
|
Python
|
mit
|
tkphd/pycalphad,tkphd/pycalphad,tkphd/pycalphad
|
"""
This module tests the functionality of the TDB file parser.
"""
import nose.tools
from pycalphad import Database
from sympy import SympifyError
@nose.tools.raises(SympifyError)
def test_tdb_popen_exploit():
"Prevent execution of arbitrary code using Popen."
tdb_exploit_string = \
"""
PARAMETER G(L12_FCC,AL,CR,NI:NI;0)
298.15 [].__class__.__base__.__subclasses__()[158]('/bin/ls'); 6000 N !
"""
Database(tdb_exploit_string)
Fix unit test for py26
|
"""
This module tests the functionality of the TDB file parser.
"""
import nose.tools
from pycalphad import Database
@nose.tools.raises(ValueError, TypeError)
def test_tdb_popen_exploit():
"Prevent execution of arbitrary code using Popen."
tdb_exploit_string = \
"""
PARAMETER G(L12_FCC,AL,CR,NI:NI;0)
298.15 [].__class__.__base__.__subclasses__()[158]('/bin/ls'); 6000 N !
"""
Database(tdb_exploit_string)
|
<commit_before>"""
This module tests the functionality of the TDB file parser.
"""
import nose.tools
from pycalphad import Database
from sympy import SympifyError
@nose.tools.raises(SympifyError)
def test_tdb_popen_exploit():
"Prevent execution of arbitrary code using Popen."
tdb_exploit_string = \
"""
PARAMETER G(L12_FCC,AL,CR,NI:NI;0)
298.15 [].__class__.__base__.__subclasses__()[158]('/bin/ls'); 6000 N !
"""
Database(tdb_exploit_string)
<commit_msg>Fix unit test for py26<commit_after>
|
"""
This module tests the functionality of the TDB file parser.
"""
import nose.tools
from pycalphad import Database
@nose.tools.raises(ValueError, TypeError)
def test_tdb_popen_exploit():
"Prevent execution of arbitrary code using Popen."
tdb_exploit_string = \
"""
PARAMETER G(L12_FCC,AL,CR,NI:NI;0)
298.15 [].__class__.__base__.__subclasses__()[158]('/bin/ls'); 6000 N !
"""
Database(tdb_exploit_string)
|
"""
This module tests the functionality of the TDB file parser.
"""
import nose.tools
from pycalphad import Database
from sympy import SympifyError
@nose.tools.raises(SympifyError)
def test_tdb_popen_exploit():
"Prevent execution of arbitrary code using Popen."
tdb_exploit_string = \
"""
PARAMETER G(L12_FCC,AL,CR,NI:NI;0)
298.15 [].__class__.__base__.__subclasses__()[158]('/bin/ls'); 6000 N !
"""
Database(tdb_exploit_string)
Fix unit test for py26"""
This module tests the functionality of the TDB file parser.
"""
import nose.tools
from pycalphad import Database
@nose.tools.raises(ValueError, TypeError)
def test_tdb_popen_exploit():
"Prevent execution of arbitrary code using Popen."
tdb_exploit_string = \
"""
PARAMETER G(L12_FCC,AL,CR,NI:NI;0)
298.15 [].__class__.__base__.__subclasses__()[158]('/bin/ls'); 6000 N !
"""
Database(tdb_exploit_string)
|
<commit_before>"""
This module tests the functionality of the TDB file parser.
"""
import nose.tools
from pycalphad import Database
from sympy import SympifyError
@nose.tools.raises(SympifyError)
def test_tdb_popen_exploit():
"Prevent execution of arbitrary code using Popen."
tdb_exploit_string = \
"""
PARAMETER G(L12_FCC,AL,CR,NI:NI;0)
298.15 [].__class__.__base__.__subclasses__()[158]('/bin/ls'); 6000 N !
"""
Database(tdb_exploit_string)
<commit_msg>Fix unit test for py26<commit_after>"""
This module tests the functionality of the TDB file parser.
"""
import nose.tools
from pycalphad import Database
@nose.tools.raises(ValueError, TypeError)
def test_tdb_popen_exploit():
"Prevent execution of arbitrary code using Popen."
tdb_exploit_string = \
"""
PARAMETER G(L12_FCC,AL,CR,NI:NI;0)
298.15 [].__class__.__base__.__subclasses__()[158]('/bin/ls'); 6000 N !
"""
Database(tdb_exploit_string)
|
ace1afc8491821c16311042d8115d31df119165d
|
build_chrome_webapp.py
|
build_chrome_webapp.py
|
# Renders the main HTML template with production flags for the Chrome web app.
try:
    from jinja2 import Template
except:
    # NOTE(review): bare except also hides non-import errors -- confirm
    # narrowing to ImportError is acceptable.
    print "Could not import Jinja2, run 'easy_install Jinja2'"
    exit()

def render_main_template():
    # Load the template source and decode it to unicode.
    f = open('./html/index.html')
    template = Template(f.read().decode('utf-8'))
    f.close()
    # Production configuration: no OpenGraph tag, no URL, production JS.
    html = template.render(og_tag='', url='', ON_PRODUCTION=True, ON_DEV=False, USE_PRODUCTION_JAVASCRIPT=True)
    # Write the rendered page as UTF-8 next to the script.
    f = open('./chrome_webapp_index.html', 'w')
    f.write(html.encode('utf-8'))
    f.close()
    print "Template rendered"

render_main_template()
|
from zipfile import ZipFile
try:
from jinja2 import Template
except:
print "Could not import Jinja2, run 'easy_install Jinja2'"
exit()
zipfile = ZipFile("webapp.zip", "w")
def render_main_template():
f = open('./html/index.html')
template = Template(f.read().decode('utf-8'))
f.close()
html = template.render(og_tag='', url='', ON_PRODUCTION=True, ON_DEV=False, USE_PRODUCTION_JAVASCRIPT=True)
zipfile.writestr('index.html', html.encode('utf-8'))
print "Template rendered"
render_main_template()
|
Write output to a zip file
|
Write output to a zip file
|
Python
|
mit
|
youtify/youtify,youtify/youtify,youtify/youtify
|
try:
from jinja2 import Template
except:
print "Could not import Jinja2, run 'easy_install Jinja2'"
exit()
def render_main_template():
f = open('./html/index.html')
template = Template(f.read().decode('utf-8'))
f.close()
html = template.render(og_tag='', url='', ON_PRODUCTION=True, ON_DEV=False, USE_PRODUCTION_JAVASCRIPT=True)
f = open('./chrome_webapp_index.html', 'w')
f.write(html.encode('utf-8'))
f.close()
print "Template rendered"
render_main_template()
Write output to a zip file
|
from zipfile import ZipFile
try:
from jinja2 import Template
except:
print "Could not import Jinja2, run 'easy_install Jinja2'"
exit()
zipfile = ZipFile("webapp.zip", "w")
def render_main_template():
f = open('./html/index.html')
template = Template(f.read().decode('utf-8'))
f.close()
html = template.render(og_tag='', url='', ON_PRODUCTION=True, ON_DEV=False, USE_PRODUCTION_JAVASCRIPT=True)
zipfile.writestr('index.html', html.encode('utf-8'))
print "Template rendered"
render_main_template()
|
<commit_before>try:
from jinja2 import Template
except:
print "Could not import Jinja2, run 'easy_install Jinja2'"
exit()
def render_main_template():
f = open('./html/index.html')
template = Template(f.read().decode('utf-8'))
f.close()
html = template.render(og_tag='', url='', ON_PRODUCTION=True, ON_DEV=False, USE_PRODUCTION_JAVASCRIPT=True)
f = open('./chrome_webapp_index.html', 'w')
f.write(html.encode('utf-8'))
f.close()
print "Template rendered"
render_main_template()
<commit_msg>Write output to a zip file<commit_after>
|
from zipfile import ZipFile
try:
from jinja2 import Template
except:
print "Could not import Jinja2, run 'easy_install Jinja2'"
exit()
zipfile = ZipFile("webapp.zip", "w")
def render_main_template():
f = open('./html/index.html')
template = Template(f.read().decode('utf-8'))
f.close()
html = template.render(og_tag='', url='', ON_PRODUCTION=True, ON_DEV=False, USE_PRODUCTION_JAVASCRIPT=True)
zipfile.writestr('index.html', html.encode('utf-8'))
print "Template rendered"
render_main_template()
|
try:
from jinja2 import Template
except:
print "Could not import Jinja2, run 'easy_install Jinja2'"
exit()
def render_main_template():
f = open('./html/index.html')
template = Template(f.read().decode('utf-8'))
f.close()
html = template.render(og_tag='', url='', ON_PRODUCTION=True, ON_DEV=False, USE_PRODUCTION_JAVASCRIPT=True)
f = open('./chrome_webapp_index.html', 'w')
f.write(html.encode('utf-8'))
f.close()
print "Template rendered"
render_main_template()
Write output to a zip filefrom zipfile import ZipFile
try:
from jinja2 import Template
except:
print "Could not import Jinja2, run 'easy_install Jinja2'"
exit()
zipfile = ZipFile("webapp.zip", "w")
def render_main_template():
f = open('./html/index.html')
template = Template(f.read().decode('utf-8'))
f.close()
html = template.render(og_tag='', url='', ON_PRODUCTION=True, ON_DEV=False, USE_PRODUCTION_JAVASCRIPT=True)
zipfile.writestr('index.html', html.encode('utf-8'))
print "Template rendered"
render_main_template()
|
<commit_before>try:
from jinja2 import Template
except:
print "Could not import Jinja2, run 'easy_install Jinja2'"
exit()
def render_main_template():
f = open('./html/index.html')
template = Template(f.read().decode('utf-8'))
f.close()
html = template.render(og_tag='', url='', ON_PRODUCTION=True, ON_DEV=False, USE_PRODUCTION_JAVASCRIPT=True)
f = open('./chrome_webapp_index.html', 'w')
f.write(html.encode('utf-8'))
f.close()
print "Template rendered"
render_main_template()
<commit_msg>Write output to a zip file<commit_after>from zipfile import ZipFile
try:
from jinja2 import Template
except:
print "Could not import Jinja2, run 'easy_install Jinja2'"
exit()
zipfile = ZipFile("webapp.zip", "w")
def render_main_template():
f = open('./html/index.html')
template = Template(f.read().decode('utf-8'))
f.close()
html = template.render(og_tag='', url='', ON_PRODUCTION=True, ON_DEV=False, USE_PRODUCTION_JAVASCRIPT=True)
zipfile.writestr('index.html', html.encode('utf-8'))
print "Template rendered"
render_main_template()
|
fe19fa7ac7f98525980e5b074bb17015531b2b58
|
buzzwordbingo/views.py
|
buzzwordbingo/views.py
|
from django.core.urlresolvers import reverse
from djangorestframework.views import View
class BuzzwordBingoView(View):
    # No class docstring on purpose: djangorestframework renders the
    # docstring as the API description, which would change the response page.
    def get(self, request):
        # Pair each resource label with its named URL route, then reverse.
        endpoints = (
            ('Buzzwords', 'buzzword-root'),
            ('Win Conditions', 'win-condition-root'),
            ('Boards', 'board-root'),
        )
        return [{'name': label, 'url': reverse(route)}
                for label, route in endpoints]
|
from django.core.urlresolvers import reverse
from djangorestframework.views import View
class BuzzwordBingoView(View):
    """The buzzword bingo REST API provides an interface to a collection of
    boards, which contain the buzzwords on the board and a list of win
    conditions, which are Python code which determines if a given board is a
    winning board.

    For more about the game of buzzword bingo, please see the [Buzzword Bingo
    article on Wikipedia](http://en.wikipedia.org/wiki/Buzzword_bingo).
    """
    def get(self, request):
        # Top-level API index: name/url pairs for each resource root.
        return [{'name': 'Buzzwords', 'url': reverse('buzzword-root')},
                {'name': 'Win Conditions', 'url': reverse('win-condition-root')},
                {'name': 'Boards', 'url': reverse('board-root')},
                ]
|
Add a description to the main view using Markdown.
|
Add a description to the main view using Markdown.
|
Python
|
bsd-3-clause
|
seanfisk/buzzword-bingo-server,seanfisk/buzzword-bingo-server
|
from django.core.urlresolvers import reverse
from djangorestframework.views import View
class BuzzwordBingoView(View):
def get(self, request):
return [{'name': 'Buzzwords', 'url': reverse('buzzword-root')},
{'name': 'Win Conditions', 'url': reverse('win-condition-root')},
{'name': 'Boards', 'url': reverse('board-root')},
]
Add a description to the main view using Markdown.
|
from django.core.urlresolvers import reverse
from djangorestframework.views import View
class BuzzwordBingoView(View):
"""The buzzword bingo REST API provides an interface to a collection of
boards, which contain the buzzwords on the board and a list of win
conditions, which are Python code which determines if a given board is a
winning board.
For more about the game of buzzword bingo, please see the [Buzzword Bingo
article on Wikipedia](http://en.wikipedia.org/wiki/Buzzword_bingo).
"""
def get(self, request):
return [{'name': 'Buzzwords', 'url': reverse('buzzword-root')},
{'name': 'Win Conditions', 'url': reverse('win-condition-root')},
{'name': 'Boards', 'url': reverse('board-root')},
]
|
<commit_before>from django.core.urlresolvers import reverse
from djangorestframework.views import View
class BuzzwordBingoView(View):
def get(self, request):
return [{'name': 'Buzzwords', 'url': reverse('buzzword-root')},
{'name': 'Win Conditions', 'url': reverse('win-condition-root')},
{'name': 'Boards', 'url': reverse('board-root')},
]
<commit_msg>Add a description to the main view using Markdown.<commit_after>
|
from django.core.urlresolvers import reverse
from djangorestframework.views import View
class BuzzwordBingoView(View):
"""The buzzword bingo REST API provides an interface to a collection of
boards, which contain the buzzwords on the board and a list of win
conditions, which are Python code which determines if a given board is a
winning board.
For more about the game of buzzword bingo, please see the [Buzzword Bingo
article on Wikipedia](http://en.wikipedia.org/wiki/Buzzword_bingo).
"""
def get(self, request):
return [{'name': 'Buzzwords', 'url': reverse('buzzword-root')},
{'name': 'Win Conditions', 'url': reverse('win-condition-root')},
{'name': 'Boards', 'url': reverse('board-root')},
]
|
from django.core.urlresolvers import reverse
from djangorestframework.views import View
class BuzzwordBingoView(View):
def get(self, request):
return [{'name': 'Buzzwords', 'url': reverse('buzzword-root')},
{'name': 'Win Conditions', 'url': reverse('win-condition-root')},
{'name': 'Boards', 'url': reverse('board-root')},
]
Add a description to the main view using Markdown.from django.core.urlresolvers import reverse
from djangorestframework.views import View
class BuzzwordBingoView(View):
"""The buzzword bingo REST API provides an interface to a collection of
boards, which contain the buzzwords on the board and a list of win
conditions, which are Python code which determines if a given board is a
winning board.
For more about the game of buzzword bingo, please see the [Buzzword Bingo
article on Wikipedia](http://en.wikipedia.org/wiki/Buzzword_bingo).
"""
def get(self, request):
return [{'name': 'Buzzwords', 'url': reverse('buzzword-root')},
{'name': 'Win Conditions', 'url': reverse('win-condition-root')},
{'name': 'Boards', 'url': reverse('board-root')},
]
|
<commit_before>from django.core.urlresolvers import reverse
from djangorestframework.views import View
class BuzzwordBingoView(View):
def get(self, request):
return [{'name': 'Buzzwords', 'url': reverse('buzzword-root')},
{'name': 'Win Conditions', 'url': reverse('win-condition-root')},
{'name': 'Boards', 'url': reverse('board-root')},
]
<commit_msg>Add a description to the main view using Markdown.<commit_after>from django.core.urlresolvers import reverse
from djangorestframework.views import View
class BuzzwordBingoView(View):
"""The buzzword bingo REST API provides an interface to a collection of
boards, which contain the buzzwords on the board and a list of win
conditions, which are Python code which determines if a given board is a
winning board.
For more about the game of buzzword bingo, please see the [Buzzword Bingo
article on Wikipedia](http://en.wikipedia.org/wiki/Buzzword_bingo).
"""
def get(self, request):
return [{'name': 'Buzzwords', 'url': reverse('buzzword-root')},
{'name': 'Win Conditions', 'url': reverse('win-condition-root')},
{'name': 'Boards', 'url': reverse('board-root')},
]
|
8d925147bf459021ca9735faec375608963d0269
|
gatekeeper/nodered.py
|
gatekeeper/nodered.py
|
import threading
import socket
NODE_RED_SERVER_PORT = 4445
NODE_RED_CLIENT_PORT = 4444
class NodeRedDoorbellServerThread(threading.Thread):
    """
    Get doorbell triggers from NodeRed.
    """
    def __init__(self, intercom):
        super(NodeRedDoorbellServerThread, self).__init__()
        # Object notified on every doorbell payload (must expose
        # onBellPressed()).
        self.intercom = intercom
        # NOTE(review): this listening socket is never closed, and run()
        # accepts exactly one connection -- confirm whether NodeRed is
        # expected to reconnect after a disconnect.
        self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.running = True

    def run(self):
        # Accept a single NodeRed connection, then treat every received
        # payload as a bell press until the peer disconnects or running
        # is cleared.
        self.server_socket.bind(('', NODE_RED_SERVER_PORT))
        self.server_socket.listen(1)
        conn, addr = self.server_socket.accept()
        while self.running:
            data = conn.recv(1024)
            if not data:
                # Empty recv means the peer closed the connection.
                print("no data breaking")
                break
            else:
                self.intercom.onBellPressed()
        conn.close()
class NodeRedDoorOpenClient():
    """
    Send open door commands to NodeRed.
    """
    def __init__(self):
        super(NodeRedDoorOpenClient, self).__init__()
        # Connects eagerly to the local NodeRed instance; construction
        # fails if NodeRed is not listening on NODE_RED_CLIENT_PORT yet.
        self.client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.client_socket.connect(("127.0.0.1", NODE_RED_CLIENT_PORT))

    def sendOpenDoor(self):
        # The payload content appears arbitrary; NodeRed reacts to the
        # message itself -- presumably any bytes trigger "open" (verify).
        self.client_socket.send(b'open')
|
import threading
import socket
NODE_RED_SERVER_PORT = 4445
NODE_RED_CLIENT_PORT = 4444
class NodeRedDoorbellServerThread(threading.Thread):
    """
    Get doorbell triggers from NodeRed.

    Listens on NODE_RED_SERVER_PORT, accepts connections in a loop so
    NodeRed may reconnect, and reports every received payload to the
    intercom as a bell press.
    """
    def __init__(self, intercom):
        super(NodeRedDoorbellServerThread, self).__init__()
        # Object notified on every doorbell payload (must expose
        # onBellPressed()).
        self.intercom = intercom

    def run(self):
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as server_socket:
            self.running = True
            # Fix: operate on the local `server_socket` bound by the context
            # manager; the original called self.server_socket.bind/listen/
            # accept, but that attribute is never assigned anywhere, so run()
            # would immediately die with AttributeError.
            server_socket.bind(('', NODE_RED_SERVER_PORT))
            server_socket.listen(1)
            while self.running:
                conn, addr = server_socket.accept()
                with conn:
                    while self.running:
                        data = conn.recv(1024)
                        if not data:
                            # Empty recv: peer closed; go accept again.
                            print("no data breaking")
                            break
                        else:
                            self.intercom.onBellPressed()
class NodeRedDoorOpenClient():
    """
    Send open door commands to NodeRed.
    """
    def __init__(self):
        super(NodeRedDoorOpenClient, self).__init__()
        # Connects eagerly to the local NodeRed instance; construction
        # fails if NodeRed is not listening on NODE_RED_CLIENT_PORT yet.
        self.client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.client_socket.connect(("127.0.0.1", NODE_RED_CLIENT_PORT))

    def sendOpenDoor(self):
        # The payload content appears arbitrary; NodeRed reacts to the
        # message itself -- presumably any bytes trigger "open" (verify).
        self.client_socket.send(b'open')
|
Use with handler to safely close the server socket and connection
|
Use with handler to safely close the server socket and connection
|
Python
|
mit
|
git-commit/iot-gatekeeper,git-commit/iot-gatekeeper
|
import threading
import socket
NODE_RED_SERVER_PORT = 4445
NODE_RED_CLIENT_PORT = 4444
class NodeRedDoorbellServerThread(threading.Thread):
"""
Get doorbell triggers from NodeRed.
"""
def __init__(self, intercom):
super(NodeRedDoorbellServerThread, self).__init__()
self.intercom = intercom
self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.running = True
def run(self):
self.server_socket.bind(('', NODE_RED_SERVER_PORT))
self.server_socket.listen(1)
conn, addr = self.server_socket.accept()
while self.running:
data = conn.recv(1024)
if not data:
print("no data breaking")
break
else:
self.intercom.onBellPressed()
conn.close()
class NodeRedDoorOpenClient():
"""
Send open door commands to NodeRed.
"""
def __init__(self):
super(NodeRedDoorOpenClient, self).__init__()
self.client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.client_socket.connect(("127.0.0.1", NODE_RED_CLIENT_PORT))
def sendOpenDoor(self):
self.client_socket.send(b'open')
Use with handler to safely close the server socket and connection
|
import threading
import socket
NODE_RED_SERVER_PORT = 4445
NODE_RED_CLIENT_PORT = 4444
class NodeRedDoorbellServerThread(threading.Thread):
"""
Get doorbell triggers from NodeRed.
"""
def __init__(self, intercom):
super(NodeRedDoorbellServerThread, self).__init__()
self.intercom = intercom
def run(self):
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as server_socket:
self.running = True
self.server_socket.bind(('', NODE_RED_SERVER_PORT))
self.server_socket.listen(1)
while self.running:
conn, addr = self.server_socket.accept()
with conn:
while self.running:
data = conn.recv(1024)
if not data:
print("no data breaking")
break
else:
self.intercom.onBellPressed()
class NodeRedDoorOpenClient():
"""
Send open door commands to NodeRed.
"""
def __init__(self):
super(NodeRedDoorOpenClient, self).__init__()
self.client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.client_socket.connect(("127.0.0.1", NODE_RED_CLIENT_PORT))
def sendOpenDoor(self):
self.client_socket.send(b'open')
|
<commit_before>import threading
import socket
NODE_RED_SERVER_PORT = 4445
NODE_RED_CLIENT_PORT = 4444
class NodeRedDoorbellServerThread(threading.Thread):
"""
Get doorbell triggers from NodeRed.
"""
def __init__(self, intercom):
super(NodeRedDoorbellServerThread, self).__init__()
self.intercom = intercom
self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.running = True
def run(self):
self.server_socket.bind(('', NODE_RED_SERVER_PORT))
self.server_socket.listen(1)
conn, addr = self.server_socket.accept()
while self.running:
data = conn.recv(1024)
if not data:
print("no data breaking")
break
else:
self.intercom.onBellPressed()
conn.close()
class NodeRedDoorOpenClient():
"""
Send open door commands to NodeRed.
"""
def __init__(self):
super(NodeRedDoorOpenClient, self).__init__()
self.client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.client_socket.connect(("127.0.0.1", NODE_RED_CLIENT_PORT))
def sendOpenDoor(self):
self.client_socket.send(b'open')
<commit_msg>Use with handler to safely close the server socket and connection<commit_after>
|
import threading
import socket
NODE_RED_SERVER_PORT = 4445
NODE_RED_CLIENT_PORT = 4444
class NodeRedDoorbellServerThread(threading.Thread):
"""
Get doorbell triggers from NodeRed.
"""
def __init__(self, intercom):
super(NodeRedDoorbellServerThread, self).__init__()
self.intercom = intercom
def run(self):
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as server_socket:
self.running = True
self.server_socket.bind(('', NODE_RED_SERVER_PORT))
self.server_socket.listen(1)
while self.running:
conn, addr = self.server_socket.accept()
with conn:
while self.running:
data = conn.recv(1024)
if not data:
print("no data breaking")
break
else:
self.intercom.onBellPressed()
class NodeRedDoorOpenClient():
"""
Send open door commands to NodeRed.
"""
def __init__(self):
super(NodeRedDoorOpenClient, self).__init__()
self.client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.client_socket.connect(("127.0.0.1", NODE_RED_CLIENT_PORT))
def sendOpenDoor(self):
self.client_socket.send(b'open')
|
import threading
import socket
NODE_RED_SERVER_PORT = 4445
NODE_RED_CLIENT_PORT = 4444
class NodeRedDoorbellServerThread(threading.Thread):
"""
Get doorbell triggers from NodeRed.
"""
def __init__(self, intercom):
super(NodeRedDoorbellServerThread, self).__init__()
self.intercom = intercom
self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.running = True
def run(self):
self.server_socket.bind(('', NODE_RED_SERVER_PORT))
self.server_socket.listen(1)
conn, addr = self.server_socket.accept()
while self.running:
data = conn.recv(1024)
if not data:
print("no data breaking")
break
else:
self.intercom.onBellPressed()
conn.close()
class NodeRedDoorOpenClient():
"""
Send open door commands to NodeRed.
"""
def __init__(self):
super(NodeRedDoorOpenClient, self).__init__()
self.client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.client_socket.connect(("127.0.0.1", NODE_RED_CLIENT_PORT))
def sendOpenDoor(self):
self.client_socket.send(b'open')
Use with handler to safely close the server socket and connectionimport threading
import socket
NODE_RED_SERVER_PORT = 4445
NODE_RED_CLIENT_PORT = 4444
class NodeRedDoorbellServerThread(threading.Thread):
"""
Get doorbell triggers from NodeRed.
"""
def __init__(self, intercom):
super(NodeRedDoorbellServerThread, self).__init__()
self.intercom = intercom
def run(self):
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as server_socket:
self.running = True
self.server_socket.bind(('', NODE_RED_SERVER_PORT))
self.server_socket.listen(1)
while self.running:
conn, addr = self.server_socket.accept()
with conn:
while self.running:
data = conn.recv(1024)
if not data:
print("no data breaking")
break
else:
self.intercom.onBellPressed()
class NodeRedDoorOpenClient():
"""
Send open door commands to NodeRed.
"""
def __init__(self):
super(NodeRedDoorOpenClient, self).__init__()
self.client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.client_socket.connect(("127.0.0.1", NODE_RED_CLIENT_PORT))
def sendOpenDoor(self):
self.client_socket.send(b'open')
|
<commit_before>import threading
import socket
NODE_RED_SERVER_PORT = 4445
NODE_RED_CLIENT_PORT = 4444
class NodeRedDoorbellServerThread(threading.Thread):
"""
Get doorbell triggers from NodeRed.
"""
def __init__(self, intercom):
super(NodeRedDoorbellServerThread, self).__init__()
self.intercom = intercom
self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.running = True
def run(self):
self.server_socket.bind(('', NODE_RED_SERVER_PORT))
self.server_socket.listen(1)
conn, addr = self.server_socket.accept()
while self.running:
data = conn.recv(1024)
if not data:
print("no data breaking")
break
else:
self.intercom.onBellPressed()
conn.close()
class NodeRedDoorOpenClient():
"""
Send open door commands to NodeRed.
"""
def __init__(self):
super(NodeRedDoorOpenClient, self).__init__()
self.client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.client_socket.connect(("127.0.0.1", NODE_RED_CLIENT_PORT))
def sendOpenDoor(self):
self.client_socket.send(b'open')
<commit_msg>Use with handler to safely close the server socket and connection<commit_after>import threading
import socket
NODE_RED_SERVER_PORT = 4445
NODE_RED_CLIENT_PORT = 4444
class NodeRedDoorbellServerThread(threading.Thread):
"""
Get doorbell triggers from NodeRed.
"""
def __init__(self, intercom):
super(NodeRedDoorbellServerThread, self).__init__()
self.intercom = intercom
def run(self):
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as server_socket:
self.running = True
self.server_socket.bind(('', NODE_RED_SERVER_PORT))
self.server_socket.listen(1)
while self.running:
conn, addr = self.server_socket.accept()
with conn:
while self.running:
data = conn.recv(1024)
if not data:
print("no data breaking")
break
else:
self.intercom.onBellPressed()
class NodeRedDoorOpenClient():
"""
Send open door commands to NodeRed.
"""
def __init__(self):
super(NodeRedDoorOpenClient, self).__init__()
self.client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.client_socket.connect(("127.0.0.1", NODE_RED_CLIENT_PORT))
def sendOpenDoor(self):
self.client_socket.send(b'open')
|
2f4debb95af4f61b0c4b8ac6d8d30394c8551dbc
|
main.py
|
main.py
|
import os.path
from srtmod.srtmod import SrtMod
import argparse
def main():
    """Parse CLI arguments and shift the timing of an .srt subtitle file."""
    parser = argparse.ArgumentParser(prog='SrtModPy',
        description='Just adjusts the timing of subtitle files')

    parser.add_argument('file_name', help='the subtitle to be modified')

    # NOTE(review): type=int rejects fractional offsets -- confirm whether
    # sub-second adjustments should be accepted (a later revision uses float).
    parser.add_argument('time_amount', metavar='time_amount', type=int,
        help='the amount of time to be added or subtracted to subtitle')

    parser.add_argument('time_part',
        help='it\'s the part of time to be modified', choices=['S', 'M'])

    parser.add_argument('operation',
        help='add or discount time to subtitle', choices=['/A', '/D'])

    r = parser.parse_args()

    time_amount = r.time_amount
    file_name = r.file_name
    time_part = r.time_part
    operation = r.operation

    try:
        # SrtMod writes "<name>(modified).srt" next to the input file.
        srt = SrtMod(file_name, time_amount, time_part, operation)
        if srt.process():
            print '\nsubtitle file was created successfully'
            print 'file saved at :' + os.path.splitext(file_name)[0] \
                + '(modified).srt'
        else:
            print '\nsubtitle can not be processed'
    except OverflowError:
        # Raised when the requested shift would move timestamps out of range.
        print 'Exception: Invalid time amount for this operation'

if __name__ == '__main__':
    main()
|
import os.path
from srtmod.srtmod import SrtMod
import argparse
def main():
    """Parse CLI arguments and shift the timing of an .srt subtitle file."""
    parser = argparse.ArgumentParser(prog='SrtModPy',
        description='Just adjusts the timing of subtitle files')

    parser.add_argument('file_name', help='the subtitle to be modified')

    # float so fractional (sub-second) offsets are accepted.
    parser.add_argument('time_amount', metavar='time_amount', type=float,
        help='the amount of time to be added or subtracted to subtitle')

    parser.add_argument('time_part',
        help='it\'s the part of time to be modified', choices=['S', 'M'])

    parser.add_argument('operation',
        help='add or discount time to subtitle', choices=['/A', '/D'])

    r = parser.parse_args()

    time_amount = r.time_amount
    file_name = r.file_name
    time_part = r.time_part
    operation = r.operation

    try:
        # SrtMod writes "<name>(modified).srt" next to the input file.
        srt = SrtMod(file_name, time_amount, time_part, operation)
        if srt.process():
            print 'subtitle file was created successfully'
            print 'file saved at :' + os.path.splitext(file_name)[0] \
                + '(modified).srt'
        else:
            print '\nsubtitle can not be processed'
    except OverflowError:
        # Raised when the requested shift would move timestamps out of range.
        print 'Exception: Invalid time amount for this operation'

if __name__ == '__main__':
    main()
|
Change time_amount type, now handle float values
|
Change time_amount type,now handle float values
|
Python
|
mit
|
sancayouth/SrtModPy
|
import os.path
from srtmod.srtmod import SrtMod
import argparse
def main():
parser = argparse.ArgumentParser(prog='SrtModPy',
description='Just adjusts the timing of subtitle files')
parser.add_argument('file_name', help='the subtitle to be modified')
parser.add_argument('time_amount', metavar='time_amount', type=int,
help='the amount of time to be added or subtracted to subtitle')
parser.add_argument('time_part',
help='it\'s the part of time to be modified', choices=['S', 'M'])
parser.add_argument('operation',
help='add or discount time to subtitle', choices=['/A', '/D'])
r = parser.parse_args()
time_amount = r.time_amount
file_name = r.file_name
time_part = r.time_part
operation = r.operation
try:
srt = SrtMod(file_name, time_amount, time_part, operation)
if srt.process():
print '\nsubtitle file was created successfully'
print 'file saved at :' + os.path.splitext(file_name)[0] \
+ '(modified).srt'
else:
print '\nsubtitle can not be processed'
except OverflowError:
print 'Exception: Invalid time amount for this operation'
if __name__ == '__main__':
main()Change time_amount type,now handle float values
|
import os.path
from srtmod.srtmod import SrtMod
import argparse
def main():
parser = argparse.ArgumentParser(prog='SrtModPy',
description='Just adjusts the timing of subtitle files')
parser.add_argument('file_name', help='the subtitle to be modified')
parser.add_argument('time_amount', metavar='time_amount', type=float,
help='the amount of time to be added or subtracted to subtitle')
parser.add_argument('time_part',
help='it\'s the part of time to be modified', choices=['S', 'M'])
parser.add_argument('operation',
help='add or discount time to subtitle', choices=['/A', '/D'])
r = parser.parse_args()
time_amount = r.time_amount
file_name = r.file_name
time_part = r.time_part
operation = r.operation
try:
srt = SrtMod(file_name, time_amount, time_part, operation)
if srt.process():
print 'subtitle file was created successfully'
print 'file saved at :' + os.path.splitext(file_name)[0] \
+ '(modified).srt'
else:
print '\nsubtitle can not be processed'
except OverflowError:
print 'Exception: Invalid time amount for this operation'
if __name__ == '__main__':
main()
|
<commit_before>import os.path
from srtmod.srtmod import SrtMod
import argparse
def main():
parser = argparse.ArgumentParser(prog='SrtModPy',
description='Just adjusts the timing of subtitle files')
parser.add_argument('file_name', help='the subtitle to be modified')
parser.add_argument('time_amount', metavar='time_amount', type=int,
help='the amount of time to be added or subtracted to subtitle')
parser.add_argument('time_part',
help='it\'s the part of time to be modified', choices=['S', 'M'])
parser.add_argument('operation',
help='add or discount time to subtitle', choices=['/A', '/D'])
r = parser.parse_args()
time_amount = r.time_amount
file_name = r.file_name
time_part = r.time_part
operation = r.operation
try:
srt = SrtMod(file_name, time_amount, time_part, operation)
if srt.process():
print '\nsubtitle file was created successfully'
print 'file saved at :' + os.path.splitext(file_name)[0] \
+ '(modified).srt'
else:
print '\nsubtitle can not be processed'
except OverflowError:
print 'Exception: Invalid time amount for this operation'
if __name__ == '__main__':
main()<commit_msg>Change time_amount type,now handle float values<commit_after>
|
import os.path
from srtmod.srtmod import SrtMod
import argparse
def main():
parser = argparse.ArgumentParser(prog='SrtModPy',
description='Just adjusts the timing of subtitle files')
parser.add_argument('file_name', help='the subtitle to be modified')
parser.add_argument('time_amount', metavar='time_amount', type=float,
help='the amount of time to be added or subtracted to subtitle')
parser.add_argument('time_part',
help='it\'s the part of time to be modified', choices=['S', 'M'])
parser.add_argument('operation',
help='add or discount time to subtitle', choices=['/A', '/D'])
r = parser.parse_args()
time_amount = r.time_amount
file_name = r.file_name
time_part = r.time_part
operation = r.operation
try:
srt = SrtMod(file_name, time_amount, time_part, operation)
if srt.process():
print 'subtitle file was created successfully'
print 'file saved at :' + os.path.splitext(file_name)[0] \
+ '(modified).srt'
else:
print '\nsubtitle can not be processed'
except OverflowError:
print 'Exception: Invalid time amount for this operation'
if __name__ == '__main__':
main()
|
import os.path
from srtmod.srtmod import SrtMod
import argparse
def main():
parser = argparse.ArgumentParser(prog='SrtModPy',
description='Just adjusts the timing of subtitle files')
parser.add_argument('file_name', help='the subtitle to be modified')
parser.add_argument('time_amount', metavar='time_amount', type=int,
help='the amount of time to be added or subtracted to subtitle')
parser.add_argument('time_part',
help='it\'s the part of time to be modified', choices=['S', 'M'])
parser.add_argument('operation',
help='add or discount time to subtitle', choices=['/A', '/D'])
r = parser.parse_args()
time_amount = r.time_amount
file_name = r.file_name
time_part = r.time_part
operation = r.operation
try:
srt = SrtMod(file_name, time_amount, time_part, operation)
if srt.process():
print '\nsubtitle file was created successfully'
print 'file saved at :' + os.path.splitext(file_name)[0] \
+ '(modified).srt'
else:
print '\nsubtitle can not be processed'
except OverflowError:
print 'Exception: Invalid time amount for this operation'
if __name__ == '__main__':
main()Change time_amount type,now handle float valuesimport os.path
from srtmod.srtmod import SrtMod
import argparse
def main():
parser = argparse.ArgumentParser(prog='SrtModPy',
description='Just adjusts the timing of subtitle files')
parser.add_argument('file_name', help='the subtitle to be modified')
parser.add_argument('time_amount', metavar='time_amount', type=float,
help='the amount of time to be added or subtracted to subtitle')
parser.add_argument('time_part',
help='it\'s the part of time to be modified', choices=['S', 'M'])
parser.add_argument('operation',
help='add or discount time to subtitle', choices=['/A', '/D'])
r = parser.parse_args()
time_amount = r.time_amount
file_name = r.file_name
time_part = r.time_part
operation = r.operation
try:
srt = SrtMod(file_name, time_amount, time_part, operation)
if srt.process():
print 'subtitle file was created successfully'
print 'file saved at :' + os.path.splitext(file_name)[0] \
+ '(modified).srt'
else:
print '\nsubtitle can not be processed'
except OverflowError:
print 'Exception: Invalid time amount for this operation'
if __name__ == '__main__':
main()
|
<commit_before>import os.path
from srtmod.srtmod import SrtMod
import argparse
def main():
parser = argparse.ArgumentParser(prog='SrtModPy',
description='Just adjusts the timing of subtitle files')
parser.add_argument('file_name', help='the subtitle to be modified')
parser.add_argument('time_amount', metavar='time_amount', type=int,
help='the amount of time to be added or subtracted to subtitle')
parser.add_argument('time_part',
help='it\'s the part of time to be modified', choices=['S', 'M'])
parser.add_argument('operation',
help='add or discount time to subtitle', choices=['/A', '/D'])
r = parser.parse_args()
time_amount = r.time_amount
file_name = r.file_name
time_part = r.time_part
operation = r.operation
try:
srt = SrtMod(file_name, time_amount, time_part, operation)
if srt.process():
print '\nsubtitle file was created successfully'
print 'file saved at :' + os.path.splitext(file_name)[0] \
+ '(modified).srt'
else:
print '\nsubtitle can not be processed'
except OverflowError:
print 'Exception: Invalid time amount for this operation'
if __name__ == '__main__':
main()<commit_msg>Change time_amount type,now handle float values<commit_after>import os.path
from srtmod.srtmod import SrtMod
import argparse
def main():
parser = argparse.ArgumentParser(prog='SrtModPy',
description='Just adjusts the timing of subtitle files')
parser.add_argument('file_name', help='the subtitle to be modified')
parser.add_argument('time_amount', metavar='time_amount', type=float,
help='the amount of time to be added or subtracted to subtitle')
parser.add_argument('time_part',
help='it\'s the part of time to be modified', choices=['S', 'M'])
parser.add_argument('operation',
help='add or discount time to subtitle', choices=['/A', '/D'])
r = parser.parse_args()
time_amount = r.time_amount
file_name = r.file_name
time_part = r.time_part
operation = r.operation
try:
srt = SrtMod(file_name, time_amount, time_part, operation)
if srt.process():
print 'subtitle file was created successfully'
print 'file saved at :' + os.path.splitext(file_name)[0] \
+ '(modified).srt'
else:
print '\nsubtitle can not be processed'
except OverflowError:
print 'Exception: Invalid time amount for this operation'
if __name__ == '__main__':
main()
|
eec72133d9245a4857c9a8954e235948a5fd9938
|
pokedex.py
|
pokedex.py
|
import json
class NationalDex:
def __init__(self, pathToNationalDex):
dexfile = open(pathToNationalDex, 'r')
self.dexdata = json.load(dexfile)
self.numberOfPokemon = len(self.dexdata.keys())
self.pokemonNames = []
self.pokemonSlugs = []
for i in range (1, self.numberOfPokemon+1):
dexKey = str(i).zfill(3)
name = self.dexdata[dexKey]['name']['eng']
slug = self.dexdata[dexKey]['slug']['eng']
self.pokemonNames.append(name)
self.pokemonSlugs.append(slug)
def pokemonNameForNumber(self, number):
return self.pokemon[number]
|
import json
class NationalDex:
def __init__(self, pathToNationalDex):
dexfile = open(pathToNationalDex, 'r')
self.dexdata = json.load(dexfile)
self.numberOfPokemon = len(self.dexdata.keys())
self.pokemonNames = []
self.pokemonSlugs = []
for i in range (1, self.numberOfPokemon+1):
dexKey = str(i).zfill(3)
name = self.dexdata[dexKey]['name']['eng']
slug = self.dexdata[dexKey]['slug']['eng']
self.pokemonNames.append(name)
self.pokemonSlugs.append(slug)
|
Remove unused method for getting Pokémon names
|
Remove unused method for getting Pokémon names
|
Python
|
bsd-2-clause
|
peterhajas/LivingDex,peterhajas/LivingDex,peterhajas/LivingDex,peterhajas/LivingDex
|
import json
class NationalDex:
def __init__(self, pathToNationalDex):
dexfile = open(pathToNationalDex, 'r')
self.dexdata = json.load(dexfile)
self.numberOfPokemon = len(self.dexdata.keys())
self.pokemonNames = []
self.pokemonSlugs = []
for i in range (1, self.numberOfPokemon+1):
dexKey = str(i).zfill(3)
name = self.dexdata[dexKey]['name']['eng']
slug = self.dexdata[dexKey]['slug']['eng']
self.pokemonNames.append(name)
self.pokemonSlugs.append(slug)
def pokemonNameForNumber(self, number):
return self.pokemon[number]
Remove unused method for getting Pokémon names
|
import json
class NationalDex:
def __init__(self, pathToNationalDex):
dexfile = open(pathToNationalDex, 'r')
self.dexdata = json.load(dexfile)
self.numberOfPokemon = len(self.dexdata.keys())
self.pokemonNames = []
self.pokemonSlugs = []
for i in range (1, self.numberOfPokemon+1):
dexKey = str(i).zfill(3)
name = self.dexdata[dexKey]['name']['eng']
slug = self.dexdata[dexKey]['slug']['eng']
self.pokemonNames.append(name)
self.pokemonSlugs.append(slug)
|
<commit_before>import json
class NationalDex:
def __init__(self, pathToNationalDex):
dexfile = open(pathToNationalDex, 'r')
self.dexdata = json.load(dexfile)
self.numberOfPokemon = len(self.dexdata.keys())
self.pokemonNames = []
self.pokemonSlugs = []
for i in range (1, self.numberOfPokemon+1):
dexKey = str(i).zfill(3)
name = self.dexdata[dexKey]['name']['eng']
slug = self.dexdata[dexKey]['slug']['eng']
self.pokemonNames.append(name)
self.pokemonSlugs.append(slug)
def pokemonNameForNumber(self, number):
return self.pokemon[number]
<commit_msg>Remove unused method for getting Pokémon names<commit_after>
|
import json
class NationalDex:
def __init__(self, pathToNationalDex):
dexfile = open(pathToNationalDex, 'r')
self.dexdata = json.load(dexfile)
self.numberOfPokemon = len(self.dexdata.keys())
self.pokemonNames = []
self.pokemonSlugs = []
for i in range (1, self.numberOfPokemon+1):
dexKey = str(i).zfill(3)
name = self.dexdata[dexKey]['name']['eng']
slug = self.dexdata[dexKey]['slug']['eng']
self.pokemonNames.append(name)
self.pokemonSlugs.append(slug)
|
import json
class NationalDex:
def __init__(self, pathToNationalDex):
dexfile = open(pathToNationalDex, 'r')
self.dexdata = json.load(dexfile)
self.numberOfPokemon = len(self.dexdata.keys())
self.pokemonNames = []
self.pokemonSlugs = []
for i in range (1, self.numberOfPokemon+1):
dexKey = str(i).zfill(3)
name = self.dexdata[dexKey]['name']['eng']
slug = self.dexdata[dexKey]['slug']['eng']
self.pokemonNames.append(name)
self.pokemonSlugs.append(slug)
def pokemonNameForNumber(self, number):
return self.pokemon[number]
Remove unused method for getting Pokémon namesimport json
class NationalDex:
def __init__(self, pathToNationalDex):
dexfile = open(pathToNationalDex, 'r')
self.dexdata = json.load(dexfile)
self.numberOfPokemon = len(self.dexdata.keys())
self.pokemonNames = []
self.pokemonSlugs = []
for i in range (1, self.numberOfPokemon+1):
dexKey = str(i).zfill(3)
name = self.dexdata[dexKey]['name']['eng']
slug = self.dexdata[dexKey]['slug']['eng']
self.pokemonNames.append(name)
self.pokemonSlugs.append(slug)
|
<commit_before>import json
class NationalDex:
def __init__(self, pathToNationalDex):
dexfile = open(pathToNationalDex, 'r')
self.dexdata = json.load(dexfile)
self.numberOfPokemon = len(self.dexdata.keys())
self.pokemonNames = []
self.pokemonSlugs = []
for i in range (1, self.numberOfPokemon+1):
dexKey = str(i).zfill(3)
name = self.dexdata[dexKey]['name']['eng']
slug = self.dexdata[dexKey]['slug']['eng']
self.pokemonNames.append(name)
self.pokemonSlugs.append(slug)
def pokemonNameForNumber(self, number):
return self.pokemon[number]
<commit_msg>Remove unused method for getting Pokémon names<commit_after>import json
class NationalDex:
def __init__(self, pathToNationalDex):
dexfile = open(pathToNationalDex, 'r')
self.dexdata = json.load(dexfile)
self.numberOfPokemon = len(self.dexdata.keys())
self.pokemonNames = []
self.pokemonSlugs = []
for i in range (1, self.numberOfPokemon+1):
dexKey = str(i).zfill(3)
name = self.dexdata[dexKey]['name']['eng']
slug = self.dexdata[dexKey]['slug']['eng']
self.pokemonNames.append(name)
self.pokemonSlugs.append(slug)
|
8f8b313a1b5118b6528e5152252128e075de0401
|
tests/test_terrain.py
|
tests/test_terrain.py
|
import unittest
from randterrainpy import *
class TerrainTesterPy(unittest.TestCase):
def setUp(self):
pass
|
import unittest
from randterrainpy import *
class TerrainTesterPy(unittest.TestCase):
def setUp(self):
self.ter1 = Terrain(1, 1)
self.ter2 = Terrain(2, 4)
self.ter3 = Terrain(1, 1)
def test_getitem(self):
self.assertEqual(self.ter1[0, 0], 0)
self.assertEqual(self.ter2[1, 2], 0)
def test_eq(self):
self.assertEqual(self.ter1, self.ter3)
self.assertNotEqual(self.ter1, self.ter2)
def test_setitem(self):
self.ter1[0, 0] = 1
self.assertEqual(self.ter1[0, 0], 1)
self.ter2[1, 2] = 0.5
self.assertEqual(self.ter2[1, 2], 0.5)
def test_add(self):
self.assertRaises(InvalidDimensionsError, self.ter1.__add__, self.ter2)
self.assertEqual(self.ter1+self.ter3, self.ter1)
if __name__ == "__main__":
unittest.main()
|
Add tests for indexing, equality and addition for Terrain
|
Add tests for indexing, equality and addition for Terrain
|
Python
|
mit
|
jackromo/RandTerrainPy
|
import unittest
from randterrainpy import *
class TerrainTesterPy(unittest.TestCase):
def setUp(self):
pass
Add tests for indexing, equality and addition for Terrain
|
import unittest
from randterrainpy import *
class TerrainTesterPy(unittest.TestCase):
def setUp(self):
self.ter1 = Terrain(1, 1)
self.ter2 = Terrain(2, 4)
self.ter3 = Terrain(1, 1)
def test_getitem(self):
self.assertEqual(self.ter1[0, 0], 0)
self.assertEqual(self.ter2[1, 2], 0)
def test_eq(self):
self.assertEqual(self.ter1, self.ter3)
self.assertNotEqual(self.ter1, self.ter2)
def test_setitem(self):
self.ter1[0, 0] = 1
self.assertEqual(self.ter1[0, 0], 1)
self.ter2[1, 2] = 0.5
self.assertEqual(self.ter2[1, 2], 0.5)
def test_add(self):
self.assertRaises(InvalidDimensionsError, self.ter1.__add__, self.ter2)
self.assertEqual(self.ter1+self.ter3, self.ter1)
if __name__ == "__main__":
unittest.main()
|
<commit_before>import unittest
from randterrainpy import *
class TerrainTesterPy(unittest.TestCase):
def setUp(self):
pass
<commit_msg>Add tests for indexing, equality and addition for Terrain<commit_after>
|
import unittest
from randterrainpy import *
class TerrainTesterPy(unittest.TestCase):
def setUp(self):
self.ter1 = Terrain(1, 1)
self.ter2 = Terrain(2, 4)
self.ter3 = Terrain(1, 1)
def test_getitem(self):
self.assertEqual(self.ter1[0, 0], 0)
self.assertEqual(self.ter2[1, 2], 0)
def test_eq(self):
self.assertEqual(self.ter1, self.ter3)
self.assertNotEqual(self.ter1, self.ter2)
def test_setitem(self):
self.ter1[0, 0] = 1
self.assertEqual(self.ter1[0, 0], 1)
self.ter2[1, 2] = 0.5
self.assertEqual(self.ter2[1, 2], 0.5)
def test_add(self):
self.assertRaises(InvalidDimensionsError, self.ter1.__add__, self.ter2)
self.assertEqual(self.ter1+self.ter3, self.ter1)
if __name__ == "__main__":
unittest.main()
|
import unittest
from randterrainpy import *
class TerrainTesterPy(unittest.TestCase):
def setUp(self):
pass
Add tests for indexing, equality and addition for Terrainimport unittest
from randterrainpy import *
class TerrainTesterPy(unittest.TestCase):
def setUp(self):
self.ter1 = Terrain(1, 1)
self.ter2 = Terrain(2, 4)
self.ter3 = Terrain(1, 1)
def test_getitem(self):
self.assertEqual(self.ter1[0, 0], 0)
self.assertEqual(self.ter2[1, 2], 0)
def test_eq(self):
self.assertEqual(self.ter1, self.ter3)
self.assertNotEqual(self.ter1, self.ter2)
def test_setitem(self):
self.ter1[0, 0] = 1
self.assertEqual(self.ter1[0, 0], 1)
self.ter2[1, 2] = 0.5
self.assertEqual(self.ter2[1, 2], 0.5)
def test_add(self):
self.assertRaises(InvalidDimensionsError, self.ter1.__add__, self.ter2)
self.assertEqual(self.ter1+self.ter3, self.ter1)
if __name__ == "__main__":
unittest.main()
|
<commit_before>import unittest
from randterrainpy import *
class TerrainTesterPy(unittest.TestCase):
def setUp(self):
pass
<commit_msg>Add tests for indexing, equality and addition for Terrain<commit_after>import unittest
from randterrainpy import *
class TerrainTesterPy(unittest.TestCase):
def setUp(self):
self.ter1 = Terrain(1, 1)
self.ter2 = Terrain(2, 4)
self.ter3 = Terrain(1, 1)
def test_getitem(self):
self.assertEqual(self.ter1[0, 0], 0)
self.assertEqual(self.ter2[1, 2], 0)
def test_eq(self):
self.assertEqual(self.ter1, self.ter3)
self.assertNotEqual(self.ter1, self.ter2)
def test_setitem(self):
self.ter1[0, 0] = 1
self.assertEqual(self.ter1[0, 0], 1)
self.ter2[1, 2] = 0.5
self.assertEqual(self.ter2[1, 2], 0.5)
def test_add(self):
self.assertRaises(InvalidDimensionsError, self.ter1.__add__, self.ter2)
self.assertEqual(self.ter1+self.ter3, self.ter1)
if __name__ == "__main__":
unittest.main()
|
d3428351e005897f45bec1f4db61d776d2d9a962
|
tests/test_migrate.py
|
tests/test_migrate.py
|
from tinydb import TinyDB, where
from tinydb.migrate import migrate
v1_0 = """
{
"_default": [{"key": "value", "_id": 1}],
"table": [{"key": "value", "_id": 2}]
}
"""
def test_upgrade(tmpdir):
db_file = tmpdir.join('db.json')
db_file.write(v1_0)
# Run upgrade
migrate(str(db_file))
db = TinyDB(str(db_file))
assert db.count(where('key') == 'value') == 1
|
import pytest
from tinydb import TinyDB, where
from tinydb.migrate import migrate
v1_0 = """
{
"_default": [{"key": "value", "_id": 1}],
"table": [{"key": "value", "_id": 2}]
}
"""
def test_open_old(tmpdir):
# Make sure that opening an old database results in an exception and not
# in data loss
db_file = tmpdir.join('db.json')
db_file.write(v1_0)
with pytest.raises(Exception):
TinyDB(str(db_file))
def test_upgrade(tmpdir):
db_file = tmpdir.join('db.json')
db_file.write(v1_0)
# Run upgrade
migrate(str(db_file))
db = TinyDB(str(db_file))
assert db.count(where('key') == 'value') == 1
|
Test that opening an old database fails
|
Test that opening an old database fails
|
Python
|
mit
|
cagnosolutions/tinydb,Callwoola/tinydb,ivankravets/tinydb,raquel-ucl/tinydb,msiemens/tinydb
|
from tinydb import TinyDB, where
from tinydb.migrate import migrate
v1_0 = """
{
"_default": [{"key": "value", "_id": 1}],
"table": [{"key": "value", "_id": 2}]
}
"""
def test_upgrade(tmpdir):
db_file = tmpdir.join('db.json')
db_file.write(v1_0)
# Run upgrade
migrate(str(db_file))
db = TinyDB(str(db_file))
assert db.count(where('key') == 'value') == 1
Test that opening an old database fails
|
import pytest
from tinydb import TinyDB, where
from tinydb.migrate import migrate
v1_0 = """
{
"_default": [{"key": "value", "_id": 1}],
"table": [{"key": "value", "_id": 2}]
}
"""
def test_open_old(tmpdir):
# Make sure that opening an old database results in an exception and not
# in data loss
db_file = tmpdir.join('db.json')
db_file.write(v1_0)
with pytest.raises(Exception):
TinyDB(str(db_file))
def test_upgrade(tmpdir):
db_file = tmpdir.join('db.json')
db_file.write(v1_0)
# Run upgrade
migrate(str(db_file))
db = TinyDB(str(db_file))
assert db.count(where('key') == 'value') == 1
|
<commit_before>from tinydb import TinyDB, where
from tinydb.migrate import migrate
v1_0 = """
{
"_default": [{"key": "value", "_id": 1}],
"table": [{"key": "value", "_id": 2}]
}
"""
def test_upgrade(tmpdir):
db_file = tmpdir.join('db.json')
db_file.write(v1_0)
# Run upgrade
migrate(str(db_file))
db = TinyDB(str(db_file))
assert db.count(where('key') == 'value') == 1
<commit_msg>Test that opening an old database fails<commit_after>
|
import pytest
from tinydb import TinyDB, where
from tinydb.migrate import migrate
v1_0 = """
{
"_default": [{"key": "value", "_id": 1}],
"table": [{"key": "value", "_id": 2}]
}
"""
def test_open_old(tmpdir):
# Make sure that opening an old database results in an exception and not
# in data loss
db_file = tmpdir.join('db.json')
db_file.write(v1_0)
with pytest.raises(Exception):
TinyDB(str(db_file))
def test_upgrade(tmpdir):
db_file = tmpdir.join('db.json')
db_file.write(v1_0)
# Run upgrade
migrate(str(db_file))
db = TinyDB(str(db_file))
assert db.count(where('key') == 'value') == 1
|
from tinydb import TinyDB, where
from tinydb.migrate import migrate
v1_0 = """
{
"_default": [{"key": "value", "_id": 1}],
"table": [{"key": "value", "_id": 2}]
}
"""
def test_upgrade(tmpdir):
db_file = tmpdir.join('db.json')
db_file.write(v1_0)
# Run upgrade
migrate(str(db_file))
db = TinyDB(str(db_file))
assert db.count(where('key') == 'value') == 1
Test that opening an old database failsimport pytest
from tinydb import TinyDB, where
from tinydb.migrate import migrate
v1_0 = """
{
"_default": [{"key": "value", "_id": 1}],
"table": [{"key": "value", "_id": 2}]
}
"""
def test_open_old(tmpdir):
# Make sure that opening an old database results in an exception and not
# in data loss
db_file = tmpdir.join('db.json')
db_file.write(v1_0)
with pytest.raises(Exception):
TinyDB(str(db_file))
def test_upgrade(tmpdir):
db_file = tmpdir.join('db.json')
db_file.write(v1_0)
# Run upgrade
migrate(str(db_file))
db = TinyDB(str(db_file))
assert db.count(where('key') == 'value') == 1
|
<commit_before>from tinydb import TinyDB, where
from tinydb.migrate import migrate
v1_0 = """
{
"_default": [{"key": "value", "_id": 1}],
"table": [{"key": "value", "_id": 2}]
}
"""
def test_upgrade(tmpdir):
db_file = tmpdir.join('db.json')
db_file.write(v1_0)
# Run upgrade
migrate(str(db_file))
db = TinyDB(str(db_file))
assert db.count(where('key') == 'value') == 1
<commit_msg>Test that opening an old database fails<commit_after>import pytest
from tinydb import TinyDB, where
from tinydb.migrate import migrate
v1_0 = """
{
"_default": [{"key": "value", "_id": 1}],
"table": [{"key": "value", "_id": 2}]
}
"""
def test_open_old(tmpdir):
# Make sure that opening an old database results in an exception and not
# in data loss
db_file = tmpdir.join('db.json')
db_file.write(v1_0)
with pytest.raises(Exception):
TinyDB(str(db_file))
def test_upgrade(tmpdir):
db_file = tmpdir.join('db.json')
db_file.write(v1_0)
# Run upgrade
migrate(str(db_file))
db = TinyDB(str(db_file))
assert db.count(where('key') == 'value') == 1
|
2509badb90a23c7c8b85c146f960bcc7bb8d57aa
|
postgresql/protocol/version.py
|
postgresql/protocol/version.py
|
##
# .protocol.version
##
'PQ version class'
from struct import Struct
version_struct = Struct('!HH')
class Version(tuple):
"""Version((major, minor)) -> Version
Version serializer and parser.
"""
major = property(fget = lambda s: s[0])
minor = property(fget = lambda s: s[1])
def __new__(subtype, major_minor : '(major, minor)'):
(major, minor) = major_minor
major = int(major)
minor = int(minor)
# If it can't be packed like this, it's not a valid version.
try:
version_struct.pack(major, minor)
except Exception as e:
raise ValueError("unpackable major and minor") from e
return tuple.__new__(subtype, (major, minor))
def __int__(self):
return (self[0] << 16) | self[1]
def bytes(self):
return version_struct.pack(self[0], self[1])
def __repr__(self):
return '%d.%d' %(self[0], self[1])
def parse(self, data):
return self(version_struct.unpack(data))
parse = classmethod(parse)
CancelRequestCode = Version((1234, 5678))
NegotiateSSLCode = Version((1234, 5679))
V2_0 = Version((2, 0))
V3_0 = Version((3, 0))
|
##
# .protocol.version
##
"""
PQ version class used by startup messages.
"""
from struct import Struct
version_struct = Struct('!HH')
class Version(tuple):
"""
Version((major, minor)) -> Version
Version serializer and parser.
"""
major = property(fget = lambda s: s[0])
minor = property(fget = lambda s: s[1])
def __new__(subtype, major_minor):
(major, minor) = major_minor
major = int(major)
minor = int(minor)
# If it can't be packed like this, it's not a valid version.
try:
version_struct.pack(major, minor)
except Exception as e:
raise ValueError("unpackable major and minor") from e
return tuple.__new__(subtype, (major, minor))
def __int__(self):
return (self[0] << 16) | self[1]
def bytes(self):
return version_struct.pack(self[0], self[1])
def __repr__(self):
return '%d.%d' %(self[0], self[1])
def parse(self, data):
return self(version_struct.unpack(data))
parse = classmethod(parse)
CancelRequestCode = Version((1234, 5678))
NegotiateSSLCode = Version((1234, 5679))
V2_0 = Version((2, 0))
V3_0 = Version((3, 0))
|
Remove string annotation (likely incomprehensible to mypy) and minor doc-string formatting.
|
Remove string annotation (likely incomprehensible to mypy) and minor doc-string formatting.
|
Python
|
bsd-3-clause
|
python-postgres/fe,python-postgres/fe
|
##
# .protocol.version
##
'PQ version class'
from struct import Struct
version_struct = Struct('!HH')
class Version(tuple):
"""Version((major, minor)) -> Version
Version serializer and parser.
"""
major = property(fget = lambda s: s[0])
minor = property(fget = lambda s: s[1])
def __new__(subtype, major_minor : '(major, minor)'):
(major, minor) = major_minor
major = int(major)
minor = int(minor)
# If it can't be packed like this, it's not a valid version.
try:
version_struct.pack(major, minor)
except Exception as e:
raise ValueError("unpackable major and minor") from e
return tuple.__new__(subtype, (major, minor))
def __int__(self):
return (self[0] << 16) | self[1]
def bytes(self):
return version_struct.pack(self[0], self[1])
def __repr__(self):
return '%d.%d' %(self[0], self[1])
def parse(self, data):
return self(version_struct.unpack(data))
parse = classmethod(parse)
CancelRequestCode = Version((1234, 5678))
NegotiateSSLCode = Version((1234, 5679))
V2_0 = Version((2, 0))
V3_0 = Version((3, 0))
Remove string annotation (likely incomprehensible to mypy) and minor doc-string formatting.
|
##
# .protocol.version
##
"""
PQ version class used by startup messages.
"""
from struct import Struct
version_struct = Struct('!HH')
class Version(tuple):
"""
Version((major, minor)) -> Version
Version serializer and parser.
"""
major = property(fget = lambda s: s[0])
minor = property(fget = lambda s: s[1])
def __new__(subtype, major_minor):
(major, minor) = major_minor
major = int(major)
minor = int(minor)
# If it can't be packed like this, it's not a valid version.
try:
version_struct.pack(major, minor)
except Exception as e:
raise ValueError("unpackable major and minor") from e
return tuple.__new__(subtype, (major, minor))
def __int__(self):
return (self[0] << 16) | self[1]
def bytes(self):
return version_struct.pack(self[0], self[1])
def __repr__(self):
return '%d.%d' %(self[0], self[1])
def parse(self, data):
return self(version_struct.unpack(data))
parse = classmethod(parse)
CancelRequestCode = Version((1234, 5678))
NegotiateSSLCode = Version((1234, 5679))
V2_0 = Version((2, 0))
V3_0 = Version((3, 0))
|
<commit_before>##
# .protocol.version
##
'PQ version class'
from struct import Struct
version_struct = Struct('!HH')
class Version(tuple):
"""Version((major, minor)) -> Version
Version serializer and parser.
"""
major = property(fget = lambda s: s[0])
minor = property(fget = lambda s: s[1])
def __new__(subtype, major_minor : '(major, minor)'):
(major, minor) = major_minor
major = int(major)
minor = int(minor)
# If it can't be packed like this, it's not a valid version.
try:
version_struct.pack(major, minor)
except Exception as e:
raise ValueError("unpackable major and minor") from e
return tuple.__new__(subtype, (major, minor))
def __int__(self):
return (self[0] << 16) | self[1]
def bytes(self):
return version_struct.pack(self[0], self[1])
def __repr__(self):
return '%d.%d' %(self[0], self[1])
def parse(self, data):
return self(version_struct.unpack(data))
parse = classmethod(parse)
CancelRequestCode = Version((1234, 5678))
NegotiateSSLCode = Version((1234, 5679))
V2_0 = Version((2, 0))
V3_0 = Version((3, 0))
<commit_msg>Remove string annotation (likely incomprehensible to mypy) and minor doc-string formatting.<commit_after>
|
##
# .protocol.version
##
"""
PQ version class used by startup messages.
"""
from struct import Struct
version_struct = Struct('!HH')
class Version(tuple):
"""
Version((major, minor)) -> Version
Version serializer and parser.
"""
major = property(fget = lambda s: s[0])
minor = property(fget = lambda s: s[1])
def __new__(subtype, major_minor):
(major, minor) = major_minor
major = int(major)
minor = int(minor)
# If it can't be packed like this, it's not a valid version.
try:
version_struct.pack(major, minor)
except Exception as e:
raise ValueError("unpackable major and minor") from e
return tuple.__new__(subtype, (major, minor))
def __int__(self):
return (self[0] << 16) | self[1]
def bytes(self):
return version_struct.pack(self[0], self[1])
def __repr__(self):
return '%d.%d' %(self[0], self[1])
def parse(self, data):
return self(version_struct.unpack(data))
parse = classmethod(parse)
CancelRequestCode = Version((1234, 5678))
NegotiateSSLCode = Version((1234, 5679))
V2_0 = Version((2, 0))
V3_0 = Version((3, 0))
|
##
# .protocol.version
##
'PQ version class'
from struct import Struct
version_struct = Struct('!HH')
class Version(tuple):
"""Version((major, minor)) -> Version
Version serializer and parser.
"""
major = property(fget = lambda s: s[0])
minor = property(fget = lambda s: s[1])
def __new__(subtype, major_minor : '(major, minor)'):
(major, minor) = major_minor
major = int(major)
minor = int(minor)
# If it can't be packed like this, it's not a valid version.
try:
version_struct.pack(major, minor)
except Exception as e:
raise ValueError("unpackable major and minor") from e
return tuple.__new__(subtype, (major, minor))
def __int__(self):
return (self[0] << 16) | self[1]
def bytes(self):
return version_struct.pack(self[0], self[1])
def __repr__(self):
return '%d.%d' %(self[0], self[1])
def parse(self, data):
return self(version_struct.unpack(data))
parse = classmethod(parse)
CancelRequestCode = Version((1234, 5678))
NegotiateSSLCode = Version((1234, 5679))
V2_0 = Version((2, 0))
V3_0 = Version((3, 0))
Remove string annotation (likely incomprehensible to mypy) and minor doc-string formatting.##
# .protocol.version
##
"""
PQ version class used by startup messages.
"""
from struct import Struct
version_struct = Struct('!HH')
class Version(tuple):
"""
Version((major, minor)) -> Version
Version serializer and parser.
"""
major = property(fget = lambda s: s[0])
minor = property(fget = lambda s: s[1])
def __new__(subtype, major_minor):
(major, minor) = major_minor
major = int(major)
minor = int(minor)
# If it can't be packed like this, it's not a valid version.
try:
version_struct.pack(major, minor)
except Exception as e:
raise ValueError("unpackable major and minor") from e
return tuple.__new__(subtype, (major, minor))
def __int__(self):
return (self[0] << 16) | self[1]
def bytes(self):
return version_struct.pack(self[0], self[1])
def __repr__(self):
return '%d.%d' %(self[0], self[1])
def parse(self, data):
return self(version_struct.unpack(data))
parse = classmethod(parse)
CancelRequestCode = Version((1234, 5678))
NegotiateSSLCode = Version((1234, 5679))
V2_0 = Version((2, 0))
V3_0 = Version((3, 0))
|
<commit_before>##
# .protocol.version
##
'PQ version class'
from struct import Struct
version_struct = Struct('!HH')
class Version(tuple):
"""Version((major, minor)) -> Version
Version serializer and parser.
"""
major = property(fget = lambda s: s[0])
minor = property(fget = lambda s: s[1])
def __new__(subtype, major_minor : '(major, minor)'):
(major, minor) = major_minor
major = int(major)
minor = int(minor)
# If it can't be packed like this, it's not a valid version.
try:
version_struct.pack(major, minor)
except Exception as e:
raise ValueError("unpackable major and minor") from e
return tuple.__new__(subtype, (major, minor))
def __int__(self):
return (self[0] << 16) | self[1]
def bytes(self):
return version_struct.pack(self[0], self[1])
def __repr__(self):
return '%d.%d' %(self[0], self[1])
def parse(self, data):
return self(version_struct.unpack(data))
parse = classmethod(parse)
CancelRequestCode = Version((1234, 5678))
NegotiateSSLCode = Version((1234, 5679))
V2_0 = Version((2, 0))
V3_0 = Version((3, 0))
<commit_msg>Remove string annotation (likely incomprehensible to mypy) and minor doc-string formatting.<commit_after>##
# .protocol.version
##
"""
PQ version class used by startup messages.
"""
from struct import Struct
version_struct = Struct('!HH')
class Version(tuple):
"""
Version((major, minor)) -> Version
Version serializer and parser.
"""
major = property(fget = lambda s: s[0])
minor = property(fget = lambda s: s[1])
def __new__(subtype, major_minor):
(major, minor) = major_minor
major = int(major)
minor = int(minor)
# If it can't be packed like this, it's not a valid version.
try:
version_struct.pack(major, minor)
except Exception as e:
raise ValueError("unpackable major and minor") from e
return tuple.__new__(subtype, (major, minor))
def __int__(self):
return (self[0] << 16) | self[1]
def bytes(self):
return version_struct.pack(self[0], self[1])
def __repr__(self):
return '%d.%d' %(self[0], self[1])
def parse(self, data):
return self(version_struct.unpack(data))
parse = classmethod(parse)
CancelRequestCode = Version((1234, 5678))
NegotiateSSLCode = Version((1234, 5679))
V2_0 = Version((2, 0))
V3_0 = Version((3, 0))
|
61af785becd452facb92292260149b5e2b20a489
|
sheldon/__init__.py
|
sheldon/__init__.py
|
# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
__author__ = 'Seva Zhidkov'
__version__ = '0.1'
__email__ = 'zhidkovseva@gmail.com'
# Bot file contains bot's main class - Sheldon
# Utils folder contains scripts for more
# comfortable work with sending and parsing
# messages. For example, script for downloading
# files by url.
from sheldon.bot import *
from sheldon.utils import *
|
# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
__author__ = 'Seva Zhidkov'
__version__ = '0.1'
__email__ = 'zhidkovseva@gmail.com'
# Bot file contains bot's main class - Sheldon
# Utils folder contains scripts for more
# comfortable work with sending and parsing
# messages. For example, script for downloading
# files by url.
from sheldon.bot import *
from sheldon.hooks import *
from sheldon.utils import *
|
Add sheldon hooks to init file
|
Add sheldon hooks to init file
|
Python
|
mit
|
lises/sheldon
|
# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
__author__ = 'Seva Zhidkov'
__version__ = '0.1'
__email__ = 'zhidkovseva@gmail.com'
# Bot file contains bot's main class - Sheldon
# Utils folder contains scripts for more
# comfortable work with sending and parsing
# messages. For example, script for downloading
# files by url.
from sheldon.bot import *
from sheldon.utils import *Add sheldon hooks to init file
|
# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
__author__ = 'Seva Zhidkov'
__version__ = '0.1'
__email__ = 'zhidkovseva@gmail.com'
# Bot file contains bot's main class - Sheldon
# Utils folder contains scripts for more
# comfortable work with sending and parsing
# messages. For example, script for downloading
# files by url.
from sheldon.bot import *
from sheldon.hooks import *
from sheldon.utils import *
|
<commit_before># -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
__author__ = 'Seva Zhidkov'
__version__ = '0.1'
__email__ = 'zhidkovseva@gmail.com'
# Bot file contains bot's main class - Sheldon
# Utils folder contains scripts for more
# comfortable work with sending and parsing
# messages. For example, script for downloading
# files by url.
from sheldon.bot import *
from sheldon.utils import *<commit_msg>Add sheldon hooks to init file<commit_after>
|
# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
__author__ = 'Seva Zhidkov'
__version__ = '0.1'
__email__ = 'zhidkovseva@gmail.com'
# Bot file contains bot's main class - Sheldon
# Utils folder contains scripts for more
# comfortable work with sending and parsing
# messages. For example, script for downloading
# files by url.
from sheldon.bot import *
from sheldon.hooks import *
from sheldon.utils import *
|
# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
__author__ = 'Seva Zhidkov'
__version__ = '0.1'
__email__ = 'zhidkovseva@gmail.com'
# Bot file contains bot's main class - Sheldon
# Utils folder contains scripts for more
# comfortable work with sending and parsing
# messages. For example, script for downloading
# files by url.
from sheldon.bot import *
from sheldon.utils import *Add sheldon hooks to init file# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
__author__ = 'Seva Zhidkov'
__version__ = '0.1'
__email__ = 'zhidkovseva@gmail.com'
# Bot file contains bot's main class - Sheldon
# Utils folder contains scripts for more
# comfortable work with sending and parsing
# messages. For example, script for downloading
# files by url.
from sheldon.bot import *
from sheldon.hooks import *
from sheldon.utils import *
|
<commit_before># -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
__author__ = 'Seva Zhidkov'
__version__ = '0.1'
__email__ = 'zhidkovseva@gmail.com'
# Bot file contains bot's main class - Sheldon
# Utils folder contains scripts for more
# comfortable work with sending and parsing
# messages. For example, script for downloading
# files by url.
from sheldon.bot import *
from sheldon.utils import *<commit_msg>Add sheldon hooks to init file<commit_after># -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
__author__ = 'Seva Zhidkov'
__version__ = '0.1'
__email__ = 'zhidkovseva@gmail.com'
# Bot file contains bot's main class - Sheldon
# Utils folder contains scripts for more
# comfortable work with sending and parsing
# messages. For example, script for downloading
# files by url.
from sheldon.bot import *
from sheldon.hooks import *
from sheldon.utils import *
|
94142e31d4189fbcf152eeb6b9ad89d684f1a6d0
|
autoload/splicelib/util/io.py
|
autoload/splicelib/util/io.py
|
import sys
def error(m):
sys.stderr.write(str(m) + '\n')
|
import sys
import vim
def error(m):
sys.stderr.write(str(m) + '\n')
def echomsg(m):
vim.command('echomsg "%s"' % m)
|
Add a utility for echoing in the IO utils.
|
Add a utility for echoing in the IO utils.
|
Python
|
mit
|
sjl/splice.vim,sjl/splice.vim
|
import sys
def error(m):
sys.stderr.write(str(m) + '\n')
Add a utility for echoing in the IO utils.
|
import sys
import vim
def error(m):
sys.stderr.write(str(m) + '\n')
def echomsg(m):
vim.command('echomsg "%s"' % m)
|
<commit_before>import sys
def error(m):
sys.stderr.write(str(m) + '\n')
<commit_msg>Add a utility for echoing in the IO utils.<commit_after>
|
import sys
import vim
def error(m):
sys.stderr.write(str(m) + '\n')
def echomsg(m):
vim.command('echomsg "%s"' % m)
|
import sys
def error(m):
sys.stderr.write(str(m) + '\n')
Add a utility for echoing in the IO utils.import sys
import vim
def error(m):
sys.stderr.write(str(m) + '\n')
def echomsg(m):
vim.command('echomsg "%s"' % m)
|
<commit_before>import sys
def error(m):
sys.stderr.write(str(m) + '\n')
<commit_msg>Add a utility for echoing in the IO utils.<commit_after>import sys
import vim
def error(m):
sys.stderr.write(str(m) + '\n')
def echomsg(m):
vim.command('echomsg "%s"' % m)
|
8f3249904ede8e6ac4fd1398f3d059335a65c8c6
|
galpy/df.py
|
galpy/df.py
|
from galpy.df_src import diskdf
from galpy.df_src import surfaceSigmaProfile
from galpy.df_src import evolveddiskdf
from galpy.df_src import quasiisothermaldf
from galpy.df_src import streamdf
from galpy.df_src import streamgapdf
#
# Classes
#
shudf= diskdf.shudf
dehnendf= diskdf.dehnendf
DFcorrection= diskdf.DFcorrection
diskdf= diskdf.diskdf
evolveddiskdf= evolveddiskdf.evolveddiskdf
expSurfaceSigmaProfile= surfaceSigmaProfile.expSurfaceSigmaProfile
surfaceSigmaProfile= surfaceSigmaProfile.surfaceSigmaProfile
quasiisothermaldf= quasiisothermaldf.quasiisothermaldf
streamdf= streamdf.streamdf
streamgapdf= streamgapdf.streamgapdf
|
from galpy.df_src import diskdf
from galpy.df_src import surfaceSigmaProfile
from galpy.df_src import evolveddiskdf
from galpy.df_src import quasiisothermaldf
from galpy.df_src import streamdf
from galpy.df_src import streamgapdf
#
# Functions
#
impulse_deltav_plummer= streamgapdf.impulse_deltav_plummer
impulse_deltav_plummer_curvedstream= streamgapdf.impulse_deltav_plummer_curvedstream
impulse_deltav_hernquist= streamgapdf.impulse_deltav_hernquist
impulse_deltav_hernquist_curvedstream= streamgapdf.impulse_deltav_hernquist_curvedstream
impulse_deltav_general= streamgapdf.impulse_deltav_general
impulse_deltav_general_curvedstream= streamgapdf.impulse_deltav_general_curvedstream
impulse_deltav_general_orbitintegration= streamgapdf.impulse_deltav_general_orbitintegration
impulse_deltav_general_fullplummerintegration= streamgapdf.impulse_deltav_general_fullplummerintegration
#
# Classes
#
shudf= diskdf.shudf
dehnendf= diskdf.dehnendf
DFcorrection= diskdf.DFcorrection
diskdf= diskdf.diskdf
evolveddiskdf= evolveddiskdf.evolveddiskdf
expSurfaceSigmaProfile= surfaceSigmaProfile.expSurfaceSigmaProfile
surfaceSigmaProfile= surfaceSigmaProfile.surfaceSigmaProfile
quasiisothermaldf= quasiisothermaldf.quasiisothermaldf
streamdf= streamdf.streamdf
streamgapdf= streamgapdf.streamgapdf
|
Add impulse functions to top level
|
Add impulse functions to top level
|
Python
|
bsd-3-clause
|
jobovy/galpy,jobovy/galpy,jobovy/galpy,jobovy/galpy
|
from galpy.df_src import diskdf
from galpy.df_src import surfaceSigmaProfile
from galpy.df_src import evolveddiskdf
from galpy.df_src import quasiisothermaldf
from galpy.df_src import streamdf
from galpy.df_src import streamgapdf
#
# Classes
#
shudf= diskdf.shudf
dehnendf= diskdf.dehnendf
DFcorrection= diskdf.DFcorrection
diskdf= diskdf.diskdf
evolveddiskdf= evolveddiskdf.evolveddiskdf
expSurfaceSigmaProfile= surfaceSigmaProfile.expSurfaceSigmaProfile
surfaceSigmaProfile= surfaceSigmaProfile.surfaceSigmaProfile
quasiisothermaldf= quasiisothermaldf.quasiisothermaldf
streamdf= streamdf.streamdf
streamgapdf= streamgapdf.streamgapdf
Add impulse functions to top level
|
from galpy.df_src import diskdf
from galpy.df_src import surfaceSigmaProfile
from galpy.df_src import evolveddiskdf
from galpy.df_src import quasiisothermaldf
from galpy.df_src import streamdf
from galpy.df_src import streamgapdf
#
# Functions
#
impulse_deltav_plummer= streamgapdf.impulse_deltav_plummer
impulse_deltav_plummer_curvedstream= streamgapdf.impulse_deltav_plummer_curvedstream
impulse_deltav_hernquist= streamgapdf.impulse_deltav_hernquist
impulse_deltav_hernquist_curvedstream= streamgapdf.impulse_deltav_hernquist_curvedstream
impulse_deltav_general= streamgapdf.impulse_deltav_general
impulse_deltav_general_curvedstream= streamgapdf.impulse_deltav_general_curvedstream
impulse_deltav_general_orbitintegration= streamgapdf.impulse_deltav_general_orbitintegration
impulse_deltav_general_fullplummerintegration= streamgapdf.impulse_deltav_general_fullplummerintegration
#
# Classes
#
shudf= diskdf.shudf
dehnendf= diskdf.dehnendf
DFcorrection= diskdf.DFcorrection
diskdf= diskdf.diskdf
evolveddiskdf= evolveddiskdf.evolveddiskdf
expSurfaceSigmaProfile= surfaceSigmaProfile.expSurfaceSigmaProfile
surfaceSigmaProfile= surfaceSigmaProfile.surfaceSigmaProfile
quasiisothermaldf= quasiisothermaldf.quasiisothermaldf
streamdf= streamdf.streamdf
streamgapdf= streamgapdf.streamgapdf
|
<commit_before>from galpy.df_src import diskdf
from galpy.df_src import surfaceSigmaProfile
from galpy.df_src import evolveddiskdf
from galpy.df_src import quasiisothermaldf
from galpy.df_src import streamdf
from galpy.df_src import streamgapdf
#
# Classes
#
shudf= diskdf.shudf
dehnendf= diskdf.dehnendf
DFcorrection= diskdf.DFcorrection
diskdf= diskdf.diskdf
evolveddiskdf= evolveddiskdf.evolveddiskdf
expSurfaceSigmaProfile= surfaceSigmaProfile.expSurfaceSigmaProfile
surfaceSigmaProfile= surfaceSigmaProfile.surfaceSigmaProfile
quasiisothermaldf= quasiisothermaldf.quasiisothermaldf
streamdf= streamdf.streamdf
streamgapdf= streamgapdf.streamgapdf
<commit_msg>Add impulse functions to top level<commit_after>
|
from galpy.df_src import diskdf
from galpy.df_src import surfaceSigmaProfile
from galpy.df_src import evolveddiskdf
from galpy.df_src import quasiisothermaldf
from galpy.df_src import streamdf
from galpy.df_src import streamgapdf
#
# Functions
#
impulse_deltav_plummer= streamgapdf.impulse_deltav_plummer
impulse_deltav_plummer_curvedstream= streamgapdf.impulse_deltav_plummer_curvedstream
impulse_deltav_hernquist= streamgapdf.impulse_deltav_hernquist
impulse_deltav_hernquist_curvedstream= streamgapdf.impulse_deltav_hernquist_curvedstream
impulse_deltav_general= streamgapdf.impulse_deltav_general
impulse_deltav_general_curvedstream= streamgapdf.impulse_deltav_general_curvedstream
impulse_deltav_general_orbitintegration= streamgapdf.impulse_deltav_general_orbitintegration
impulse_deltav_general_fullplummerintegration= streamgapdf.impulse_deltav_general_fullplummerintegration
#
# Classes
#
shudf= diskdf.shudf
dehnendf= diskdf.dehnendf
DFcorrection= diskdf.DFcorrection
diskdf= diskdf.diskdf
evolveddiskdf= evolveddiskdf.evolveddiskdf
expSurfaceSigmaProfile= surfaceSigmaProfile.expSurfaceSigmaProfile
surfaceSigmaProfile= surfaceSigmaProfile.surfaceSigmaProfile
quasiisothermaldf= quasiisothermaldf.quasiisothermaldf
streamdf= streamdf.streamdf
streamgapdf= streamgapdf.streamgapdf
|
from galpy.df_src import diskdf
from galpy.df_src import surfaceSigmaProfile
from galpy.df_src import evolveddiskdf
from galpy.df_src import quasiisothermaldf
from galpy.df_src import streamdf
from galpy.df_src import streamgapdf
#
# Classes
#
shudf= diskdf.shudf
dehnendf= diskdf.dehnendf
DFcorrection= diskdf.DFcorrection
diskdf= diskdf.diskdf
evolveddiskdf= evolveddiskdf.evolveddiskdf
expSurfaceSigmaProfile= surfaceSigmaProfile.expSurfaceSigmaProfile
surfaceSigmaProfile= surfaceSigmaProfile.surfaceSigmaProfile
quasiisothermaldf= quasiisothermaldf.quasiisothermaldf
streamdf= streamdf.streamdf
streamgapdf= streamgapdf.streamgapdf
Add impulse functions to top levelfrom galpy.df_src import diskdf
from galpy.df_src import surfaceSigmaProfile
from galpy.df_src import evolveddiskdf
from galpy.df_src import quasiisothermaldf
from galpy.df_src import streamdf
from galpy.df_src import streamgapdf
#
# Functions
#
impulse_deltav_plummer= streamgapdf.impulse_deltav_plummer
impulse_deltav_plummer_curvedstream= streamgapdf.impulse_deltav_plummer_curvedstream
impulse_deltav_hernquist= streamgapdf.impulse_deltav_hernquist
impulse_deltav_hernquist_curvedstream= streamgapdf.impulse_deltav_hernquist_curvedstream
impulse_deltav_general= streamgapdf.impulse_deltav_general
impulse_deltav_general_curvedstream= streamgapdf.impulse_deltav_general_curvedstream
impulse_deltav_general_orbitintegration= streamgapdf.impulse_deltav_general_orbitintegration
impulse_deltav_general_fullplummerintegration= streamgapdf.impulse_deltav_general_fullplummerintegration
#
# Classes
#
shudf= diskdf.shudf
dehnendf= diskdf.dehnendf
DFcorrection= diskdf.DFcorrection
diskdf= diskdf.diskdf
evolveddiskdf= evolveddiskdf.evolveddiskdf
expSurfaceSigmaProfile= surfaceSigmaProfile.expSurfaceSigmaProfile
surfaceSigmaProfile= surfaceSigmaProfile.surfaceSigmaProfile
quasiisothermaldf= quasiisothermaldf.quasiisothermaldf
streamdf= streamdf.streamdf
streamgapdf= streamgapdf.streamgapdf
|
<commit_before>from galpy.df_src import diskdf
from galpy.df_src import surfaceSigmaProfile
from galpy.df_src import evolveddiskdf
from galpy.df_src import quasiisothermaldf
from galpy.df_src import streamdf
from galpy.df_src import streamgapdf
#
# Classes
#
shudf= diskdf.shudf
dehnendf= diskdf.dehnendf
DFcorrection= diskdf.DFcorrection
diskdf= diskdf.diskdf
evolveddiskdf= evolveddiskdf.evolveddiskdf
expSurfaceSigmaProfile= surfaceSigmaProfile.expSurfaceSigmaProfile
surfaceSigmaProfile= surfaceSigmaProfile.surfaceSigmaProfile
quasiisothermaldf= quasiisothermaldf.quasiisothermaldf
streamdf= streamdf.streamdf
streamgapdf= streamgapdf.streamgapdf
<commit_msg>Add impulse functions to top level<commit_after>from galpy.df_src import diskdf
from galpy.df_src import surfaceSigmaProfile
from galpy.df_src import evolveddiskdf
from galpy.df_src import quasiisothermaldf
from galpy.df_src import streamdf
from galpy.df_src import streamgapdf
#
# Functions
#
impulse_deltav_plummer= streamgapdf.impulse_deltav_plummer
impulse_deltav_plummer_curvedstream= streamgapdf.impulse_deltav_plummer_curvedstream
impulse_deltav_hernquist= streamgapdf.impulse_deltav_hernquist
impulse_deltav_hernquist_curvedstream= streamgapdf.impulse_deltav_hernquist_curvedstream
impulse_deltav_general= streamgapdf.impulse_deltav_general
impulse_deltav_general_curvedstream= streamgapdf.impulse_deltav_general_curvedstream
impulse_deltav_general_orbitintegration= streamgapdf.impulse_deltav_general_orbitintegration
impulse_deltav_general_fullplummerintegration= streamgapdf.impulse_deltav_general_fullplummerintegration
#
# Classes
#
shudf= diskdf.shudf
dehnendf= diskdf.dehnendf
DFcorrection= diskdf.DFcorrection
diskdf= diskdf.diskdf
evolveddiskdf= evolveddiskdf.evolveddiskdf
expSurfaceSigmaProfile= surfaceSigmaProfile.expSurfaceSigmaProfile
surfaceSigmaProfile= surfaceSigmaProfile.surfaceSigmaProfile
quasiisothermaldf= quasiisothermaldf.quasiisothermaldf
streamdf= streamdf.streamdf
streamgapdf= streamgapdf.streamgapdf
|
ae2c248cf1d3a2641b05a33d42077a2cace2e786
|
tests/scoring_engine/engine/test_execute_command.py
|
tests/scoring_engine/engine/test_execute_command.py
|
from scoring_engine.engine.execute_command import execute_command
from scoring_engine.engine.job import Job
class TestWorker(object):
def test_basic_run(self):
job = Job(environment_id="12345", command="echo 'HELLO'")
task = execute_command.run(job)
assert task['errored_out'] is False
assert task['output'] == 'HELLO\n'
def test_timed_out(self):
# this is a weak unit test, but I couldn't figure out
# how to run the job without a worker and still
# honor the soft timeout
assert execute_command.soft_time_limit == 30
|
import mock
from billiard.exceptions import SoftTimeLimitExceeded
from scoring_engine.engine.job import Job
from scoring_engine.engine.execute_command import execute_command
class TestWorker(object):
def test_basic_run(self):
job = Job(environment_id="12345", command="echo 'HELLO'")
task = execute_command.run(job)
assert task['errored_out'] is False
assert task['output'] == 'HELLO\n'
def test_timed_out(self):
import subprocess
subprocess.run = mock.Mock(side_effect=SoftTimeLimitExceeded)
job = Job(environment_id="12345", command="echo 'HELLO'")
task = execute_command.run(job)
assert task['errored_out'] is True
|
Add test for soft time limit reached
|
Add test for soft time limit reached
|
Python
|
mit
|
pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine
|
from scoring_engine.engine.execute_command import execute_command
from scoring_engine.engine.job import Job
class TestWorker(object):
def test_basic_run(self):
job = Job(environment_id="12345", command="echo 'HELLO'")
task = execute_command.run(job)
assert task['errored_out'] is False
assert task['output'] == 'HELLO\n'
def test_timed_out(self):
# this is a weak unit test, but I couldn't figure out
# how to run the job without a worker and still
# honor the soft timeout
assert execute_command.soft_time_limit == 30
Add test for soft time limit reached
|
import mock
from billiard.exceptions import SoftTimeLimitExceeded
from scoring_engine.engine.job import Job
from scoring_engine.engine.execute_command import execute_command
class TestWorker(object):
def test_basic_run(self):
job = Job(environment_id="12345", command="echo 'HELLO'")
task = execute_command.run(job)
assert task['errored_out'] is False
assert task['output'] == 'HELLO\n'
def test_timed_out(self):
import subprocess
subprocess.run = mock.Mock(side_effect=SoftTimeLimitExceeded)
job = Job(environment_id="12345", command="echo 'HELLO'")
task = execute_command.run(job)
assert task['errored_out'] is True
|
<commit_before>from scoring_engine.engine.execute_command import execute_command
from scoring_engine.engine.job import Job
class TestWorker(object):
def test_basic_run(self):
job = Job(environment_id="12345", command="echo 'HELLO'")
task = execute_command.run(job)
assert task['errored_out'] is False
assert task['output'] == 'HELLO\n'
def test_timed_out(self):
# this is a weak unit test, but I couldn't figure out
# how to run the job without a worker and still
# honor the soft timeout
assert execute_command.soft_time_limit == 30
<commit_msg>Add test for soft time limit reached<commit_after>
|
import mock
from billiard.exceptions import SoftTimeLimitExceeded
from scoring_engine.engine.job import Job
from scoring_engine.engine.execute_command import execute_command
class TestWorker(object):
def test_basic_run(self):
job = Job(environment_id="12345", command="echo 'HELLO'")
task = execute_command.run(job)
assert task['errored_out'] is False
assert task['output'] == 'HELLO\n'
def test_timed_out(self):
import subprocess
subprocess.run = mock.Mock(side_effect=SoftTimeLimitExceeded)
job = Job(environment_id="12345", command="echo 'HELLO'")
task = execute_command.run(job)
assert task['errored_out'] is True
|
from scoring_engine.engine.execute_command import execute_command
from scoring_engine.engine.job import Job
class TestWorker(object):
def test_basic_run(self):
job = Job(environment_id="12345", command="echo 'HELLO'")
task = execute_command.run(job)
assert task['errored_out'] is False
assert task['output'] == 'HELLO\n'
def test_timed_out(self):
# this is a weak unit test, but I couldn't figure out
# how to run the job without a worker and still
# honor the soft timeout
assert execute_command.soft_time_limit == 30
Add test for soft time limit reachedimport mock
from billiard.exceptions import SoftTimeLimitExceeded
from scoring_engine.engine.job import Job
from scoring_engine.engine.execute_command import execute_command
class TestWorker(object):
def test_basic_run(self):
job = Job(environment_id="12345", command="echo 'HELLO'")
task = execute_command.run(job)
assert task['errored_out'] is False
assert task['output'] == 'HELLO\n'
def test_timed_out(self):
import subprocess
subprocess.run = mock.Mock(side_effect=SoftTimeLimitExceeded)
job = Job(environment_id="12345", command="echo 'HELLO'")
task = execute_command.run(job)
assert task['errored_out'] is True
|
<commit_before>from scoring_engine.engine.execute_command import execute_command
from scoring_engine.engine.job import Job
class TestWorker(object):
def test_basic_run(self):
job = Job(environment_id="12345", command="echo 'HELLO'")
task = execute_command.run(job)
assert task['errored_out'] is False
assert task['output'] == 'HELLO\n'
def test_timed_out(self):
# this is a weak unit test, but I couldn't figure out
# how to run the job without a worker and still
# honor the soft timeout
assert execute_command.soft_time_limit == 30
<commit_msg>Add test for soft time limit reached<commit_after>import mock
from billiard.exceptions import SoftTimeLimitExceeded
from scoring_engine.engine.job import Job
from scoring_engine.engine.execute_command import execute_command
class TestWorker(object):
def test_basic_run(self):
job = Job(environment_id="12345", command="echo 'HELLO'")
task = execute_command.run(job)
assert task['errored_out'] is False
assert task['output'] == 'HELLO\n'
def test_timed_out(self):
import subprocess
subprocess.run = mock.Mock(side_effect=SoftTimeLimitExceeded)
job = Job(environment_id="12345", command="echo 'HELLO'")
task = execute_command.run(job)
assert task['errored_out'] is True
|
88d49172417ef7c99fa59313a10808c2b1a38b86
|
api/views.py
|
api/views.py
|
# -*- coding: utf-8 -*-
from rest_framework import generics
from rest_framework_extensions.cache.decorators import cache_response
from api.serializers import EventListSerializers
from api.processors import get_approved_events
from api.serializers import ScoreboardSerializer
from web.processors.event import count_approved_events_for_country
class CachedListAPIView(generics.ListAPIView):
"""
Concrete cached view for listing a queryset.
"""
@cache_response(240)
def get(self, request, *args, **kwargs):
return self.list(request, *args, **kwargs)
class EventListApi(CachedListAPIView):
""" Lists approved Events, takes the following optional GET parameters:
* limit
* order
* country_code
* past
"""
serializer_class = EventListSerializers
def get_queryset(self):
params = {
'limit': self.request.GET.get('limit', None),
'order': self.request.GET.get('order', None),
'country_code': self.request.GET.get('country_code', None),
'past': self.request.GET.get('past', False)
}
return get_approved_events(**params)
class ScoreBoardApi(CachedListAPIView):
"Lists scoreboard entries"
serializer_class = ScoreboardSerializer
def get_queryset(self):
return count_approved_events_for_country()
|
# -*- coding: utf-8 -*-
from hashlib import sha1
from rest_framework import generics
from rest_framework_extensions.cache.decorators import cache_response
from api.serializers import EventListSerializers
from api.processors import get_approved_events
from api.serializers import ScoreboardSerializer
from web.processors.event import count_approved_events_for_country
class CachedListAPIView(generics.ListAPIView):
"""
Concrete cached view for listing a queryset.
"""
@cache_response(timeout=240, key_func='calculate_cache_key')
def get(self, request, *args, **kwargs):
return self.list(request, *args, **kwargs)
def calculate_cache_key(self, view_instance, view_method, request, args, kwargs):
return sha1('-'.join([
repr(request.GET),
repr(args),
repr(kwargs),
])).hexdigest()
class EventListApi(CachedListAPIView):
""" Lists approved Events, takes the following optional GET parameters:
* limit
* order
* country_code
* past
"""
serializer_class = EventListSerializers
def get_queryset(self):
params = {
'limit': self.request.GET.get('limit', None),
'order': self.request.GET.get('order', None),
'country_code': self.request.GET.get('country_code', None),
'past': self.request.GET.get('past', False)
}
return get_approved_events(**params)
class ScoreBoardApi(CachedListAPIView):
"Lists scoreboard entries"
serializer_class = ScoreboardSerializer
def get_queryset(self):
return count_approved_events_for_country()
|
Include the query string in the API cache key
|
Include the query string in the API cache key
Otherwise, these two URLs would return the same data:
/api/event/list/?format=json&past=yes
/api/event/list/?format=json
|
Python
|
mit
|
codeeu/coding-events,codeeu/coding-events,codeeu/coding-events,codeeu/coding-events,codeeu/coding-events
|
# -*- coding: utf-8 -*-
from rest_framework import generics
from rest_framework_extensions.cache.decorators import cache_response
from api.serializers import EventListSerializers
from api.processors import get_approved_events
from api.serializers import ScoreboardSerializer
from web.processors.event import count_approved_events_for_country
class CachedListAPIView(generics.ListAPIView):
"""
Concrete cached view for listing a queryset.
"""
@cache_response(240)
def get(self, request, *args, **kwargs):
return self.list(request, *args, **kwargs)
class EventListApi(CachedListAPIView):
""" Lists approved Events, takes the following optional GET parameters:
* limit
* order
* country_code
* past
"""
serializer_class = EventListSerializers
def get_queryset(self):
params = {
'limit': self.request.GET.get('limit', None),
'order': self.request.GET.get('order', None),
'country_code': self.request.GET.get('country_code', None),
'past': self.request.GET.get('past', False)
}
return get_approved_events(**params)
class ScoreBoardApi(CachedListAPIView):
"Lists scoreboard entries"
serializer_class = ScoreboardSerializer
def get_queryset(self):
return count_approved_events_for_country()
Include the query string in the API cache key
Otherwise, these two URLs would return the same data:
/api/event/list/?format=json&past=yes
/api/event/list/?format=json
|
# -*- coding: utf-8 -*-
from hashlib import sha1
from rest_framework import generics
from rest_framework_extensions.cache.decorators import cache_response
from api.serializers import EventListSerializers
from api.processors import get_approved_events
from api.serializers import ScoreboardSerializer
from web.processors.event import count_approved_events_for_country
class CachedListAPIView(generics.ListAPIView):
"""
Concrete cached view for listing a queryset.
"""
@cache_response(timeout=240, key_func='calculate_cache_key')
def get(self, request, *args, **kwargs):
return self.list(request, *args, **kwargs)
def calculate_cache_key(self, view_instance, view_method, request, args, kwargs):
return sha1('-'.join([
repr(request.GET),
repr(args),
repr(kwargs),
])).hexdigest()
class EventListApi(CachedListAPIView):
""" Lists approved Events, takes the following optional GET parameters:
* limit
* order
* country_code
* past
"""
serializer_class = EventListSerializers
def get_queryset(self):
params = {
'limit': self.request.GET.get('limit', None),
'order': self.request.GET.get('order', None),
'country_code': self.request.GET.get('country_code', None),
'past': self.request.GET.get('past', False)
}
return get_approved_events(**params)
class ScoreBoardApi(CachedListAPIView):
"Lists scoreboard entries"
serializer_class = ScoreboardSerializer
def get_queryset(self):
return count_approved_events_for_country()
|
<commit_before># -*- coding: utf-8 -*-
from rest_framework import generics
from rest_framework_extensions.cache.decorators import cache_response
from api.serializers import EventListSerializers
from api.processors import get_approved_events
from api.serializers import ScoreboardSerializer
from web.processors.event import count_approved_events_for_country
class CachedListAPIView(generics.ListAPIView):
"""
Concrete cached view for listing a queryset.
"""
@cache_response(240)
def get(self, request, *args, **kwargs):
return self.list(request, *args, **kwargs)
class EventListApi(CachedListAPIView):
""" Lists approved Events, takes the following optional GET parameters:
* limit
* order
* country_code
* past
"""
serializer_class = EventListSerializers
def get_queryset(self):
params = {
'limit': self.request.GET.get('limit', None),
'order': self.request.GET.get('order', None),
'country_code': self.request.GET.get('country_code', None),
'past': self.request.GET.get('past', False)
}
return get_approved_events(**params)
class ScoreBoardApi(CachedListAPIView):
"Lists scoreboard entries"
serializer_class = ScoreboardSerializer
def get_queryset(self):
return count_approved_events_for_country()
<commit_msg>Include the query string in the API cache key
Otherwise, these two URLs would return the same data:
/api/event/list/?format=json&past=yes
/api/event/list/?format=json<commit_after>
|
# -*- coding: utf-8 -*-
from hashlib import sha1
from rest_framework import generics
from rest_framework_extensions.cache.decorators import cache_response
from api.serializers import EventListSerializers
from api.processors import get_approved_events
from api.serializers import ScoreboardSerializer
from web.processors.event import count_approved_events_for_country
class CachedListAPIView(generics.ListAPIView):
"""
Concrete cached view for listing a queryset.
"""
@cache_response(timeout=240, key_func='calculate_cache_key')
def get(self, request, *args, **kwargs):
return self.list(request, *args, **kwargs)
def calculate_cache_key(self, view_instance, view_method, request, args, kwargs):
return sha1('-'.join([
repr(request.GET),
repr(args),
repr(kwargs),
])).hexdigest()
class EventListApi(CachedListAPIView):
""" Lists approved Events, takes the following optional GET parameters:
* limit
* order
* country_code
* past
"""
serializer_class = EventListSerializers
def get_queryset(self):
params = {
'limit': self.request.GET.get('limit', None),
'order': self.request.GET.get('order', None),
'country_code': self.request.GET.get('country_code', None),
'past': self.request.GET.get('past', False)
}
return get_approved_events(**params)
class ScoreBoardApi(CachedListAPIView):
"Lists scoreboard entries"
serializer_class = ScoreboardSerializer
def get_queryset(self):
return count_approved_events_for_country()
|
# -*- coding: utf-8 -*-
from rest_framework import generics
from rest_framework_extensions.cache.decorators import cache_response
from api.serializers import EventListSerializers
from api.processors import get_approved_events
from api.serializers import ScoreboardSerializer
from web.processors.event import count_approved_events_for_country
class CachedListAPIView(generics.ListAPIView):
"""
Concrete cached view for listing a queryset.
"""
@cache_response(240)
def get(self, request, *args, **kwargs):
return self.list(request, *args, **kwargs)
class EventListApi(CachedListAPIView):
""" Lists approved Events, takes the following optional GET parameters:
* limit
* order
* country_code
* past
"""
serializer_class = EventListSerializers
def get_queryset(self):
params = {
'limit': self.request.GET.get('limit', None),
'order': self.request.GET.get('order', None),
'country_code': self.request.GET.get('country_code', None),
'past': self.request.GET.get('past', False)
}
return get_approved_events(**params)
class ScoreBoardApi(CachedListAPIView):
"Lists scoreboard entries"
serializer_class = ScoreboardSerializer
def get_queryset(self):
return count_approved_events_for_country()
Include the query string in the API cache key
Otherwise, these two URLs would return the same data:
/api/event/list/?format=json&past=yes
/api/event/list/?format=json# -*- coding: utf-8 -*-
from hashlib import sha1
from rest_framework import generics
from rest_framework_extensions.cache.decorators import cache_response
from api.serializers import EventListSerializers
from api.processors import get_approved_events
from api.serializers import ScoreboardSerializer
from web.processors.event import count_approved_events_for_country
class CachedListAPIView(generics.ListAPIView):
"""
Concrete cached view for listing a queryset.
"""
@cache_response(timeout=240, key_func='calculate_cache_key')
def get(self, request, *args, **kwargs):
return self.list(request, *args, **kwargs)
def calculate_cache_key(self, view_instance, view_method, request, args, kwargs):
return sha1('-'.join([
repr(request.GET),
repr(args),
repr(kwargs),
])).hexdigest()
class EventListApi(CachedListAPIView):
""" Lists approved Events, takes the following optional GET parameters:
* limit
* order
* country_code
* past
"""
serializer_class = EventListSerializers
def get_queryset(self):
params = {
'limit': self.request.GET.get('limit', None),
'order': self.request.GET.get('order', None),
'country_code': self.request.GET.get('country_code', None),
'past': self.request.GET.get('past', False)
}
return get_approved_events(**params)
class ScoreBoardApi(CachedListAPIView):
"Lists scoreboard entries"
serializer_class = ScoreboardSerializer
def get_queryset(self):
return count_approved_events_for_country()
|
<commit_before># -*- coding: utf-8 -*-
from rest_framework import generics
from rest_framework_extensions.cache.decorators import cache_response
from api.serializers import EventListSerializers
from api.processors import get_approved_events
from api.serializers import ScoreboardSerializer
from web.processors.event import count_approved_events_for_country
class CachedListAPIView(generics.ListAPIView):
"""
Concrete cached view for listing a queryset.
"""
@cache_response(240)
def get(self, request, *args, **kwargs):
return self.list(request, *args, **kwargs)
class EventListApi(CachedListAPIView):
""" Lists approved Events, takes the following optional GET parameters:
* limit
* order
* country_code
* past
"""
serializer_class = EventListSerializers
def get_queryset(self):
params = {
'limit': self.request.GET.get('limit', None),
'order': self.request.GET.get('order', None),
'country_code': self.request.GET.get('country_code', None),
'past': self.request.GET.get('past', False)
}
return get_approved_events(**params)
class ScoreBoardApi(CachedListAPIView):
"Lists scoreboard entries"
serializer_class = ScoreboardSerializer
def get_queryset(self):
return count_approved_events_for_country()
<commit_msg>Include the query string in the API cache key
Otherwise, these two URLs would return the same data:
/api/event/list/?format=json&past=yes
/api/event/list/?format=json<commit_after># -*- coding: utf-8 -*-
from hashlib import sha1
from rest_framework import generics
from rest_framework_extensions.cache.decorators import cache_response
from api.serializers import EventListSerializers
from api.processors import get_approved_events
from api.serializers import ScoreboardSerializer
from web.processors.event import count_approved_events_for_country
class CachedListAPIView(generics.ListAPIView):
"""
Concrete cached view for listing a queryset.
"""
@cache_response(timeout=240, key_func='calculate_cache_key')
def get(self, request, *args, **kwargs):
return self.list(request, *args, **kwargs)
def calculate_cache_key(self, view_instance, view_method, request, args, kwargs):
return sha1('-'.join([
repr(request.GET),
repr(args),
repr(kwargs),
])).hexdigest()
class EventListApi(CachedListAPIView):
""" Lists approved Events, takes the following optional GET parameters:
* limit
* order
* country_code
* past
"""
serializer_class = EventListSerializers
def get_queryset(self):
params = {
'limit': self.request.GET.get('limit', None),
'order': self.request.GET.get('order', None),
'country_code': self.request.GET.get('country_code', None),
'past': self.request.GET.get('past', False)
}
return get_approved_events(**params)
class ScoreBoardApi(CachedListAPIView):
"Lists scoreboard entries"
serializer_class = ScoreboardSerializer
def get_queryset(self):
return count_approved_events_for_country()
|
3ced7839a9afbd96d23617f60804d5d580ceb9e6
|
sets/1/challenges/s1c5.py
|
sets/1/challenges/s1c5.py
|
import itertools
def MultipleCharacterXOR(input, key):
out = ""
for i, j in itertools.izip_longest(range(len(input)), range(len(key))):
vi = i
vj = j
if vi is None:
vi = vj % len(input)
elif vj is None:
vj = vi % len(key)
input_c = input[vi]
key_c = key[vj]
out += chr(ord(input_c) ^ ord(key_c))
return out.encode("hex")
if __name__ == "__main__":
input = """Burning 'em, if you ain't quick and nimble
I go crazy when I hear a cymbal"""
key = "ICE"
print MultipleCharacterXOR(input, key)
|
import itertools
def MultipleCharacterXOR(input, key):
out = ""
j = 0
while j < max(len(input), len(key)):
i = j % len(input)
k = j % len(key)
j += 1
input_c = input[i]
key_c = key[k]
out += chr(ord(input_c) ^ ord(key_c))
return out.encode("hex")
if __name__ == "__main__":
input = """Burning 'em, if you ain't quick and nimble
I go crazy when I hear a cymbal"""
key = "ICE"
print MultipleCharacterXOR(input, key)
|
Make the solution easier to read
|
Make the solution easier to read
|
Python
|
mit
|
aawc/cryptopals
|
import itertools
def MultipleCharacterXOR(input, key):
out = ""
for i, j in itertools.izip_longest(range(len(input)), range(len(key))):
vi = i
vj = j
if vi is None:
vi = vj % len(input)
elif vj is None:
vj = vi % len(key)
input_c = input[vi]
key_c = key[vj]
out += chr(ord(input_c) ^ ord(key_c))
return out.encode("hex")
if __name__ == "__main__":
input = """Burning 'em, if you ain't quick and nimble
I go crazy when I hear a cymbal"""
key = "ICE"
print MultipleCharacterXOR(input, key)
Make the solution easier to read
|
import itertools
def MultipleCharacterXOR(input, key):
out = ""
j = 0
while j < max(len(input), len(key)):
i = j % len(input)
k = j % len(key)
j += 1
input_c = input[i]
key_c = key[k]
out += chr(ord(input_c) ^ ord(key_c))
return out.encode("hex")
if __name__ == "__main__":
input = """Burning 'em, if you ain't quick and nimble
I go crazy when I hear a cymbal"""
key = "ICE"
print MultipleCharacterXOR(input, key)
|
<commit_before>import itertools
def MultipleCharacterXOR(input, key):
out = ""
for i, j in itertools.izip_longest(range(len(input)), range(len(key))):
vi = i
vj = j
if vi is None:
vi = vj % len(input)
elif vj is None:
vj = vi % len(key)
input_c = input[vi]
key_c = key[vj]
out += chr(ord(input_c) ^ ord(key_c))
return out.encode("hex")
if __name__ == "__main__":
input = """Burning 'em, if you ain't quick and nimble
I go crazy when I hear a cymbal"""
key = "ICE"
print MultipleCharacterXOR(input, key)
<commit_msg>Make the solution easier to read<commit_after>
|
import itertools
def MultipleCharacterXOR(input, key):
out = ""
j = 0
while j < max(len(input), len(key)):
i = j % len(input)
k = j % len(key)
j += 1
input_c = input[i]
key_c = key[k]
out += chr(ord(input_c) ^ ord(key_c))
return out.encode("hex")
if __name__ == "__main__":
input = """Burning 'em, if you ain't quick and nimble
I go crazy when I hear a cymbal"""
key = "ICE"
print MultipleCharacterXOR(input, key)
|
import itertools
def MultipleCharacterXOR(input, key):
out = ""
for i, j in itertools.izip_longest(range(len(input)), range(len(key))):
vi = i
vj = j
if vi is None:
vi = vj % len(input)
elif vj is None:
vj = vi % len(key)
input_c = input[vi]
key_c = key[vj]
out += chr(ord(input_c) ^ ord(key_c))
return out.encode("hex")
if __name__ == "__main__":
input = """Burning 'em, if you ain't quick and nimble
I go crazy when I hear a cymbal"""
key = "ICE"
print MultipleCharacterXOR(input, key)
Make the solution easier to readimport itertools
def MultipleCharacterXOR(input, key):
out = ""
j = 0
while j < max(len(input), len(key)):
i = j % len(input)
k = j % len(key)
j += 1
input_c = input[i]
key_c = key[k]
out += chr(ord(input_c) ^ ord(key_c))
return out.encode("hex")
if __name__ == "__main__":
input = """Burning 'em, if you ain't quick and nimble
I go crazy when I hear a cymbal"""
key = "ICE"
print MultipleCharacterXOR(input, key)
|
<commit_before>import itertools
def MultipleCharacterXOR(input, key):
out = ""
for i, j in itertools.izip_longest(range(len(input)), range(len(key))):
vi = i
vj = j
if vi is None:
vi = vj % len(input)
elif vj is None:
vj = vi % len(key)
input_c = input[vi]
key_c = key[vj]
out += chr(ord(input_c) ^ ord(key_c))
return out.encode("hex")
if __name__ == "__main__":
input = """Burning 'em, if you ain't quick and nimble
I go crazy when I hear a cymbal"""
key = "ICE"
print MultipleCharacterXOR(input, key)
<commit_msg>Make the solution easier to read<commit_after>import itertools
def MultipleCharacterXOR(input, key):
out = ""
j = 0
while j < max(len(input), len(key)):
i = j % len(input)
k = j % len(key)
j += 1
input_c = input[i]
key_c = key[k]
out += chr(ord(input_c) ^ ord(key_c))
return out.encode("hex")
if __name__ == "__main__":
input = """Burning 'em, if you ain't quick and nimble
I go crazy when I hear a cymbal"""
key = "ICE"
print MultipleCharacterXOR(input, key)
|
44be93c5efb334297fc1bb10eaafec197018b241
|
python/render/render_tracks.py
|
python/render/render_tracks.py
|
__author__ = 'dcl9'
from render import render_template
import argparse
import yaml
def generate_track_dict(metadata):
d = dict()
d['track_name'] = '{}_{}({})'.format(metadata['protein'], metadata['serial_number'], metadata['author_identifier'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = '{}_{} binding sites'.format(metadata['protein'], metadata['serial_number'])
d['long_label'] = 'Predicted {} binding sites (site width = {}, model identifier {}({}))'.format(metadata['protein'], metadata['width'], metadata['serial_number'], metadata['author_identifier'])
return d
def render_tracks(assembly, metadata_file):
obj = yaml.load(metadata_file)
# Just pull out the assembly ones
tracks = [generate_track_dict(x) for x in obj if x['assembly'] == assembly]
trackdb = {'tracks': tracks}
render_template(trackdb, 'trackDb')
def main():
parser = argparse.ArgumentParser(description='Render trackDb.txt')
parser.add_argument('--assembly')
parser.add_argument('metadata_file', type=argparse.FileType('r'))
args = parser.parse_args()
render_tracks(args.assembly, args.metadata_file)
if __name__ == '__main__':
main()
|
__author__ = 'dcl9'
from render import render_template
import argparse
import yaml
def generate_track_dict(metadata):
d = dict()
d['track_name'] = '{}_{}({})'.format(metadata['protein'], metadata['serial_number'], metadata['author_identifier'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = '{}_{} binding'.format(metadata['protein'], metadata['serial_number'])
d['long_label'] = 'Predicted {} binding sites (site width = {})'.format(metadata['protein'], metadata['width'])
return d
def render_tracks(assembly, metadata_file):
obj = yaml.load(metadata_file)
# Just pull out the assembly ones
tracks = [generate_track_dict(x) for x in obj if x['assembly'] == assembly]
trackdb = {'tracks': tracks}
render_template(trackdb, 'trackDb')
def main():
parser = argparse.ArgumentParser(description='Render trackDb.txt')
parser.add_argument('--assembly')
parser.add_argument('metadata_file', type=argparse.FileType('r'))
args = parser.parse_args()
render_tracks(args.assembly, args.metadata_file)
if __name__ == '__main__':
main()
|
Update formatting on track labels
|
Update formatting on track labels
|
Python
|
mit
|
Duke-GCB/TrackHubGenerator,Duke-GCB/TrackHubGenerator
|
__author__ = 'dcl9'
from render import render_template
import argparse
import yaml
def generate_track_dict(metadata):
d = dict()
d['track_name'] = '{}_{}({})'.format(metadata['protein'], metadata['serial_number'], metadata['author_identifier'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = '{}_{} binding sites'.format(metadata['protein'], metadata['serial_number'])
d['long_label'] = 'Predicted {} binding sites (site width = {}, model identifier {}({}))'.format(metadata['protein'], metadata['width'], metadata['serial_number'], metadata['author_identifier'])
return d
def render_tracks(assembly, metadata_file):
obj = yaml.load(metadata_file)
# Just pull out the assembly ones
tracks = [generate_track_dict(x) for x in obj if x['assembly'] == assembly]
trackdb = {'tracks': tracks}
render_template(trackdb, 'trackDb')
def main():
parser = argparse.ArgumentParser(description='Render trackDb.txt')
parser.add_argument('--assembly')
parser.add_argument('metadata_file', type=argparse.FileType('r'))
args = parser.parse_args()
render_tracks(args.assembly, args.metadata_file)
if __name__ == '__main__':
main()
Update formatting on track labels
|
__author__ = 'dcl9'
from render import render_template
import argparse
import yaml
def generate_track_dict(metadata):
d = dict()
d['track_name'] = '{}_{}({})'.format(metadata['protein'], metadata['serial_number'], metadata['author_identifier'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = '{}_{} binding'.format(metadata['protein'], metadata['serial_number'])
d['long_label'] = 'Predicted {} binding sites (site width = {})'.format(metadata['protein'], metadata['width'])
return d
def render_tracks(assembly, metadata_file):
obj = yaml.load(metadata_file)
# Just pull out the assembly ones
tracks = [generate_track_dict(x) for x in obj if x['assembly'] == assembly]
trackdb = {'tracks': tracks}
render_template(trackdb, 'trackDb')
def main():
parser = argparse.ArgumentParser(description='Render trackDb.txt')
parser.add_argument('--assembly')
parser.add_argument('metadata_file', type=argparse.FileType('r'))
args = parser.parse_args()
render_tracks(args.assembly, args.metadata_file)
if __name__ == '__main__':
main()
|
<commit_before>__author__ = 'dcl9'
from render import render_template
import argparse
import yaml
def generate_track_dict(metadata):
d = dict()
d['track_name'] = '{}_{}({})'.format(metadata['protein'], metadata['serial_number'], metadata['author_identifier'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = '{}_{} binding sites'.format(metadata['protein'], metadata['serial_number'])
d['long_label'] = 'Predicted {} binding sites (site width = {}, model identifier {}({}))'.format(metadata['protein'], metadata['width'], metadata['serial_number'], metadata['author_identifier'])
return d
def render_tracks(assembly, metadata_file):
obj = yaml.load(metadata_file)
# Just pull out the assembly ones
tracks = [generate_track_dict(x) for x in obj if x['assembly'] == assembly]
trackdb = {'tracks': tracks}
render_template(trackdb, 'trackDb')
def main():
parser = argparse.ArgumentParser(description='Render trackDb.txt')
parser.add_argument('--assembly')
parser.add_argument('metadata_file', type=argparse.FileType('r'))
args = parser.parse_args()
render_tracks(args.assembly, args.metadata_file)
if __name__ == '__main__':
main()
<commit_msg>Update formatting on track labels<commit_after>
|
__author__ = 'dcl9'
from render import render_template
import argparse
import yaml
def generate_track_dict(metadata):
d = dict()
d['track_name'] = '{}_{}({})'.format(metadata['protein'], metadata['serial_number'], metadata['author_identifier'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = '{}_{} binding'.format(metadata['protein'], metadata['serial_number'])
d['long_label'] = 'Predicted {} binding sites (site width = {})'.format(metadata['protein'], metadata['width'])
return d
def render_tracks(assembly, metadata_file):
obj = yaml.load(metadata_file)
# Just pull out the assembly ones
tracks = [generate_track_dict(x) for x in obj if x['assembly'] == assembly]
trackdb = {'tracks': tracks}
render_template(trackdb, 'trackDb')
def main():
parser = argparse.ArgumentParser(description='Render trackDb.txt')
parser.add_argument('--assembly')
parser.add_argument('metadata_file', type=argparse.FileType('r'))
args = parser.parse_args()
render_tracks(args.assembly, args.metadata_file)
if __name__ == '__main__':
main()
|
__author__ = 'dcl9'
from render import render_template
import argparse
import yaml
def generate_track_dict(metadata):
d = dict()
d['track_name'] = '{}_{}({})'.format(metadata['protein'], metadata['serial_number'], metadata['author_identifier'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = '{}_{} binding sites'.format(metadata['protein'], metadata['serial_number'])
d['long_label'] = 'Predicted {} binding sites (site width = {}, model identifier {}({}))'.format(metadata['protein'], metadata['width'], metadata['serial_number'], metadata['author_identifier'])
return d
def render_tracks(assembly, metadata_file):
obj = yaml.load(metadata_file)
# Just pull out the assembly ones
tracks = [generate_track_dict(x) for x in obj if x['assembly'] == assembly]
trackdb = {'tracks': tracks}
render_template(trackdb, 'trackDb')
def main():
parser = argparse.ArgumentParser(description='Render trackDb.txt')
parser.add_argument('--assembly')
parser.add_argument('metadata_file', type=argparse.FileType('r'))
args = parser.parse_args()
render_tracks(args.assembly, args.metadata_file)
if __name__ == '__main__':
main()
Update formatting on track labels__author__ = 'dcl9'
from render import render_template
import argparse
import yaml
def generate_track_dict(metadata):
d = dict()
d['track_name'] = '{}_{}({})'.format(metadata['protein'], metadata['serial_number'], metadata['author_identifier'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = '{}_{} binding'.format(metadata['protein'], metadata['serial_number'])
d['long_label'] = 'Predicted {} binding sites (site width = {})'.format(metadata['protein'], metadata['width'])
return d
def render_tracks(assembly, metadata_file):
obj = yaml.load(metadata_file)
# Just pull out the assembly ones
tracks = [generate_track_dict(x) for x in obj if x['assembly'] == assembly]
trackdb = {'tracks': tracks}
render_template(trackdb, 'trackDb')
def main():
parser = argparse.ArgumentParser(description='Render trackDb.txt')
parser.add_argument('--assembly')
parser.add_argument('metadata_file', type=argparse.FileType('r'))
args = parser.parse_args()
render_tracks(args.assembly, args.metadata_file)
if __name__ == '__main__':
main()
|
<commit_before>__author__ = 'dcl9'
from render import render_template
import argparse
import yaml
def generate_track_dict(metadata):
d = dict()
d['track_name'] = '{}_{}({})'.format(metadata['protein'], metadata['serial_number'], metadata['author_identifier'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = '{}_{} binding sites'.format(metadata['protein'], metadata['serial_number'])
d['long_label'] = 'Predicted {} binding sites (site width = {}, model identifier {}({}))'.format(metadata['protein'], metadata['width'], metadata['serial_number'], metadata['author_identifier'])
return d
def render_tracks(assembly, metadata_file):
obj = yaml.load(metadata_file)
# Just pull out the assembly ones
tracks = [generate_track_dict(x) for x in obj if x['assembly'] == assembly]
trackdb = {'tracks': tracks}
render_template(trackdb, 'trackDb')
def main():
parser = argparse.ArgumentParser(description='Render trackDb.txt')
parser.add_argument('--assembly')
parser.add_argument('metadata_file', type=argparse.FileType('r'))
args = parser.parse_args()
render_tracks(args.assembly, args.metadata_file)
if __name__ == '__main__':
main()
<commit_msg>Update formatting on track labels<commit_after>__author__ = 'dcl9'
from render import render_template
import argparse
import yaml
def generate_track_dict(metadata):
d = dict()
d['track_name'] = '{}_{}({})'.format(metadata['protein'], metadata['serial_number'], metadata['author_identifier'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = '{}_{} binding'.format(metadata['protein'], metadata['serial_number'])
d['long_label'] = 'Predicted {} binding sites (site width = {})'.format(metadata['protein'], metadata['width'])
return d
def render_tracks(assembly, metadata_file):
obj = yaml.load(metadata_file)
# Just pull out the assembly ones
tracks = [generate_track_dict(x) for x in obj if x['assembly'] == assembly]
trackdb = {'tracks': tracks}
render_template(trackdb, 'trackDb')
def main():
parser = argparse.ArgumentParser(description='Render trackDb.txt')
parser.add_argument('--assembly')
parser.add_argument('metadata_file', type=argparse.FileType('r'))
args = parser.parse_args()
render_tracks(args.assembly, args.metadata_file)
if __name__ == '__main__':
main()
|
9ce90b52bff35d5d0ad87d2402a5e8a946938cf7
|
sideloader/forms.py
|
sideloader/forms.py
|
from django.contrib.auth.models import User
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
import models
class BaseModelForm(forms.ModelForm):
helper = FormHelper()
helper.form_class = 'form-horizontal'
helper.add_input(Submit('submit', 'Submit'))
class BaseForm(forms.Form):
helper = FormHelper()
helper.form_class = 'form-horizontal'
helper.add_input(Submit('submit', 'Submit'))
class ReleaseForm(BaseModelForm):
class Meta:
model = models.ReleaseStream
class ProjectForm(BaseModelForm):
github_url = forms.CharField(label="Git checkout URL")
class Meta:
model = models.Project
exclude = ('idhash', 'created_by_user',)
def clean(self):
cleaned_data = super(ProjectForm, self).clean()
uri = cleaned_data['github_url'].strip()
if not (uri[-4:] == '.git'):
raise forms.ValidationError("Not a valid Git URI")
cleaned_data['github_url'] = uri
return cleaned_data
class UserForm(BaseModelForm):
password = forms.CharField(widget=forms.PasswordInput(), initial='')
class Meta:
model = User
exclude = (
'email', 'username', 'is_staff', 'is_active', 'is_superuser',
'last_login', 'date_joined', 'groups', 'user_permissions'
)
|
from django.contrib.auth.models import User
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
import models
class BaseModelForm(forms.ModelForm):
helper = FormHelper()
helper.form_class = 'form-horizontal'
helper.add_input(Submit('submit', 'Submit'))
class BaseForm(forms.Form):
helper = FormHelper()
helper.form_class = 'form-horizontal'
helper.add_input(Submit('submit', 'Submit'))
class ReleaseForm(BaseModelForm):
class Meta:
model = models.ReleaseStream
class ProjectForm(BaseModelForm):
github_url = forms.CharField(label="Git checkout URL")
allowed_users = forms.ModelMultipleChoiceField(
queryset=User.objects.all().order_by('username'),
required=False,
widget=forms.widgets.CheckboxSelectMultiple
)
class Meta:
model = models.Project
exclude = ('idhash', 'created_by_user',)
def clean(self):
cleaned_data = super(ProjectForm, self).clean()
uri = cleaned_data['github_url'].strip()
if not (uri[-4:] == '.git'):
raise forms.ValidationError("Not a valid Git URI")
cleaned_data['github_url'] = uri
return cleaned_data
class UserForm(BaseModelForm):
password = forms.CharField(widget=forms.PasswordInput(), initial='')
class Meta:
model = User
exclude = (
'email', 'username', 'is_staff', 'is_active', 'is_superuser',
'last_login', 'date_joined', 'groups', 'user_permissions'
)
|
Improve the project form a bit
|
Improve the project form a bit
|
Python
|
mit
|
praekelt/sideloader,praekelt/sideloader,praekelt/sideloader,praekelt/sideloader
|
from django.contrib.auth.models import User
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
import models
class BaseModelForm(forms.ModelForm):
helper = FormHelper()
helper.form_class = 'form-horizontal'
helper.add_input(Submit('submit', 'Submit'))
class BaseForm(forms.Form):
helper = FormHelper()
helper.form_class = 'form-horizontal'
helper.add_input(Submit('submit', 'Submit'))
class ReleaseForm(BaseModelForm):
class Meta:
model = models.ReleaseStream
class ProjectForm(BaseModelForm):
github_url = forms.CharField(label="Git checkout URL")
class Meta:
model = models.Project
exclude = ('idhash', 'created_by_user',)
def clean(self):
cleaned_data = super(ProjectForm, self).clean()
uri = cleaned_data['github_url'].strip()
if not (uri[-4:] == '.git'):
raise forms.ValidationError("Not a valid Git URI")
cleaned_data['github_url'] = uri
return cleaned_data
class UserForm(BaseModelForm):
password = forms.CharField(widget=forms.PasswordInput(), initial='')
class Meta:
model = User
exclude = (
'email', 'username', 'is_staff', 'is_active', 'is_superuser',
'last_login', 'date_joined', 'groups', 'user_permissions'
)
Improve the project form a bit
|
from django.contrib.auth.models import User
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
import models
class BaseModelForm(forms.ModelForm):
helper = FormHelper()
helper.form_class = 'form-horizontal'
helper.add_input(Submit('submit', 'Submit'))
class BaseForm(forms.Form):
helper = FormHelper()
helper.form_class = 'form-horizontal'
helper.add_input(Submit('submit', 'Submit'))
class ReleaseForm(BaseModelForm):
class Meta:
model = models.ReleaseStream
class ProjectForm(BaseModelForm):
github_url = forms.CharField(label="Git checkout URL")
allowed_users = forms.ModelMultipleChoiceField(
queryset=User.objects.all().order_by('username'),
required=False,
widget=forms.widgets.CheckboxSelectMultiple
)
class Meta:
model = models.Project
exclude = ('idhash', 'created_by_user',)
def clean(self):
cleaned_data = super(ProjectForm, self).clean()
uri = cleaned_data['github_url'].strip()
if not (uri[-4:] == '.git'):
raise forms.ValidationError("Not a valid Git URI")
cleaned_data['github_url'] = uri
return cleaned_data
class UserForm(BaseModelForm):
password = forms.CharField(widget=forms.PasswordInput(), initial='')
class Meta:
model = User
exclude = (
'email', 'username', 'is_staff', 'is_active', 'is_superuser',
'last_login', 'date_joined', 'groups', 'user_permissions'
)
|
<commit_before>from django.contrib.auth.models import User
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
import models
class BaseModelForm(forms.ModelForm):
helper = FormHelper()
helper.form_class = 'form-horizontal'
helper.add_input(Submit('submit', 'Submit'))
class BaseForm(forms.Form):
helper = FormHelper()
helper.form_class = 'form-horizontal'
helper.add_input(Submit('submit', 'Submit'))
class ReleaseForm(BaseModelForm):
class Meta:
model = models.ReleaseStream
class ProjectForm(BaseModelForm):
github_url = forms.CharField(label="Git checkout URL")
class Meta:
model = models.Project
exclude = ('idhash', 'created_by_user',)
def clean(self):
cleaned_data = super(ProjectForm, self).clean()
uri = cleaned_data['github_url'].strip()
if not (uri[-4:] == '.git'):
raise forms.ValidationError("Not a valid Git URI")
cleaned_data['github_url'] = uri
return cleaned_data
class UserForm(BaseModelForm):
password = forms.CharField(widget=forms.PasswordInput(), initial='')
class Meta:
model = User
exclude = (
'email', 'username', 'is_staff', 'is_active', 'is_superuser',
'last_login', 'date_joined', 'groups', 'user_permissions'
)
<commit_msg>Improve the project form a bit<commit_after>
|
from django.contrib.auth.models import User
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
import models
class BaseModelForm(forms.ModelForm):
helper = FormHelper()
helper.form_class = 'form-horizontal'
helper.add_input(Submit('submit', 'Submit'))
class BaseForm(forms.Form):
helper = FormHelper()
helper.form_class = 'form-horizontal'
helper.add_input(Submit('submit', 'Submit'))
class ReleaseForm(BaseModelForm):
class Meta:
model = models.ReleaseStream
class ProjectForm(BaseModelForm):
github_url = forms.CharField(label="Git checkout URL")
allowed_users = forms.ModelMultipleChoiceField(
queryset=User.objects.all().order_by('username'),
required=False,
widget=forms.widgets.CheckboxSelectMultiple
)
class Meta:
model = models.Project
exclude = ('idhash', 'created_by_user',)
def clean(self):
cleaned_data = super(ProjectForm, self).clean()
uri = cleaned_data['github_url'].strip()
if not (uri[-4:] == '.git'):
raise forms.ValidationError("Not a valid Git URI")
cleaned_data['github_url'] = uri
return cleaned_data
class UserForm(BaseModelForm):
password = forms.CharField(widget=forms.PasswordInput(), initial='')
class Meta:
model = User
exclude = (
'email', 'username', 'is_staff', 'is_active', 'is_superuser',
'last_login', 'date_joined', 'groups', 'user_permissions'
)
|
from django.contrib.auth.models import User
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
import models
class BaseModelForm(forms.ModelForm):
helper = FormHelper()
helper.form_class = 'form-horizontal'
helper.add_input(Submit('submit', 'Submit'))
class BaseForm(forms.Form):
helper = FormHelper()
helper.form_class = 'form-horizontal'
helper.add_input(Submit('submit', 'Submit'))
class ReleaseForm(BaseModelForm):
class Meta:
model = models.ReleaseStream
class ProjectForm(BaseModelForm):
github_url = forms.CharField(label="Git checkout URL")
class Meta:
model = models.Project
exclude = ('idhash', 'created_by_user',)
def clean(self):
cleaned_data = super(ProjectForm, self).clean()
uri = cleaned_data['github_url'].strip()
if not (uri[-4:] == '.git'):
raise forms.ValidationError("Not a valid Git URI")
cleaned_data['github_url'] = uri
return cleaned_data
class UserForm(BaseModelForm):
password = forms.CharField(widget=forms.PasswordInput(), initial='')
class Meta:
model = User
exclude = (
'email', 'username', 'is_staff', 'is_active', 'is_superuser',
'last_login', 'date_joined', 'groups', 'user_permissions'
)
Improve the project form a bitfrom django.contrib.auth.models import User
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
import models
class BaseModelForm(forms.ModelForm):
helper = FormHelper()
helper.form_class = 'form-horizontal'
helper.add_input(Submit('submit', 'Submit'))
class BaseForm(forms.Form):
helper = FormHelper()
helper.form_class = 'form-horizontal'
helper.add_input(Submit('submit', 'Submit'))
class ReleaseForm(BaseModelForm):
class Meta:
model = models.ReleaseStream
class ProjectForm(BaseModelForm):
github_url = forms.CharField(label="Git checkout URL")
allowed_users = forms.ModelMultipleChoiceField(
queryset=User.objects.all().order_by('username'),
required=False,
widget=forms.widgets.CheckboxSelectMultiple
)
class Meta:
model = models.Project
exclude = ('idhash', 'created_by_user',)
def clean(self):
cleaned_data = super(ProjectForm, self).clean()
uri = cleaned_data['github_url'].strip()
if not (uri[-4:] == '.git'):
raise forms.ValidationError("Not a valid Git URI")
cleaned_data['github_url'] = uri
return cleaned_data
class UserForm(BaseModelForm):
password = forms.CharField(widget=forms.PasswordInput(), initial='')
class Meta:
model = User
exclude = (
'email', 'username', 'is_staff', 'is_active', 'is_superuser',
'last_login', 'date_joined', 'groups', 'user_permissions'
)
|
<commit_before>from django.contrib.auth.models import User
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
import models
class BaseModelForm(forms.ModelForm):
helper = FormHelper()
helper.form_class = 'form-horizontal'
helper.add_input(Submit('submit', 'Submit'))
class BaseForm(forms.Form):
helper = FormHelper()
helper.form_class = 'form-horizontal'
helper.add_input(Submit('submit', 'Submit'))
class ReleaseForm(BaseModelForm):
class Meta:
model = models.ReleaseStream
class ProjectForm(BaseModelForm):
github_url = forms.CharField(label="Git checkout URL")
class Meta:
model = models.Project
exclude = ('idhash', 'created_by_user',)
def clean(self):
cleaned_data = super(ProjectForm, self).clean()
uri = cleaned_data['github_url'].strip()
if not (uri[-4:] == '.git'):
raise forms.ValidationError("Not a valid Git URI")
cleaned_data['github_url'] = uri
return cleaned_data
class UserForm(BaseModelForm):
password = forms.CharField(widget=forms.PasswordInput(), initial='')
class Meta:
model = User
exclude = (
'email', 'username', 'is_staff', 'is_active', 'is_superuser',
'last_login', 'date_joined', 'groups', 'user_permissions'
)
<commit_msg>Improve the project form a bit<commit_after>from django.contrib.auth.models import User
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
import models
class BaseModelForm(forms.ModelForm):
helper = FormHelper()
helper.form_class = 'form-horizontal'
helper.add_input(Submit('submit', 'Submit'))
class BaseForm(forms.Form):
helper = FormHelper()
helper.form_class = 'form-horizontal'
helper.add_input(Submit('submit', 'Submit'))
class ReleaseForm(BaseModelForm):
class Meta:
model = models.ReleaseStream
class ProjectForm(BaseModelForm):
github_url = forms.CharField(label="Git checkout URL")
allowed_users = forms.ModelMultipleChoiceField(
queryset=User.objects.all().order_by('username'),
required=False,
widget=forms.widgets.CheckboxSelectMultiple
)
class Meta:
model = models.Project
exclude = ('idhash', 'created_by_user',)
def clean(self):
cleaned_data = super(ProjectForm, self).clean()
uri = cleaned_data['github_url'].strip()
if not (uri[-4:] == '.git'):
raise forms.ValidationError("Not a valid Git URI")
cleaned_data['github_url'] = uri
return cleaned_data
class UserForm(BaseModelForm):
password = forms.CharField(widget=forms.PasswordInput(), initial='')
class Meta:
model = User
exclude = (
'email', 'username', 'is_staff', 'is_active', 'is_superuser',
'last_login', 'date_joined', 'groups', 'user_permissions'
)
|
5d2bf1bd3baecd912b0af94bed7b9edcbbd05f40
|
src/oscar/apps/dashboard/tables.py
|
src/oscar/apps/dashboard/tables.py
|
from django.utils.translation import ungettext_lazy
from django_tables2_reports.tables import TableReport
class DashboardTable(TableReport):
caption = ungettext_lazy('%d Row', '%d Rows')
def get_caption_display(self):
# Allow overriding the caption with an arbitrary string that we cannot
# interpolate the number of rows in
try:
return self.caption % self.paginator.count
except TypeError:
pass
return self.caption
class Meta:
template = 'dashboard/table.html'
attrs = {'class': 'table table-striped table-bordered'}
|
from django.utils.translation import ungettext_lazy
from django_tables2_reports.tables import TableReport
class DashboardTable(TableReport):
caption = ungettext_lazy('%d Row', '%d Rows')
def get_caption_display(self):
# Allow overriding the caption with an arbitrary string that we cannot
# interpolate the number of rows in
try:
return self.caption % self.paginator.count
except TypeError:
pass
return self.caption
class Meta(object):
template = 'dashboard/table.html'
attrs = {'class': 'table table-striped table-bordered'}
|
Use new style class to allow for dynamic table class creation
|
Use new style class to allow for dynamic table class creation
|
Python
|
bsd-3-clause
|
machtfit/django-oscar,machtfit/django-oscar,machtfit/django-oscar
|
from django.utils.translation import ungettext_lazy
from django_tables2_reports.tables import TableReport
class DashboardTable(TableReport):
caption = ungettext_lazy('%d Row', '%d Rows')
def get_caption_display(self):
# Allow overriding the caption with an arbitrary string that we cannot
# interpolate the number of rows in
try:
return self.caption % self.paginator.count
except TypeError:
pass
return self.caption
class Meta:
template = 'dashboard/table.html'
attrs = {'class': 'table table-striped table-bordered'}
Use new style class to allow for dynamic table class creation
|
from django.utils.translation import ungettext_lazy
from django_tables2_reports.tables import TableReport
class DashboardTable(TableReport):
caption = ungettext_lazy('%d Row', '%d Rows')
def get_caption_display(self):
# Allow overriding the caption with an arbitrary string that we cannot
# interpolate the number of rows in
try:
return self.caption % self.paginator.count
except TypeError:
pass
return self.caption
class Meta(object):
template = 'dashboard/table.html'
attrs = {'class': 'table table-striped table-bordered'}
|
<commit_before>from django.utils.translation import ungettext_lazy
from django_tables2_reports.tables import TableReport
class DashboardTable(TableReport):
caption = ungettext_lazy('%d Row', '%d Rows')
def get_caption_display(self):
# Allow overriding the caption with an arbitrary string that we cannot
# interpolate the number of rows in
try:
return self.caption % self.paginator.count
except TypeError:
pass
return self.caption
class Meta:
template = 'dashboard/table.html'
attrs = {'class': 'table table-striped table-bordered'}
<commit_msg>Use new style class to allow for dynamic table class creation<commit_after>
|
from django.utils.translation import ungettext_lazy
from django_tables2_reports.tables import TableReport
class DashboardTable(TableReport):
caption = ungettext_lazy('%d Row', '%d Rows')
def get_caption_display(self):
# Allow overriding the caption with an arbitrary string that we cannot
# interpolate the number of rows in
try:
return self.caption % self.paginator.count
except TypeError:
pass
return self.caption
class Meta(object):
template = 'dashboard/table.html'
attrs = {'class': 'table table-striped table-bordered'}
|
from django.utils.translation import ungettext_lazy
from django_tables2_reports.tables import TableReport
class DashboardTable(TableReport):
caption = ungettext_lazy('%d Row', '%d Rows')
def get_caption_display(self):
# Allow overriding the caption with an arbitrary string that we cannot
# interpolate the number of rows in
try:
return self.caption % self.paginator.count
except TypeError:
pass
return self.caption
class Meta:
template = 'dashboard/table.html'
attrs = {'class': 'table table-striped table-bordered'}
Use new style class to allow for dynamic table class creationfrom django.utils.translation import ungettext_lazy
from django_tables2_reports.tables import TableReport
class DashboardTable(TableReport):
caption = ungettext_lazy('%d Row', '%d Rows')
def get_caption_display(self):
# Allow overriding the caption with an arbitrary string that we cannot
# interpolate the number of rows in
try:
return self.caption % self.paginator.count
except TypeError:
pass
return self.caption
class Meta(object):
template = 'dashboard/table.html'
attrs = {'class': 'table table-striped table-bordered'}
|
<commit_before>from django.utils.translation import ungettext_lazy
from django_tables2_reports.tables import TableReport
class DashboardTable(TableReport):
caption = ungettext_lazy('%d Row', '%d Rows')
def get_caption_display(self):
# Allow overriding the caption with an arbitrary string that we cannot
# interpolate the number of rows in
try:
return self.caption % self.paginator.count
except TypeError:
pass
return self.caption
class Meta:
template = 'dashboard/table.html'
attrs = {'class': 'table table-striped table-bordered'}
<commit_msg>Use new style class to allow for dynamic table class creation<commit_after>from django.utils.translation import ungettext_lazy
from django_tables2_reports.tables import TableReport
class DashboardTable(TableReport):
caption = ungettext_lazy('%d Row', '%d Rows')
def get_caption_display(self):
# Allow overriding the caption with an arbitrary string that we cannot
# interpolate the number of rows in
try:
return self.caption % self.paginator.count
except TypeError:
pass
return self.caption
class Meta(object):
template = 'dashboard/table.html'
attrs = {'class': 'table table-striped table-bordered'}
|
cf6d82d9db90572d9a7350cb9d98f3619b86668f
|
examples/plot_quadtree_hanging.py
|
examples/plot_quadtree_hanging.py
|
"""
QuadTree: Hanging Nodes
=======================
You can give the refine method a function, which is evaluated on every
cell of the TreeMesh.
Occasionally it is useful to initially refine to a constant level
(e.g. 3 in this 32x32 mesh). This means the function is first evaluated
on an 8x8 mesh (2^3).
"""
import discretize
import matplotlib.pyplot as plt
def run(plotIt=True):
M = discretize.TreeMesh([8, 8])
def refine(cell):
xyz = cell.center
dist = ((xyz - [0.25, 0.25]) ** 2).sum() ** 0.5
if dist < 0.25:
return 3
return 2
M.refine(refine)
if plotIt:
M.plotGrid(nodes=True, centers=True, faces_x=True)
plt.legend(
(
"Grid",
"Cell Centers",
"Nodes",
"Hanging Nodes",
"X faces",
"Hanging X faces",
)
)
if __name__ == "__main__":
run()
plt.show()
|
"""
QuadTree: Hanging Nodes
=======================
You can give the refine method a function, which is evaluated on every
cell of the TreeMesh.
Occasionally it is useful to initially refine to a constant level
(e.g. 3 in this 32x32 mesh). This means the function is first evaluated
on an 8x8 mesh (2^3).
"""
import discretize
import matplotlib.pyplot as plt
def run(plotIt=True):
M = discretize.TreeMesh([8, 8])
def refine(cell):
xyz = cell.center
dist = ((xyz - [0.25, 0.25]) ** 2).sum() ** 0.5
if dist < 0.25:
return 3
return 2
M.refine(refine)
if plotIt:
M.plotGrid(nodes=True, centers=True, faces_x=True)
plt.legend(
(
"Nodes",
"Hanging Nodes",
"Cell Centers",
"X faces",
"Hanging X faces",
"Grid",
)
)
if __name__ == "__main__":
run()
plt.show()
|
Switch order (again) of legend for example
|
Switch order (again) of legend for example
|
Python
|
mit
|
simpeg/discretize,simpeg/discretize,simpeg/discretize
|
"""
QuadTree: Hanging Nodes
=======================
You can give the refine method a function, which is evaluated on every
cell of the TreeMesh.
Occasionally it is useful to initially refine to a constant level
(e.g. 3 in this 32x32 mesh). This means the function is first evaluated
on an 8x8 mesh (2^3).
"""
import discretize
import matplotlib.pyplot as plt
def run(plotIt=True):
M = discretize.TreeMesh([8, 8])
def refine(cell):
xyz = cell.center
dist = ((xyz - [0.25, 0.25]) ** 2).sum() ** 0.5
if dist < 0.25:
return 3
return 2
M.refine(refine)
if plotIt:
M.plotGrid(nodes=True, centers=True, faces_x=True)
plt.legend(
(
"Grid",
"Cell Centers",
"Nodes",
"Hanging Nodes",
"X faces",
"Hanging X faces",
)
)
if __name__ == "__main__":
run()
plt.show()
Switch order (again) of legend for example
|
"""
QuadTree: Hanging Nodes
=======================
You can give the refine method a function, which is evaluated on every
cell of the TreeMesh.
Occasionally it is useful to initially refine to a constant level
(e.g. 3 in this 32x32 mesh). This means the function is first evaluated
on an 8x8 mesh (2^3).
"""
import discretize
import matplotlib.pyplot as plt
def run(plotIt=True):
M = discretize.TreeMesh([8, 8])
def refine(cell):
xyz = cell.center
dist = ((xyz - [0.25, 0.25]) ** 2).sum() ** 0.5
if dist < 0.25:
return 3
return 2
M.refine(refine)
if plotIt:
M.plotGrid(nodes=True, centers=True, faces_x=True)
plt.legend(
(
"Nodes",
"Hanging Nodes",
"Cell Centers",
"X faces",
"Hanging X faces",
"Grid",
)
)
if __name__ == "__main__":
run()
plt.show()
|
<commit_before>"""
QuadTree: Hanging Nodes
=======================
You can give the refine method a function, which is evaluated on every
cell of the TreeMesh.
Occasionally it is useful to initially refine to a constant level
(e.g. 3 in this 32x32 mesh). This means the function is first evaluated
on an 8x8 mesh (2^3).
"""
import discretize
import matplotlib.pyplot as plt
def run(plotIt=True):
M = discretize.TreeMesh([8, 8])
def refine(cell):
xyz = cell.center
dist = ((xyz - [0.25, 0.25]) ** 2).sum() ** 0.5
if dist < 0.25:
return 3
return 2
M.refine(refine)
if plotIt:
M.plotGrid(nodes=True, centers=True, faces_x=True)
plt.legend(
(
"Grid",
"Cell Centers",
"Nodes",
"Hanging Nodes",
"X faces",
"Hanging X faces",
)
)
if __name__ == "__main__":
run()
plt.show()
<commit_msg>Switch order (again) of legend for example<commit_after>
|
"""
QuadTree: Hanging Nodes
=======================
You can give the refine method a function, which is evaluated on every
cell of the TreeMesh.
Occasionally it is useful to initially refine to a constant level
(e.g. 3 in this 32x32 mesh). This means the function is first evaluated
on an 8x8 mesh (2^3).
"""
import discretize
import matplotlib.pyplot as plt
def run(plotIt=True):
M = discretize.TreeMesh([8, 8])
def refine(cell):
xyz = cell.center
dist = ((xyz - [0.25, 0.25]) ** 2).sum() ** 0.5
if dist < 0.25:
return 3
return 2
M.refine(refine)
if plotIt:
M.plotGrid(nodes=True, centers=True, faces_x=True)
plt.legend(
(
"Nodes",
"Hanging Nodes",
"Cell Centers",
"X faces",
"Hanging X faces",
"Grid",
)
)
if __name__ == "__main__":
run()
plt.show()
|
"""
QuadTree: Hanging Nodes
=======================
You can give the refine method a function, which is evaluated on every
cell of the TreeMesh.
Occasionally it is useful to initially refine to a constant level
(e.g. 3 in this 32x32 mesh). This means the function is first evaluated
on an 8x8 mesh (2^3).
"""
import discretize
import matplotlib.pyplot as plt
def run(plotIt=True):
M = discretize.TreeMesh([8, 8])
def refine(cell):
xyz = cell.center
dist = ((xyz - [0.25, 0.25]) ** 2).sum() ** 0.5
if dist < 0.25:
return 3
return 2
M.refine(refine)
if plotIt:
M.plotGrid(nodes=True, centers=True, faces_x=True)
plt.legend(
(
"Grid",
"Cell Centers",
"Nodes",
"Hanging Nodes",
"X faces",
"Hanging X faces",
)
)
if __name__ == "__main__":
run()
plt.show()
Switch order (again) of legend for example"""
QuadTree: Hanging Nodes
=======================
You can give the refine method a function, which is evaluated on every
cell of the TreeMesh.
Occasionally it is useful to initially refine to a constant level
(e.g. 3 in this 32x32 mesh). This means the function is first evaluated
on an 8x8 mesh (2^3).
"""
import discretize
import matplotlib.pyplot as plt
def run(plotIt=True):
M = discretize.TreeMesh([8, 8])
def refine(cell):
xyz = cell.center
dist = ((xyz - [0.25, 0.25]) ** 2).sum() ** 0.5
if dist < 0.25:
return 3
return 2
M.refine(refine)
if plotIt:
M.plotGrid(nodes=True, centers=True, faces_x=True)
plt.legend(
(
"Nodes",
"Hanging Nodes",
"Cell Centers",
"X faces",
"Hanging X faces",
"Grid",
)
)
if __name__ == "__main__":
run()
plt.show()
|
<commit_before>"""
QuadTree: Hanging Nodes
=======================
You can give the refine method a function, which is evaluated on every
cell of the TreeMesh.
Occasionally it is useful to initially refine to a constant level
(e.g. 3 in this 32x32 mesh). This means the function is first evaluated
on an 8x8 mesh (2^3).
"""
import discretize
import matplotlib.pyplot as plt
def run(plotIt=True):
M = discretize.TreeMesh([8, 8])
def refine(cell):
xyz = cell.center
dist = ((xyz - [0.25, 0.25]) ** 2).sum() ** 0.5
if dist < 0.25:
return 3
return 2
M.refine(refine)
if plotIt:
M.plotGrid(nodes=True, centers=True, faces_x=True)
plt.legend(
(
"Grid",
"Cell Centers",
"Nodes",
"Hanging Nodes",
"X faces",
"Hanging X faces",
)
)
if __name__ == "__main__":
run()
plt.show()
<commit_msg>Switch order (again) of legend for example<commit_after>"""
QuadTree: Hanging Nodes
=======================
You can give the refine method a function, which is evaluated on every
cell of the TreeMesh.
Occasionally it is useful to initially refine to a constant level
(e.g. 3 in this 32x32 mesh). This means the function is first evaluated
on an 8x8 mesh (2^3).
"""
import discretize
import matplotlib.pyplot as plt
def run(plotIt=True):
M = discretize.TreeMesh([8, 8])
def refine(cell):
xyz = cell.center
dist = ((xyz - [0.25, 0.25]) ** 2).sum() ** 0.5
if dist < 0.25:
return 3
return 2
M.refine(refine)
if plotIt:
M.plotGrid(nodes=True, centers=True, faces_x=True)
plt.legend(
(
"Nodes",
"Hanging Nodes",
"Cell Centers",
"X faces",
"Hanging X faces",
"Grid",
)
)
if __name__ == "__main__":
run()
plt.show()
|
f62e0539028b5a327e3c178090ee9316958e65cc
|
pylons/__init__.py
|
pylons/__init__.py
|
"""Base objects to be exported for use in Controllers"""
from paste.registry import StackedObjectProxy
from pylons.config import config
__all__ = ['app_globals', 'c', 'cache', 'config', 'g', 'request', 'response',
'session', 'tmpl_context', 'url']
def __figure_version():
try:
from pkg_resources import require
import os
# NOTE: this only works when the package is either installed,
# or has an .egg-info directory present (i.e. wont work with raw
# SVN checkout)
info = require('pylons')[0]
if os.path.dirname(os.path.dirname(__file__)) == info.location:
return info.version
else:
return '(not installed)'
except:
return '(not installed)'
__version__ = __figure_version()
app_globals = g = StackedObjectProxy(name="app_globals")
cache = StackedObjectProxy(name="cache")
request = StackedObjectProxy(name="request")
response = StackedObjectProxy(name="response")
session = StackedObjectProxy(name="session")
tmpl_context = c = StackedObjectProxy(name="tmpl_context or C")
url = StackedObjectProxy(name="url")
translator = StackedObjectProxy(name="translator")
|
"""Base objects to be exported for use in Controllers"""
from paste.registry import StackedObjectProxy
from pylons.configuration import config
__all__ = ['app_globals', 'c', 'cache', 'config', 'g', 'request', 'response',
'session', 'tmpl_context', 'url']
def __figure_version():
try:
from pkg_resources import require
import os
# NOTE: this only works when the package is either installed,
# or has an .egg-info directory present (i.e. wont work with raw
# SVN checkout)
info = require('pylons')[0]
if os.path.dirname(os.path.dirname(__file__)) == info.location:
return info.version
else:
return '(not installed)'
except:
return '(not installed)'
__version__ = __figure_version()
app_globals = g = StackedObjectProxy(name="app_globals")
cache = StackedObjectProxy(name="cache")
request = StackedObjectProxy(name="request")
response = StackedObjectProxy(name="response")
session = StackedObjectProxy(name="session")
tmpl_context = c = StackedObjectProxy(name="tmpl_context or C")
url = StackedObjectProxy(name="url")
translator = StackedObjectProxy(name="translator")
|
Fix an import error. pylons.config doesn't exist anymore, use pylons.configuration
|
Fix an import error. pylons.config doesn't exist anymore, use pylons.configuration
--HG--
branch : trunk
|
Python
|
bsd-3-clause
|
Pylons/pylons,moreati/pylons,Pylons/pylons,moreati/pylons,Pylons/pylons,moreati/pylons
|
"""Base objects to be exported for use in Controllers"""
from paste.registry import StackedObjectProxy
from pylons.config import config
__all__ = ['app_globals', 'c', 'cache', 'config', 'g', 'request', 'response',
'session', 'tmpl_context', 'url']
def __figure_version():
try:
from pkg_resources import require
import os
# NOTE: this only works when the package is either installed,
# or has an .egg-info directory present (i.e. wont work with raw
# SVN checkout)
info = require('pylons')[0]
if os.path.dirname(os.path.dirname(__file__)) == info.location:
return info.version
else:
return '(not installed)'
except:
return '(not installed)'
__version__ = __figure_version()
app_globals = g = StackedObjectProxy(name="app_globals")
cache = StackedObjectProxy(name="cache")
request = StackedObjectProxy(name="request")
response = StackedObjectProxy(name="response")
session = StackedObjectProxy(name="session")
tmpl_context = c = StackedObjectProxy(name="tmpl_context or C")
url = StackedObjectProxy(name="url")
translator = StackedObjectProxy(name="translator")
Fix an import error. pylons.config doesn't exist anymore, use pylons.configuration
--HG--
branch : trunk
|
"""Base objects to be exported for use in Controllers"""
from paste.registry import StackedObjectProxy
from pylons.configuration import config
__all__ = ['app_globals', 'c', 'cache', 'config', 'g', 'request', 'response',
'session', 'tmpl_context', 'url']
def __figure_version():
try:
from pkg_resources import require
import os
# NOTE: this only works when the package is either installed,
# or has an .egg-info directory present (i.e. wont work with raw
# SVN checkout)
info = require('pylons')[0]
if os.path.dirname(os.path.dirname(__file__)) == info.location:
return info.version
else:
return '(not installed)'
except:
return '(not installed)'
__version__ = __figure_version()
app_globals = g = StackedObjectProxy(name="app_globals")
cache = StackedObjectProxy(name="cache")
request = StackedObjectProxy(name="request")
response = StackedObjectProxy(name="response")
session = StackedObjectProxy(name="session")
tmpl_context = c = StackedObjectProxy(name="tmpl_context or C")
url = StackedObjectProxy(name="url")
translator = StackedObjectProxy(name="translator")
|
<commit_before>"""Base objects to be exported for use in Controllers"""
from paste.registry import StackedObjectProxy
from pylons.config import config
__all__ = ['app_globals', 'c', 'cache', 'config', 'g', 'request', 'response',
'session', 'tmpl_context', 'url']
def __figure_version():
try:
from pkg_resources import require
import os
# NOTE: this only works when the package is either installed,
# or has an .egg-info directory present (i.e. wont work with raw
# SVN checkout)
info = require('pylons')[0]
if os.path.dirname(os.path.dirname(__file__)) == info.location:
return info.version
else:
return '(not installed)'
except:
return '(not installed)'
__version__ = __figure_version()
app_globals = g = StackedObjectProxy(name="app_globals")
cache = StackedObjectProxy(name="cache")
request = StackedObjectProxy(name="request")
response = StackedObjectProxy(name="response")
session = StackedObjectProxy(name="session")
tmpl_context = c = StackedObjectProxy(name="tmpl_context or C")
url = StackedObjectProxy(name="url")
translator = StackedObjectProxy(name="translator")
<commit_msg>Fix an import error. pylons.config doesn't exist anymore, use pylons.configuration
--HG--
branch : trunk<commit_after>
|
"""Base objects to be exported for use in Controllers"""
from paste.registry import StackedObjectProxy
from pylons.configuration import config
__all__ = ['app_globals', 'c', 'cache', 'config', 'g', 'request', 'response',
'session', 'tmpl_context', 'url']
def __figure_version():
try:
from pkg_resources import require
import os
# NOTE: this only works when the package is either installed,
# or has an .egg-info directory present (i.e. wont work with raw
# SVN checkout)
info = require('pylons')[0]
if os.path.dirname(os.path.dirname(__file__)) == info.location:
return info.version
else:
return '(not installed)'
except:
return '(not installed)'
__version__ = __figure_version()
app_globals = g = StackedObjectProxy(name="app_globals")
cache = StackedObjectProxy(name="cache")
request = StackedObjectProxy(name="request")
response = StackedObjectProxy(name="response")
session = StackedObjectProxy(name="session")
tmpl_context = c = StackedObjectProxy(name="tmpl_context or C")
url = StackedObjectProxy(name="url")
translator = StackedObjectProxy(name="translator")
|
"""Base objects to be exported for use in Controllers"""
from paste.registry import StackedObjectProxy
from pylons.config import config
__all__ = ['app_globals', 'c', 'cache', 'config', 'g', 'request', 'response',
'session', 'tmpl_context', 'url']
def __figure_version():
try:
from pkg_resources import require
import os
# NOTE: this only works when the package is either installed,
# or has an .egg-info directory present (i.e. wont work with raw
# SVN checkout)
info = require('pylons')[0]
if os.path.dirname(os.path.dirname(__file__)) == info.location:
return info.version
else:
return '(not installed)'
except:
return '(not installed)'
__version__ = __figure_version()
app_globals = g = StackedObjectProxy(name="app_globals")
cache = StackedObjectProxy(name="cache")
request = StackedObjectProxy(name="request")
response = StackedObjectProxy(name="response")
session = StackedObjectProxy(name="session")
tmpl_context = c = StackedObjectProxy(name="tmpl_context or C")
url = StackedObjectProxy(name="url")
translator = StackedObjectProxy(name="translator")
Fix an import error. pylons.config doesn't exist anymore, use pylons.configuration
--HG--
branch : trunk"""Base objects to be exported for use in Controllers"""
from paste.registry import StackedObjectProxy
from pylons.configuration import config
__all__ = ['app_globals', 'c', 'cache', 'config', 'g', 'request', 'response',
'session', 'tmpl_context', 'url']
def __figure_version():
try:
from pkg_resources import require
import os
# NOTE: this only works when the package is either installed,
# or has an .egg-info directory present (i.e. wont work with raw
# SVN checkout)
info = require('pylons')[0]
if os.path.dirname(os.path.dirname(__file__)) == info.location:
return info.version
else:
return '(not installed)'
except:
return '(not installed)'
__version__ = __figure_version()
app_globals = g = StackedObjectProxy(name="app_globals")
cache = StackedObjectProxy(name="cache")
request = StackedObjectProxy(name="request")
response = StackedObjectProxy(name="response")
session = StackedObjectProxy(name="session")
tmpl_context = c = StackedObjectProxy(name="tmpl_context or C")
url = StackedObjectProxy(name="url")
translator = StackedObjectProxy(name="translator")
|
<commit_before>"""Base objects to be exported for use in Controllers"""
from paste.registry import StackedObjectProxy
from pylons.config import config
__all__ = ['app_globals', 'c', 'cache', 'config', 'g', 'request', 'response',
'session', 'tmpl_context', 'url']
def __figure_version():
try:
from pkg_resources import require
import os
# NOTE: this only works when the package is either installed,
# or has an .egg-info directory present (i.e. wont work with raw
# SVN checkout)
info = require('pylons')[0]
if os.path.dirname(os.path.dirname(__file__)) == info.location:
return info.version
else:
return '(not installed)'
except:
return '(not installed)'
__version__ = __figure_version()
app_globals = g = StackedObjectProxy(name="app_globals")
cache = StackedObjectProxy(name="cache")
request = StackedObjectProxy(name="request")
response = StackedObjectProxy(name="response")
session = StackedObjectProxy(name="session")
tmpl_context = c = StackedObjectProxy(name="tmpl_context or C")
url = StackedObjectProxy(name="url")
translator = StackedObjectProxy(name="translator")
<commit_msg>Fix an import error. pylons.config doesn't exist anymore, use pylons.configuration
--HG--
branch : trunk<commit_after>"""Base objects to be exported for use in Controllers"""
from paste.registry import StackedObjectProxy
from pylons.configuration import config
__all__ = ['app_globals', 'c', 'cache', 'config', 'g', 'request', 'response',
'session', 'tmpl_context', 'url']
def __figure_version():
try:
from pkg_resources import require
import os
# NOTE: this only works when the package is either installed,
# or has an .egg-info directory present (i.e. wont work with raw
# SVN checkout)
info = require('pylons')[0]
if os.path.dirname(os.path.dirname(__file__)) == info.location:
return info.version
else:
return '(not installed)'
except:
return '(not installed)'
__version__ = __figure_version()
app_globals = g = StackedObjectProxy(name="app_globals")
cache = StackedObjectProxy(name="cache")
request = StackedObjectProxy(name="request")
response = StackedObjectProxy(name="response")
session = StackedObjectProxy(name="session")
tmpl_context = c = StackedObjectProxy(name="tmpl_context or C")
url = StackedObjectProxy(name="url")
translator = StackedObjectProxy(name="translator")
|
44572394e4543071025c86ac107d4dfe1fc48b47
|
python/__init__.py
|
python/__init__.py
|
# -*- coding: UTF-8 -*-
# Copyright (C) 2009 Itaapy, ArsAperta, Pierlis, Talend
# Import from itools
from itools.pkg import get_version
__version__ = get_version()
|
# -*- coding: UTF-8 -*-
# Copyright (C) 2009 Itaapy, ArsAperta, Pierlis, Talend
# Import from itools
from itools.core import get_version
__version__ = get_version()
|
Fix a bad import (get_version)
|
Fix a bad import (get_version)
|
Python
|
apache-2.0
|
Agicia/lpod-python,lpod/lpod-docs,lpod/lpod-docs,Agicia/lpod-python
|
# -*- coding: UTF-8 -*-
# Copyright (C) 2009 Itaapy, ArsAperta, Pierlis, Talend
# Import from itools
from itools.pkg import get_version
__version__ = get_version()
Fix a bad import (get_version)
|
# -*- coding: UTF-8 -*-
# Copyright (C) 2009 Itaapy, ArsAperta, Pierlis, Talend
# Import from itools
from itools.core import get_version
__version__ = get_version()
|
<commit_before># -*- coding: UTF-8 -*-
# Copyright (C) 2009 Itaapy, ArsAperta, Pierlis, Talend
# Import from itools
from itools.pkg import get_version
__version__ = get_version()
<commit_msg>Fix a bad import (get_version)<commit_after>
|
# -*- coding: UTF-8 -*-
# Copyright (C) 2009 Itaapy, ArsAperta, Pierlis, Talend
# Import from itools
from itools.core import get_version
__version__ = get_version()
|
# -*- coding: UTF-8 -*-
# Copyright (C) 2009 Itaapy, ArsAperta, Pierlis, Talend
# Import from itools
from itools.pkg import get_version
__version__ = get_version()
Fix a bad import (get_version)# -*- coding: UTF-8 -*-
# Copyright (C) 2009 Itaapy, ArsAperta, Pierlis, Talend
# Import from itools
from itools.core import get_version
__version__ = get_version()
|
<commit_before># -*- coding: UTF-8 -*-
# Copyright (C) 2009 Itaapy, ArsAperta, Pierlis, Talend
# Import from itools
from itools.pkg import get_version
__version__ = get_version()
<commit_msg>Fix a bad import (get_version)<commit_after># -*- coding: UTF-8 -*-
# Copyright (C) 2009 Itaapy, ArsAperta, Pierlis, Talend
# Import from itools
from itools.core import get_version
__version__ = get_version()
|
2a086200b7644c3b3b869359c23366e7a3f36141
|
show_usbcamera.py
|
show_usbcamera.py
|
#! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Show the images from a USB camera
#
#
# External dependencies
#
import sys
from PySide import QtGui
import VisionToolkit as vtk
#
# Main application
#
if __name__ == '__main__' :
application = QtGui.QApplication( sys.argv )
widget = vtk.UsbCameraWidget()
widget.show()
sys.exit( application.exec_() )
|
#! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Show the images from a USB camera
#
#
# External dependencies
#
#import sys
import cv2
#from PySide import QtGui
import VisionToolkit as vt
#
# Image callback function
#
def Callback( image ) :
# Display the stereo image
cv2.imshow( 'Camera', image )
cv2.waitKey( 1 )
#
# Main application
#
if __name__ == '__main__' :
# application = QtGui.QApplication( sys.argv )
# widget = vt.UsbCameraWidget()
# widget.show()
# sys.exit( application.exec_() )
# Initialize the stereo cameras
usbcamera = vt.UsbCamera( Callback )
# Lower the camera frame rate and resolution
usbcamera.camera.set( cv2.cv.CV_CAP_PROP_FRAME_WIDTH, 640 )
usbcamera.camera.set( cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, 480 )
usbcamera.camera.set( cv2.cv.CV_CAP_PROP_FPS, 25 )
# Start capture
usbcamera.start()
# Wait for user key press
raw_input( 'Press <enter> to stop the capture...' )
# Stop image acquisition
usbcamera.running = False
usbcamera.join()
# Cleanup OpenCV
cv2.destroyAllWindows()
|
Add OpenCV viewer for debug.
|
Add OpenCV viewer for debug.
|
Python
|
mit
|
microy/StereoVision,microy/StereoVision,microy/VisionToolkit,microy/VisionToolkit,microy/PyStereoVisionToolkit,microy/PyStereoVisionToolkit
|
#! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Show the images from a USB camera
#
#
# External dependencies
#
import sys
from PySide import QtGui
import VisionToolkit as vtk
#
# Main application
#
if __name__ == '__main__' :
application = QtGui.QApplication( sys.argv )
widget = vtk.UsbCameraWidget()
widget.show()
sys.exit( application.exec_() )
Add OpenCV viewer for debug.
|
#! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Show the images from a USB camera
#
#
# External dependencies
#
#import sys
import cv2
#from PySide import QtGui
import VisionToolkit as vt
#
# Image callback function
#
def Callback( image ) :
# Display the stereo image
cv2.imshow( 'Camera', image )
cv2.waitKey( 1 )
#
# Main application
#
if __name__ == '__main__' :
# application = QtGui.QApplication( sys.argv )
# widget = vt.UsbCameraWidget()
# widget.show()
# sys.exit( application.exec_() )
# Initialize the stereo cameras
usbcamera = vt.UsbCamera( Callback )
# Lower the camera frame rate and resolution
usbcamera.camera.set( cv2.cv.CV_CAP_PROP_FRAME_WIDTH, 640 )
usbcamera.camera.set( cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, 480 )
usbcamera.camera.set( cv2.cv.CV_CAP_PROP_FPS, 25 )
# Start capture
usbcamera.start()
# Wait for user key press
raw_input( 'Press <enter> to stop the capture...' )
# Stop image acquisition
usbcamera.running = False
usbcamera.join()
# Cleanup OpenCV
cv2.destroyAllWindows()
|
<commit_before>#! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Show the images from a USB camera
#
#
# External dependencies
#
import sys
from PySide import QtGui
import VisionToolkit as vtk
#
# Main application
#
if __name__ == '__main__' :
application = QtGui.QApplication( sys.argv )
widget = vtk.UsbCameraWidget()
widget.show()
sys.exit( application.exec_() )
<commit_msg>Add OpenCV viewer for debug.<commit_after>
|
#! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Show the images from a USB camera
#
#
# External dependencies
#
#import sys
import cv2
#from PySide import QtGui
import VisionToolkit as vt
#
# Image callback function
#
def Callback( image ) :
# Display the stereo image
cv2.imshow( 'Camera', image )
cv2.waitKey( 1 )
#
# Main application
#
if __name__ == '__main__' :
# application = QtGui.QApplication( sys.argv )
# widget = vt.UsbCameraWidget()
# widget.show()
# sys.exit( application.exec_() )
# Initialize the stereo cameras
usbcamera = vt.UsbCamera( Callback )
# Lower the camera frame rate and resolution
usbcamera.camera.set( cv2.cv.CV_CAP_PROP_FRAME_WIDTH, 640 )
usbcamera.camera.set( cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, 480 )
usbcamera.camera.set( cv2.cv.CV_CAP_PROP_FPS, 25 )
# Start capture
usbcamera.start()
# Wait for user key press
raw_input( 'Press <enter> to stop the capture...' )
# Stop image acquisition
usbcamera.running = False
usbcamera.join()
# Cleanup OpenCV
cv2.destroyAllWindows()
|
#! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Show the images from a USB camera
#
#
# External dependencies
#
import sys
from PySide import QtGui
import VisionToolkit as vtk
#
# Main application
#
if __name__ == '__main__' :
application = QtGui.QApplication( sys.argv )
widget = vtk.UsbCameraWidget()
widget.show()
sys.exit( application.exec_() )
Add OpenCV viewer for debug.#! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Show the images from a USB camera
#
#
# External dependencies
#
#import sys
import cv2
#from PySide import QtGui
import VisionToolkit as vt
#
# Image callback function
#
def Callback( image ) :
# Display the stereo image
cv2.imshow( 'Camera', image )
cv2.waitKey( 1 )
#
# Main application
#
if __name__ == '__main__' :
# application = QtGui.QApplication( sys.argv )
# widget = vt.UsbCameraWidget()
# widget.show()
# sys.exit( application.exec_() )
# Initialize the stereo cameras
usbcamera = vt.UsbCamera( Callback )
# Lower the camera frame rate and resolution
usbcamera.camera.set( cv2.cv.CV_CAP_PROP_FRAME_WIDTH, 640 )
usbcamera.camera.set( cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, 480 )
usbcamera.camera.set( cv2.cv.CV_CAP_PROP_FPS, 25 )
# Start capture
usbcamera.start()
# Wait for user key press
raw_input( 'Press <enter> to stop the capture...' )
# Stop image acquisition
usbcamera.running = False
usbcamera.join()
# Cleanup OpenCV
cv2.destroyAllWindows()
|
<commit_before>#! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Show the images from a USB camera
#
#
# External dependencies
#
import sys
from PySide import QtGui
import VisionToolkit as vtk
#
# Main application
#
if __name__ == '__main__' :
application = QtGui.QApplication( sys.argv )
widget = vtk.UsbCameraWidget()
widget.show()
sys.exit( application.exec_() )
<commit_msg>Add OpenCV viewer for debug.<commit_after>#! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Show the images from a USB camera
#
#
# External dependencies
#
#import sys
import cv2
#from PySide import QtGui
import VisionToolkit as vt
#
# Image callback function
#
def Callback( image ) :
# Display the stereo image
cv2.imshow( 'Camera', image )
cv2.waitKey( 1 )
#
# Main application
#
if __name__ == '__main__' :
# application = QtGui.QApplication( sys.argv )
# widget = vt.UsbCameraWidget()
# widget.show()
# sys.exit( application.exec_() )
# Initialize the stereo cameras
usbcamera = vt.UsbCamera( Callback )
# Lower the camera frame rate and resolution
usbcamera.camera.set( cv2.cv.CV_CAP_PROP_FRAME_WIDTH, 640 )
usbcamera.camera.set( cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, 480 )
usbcamera.camera.set( cv2.cv.CV_CAP_PROP_FPS, 25 )
# Start capture
usbcamera.start()
# Wait for user key press
raw_input( 'Press <enter> to stop the capture...' )
# Stop image acquisition
usbcamera.running = False
usbcamera.join()
# Cleanup OpenCV
cv2.destroyAllWindows()
|
7ad3346759f53f57f233319e63361a0ed792535f
|
incrowd/notify/utils.py
|
incrowd/notify/utils.py
|
import logging
from notify.models import Notification
logger = logging.getLogger(__name__)
def ping_filter(message, users, sending_user, notify_text, notify_type,
notify_url=None):
for user in users:
if username_in_message(message, user.username):
# Create notification
if user == sending_user:
continue
note = Notification(
text='{} {}: {}'.format(
sending_user.username, notify_text, message),
user=user,
from_user=sending_user,
type=notify_type,
link=notify_url)
note.save()
logger.info("Created notification for user {} from {}"
.format(note))
return message
def username_in_message(message, username):
message = message.lower()
username = username.lower()
# Check if @username in message. Edge case for username at the end of
# the message.
if '@' + username + ' ' in message.lower():
return True
try:
return (message.index('@' + username) ==
len(message.lower()) - len('@' + username))
except ValueError:
return False
|
import logging
from notify.models import Notification
logger = logging.getLogger(__name__)
def ping_filter(message, users, sending_user, notify_text, notify_type,
notify_url=None):
for user in users:
if username_in_message(message, user.username):
# Create notification
if user == sending_user:
continue
note = Notification(
text='{} {}: {}'.format(
sending_user.username, notify_text, message),
user=user,
from_user=sending_user,
type=notify_type,
link=notify_url)
note.save()
logger.info("Created notification for user {} from {}"
.format(note.user, note.from_user))
return message
def username_in_message(message, username):
message = message.lower()
username = username.lower()
# Check if @username in message. Edge case for username at the end of
# the message.
if '@' + username + ' ' in message.lower():
return True
try:
return (message.index('@' + username) ==
len(message.lower()) - len('@' + username))
except ValueError:
return False
|
Fix pinging in chat throwing errors
|
Fix pinging in chat throwing errors
|
Python
|
apache-2.0
|
pcsforeducation/incrowd,pcsforeducation/incrowd,incrowdio/incrowd,incrowdio/incrowd,incrowdio/incrowd,pcsforeducation/incrowd,pcsforeducation/incrowd,incrowdio/incrowd
|
import logging
from notify.models import Notification
logger = logging.getLogger(__name__)
def ping_filter(message, users, sending_user, notify_text, notify_type,
notify_url=None):
for user in users:
if username_in_message(message, user.username):
# Create notification
if user == sending_user:
continue
note = Notification(
text='{} {}: {}'.format(
sending_user.username, notify_text, message),
user=user,
from_user=sending_user,
type=notify_type,
link=notify_url)
note.save()
logger.info("Created notification for user {} from {}"
.format(note))
return message
def username_in_message(message, username):
message = message.lower()
username = username.lower()
# Check if @username in message. Edge case for username at the end of
# the message.
if '@' + username + ' ' in message.lower():
return True
try:
return (message.index('@' + username) ==
len(message.lower()) - len('@' + username))
except ValueError:
return FalseFix pinging in chat throwing errors
|
import logging
from notify.models import Notification
logger = logging.getLogger(__name__)
def ping_filter(message, users, sending_user, notify_text, notify_type,
notify_url=None):
for user in users:
if username_in_message(message, user.username):
# Create notification
if user == sending_user:
continue
note = Notification(
text='{} {}: {}'.format(
sending_user.username, notify_text, message),
user=user,
from_user=sending_user,
type=notify_type,
link=notify_url)
note.save()
logger.info("Created notification for user {} from {}"
.format(note.user, note.from_user))
return message
def username_in_message(message, username):
message = message.lower()
username = username.lower()
# Check if @username in message. Edge case for username at the end of
# the message.
if '@' + username + ' ' in message.lower():
return True
try:
return (message.index('@' + username) ==
len(message.lower()) - len('@' + username))
except ValueError:
return False
|
<commit_before>import logging
from notify.models import Notification
logger = logging.getLogger(__name__)
def ping_filter(message, users, sending_user, notify_text, notify_type,
notify_url=None):
for user in users:
if username_in_message(message, user.username):
# Create notification
if user == sending_user:
continue
note = Notification(
text='{} {}: {}'.format(
sending_user.username, notify_text, message),
user=user,
from_user=sending_user,
type=notify_type,
link=notify_url)
note.save()
logger.info("Created notification for user {} from {}"
.format(note))
return message
def username_in_message(message, username):
message = message.lower()
username = username.lower()
# Check if @username in message. Edge case for username at the end of
# the message.
if '@' + username + ' ' in message.lower():
return True
try:
return (message.index('@' + username) ==
len(message.lower()) - len('@' + username))
except ValueError:
return False<commit_msg>Fix pinging in chat throwing errors<commit_after>
|
import logging
from notify.models import Notification
logger = logging.getLogger(__name__)
def ping_filter(message, users, sending_user, notify_text, notify_type,
notify_url=None):
for user in users:
if username_in_message(message, user.username):
# Create notification
if user == sending_user:
continue
note = Notification(
text='{} {}: {}'.format(
sending_user.username, notify_text, message),
user=user,
from_user=sending_user,
type=notify_type,
link=notify_url)
note.save()
logger.info("Created notification for user {} from {}"
.format(note.user, note.from_user))
return message
def username_in_message(message, username):
message = message.lower()
username = username.lower()
# Check if @username in message. Edge case for username at the end of
# the message.
if '@' + username + ' ' in message.lower():
return True
try:
return (message.index('@' + username) ==
len(message.lower()) - len('@' + username))
except ValueError:
return False
|
import logging
from notify.models import Notification
logger = logging.getLogger(__name__)
def ping_filter(message, users, sending_user, notify_text, notify_type,
notify_url=None):
for user in users:
if username_in_message(message, user.username):
# Create notification
if user == sending_user:
continue
note = Notification(
text='{} {}: {}'.format(
sending_user.username, notify_text, message),
user=user,
from_user=sending_user,
type=notify_type,
link=notify_url)
note.save()
logger.info("Created notification for user {} from {}"
.format(note))
return message
def username_in_message(message, username):
message = message.lower()
username = username.lower()
# Check if @username in message. Edge case for username at the end of
# the message.
if '@' + username + ' ' in message.lower():
return True
try:
return (message.index('@' + username) ==
len(message.lower()) - len('@' + username))
except ValueError:
return FalseFix pinging in chat throwing errorsimport logging
from notify.models import Notification
logger = logging.getLogger(__name__)
def ping_filter(message, users, sending_user, notify_text, notify_type,
notify_url=None):
for user in users:
if username_in_message(message, user.username):
# Create notification
if user == sending_user:
continue
note = Notification(
text='{} {}: {}'.format(
sending_user.username, notify_text, message),
user=user,
from_user=sending_user,
type=notify_type,
link=notify_url)
note.save()
logger.info("Created notification for user {} from {}"
.format(note.user, note.from_user))
return message
def username_in_message(message, username):
message = message.lower()
username = username.lower()
# Check if @username in message. Edge case for username at the end of
# the message.
if '@' + username + ' ' in message.lower():
return True
try:
return (message.index('@' + username) ==
len(message.lower()) - len('@' + username))
except ValueError:
return False
|
<commit_before>import logging
from notify.models import Notification
logger = logging.getLogger(__name__)
def ping_filter(message, users, sending_user, notify_text, notify_type,
notify_url=None):
for user in users:
if username_in_message(message, user.username):
# Create notification
if user == sending_user:
continue
note = Notification(
text='{} {}: {}'.format(
sending_user.username, notify_text, message),
user=user,
from_user=sending_user,
type=notify_type,
link=notify_url)
note.save()
logger.info("Created notification for user {} from {}"
.format(note))
return message
def username_in_message(message, username):
message = message.lower()
username = username.lower()
# Check if @username in message. Edge case for username at the end of
# the message.
if '@' + username + ' ' in message.lower():
return True
try:
return (message.index('@' + username) ==
len(message.lower()) - len('@' + username))
except ValueError:
return False<commit_msg>Fix pinging in chat throwing errors<commit_after>import logging
from notify.models import Notification
logger = logging.getLogger(__name__)
def ping_filter(message, users, sending_user, notify_text, notify_type,
notify_url=None):
for user in users:
if username_in_message(message, user.username):
# Create notification
if user == sending_user:
continue
note = Notification(
text='{} {}: {}'.format(
sending_user.username, notify_text, message),
user=user,
from_user=sending_user,
type=notify_type,
link=notify_url)
note.save()
logger.info("Created notification for user {} from {}"
.format(note.user, note.from_user))
return message
def username_in_message(message, username):
message = message.lower()
username = username.lower()
# Check if @username in message. Edge case for username at the end of
# the message.
if '@' + username + ' ' in message.lower():
return True
try:
return (message.index('@' + username) ==
len(message.lower()) - len('@' + username))
except ValueError:
return False
|
469c1dc9de1c986beda853b13909bdc5d3ff2b92
|
stagecraft/urls.py
|
stagecraft/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from stagecraft.apps.datasets import views as datasets_views
from stagecraft.libs.status import views as status_views
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
# Note that the query string params get transparently passed to the view
url(r'^data-sets$', datasets_views.list, name='data-sets-list'),
url(r'^data-sets/$', RedirectView.as_view(pattern_name='data-sets-list',
permanent=True)),
url(r'^data-sets/(?P<name>[\w-]+)$', datasets_views.detail),
url(r'^_status/data-sets$', datasets_views.health_check),
url(r'^_status$', status_views.status),
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from stagecraft.apps.datasets import views as datasets_views
from stagecraft.libs.status import views as status_views
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
# Note that the query string params get transparently passed to the view
url(r'^data-sets$', datasets_views.list, name='data-sets-list'),
url(r'^data-sets/$', RedirectView.as_view(pattern_name='data-sets-list',
permanent=True,
query_string=True)),
url(r'^data-sets/(?P<name>[\w-]+)$', datasets_views.detail),
url(r'^_status/data-sets$', datasets_views.health_check),
url(r'^_status$', status_views.status),
)
|
Make redirect view pass the GET query string to the new location
|
Make redirect view pass the GET query string to the new location
|
Python
|
mit
|
alphagov/stagecraft,alphagov/stagecraft,alphagov/stagecraft,alphagov/stagecraft
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from stagecraft.apps.datasets import views as datasets_views
from stagecraft.libs.status import views as status_views
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
# Note that the query string params get transparently passed to the view
url(r'^data-sets$', datasets_views.list, name='data-sets-list'),
url(r'^data-sets/$', RedirectView.as_view(pattern_name='data-sets-list',
permanent=True)),
url(r'^data-sets/(?P<name>[\w-]+)$', datasets_views.detail),
url(r'^_status/data-sets$', datasets_views.health_check),
url(r'^_status$', status_views.status),
)
Make redirect view pass the GET query string to the new location
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from stagecraft.apps.datasets import views as datasets_views
from stagecraft.libs.status import views as status_views
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
# Note that the query string params get transparently passed to the view
url(r'^data-sets$', datasets_views.list, name='data-sets-list'),
url(r'^data-sets/$', RedirectView.as_view(pattern_name='data-sets-list',
permanent=True,
query_string=True)),
url(r'^data-sets/(?P<name>[\w-]+)$', datasets_views.detail),
url(r'^_status/data-sets$', datasets_views.health_check),
url(r'^_status$', status_views.status),
)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from stagecraft.apps.datasets import views as datasets_views
from stagecraft.libs.status import views as status_views
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
# Note that the query string params get transparently passed to the view
url(r'^data-sets$', datasets_views.list, name='data-sets-list'),
url(r'^data-sets/$', RedirectView.as_view(pattern_name='data-sets-list',
permanent=True)),
url(r'^data-sets/(?P<name>[\w-]+)$', datasets_views.detail),
url(r'^_status/data-sets$', datasets_views.health_check),
url(r'^_status$', status_views.status),
)
<commit_msg>Make redirect view pass the GET query string to the new location<commit_after>
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from stagecraft.apps.datasets import views as datasets_views
from stagecraft.libs.status import views as status_views
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
# Note that the query string params get transparently passed to the view
url(r'^data-sets$', datasets_views.list, name='data-sets-list'),
url(r'^data-sets/$', RedirectView.as_view(pattern_name='data-sets-list',
permanent=True,
query_string=True)),
url(r'^data-sets/(?P<name>[\w-]+)$', datasets_views.detail),
url(r'^_status/data-sets$', datasets_views.health_check),
url(r'^_status$', status_views.status),
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from stagecraft.apps.datasets import views as datasets_views
from stagecraft.libs.status import views as status_views
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
# Note that the query string params get transparently passed to the view
url(r'^data-sets$', datasets_views.list, name='data-sets-list'),
url(r'^data-sets/$', RedirectView.as_view(pattern_name='data-sets-list',
permanent=True)),
url(r'^data-sets/(?P<name>[\w-]+)$', datasets_views.detail),
url(r'^_status/data-sets$', datasets_views.health_check),
url(r'^_status$', status_views.status),
)
Make redirect view pass the GET query string to the new locationfrom django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from stagecraft.apps.datasets import views as datasets_views
from stagecraft.libs.status import views as status_views
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
# Note that the query string params get transparently passed to the view
url(r'^data-sets$', datasets_views.list, name='data-sets-list'),
url(r'^data-sets/$', RedirectView.as_view(pattern_name='data-sets-list',
permanent=True,
query_string=True)),
url(r'^data-sets/(?P<name>[\w-]+)$', datasets_views.detail),
url(r'^_status/data-sets$', datasets_views.health_check),
url(r'^_status$', status_views.status),
)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from stagecraft.apps.datasets import views as datasets_views
from stagecraft.libs.status import views as status_views
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
# Note that the query string params get transparently passed to the view
url(r'^data-sets$', datasets_views.list, name='data-sets-list'),
url(r'^data-sets/$', RedirectView.as_view(pattern_name='data-sets-list',
permanent=True)),
url(r'^data-sets/(?P<name>[\w-]+)$', datasets_views.detail),
url(r'^_status/data-sets$', datasets_views.health_check),
url(r'^_status$', status_views.status),
)
<commit_msg>Make redirect view pass the GET query string to the new location<commit_after>from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from stagecraft.apps.datasets import views as datasets_views
from stagecraft.libs.status import views as status_views
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
# Note that the query string params get transparently passed to the view
url(r'^data-sets$', datasets_views.list, name='data-sets-list'),
url(r'^data-sets/$', RedirectView.as_view(pattern_name='data-sets-list',
permanent=True,
query_string=True)),
url(r'^data-sets/(?P<name>[\w-]+)$', datasets_views.detail),
url(r'^_status/data-sets$', datasets_views.health_check),
url(r'^_status$', status_views.status),
)
|
018abd1a80bf0045d1f2d2c04d1caaa4db9433b8
|
froide/helper/search/paginator.py
|
froide/helper/search/paginator.py
|
from django.core.paginator import Paginator
class ElasticsearchPaginator(Paginator):
"""
Paginator that prevents two queries to ES (for count and objects)
as ES gives count with objects
"""
MAX_ES_OFFSET = 10000
def page(self, number):
"""
Returns a Page object for the given 1-based page number.
"""
bottom = (number - 1) * self.per_page
if bottom >= self.MAX_ES_OFFSET:
# Only validate if bigger than offset
number = self.validate_number(number)
bottom = (number - 1) * self.per_page
top = bottom + self.per_page
self.object_list = self.object_list[bottom:top]
# ignore top boundary
# if top + self.orphans >= self.count:
# top = self.count
# Validate number after limit/offset has been set
number = self.validate_number(number)
return self._get_page(self.object_list, number, self)
|
from django.core.paginator import Paginator, InvalidPage
class ElasticsearchPaginator(Paginator):
"""
Paginator that prevents two queries to ES (for count and objects)
as ES gives count with objects
"""
MAX_ES_OFFSET = 10000
def page(self, number):
"""
Returns a Page object for the given 1-based page number.
"""
bottom = (number - 1) * self.per_page
if bottom >= self.MAX_ES_OFFSET:
# Only validate if bigger than offset
number = self.validate_number(number)
bottom = (number - 1) * self.per_page
top = bottom + self.per_page
try:
self.object_list = self.object_list[bottom:top]
except ValueError:
raise InvalidPage()
# ignore top boundary
# if top + self.orphans >= self.count:
# top = self.count
# Validate number after limit/offset has been set
number = self.validate_number(number)
return self._get_page(self.object_list, number, self)
|
Raise invalid page on paging error
|
Raise invalid page on paging error
|
Python
|
mit
|
stefanw/froide,stefanw/froide,stefanw/froide,fin/froide,fin/froide,fin/froide,fin/froide,stefanw/froide,stefanw/froide
|
from django.core.paginator import Paginator
class ElasticsearchPaginator(Paginator):
"""
Paginator that prevents two queries to ES (for count and objects)
as ES gives count with objects
"""
MAX_ES_OFFSET = 10000
def page(self, number):
"""
Returns a Page object for the given 1-based page number.
"""
bottom = (number - 1) * self.per_page
if bottom >= self.MAX_ES_OFFSET:
# Only validate if bigger than offset
number = self.validate_number(number)
bottom = (number - 1) * self.per_page
top = bottom + self.per_page
self.object_list = self.object_list[bottom:top]
# ignore top boundary
# if top + self.orphans >= self.count:
# top = self.count
# Validate number after limit/offset has been set
number = self.validate_number(number)
return self._get_page(self.object_list, number, self)
Raise invalid page on paging error
|
from django.core.paginator import Paginator, InvalidPage
class ElasticsearchPaginator(Paginator):
"""
Paginator that prevents two queries to ES (for count and objects)
as ES gives count with objects
"""
MAX_ES_OFFSET = 10000
def page(self, number):
"""
Returns a Page object for the given 1-based page number.
"""
bottom = (number - 1) * self.per_page
if bottom >= self.MAX_ES_OFFSET:
# Only validate if bigger than offset
number = self.validate_number(number)
bottom = (number - 1) * self.per_page
top = bottom + self.per_page
try:
self.object_list = self.object_list[bottom:top]
except ValueError:
raise InvalidPage()
# ignore top boundary
# if top + self.orphans >= self.count:
# top = self.count
# Validate number after limit/offset has been set
number = self.validate_number(number)
return self._get_page(self.object_list, number, self)
|
<commit_before>from django.core.paginator import Paginator
class ElasticsearchPaginator(Paginator):
"""
Paginator that prevents two queries to ES (for count and objects)
as ES gives count with objects
"""
MAX_ES_OFFSET = 10000
def page(self, number):
"""
Returns a Page object for the given 1-based page number.
"""
bottom = (number - 1) * self.per_page
if bottom >= self.MAX_ES_OFFSET:
# Only validate if bigger than offset
number = self.validate_number(number)
bottom = (number - 1) * self.per_page
top = bottom + self.per_page
self.object_list = self.object_list[bottom:top]
# ignore top boundary
# if top + self.orphans >= self.count:
# top = self.count
# Validate number after limit/offset has been set
number = self.validate_number(number)
return self._get_page(self.object_list, number, self)
<commit_msg>Raise invalid page on paging error<commit_after>
|
from django.core.paginator import Paginator, InvalidPage
class ElasticsearchPaginator(Paginator):
"""
Paginator that prevents two queries to ES (for count and objects)
as ES gives count with objects
"""
MAX_ES_OFFSET = 10000
def page(self, number):
"""
Returns a Page object for the given 1-based page number.
"""
bottom = (number - 1) * self.per_page
if bottom >= self.MAX_ES_OFFSET:
# Only validate if bigger than offset
number = self.validate_number(number)
bottom = (number - 1) * self.per_page
top = bottom + self.per_page
try:
self.object_list = self.object_list[bottom:top]
except ValueError:
raise InvalidPage()
# ignore top boundary
# if top + self.orphans >= self.count:
# top = self.count
# Validate number after limit/offset has been set
number = self.validate_number(number)
return self._get_page(self.object_list, number, self)
|
from django.core.paginator import Paginator
class ElasticsearchPaginator(Paginator):
"""
Paginator that prevents two queries to ES (for count and objects)
as ES gives count with objects
"""
MAX_ES_OFFSET = 10000
def page(self, number):
"""
Returns a Page object for the given 1-based page number.
"""
bottom = (number - 1) * self.per_page
if bottom >= self.MAX_ES_OFFSET:
# Only validate if bigger than offset
number = self.validate_number(number)
bottom = (number - 1) * self.per_page
top = bottom + self.per_page
self.object_list = self.object_list[bottom:top]
# ignore top boundary
# if top + self.orphans >= self.count:
# top = self.count
# Validate number after limit/offset has been set
number = self.validate_number(number)
return self._get_page(self.object_list, number, self)
Raise invalid page on paging errorfrom django.core.paginator import Paginator, InvalidPage
class ElasticsearchPaginator(Paginator):
"""
Paginator that prevents two queries to ES (for count and objects)
as ES gives count with objects
"""
MAX_ES_OFFSET = 10000
def page(self, number):
"""
Returns a Page object for the given 1-based page number.
"""
bottom = (number - 1) * self.per_page
if bottom >= self.MAX_ES_OFFSET:
# Only validate if bigger than offset
number = self.validate_number(number)
bottom = (number - 1) * self.per_page
top = bottom + self.per_page
try:
self.object_list = self.object_list[bottom:top]
except ValueError:
raise InvalidPage()
# ignore top boundary
# if top + self.orphans >= self.count:
# top = self.count
# Validate number after limit/offset has been set
number = self.validate_number(number)
return self._get_page(self.object_list, number, self)
|
<commit_before>from django.core.paginator import Paginator
class ElasticsearchPaginator(Paginator):
"""
Paginator that prevents two queries to ES (for count and objects)
as ES gives count with objects
"""
MAX_ES_OFFSET = 10000
def page(self, number):
"""
Returns a Page object for the given 1-based page number.
"""
bottom = (number - 1) * self.per_page
if bottom >= self.MAX_ES_OFFSET:
# Only validate if bigger than offset
number = self.validate_number(number)
bottom = (number - 1) * self.per_page
top = bottom + self.per_page
self.object_list = self.object_list[bottom:top]
# ignore top boundary
# if top + self.orphans >= self.count:
# top = self.count
# Validate number after limit/offset has been set
number = self.validate_number(number)
return self._get_page(self.object_list, number, self)
<commit_msg>Raise invalid page on paging error<commit_after>from django.core.paginator import Paginator, InvalidPage
class ElasticsearchPaginator(Paginator):
"""
Paginator that prevents two queries to ES (for count and objects)
as ES gives count with objects
"""
MAX_ES_OFFSET = 10000
def page(self, number):
"""
Returns a Page object for the given 1-based page number.
"""
bottom = (number - 1) * self.per_page
if bottom >= self.MAX_ES_OFFSET:
# Only validate if bigger than offset
number = self.validate_number(number)
bottom = (number - 1) * self.per_page
top = bottom + self.per_page
try:
self.object_list = self.object_list[bottom:top]
except ValueError:
raise InvalidPage()
# ignore top boundary
# if top + self.orphans >= self.count:
# top = self.count
# Validate number after limit/offset has been set
number = self.validate_number(number)
return self._get_page(self.object_list, number, self)
|
5794d0ff86f90a1f1f2ad7ca52cb6f1d34cb5b24
|
ktbs_bench/benchable_store.py
|
ktbs_bench/benchable_store.py
|
from rdflib import Graph
from ktbs_bench.bnsparqlstore import SPARQLStore
class BenchableStore(Graph):
def __init__(self, connect_args, create_func=None, create_args=[], *args, **kwargs):
super(BenchableStore, self).__init__(*args, **kwargs)
self.connect_args = connect_args
self.create_func = create_func
self.create_args = create_args
def connect(self):
if isinstance(self.connect_args, dict):
self.open(**self.connect_args)
else:
raise TypeError("connect_args must be a dict.")
def create(self):
if self.create_func:
self.create_func(*self.create_args) # TODO gerer exception si db existe deja
def destroy(self):
"""For SQL: destroy tables of the DB, not the DB itself."""
if isinstance(self.store, SPARQLStore):
self.sparql_destroy()
else:
super(BenchableStore, self).destroy(self.connect_args['configuration'])
def sparql_destroy(self):
"""Try to destroy the graph as if the current store is a SPARQLStore."""
# TODO improve destroy by using SPARQL CLEAR GRAPH if RDFLib supports it
# or execute something on the command line
for s, p, o in self:
self.remove((s, p, o))
|
from rdflib import Graph
from ktbs_bench.bnsparqlstore import SPARQLStore
class BenchableStore:
"""Allows to use a store/graph for benchmarks.
Contains a rdflib.Graph with setup and teardown.
"""
def __init__(self, store, graph_id, store_config, store_create=False):
self.graph = Graph(store=store, identifier=graph_id)
self._store_config = store_config
self._store_create = store_create
def connect(self, store_create=None):
if store_create:
do_create = store_create
else:
do_create = self._store_create
self.graph.open(self._store_config, create=do_create)
def close(self, commit_pending_transaction=False):
self.graph.close(commit_pending_transaction=commit_pending_transaction)
def destroy(self):
if isinstance(self.graph.store, SPARQLStore):
self.sparql_destroy()
else:
self.graph.destroy(self._store_config)
def sparql_destroy(self):
"""Try to destroy the graph as if the current store is a SPARQLStore."""
# TODO improve destroy by using SPARQL CLEAR GRAPH if RDFLib supports it
# or execute something on the command line
for s, p, o in self.graph:
self.graph.remove((s, p, o))
|
Make Graph an attribute rather than an inheritance
|
Make Graph an attribute rather than an inheritance
|
Python
|
mit
|
ktbs/ktbs-bench,ktbs/ktbs-bench
|
from rdflib import Graph
from ktbs_bench.bnsparqlstore import SPARQLStore
class BenchableStore(Graph):
def __init__(self, connect_args, create_func=None, create_args=[], *args, **kwargs):
super(BenchableStore, self).__init__(*args, **kwargs)
self.connect_args = connect_args
self.create_func = create_func
self.create_args = create_args
def connect(self):
if isinstance(self.connect_args, dict):
self.open(**self.connect_args)
else:
raise TypeError("connect_args must be a dict.")
def create(self):
if self.create_func:
self.create_func(*self.create_args) # TODO gerer exception si db existe deja
def destroy(self):
"""For SQL: destroy tables of the DB, not the DB itself."""
if isinstance(self.store, SPARQLStore):
self.sparql_destroy()
else:
super(BenchableStore, self).destroy(self.connect_args['configuration'])
def sparql_destroy(self):
"""Try to destroy the graph as if the current store is a SPARQLStore."""
# TODO improve destroy by using SPARQL CLEAR GRAPH if RDFLib supports it
# or execute something on the command line
for s, p, o in self:
self.remove((s, p, o))
Make Graph an attribute rather than an inheritance
|
from rdflib import Graph
from ktbs_bench.bnsparqlstore import SPARQLStore
class BenchableStore:
"""Allows to use a store/graph for benchmarks.
Contains a rdflib.Graph with setup and teardown.
"""
def __init__(self, store, graph_id, store_config, store_create=False):
self.graph = Graph(store=store, identifier=graph_id)
self._store_config = store_config
self._store_create = store_create
def connect(self, store_create=None):
if store_create:
do_create = store_create
else:
do_create = self._store_create
self.graph.open(self._store_config, create=do_create)
def close(self, commit_pending_transaction=False):
self.graph.close(commit_pending_transaction=commit_pending_transaction)
def destroy(self):
if isinstance(self.graph.store, SPARQLStore):
self.sparql_destroy()
else:
self.graph.destroy(self._store_config)
def sparql_destroy(self):
"""Try to destroy the graph as if the current store is a SPARQLStore."""
# TODO improve destroy by using SPARQL CLEAR GRAPH if RDFLib supports it
# or execute something on the command line
for s, p, o in self.graph:
self.graph.remove((s, p, o))
|
<commit_before>from rdflib import Graph
from ktbs_bench.bnsparqlstore import SPARQLStore
class BenchableStore(Graph):
def __init__(self, connect_args, create_func=None, create_args=[], *args, **kwargs):
super(BenchableStore, self).__init__(*args, **kwargs)
self.connect_args = connect_args
self.create_func = create_func
self.create_args = create_args
def connect(self):
if isinstance(self.connect_args, dict):
self.open(**self.connect_args)
else:
raise TypeError("connect_args must be a dict.")
def create(self):
if self.create_func:
self.create_func(*self.create_args) # TODO gerer exception si db existe deja
def destroy(self):
"""For SQL: destroy tables of the DB, not the DB itself."""
if isinstance(self.store, SPARQLStore):
self.sparql_destroy()
else:
super(BenchableStore, self).destroy(self.connect_args['configuration'])
def sparql_destroy(self):
"""Try to destroy the graph as if the current store is a SPARQLStore."""
# TODO improve destroy by using SPARQL CLEAR GRAPH if RDFLib supports it
# or execute something on the command line
for s, p, o in self:
self.remove((s, p, o))
<commit_msg>Make Graph an attribute rather than an inheritance<commit_after>
|
from rdflib import Graph
from ktbs_bench.bnsparqlstore import SPARQLStore
class BenchableStore:
"""Allows to use a store/graph for benchmarks.
Contains a rdflib.Graph with setup and teardown.
"""
def __init__(self, store, graph_id, store_config, store_create=False):
self.graph = Graph(store=store, identifier=graph_id)
self._store_config = store_config
self._store_create = store_create
def connect(self, store_create=None):
if store_create:
do_create = store_create
else:
do_create = self._store_create
self.graph.open(self._store_config, create=do_create)
def close(self, commit_pending_transaction=False):
self.graph.close(commit_pending_transaction=commit_pending_transaction)
def destroy(self):
if isinstance(self.graph.store, SPARQLStore):
self.sparql_destroy()
else:
self.graph.destroy(self._store_config)
def sparql_destroy(self):
"""Try to destroy the graph as if the current store is a SPARQLStore."""
# TODO improve destroy by using SPARQL CLEAR GRAPH if RDFLib supports it
# or execute something on the command line
for s, p, o in self.graph:
self.graph.remove((s, p, o))
|
from rdflib import Graph
from ktbs_bench.bnsparqlstore import SPARQLStore
class BenchableStore(Graph):
def __init__(self, connect_args, create_func=None, create_args=[], *args, **kwargs):
super(BenchableStore, self).__init__(*args, **kwargs)
self.connect_args = connect_args
self.create_func = create_func
self.create_args = create_args
def connect(self):
if isinstance(self.connect_args, dict):
self.open(**self.connect_args)
else:
raise TypeError("connect_args must be a dict.")
def create(self):
if self.create_func:
self.create_func(*self.create_args) # TODO gerer exception si db existe deja
def destroy(self):
"""For SQL: destroy tables of the DB, not the DB itself."""
if isinstance(self.store, SPARQLStore):
self.sparql_destroy()
else:
super(BenchableStore, self).destroy(self.connect_args['configuration'])
def sparql_destroy(self):
"""Try to destroy the graph as if the current store is a SPARQLStore."""
# TODO improve destroy by using SPARQL CLEAR GRAPH if RDFLib supports it
# or execute something on the command line
for s, p, o in self:
self.remove((s, p, o))
Make Graph an attribute rather than an inheritancefrom rdflib import Graph
from ktbs_bench.bnsparqlstore import SPARQLStore
class BenchableStore:
"""Allows to use a store/graph for benchmarks.
Contains a rdflib.Graph with setup and teardown.
"""
def __init__(self, store, graph_id, store_config, store_create=False):
self.graph = Graph(store=store, identifier=graph_id)
self._store_config = store_config
self._store_create = store_create
def connect(self, store_create=None):
if store_create:
do_create = store_create
else:
do_create = self._store_create
self.graph.open(self._store_config, create=do_create)
def close(self, commit_pending_transaction=False):
self.graph.close(commit_pending_transaction=commit_pending_transaction)
def destroy(self):
if isinstance(self.graph.store, SPARQLStore):
self.sparql_destroy()
else:
self.graph.destroy(self._store_config)
def sparql_destroy(self):
"""Try to destroy the graph as if the current store is a SPARQLStore."""
# TODO improve destroy by using SPARQL CLEAR GRAPH if RDFLib supports it
# or execute something on the command line
for s, p, o in self.graph:
self.graph.remove((s, p, o))
|
<commit_before>from rdflib import Graph
from ktbs_bench.bnsparqlstore import SPARQLStore
class BenchableStore(Graph):
def __init__(self, connect_args, create_func=None, create_args=[], *args, **kwargs):
super(BenchableStore, self).__init__(*args, **kwargs)
self.connect_args = connect_args
self.create_func = create_func
self.create_args = create_args
def connect(self):
if isinstance(self.connect_args, dict):
self.open(**self.connect_args)
else:
raise TypeError("connect_args must be a dict.")
def create(self):
if self.create_func:
self.create_func(*self.create_args) # TODO gerer exception si db existe deja
def destroy(self):
"""For SQL: destroy tables of the DB, not the DB itself."""
if isinstance(self.store, SPARQLStore):
self.sparql_destroy()
else:
super(BenchableStore, self).destroy(self.connect_args['configuration'])
def sparql_destroy(self):
"""Try to destroy the graph as if the current store is a SPARQLStore."""
# TODO improve destroy by using SPARQL CLEAR GRAPH if RDFLib supports it
# or execute something on the command line
for s, p, o in self:
self.remove((s, p, o))
<commit_msg>Make Graph an attribute rather than an inheritance<commit_after>from rdflib import Graph
from ktbs_bench.bnsparqlstore import SPARQLStore
class BenchableStore:
"""Allows to use a store/graph for benchmarks.
Contains a rdflib.Graph with setup and teardown.
"""
def __init__(self, store, graph_id, store_config, store_create=False):
self.graph = Graph(store=store, identifier=graph_id)
self._store_config = store_config
self._store_create = store_create
def connect(self, store_create=None):
if store_create:
do_create = store_create
else:
do_create = self._store_create
self.graph.open(self._store_config, create=do_create)
def close(self, commit_pending_transaction=False):
self.graph.close(commit_pending_transaction=commit_pending_transaction)
def destroy(self):
if isinstance(self.graph.store, SPARQLStore):
self.sparql_destroy()
else:
self.graph.destroy(self._store_config)
def sparql_destroy(self):
"""Try to destroy the graph as if the current store is a SPARQLStore."""
# TODO improve destroy by using SPARQL CLEAR GRAPH if RDFLib supports it
# or execute something on the command line
for s, p, o in self.graph:
self.graph.remove((s, p, o))
|
2ca26d1d4d6ce578bf217741e9e8a32d3145c3df
|
tests/test_render.py
|
tests/test_render.py
|
import unittest
import json
import great_expectations as ge
from great_expectations import render
class TestPageRenderers(unittest.TestCase):
def test_import(self):
from great_expectations import render
def test_prescriptive_expectation_renderer(self):
results = render.render(
renderer_class=render.PrescriptiveExpectationPageRenderer,
expectations=json.load(open('tests/test_fixtures/rendering_fixtures/expectation_suite_3.json'))["expectations"],
)
assert results != None
# with open('./test.html', 'w') as f:
# f.write(results)
def test_descriptive_evr_renderer(self):
R = render.DescriptiveEvrPageRenderer(
json.load(open('tests/test_fixtures/rendering_fixtures/evr_suite_3.json'))["results"],
)
rendered_page = R.render()
assert rendered_page != None
# with open('./test.html', 'w') as f:
# f.write(rendered_page)
def test_full_oobe_flow(sefl):
df = ge.read_csv("/Users/abe/Documents/superconductive/data/Sacramentorealestatetransactions.csv")
df.autoinspect(ge.dataset.autoinspect.pseudo_pandas_profiling)
evrs = df.validate()["results"]
R = render.DescriptiveEvrPageRenderer(evrs)
rendered_page = R.render()
assert rendered_page != None
# with open('./test.html', 'w') as f:
# f.write(rendered_page)
|
import unittest
import json
import great_expectations as ge
from great_expectations import render
class TestPageRenderers(unittest.TestCase):
def test_import(self):
from great_expectations import render
def test_prescriptive_expectation_renderer(self):
results = render.render(
renderer_class=render.PrescriptiveExpectationPageRenderer,
expectations=json.load(open('tests/test_fixtures/rendering_fixtures/expectation_suite_3.json'))["expectations"],
)
assert results != None
# with open('./test.html', 'w') as f:
# f.write(results)
def test_descriptive_evr_renderer(self):
R = render.DescriptiveEvrPageRenderer(
json.load(open('tests/test_fixtures/rendering_fixtures/evr_suite_3.json'))["results"],
)
rendered_page = R.render()
assert rendered_page != None
# with open('./test.html', 'w') as f:
# f.write(rendered_page)
def test_full_oobe_flow(sefl):
df = ge.read_csv("examples/data/Titanic.csv")
df.autoinspect(ge.dataset.autoinspect.pseudo_pandas_profiling)
evrs = df.validate()["results"]
R = render.DescriptiveEvrPageRenderer(evrs)
rendered_page = R.render()
assert rendered_page != None
# with open('./test.html', 'w') as f:
# f.write(rendered_page)
|
Use the Titanic example data set
|
Use the Titanic example data set
|
Python
|
apache-2.0
|
great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations
|
import unittest
import json
import great_expectations as ge
from great_expectations import render
class TestPageRenderers(unittest.TestCase):
def test_import(self):
from great_expectations import render
def test_prescriptive_expectation_renderer(self):
results = render.render(
renderer_class=render.PrescriptiveExpectationPageRenderer,
expectations=json.load(open('tests/test_fixtures/rendering_fixtures/expectation_suite_3.json'))["expectations"],
)
assert results != None
# with open('./test.html', 'w') as f:
# f.write(results)
def test_descriptive_evr_renderer(self):
R = render.DescriptiveEvrPageRenderer(
json.load(open('tests/test_fixtures/rendering_fixtures/evr_suite_3.json'))["results"],
)
rendered_page = R.render()
assert rendered_page != None
# with open('./test.html', 'w') as f:
# f.write(rendered_page)
def test_full_oobe_flow(sefl):
df = ge.read_csv("/Users/abe/Documents/superconductive/data/Sacramentorealestatetransactions.csv")
df.autoinspect(ge.dataset.autoinspect.pseudo_pandas_profiling)
evrs = df.validate()["results"]
R = render.DescriptiveEvrPageRenderer(evrs)
rendered_page = R.render()
assert rendered_page != None
# with open('./test.html', 'w') as f:
# f.write(rendered_page)Use the Titanic example data set
|
import unittest
import json
import great_expectations as ge
from great_expectations import render
class TestPageRenderers(unittest.TestCase):
def test_import(self):
from great_expectations import render
def test_prescriptive_expectation_renderer(self):
results = render.render(
renderer_class=render.PrescriptiveExpectationPageRenderer,
expectations=json.load(open('tests/test_fixtures/rendering_fixtures/expectation_suite_3.json'))["expectations"],
)
assert results != None
# with open('./test.html', 'w') as f:
# f.write(results)
def test_descriptive_evr_renderer(self):
R = render.DescriptiveEvrPageRenderer(
json.load(open('tests/test_fixtures/rendering_fixtures/evr_suite_3.json'))["results"],
)
rendered_page = R.render()
assert rendered_page != None
# with open('./test.html', 'w') as f:
# f.write(rendered_page)
def test_full_oobe_flow(sefl):
df = ge.read_csv("examples/data/Titanic.csv")
df.autoinspect(ge.dataset.autoinspect.pseudo_pandas_profiling)
evrs = df.validate()["results"]
R = render.DescriptiveEvrPageRenderer(evrs)
rendered_page = R.render()
assert rendered_page != None
# with open('./test.html', 'w') as f:
# f.write(rendered_page)
|
<commit_before>import unittest
import json
import great_expectations as ge
from great_expectations import render
class TestPageRenderers(unittest.TestCase):
def test_import(self):
from great_expectations import render
def test_prescriptive_expectation_renderer(self):
results = render.render(
renderer_class=render.PrescriptiveExpectationPageRenderer,
expectations=json.load(open('tests/test_fixtures/rendering_fixtures/expectation_suite_3.json'))["expectations"],
)
assert results != None
# with open('./test.html', 'w') as f:
# f.write(results)
def test_descriptive_evr_renderer(self):
R = render.DescriptiveEvrPageRenderer(
json.load(open('tests/test_fixtures/rendering_fixtures/evr_suite_3.json'))["results"],
)
rendered_page = R.render()
assert rendered_page != None
# with open('./test.html', 'w') as f:
# f.write(rendered_page)
def test_full_oobe_flow(sefl):
df = ge.read_csv("/Users/abe/Documents/superconductive/data/Sacramentorealestatetransactions.csv")
df.autoinspect(ge.dataset.autoinspect.pseudo_pandas_profiling)
evrs = df.validate()["results"]
R = render.DescriptiveEvrPageRenderer(evrs)
rendered_page = R.render()
assert rendered_page != None
# with open('./test.html', 'w') as f:
# f.write(rendered_page)<commit_msg>Use the Titanic example data set<commit_after>
|
import unittest
import json
import great_expectations as ge
from great_expectations import render
class TestPageRenderers(unittest.TestCase):
def test_import(self):
from great_expectations import render
def test_prescriptive_expectation_renderer(self):
results = render.render(
renderer_class=render.PrescriptiveExpectationPageRenderer,
expectations=json.load(open('tests/test_fixtures/rendering_fixtures/expectation_suite_3.json'))["expectations"],
)
assert results != None
# with open('./test.html', 'w') as f:
# f.write(results)
def test_descriptive_evr_renderer(self):
R = render.DescriptiveEvrPageRenderer(
json.load(open('tests/test_fixtures/rendering_fixtures/evr_suite_3.json'))["results"],
)
rendered_page = R.render()
assert rendered_page != None
# with open('./test.html', 'w') as f:
# f.write(rendered_page)
def test_full_oobe_flow(sefl):
df = ge.read_csv("examples/data/Titanic.csv")
df.autoinspect(ge.dataset.autoinspect.pseudo_pandas_profiling)
evrs = df.validate()["results"]
R = render.DescriptiveEvrPageRenderer(evrs)
rendered_page = R.render()
assert rendered_page != None
# with open('./test.html', 'w') as f:
# f.write(rendered_page)
|
import unittest
import json
import great_expectations as ge
from great_expectations import render
class TestPageRenderers(unittest.TestCase):
def test_import(self):
from great_expectations import render
def test_prescriptive_expectation_renderer(self):
results = render.render(
renderer_class=render.PrescriptiveExpectationPageRenderer,
expectations=json.load(open('tests/test_fixtures/rendering_fixtures/expectation_suite_3.json'))["expectations"],
)
assert results != None
# with open('./test.html', 'w') as f:
# f.write(results)
def test_descriptive_evr_renderer(self):
R = render.DescriptiveEvrPageRenderer(
json.load(open('tests/test_fixtures/rendering_fixtures/evr_suite_3.json'))["results"],
)
rendered_page = R.render()
assert rendered_page != None
# with open('./test.html', 'w') as f:
# f.write(rendered_page)
def test_full_oobe_flow(sefl):
df = ge.read_csv("/Users/abe/Documents/superconductive/data/Sacramentorealestatetransactions.csv")
df.autoinspect(ge.dataset.autoinspect.pseudo_pandas_profiling)
evrs = df.validate()["results"]
R = render.DescriptiveEvrPageRenderer(evrs)
rendered_page = R.render()
assert rendered_page != None
# with open('./test.html', 'w') as f:
# f.write(rendered_page)Use the Titanic example data setimport unittest
import json
import great_expectations as ge
from great_expectations import render
class TestPageRenderers(unittest.TestCase):
def test_import(self):
from great_expectations import render
def test_prescriptive_expectation_renderer(self):
results = render.render(
renderer_class=render.PrescriptiveExpectationPageRenderer,
expectations=json.load(open('tests/test_fixtures/rendering_fixtures/expectation_suite_3.json'))["expectations"],
)
assert results != None
# with open('./test.html', 'w') as f:
# f.write(results)
def test_descriptive_evr_renderer(self):
R = render.DescriptiveEvrPageRenderer(
json.load(open('tests/test_fixtures/rendering_fixtures/evr_suite_3.json'))["results"],
)
rendered_page = R.render()
assert rendered_page != None
# with open('./test.html', 'w') as f:
# f.write(rendered_page)
def test_full_oobe_flow(sefl):
df = ge.read_csv("examples/data/Titanic.csv")
df.autoinspect(ge.dataset.autoinspect.pseudo_pandas_profiling)
evrs = df.validate()["results"]
R = render.DescriptiveEvrPageRenderer(evrs)
rendered_page = R.render()
assert rendered_page != None
# with open('./test.html', 'w') as f:
# f.write(rendered_page)
|
<commit_before>import unittest
import json
import great_expectations as ge
from great_expectations import render
class TestPageRenderers(unittest.TestCase):
def test_import(self):
from great_expectations import render
def test_prescriptive_expectation_renderer(self):
results = render.render(
renderer_class=render.PrescriptiveExpectationPageRenderer,
expectations=json.load(open('tests/test_fixtures/rendering_fixtures/expectation_suite_3.json'))["expectations"],
)
assert results != None
# with open('./test.html', 'w') as f:
# f.write(results)
def test_descriptive_evr_renderer(self):
R = render.DescriptiveEvrPageRenderer(
json.load(open('tests/test_fixtures/rendering_fixtures/evr_suite_3.json'))["results"],
)
rendered_page = R.render()
assert rendered_page != None
# with open('./test.html', 'w') as f:
# f.write(rendered_page)
def test_full_oobe_flow(sefl):
df = ge.read_csv("/Users/abe/Documents/superconductive/data/Sacramentorealestatetransactions.csv")
df.autoinspect(ge.dataset.autoinspect.pseudo_pandas_profiling)
evrs = df.validate()["results"]
R = render.DescriptiveEvrPageRenderer(evrs)
rendered_page = R.render()
assert rendered_page != None
# with open('./test.html', 'w') as f:
# f.write(rendered_page)<commit_msg>Use the Titanic example data set<commit_after>import unittest
import json
import great_expectations as ge
from great_expectations import render
class TestPageRenderers(unittest.TestCase):
def test_import(self):
from great_expectations import render
def test_prescriptive_expectation_renderer(self):
results = render.render(
renderer_class=render.PrescriptiveExpectationPageRenderer,
expectations=json.load(open('tests/test_fixtures/rendering_fixtures/expectation_suite_3.json'))["expectations"],
)
assert results != None
# with open('./test.html', 'w') as f:
# f.write(results)
def test_descriptive_evr_renderer(self):
R = render.DescriptiveEvrPageRenderer(
json.load(open('tests/test_fixtures/rendering_fixtures/evr_suite_3.json'))["results"],
)
rendered_page = R.render()
assert rendered_page != None
# with open('./test.html', 'w') as f:
# f.write(rendered_page)
def test_full_oobe_flow(sefl):
df = ge.read_csv("examples/data/Titanic.csv")
df.autoinspect(ge.dataset.autoinspect.pseudo_pandas_profiling)
evrs = df.validate()["results"]
R = render.DescriptiveEvrPageRenderer(evrs)
rendered_page = R.render()
assert rendered_page != None
# with open('./test.html', 'w') as f:
# f.write(rendered_page)
|
a0b813a08b0ea7fd52a9a87fa41e309ce21bdb64
|
alg_valid_parentheses.py
|
alg_valid_parentheses.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def valid_parentheses(s):
"""Balance parentheses in a string."""
open_close_d = {
'(': ')',
'[': ']',
'{': '}'
}
# Use stack to collect open parentheses.
stack = []
for c in s:
if c in '([{':
# If c is open parenthesis, push to stack.
stack.append(c)
continue
elif c in ')]}':
# Check if there is still open parenthesis.
if not stack:
return False
# If yes, compare open parenthesis and current char.
open_c = stack.pop()
if c != open_close_d[open_c]:
return False
# Finally check if there is open remaining.
if not stack:
return True
else:
return False
def main():
s = '(abcd)' # Ans: True.
print(valid_parentheses(s))
s = '([(a)bcd]{ef}g)' # Ans: True.
print(valid_parentheses(s))
s = '(ab{c}d]' # Ans: False.
print(valid_parentheses(s))
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def valid_parentheses(s):
"""Balance parentheses in a string."""
open_close_d = {
'(': ')',
'[': ']',
'{': '}'
}
# Use stack to collect open parentheses.
stack = []
for c in s:
if c in '([{':
# If c is open parenthesis, push to stack.
stack.append(c)
continue
if c in ')]}':
# Check if there is still open parenthesis.
if not stack:
return False
# If yes, compare open parenthesis and current char.
open_c = stack.pop()
if c != open_close_d[open_c]:
return False
# Finally check if there is open remaining.
if not stack:
return True
else:
return False
def main():
s = '(abcd)' # Ans: True.
print(valid_parentheses(s))
s = '([(a)bcd]{ef}g)' # Ans: True.
print(valid_parentheses(s))
s = '(ab{c}d]' # Ans: False.
print(valid_parentheses(s))
if __name__ == '__main__':
main()
|
Revise elif to if due to continue
|
Revise elif to if due to continue
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def valid_parentheses(s):
"""Balance parentheses in a string."""
open_close_d = {
'(': ')',
'[': ']',
'{': '}'
}
# Use stack to collect open parentheses.
stack = []
for c in s:
if c in '([{':
# If c is open parenthesis, push to stack.
stack.append(c)
continue
elif c in ')]}':
# Check if there is still open parenthesis.
if not stack:
return False
# If yes, compare open parenthesis and current char.
open_c = stack.pop()
if c != open_close_d[open_c]:
return False
# Finally check if there is open remaining.
if not stack:
return True
else:
return False
def main():
s = '(abcd)' # Ans: True.
print(valid_parentheses(s))
s = '([(a)bcd]{ef}g)' # Ans: True.
print(valid_parentheses(s))
s = '(ab{c}d]' # Ans: False.
print(valid_parentheses(s))
if __name__ == '__main__':
main()
Revise elif to if due to continue
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def valid_parentheses(s):
"""Balance parentheses in a string."""
open_close_d = {
'(': ')',
'[': ']',
'{': '}'
}
# Use stack to collect open parentheses.
stack = []
for c in s:
if c in '([{':
# If c is open parenthesis, push to stack.
stack.append(c)
continue
if c in ')]}':
# Check if there is still open parenthesis.
if not stack:
return False
# If yes, compare open parenthesis and current char.
open_c = stack.pop()
if c != open_close_d[open_c]:
return False
# Finally check if there is open remaining.
if not stack:
return True
else:
return False
def main():
s = '(abcd)' # Ans: True.
print(valid_parentheses(s))
s = '([(a)bcd]{ef}g)' # Ans: True.
print(valid_parentheses(s))
s = '(ab{c}d]' # Ans: False.
print(valid_parentheses(s))
if __name__ == '__main__':
main()
|
<commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def valid_parentheses(s):
"""Balance parentheses in a string."""
open_close_d = {
'(': ')',
'[': ']',
'{': '}'
}
# Use stack to collect open parentheses.
stack = []
for c in s:
if c in '([{':
# If c is open parenthesis, push to stack.
stack.append(c)
continue
elif c in ')]}':
# Check if there is still open parenthesis.
if not stack:
return False
# If yes, compare open parenthesis and current char.
open_c = stack.pop()
if c != open_close_d[open_c]:
return False
# Finally check if there is open remaining.
if not stack:
return True
else:
return False
def main():
s = '(abcd)' # Ans: True.
print(valid_parentheses(s))
s = '([(a)bcd]{ef}g)' # Ans: True.
print(valid_parentheses(s))
s = '(ab{c}d]' # Ans: False.
print(valid_parentheses(s))
if __name__ == '__main__':
main()
<commit_msg>Revise elif to if due to continue<commit_after>
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def valid_parentheses(s):
"""Balance parentheses in a string."""
open_close_d = {
'(': ')',
'[': ']',
'{': '}'
}
# Use stack to collect open parentheses.
stack = []
for c in s:
if c in '([{':
# If c is open parenthesis, push to stack.
stack.append(c)
continue
if c in ')]}':
# Check if there is still open parenthesis.
if not stack:
return False
# If yes, compare open parenthesis and current char.
open_c = stack.pop()
if c != open_close_d[open_c]:
return False
# Finally check if there is open remaining.
if not stack:
return True
else:
return False
def main():
s = '(abcd)' # Ans: True.
print(valid_parentheses(s))
s = '([(a)bcd]{ef}g)' # Ans: True.
print(valid_parentheses(s))
s = '(ab{c}d]' # Ans: False.
print(valid_parentheses(s))
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def valid_parentheses(s):
"""Balance parentheses in a string."""
open_close_d = {
'(': ')',
'[': ']',
'{': '}'
}
# Use stack to collect open parentheses.
stack = []
for c in s:
if c in '([{':
# If c is open parenthesis, push to stack.
stack.append(c)
continue
elif c in ')]}':
# Check if there is still open parenthesis.
if not stack:
return False
# If yes, compare open parenthesis and current char.
open_c = stack.pop()
if c != open_close_d[open_c]:
return False
# Finally check if there is open remaining.
if not stack:
return True
else:
return False
def main():
s = '(abcd)' # Ans: True.
print(valid_parentheses(s))
s = '([(a)bcd]{ef}g)' # Ans: True.
print(valid_parentheses(s))
s = '(ab{c}d]' # Ans: False.
print(valid_parentheses(s))
if __name__ == '__main__':
main()
Revise elif to if due to continuefrom __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def valid_parentheses(s):
"""Balance parentheses in a string."""
open_close_d = {
'(': ')',
'[': ']',
'{': '}'
}
# Use stack to collect open parentheses.
stack = []
for c in s:
if c in '([{':
# If c is open parenthesis, push to stack.
stack.append(c)
continue
if c in ')]}':
# Check if there is still open parenthesis.
if not stack:
return False
# If yes, compare open parenthesis and current char.
open_c = stack.pop()
if c != open_close_d[open_c]:
return False
# Finally check if there is open remaining.
if not stack:
return True
else:
return False
def main():
s = '(abcd)' # Ans: True.
print(valid_parentheses(s))
s = '([(a)bcd]{ef}g)' # Ans: True.
print(valid_parentheses(s))
s = '(ab{c}d]' # Ans: False.
print(valid_parentheses(s))
if __name__ == '__main__':
main()
|
<commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def valid_parentheses(s):
"""Balance parentheses in a string."""
open_close_d = {
'(': ')',
'[': ']',
'{': '}'
}
# Use stack to collect open parentheses.
stack = []
for c in s:
if c in '([{':
# If c is open parenthesis, push to stack.
stack.append(c)
continue
elif c in ')]}':
# Check if there is still open parenthesis.
if not stack:
return False
# If yes, compare open parenthesis and current char.
open_c = stack.pop()
if c != open_close_d[open_c]:
return False
# Finally check if there is open remaining.
if not stack:
return True
else:
return False
def main():
s = '(abcd)' # Ans: True.
print(valid_parentheses(s))
s = '([(a)bcd]{ef}g)' # Ans: True.
print(valid_parentheses(s))
s = '(ab{c}d]' # Ans: False.
print(valid_parentheses(s))
if __name__ == '__main__':
main()
<commit_msg>Revise elif to if due to continue<commit_after>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def valid_parentheses(s):
"""Balance parentheses in a string."""
open_close_d = {
'(': ')',
'[': ']',
'{': '}'
}
# Use stack to collect open parentheses.
stack = []
for c in s:
if c in '([{':
# If c is open parenthesis, push to stack.
stack.append(c)
continue
if c in ')]}':
# Check if there is still open parenthesis.
if not stack:
return False
# If yes, compare open parenthesis and current char.
open_c = stack.pop()
if c != open_close_d[open_c]:
return False
# Finally check if there is open remaining.
if not stack:
return True
else:
return False
def main():
s = '(abcd)' # Ans: True.
print(valid_parentheses(s))
s = '([(a)bcd]{ef}g)' # Ans: True.
print(valid_parentheses(s))
s = '(ab{c}d]' # Ans: False.
print(valid_parentheses(s))
if __name__ == '__main__':
main()
|
56d7349a52f3a7928d3d67c89a086b54b2a3701d
|
elmo/moon_tracker/models.py
|
elmo/moon_tracker/models.py
|
from django.db import models
from django.conf import settings
from django.forms import Select
from eve_sde.models import Moon
# Create your models here.
class ScanResult(models.Model):
owner = models.ForeignKey(
settings.AUTH_USER_MODEL,
related_name='scans',
db_index=True
)
moon = models.ForeignKey(
Moon,
related_name='scans',
db_index=True
)
ORE_CHOICES = (
('Standard Ores', (
(1, 'PH1'),
(2, 'PH2'),
)),
('Moon Ores', (
(3, 'PH3'),
(4, 'PH4'),
)),
)
class ScanResultOre(models.Model):
scan = models.ForeignKey(
ScanResult,
related_name='constituents',
db_index=True
)
ore = models.IntegerField(choices=ORE_CHOICES)
percentage = models.PositiveSmallIntegerField()
class Meta:
default_permissions = ()
|
from django.db import models
from django.conf import settings
from django.forms import Select
from eve_sde.models import Moon
# Create your models here.
class ScanResult(models.Model):
owner = models.ForeignKey(
settings.AUTH_USER_MODEL,
related_name='scans',
db_index=True
)
moon = models.ForeignKey(
Moon,
related_name='scans',
db_index=True
)
ORE_CHOICES = (
('Standard Ores', (
(18, 'Plagioclase'),
(19, 'Spodumain'),
(20, 'Kernite'),
(21, 'Hedbergite'),
(22, 'Arkonor'),
(1223, 'Bistot'),
(1224, 'Pyroxeres'),
(1225, 'Crokite'),
(1226, 'Jaspet'),
(1227, 'Omber'),
(1228, 'Scordite'),
(1229, 'Gneiss'),
(1230, 'Veldspar'),
(1231, 'Hemorphite'),
(1232, 'Dark Ochre'),
(11396, 'Mercoxit'),
)),
('Moon Ores', (
(3, 'PH3'),
(4, 'PH4'),
)),
)
class ScanResultOre(models.Model):
scan = models.ForeignKey(
ScanResult,
related_name='constituents',
db_index=True
)
ore = models.IntegerField(choices=ORE_CHOICES)
percentage = models.PositiveSmallIntegerField()
class Meta:
default_permissions = ()
|
Add all standard ore types with ID.
|
Add all standard ore types with ID.
|
Python
|
mit
|
StephenSwat/eve_lunar_mining_organiser,StephenSwat/eve_lunar_mining_organiser
|
from django.db import models
from django.conf import settings
from django.forms import Select
from eve_sde.models import Moon
# Create your models here.
class ScanResult(models.Model):
owner = models.ForeignKey(
settings.AUTH_USER_MODEL,
related_name='scans',
db_index=True
)
moon = models.ForeignKey(
Moon,
related_name='scans',
db_index=True
)
ORE_CHOICES = (
('Standard Ores', (
(1, 'PH1'),
(2, 'PH2'),
)),
('Moon Ores', (
(3, 'PH3'),
(4, 'PH4'),
)),
)
class ScanResultOre(models.Model):
scan = models.ForeignKey(
ScanResult,
related_name='constituents',
db_index=True
)
ore = models.IntegerField(choices=ORE_CHOICES)
percentage = models.PositiveSmallIntegerField()
class Meta:
default_permissions = ()
Add all standard ore types with ID.
|
from django.db import models
from django.conf import settings
from django.forms import Select
from eve_sde.models import Moon
# Create your models here.
class ScanResult(models.Model):
owner = models.ForeignKey(
settings.AUTH_USER_MODEL,
related_name='scans',
db_index=True
)
moon = models.ForeignKey(
Moon,
related_name='scans',
db_index=True
)
ORE_CHOICES = (
('Standard Ores', (
(18, 'Plagioclase'),
(19, 'Spodumain'),
(20, 'Kernite'),
(21, 'Hedbergite'),
(22, 'Arkonor'),
(1223, 'Bistot'),
(1224, 'Pyroxeres'),
(1225, 'Crokite'),
(1226, 'Jaspet'),
(1227, 'Omber'),
(1228, 'Scordite'),
(1229, 'Gneiss'),
(1230, 'Veldspar'),
(1231, 'Hemorphite'),
(1232, 'Dark Ochre'),
(11396, 'Mercoxit'),
)),
('Moon Ores', (
(3, 'PH3'),
(4, 'PH4'),
)),
)
class ScanResultOre(models.Model):
scan = models.ForeignKey(
ScanResult,
related_name='constituents',
db_index=True
)
ore = models.IntegerField(choices=ORE_CHOICES)
percentage = models.PositiveSmallIntegerField()
class Meta:
default_permissions = ()
|
<commit_before>from django.db import models
from django.conf import settings
from django.forms import Select
from eve_sde.models import Moon
# Create your models here.
class ScanResult(models.Model):
owner = models.ForeignKey(
settings.AUTH_USER_MODEL,
related_name='scans',
db_index=True
)
moon = models.ForeignKey(
Moon,
related_name='scans',
db_index=True
)
ORE_CHOICES = (
('Standard Ores', (
(1, 'PH1'),
(2, 'PH2'),
)),
('Moon Ores', (
(3, 'PH3'),
(4, 'PH4'),
)),
)
class ScanResultOre(models.Model):
scan = models.ForeignKey(
ScanResult,
related_name='constituents',
db_index=True
)
ore = models.IntegerField(choices=ORE_CHOICES)
percentage = models.PositiveSmallIntegerField()
class Meta:
default_permissions = ()
<commit_msg>Add all standard ore types with ID.<commit_after>
|
from django.db import models
from django.conf import settings
from django.forms import Select
from eve_sde.models import Moon
# Create your models here.
class ScanResult(models.Model):
owner = models.ForeignKey(
settings.AUTH_USER_MODEL,
related_name='scans',
db_index=True
)
moon = models.ForeignKey(
Moon,
related_name='scans',
db_index=True
)
ORE_CHOICES = (
('Standard Ores', (
(18, 'Plagioclase'),
(19, 'Spodumain'),
(20, 'Kernite'),
(21, 'Hedbergite'),
(22, 'Arkonor'),
(1223, 'Bistot'),
(1224, 'Pyroxeres'),
(1225, 'Crokite'),
(1226, 'Jaspet'),
(1227, 'Omber'),
(1228, 'Scordite'),
(1229, 'Gneiss'),
(1230, 'Veldspar'),
(1231, 'Hemorphite'),
(1232, 'Dark Ochre'),
(11396, 'Mercoxit'),
)),
('Moon Ores', (
(3, 'PH3'),
(4, 'PH4'),
)),
)
class ScanResultOre(models.Model):
scan = models.ForeignKey(
ScanResult,
related_name='constituents',
db_index=True
)
ore = models.IntegerField(choices=ORE_CHOICES)
percentage = models.PositiveSmallIntegerField()
class Meta:
default_permissions = ()
|
from django.db import models
from django.conf import settings
from django.forms import Select
from eve_sde.models import Moon
# Create your models here.
class ScanResult(models.Model):
owner = models.ForeignKey(
settings.AUTH_USER_MODEL,
related_name='scans',
db_index=True
)
moon = models.ForeignKey(
Moon,
related_name='scans',
db_index=True
)
ORE_CHOICES = (
('Standard Ores', (
(1, 'PH1'),
(2, 'PH2'),
)),
('Moon Ores', (
(3, 'PH3'),
(4, 'PH4'),
)),
)
class ScanResultOre(models.Model):
scan = models.ForeignKey(
ScanResult,
related_name='constituents',
db_index=True
)
ore = models.IntegerField(choices=ORE_CHOICES)
percentage = models.PositiveSmallIntegerField()
class Meta:
default_permissions = ()
Add all standard ore types with ID.from django.db import models
from django.conf import settings
from django.forms import Select
from eve_sde.models import Moon
# Create your models here.
class ScanResult(models.Model):
owner = models.ForeignKey(
settings.AUTH_USER_MODEL,
related_name='scans',
db_index=True
)
moon = models.ForeignKey(
Moon,
related_name='scans',
db_index=True
)
ORE_CHOICES = (
('Standard Ores', (
(18, 'Plagioclase'),
(19, 'Spodumain'),
(20, 'Kernite'),
(21, 'Hedbergite'),
(22, 'Arkonor'),
(1223, 'Bistot'),
(1224, 'Pyroxeres'),
(1225, 'Crokite'),
(1226, 'Jaspet'),
(1227, 'Omber'),
(1228, 'Scordite'),
(1229, 'Gneiss'),
(1230, 'Veldspar'),
(1231, 'Hemorphite'),
(1232, 'Dark Ochre'),
(11396, 'Mercoxit'),
)),
('Moon Ores', (
(3, 'PH3'),
(4, 'PH4'),
)),
)
class ScanResultOre(models.Model):
scan = models.ForeignKey(
ScanResult,
related_name='constituents',
db_index=True
)
ore = models.IntegerField(choices=ORE_CHOICES)
percentage = models.PositiveSmallIntegerField()
class Meta:
default_permissions = ()
|
<commit_before>from django.db import models
from django.conf import settings
from django.forms import Select
from eve_sde.models import Moon
# Create your models here.
class ScanResult(models.Model):
owner = models.ForeignKey(
settings.AUTH_USER_MODEL,
related_name='scans',
db_index=True
)
moon = models.ForeignKey(
Moon,
related_name='scans',
db_index=True
)
ORE_CHOICES = (
('Standard Ores', (
(1, 'PH1'),
(2, 'PH2'),
)),
('Moon Ores', (
(3, 'PH3'),
(4, 'PH4'),
)),
)
class ScanResultOre(models.Model):
scan = models.ForeignKey(
ScanResult,
related_name='constituents',
db_index=True
)
ore = models.IntegerField(choices=ORE_CHOICES)
percentage = models.PositiveSmallIntegerField()
class Meta:
default_permissions = ()
<commit_msg>Add all standard ore types with ID.<commit_after>from django.db import models
from django.conf import settings
from django.forms import Select
from eve_sde.models import Moon
# Create your models here.
class ScanResult(models.Model):
owner = models.ForeignKey(
settings.AUTH_USER_MODEL,
related_name='scans',
db_index=True
)
moon = models.ForeignKey(
Moon,
related_name='scans',
db_index=True
)
ORE_CHOICES = (
('Standard Ores', (
(18, 'Plagioclase'),
(19, 'Spodumain'),
(20, 'Kernite'),
(21, 'Hedbergite'),
(22, 'Arkonor'),
(1223, 'Bistot'),
(1224, 'Pyroxeres'),
(1225, 'Crokite'),
(1226, 'Jaspet'),
(1227, 'Omber'),
(1228, 'Scordite'),
(1229, 'Gneiss'),
(1230, 'Veldspar'),
(1231, 'Hemorphite'),
(1232, 'Dark Ochre'),
(11396, 'Mercoxit'),
)),
('Moon Ores', (
(3, 'PH3'),
(4, 'PH4'),
)),
)
class ScanResultOre(models.Model):
scan = models.ForeignKey(
ScanResult,
related_name='constituents',
db_index=True
)
ore = models.IntegerField(choices=ORE_CHOICES)
percentage = models.PositiveSmallIntegerField()
class Meta:
default_permissions = ()
|
03ee406800fb59ff3e7565397107fa9aad0d54d0
|
website/notifications/listeners.py
|
website/notifications/listeners.py
|
import logging
from website.notifications.exceptions import InvalidSubscriptionError
from website.notifications.utils import subscribe_user_to_notifications, subscribe_user_to_global_notifications
from website.project.signals import contributor_added, project_created
from framework.auth.signals import user_confirmed
logger = logging.getLogger(__name__)
@project_created.connect
def subscribe_creator(node):
if node.is_collection or node.is_deleted:
return None
try:
subscribe_user_to_notifications(node, node.creator)
except InvalidSubscriptionError as err:
user = node.creator._id if node.creator else 'None'
logger.warn('Skipping subscription of user {} to node {}'.format(user, node._id))
logger.warn('Reason: {}'.format(str(err)))
@contributor_added.connect
def subscribe_contributor(node, contributor, auth=None, *args, **kwargs):
try:
subscribe_user_to_notifications(node, contributor)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to node {}'.format(contributor, node._id))
logger.warn('Reason: {}'.format(str(err)))
@user_confirmed.connect
def subscribe_confirmed_user(user):
try:
subscribe_user_to_global_notifications(user)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to global subscriptions'.format(user))
logger.warn('Reason: {}'.format(str(err)))
|
import logging
from website.notifications.exceptions import InvalidSubscriptionError
from website.notifications.utils import subscribe_user_to_notifications, subscribe_user_to_global_notifications
from website.project.signals import contributor_added, project_created
from framework.auth.signals import user_confirmed
logger = logging.getLogger(__name__)
@project_created.connect
def subscribe_creator(node):
if node.institution_id or node.is_collection or node.is_deleted:
return None
try:
subscribe_user_to_notifications(node, node.creator)
except InvalidSubscriptionError as err:
user = node.creator._id if node.creator else 'None'
logger.warn('Skipping subscription of user {} to node {}'.format(user, node._id))
logger.warn('Reason: {}'.format(str(err)))
@contributor_added.connect
def subscribe_contributor(node, contributor, auth=None, *args, **kwargs):
try:
subscribe_user_to_notifications(node, contributor)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to node {}'.format(contributor, node._id))
logger.warn('Reason: {}'.format(str(err)))
@user_confirmed.connect
def subscribe_confirmed_user(user):
try:
subscribe_user_to_global_notifications(user)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to global subscriptions'.format(user))
logger.warn('Reason: {}'.format(str(err)))
|
Revert "Remove incorrect check for institution_id"
|
Revert "Remove incorrect check for institution_id"
This reverts commit 617df13670573b858b6c23249f4287786807d8b6.
|
Python
|
apache-2.0
|
hmoco/osf.io,cslzchen/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,cslzchen/osf.io,Nesiehr/osf.io,aaxelb/osf.io,CenterForOpenScience/osf.io,chrisseto/osf.io,chennan47/osf.io,crcresearch/osf.io,Nesiehr/osf.io,felliott/osf.io,Johnetordoff/osf.io,acshi/osf.io,baylee-d/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,binoculars/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,sloria/osf.io,TomBaxter/osf.io,caneruguz/osf.io,adlius/osf.io,hmoco/osf.io,caneruguz/osf.io,chennan47/osf.io,chrisseto/osf.io,mattclark/osf.io,saradbowman/osf.io,aaxelb/osf.io,mfraezz/osf.io,felliott/osf.io,monikagrabowska/osf.io,sloria/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,TomBaxter/osf.io,erinspace/osf.io,erinspace/osf.io,leb2dg/osf.io,adlius/osf.io,pattisdr/osf.io,laurenrevere/osf.io,caneruguz/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,crcresearch/osf.io,adlius/osf.io,adlius/osf.io,cwisecarver/osf.io,leb2dg/osf.io,baylee-d/osf.io,mfraezz/osf.io,caseyrollins/osf.io,mattclark/osf.io,icereval/osf.io,brianjgeiger/osf.io,Nesiehr/osf.io,leb2dg/osf.io,crcresearch/osf.io,monikagrabowska/osf.io,felliott/osf.io,erinspace/osf.io,laurenrevere/osf.io,leb2dg/osf.io,binoculars/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,pattisdr/osf.io,chennan47/osf.io,hmoco/osf.io,acshi/osf.io,cslzchen/osf.io,monikagrabowska/osf.io,Nesiehr/osf.io,pattisdr/osf.io,icereval/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,caseyrollins/osf.io,aaxelb/osf.io,laurenrevere/osf.io,TomBaxter/osf.io,monikagrabowska/osf.io,acshi/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io,cwisecarver/osf.io,chrisseto/osf.io,baylee-d/osf.io,icereval/osf.io,binoculars/osf.io,felliott/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,hmoco/osf.io,sloria/osf.io,saradbowman/osf.io,acshi/osf.io,caneruguz/osf.io,acshi/osf.io,chrisseto/osf.io,cwisecarver/osf.io
|
import logging
from website.notifications.exceptions import InvalidSubscriptionError
from website.notifications.utils import subscribe_user_to_notifications, subscribe_user_to_global_notifications
from website.project.signals import contributor_added, project_created
from framework.auth.signals import user_confirmed
logger = logging.getLogger(__name__)
@project_created.connect
def subscribe_creator(node):
if node.is_collection or node.is_deleted:
return None
try:
subscribe_user_to_notifications(node, node.creator)
except InvalidSubscriptionError as err:
user = node.creator._id if node.creator else 'None'
logger.warn('Skipping subscription of user {} to node {}'.format(user, node._id))
logger.warn('Reason: {}'.format(str(err)))
@contributor_added.connect
def subscribe_contributor(node, contributor, auth=None, *args, **kwargs):
try:
subscribe_user_to_notifications(node, contributor)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to node {}'.format(contributor, node._id))
logger.warn('Reason: {}'.format(str(err)))
@user_confirmed.connect
def subscribe_confirmed_user(user):
try:
subscribe_user_to_global_notifications(user)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to global subscriptions'.format(user))
logger.warn('Reason: {}'.format(str(err)))
Revert "Remove incorrect check for institution_id"
This reverts commit 617df13670573b858b6c23249f4287786807d8b6.
|
import logging
from website.notifications.exceptions import InvalidSubscriptionError
from website.notifications.utils import subscribe_user_to_notifications, subscribe_user_to_global_notifications
from website.project.signals import contributor_added, project_created
from framework.auth.signals import user_confirmed
logger = logging.getLogger(__name__)
@project_created.connect
def subscribe_creator(node):
if node.institution_id or node.is_collection or node.is_deleted:
return None
try:
subscribe_user_to_notifications(node, node.creator)
except InvalidSubscriptionError as err:
user = node.creator._id if node.creator else 'None'
logger.warn('Skipping subscription of user {} to node {}'.format(user, node._id))
logger.warn('Reason: {}'.format(str(err)))
@contributor_added.connect
def subscribe_contributor(node, contributor, auth=None, *args, **kwargs):
try:
subscribe_user_to_notifications(node, contributor)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to node {}'.format(contributor, node._id))
logger.warn('Reason: {}'.format(str(err)))
@user_confirmed.connect
def subscribe_confirmed_user(user):
try:
subscribe_user_to_global_notifications(user)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to global subscriptions'.format(user))
logger.warn('Reason: {}'.format(str(err)))
|
<commit_before>import logging
from website.notifications.exceptions import InvalidSubscriptionError
from website.notifications.utils import subscribe_user_to_notifications, subscribe_user_to_global_notifications
from website.project.signals import contributor_added, project_created
from framework.auth.signals import user_confirmed
logger = logging.getLogger(__name__)
@project_created.connect
def subscribe_creator(node):
if node.is_collection or node.is_deleted:
return None
try:
subscribe_user_to_notifications(node, node.creator)
except InvalidSubscriptionError as err:
user = node.creator._id if node.creator else 'None'
logger.warn('Skipping subscription of user {} to node {}'.format(user, node._id))
logger.warn('Reason: {}'.format(str(err)))
@contributor_added.connect
def subscribe_contributor(node, contributor, auth=None, *args, **kwargs):
try:
subscribe_user_to_notifications(node, contributor)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to node {}'.format(contributor, node._id))
logger.warn('Reason: {}'.format(str(err)))
@user_confirmed.connect
def subscribe_confirmed_user(user):
try:
subscribe_user_to_global_notifications(user)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to global subscriptions'.format(user))
logger.warn('Reason: {}'.format(str(err)))
<commit_msg>Revert "Remove incorrect check for institution_id"
This reverts commit 617df13670573b858b6c23249f4287786807d8b6.<commit_after>
|
import logging
from website.notifications.exceptions import InvalidSubscriptionError
from website.notifications.utils import subscribe_user_to_notifications, subscribe_user_to_global_notifications
from website.project.signals import contributor_added, project_created
from framework.auth.signals import user_confirmed
logger = logging.getLogger(__name__)
@project_created.connect
def subscribe_creator(node):
if node.institution_id or node.is_collection or node.is_deleted:
return None
try:
subscribe_user_to_notifications(node, node.creator)
except InvalidSubscriptionError as err:
user = node.creator._id if node.creator else 'None'
logger.warn('Skipping subscription of user {} to node {}'.format(user, node._id))
logger.warn('Reason: {}'.format(str(err)))
@contributor_added.connect
def subscribe_contributor(node, contributor, auth=None, *args, **kwargs):
try:
subscribe_user_to_notifications(node, contributor)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to node {}'.format(contributor, node._id))
logger.warn('Reason: {}'.format(str(err)))
@user_confirmed.connect
def subscribe_confirmed_user(user):
try:
subscribe_user_to_global_notifications(user)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to global subscriptions'.format(user))
logger.warn('Reason: {}'.format(str(err)))
|
import logging
from website.notifications.exceptions import InvalidSubscriptionError
from website.notifications.utils import subscribe_user_to_notifications, subscribe_user_to_global_notifications
from website.project.signals import contributor_added, project_created
from framework.auth.signals import user_confirmed
logger = logging.getLogger(__name__)
@project_created.connect
def subscribe_creator(node):
if node.is_collection or node.is_deleted:
return None
try:
subscribe_user_to_notifications(node, node.creator)
except InvalidSubscriptionError as err:
user = node.creator._id if node.creator else 'None'
logger.warn('Skipping subscription of user {} to node {}'.format(user, node._id))
logger.warn('Reason: {}'.format(str(err)))
@contributor_added.connect
def subscribe_contributor(node, contributor, auth=None, *args, **kwargs):
try:
subscribe_user_to_notifications(node, contributor)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to node {}'.format(contributor, node._id))
logger.warn('Reason: {}'.format(str(err)))
@user_confirmed.connect
def subscribe_confirmed_user(user):
try:
subscribe_user_to_global_notifications(user)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to global subscriptions'.format(user))
logger.warn('Reason: {}'.format(str(err)))
Revert "Remove incorrect check for institution_id"
This reverts commit 617df13670573b858b6c23249f4287786807d8b6.import logging
from website.notifications.exceptions import InvalidSubscriptionError
from website.notifications.utils import subscribe_user_to_notifications, subscribe_user_to_global_notifications
from website.project.signals import contributor_added, project_created
from framework.auth.signals import user_confirmed
logger = logging.getLogger(__name__)
@project_created.connect
def subscribe_creator(node):
if node.institution_id or node.is_collection or node.is_deleted:
return None
try:
subscribe_user_to_notifications(node, node.creator)
except InvalidSubscriptionError as err:
user = node.creator._id if node.creator else 'None'
logger.warn('Skipping subscription of user {} to node {}'.format(user, node._id))
logger.warn('Reason: {}'.format(str(err)))
@contributor_added.connect
def subscribe_contributor(node, contributor, auth=None, *args, **kwargs):
try:
subscribe_user_to_notifications(node, contributor)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to node {}'.format(contributor, node._id))
logger.warn('Reason: {}'.format(str(err)))
@user_confirmed.connect
def subscribe_confirmed_user(user):
try:
subscribe_user_to_global_notifications(user)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to global subscriptions'.format(user))
logger.warn('Reason: {}'.format(str(err)))
|
<commit_before>import logging
from website.notifications.exceptions import InvalidSubscriptionError
from website.notifications.utils import subscribe_user_to_notifications, subscribe_user_to_global_notifications
from website.project.signals import contributor_added, project_created
from framework.auth.signals import user_confirmed
logger = logging.getLogger(__name__)
@project_created.connect
def subscribe_creator(node):
if node.is_collection or node.is_deleted:
return None
try:
subscribe_user_to_notifications(node, node.creator)
except InvalidSubscriptionError as err:
user = node.creator._id if node.creator else 'None'
logger.warn('Skipping subscription of user {} to node {}'.format(user, node._id))
logger.warn('Reason: {}'.format(str(err)))
@contributor_added.connect
def subscribe_contributor(node, contributor, auth=None, *args, **kwargs):
try:
subscribe_user_to_notifications(node, contributor)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to node {}'.format(contributor, node._id))
logger.warn('Reason: {}'.format(str(err)))
@user_confirmed.connect
def subscribe_confirmed_user(user):
try:
subscribe_user_to_global_notifications(user)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to global subscriptions'.format(user))
logger.warn('Reason: {}'.format(str(err)))
<commit_msg>Revert "Remove incorrect check for institution_id"
This reverts commit 617df13670573b858b6c23249f4287786807d8b6.<commit_after>import logging
from website.notifications.exceptions import InvalidSubscriptionError
from website.notifications.utils import subscribe_user_to_notifications, subscribe_user_to_global_notifications
from website.project.signals import contributor_added, project_created
from framework.auth.signals import user_confirmed
logger = logging.getLogger(__name__)
@project_created.connect
def subscribe_creator(node):
if node.institution_id or node.is_collection or node.is_deleted:
return None
try:
subscribe_user_to_notifications(node, node.creator)
except InvalidSubscriptionError as err:
user = node.creator._id if node.creator else 'None'
logger.warn('Skipping subscription of user {} to node {}'.format(user, node._id))
logger.warn('Reason: {}'.format(str(err)))
@contributor_added.connect
def subscribe_contributor(node, contributor, auth=None, *args, **kwargs):
try:
subscribe_user_to_notifications(node, contributor)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to node {}'.format(contributor, node._id))
logger.warn('Reason: {}'.format(str(err)))
@user_confirmed.connect
def subscribe_confirmed_user(user):
try:
subscribe_user_to_global_notifications(user)
except InvalidSubscriptionError as err:
logger.warn('Skipping subscription of user {} to global subscriptions'.format(user))
logger.warn('Reason: {}'.format(str(err)))
|
3318165728145a97a1b9b87ac212945d087c1e14
|
manifests/bin/db-add.py
|
manifests/bin/db-add.py
|
#!/usr/bin/env python
import pymysql
import os
import syslog
host = os.environ['DB_HOST']
user = os.environ['DB_USER']
password = os.environ['DB_PASSWORD']
db = os.environ['DB_DATABASE']
syslog.openlog(facility=syslog.LOG_USER)
syslog.syslog("Inserting new business data into database.")
connection = pymysql.connect(host=host,
user=user,
password=password,
db=db,
cursorclass=pymysql.cursors.DictCursor)
try:
with connection.cursor() as cursor:
# Create a new record
sql = "CALL insert_data({0})".format(1)
cursor.execute(sql)
# connection is not autocommit by default. So you must commit to save
# your changes.
connection.commit()
finally:
connection.close()
|
#!/usr/bin/env python
import pymysql
import os
import syslog
host = os.environ['DB_HOST']
user = os.environ['DB_USER']
password = os.environ['DB_PASSWORD']
db = os.environ['DB_DATABASE']
syslog.openlog(logoption=syslog.LOG_PID, facility=syslog.LOG_USER)
syslog.syslog("Inserting new business data into database.")
connection = pymysql.connect(host=host,
user=user,
password=password,
db=db,
cursorclass=pymysql.cursors.DictCursor)
try:
with connection.cursor() as cursor:
# Create a new record
sql = "CALL insert_data({0})".format(1)
cursor.execute(sql)
# connection is not autocommit by default. So you must commit to save
# your changes.
connection.commit()
finally:
connection.close()
|
Set flag to log process id in syslog
|
Set flag to log process id in syslog
|
Python
|
apache-2.0
|
boundary/tsi-lab,boundary/tsi-lab,jdgwartney/tsi-lab,jdgwartney/tsi-lab,jdgwartney/tsi-lab,boundary/tsi-lab,jdgwartney/tsi-lab,boundary/tsi-lab
|
#!/usr/bin/env python
import pymysql
import os
import syslog
host = os.environ['DB_HOST']
user = os.environ['DB_USER']
password = os.environ['DB_PASSWORD']
db = os.environ['DB_DATABASE']
syslog.openlog(facility=syslog.LOG_USER)
syslog.syslog("Inserting new business data into database.")
connection = pymysql.connect(host=host,
user=user,
password=password,
db=db,
cursorclass=pymysql.cursors.DictCursor)
try:
with connection.cursor() as cursor:
# Create a new record
sql = "CALL insert_data({0})".format(1)
cursor.execute(sql)
# connection is not autocommit by default. So you must commit to save
# your changes.
connection.commit()
finally:
connection.close()
Set flag to log process id in syslog
|
#!/usr/bin/env python
import pymysql
import os
import syslog
host = os.environ['DB_HOST']
user = os.environ['DB_USER']
password = os.environ['DB_PASSWORD']
db = os.environ['DB_DATABASE']
syslog.openlog(logoption=syslog.LOG_PID, facility=syslog.LOG_USER)
syslog.syslog("Inserting new business data into database.")
connection = pymysql.connect(host=host,
user=user,
password=password,
db=db,
cursorclass=pymysql.cursors.DictCursor)
try:
with connection.cursor() as cursor:
# Create a new record
sql = "CALL insert_data({0})".format(1)
cursor.execute(sql)
# connection is not autocommit by default. So you must commit to save
# your changes.
connection.commit()
finally:
connection.close()
|
<commit_before>#!/usr/bin/env python
import pymysql
import os
import syslog
host = os.environ['DB_HOST']
user = os.environ['DB_USER']
password = os.environ['DB_PASSWORD']
db = os.environ['DB_DATABASE']
syslog.openlog(facility=syslog.LOG_USER)
syslog.syslog("Inserting new business data into database.")
connection = pymysql.connect(host=host,
user=user,
password=password,
db=db,
cursorclass=pymysql.cursors.DictCursor)
try:
with connection.cursor() as cursor:
# Create a new record
sql = "CALL insert_data({0})".format(1)
cursor.execute(sql)
# connection is not autocommit by default. So you must commit to save
# your changes.
connection.commit()
finally:
connection.close()
<commit_msg>Set flag to log process id in syslog<commit_after>
|
#!/usr/bin/env python
import pymysql
import os
import syslog
host = os.environ['DB_HOST']
user = os.environ['DB_USER']
password = os.environ['DB_PASSWORD']
db = os.environ['DB_DATABASE']
syslog.openlog(logoption=syslog.LOG_PID, facility=syslog.LOG_USER)
syslog.syslog("Inserting new business data into database.")
connection = pymysql.connect(host=host,
user=user,
password=password,
db=db,
cursorclass=pymysql.cursors.DictCursor)
try:
with connection.cursor() as cursor:
# Create a new record
sql = "CALL insert_data({0})".format(1)
cursor.execute(sql)
# connection is not autocommit by default. So you must commit to save
# your changes.
connection.commit()
finally:
connection.close()
|
#!/usr/bin/env python
import pymysql
import os
import syslog
host = os.environ['DB_HOST']
user = os.environ['DB_USER']
password = os.environ['DB_PASSWORD']
db = os.environ['DB_DATABASE']
syslog.openlog(facility=syslog.LOG_USER)
syslog.syslog("Inserting new business data into database.")
connection = pymysql.connect(host=host,
user=user,
password=password,
db=db,
cursorclass=pymysql.cursors.DictCursor)
try:
with connection.cursor() as cursor:
# Create a new record
sql = "CALL insert_data({0})".format(1)
cursor.execute(sql)
# connection is not autocommit by default. So you must commit to save
# your changes.
connection.commit()
finally:
connection.close()
Set flag to log process id in syslog#!/usr/bin/env python
import pymysql
import os
import syslog
host = os.environ['DB_HOST']
user = os.environ['DB_USER']
password = os.environ['DB_PASSWORD']
db = os.environ['DB_DATABASE']
syslog.openlog(logoption=syslog.LOG_PID, facility=syslog.LOG_USER)
syslog.syslog("Inserting new business data into database.")
connection = pymysql.connect(host=host,
user=user,
password=password,
db=db,
cursorclass=pymysql.cursors.DictCursor)
try:
with connection.cursor() as cursor:
# Create a new record
sql = "CALL insert_data({0})".format(1)
cursor.execute(sql)
# connection is not autocommit by default. So you must commit to save
# your changes.
connection.commit()
finally:
connection.close()
|
<commit_before>#!/usr/bin/env python
import pymysql
import os
import syslog
host = os.environ['DB_HOST']
user = os.environ['DB_USER']
password = os.environ['DB_PASSWORD']
db = os.environ['DB_DATABASE']
syslog.openlog(facility=syslog.LOG_USER)
syslog.syslog("Inserting new business data into database.")
connection = pymysql.connect(host=host,
user=user,
password=password,
db=db,
cursorclass=pymysql.cursors.DictCursor)
try:
with connection.cursor() as cursor:
# Create a new record
sql = "CALL insert_data({0})".format(1)
cursor.execute(sql)
# connection is not autocommit by default. So you must commit to save
# your changes.
connection.commit()
finally:
connection.close()
<commit_msg>Set flag to log process id in syslog<commit_after>#!/usr/bin/env python
import pymysql
import os
import syslog
host = os.environ['DB_HOST']
user = os.environ['DB_USER']
password = os.environ['DB_PASSWORD']
db = os.environ['DB_DATABASE']
syslog.openlog(logoption=syslog.LOG_PID, facility=syslog.LOG_USER)
syslog.syslog("Inserting new business data into database.")
connection = pymysql.connect(host=host,
user=user,
password=password,
db=db,
cursorclass=pymysql.cursors.DictCursor)
try:
with connection.cursor() as cursor:
# Create a new record
sql = "CALL insert_data({0})".format(1)
cursor.execute(sql)
# connection is not autocommit by default. So you must commit to save
# your changes.
connection.commit()
finally:
connection.close()
|
6f90c6543224155be1234de199c8b3c9775b72f3
|
zephyr/projects/brya/brya/BUILD.py
|
zephyr/projects/brya/brya/BUILD.py
|
# Copyright 2021 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
register_npcx_project(
project_name="brya",
zephyr_board="brya",
dts_overlays=[
"battery.dts",
"bb_retimer.dts",
"cbi_eeprom.dts",
"fan.dts",
"gpio.dts",
"interrupts.dts",
"keyboard.dts",
"motionsense.dts",
"pwm_leds.dts",
"usbc.dts",
],
)
|
# Copyright 2021 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
brya = register_npcx_project(
project_name="brya",
zephyr_board="brya",
dts_overlays=[
"battery.dts",
"bb_retimer.dts",
"cbi_eeprom.dts",
"fan.dts",
"gpio.dts",
"interrupts.dts",
"keyboard.dts",
"motionsense.dts",
"pwm_leds.dts",
"usbc.dts",
],
)
ghost = brya.variant(project_name="ghost")
|
Add "ghost" variant of brya
|
zephyr: Add "ghost" variant of brya
Right now using brya hardware to develop ghost EC. Initially it can
be a simple rename of brya. Later we will change the charger chip,
which will require a couple of DTS customizations. Eventually, this
project will need to move to not be a brya variant at some point.
BUG=b:222738712
BRANCH=none
TEST=builds (don't have hardware for testing yet)
Signed-off-by: Jack Rosenthal <d3f605bef1867f59845d4ce6e4f83b8dc9e4e0ae@chromium.org>
Change-Id: I3c713e12bd0685d08ed31c40bd103c2685416d78
Reviewed-on: https://chromium-review.googlesource.com/c/chromiumos/platform/ec/+/3530958
Reviewed-by: Jeremy Bettis <4df7b5147fee087dca33c181f288ee7dbf56e022@chromium.org>
|
Python
|
bsd-3-clause
|
coreboot/chrome-ec,coreboot/chrome-ec,coreboot/chrome-ec,coreboot/chrome-ec,coreboot/chrome-ec,coreboot/chrome-ec
|
# Copyright 2021 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
register_npcx_project(
project_name="brya",
zephyr_board="brya",
dts_overlays=[
"battery.dts",
"bb_retimer.dts",
"cbi_eeprom.dts",
"fan.dts",
"gpio.dts",
"interrupts.dts",
"keyboard.dts",
"motionsense.dts",
"pwm_leds.dts",
"usbc.dts",
],
)
zephyr: Add "ghost" variant of brya
Right now using brya hardware to develop ghost EC. Initially it can
be a simple rename of brya. Later we will change the charger chip,
which will require a couple of DTS customizations. Eventually, this
project will need to move to not be a brya variant at some point.
BUG=b:222738712
BRANCH=none
TEST=builds (don't have hardware for testing yet)
Signed-off-by: Jack Rosenthal <d3f605bef1867f59845d4ce6e4f83b8dc9e4e0ae@chromium.org>
Change-Id: I3c713e12bd0685d08ed31c40bd103c2685416d78
Reviewed-on: https://chromium-review.googlesource.com/c/chromiumos/platform/ec/+/3530958
Reviewed-by: Jeremy Bettis <4df7b5147fee087dca33c181f288ee7dbf56e022@chromium.org>
|
# Copyright 2021 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
brya = register_npcx_project(
project_name="brya",
zephyr_board="brya",
dts_overlays=[
"battery.dts",
"bb_retimer.dts",
"cbi_eeprom.dts",
"fan.dts",
"gpio.dts",
"interrupts.dts",
"keyboard.dts",
"motionsense.dts",
"pwm_leds.dts",
"usbc.dts",
],
)
ghost = brya.variant(project_name="ghost")
|
<commit_before># Copyright 2021 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
register_npcx_project(
project_name="brya",
zephyr_board="brya",
dts_overlays=[
"battery.dts",
"bb_retimer.dts",
"cbi_eeprom.dts",
"fan.dts",
"gpio.dts",
"interrupts.dts",
"keyboard.dts",
"motionsense.dts",
"pwm_leds.dts",
"usbc.dts",
],
)
<commit_msg>zephyr: Add "ghost" variant of brya
Right now using brya hardware to develop ghost EC. Initially it can
be a simple rename of brya. Later we will change the charger chip,
which will require a couple of DTS customizations. Eventually, this
project will need to move to not be a brya variant at some point.
BUG=b:222738712
BRANCH=none
TEST=builds (don't have hardware for testing yet)
Signed-off-by: Jack Rosenthal <d3f605bef1867f59845d4ce6e4f83b8dc9e4e0ae@chromium.org>
Change-Id: I3c713e12bd0685d08ed31c40bd103c2685416d78
Reviewed-on: https://chromium-review.googlesource.com/c/chromiumos/platform/ec/+/3530958
Reviewed-by: Jeremy Bettis <4df7b5147fee087dca33c181f288ee7dbf56e022@chromium.org><commit_after>
|
# Copyright 2021 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
brya = register_npcx_project(
project_name="brya",
zephyr_board="brya",
dts_overlays=[
"battery.dts",
"bb_retimer.dts",
"cbi_eeprom.dts",
"fan.dts",
"gpio.dts",
"interrupts.dts",
"keyboard.dts",
"motionsense.dts",
"pwm_leds.dts",
"usbc.dts",
],
)
ghost = brya.variant(project_name="ghost")
|
# Copyright 2021 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
register_npcx_project(
project_name="brya",
zephyr_board="brya",
dts_overlays=[
"battery.dts",
"bb_retimer.dts",
"cbi_eeprom.dts",
"fan.dts",
"gpio.dts",
"interrupts.dts",
"keyboard.dts",
"motionsense.dts",
"pwm_leds.dts",
"usbc.dts",
],
)
zephyr: Add "ghost" variant of brya
Right now using brya hardware to develop ghost EC. Initially it can
be a simple rename of brya. Later we will change the charger chip,
which will require a couple of DTS customizations. Eventually, this
project will need to move to not be a brya variant at some point.
BUG=b:222738712
BRANCH=none
TEST=builds (don't have hardware for testing yet)
Signed-off-by: Jack Rosenthal <d3f605bef1867f59845d4ce6e4f83b8dc9e4e0ae@chromium.org>
Change-Id: I3c713e12bd0685d08ed31c40bd103c2685416d78
Reviewed-on: https://chromium-review.googlesource.com/c/chromiumos/platform/ec/+/3530958
Reviewed-by: Jeremy Bettis <4df7b5147fee087dca33c181f288ee7dbf56e022@chromium.org># Copyright 2021 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
brya = register_npcx_project(
project_name="brya",
zephyr_board="brya",
dts_overlays=[
"battery.dts",
"bb_retimer.dts",
"cbi_eeprom.dts",
"fan.dts",
"gpio.dts",
"interrupts.dts",
"keyboard.dts",
"motionsense.dts",
"pwm_leds.dts",
"usbc.dts",
],
)
ghost = brya.variant(project_name="ghost")
|
<commit_before># Copyright 2021 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
register_npcx_project(
project_name="brya",
zephyr_board="brya",
dts_overlays=[
"battery.dts",
"bb_retimer.dts",
"cbi_eeprom.dts",
"fan.dts",
"gpio.dts",
"interrupts.dts",
"keyboard.dts",
"motionsense.dts",
"pwm_leds.dts",
"usbc.dts",
],
)
<commit_msg>zephyr: Add "ghost" variant of brya
Right now using brya hardware to develop ghost EC. Initially it can
be a simple rename of brya. Later we will change the charger chip,
which will require a couple of DTS customizations. Eventually, this
project will need to move to not be a brya variant at some point.
BUG=b:222738712
BRANCH=none
TEST=builds (don't have hardware for testing yet)
Signed-off-by: Jack Rosenthal <d3f605bef1867f59845d4ce6e4f83b8dc9e4e0ae@chromium.org>
Change-Id: I3c713e12bd0685d08ed31c40bd103c2685416d78
Reviewed-on: https://chromium-review.googlesource.com/c/chromiumos/platform/ec/+/3530958
Reviewed-by: Jeremy Bettis <4df7b5147fee087dca33c181f288ee7dbf56e022@chromium.org><commit_after># Copyright 2021 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
brya = register_npcx_project(
project_name="brya",
zephyr_board="brya",
dts_overlays=[
"battery.dts",
"bb_retimer.dts",
"cbi_eeprom.dts",
"fan.dts",
"gpio.dts",
"interrupts.dts",
"keyboard.dts",
"motionsense.dts",
"pwm_leds.dts",
"usbc.dts",
],
)
ghost = brya.variant(project_name="ghost")
|
c0a7554c7c8160d6a7b4023441c3cbe5e2f46ee5
|
tests/test_replwrap.py
|
tests/test_replwrap.py
|
import sys
import unittest
import pexpect
from pexpect import replwrap
class REPLWrapTestCase(unittest.TestCase):
def test_python(self):
py = replwrap.python(sys.executable)
res = py.run_command("5+6")
self.assertEqual(res.strip(), "11")
def test_multiline(self):
py = replwrap.python(sys.executable)
res = py.run_command("for a in range(3):\n print(a)\n")
self.assertEqual(res.strip().splitlines(), ['0', '1', '2'])
# Should raise ValueError if input is incomplete
try:
py.run_command("for a in range(3):")
except ValueError:
pass
else:
assert False, "Didn't raise ValueError for incorrect input"
# Check that the REPL was reset (SIGINT) after the incomplete input
res = py.run_command("for a in range(3):\n print(a)\n")
self.assertEqual(res.strip().splitlines(), ['0', '1', '2'])
def test_existing_spawn(self):
child = pexpect.spawnu("python")
repl = replwrap.REPLWrapper(child, u">>> ",
"import sys; sys.ps1=%r" % replwrap.PEXPECT_PROMPT)
res = repl.run_command("print(7*6)")
self.assertEqual(res.strip(), "42")
if __name__ == '__main__':
unittest.main()
|
import sys
import unittest
import pexpect
from pexpect import replwrap
class REPLWrapTestCase(unittest.TestCase):
def test_python(self):
py = replwrap.python(sys.executable)
res = py.run_command("5+6")
self.assertEqual(res.strip(), "11")
def test_multiline(self):
py = replwrap.python(sys.executable)
res = py.run_command("for a in range(3):\n print(a)\n")
self.assertEqual(res.strip().splitlines(), ['0', '1', '2'])
# Should raise ValueError if input is incomplete
try:
py.run_command("for a in range(3):")
except ValueError:
pass
else:
assert False, "Didn't raise ValueError for incorrect input"
# Check that the REPL was reset (SIGINT) after the incomplete input
res = py.run_command("for a in range(3):\n print(a)\n")
self.assertEqual(res.strip().splitlines(), ['0', '1', '2'])
def test_existing_spawn(self):
child = pexpect.spawnu("python")
repl = replwrap.REPLWrapper(child, replwrap.u(">>> "),
"import sys; sys.ps1=%r" % replwrap.PEXPECT_PROMPT)
res = repl.run_command("print(7*6)")
self.assertEqual(res.strip(), "42")
if __name__ == '__main__':
unittest.main()
|
Fix another unicode literal for Python 3.2
|
Fix another unicode literal for Python 3.2
|
Python
|
isc
|
dongguangming/pexpect,blink1073/pexpect,crdoconnor/pexpect,nodish/pexpect,Wakeupbuddy/pexpect,Depado/pexpect,crdoconnor/pexpect,quatanium/pexpect,crdoconnor/pexpect,nodish/pexpect,quatanium/pexpect,bangi123/pexpect,quatanium/pexpect,bangi123/pexpect,bangi123/pexpect,blink1073/pexpect,nodish/pexpect,Depado/pexpect,Wakeupbuddy/pexpect,blink1073/pexpect,dongguangming/pexpect,Depado/pexpect,Depado/pexpect,Wakeupbuddy/pexpect,dongguangming/pexpect,Wakeupbuddy/pexpect,dongguangming/pexpect,bangi123/pexpect
|
import sys
import unittest
import pexpect
from pexpect import replwrap
class REPLWrapTestCase(unittest.TestCase):
def test_python(self):
py = replwrap.python(sys.executable)
res = py.run_command("5+6")
self.assertEqual(res.strip(), "11")
def test_multiline(self):
py = replwrap.python(sys.executable)
res = py.run_command("for a in range(3):\n print(a)\n")
self.assertEqual(res.strip().splitlines(), ['0', '1', '2'])
# Should raise ValueError if input is incomplete
try:
py.run_command("for a in range(3):")
except ValueError:
pass
else:
assert False, "Didn't raise ValueError for incorrect input"
# Check that the REPL was reset (SIGINT) after the incomplete input
res = py.run_command("for a in range(3):\n print(a)\n")
self.assertEqual(res.strip().splitlines(), ['0', '1', '2'])
def test_existing_spawn(self):
child = pexpect.spawnu("python")
repl = replwrap.REPLWrapper(child, u">>> ",
"import sys; sys.ps1=%r" % replwrap.PEXPECT_PROMPT)
res = repl.run_command("print(7*6)")
self.assertEqual(res.strip(), "42")
if __name__ == '__main__':
unittest.main()Fix another unicode literal for Python 3.2
|
import sys
import unittest
import pexpect
from pexpect import replwrap
class REPLWrapTestCase(unittest.TestCase):
def test_python(self):
py = replwrap.python(sys.executable)
res = py.run_command("5+6")
self.assertEqual(res.strip(), "11")
def test_multiline(self):
py = replwrap.python(sys.executable)
res = py.run_command("for a in range(3):\n print(a)\n")
self.assertEqual(res.strip().splitlines(), ['0', '1', '2'])
# Should raise ValueError if input is incomplete
try:
py.run_command("for a in range(3):")
except ValueError:
pass
else:
assert False, "Didn't raise ValueError for incorrect input"
# Check that the REPL was reset (SIGINT) after the incomplete input
res = py.run_command("for a in range(3):\n print(a)\n")
self.assertEqual(res.strip().splitlines(), ['0', '1', '2'])
def test_existing_spawn(self):
child = pexpect.spawnu("python")
repl = replwrap.REPLWrapper(child, replwrap.u(">>> "),
"import sys; sys.ps1=%r" % replwrap.PEXPECT_PROMPT)
res = repl.run_command("print(7*6)")
self.assertEqual(res.strip(), "42")
if __name__ == '__main__':
unittest.main()
|
<commit_before>import sys
import unittest
import pexpect
from pexpect import replwrap
class REPLWrapTestCase(unittest.TestCase):
def test_python(self):
py = replwrap.python(sys.executable)
res = py.run_command("5+6")
self.assertEqual(res.strip(), "11")
def test_multiline(self):
py = replwrap.python(sys.executable)
res = py.run_command("for a in range(3):\n print(a)\n")
self.assertEqual(res.strip().splitlines(), ['0', '1', '2'])
# Should raise ValueError if input is incomplete
try:
py.run_command("for a in range(3):")
except ValueError:
pass
else:
assert False, "Didn't raise ValueError for incorrect input"
# Check that the REPL was reset (SIGINT) after the incomplete input
res = py.run_command("for a in range(3):\n print(a)\n")
self.assertEqual(res.strip().splitlines(), ['0', '1', '2'])
def test_existing_spawn(self):
child = pexpect.spawnu("python")
repl = replwrap.REPLWrapper(child, u">>> ",
"import sys; sys.ps1=%r" % replwrap.PEXPECT_PROMPT)
res = repl.run_command("print(7*6)")
self.assertEqual(res.strip(), "42")
if __name__ == '__main__':
unittest.main()<commit_msg>Fix another unicode literal for Python 3.2<commit_after>
|
import sys
import unittest
import pexpect
from pexpect import replwrap
class REPLWrapTestCase(unittest.TestCase):
def test_python(self):
py = replwrap.python(sys.executable)
res = py.run_command("5+6")
self.assertEqual(res.strip(), "11")
def test_multiline(self):
py = replwrap.python(sys.executable)
res = py.run_command("for a in range(3):\n print(a)\n")
self.assertEqual(res.strip().splitlines(), ['0', '1', '2'])
# Should raise ValueError if input is incomplete
try:
py.run_command("for a in range(3):")
except ValueError:
pass
else:
assert False, "Didn't raise ValueError for incorrect input"
# Check that the REPL was reset (SIGINT) after the incomplete input
res = py.run_command("for a in range(3):\n print(a)\n")
self.assertEqual(res.strip().splitlines(), ['0', '1', '2'])
def test_existing_spawn(self):
child = pexpect.spawnu("python")
repl = replwrap.REPLWrapper(child, replwrap.u(">>> "),
"import sys; sys.ps1=%r" % replwrap.PEXPECT_PROMPT)
res = repl.run_command("print(7*6)")
self.assertEqual(res.strip(), "42")
if __name__ == '__main__':
unittest.main()
|
import sys
import unittest
import pexpect
from pexpect import replwrap
class REPLWrapTestCase(unittest.TestCase):
def test_python(self):
py = replwrap.python(sys.executable)
res = py.run_command("5+6")
self.assertEqual(res.strip(), "11")
def test_multiline(self):
py = replwrap.python(sys.executable)
res = py.run_command("for a in range(3):\n print(a)\n")
self.assertEqual(res.strip().splitlines(), ['0', '1', '2'])
# Should raise ValueError if input is incomplete
try:
py.run_command("for a in range(3):")
except ValueError:
pass
else:
assert False, "Didn't raise ValueError for incorrect input"
# Check that the REPL was reset (SIGINT) after the incomplete input
res = py.run_command("for a in range(3):\n print(a)\n")
self.assertEqual(res.strip().splitlines(), ['0', '1', '2'])
def test_existing_spawn(self):
child = pexpect.spawnu("python")
repl = replwrap.REPLWrapper(child, u">>> ",
"import sys; sys.ps1=%r" % replwrap.PEXPECT_PROMPT)
res = repl.run_command("print(7*6)")
self.assertEqual(res.strip(), "42")
if __name__ == '__main__':
unittest.main()Fix another unicode literal for Python 3.2import sys
import unittest
import pexpect
from pexpect import replwrap
class REPLWrapTestCase(unittest.TestCase):
def test_python(self):
py = replwrap.python(sys.executable)
res = py.run_command("5+6")
self.assertEqual(res.strip(), "11")
def test_multiline(self):
py = replwrap.python(sys.executable)
res = py.run_command("for a in range(3):\n print(a)\n")
self.assertEqual(res.strip().splitlines(), ['0', '1', '2'])
# Should raise ValueError if input is incomplete
try:
py.run_command("for a in range(3):")
except ValueError:
pass
else:
assert False, "Didn't raise ValueError for incorrect input"
# Check that the REPL was reset (SIGINT) after the incomplete input
res = py.run_command("for a in range(3):\n print(a)\n")
self.assertEqual(res.strip().splitlines(), ['0', '1', '2'])
def test_existing_spawn(self):
child = pexpect.spawnu("python")
repl = replwrap.REPLWrapper(child, replwrap.u(">>> "),
"import sys; sys.ps1=%r" % replwrap.PEXPECT_PROMPT)
res = repl.run_command("print(7*6)")
self.assertEqual(res.strip(), "42")
if __name__ == '__main__':
unittest.main()
|
<commit_before>import sys
import unittest
import pexpect
from pexpect import replwrap
class REPLWrapTestCase(unittest.TestCase):
def test_python(self):
py = replwrap.python(sys.executable)
res = py.run_command("5+6")
self.assertEqual(res.strip(), "11")
def test_multiline(self):
py = replwrap.python(sys.executable)
res = py.run_command("for a in range(3):\n print(a)\n")
self.assertEqual(res.strip().splitlines(), ['0', '1', '2'])
# Should raise ValueError if input is incomplete
try:
py.run_command("for a in range(3):")
except ValueError:
pass
else:
assert False, "Didn't raise ValueError for incorrect input"
# Check that the REPL was reset (SIGINT) after the incomplete input
res = py.run_command("for a in range(3):\n print(a)\n")
self.assertEqual(res.strip().splitlines(), ['0', '1', '2'])
def test_existing_spawn(self):
child = pexpect.spawnu("python")
repl = replwrap.REPLWrapper(child, u">>> ",
"import sys; sys.ps1=%r" % replwrap.PEXPECT_PROMPT)
res = repl.run_command("print(7*6)")
self.assertEqual(res.strip(), "42")
if __name__ == '__main__':
unittest.main()<commit_msg>Fix another unicode literal for Python 3.2<commit_after>import sys
import unittest
import pexpect
from pexpect import replwrap
class REPLWrapTestCase(unittest.TestCase):
def test_python(self):
py = replwrap.python(sys.executable)
res = py.run_command("5+6")
self.assertEqual(res.strip(), "11")
def test_multiline(self):
py = replwrap.python(sys.executable)
res = py.run_command("for a in range(3):\n print(a)\n")
self.assertEqual(res.strip().splitlines(), ['0', '1', '2'])
# Should raise ValueError if input is incomplete
try:
py.run_command("for a in range(3):")
except ValueError:
pass
else:
assert False, "Didn't raise ValueError for incorrect input"
# Check that the REPL was reset (SIGINT) after the incomplete input
res = py.run_command("for a in range(3):\n print(a)\n")
self.assertEqual(res.strip().splitlines(), ['0', '1', '2'])
def test_existing_spawn(self):
child = pexpect.spawnu("python")
repl = replwrap.REPLWrapper(child, replwrap.u(">>> "),
"import sys; sys.ps1=%r" % replwrap.PEXPECT_PROMPT)
res = repl.run_command("print(7*6)")
self.assertEqual(res.strip(), "42")
if __name__ == '__main__':
unittest.main()
|
ff435c335115262b38d66b912fe4e17b2861b45a
|
26-lazy-rivers/tf-26.py
|
26-lazy-rivers/tf-26.py
|
#!/usr/bin/env python
import sys, operator, string
def characters(filename):
for line in open(filename):
for c in line:
yield c
def all_words(filename):
start_char = True
for c in characters(filename):
if start_char == True:
word = ""
if c.isalnum():
# We found the start of a word
word = c.lower()
start_char = False
else: pass
else:
if c.isalnum():
word += c.lower()
else:
# We found end of word, emit it
start_char = True
yield word
def non_stop_words(filename):
stopwords = set(open('../stop_words.txt').read().split(',') + list(string.ascii_lowercase))
for w in all_words(filename):
if not w in stopwords:
yield w
def count_and_sort(filename):
freqs = {}
for w in non_stop_words(filename):
freqs[w] = 1 if w not in freqs else freqs[w]+1
return sorted(freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
#
# The main function
#
word_freqs = count_and_sort(sys.argv[1])
for (w, c) in word_freqs[0:25]:
print w, ' - ', c
|
#!/usr/bin/env python
import sys, operator, string
def characters(filename):
for line in open(filename):
for c in line:
yield c
def all_words(filename):
start_char = True
for c in characters(filename):
if start_char == True:
word = ""
if c.isalnum():
# We found the start of a word
word = c.lower()
start_char = False
else: pass
else:
if c.isalnum():
word += c.lower()
else:
# We found end of word, emit it
start_char = True
yield word
def non_stop_words(filename):
stopwords = set(open('../stop_words.txt').read().split(',') + list(string.ascii_lowercase))
for w in all_words(filename):
if not w in stopwords:
yield w
def count_and_sort(filename):
freqs, i = {}, 1
for w in non_stop_words(filename):
freqs[w] = 1 if w not in freqs else freqs[w]+1
if i % 5000 == 0:
yield sorted(freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
i = i+1
yield sorted(freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
#
# The main function
#
for word_freqs in count_and_sort(sys.argv[1]):
print "-----------------------------"
for (w, c) in word_freqs[0:25]:
print w, ' - ', c
|
Make the last function also a generator, so that the explanation flows better
|
Make the last function also a generator, so that the explanation flows better
|
Python
|
mit
|
folpindo/exercises-in-programming-style,crista/exercises-in-programming-style,rajanvenkataguru/exercises-in-programming-style,panesofglass/exercises-in-programming-style,panesofglass/exercises-in-programming-style,wolfhesse/exercises-in-programming-style,jw0201/exercises-in-programming-style,kranthikumar/exercises-in-programming-style,aaron-goshine/exercises-in-programming-style,Drooids/exercises-in-programming-style,crista/exercises-in-programming-style,jw0201/exercises-in-programming-style,mathkann/exercises-in-programming-style,kranthikumar/exercises-in-programming-style,wolfhesse/exercises-in-programming-style,jim-thisplace/exercises-in-programming-style,bgamwell/exercises-in-programming-style,jim-thisplace/exercises-in-programming-style,emil-mi/exercises-in-programming-style,folpindo/exercises-in-programming-style,jim-thisplace/exercises-in-programming-style,rajanvenkataguru/exercises-in-programming-style,crista/exercises-in-programming-style,rajanvenkataguru/exercises-in-programming-style,bgamwell/exercises-in-programming-style,GabrielNicolasAvellaneda/exercises-in-programming-style,placrosse/exercises-in-programming-style,aaron-goshine/exercises-in-programming-style,mathkann/exercises-in-programming-style,folpindo/exercises-in-programming-style,jw0201/exercises-in-programming-style,panesofglass/exercises-in-programming-style,folpindo/exercises-in-programming-style,wolfhesse/exercises-in-programming-style,placrosse/exercises-in-programming-style,mathkann/exercises-in-programming-style,jim-thisplace/exercises-in-programming-style,folpindo/exercises-in-programming-style,rajanvenkataguru/exercises-in-programming-style,bgamwell/exercises-in-programming-style,halagoing/exercises-in-programming-style,halagoing/exercises-in-programming-style,matk86/exercises-in-programming-style,halagoing/exercises-in-programming-style,mathkann/exercises-in-programming-style,crista/exercises-in-programming-style,Drooids/exercises-in-programming-style,placrosse/exercises-in-programming-style,halagoing/exercises-in-programming-style,aaron-goshine/exercises-in-programming-style,GabrielNicolasAvellaneda/exercises-in-programming-style,wolfhesse/exercises-in-programming-style,jw0201/exercises-in-programming-style,GabrielNicolasAvellaneda/exercises-in-programming-style,placrosse/exercises-in-programming-style,crista/exercises-in-programming-style,bgamwell/exercises-in-programming-style,halagoing/exercises-in-programming-style,matk86/exercises-in-programming-style,aaron-goshine/exercises-in-programming-style,Drooids/exercises-in-programming-style,bgamwell/exercises-in-programming-style,matk86/exercises-in-programming-style,GabrielNicolasAvellaneda/exercises-in-programming-style,rajanvenkataguru/exercises-in-programming-style,matk86/exercises-in-programming-style,kranthikumar/exercises-in-programming-style,Drooids/exercises-in-programming-style,kranthikumar/exercises-in-programming-style,matk86/exercises-in-programming-style,emil-mi/exercises-in-programming-style,aaron-goshine/exercises-in-programming-style,Drooids/exercises-in-programming-style,mathkann/exercises-in-programming-style,emil-mi/exercises-in-programming-style,kranthikumar/exercises-in-programming-style,jw0201/exercises-in-programming-style,panesofglass/exercises-in-programming-style,jim-thisplace/exercises-in-programming-style,emil-mi/exercises-in-programming-style,placrosse/exercises-in-programming-style,panesofglass/exercises-in-programming-style,GabrielNicolasAvellaneda/exercises-in-programming-style
|
#!/usr/bin/env python
import sys, operator, string
def characters(filename):
for line in open(filename):
for c in line:
yield c
def all_words(filename):
start_char = True
for c in characters(filename):
if start_char == True:
word = ""
if c.isalnum():
# We found the start of a word
word = c.lower()
start_char = False
else: pass
else:
if c.isalnum():
word += c.lower()
else:
# We found end of word, emit it
start_char = True
yield word
def non_stop_words(filename):
stopwords = set(open('../stop_words.txt').read().split(',') + list(string.ascii_lowercase))
for w in all_words(filename):
if not w in stopwords:
yield w
def count_and_sort(filename):
freqs = {}
for w in non_stop_words(filename):
freqs[w] = 1 if w not in freqs else freqs[w]+1
return sorted(freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
#
# The main function
#
word_freqs = count_and_sort(sys.argv[1])
for (w, c) in word_freqs[0:25]:
print w, ' - ', c
Make the last function also a generator, so that the explanation flows better
|
#!/usr/bin/env python
import sys, operator, string
def characters(filename):
for line in open(filename):
for c in line:
yield c
def all_words(filename):
start_char = True
for c in characters(filename):
if start_char == True:
word = ""
if c.isalnum():
# We found the start of a word
word = c.lower()
start_char = False
else: pass
else:
if c.isalnum():
word += c.lower()
else:
# We found end of word, emit it
start_char = True
yield word
def non_stop_words(filename):
stopwords = set(open('../stop_words.txt').read().split(',') + list(string.ascii_lowercase))
for w in all_words(filename):
if not w in stopwords:
yield w
def count_and_sort(filename):
freqs, i = {}, 1
for w in non_stop_words(filename):
freqs[w] = 1 if w not in freqs else freqs[w]+1
if i % 5000 == 0:
yield sorted(freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
i = i+1
yield sorted(freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
#
# The main function
#
for word_freqs in count_and_sort(sys.argv[1]):
print "-----------------------------"
for (w, c) in word_freqs[0:25]:
print w, ' - ', c
|
<commit_before>#!/usr/bin/env python
import sys, operator, string
def characters(filename):
for line in open(filename):
for c in line:
yield c
def all_words(filename):
start_char = True
for c in characters(filename):
if start_char == True:
word = ""
if c.isalnum():
# We found the start of a word
word = c.lower()
start_char = False
else: pass
else:
if c.isalnum():
word += c.lower()
else:
# We found end of word, emit it
start_char = True
yield word
def non_stop_words(filename):
stopwords = set(open('../stop_words.txt').read().split(',') + list(string.ascii_lowercase))
for w in all_words(filename):
if not w in stopwords:
yield w
def count_and_sort(filename):
freqs = {}
for w in non_stop_words(filename):
freqs[w] = 1 if w not in freqs else freqs[w]+1
return sorted(freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
#
# The main function
#
word_freqs = count_and_sort(sys.argv[1])
for (w, c) in word_freqs[0:25]:
print w, ' - ', c
<commit_msg>Make the last function also a generator, so that the explanation flows better<commit_after>
|
#!/usr/bin/env python
import sys, operator, string
def characters(filename):
for line in open(filename):
for c in line:
yield c
def all_words(filename):
start_char = True
for c in characters(filename):
if start_char == True:
word = ""
if c.isalnum():
# We found the start of a word
word = c.lower()
start_char = False
else: pass
else:
if c.isalnum():
word += c.lower()
else:
# We found end of word, emit it
start_char = True
yield word
def non_stop_words(filename):
stopwords = set(open('../stop_words.txt').read().split(',') + list(string.ascii_lowercase))
for w in all_words(filename):
if not w in stopwords:
yield w
def count_and_sort(filename):
freqs, i = {}, 1
for w in non_stop_words(filename):
freqs[w] = 1 if w not in freqs else freqs[w]+1
if i % 5000 == 0:
yield sorted(freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
i = i+1
yield sorted(freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
#
# The main function
#
for word_freqs in count_and_sort(sys.argv[1]):
print "-----------------------------"
for (w, c) in word_freqs[0:25]:
print w, ' - ', c
|
#!/usr/bin/env python
import sys, operator, string
def characters(filename):
for line in open(filename):
for c in line:
yield c
def all_words(filename):
start_char = True
for c in characters(filename):
if start_char == True:
word = ""
if c.isalnum():
# We found the start of a word
word = c.lower()
start_char = False
else: pass
else:
if c.isalnum():
word += c.lower()
else:
# We found end of word, emit it
start_char = True
yield word
def non_stop_words(filename):
stopwords = set(open('../stop_words.txt').read().split(',') + list(string.ascii_lowercase))
for w in all_words(filename):
if not w in stopwords:
yield w
def count_and_sort(filename):
freqs = {}
for w in non_stop_words(filename):
freqs[w] = 1 if w not in freqs else freqs[w]+1
return sorted(freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
#
# The main function
#
word_freqs = count_and_sort(sys.argv[1])
for (w, c) in word_freqs[0:25]:
print w, ' - ', c
Make the last function also a generator, so that the explanation flows better#!/usr/bin/env python
import sys, operator, string
def characters(filename):
for line in open(filename):
for c in line:
yield c
def all_words(filename):
start_char = True
for c in characters(filename):
if start_char == True:
word = ""
if c.isalnum():
# We found the start of a word
word = c.lower()
start_char = False
else: pass
else:
if c.isalnum():
word += c.lower()
else:
# We found end of word, emit it
start_char = True
yield word
def non_stop_words(filename):
stopwords = set(open('../stop_words.txt').read().split(',') + list(string.ascii_lowercase))
for w in all_words(filename):
if not w in stopwords:
yield w
def count_and_sort(filename):
freqs, i = {}, 1
for w in non_stop_words(filename):
freqs[w] = 1 if w not in freqs else freqs[w]+1
if i % 5000 == 0:
yield sorted(freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
i = i+1
yield sorted(freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
#
# The main function
#
for word_freqs in count_and_sort(sys.argv[1]):
print "-----------------------------"
for (w, c) in word_freqs[0:25]:
print w, ' - ', c
|
<commit_before>#!/usr/bin/env python
import sys, operator, string
def characters(filename):
for line in open(filename):
for c in line:
yield c
def all_words(filename):
start_char = True
for c in characters(filename):
if start_char == True:
word = ""
if c.isalnum():
# We found the start of a word
word = c.lower()
start_char = False
else: pass
else:
if c.isalnum():
word += c.lower()
else:
# We found end of word, emit it
start_char = True
yield word
def non_stop_words(filename):
stopwords = set(open('../stop_words.txt').read().split(',') + list(string.ascii_lowercase))
for w in all_words(filename):
if not w in stopwords:
yield w
def count_and_sort(filename):
freqs = {}
for w in non_stop_words(filename):
freqs[w] = 1 if w not in freqs else freqs[w]+1
return sorted(freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
#
# The main function
#
word_freqs = count_and_sort(sys.argv[1])
for (w, c) in word_freqs[0:25]:
print w, ' - ', c
<commit_msg>Make the last function also a generator, so that the explanation flows better<commit_after>#!/usr/bin/env python
import sys, operator, string
def characters(filename):
for line in open(filename):
for c in line:
yield c
def all_words(filename):
start_char = True
for c in characters(filename):
if start_char == True:
word = ""
if c.isalnum():
# We found the start of a word
word = c.lower()
start_char = False
else: pass
else:
if c.isalnum():
word += c.lower()
else:
# We found end of word, emit it
start_char = True
yield word
def non_stop_words(filename):
stopwords = set(open('../stop_words.txt').read().split(',') + list(string.ascii_lowercase))
for w in all_words(filename):
if not w in stopwords:
yield w
def count_and_sort(filename):
freqs, i = {}, 1
for w in non_stop_words(filename):
freqs[w] = 1 if w not in freqs else freqs[w]+1
if i % 5000 == 0:
yield sorted(freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
i = i+1
yield sorted(freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
#
# The main function
#
for word_freqs in count_and_sort(sys.argv[1]):
print "-----------------------------"
for (w, c) in word_freqs[0:25]:
print w, ' - ', c
|
73aa38a5d481a26278dd29364f16839cad0f22cf
|
manager/projects/ui/views/files.py
|
manager/projects/ui/views/files.py
|
from django.contrib.auth.decorators import login_required
from django.http import HttpRequest, HttpResponse
from django.shortcuts import render
from projects.api.views.files import ProjectsFilesViewSet
@login_required
def list(request: HttpRequest, *args, **kwargs) -> HttpResponse:
"""
Get a list of project files.
The trailing part of the URL becomes the `prefix` query
parameter, consistent with API ending e.g.
/<account>/<project>/files/sub?search=foo
is equivalent to:
/api/projects/<project>/files?prefix=sub&search=foo
"""
prefix = kwargs.get("prefix")
if prefix and not prefix.endswith("/"):
prefix += "/"
request.GET = request.GET.copy()
request.GET["prefix"] = prefix
request.GET["aggregate"] = True
viewset = ProjectsFilesViewSet.init("list", request, args, kwargs)
project = viewset.get_project()
files = viewset.get_queryset(project)
# List of tuples for directory breadcrumbs
dirs = [("root", "")]
path = ""
for name in prefix.split("/"):
if name:
path += name + "/"
dirs.append((name, path))
return render(
request,
"projects/files/list.html",
dict(prefix=prefix, dirs=dirs, files=files, project=project,),
)
|
from django.contrib.auth.decorators import login_required
from django.http import HttpRequest, HttpResponse
from django.shortcuts import render
from projects.api.views.files import ProjectsFilesViewSet
@login_required
def list(request: HttpRequest, *args, **kwargs) -> HttpResponse:
"""
Get a list of project files.
The trailing part of the URL becomes the `prefix` query
parameter, consistent with API ending e.g.
/<account>/<project>/files/sub?search=foo
is equivalent to:
/api/projects/<project>/files?prefix=sub&search=foo
"""
request.GET = request.GET.copy()
request.GET["prefix"] = kwargs.get("prefix")
viewset = ProjectsFilesViewSet.init("list", request, args, kwargs)
project = viewset.get_project()
files = viewset.get_queryset(project)
context = viewset.get_response_context(queryset=files)
return render(
request, "projects/files/list.html", dict(project=project, **context),
)
|
Update view for change in viewset
|
refactor(Files): Update view for change in viewset
|
Python
|
apache-2.0
|
stencila/hub,stencila/hub,stencila/hub,stencila/hub,stencila/hub
|
from django.contrib.auth.decorators import login_required
from django.http import HttpRequest, HttpResponse
from django.shortcuts import render
from projects.api.views.files import ProjectsFilesViewSet
@login_required
def list(request: HttpRequest, *args, **kwargs) -> HttpResponse:
"""
Get a list of project files.
The trailing part of the URL becomes the `prefix` query
parameter, consistent with API ending e.g.
/<account>/<project>/files/sub?search=foo
is equivalent to:
/api/projects/<project>/files?prefix=sub&search=foo
"""
prefix = kwargs.get("prefix")
if prefix and not prefix.endswith("/"):
prefix += "/"
request.GET = request.GET.copy()
request.GET["prefix"] = prefix
request.GET["aggregate"] = True
viewset = ProjectsFilesViewSet.init("list", request, args, kwargs)
project = viewset.get_project()
files = viewset.get_queryset(project)
# List of tuples for directory breadcrumbs
dirs = [("root", "")]
path = ""
for name in prefix.split("/"):
if name:
path += name + "/"
dirs.append((name, path))
return render(
request,
"projects/files/list.html",
dict(prefix=prefix, dirs=dirs, files=files, project=project,),
)
refactor(Files): Update view for change in viewset
|
from django.contrib.auth.decorators import login_required
from django.http import HttpRequest, HttpResponse
from django.shortcuts import render
from projects.api.views.files import ProjectsFilesViewSet
@login_required
def list(request: HttpRequest, *args, **kwargs) -> HttpResponse:
"""
Get a list of project files.
The trailing part of the URL becomes the `prefix` query
parameter, consistent with API ending e.g.
/<account>/<project>/files/sub?search=foo
is equivalent to:
/api/projects/<project>/files?prefix=sub&search=foo
"""
request.GET = request.GET.copy()
request.GET["prefix"] = kwargs.get("prefix")
viewset = ProjectsFilesViewSet.init("list", request, args, kwargs)
project = viewset.get_project()
files = viewset.get_queryset(project)
context = viewset.get_response_context(queryset=files)
return render(
request, "projects/files/list.html", dict(project=project, **context),
)
|
<commit_before>from django.contrib.auth.decorators import login_required
from django.http import HttpRequest, HttpResponse
from django.shortcuts import render
from projects.api.views.files import ProjectsFilesViewSet
@login_required
def list(request: HttpRequest, *args, **kwargs) -> HttpResponse:
"""
Get a list of project files.
The trailing part of the URL becomes the `prefix` query
parameter, consistent with API ending e.g.
/<account>/<project>/files/sub?search=foo
is equivalent to:
/api/projects/<project>/files?prefix=sub&search=foo
"""
prefix = kwargs.get("prefix")
if prefix and not prefix.endswith("/"):
prefix += "/"
request.GET = request.GET.copy()
request.GET["prefix"] = prefix
request.GET["aggregate"] = True
viewset = ProjectsFilesViewSet.init("list", request, args, kwargs)
project = viewset.get_project()
files = viewset.get_queryset(project)
# List of tuples for directory breadcrumbs
dirs = [("root", "")]
path = ""
for name in prefix.split("/"):
if name:
path += name + "/"
dirs.append((name, path))
return render(
request,
"projects/files/list.html",
dict(prefix=prefix, dirs=dirs, files=files, project=project,),
)
<commit_msg>refactor(Files): Update view for change in viewset<commit_after>
|
from django.contrib.auth.decorators import login_required
from django.http import HttpRequest, HttpResponse
from django.shortcuts import render
from projects.api.views.files import ProjectsFilesViewSet
@login_required
def list(request: HttpRequest, *args, **kwargs) -> HttpResponse:
"""
Get a list of project files.
The trailing part of the URL becomes the `prefix` query
parameter, consistent with API ending e.g.
/<account>/<project>/files/sub?search=foo
is equivalent to:
/api/projects/<project>/files?prefix=sub&search=foo
"""
request.GET = request.GET.copy()
request.GET["prefix"] = kwargs.get("prefix")
viewset = ProjectsFilesViewSet.init("list", request, args, kwargs)
project = viewset.get_project()
files = viewset.get_queryset(project)
context = viewset.get_response_context(queryset=files)
return render(
request, "projects/files/list.html", dict(project=project, **context),
)
|
from django.contrib.auth.decorators import login_required
from django.http import HttpRequest, HttpResponse
from django.shortcuts import render
from projects.api.views.files import ProjectsFilesViewSet
@login_required
def list(request: HttpRequest, *args, **kwargs) -> HttpResponse:
"""
Get a list of project files.
The trailing part of the URL becomes the `prefix` query
parameter, consistent with API ending e.g.
/<account>/<project>/files/sub?search=foo
is equivalent to:
/api/projects/<project>/files?prefix=sub&search=foo
"""
prefix = kwargs.get("prefix")
if prefix and not prefix.endswith("/"):
prefix += "/"
request.GET = request.GET.copy()
request.GET["prefix"] = prefix
request.GET["aggregate"] = True
viewset = ProjectsFilesViewSet.init("list", request, args, kwargs)
project = viewset.get_project()
files = viewset.get_queryset(project)
# List of tuples for directory breadcrumbs
dirs = [("root", "")]
path = ""
for name in prefix.split("/"):
if name:
path += name + "/"
dirs.append((name, path))
return render(
request,
"projects/files/list.html",
dict(prefix=prefix, dirs=dirs, files=files, project=project,),
)
refactor(Files): Update view for change in viewsetfrom django.contrib.auth.decorators import login_required
from django.http import HttpRequest, HttpResponse
from django.shortcuts import render
from projects.api.views.files import ProjectsFilesViewSet
@login_required
def list(request: HttpRequest, *args, **kwargs) -> HttpResponse:
"""
Get a list of project files.
The trailing part of the URL becomes the `prefix` query
parameter, consistent with API ending e.g.
/<account>/<project>/files/sub?search=foo
is equivalent to:
/api/projects/<project>/files?prefix=sub&search=foo
"""
request.GET = request.GET.copy()
request.GET["prefix"] = kwargs.get("prefix")
viewset = ProjectsFilesViewSet.init("list", request, args, kwargs)
project = viewset.get_project()
files = viewset.get_queryset(project)
context = viewset.get_response_context(queryset=files)
return render(
request, "projects/files/list.html", dict(project=project, **context),
)
|
<commit_before>from django.contrib.auth.decorators import login_required
from django.http import HttpRequest, HttpResponse
from django.shortcuts import render
from projects.api.views.files import ProjectsFilesViewSet
@login_required
def list(request: HttpRequest, *args, **kwargs) -> HttpResponse:
"""
Get a list of project files.
The trailing part of the URL becomes the `prefix` query
parameter, consistent with API ending e.g.
/<account>/<project>/files/sub?search=foo
is equivalent to:
/api/projects/<project>/files?prefix=sub&search=foo
"""
prefix = kwargs.get("prefix")
if prefix and not prefix.endswith("/"):
prefix += "/"
request.GET = request.GET.copy()
request.GET["prefix"] = prefix
request.GET["aggregate"] = True
viewset = ProjectsFilesViewSet.init("list", request, args, kwargs)
project = viewset.get_project()
files = viewset.get_queryset(project)
# List of tuples for directory breadcrumbs
dirs = [("root", "")]
path = ""
for name in prefix.split("/"):
if name:
path += name + "/"
dirs.append((name, path))
return render(
request,
"projects/files/list.html",
dict(prefix=prefix, dirs=dirs, files=files, project=project,),
)
<commit_msg>refactor(Files): Update view for change in viewset<commit_after>from django.contrib.auth.decorators import login_required
from django.http import HttpRequest, HttpResponse
from django.shortcuts import render
from projects.api.views.files import ProjectsFilesViewSet
@login_required
def list(request: HttpRequest, *args, **kwargs) -> HttpResponse:
"""
Get a list of project files.
The trailing part of the URL becomes the `prefix` query
parameter, consistent with API ending e.g.
/<account>/<project>/files/sub?search=foo
is equivalent to:
/api/projects/<project>/files?prefix=sub&search=foo
"""
request.GET = request.GET.copy()
request.GET["prefix"] = kwargs.get("prefix")
viewset = ProjectsFilesViewSet.init("list", request, args, kwargs)
project = viewset.get_project()
files = viewset.get_queryset(project)
context = viewset.get_response_context(queryset=files)
return render(
request, "projects/files/list.html", dict(project=project, **context),
)
|
ff460f9a3c7df3322271eeb5de3bead72fe121bc
|
bmi_tester/tests_pytest/test_grid_uniform_rectilinear.py
|
bmi_tester/tests_pytest/test_grid_uniform_rectilinear.py
|
import warnings
import numpy as np
def test_get_grid_shape(new_bmi, gid):
"""Test the grid shape."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
shape = np.empty(ndim, dtype=np.int32)
try:
rtn = new_bmi.get_grid_shape(gid, shape)
except TypeError:
warnings.warn('get_grid_shape should take two arguments')
rtn = new_bmi.get_grid_shape(gid)
shape[:] = rtn
else:
assert rtn is shape
for dim in shape:
assert dim > 0
if gtype == 'scalar':
ndim = new_bmi.get_grid_rank(gid)
shape = np.empty(ndim, dtype=np.int32)
try:
rtn = new_bmi.get_grid_shape(gid, shape)
except TypeError:
warnings.warn('get_grid_shape should take two arguments')
rtn = new_bmi.get_grid_shape(gid)
else:
assert rtn is shape
np.testing.assert_equal(shape, ())
def test_get_grid_spacing(new_bmi, gid):
"""Test the grid spacing."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
spacing = np.empty(ndim, dtype=float)
assert spacing is new_bmi.get_grid_spacing(gid, spacing)
|
import warnings
import numpy as np
def test_get_grid_shape(new_bmi, gid):
"""Test the grid shape."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
shape = np.empty(ndim, dtype=np.int32)
try:
rtn = new_bmi.get_grid_shape(gid, shape)
except TypeError:
warnings.warn('get_grid_shape should take two arguments')
rtn = new_bmi.get_grid_shape(gid)
shape[:] = rtn
else:
assert rtn is shape
for dim in shape:
assert dim > 0
def test_get_grid_spacing(new_bmi, gid):
"""Test the grid spacing."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
spacing = np.empty(ndim, dtype=float)
assert spacing is new_bmi.get_grid_spacing(gid, spacing)
|
Remove test for get_grid_shape for scalar grids.
|
Remove test for get_grid_shape for scalar grids.
|
Python
|
mit
|
csdms/bmi-tester
|
import warnings
import numpy as np
def test_get_grid_shape(new_bmi, gid):
"""Test the grid shape."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
shape = np.empty(ndim, dtype=np.int32)
try:
rtn = new_bmi.get_grid_shape(gid, shape)
except TypeError:
warnings.warn('get_grid_shape should take two arguments')
rtn = new_bmi.get_grid_shape(gid)
shape[:] = rtn
else:
assert rtn is shape
for dim in shape:
assert dim > 0
if gtype == 'scalar':
ndim = new_bmi.get_grid_rank(gid)
shape = np.empty(ndim, dtype=np.int32)
try:
rtn = new_bmi.get_grid_shape(gid, shape)
except TypeError:
warnings.warn('get_grid_shape should take two arguments')
rtn = new_bmi.get_grid_shape(gid)
else:
assert rtn is shape
np.testing.assert_equal(shape, ())
def test_get_grid_spacing(new_bmi, gid):
"""Test the grid spacing."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
spacing = np.empty(ndim, dtype=float)
assert spacing is new_bmi.get_grid_spacing(gid, spacing)
Remove test for get_grid_shape for scalar grids.
|
import warnings
import numpy as np
def test_get_grid_shape(new_bmi, gid):
"""Test the grid shape."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
shape = np.empty(ndim, dtype=np.int32)
try:
rtn = new_bmi.get_grid_shape(gid, shape)
except TypeError:
warnings.warn('get_grid_shape should take two arguments')
rtn = new_bmi.get_grid_shape(gid)
shape[:] = rtn
else:
assert rtn is shape
for dim in shape:
assert dim > 0
def test_get_grid_spacing(new_bmi, gid):
"""Test the grid spacing."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
spacing = np.empty(ndim, dtype=float)
assert spacing is new_bmi.get_grid_spacing(gid, spacing)
|
<commit_before>import warnings
import numpy as np
def test_get_grid_shape(new_bmi, gid):
"""Test the grid shape."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
shape = np.empty(ndim, dtype=np.int32)
try:
rtn = new_bmi.get_grid_shape(gid, shape)
except TypeError:
warnings.warn('get_grid_shape should take two arguments')
rtn = new_bmi.get_grid_shape(gid)
shape[:] = rtn
else:
assert rtn is shape
for dim in shape:
assert dim > 0
if gtype == 'scalar':
ndim = new_bmi.get_grid_rank(gid)
shape = np.empty(ndim, dtype=np.int32)
try:
rtn = new_bmi.get_grid_shape(gid, shape)
except TypeError:
warnings.warn('get_grid_shape should take two arguments')
rtn = new_bmi.get_grid_shape(gid)
else:
assert rtn is shape
np.testing.assert_equal(shape, ())
def test_get_grid_spacing(new_bmi, gid):
"""Test the grid spacing."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
spacing = np.empty(ndim, dtype=float)
assert spacing is new_bmi.get_grid_spacing(gid, spacing)
<commit_msg>Remove test for get_grid_shape for scalar grids.<commit_after>
|
import warnings
import numpy as np
def test_get_grid_shape(new_bmi, gid):
"""Test the grid shape."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
shape = np.empty(ndim, dtype=np.int32)
try:
rtn = new_bmi.get_grid_shape(gid, shape)
except TypeError:
warnings.warn('get_grid_shape should take two arguments')
rtn = new_bmi.get_grid_shape(gid)
shape[:] = rtn
else:
assert rtn is shape
for dim in shape:
assert dim > 0
def test_get_grid_spacing(new_bmi, gid):
"""Test the grid spacing."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
spacing = np.empty(ndim, dtype=float)
assert spacing is new_bmi.get_grid_spacing(gid, spacing)
|
import warnings
import numpy as np
def test_get_grid_shape(new_bmi, gid):
"""Test the grid shape."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
shape = np.empty(ndim, dtype=np.int32)
try:
rtn = new_bmi.get_grid_shape(gid, shape)
except TypeError:
warnings.warn('get_grid_shape should take two arguments')
rtn = new_bmi.get_grid_shape(gid)
shape[:] = rtn
else:
assert rtn is shape
for dim in shape:
assert dim > 0
if gtype == 'scalar':
ndim = new_bmi.get_grid_rank(gid)
shape = np.empty(ndim, dtype=np.int32)
try:
rtn = new_bmi.get_grid_shape(gid, shape)
except TypeError:
warnings.warn('get_grid_shape should take two arguments')
rtn = new_bmi.get_grid_shape(gid)
else:
assert rtn is shape
np.testing.assert_equal(shape, ())
def test_get_grid_spacing(new_bmi, gid):
"""Test the grid spacing."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
spacing = np.empty(ndim, dtype=float)
assert spacing is new_bmi.get_grid_spacing(gid, spacing)
Remove test for get_grid_shape for scalar grids.import warnings
import numpy as np
def test_get_grid_shape(new_bmi, gid):
"""Test the grid shape."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
shape = np.empty(ndim, dtype=np.int32)
try:
rtn = new_bmi.get_grid_shape(gid, shape)
except TypeError:
warnings.warn('get_grid_shape should take two arguments')
rtn = new_bmi.get_grid_shape(gid)
shape[:] = rtn
else:
assert rtn is shape
for dim in shape:
assert dim > 0
def test_get_grid_spacing(new_bmi, gid):
"""Test the grid spacing."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
spacing = np.empty(ndim, dtype=float)
assert spacing is new_bmi.get_grid_spacing(gid, spacing)
|
<commit_before>import warnings
import numpy as np
def test_get_grid_shape(new_bmi, gid):
"""Test the grid shape."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
shape = np.empty(ndim, dtype=np.int32)
try:
rtn = new_bmi.get_grid_shape(gid, shape)
except TypeError:
warnings.warn('get_grid_shape should take two arguments')
rtn = new_bmi.get_grid_shape(gid)
shape[:] = rtn
else:
assert rtn is shape
for dim in shape:
assert dim > 0
if gtype == 'scalar':
ndim = new_bmi.get_grid_rank(gid)
shape = np.empty(ndim, dtype=np.int32)
try:
rtn = new_bmi.get_grid_shape(gid, shape)
except TypeError:
warnings.warn('get_grid_shape should take two arguments')
rtn = new_bmi.get_grid_shape(gid)
else:
assert rtn is shape
np.testing.assert_equal(shape, ())
def test_get_grid_spacing(new_bmi, gid):
"""Test the grid spacing."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
spacing = np.empty(ndim, dtype=float)
assert spacing is new_bmi.get_grid_spacing(gid, spacing)
<commit_msg>Remove test for get_grid_shape for scalar grids.<commit_after>import warnings
import numpy as np
def test_get_grid_shape(new_bmi, gid):
"""Test the grid shape."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
shape = np.empty(ndim, dtype=np.int32)
try:
rtn = new_bmi.get_grid_shape(gid, shape)
except TypeError:
warnings.warn('get_grid_shape should take two arguments')
rtn = new_bmi.get_grid_shape(gid)
shape[:] = rtn
else:
assert rtn is shape
for dim in shape:
assert dim > 0
def test_get_grid_spacing(new_bmi, gid):
"""Test the grid spacing."""
gtype = new_bmi.get_grid_type(gid)
if gtype == 'uniform_rectilinear':
ndim = new_bmi.get_grid_rank(gid)
spacing = np.empty(ndim, dtype=float)
assert spacing is new_bmi.get_grid_spacing(gid, spacing)
|
a19eac7104268865bd66bac520ffd41eacc30920
|
lifetimes/datasets/__init__.py
|
lifetimes/datasets/__init__.py
|
# -*- coding: utf-8 -*-
# modified from https://github.com/CamDavidsonPilon/lifelines/
import pandas as pd
from pkg_resources import resource_filename
__all__ = [
'load_cdnow',
'load_transaction_data',
]
def load_dataset(filename, **kwargs):
'''
Load a dataset from lifetimes.datasets
Parameters:
filename : for example "larynx.csv"
usecols : list of columns in file to use
Returns : Pandas dataframe
'''
return pd.read_csv(resource_filename('lifetimes', 'datasets/' + filename), **kwargs)
def load_cdnow(**kwargs):
return load_dataset('cdnow_customers.csv', **kwargs)
def load_transaction_data(**kwargs):
return load_dataset('example_transactions.csv', **kwargs)
|
# -*- coding: utf-8 -*-
# modified from https://github.com/CamDavidsonPilon/lifelines/
import pandas as pd
from pkg_resources import resource_filename
__all__ = [
'load_cdnow',
'load_transaction_data',
]
def load_dataset(filename, **kwargs):
'''
Load a dataset from lifetimes.datasets
Parameters:
filename : for example "larynx.csv"
usecols : list of columns in file to use
Returns : Pandas dataframe
'''
return pd.read_csv(resource_filename('lifetimes', 'datasets/' + filename), **kwargs)
def load_cdnow(**kwargs):
return load_dataset('cdnow_customers.csv', **kwargs)
def load_transaction_data(**kwargs):
"""
Returns a Pandas dataframe of transactional data. Looks like:
date id
0 2014-03-08 00:00:00 0
1 2014-05-21 00:00:00 1
2 2014-03-14 00:00:00 2
3 2014-04-09 00:00:00 2
4 2014-05-21 00:00:00 2
The data was artificially created using Lifetimes data generation routines. Data was generated
between 2014-01-01 to 2014-12-31.
"""
return load_dataset('example_transactions.csv', **kwargs)
|
Add a doc string to make using this dataset easier
|
Add a doc string to make using this dataset easier
|
Python
|
mit
|
aprotopopov/lifetimes,CamDavidsonPilon/lifetimes,luke14free/lifetimes,statwonk/lifetimes
|
# -*- coding: utf-8 -*-
# modified from https://github.com/CamDavidsonPilon/lifelines/
import pandas as pd
from pkg_resources import resource_filename
__all__ = [
'load_cdnow',
'load_transaction_data',
]
def load_dataset(filename, **kwargs):
'''
Load a dataset from lifetimes.datasets
Parameters:
filename : for example "larynx.csv"
usecols : list of columns in file to use
Returns : Pandas dataframe
'''
return pd.read_csv(resource_filename('lifetimes', 'datasets/' + filename), **kwargs)
def load_cdnow(**kwargs):
return load_dataset('cdnow_customers.csv', **kwargs)
def load_transaction_data(**kwargs):
return load_dataset('example_transactions.csv', **kwargs)
Add a doc string to make using this dataset easier
|
# -*- coding: utf-8 -*-
# modified from https://github.com/CamDavidsonPilon/lifelines/
import pandas as pd
from pkg_resources import resource_filename
__all__ = [
'load_cdnow',
'load_transaction_data',
]
def load_dataset(filename, **kwargs):
'''
Load a dataset from lifetimes.datasets
Parameters:
filename : for example "larynx.csv"
usecols : list of columns in file to use
Returns : Pandas dataframe
'''
return pd.read_csv(resource_filename('lifetimes', 'datasets/' + filename), **kwargs)
def load_cdnow(**kwargs):
return load_dataset('cdnow_customers.csv', **kwargs)
def load_transaction_data(**kwargs):
"""
Returns a Pandas dataframe of transactional data. Looks like:
date id
0 2014-03-08 00:00:00 0
1 2014-05-21 00:00:00 1
2 2014-03-14 00:00:00 2
3 2014-04-09 00:00:00 2
4 2014-05-21 00:00:00 2
The data was artificially created using Lifetimes data generation routines. Data was generated
between 2014-01-01 to 2014-12-31.
"""
return load_dataset('example_transactions.csv', **kwargs)
|
<commit_before># -*- coding: utf-8 -*-
# modified from https://github.com/CamDavidsonPilon/lifelines/
import pandas as pd
from pkg_resources import resource_filename
__all__ = [
'load_cdnow',
'load_transaction_data',
]
def load_dataset(filename, **kwargs):
'''
Load a dataset from lifetimes.datasets
Parameters:
filename : for example "larynx.csv"
usecols : list of columns in file to use
Returns : Pandas dataframe
'''
return pd.read_csv(resource_filename('lifetimes', 'datasets/' + filename), **kwargs)
def load_cdnow(**kwargs):
return load_dataset('cdnow_customers.csv', **kwargs)
def load_transaction_data(**kwargs):
return load_dataset('example_transactions.csv', **kwargs)
<commit_msg>Add a doc string to make using this dataset easier<commit_after>
|
# -*- coding: utf-8 -*-
# modified from https://github.com/CamDavidsonPilon/lifelines/
import pandas as pd
from pkg_resources import resource_filename
__all__ = [
'load_cdnow',
'load_transaction_data',
]
def load_dataset(filename, **kwargs):
'''
Load a dataset from lifetimes.datasets
Parameters:
filename : for example "larynx.csv"
usecols : list of columns in file to use
Returns : Pandas dataframe
'''
return pd.read_csv(resource_filename('lifetimes', 'datasets/' + filename), **kwargs)
def load_cdnow(**kwargs):
return load_dataset('cdnow_customers.csv', **kwargs)
def load_transaction_data(**kwargs):
"""
Returns a Pandas dataframe of transactional data. Looks like:
date id
0 2014-03-08 00:00:00 0
1 2014-05-21 00:00:00 1
2 2014-03-14 00:00:00 2
3 2014-04-09 00:00:00 2
4 2014-05-21 00:00:00 2
The data was artificially created using Lifetimes data generation routines. Data was generated
between 2014-01-01 to 2014-12-31.
"""
return load_dataset('example_transactions.csv', **kwargs)
|
# -*- coding: utf-8 -*-
# modified from https://github.com/CamDavidsonPilon/lifelines/
import pandas as pd
from pkg_resources import resource_filename
__all__ = [
'load_cdnow',
'load_transaction_data',
]
def load_dataset(filename, **kwargs):
'''
Load a dataset from lifetimes.datasets
Parameters:
filename : for example "larynx.csv"
usecols : list of columns in file to use
Returns : Pandas dataframe
'''
return pd.read_csv(resource_filename('lifetimes', 'datasets/' + filename), **kwargs)
def load_cdnow(**kwargs):
return load_dataset('cdnow_customers.csv', **kwargs)
def load_transaction_data(**kwargs):
return load_dataset('example_transactions.csv', **kwargs)
Add a doc string to make using this dataset easier# -*- coding: utf-8 -*-
# modified from https://github.com/CamDavidsonPilon/lifelines/
import pandas as pd
from pkg_resources import resource_filename
__all__ = [
'load_cdnow',
'load_transaction_data',
]
def load_dataset(filename, **kwargs):
'''
Load a dataset from lifetimes.datasets
Parameters:
filename : for example "larynx.csv"
usecols : list of columns in file to use
Returns : Pandas dataframe
'''
return pd.read_csv(resource_filename('lifetimes', 'datasets/' + filename), **kwargs)
def load_cdnow(**kwargs):
return load_dataset('cdnow_customers.csv', **kwargs)
def load_transaction_data(**kwargs):
"""
Returns a Pandas dataframe of transactional data. Looks like:
date id
0 2014-03-08 00:00:00 0
1 2014-05-21 00:00:00 1
2 2014-03-14 00:00:00 2
3 2014-04-09 00:00:00 2
4 2014-05-21 00:00:00 2
The data was artificially created using Lifetimes data generation routines. Data was generated
between 2014-01-01 to 2014-12-31.
"""
return load_dataset('example_transactions.csv', **kwargs)
|
<commit_before># -*- coding: utf-8 -*-
# modified from https://github.com/CamDavidsonPilon/lifelines/
import pandas as pd
from pkg_resources import resource_filename
__all__ = [
'load_cdnow',
'load_transaction_data',
]
def load_dataset(filename, **kwargs):
'''
Load a dataset from lifetimes.datasets
Parameters:
filename : for example "larynx.csv"
usecols : list of columns in file to use
Returns : Pandas dataframe
'''
return pd.read_csv(resource_filename('lifetimes', 'datasets/' + filename), **kwargs)
def load_cdnow(**kwargs):
return load_dataset('cdnow_customers.csv', **kwargs)
def load_transaction_data(**kwargs):
return load_dataset('example_transactions.csv', **kwargs)
<commit_msg>Add a doc string to make using this dataset easier<commit_after># -*- coding: utf-8 -*-
# modified from https://github.com/CamDavidsonPilon/lifelines/
import pandas as pd
from pkg_resources import resource_filename
__all__ = [
'load_cdnow',
'load_transaction_data',
]
def load_dataset(filename, **kwargs):
'''
Load a dataset from lifetimes.datasets
Parameters:
filename : for example "larynx.csv"
usecols : list of columns in file to use
Returns : Pandas dataframe
'''
return pd.read_csv(resource_filename('lifetimes', 'datasets/' + filename), **kwargs)
def load_cdnow(**kwargs):
return load_dataset('cdnow_customers.csv', **kwargs)
def load_transaction_data(**kwargs):
"""
Returns a Pandas dataframe of transactional data. Looks like:
date id
0 2014-03-08 00:00:00 0
1 2014-05-21 00:00:00 1
2 2014-03-14 00:00:00 2
3 2014-04-09 00:00:00 2
4 2014-05-21 00:00:00 2
The data was artificially created using Lifetimes data generation routines. Data was generated
between 2014-01-01 to 2014-12-31.
"""
return load_dataset('example_transactions.csv', **kwargs)
|
4622125a0f73a77ae0327deb886ac9d4b1c50791
|
events/views.py
|
events/views.py
|
from django.shortcuts import render_to_response
from django.template import RequestContext
import datetime
from django.http import Http404
from events.models import Event
from events.models import Calendar
def index(request):
calendars = Calendar.objects.all()
return render_to_response(
'events/index.html',
{'calendars': calendars},
context_instance=RequestContext(request)
)
def calendar_events(request, calendar_slug=None):
events = Event.objects.filter(calendar__slug=calendar_slug).filter(end__gte=datetime.date.today()).order_by('start')
calendars = Calendar.objects.all()
calendar = calendars.get(slug=calendar_slug)
if not calendar:
raise Http404
return render_to_response(
'events/calendar_events.html',
{'events': events, 'calendar': calendar, 'calendars': calendars},
context_instance=RequestContext(request)
)
def event_detail(request, calendar_slug=None, event_slug=None):
calendars = Calendar.objects.all()
try:
event = Event.objects.get(slug=event_slug)
except Event.DoesNotExist:
raise Http404
return render_to_response(
'events/event_detail.html',
{'event': event, 'calendars': calendars},
context_instance=RequestContext(request)
)
|
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.core.urlresolvers import resolve
import datetime
from django.http import Http404
from events.models import Event
from events.models import Calendar
def index(request):
calendars = Calendar.objects.all()
current_app = resolve(request.path).namespace
context = RequestContext(request, current_app=current_app)
return render_to_response(
'events/index.html',
{'calendars': calendars},
context_instance=context
)
def calendar_events(request, calendar_slug=None):
events = Event.objects.filter(calendar__slug=calendar_slug).filter(end__gte=datetime.date.today()).order_by('start')
calendars = Calendar.objects.all()
calendar = calendars.get(slug=calendar_slug)
if not calendar:
raise Http404
current_app = resolve(request.path).namespace
context = RequestContext(request, current_app=current_app)
return render_to_response(
'events/calendar_events.html',
{'events': events, 'calendar': calendar, 'calendars': calendars},
context_instance=context
)
def event_detail(request, calendar_slug=None, event_slug=None):
calendars = Calendar.objects.all()
try:
event = Event.objects.get(slug=event_slug)
except Event.DoesNotExist:
raise Http404
current_app = resolve(request.path).namespace
context = RequestContext(request, current_app=current_app)
return render_to_response(
'events/event_detail.html',
{'event': event, 'calendars': calendars},
context_instance=context
)
|
Add current_app to context for django-cms v3 support
|
Add current_app to context for django-cms v3 support
|
Python
|
bsd-3-clause
|
theherk/django-theherk-events
|
from django.shortcuts import render_to_response
from django.template import RequestContext
import datetime
from django.http import Http404
from events.models import Event
from events.models import Calendar
def index(request):
calendars = Calendar.objects.all()
return render_to_response(
'events/index.html',
{'calendars': calendars},
context_instance=RequestContext(request)
)
def calendar_events(request, calendar_slug=None):
events = Event.objects.filter(calendar__slug=calendar_slug).filter(end__gte=datetime.date.today()).order_by('start')
calendars = Calendar.objects.all()
calendar = calendars.get(slug=calendar_slug)
if not calendar:
raise Http404
return render_to_response(
'events/calendar_events.html',
{'events': events, 'calendar': calendar, 'calendars': calendars},
context_instance=RequestContext(request)
)
def event_detail(request, calendar_slug=None, event_slug=None):
calendars = Calendar.objects.all()
try:
event = Event.objects.get(slug=event_slug)
except Event.DoesNotExist:
raise Http404
return render_to_response(
'events/event_detail.html',
{'event': event, 'calendars': calendars},
context_instance=RequestContext(request)
)
Add current_app to context for django-cms v3 support
|
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.core.urlresolvers import resolve
import datetime
from django.http import Http404
from events.models import Event
from events.models import Calendar
def index(request):
calendars = Calendar.objects.all()
current_app = resolve(request.path).namespace
context = RequestContext(request, current_app=current_app)
return render_to_response(
'events/index.html',
{'calendars': calendars},
context_instance=context
)
def calendar_events(request, calendar_slug=None):
events = Event.objects.filter(calendar__slug=calendar_slug).filter(end__gte=datetime.date.today()).order_by('start')
calendars = Calendar.objects.all()
calendar = calendars.get(slug=calendar_slug)
if not calendar:
raise Http404
current_app = resolve(request.path).namespace
context = RequestContext(request, current_app=current_app)
return render_to_response(
'events/calendar_events.html',
{'events': events, 'calendar': calendar, 'calendars': calendars},
context_instance=context
)
def event_detail(request, calendar_slug=None, event_slug=None):
calendars = Calendar.objects.all()
try:
event = Event.objects.get(slug=event_slug)
except Event.DoesNotExist:
raise Http404
current_app = resolve(request.path).namespace
context = RequestContext(request, current_app=current_app)
return render_to_response(
'events/event_detail.html',
{'event': event, 'calendars': calendars},
context_instance=context
)
|
<commit_before>from django.shortcuts import render_to_response
from django.template import RequestContext
import datetime
from django.http import Http404
from events.models import Event
from events.models import Calendar
def index(request):
calendars = Calendar.objects.all()
return render_to_response(
'events/index.html',
{'calendars': calendars},
context_instance=RequestContext(request)
)
def calendar_events(request, calendar_slug=None):
events = Event.objects.filter(calendar__slug=calendar_slug).filter(end__gte=datetime.date.today()).order_by('start')
calendars = Calendar.objects.all()
calendar = calendars.get(slug=calendar_slug)
if not calendar:
raise Http404
return render_to_response(
'events/calendar_events.html',
{'events': events, 'calendar': calendar, 'calendars': calendars},
context_instance=RequestContext(request)
)
def event_detail(request, calendar_slug=None, event_slug=None):
calendars = Calendar.objects.all()
try:
event = Event.objects.get(slug=event_slug)
except Event.DoesNotExist:
raise Http404
return render_to_response(
'events/event_detail.html',
{'event': event, 'calendars': calendars},
context_instance=RequestContext(request)
)
<commit_msg>Add current_app to context for django-cms v3 support<commit_after>
|
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.core.urlresolvers import resolve
import datetime
from django.http import Http404
from events.models import Event
from events.models import Calendar
def index(request):
calendars = Calendar.objects.all()
current_app = resolve(request.path).namespace
context = RequestContext(request, current_app=current_app)
return render_to_response(
'events/index.html',
{'calendars': calendars},
context_instance=context
)
def calendar_events(request, calendar_slug=None):
events = Event.objects.filter(calendar__slug=calendar_slug).filter(end__gte=datetime.date.today()).order_by('start')
calendars = Calendar.objects.all()
calendar = calendars.get(slug=calendar_slug)
if not calendar:
raise Http404
current_app = resolve(request.path).namespace
context = RequestContext(request, current_app=current_app)
return render_to_response(
'events/calendar_events.html',
{'events': events, 'calendar': calendar, 'calendars': calendars},
context_instance=context
)
def event_detail(request, calendar_slug=None, event_slug=None):
calendars = Calendar.objects.all()
try:
event = Event.objects.get(slug=event_slug)
except Event.DoesNotExist:
raise Http404
current_app = resolve(request.path).namespace
context = RequestContext(request, current_app=current_app)
return render_to_response(
'events/event_detail.html',
{'event': event, 'calendars': calendars},
context_instance=context
)
|
from django.shortcuts import render_to_response
from django.template import RequestContext
import datetime
from django.http import Http404
from events.models import Event
from events.models import Calendar
def index(request):
calendars = Calendar.objects.all()
return render_to_response(
'events/index.html',
{'calendars': calendars},
context_instance=RequestContext(request)
)
def calendar_events(request, calendar_slug=None):
events = Event.objects.filter(calendar__slug=calendar_slug).filter(end__gte=datetime.date.today()).order_by('start')
calendars = Calendar.objects.all()
calendar = calendars.get(slug=calendar_slug)
if not calendar:
raise Http404
return render_to_response(
'events/calendar_events.html',
{'events': events, 'calendar': calendar, 'calendars': calendars},
context_instance=RequestContext(request)
)
def event_detail(request, calendar_slug=None, event_slug=None):
calendars = Calendar.objects.all()
try:
event = Event.objects.get(slug=event_slug)
except Event.DoesNotExist:
raise Http404
return render_to_response(
'events/event_detail.html',
{'event': event, 'calendars': calendars},
context_instance=RequestContext(request)
)
Add current_app to context for django-cms v3 supportfrom django.shortcuts import render_to_response
from django.template import RequestContext
from django.core.urlresolvers import resolve
import datetime
from django.http import Http404
from events.models import Event
from events.models import Calendar
def index(request):
calendars = Calendar.objects.all()
current_app = resolve(request.path).namespace
context = RequestContext(request, current_app=current_app)
return render_to_response(
'events/index.html',
{'calendars': calendars},
context_instance=context
)
def calendar_events(request, calendar_slug=None):
events = Event.objects.filter(calendar__slug=calendar_slug).filter(end__gte=datetime.date.today()).order_by('start')
calendars = Calendar.objects.all()
calendar = calendars.get(slug=calendar_slug)
if not calendar:
raise Http404
current_app = resolve(request.path).namespace
context = RequestContext(request, current_app=current_app)
return render_to_response(
'events/calendar_events.html',
{'events': events, 'calendar': calendar, 'calendars': calendars},
context_instance=context
)
def event_detail(request, calendar_slug=None, event_slug=None):
calendars = Calendar.objects.all()
try:
event = Event.objects.get(slug=event_slug)
except Event.DoesNotExist:
raise Http404
current_app = resolve(request.path).namespace
context = RequestContext(request, current_app=current_app)
return render_to_response(
'events/event_detail.html',
{'event': event, 'calendars': calendars},
context_instance=context
)
|
<commit_before>from django.shortcuts import render_to_response
from django.template import RequestContext
import datetime
from django.http import Http404
from events.models import Event
from events.models import Calendar
def index(request):
calendars = Calendar.objects.all()
return render_to_response(
'events/index.html',
{'calendars': calendars},
context_instance=RequestContext(request)
)
def calendar_events(request, calendar_slug=None):
events = Event.objects.filter(calendar__slug=calendar_slug).filter(end__gte=datetime.date.today()).order_by('start')
calendars = Calendar.objects.all()
calendar = calendars.get(slug=calendar_slug)
if not calendar:
raise Http404
return render_to_response(
'events/calendar_events.html',
{'events': events, 'calendar': calendar, 'calendars': calendars},
context_instance=RequestContext(request)
)
def event_detail(request, calendar_slug=None, event_slug=None):
calendars = Calendar.objects.all()
try:
event = Event.objects.get(slug=event_slug)
except Event.DoesNotExist:
raise Http404
return render_to_response(
'events/event_detail.html',
{'event': event, 'calendars': calendars},
context_instance=RequestContext(request)
)
<commit_msg>Add current_app to context for django-cms v3 support<commit_after>from django.shortcuts import render_to_response
from django.template import RequestContext
from django.core.urlresolvers import resolve
import datetime
from django.http import Http404
from events.models import Event
from events.models import Calendar
def index(request):
calendars = Calendar.objects.all()
current_app = resolve(request.path).namespace
context = RequestContext(request, current_app=current_app)
return render_to_response(
'events/index.html',
{'calendars': calendars},
context_instance=context
)
def calendar_events(request, calendar_slug=None):
events = Event.objects.filter(calendar__slug=calendar_slug).filter(end__gte=datetime.date.today()).order_by('start')
calendars = Calendar.objects.all()
calendar = calendars.get(slug=calendar_slug)
if not calendar:
raise Http404
current_app = resolve(request.path).namespace
context = RequestContext(request, current_app=current_app)
return render_to_response(
'events/calendar_events.html',
{'events': events, 'calendar': calendar, 'calendars': calendars},
context_instance=context
)
def event_detail(request, calendar_slug=None, event_slug=None):
calendars = Calendar.objects.all()
try:
event = Event.objects.get(slug=event_slug)
except Event.DoesNotExist:
raise Http404
current_app = resolve(request.path).namespace
context = RequestContext(request, current_app=current_app)
return render_to_response(
'events/event_detail.html',
{'event': event, 'calendars': calendars},
context_instance=context
)
|
57b35933e3accc3013b2ba417ad78340c10ed807
|
lighty/wsgi/commands.py
|
lighty/wsgi/commands.py
|
from wsgiref.simple_server import make_server
from . import WSGIApplication
def run_server(settings):
application = WSGIApplication(settings)
httpd = make_server('', 8000, application)
print("Serving on port 8000...")
httpd.serve_forever()
|
def run_server(settings):
'''Run application using wsgiref test server
'''
from wsgiref.simple_server import make_server
from . import WSGIApplication
application = WSGIApplication(settings)
httpd = make_server('', 8000, application)
print("Serving on port 8000...")
httpd.serve_forever()
def run_tornado(settings):
'''Run application using Tornade Web framework WSGI server
'''
from tornado import ioloop, httpserver, wsgi
from . import WSGIApplication
application = WSGIApplication(settings)
container = wsgi.WSGIContainer(application)
http_server = httpserver.HTTPServer(container)
http_server.listen(8000)
ioloop.IOLoop.instance().start()
|
Add base tornado server support
|
Add base tornado server support
|
Python
|
bsd-3-clause
|
GrAndSE/lighty
|
from wsgiref.simple_server import make_server
from . import WSGIApplication
def run_server(settings):
application = WSGIApplication(settings)
httpd = make_server('', 8000, application)
print("Serving on port 8000...")
httpd.serve_forever()
Add base tornado server support
|
def run_server(settings):
'''Run application using wsgiref test server
'''
from wsgiref.simple_server import make_server
from . import WSGIApplication
application = WSGIApplication(settings)
httpd = make_server('', 8000, application)
print("Serving on port 8000...")
httpd.serve_forever()
def run_tornado(settings):
'''Run application using Tornade Web framework WSGI server
'''
from tornado import ioloop, httpserver, wsgi
from . import WSGIApplication
application = WSGIApplication(settings)
container = wsgi.WSGIContainer(application)
http_server = httpserver.HTTPServer(container)
http_server.listen(8000)
ioloop.IOLoop.instance().start()
|
<commit_before>from wsgiref.simple_server import make_server
from . import WSGIApplication
def run_server(settings):
application = WSGIApplication(settings)
httpd = make_server('', 8000, application)
print("Serving on port 8000...")
httpd.serve_forever()
<commit_msg>Add base tornado server support<commit_after>
|
def run_server(settings):
'''Run application using wsgiref test server
'''
from wsgiref.simple_server import make_server
from . import WSGIApplication
application = WSGIApplication(settings)
httpd = make_server('', 8000, application)
print("Serving on port 8000...")
httpd.serve_forever()
def run_tornado(settings):
'''Run application using Tornade Web framework WSGI server
'''
from tornado import ioloop, httpserver, wsgi
from . import WSGIApplication
application = WSGIApplication(settings)
container = wsgi.WSGIContainer(application)
http_server = httpserver.HTTPServer(container)
http_server.listen(8000)
ioloop.IOLoop.instance().start()
|
from wsgiref.simple_server import make_server
from . import WSGIApplication
def run_server(settings):
application = WSGIApplication(settings)
httpd = make_server('', 8000, application)
print("Serving on port 8000...")
httpd.serve_forever()
Add base tornado server support
def run_server(settings):
'''Run application using wsgiref test server
'''
from wsgiref.simple_server import make_server
from . import WSGIApplication
application = WSGIApplication(settings)
httpd = make_server('', 8000, application)
print("Serving on port 8000...")
httpd.serve_forever()
def run_tornado(settings):
'''Run application using Tornade Web framework WSGI server
'''
from tornado import ioloop, httpserver, wsgi
from . import WSGIApplication
application = WSGIApplication(settings)
container = wsgi.WSGIContainer(application)
http_server = httpserver.HTTPServer(container)
http_server.listen(8000)
ioloop.IOLoop.instance().start()
|
<commit_before>from wsgiref.simple_server import make_server
from . import WSGIApplication
def run_server(settings):
application = WSGIApplication(settings)
httpd = make_server('', 8000, application)
print("Serving on port 8000...")
httpd.serve_forever()
<commit_msg>Add base tornado server support<commit_after>
def run_server(settings):
'''Run application using wsgiref test server
'''
from wsgiref.simple_server import make_server
from . import WSGIApplication
application = WSGIApplication(settings)
httpd = make_server('', 8000, application)
print("Serving on port 8000...")
httpd.serve_forever()
def run_tornado(settings):
'''Run application using Tornade Web framework WSGI server
'''
from tornado import ioloop, httpserver, wsgi
from . import WSGIApplication
application = WSGIApplication(settings)
container = wsgi.WSGIContainer(application)
http_server = httpserver.HTTPServer(container)
http_server.listen(8000)
ioloop.IOLoop.instance().start()
|
17b9640949874c4bd547ae09607d5424ea1d2d12
|
partner_communication_switzerland/migrations/12.0.1.1.3/post-migration.py
|
partner_communication_switzerland/migrations/12.0.1.1.3/post-migration.py
|
import logging
from openupgradelib import openupgrade
_logger = logging.getLogger(__name__)
@openupgrade.migrate(use_env=True)
def migrate(env, installed_version):
if not installed_version:
return
# Generate all missing biennials since bug introduced by CS-428
env.cr.execute("""
select c.id
from compassion_child_pictures p join compassion_child c on p.child_id = c.id
where p.create_date > '2021-08-02 12:00:00' and c.sponsor_id is not null
order by p.create_date asc
""")
children = env["compassion.child"].browse([r[0] for r in env.cr.fetchall()])
_logger.info("Generating missing biennials for %s children", len(children))
comm_obj = env["partner.communication.job"]
count = 0
for child in children:
count += 1
existing = comm_obj.search_count([
("config_id", "=", 36), # Biennial config
("partner_id", "=", child.sponsor_id.id),
("date", ">=", "2021-08-02"),
("object_ids", "like", child.id)
])
if not existing:
# This will trigger the biennial communication and the order photo
child.new_photo()
_logger.info("... %s / %s done", count, len(children))
env.cr.commit()
|
import logging
from openupgradelib import openupgrade
_logger = logging.getLogger(__name__)
@openupgrade.migrate(use_env=True)
def migrate(env, installed_version):
if not installed_version:
return
# Generate all missing biennials since bug introduced by CS-428
env.cr.execute("""
select c.id
from compassion_child_pictures p join compassion_child c on p.child_id = c.id
where p.create_date > '2021-08-02 12:00:00' and c.sponsor_id is not null
order by p.create_date asc
""")
children = env["compassion.child"].browse([r[0] for r in env.cr.fetchall()])
_logger.info("Generating missing biennials for %s children", len(children))
comm_obj = env["partner.communication.job"]
count = 0
for child in children:
count += 1
existing = comm_obj.search_count([
("config_id", "=", 36), # Biennial config
("partner_id", "=", child.sponsor_id.id),
("date", ">=", "2021-08-02"),
("object_ids", "like", child.id)
])
if not existing:
# This will trigger the biennial communication and the order photo
child.new_photo()
_logger.info("... %s / %s done", count, len(children))
|
Remove commit after successful migration
|
Remove commit after successful migration
|
Python
|
agpl-3.0
|
CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland
|
import logging
from openupgradelib import openupgrade
_logger = logging.getLogger(__name__)
@openupgrade.migrate(use_env=True)
def migrate(env, installed_version):
if not installed_version:
return
# Generate all missing biennials since bug introduced by CS-428
env.cr.execute("""
select c.id
from compassion_child_pictures p join compassion_child c on p.child_id = c.id
where p.create_date > '2021-08-02 12:00:00' and c.sponsor_id is not null
order by p.create_date asc
""")
children = env["compassion.child"].browse([r[0] for r in env.cr.fetchall()])
_logger.info("Generating missing biennials for %s children", len(children))
comm_obj = env["partner.communication.job"]
count = 0
for child in children:
count += 1
existing = comm_obj.search_count([
("config_id", "=", 36), # Biennial config
("partner_id", "=", child.sponsor_id.id),
("date", ">=", "2021-08-02"),
("object_ids", "like", child.id)
])
if not existing:
# This will trigger the biennial communication and the order photo
child.new_photo()
_logger.info("... %s / %s done", count, len(children))
env.cr.commit()
Remove commit after successful migration
|
import logging
from openupgradelib import openupgrade
_logger = logging.getLogger(__name__)
@openupgrade.migrate(use_env=True)
def migrate(env, installed_version):
if not installed_version:
return
# Generate all missing biennials since bug introduced by CS-428
env.cr.execute("""
select c.id
from compassion_child_pictures p join compassion_child c on p.child_id = c.id
where p.create_date > '2021-08-02 12:00:00' and c.sponsor_id is not null
order by p.create_date asc
""")
children = env["compassion.child"].browse([r[0] for r in env.cr.fetchall()])
_logger.info("Generating missing biennials for %s children", len(children))
comm_obj = env["partner.communication.job"]
count = 0
for child in children:
count += 1
existing = comm_obj.search_count([
("config_id", "=", 36), # Biennial config
("partner_id", "=", child.sponsor_id.id),
("date", ">=", "2021-08-02"),
("object_ids", "like", child.id)
])
if not existing:
# This will trigger the biennial communication and the order photo
child.new_photo()
_logger.info("... %s / %s done", count, len(children))
|
<commit_before>import logging
from openupgradelib import openupgrade
_logger = logging.getLogger(__name__)
@openupgrade.migrate(use_env=True)
def migrate(env, installed_version):
if not installed_version:
return
# Generate all missing biennials since bug introduced by CS-428
env.cr.execute("""
select c.id
from compassion_child_pictures p join compassion_child c on p.child_id = c.id
where p.create_date > '2021-08-02 12:00:00' and c.sponsor_id is not null
order by p.create_date asc
""")
children = env["compassion.child"].browse([r[0] for r in env.cr.fetchall()])
_logger.info("Generating missing biennials for %s children", len(children))
comm_obj = env["partner.communication.job"]
count = 0
for child in children:
count += 1
existing = comm_obj.search_count([
("config_id", "=", 36), # Biennial config
("partner_id", "=", child.sponsor_id.id),
("date", ">=", "2021-08-02"),
("object_ids", "like", child.id)
])
if not existing:
# This will trigger the biennial communication and the order photo
child.new_photo()
_logger.info("... %s / %s done", count, len(children))
env.cr.commit()
<commit_msg>Remove commit after successful migration<commit_after>
|
import logging
from openupgradelib import openupgrade
_logger = logging.getLogger(__name__)
@openupgrade.migrate(use_env=True)
def migrate(env, installed_version):
if not installed_version:
return
# Generate all missing biennials since bug introduced by CS-428
env.cr.execute("""
select c.id
from compassion_child_pictures p join compassion_child c on p.child_id = c.id
where p.create_date > '2021-08-02 12:00:00' and c.sponsor_id is not null
order by p.create_date asc
""")
children = env["compassion.child"].browse([r[0] for r in env.cr.fetchall()])
_logger.info("Generating missing biennials for %s children", len(children))
comm_obj = env["partner.communication.job"]
count = 0
for child in children:
count += 1
existing = comm_obj.search_count([
("config_id", "=", 36), # Biennial config
("partner_id", "=", child.sponsor_id.id),
("date", ">=", "2021-08-02"),
("object_ids", "like", child.id)
])
if not existing:
# This will trigger the biennial communication and the order photo
child.new_photo()
_logger.info("... %s / %s done", count, len(children))
|
import logging
from openupgradelib import openupgrade
_logger = logging.getLogger(__name__)
@openupgrade.migrate(use_env=True)
def migrate(env, installed_version):
if not installed_version:
return
# Generate all missing biennials since bug introduced by CS-428
env.cr.execute("""
select c.id
from compassion_child_pictures p join compassion_child c on p.child_id = c.id
where p.create_date > '2021-08-02 12:00:00' and c.sponsor_id is not null
order by p.create_date asc
""")
children = env["compassion.child"].browse([r[0] for r in env.cr.fetchall()])
_logger.info("Generating missing biennials for %s children", len(children))
comm_obj = env["partner.communication.job"]
count = 0
for child in children:
count += 1
existing = comm_obj.search_count([
("config_id", "=", 36), # Biennial config
("partner_id", "=", child.sponsor_id.id),
("date", ">=", "2021-08-02"),
("object_ids", "like", child.id)
])
if not existing:
# This will trigger the biennial communication and the order photo
child.new_photo()
_logger.info("... %s / %s done", count, len(children))
env.cr.commit()
Remove commit after successful migrationimport logging
from openupgradelib import openupgrade
_logger = logging.getLogger(__name__)
@openupgrade.migrate(use_env=True)
def migrate(env, installed_version):
if not installed_version:
return
# Generate all missing biennials since bug introduced by CS-428
env.cr.execute("""
select c.id
from compassion_child_pictures p join compassion_child c on p.child_id = c.id
where p.create_date > '2021-08-02 12:00:00' and c.sponsor_id is not null
order by p.create_date asc
""")
children = env["compassion.child"].browse([r[0] for r in env.cr.fetchall()])
_logger.info("Generating missing biennials for %s children", len(children))
comm_obj = env["partner.communication.job"]
count = 0
for child in children:
count += 1
existing = comm_obj.search_count([
("config_id", "=", 36), # Biennial config
("partner_id", "=", child.sponsor_id.id),
("date", ">=", "2021-08-02"),
("object_ids", "like", child.id)
])
if not existing:
# This will trigger the biennial communication and the order photo
child.new_photo()
_logger.info("... %s / %s done", count, len(children))
|
<commit_before>import logging
from openupgradelib import openupgrade
_logger = logging.getLogger(__name__)
@openupgrade.migrate(use_env=True)
def migrate(env, installed_version):
if not installed_version:
return
# Generate all missing biennials since bug introduced by CS-428
env.cr.execute("""
select c.id
from compassion_child_pictures p join compassion_child c on p.child_id = c.id
where p.create_date > '2021-08-02 12:00:00' and c.sponsor_id is not null
order by p.create_date asc
""")
children = env["compassion.child"].browse([r[0] for r in env.cr.fetchall()])
_logger.info("Generating missing biennials for %s children", len(children))
comm_obj = env["partner.communication.job"]
count = 0
for child in children:
count += 1
existing = comm_obj.search_count([
("config_id", "=", 36), # Biennial config
("partner_id", "=", child.sponsor_id.id),
("date", ">=", "2021-08-02"),
("object_ids", "like", child.id)
])
if not existing:
# This will trigger the biennial communication and the order photo
child.new_photo()
_logger.info("... %s / %s done", count, len(children))
env.cr.commit()
<commit_msg>Remove commit after successful migration<commit_after>import logging
from openupgradelib import openupgrade
_logger = logging.getLogger(__name__)
@openupgrade.migrate(use_env=True)
def migrate(env, installed_version):
if not installed_version:
return
# Generate all missing biennials since bug introduced by CS-428
env.cr.execute("""
select c.id
from compassion_child_pictures p join compassion_child c on p.child_id = c.id
where p.create_date > '2021-08-02 12:00:00' and c.sponsor_id is not null
order by p.create_date asc
""")
children = env["compassion.child"].browse([r[0] for r in env.cr.fetchall()])
_logger.info("Generating missing biennials for %s children", len(children))
comm_obj = env["partner.communication.job"]
count = 0
for child in children:
count += 1
existing = comm_obj.search_count([
("config_id", "=", 36), # Biennial config
("partner_id", "=", child.sponsor_id.id),
("date", ">=", "2021-08-02"),
("object_ids", "like", child.id)
])
if not existing:
# This will trigger the biennial communication and the order photo
child.new_photo()
_logger.info("... %s / %s done", count, len(children))
|
964ab07575b3f08560356a0f2a0b7950febbb4c7
|
myapp/tests_settings.py
|
myapp/tests_settings.py
|
DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
|
DEBUG = False
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
|
Disable DEBUG on test settings
|
Disable DEBUG on test settings
|
Python
|
bsd-3-clause
|
ikcam/django-skeleton,ikcam/django-skeleton,ikcam/django-skeleton,ikcam/django-skeleton
|
DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
Disable DEBUG on test settings
|
DEBUG = False
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
|
<commit_before>DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
<commit_msg>Disable DEBUG on test settings<commit_after>
|
DEBUG = False
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
|
DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
Disable DEBUG on test settingsDEBUG = False
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
|
<commit_before>DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
<commit_msg>Disable DEBUG on test settings<commit_after>DEBUG = False
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
|
b345f3667baaa6fa80bfa8eac6c498af2c3037be
|
tests/log_tests.py
|
tests/log_tests.py
|
try:
import unittest2 as unittest
except ImportError:
import unittest
from mock import patch
from lighthouse import log
class LogTests(unittest.TestCase):
@patch("lighthouse.log.CLIHandler")
@patch("lighthouse.log.logging")
def test_setup_adds_handler_to_root_logger(self, mock_logging, CLIHandler):
log.setup()
mock_logging.getLogger.assert_called_once_with()
mock_logging.getLogger.return_value.addHandler.assert_called_once_with(
CLIHandler()
)
|
try:
import unittest2 as unittest
except ImportError:
import unittest
from mock import patch
from lighthouse import log
class LogTests(unittest.TestCase):
@patch("lighthouse.log.CLIHandler")
@patch("lighthouse.log.logging")
def test_setup_adds_handler_to_root_logger(self, mock_logging, CLIHandler):
log.setup("foobar")
mock_logging.getLogger.assert_called_once_with()
mock_logging.getLogger.return_value.addHandler.assert_called_once_with(
CLIHandler()
)
|
Fix test broken in previous patch.
|
Fix test broken in previous patch.
|
Python
|
apache-2.0
|
wglass/lighthouse
|
try:
import unittest2 as unittest
except ImportError:
import unittest
from mock import patch
from lighthouse import log
class LogTests(unittest.TestCase):
@patch("lighthouse.log.CLIHandler")
@patch("lighthouse.log.logging")
def test_setup_adds_handler_to_root_logger(self, mock_logging, CLIHandler):
log.setup()
mock_logging.getLogger.assert_called_once_with()
mock_logging.getLogger.return_value.addHandler.assert_called_once_with(
CLIHandler()
)
Fix test broken in previous patch.
|
try:
import unittest2 as unittest
except ImportError:
import unittest
from mock import patch
from lighthouse import log
class LogTests(unittest.TestCase):
@patch("lighthouse.log.CLIHandler")
@patch("lighthouse.log.logging")
def test_setup_adds_handler_to_root_logger(self, mock_logging, CLIHandler):
log.setup("foobar")
mock_logging.getLogger.assert_called_once_with()
mock_logging.getLogger.return_value.addHandler.assert_called_once_with(
CLIHandler()
)
|
<commit_before>try:
import unittest2 as unittest
except ImportError:
import unittest
from mock import patch
from lighthouse import log
class LogTests(unittest.TestCase):
@patch("lighthouse.log.CLIHandler")
@patch("lighthouse.log.logging")
def test_setup_adds_handler_to_root_logger(self, mock_logging, CLIHandler):
log.setup()
mock_logging.getLogger.assert_called_once_with()
mock_logging.getLogger.return_value.addHandler.assert_called_once_with(
CLIHandler()
)
<commit_msg>Fix test broken in previous patch.<commit_after>
|
try:
import unittest2 as unittest
except ImportError:
import unittest
from mock import patch
from lighthouse import log
class LogTests(unittest.TestCase):
@patch("lighthouse.log.CLIHandler")
@patch("lighthouse.log.logging")
def test_setup_adds_handler_to_root_logger(self, mock_logging, CLIHandler):
log.setup("foobar")
mock_logging.getLogger.assert_called_once_with()
mock_logging.getLogger.return_value.addHandler.assert_called_once_with(
CLIHandler()
)
|
try:
import unittest2 as unittest
except ImportError:
import unittest
from mock import patch
from lighthouse import log
class LogTests(unittest.TestCase):
@patch("lighthouse.log.CLIHandler")
@patch("lighthouse.log.logging")
def test_setup_adds_handler_to_root_logger(self, mock_logging, CLIHandler):
log.setup()
mock_logging.getLogger.assert_called_once_with()
mock_logging.getLogger.return_value.addHandler.assert_called_once_with(
CLIHandler()
)
Fix test broken in previous patch.try:
import unittest2 as unittest
except ImportError:
import unittest
from mock import patch
from lighthouse import log
class LogTests(unittest.TestCase):
@patch("lighthouse.log.CLIHandler")
@patch("lighthouse.log.logging")
def test_setup_adds_handler_to_root_logger(self, mock_logging, CLIHandler):
log.setup("foobar")
mock_logging.getLogger.assert_called_once_with()
mock_logging.getLogger.return_value.addHandler.assert_called_once_with(
CLIHandler()
)
|
<commit_before>try:
import unittest2 as unittest
except ImportError:
import unittest
from mock import patch
from lighthouse import log
class LogTests(unittest.TestCase):
@patch("lighthouse.log.CLIHandler")
@patch("lighthouse.log.logging")
def test_setup_adds_handler_to_root_logger(self, mock_logging, CLIHandler):
log.setup()
mock_logging.getLogger.assert_called_once_with()
mock_logging.getLogger.return_value.addHandler.assert_called_once_with(
CLIHandler()
)
<commit_msg>Fix test broken in previous patch.<commit_after>try:
import unittest2 as unittest
except ImportError:
import unittest
from mock import patch
from lighthouse import log
class LogTests(unittest.TestCase):
@patch("lighthouse.log.CLIHandler")
@patch("lighthouse.log.logging")
def test_setup_adds_handler_to_root_logger(self, mock_logging, CLIHandler):
log.setup("foobar")
mock_logging.getLogger.assert_called_once_with()
mock_logging.getLogger.return_value.addHandler.assert_called_once_with(
CLIHandler()
)
|
0356392b2933aa7c02f89bdf588a4ec0482db4a8
|
tests/main_test.py
|
tests/main_test.py
|
#!/usr/bin/env python3
from libpals.util import xor_find_singlechar_key
def test_xor_find_singlechar_key():
input = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736'
ciphertext = bytes.fromhex(input)
result = xor_find_singlechar_key(ciphertext)
assert result['key'] == 88
assert result['plaintext'] == b"Cooking MC's like a pound of bacon"
|
#!/usr/bin/env python3
from libpals.util import xor_find_singlechar_key, hamming_distance
def test_xor_find_singlechar_key():
input = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736'
ciphertext = bytes.fromhex(input)
result = xor_find_singlechar_key(ciphertext)
assert result['key'] == 88
assert result['plaintext'] == b"Cooking MC's like a pound of bacon"
def test_hamming_distance():
assert hamming_distance(b"this is a test", b"wokka wokka!!!") == 37
|
Add a test for hamming_distance()
|
Add a test for hamming_distance()
|
Python
|
bsd-2-clause
|
cpach/cryptopals-python3
|
#!/usr/bin/env python3
from libpals.util import xor_find_singlechar_key
def test_xor_find_singlechar_key():
input = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736'
ciphertext = bytes.fromhex(input)
result = xor_find_singlechar_key(ciphertext)
assert result['key'] == 88
assert result['plaintext'] == b"Cooking MC's like a pound of bacon"
Add a test for hamming_distance()
|
#!/usr/bin/env python3
from libpals.util import xor_find_singlechar_key, hamming_distance
def test_xor_find_singlechar_key():
input = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736'
ciphertext = bytes.fromhex(input)
result = xor_find_singlechar_key(ciphertext)
assert result['key'] == 88
assert result['plaintext'] == b"Cooking MC's like a pound of bacon"
def test_hamming_distance():
assert hamming_distance(b"this is a test", b"wokka wokka!!!") == 37
|
<commit_before>#!/usr/bin/env python3
from libpals.util import xor_find_singlechar_key
def test_xor_find_singlechar_key():
input = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736'
ciphertext = bytes.fromhex(input)
result = xor_find_singlechar_key(ciphertext)
assert result['key'] == 88
assert result['plaintext'] == b"Cooking MC's like a pound of bacon"
<commit_msg>Add a test for hamming_distance()<commit_after>
|
#!/usr/bin/env python3
from libpals.util import xor_find_singlechar_key, hamming_distance
def test_xor_find_singlechar_key():
input = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736'
ciphertext = bytes.fromhex(input)
result = xor_find_singlechar_key(ciphertext)
assert result['key'] == 88
assert result['plaintext'] == b"Cooking MC's like a pound of bacon"
def test_hamming_distance():
assert hamming_distance(b"this is a test", b"wokka wokka!!!") == 37
|
#!/usr/bin/env python3
from libpals.util import xor_find_singlechar_key
def test_xor_find_singlechar_key():
input = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736'
ciphertext = bytes.fromhex(input)
result = xor_find_singlechar_key(ciphertext)
assert result['key'] == 88
assert result['plaintext'] == b"Cooking MC's like a pound of bacon"
Add a test for hamming_distance()#!/usr/bin/env python3
from libpals.util import xor_find_singlechar_key, hamming_distance
def test_xor_find_singlechar_key():
input = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736'
ciphertext = bytes.fromhex(input)
result = xor_find_singlechar_key(ciphertext)
assert result['key'] == 88
assert result['plaintext'] == b"Cooking MC's like a pound of bacon"
def test_hamming_distance():
assert hamming_distance(b"this is a test", b"wokka wokka!!!") == 37
|
<commit_before>#!/usr/bin/env python3
from libpals.util import xor_find_singlechar_key
def test_xor_find_singlechar_key():
input = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736'
ciphertext = bytes.fromhex(input)
result = xor_find_singlechar_key(ciphertext)
assert result['key'] == 88
assert result['plaintext'] == b"Cooking MC's like a pound of bacon"
<commit_msg>Add a test for hamming_distance()<commit_after>#!/usr/bin/env python3
from libpals.util import xor_find_singlechar_key, hamming_distance
def test_xor_find_singlechar_key():
input = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736'
ciphertext = bytes.fromhex(input)
result = xor_find_singlechar_key(ciphertext)
assert result['key'] == 88
assert result['plaintext'] == b"Cooking MC's like a pound of bacon"
def test_hamming_distance():
assert hamming_distance(b"this is a test", b"wokka wokka!!!") == 37
|
bca6a06f6035e7a10c9726ef40e7aed4b4b7ee34
|
tests/test_init.py
|
tests/test_init.py
|
# archivebox init
# archivebox add
import os
import subprocess
from pathlib import Path
import json
from .fixtures import *
def test_init(tmp_path, process):
assert "Initializing a new ArchiveBox collection in this folder..." in process.stdout.decode("utf-8")
def test_update(tmp_path, process):
os.chdir(tmp_path)
update_process = subprocess.run(['archivebox', 'init'], capture_output=True)
assert "Updating existing ArchiveBox collection in this folder" in update_process.stdout.decode("utf-8")
def test_add_link(tmp_path, process):
os.chdir(tmp_path)
add_process = subprocess.run(['archivebox', 'add', 'http://example.com'], capture_output=True)
archived_item_path = list(tmp_path.glob('archive/**/*'))[0]
assert "index.json" in [x.name for x in archived_item_path.iterdir()]
with open(archived_item_path / "index.json", "r") as f:
output_json = json.load(f)
assert "IANA — IANA-managed Reserved Domains" == output_json['history']['title'][0]['output']
with open(tmp_path / "index.html", "r") as f:
output_html = f.read()
assert "IANA — IANA-managed Reserved Domains" in output_html
|
# archivebox init
# archivebox add
import os
import subprocess
from pathlib import Path
import json
from .fixtures import *
def test_init(tmp_path, process):
assert "Initializing a new ArchiveBox collection in this folder..." in process.stdout.decode("utf-8")
def test_update(tmp_path, process):
os.chdir(tmp_path)
update_process = subprocess.run(['archivebox', 'init'], capture_output=True)
assert "Updating existing ArchiveBox collection in this folder" in update_process.stdout.decode("utf-8")
def test_add_link(tmp_path, process):
os.chdir(tmp_path)
add_process = subprocess.run(['archivebox', 'add', 'http://example.com'], capture_output=True)
archived_item_path = list(tmp_path.glob('archive/**/*'))[0]
assert "index.json" in [x.name for x in archived_item_path.iterdir()]
with open(archived_item_path / "index.json", "r") as f:
output_json = json.load(f)
assert "Example Domain" == output_json['history']['title'][0]['output']
with open(tmp_path / "index.html", "r") as f:
output_html = f.read()
assert "Example Domain" in output_html
|
Fix test to reflect new API changes
|
test: Fix test to reflect new API changes
|
Python
|
mit
|
pirate/bookmark-archiver,pirate/bookmark-archiver,pirate/bookmark-archiver
|
# archivebox init
# archivebox add
import os
import subprocess
from pathlib import Path
import json
from .fixtures import *
def test_init(tmp_path, process):
assert "Initializing a new ArchiveBox collection in this folder..." in process.stdout.decode("utf-8")
def test_update(tmp_path, process):
os.chdir(tmp_path)
update_process = subprocess.run(['archivebox', 'init'], capture_output=True)
assert "Updating existing ArchiveBox collection in this folder" in update_process.stdout.decode("utf-8")
def test_add_link(tmp_path, process):
os.chdir(tmp_path)
add_process = subprocess.run(['archivebox', 'add', 'http://example.com'], capture_output=True)
archived_item_path = list(tmp_path.glob('archive/**/*'))[0]
assert "index.json" in [x.name for x in archived_item_path.iterdir()]
with open(archived_item_path / "index.json", "r") as f:
output_json = json.load(f)
assert "IANA — IANA-managed Reserved Domains" == output_json['history']['title'][0]['output']
with open(tmp_path / "index.html", "r") as f:
output_html = f.read()
assert "IANA — IANA-managed Reserved Domains" in output_html
test: Fix test to reflect new API changes
|
# archivebox init
# archivebox add
import os
import subprocess
from pathlib import Path
import json
from .fixtures import *
def test_init(tmp_path, process):
assert "Initializing a new ArchiveBox collection in this folder..." in process.stdout.decode("utf-8")
def test_update(tmp_path, process):
os.chdir(tmp_path)
update_process = subprocess.run(['archivebox', 'init'], capture_output=True)
assert "Updating existing ArchiveBox collection in this folder" in update_process.stdout.decode("utf-8")
def test_add_link(tmp_path, process):
os.chdir(tmp_path)
add_process = subprocess.run(['archivebox', 'add', 'http://example.com'], capture_output=True)
archived_item_path = list(tmp_path.glob('archive/**/*'))[0]
assert "index.json" in [x.name for x in archived_item_path.iterdir()]
with open(archived_item_path / "index.json", "r") as f:
output_json = json.load(f)
assert "Example Domain" == output_json['history']['title'][0]['output']
with open(tmp_path / "index.html", "r") as f:
output_html = f.read()
assert "Example Domain" in output_html
|
<commit_before># archivebox init
# archivebox add
import os
import subprocess
from pathlib import Path
import json
from .fixtures import *
def test_init(tmp_path, process):
assert "Initializing a new ArchiveBox collection in this folder..." in process.stdout.decode("utf-8")
def test_update(tmp_path, process):
os.chdir(tmp_path)
update_process = subprocess.run(['archivebox', 'init'], capture_output=True)
assert "Updating existing ArchiveBox collection in this folder" in update_process.stdout.decode("utf-8")
def test_add_link(tmp_path, process):
os.chdir(tmp_path)
add_process = subprocess.run(['archivebox', 'add', 'http://example.com'], capture_output=True)
archived_item_path = list(tmp_path.glob('archive/**/*'))[0]
assert "index.json" in [x.name for x in archived_item_path.iterdir()]
with open(archived_item_path / "index.json", "r") as f:
output_json = json.load(f)
assert "IANA — IANA-managed Reserved Domains" == output_json['history']['title'][0]['output']
with open(tmp_path / "index.html", "r") as f:
output_html = f.read()
assert "IANA — IANA-managed Reserved Domains" in output_html
<commit_msg>test: Fix test to reflect new API changes<commit_after>
|
# archivebox init
# archivebox add
import os
import subprocess
from pathlib import Path
import json
from .fixtures import *
def test_init(tmp_path, process):
assert "Initializing a new ArchiveBox collection in this folder..." in process.stdout.decode("utf-8")
def test_update(tmp_path, process):
os.chdir(tmp_path)
update_process = subprocess.run(['archivebox', 'init'], capture_output=True)
assert "Updating existing ArchiveBox collection in this folder" in update_process.stdout.decode("utf-8")
def test_add_link(tmp_path, process):
os.chdir(tmp_path)
add_process = subprocess.run(['archivebox', 'add', 'http://example.com'], capture_output=True)
archived_item_path = list(tmp_path.glob('archive/**/*'))[0]
assert "index.json" in [x.name for x in archived_item_path.iterdir()]
with open(archived_item_path / "index.json", "r") as f:
output_json = json.load(f)
assert "Example Domain" == output_json['history']['title'][0]['output']
with open(tmp_path / "index.html", "r") as f:
output_html = f.read()
assert "Example Domain" in output_html
|
# archivebox init
# archivebox add
import os
import subprocess
from pathlib import Path
import json
from .fixtures import *
def test_init(tmp_path, process):
assert "Initializing a new ArchiveBox collection in this folder..." in process.stdout.decode("utf-8")
def test_update(tmp_path, process):
os.chdir(tmp_path)
update_process = subprocess.run(['archivebox', 'init'], capture_output=True)
assert "Updating existing ArchiveBox collection in this folder" in update_process.stdout.decode("utf-8")
def test_add_link(tmp_path, process):
os.chdir(tmp_path)
add_process = subprocess.run(['archivebox', 'add', 'http://example.com'], capture_output=True)
archived_item_path = list(tmp_path.glob('archive/**/*'))[0]
assert "index.json" in [x.name for x in archived_item_path.iterdir()]
with open(archived_item_path / "index.json", "r") as f:
output_json = json.load(f)
assert "IANA — IANA-managed Reserved Domains" == output_json['history']['title'][0]['output']
with open(tmp_path / "index.html", "r") as f:
output_html = f.read()
assert "IANA — IANA-managed Reserved Domains" in output_html
test: Fix test to reflect new API changes# archivebox init
# archivebox add
import os
import subprocess
from pathlib import Path
import json
from .fixtures import *
def test_init(tmp_path, process):
assert "Initializing a new ArchiveBox collection in this folder..." in process.stdout.decode("utf-8")
def test_update(tmp_path, process):
os.chdir(tmp_path)
update_process = subprocess.run(['archivebox', 'init'], capture_output=True)
assert "Updating existing ArchiveBox collection in this folder" in update_process.stdout.decode("utf-8")
def test_add_link(tmp_path, process):
os.chdir(tmp_path)
add_process = subprocess.run(['archivebox', 'add', 'http://example.com'], capture_output=True)
archived_item_path = list(tmp_path.glob('archive/**/*'))[0]
assert "index.json" in [x.name for x in archived_item_path.iterdir()]
with open(archived_item_path / "index.json", "r") as f:
output_json = json.load(f)
assert "Example Domain" == output_json['history']['title'][0]['output']
with open(tmp_path / "index.html", "r") as f:
output_html = f.read()
assert "Example Domain" in output_html
|
<commit_before># archivebox init
# archivebox add
import os
import subprocess
from pathlib import Path
import json
from .fixtures import *
def test_init(tmp_path, process):
assert "Initializing a new ArchiveBox collection in this folder..." in process.stdout.decode("utf-8")
def test_update(tmp_path, process):
os.chdir(tmp_path)
update_process = subprocess.run(['archivebox', 'init'], capture_output=True)
assert "Updating existing ArchiveBox collection in this folder" in update_process.stdout.decode("utf-8")
def test_add_link(tmp_path, process):
os.chdir(tmp_path)
add_process = subprocess.run(['archivebox', 'add', 'http://example.com'], capture_output=True)
archived_item_path = list(tmp_path.glob('archive/**/*'))[0]
assert "index.json" in [x.name for x in archived_item_path.iterdir()]
with open(archived_item_path / "index.json", "r") as f:
output_json = json.load(f)
assert "IANA — IANA-managed Reserved Domains" == output_json['history']['title'][0]['output']
with open(tmp_path / "index.html", "r") as f:
output_html = f.read()
assert "IANA — IANA-managed Reserved Domains" in output_html
<commit_msg>test: Fix test to reflect new API changes<commit_after># archivebox init
# archivebox add
import os
import subprocess
from pathlib import Path
import json
from .fixtures import *
def test_init(tmp_path, process):
assert "Initializing a new ArchiveBox collection in this folder..." in process.stdout.decode("utf-8")
def test_update(tmp_path, process):
os.chdir(tmp_path)
update_process = subprocess.run(['archivebox', 'init'], capture_output=True)
assert "Updating existing ArchiveBox collection in this folder" in update_process.stdout.decode("utf-8")
def test_add_link(tmp_path, process):
os.chdir(tmp_path)
add_process = subprocess.run(['archivebox', 'add', 'http://example.com'], capture_output=True)
archived_item_path = list(tmp_path.glob('archive/**/*'))[0]
assert "index.json" in [x.name for x in archived_item_path.iterdir()]
with open(archived_item_path / "index.json", "r") as f:
output_json = json.load(f)
assert "Example Domain" == output_json['history']['title'][0]['output']
with open(tmp_path / "index.html", "r") as f:
output_html = f.read()
assert "Example Domain" in output_html
|
d0d4491828942d22a50ee80110f38c54a1b5c301
|
services/disqus.py
|
services/disqus.py
|
from oauthlib.oauth2.draft25 import utils
import foauth.providers
def token_uri(service, token, r):
params = [((u'access_token', token)), ((u'api_key', service.client_id))]
r.url = utils.add_params_to_uri(r.url, params)
return r
class Disqus(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'http://disqus.com/'
docs_url = 'http://disqus.com/api/docs/'
category = 'Social'
# URLs to interact with the API
authorize_url = 'https://disqus.com/api/oauth/2.0/authorize/'
access_token_url = 'https://disqus.com/api/oauth/2.0/access_token/'
api_domain = 'disqus.com'
available_permissions = [
(None, 'read data on your behalf'),
('write', 'write data on your behalf'),
('admin', 'moderate your forums'),
]
bearer_type = token_uri
def get_scope_string(self, scopes):
# Disqus doesn't follow the spec on this point
return ','.join(scopes)
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/3.0/users/details.json')
return r.json[u'response'][u'id']
|
from oauthlib.oauth2.draft25 import utils
import foauth.providers
def token_uri(service, token, r):
params = [((u'access_token', token)), ((u'api_key', service.client_id))]
r.url = utils.add_params_to_uri(r.url, params)
return r
class Disqus(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'http://disqus.com/'
docs_url = 'http://disqus.com/api/docs/'
category = 'Social'
# URLs to interact with the API
authorize_url = 'https://disqus.com/api/oauth/2.0/authorize/'
access_token_url = 'https://disqus.com/api/oauth/2.0/access_token/'
api_domain = 'disqus.com'
available_permissions = [
(None, 'read data on your behalf'),
('write', 'read and write data on your behalf'),
('admin', 'read and write data on your behalf and moderate your forums'),
]
permissions_widget = 'radio'
bearer_type = token_uri
def get_scope_string(self, scopes):
# Disqus doesn't follow the spec on this point
return ','.join(scopes)
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/3.0/users/details.json')
return r.json[u'response'][u'id']
|
Rewrite Disqus to use the new scope selection system
|
Rewrite Disqus to use the new scope selection system
|
Python
|
bsd-3-clause
|
foauth/foauth.org,foauth/foauth.org,foauth/foauth.org
|
from oauthlib.oauth2.draft25 import utils
import foauth.providers
def token_uri(service, token, r):
params = [((u'access_token', token)), ((u'api_key', service.client_id))]
r.url = utils.add_params_to_uri(r.url, params)
return r
class Disqus(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'http://disqus.com/'
docs_url = 'http://disqus.com/api/docs/'
category = 'Social'
# URLs to interact with the API
authorize_url = 'https://disqus.com/api/oauth/2.0/authorize/'
access_token_url = 'https://disqus.com/api/oauth/2.0/access_token/'
api_domain = 'disqus.com'
available_permissions = [
(None, 'read data on your behalf'),
('write', 'write data on your behalf'),
('admin', 'moderate your forums'),
]
bearer_type = token_uri
def get_scope_string(self, scopes):
# Disqus doesn't follow the spec on this point
return ','.join(scopes)
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/3.0/users/details.json')
return r.json[u'response'][u'id']
Rewrite Disqus to use the new scope selection system
|
from oauthlib.oauth2.draft25 import utils
import foauth.providers
def token_uri(service, token, r):
params = [((u'access_token', token)), ((u'api_key', service.client_id))]
r.url = utils.add_params_to_uri(r.url, params)
return r
class Disqus(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'http://disqus.com/'
docs_url = 'http://disqus.com/api/docs/'
category = 'Social'
# URLs to interact with the API
authorize_url = 'https://disqus.com/api/oauth/2.0/authorize/'
access_token_url = 'https://disqus.com/api/oauth/2.0/access_token/'
api_domain = 'disqus.com'
available_permissions = [
(None, 'read data on your behalf'),
('write', 'read and write data on your behalf'),
('admin', 'read and write data on your behalf and moderate your forums'),
]
permissions_widget = 'radio'
bearer_type = token_uri
def get_scope_string(self, scopes):
# Disqus doesn't follow the spec on this point
return ','.join(scopes)
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/3.0/users/details.json')
return r.json[u'response'][u'id']
|
<commit_before>from oauthlib.oauth2.draft25 import utils
import foauth.providers
def token_uri(service, token, r):
params = [((u'access_token', token)), ((u'api_key', service.client_id))]
r.url = utils.add_params_to_uri(r.url, params)
return r
class Disqus(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'http://disqus.com/'
docs_url = 'http://disqus.com/api/docs/'
category = 'Social'
# URLs to interact with the API
authorize_url = 'https://disqus.com/api/oauth/2.0/authorize/'
access_token_url = 'https://disqus.com/api/oauth/2.0/access_token/'
api_domain = 'disqus.com'
available_permissions = [
(None, 'read data on your behalf'),
('write', 'write data on your behalf'),
('admin', 'moderate your forums'),
]
bearer_type = token_uri
def get_scope_string(self, scopes):
# Disqus doesn't follow the spec on this point
return ','.join(scopes)
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/3.0/users/details.json')
return r.json[u'response'][u'id']
<commit_msg>Rewrite Disqus to use the new scope selection system<commit_after>
|
from oauthlib.oauth2.draft25 import utils
import foauth.providers
def token_uri(service, token, r):
params = [((u'access_token', token)), ((u'api_key', service.client_id))]
r.url = utils.add_params_to_uri(r.url, params)
return r
class Disqus(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'http://disqus.com/'
docs_url = 'http://disqus.com/api/docs/'
category = 'Social'
# URLs to interact with the API
authorize_url = 'https://disqus.com/api/oauth/2.0/authorize/'
access_token_url = 'https://disqus.com/api/oauth/2.0/access_token/'
api_domain = 'disqus.com'
available_permissions = [
(None, 'read data on your behalf'),
('write', 'read and write data on your behalf'),
('admin', 'read and write data on your behalf and moderate your forums'),
]
permissions_widget = 'radio'
bearer_type = token_uri
def get_scope_string(self, scopes):
# Disqus doesn't follow the spec on this point
return ','.join(scopes)
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/3.0/users/details.json')
return r.json[u'response'][u'id']
|
from oauthlib.oauth2.draft25 import utils
import foauth.providers
def token_uri(service, token, r):
params = [((u'access_token', token)), ((u'api_key', service.client_id))]
r.url = utils.add_params_to_uri(r.url, params)
return r
class Disqus(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'http://disqus.com/'
docs_url = 'http://disqus.com/api/docs/'
category = 'Social'
# URLs to interact with the API
authorize_url = 'https://disqus.com/api/oauth/2.0/authorize/'
access_token_url = 'https://disqus.com/api/oauth/2.0/access_token/'
api_domain = 'disqus.com'
available_permissions = [
(None, 'read data on your behalf'),
('write', 'write data on your behalf'),
('admin', 'moderate your forums'),
]
bearer_type = token_uri
def get_scope_string(self, scopes):
# Disqus doesn't follow the spec on this point
return ','.join(scopes)
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/3.0/users/details.json')
return r.json[u'response'][u'id']
Rewrite Disqus to use the new scope selection systemfrom oauthlib.oauth2.draft25 import utils
import foauth.providers
def token_uri(service, token, r):
params = [((u'access_token', token)), ((u'api_key', service.client_id))]
r.url = utils.add_params_to_uri(r.url, params)
return r
class Disqus(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'http://disqus.com/'
docs_url = 'http://disqus.com/api/docs/'
category = 'Social'
# URLs to interact with the API
authorize_url = 'https://disqus.com/api/oauth/2.0/authorize/'
access_token_url = 'https://disqus.com/api/oauth/2.0/access_token/'
api_domain = 'disqus.com'
available_permissions = [
(None, 'read data on your behalf'),
('write', 'read and write data on your behalf'),
('admin', 'read and write data on your behalf and moderate your forums'),
]
permissions_widget = 'radio'
bearer_type = token_uri
def get_scope_string(self, scopes):
# Disqus doesn't follow the spec on this point
return ','.join(scopes)
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/3.0/users/details.json')
return r.json[u'response'][u'id']
|
<commit_before>from oauthlib.oauth2.draft25 import utils
import foauth.providers
def token_uri(service, token, r):
params = [((u'access_token', token)), ((u'api_key', service.client_id))]
r.url = utils.add_params_to_uri(r.url, params)
return r
class Disqus(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'http://disqus.com/'
docs_url = 'http://disqus.com/api/docs/'
category = 'Social'
# URLs to interact with the API
authorize_url = 'https://disqus.com/api/oauth/2.0/authorize/'
access_token_url = 'https://disqus.com/api/oauth/2.0/access_token/'
api_domain = 'disqus.com'
available_permissions = [
(None, 'read data on your behalf'),
('write', 'write data on your behalf'),
('admin', 'moderate your forums'),
]
bearer_type = token_uri
def get_scope_string(self, scopes):
# Disqus doesn't follow the spec on this point
return ','.join(scopes)
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/3.0/users/details.json')
return r.json[u'response'][u'id']
<commit_msg>Rewrite Disqus to use the new scope selection system<commit_after>from oauthlib.oauth2.draft25 import utils
import foauth.providers
def token_uri(service, token, r):
params = [((u'access_token', token)), ((u'api_key', service.client_id))]
r.url = utils.add_params_to_uri(r.url, params)
return r
class Disqus(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'http://disqus.com/'
docs_url = 'http://disqus.com/api/docs/'
category = 'Social'
# URLs to interact with the API
authorize_url = 'https://disqus.com/api/oauth/2.0/authorize/'
access_token_url = 'https://disqus.com/api/oauth/2.0/access_token/'
api_domain = 'disqus.com'
available_permissions = [
(None, 'read data on your behalf'),
('write', 'read and write data on your behalf'),
('admin', 'read and write data on your behalf and moderate your forums'),
]
permissions_widget = 'radio'
bearer_type = token_uri
def get_scope_string(self, scopes):
# Disqus doesn't follow the spec on this point
return ','.join(scopes)
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/api/3.0/users/details.json')
return r.json[u'response'][u'id']
|
c530ea901c374fef97390260e66492f37fc90a3f
|
setman/__init__.py
|
setman/__init__.py
|
from setman.lazy import LazySettings
__all__ = ('get_version', 'settings')
VERSION = (0, 1, 'beta')
settings = LazySettings()
def get_version(version=None):
"""
Return setman version number in human readable form.
You could call this function without args and in this case value from
``setman.VERSION`` would be used.
"""
version = version or VERSION
if len(version) > 2 and version[2] is not None:
if isinstance(version[2], int):
return '%d.%d.%d' % version
return '%d.%d-%s' % version
return '%d.%d' % version[:2]
|
try:
from setman.lazy import LazySettings
except ImportError:
# Do not care about "Settings cannot be imported, because environment
# variable DJANGO_SETTINGS_MODULE is undefined." errors
LazySettings = type('LazySettings', (object, ), {})
__all__ = ('get_version', 'settings')
VERSION = (0, 1, 'beta')
settings = LazySettings()
def get_version(version=None):
"""
Return setman version number in human readable form.
You could call this function without args and in this case value from
``setman.VERSION`` would be used.
"""
version = version or VERSION
if len(version) > 2 and version[2] is not None:
if isinstance(version[2], int):
return '%d.%d.%d' % version
return '%d.%d-%s' % version
return '%d.%d' % version[:2]
|
Fix installing ``django-setman`` via PIP.
|
Fix installing ``django-setman`` via PIP.
|
Python
|
bsd-3-clause
|
playpauseandstop/setman,owais/django-setman,owais/django-setman
|
from setman.lazy import LazySettings
__all__ = ('get_version', 'settings')
VERSION = (0, 1, 'beta')
settings = LazySettings()
def get_version(version=None):
"""
Return setman version number in human readable form.
You could call this function without args and in this case value from
``setman.VERSION`` would be used.
"""
version = version or VERSION
if len(version) > 2 and version[2] is not None:
if isinstance(version[2], int):
return '%d.%d.%d' % version
return '%d.%d-%s' % version
return '%d.%d' % version[:2]
Fix installing ``django-setman`` via PIP.
|
try:
from setman.lazy import LazySettings
except ImportError:
# Do not care about "Settings cannot be imported, because environment
# variable DJANGO_SETTINGS_MODULE is undefined." errors
LazySettings = type('LazySettings', (object, ), {})
__all__ = ('get_version', 'settings')
VERSION = (0, 1, 'beta')
settings = LazySettings()
def get_version(version=None):
"""
Return setman version number in human readable form.
You could call this function without args and in this case value from
``setman.VERSION`` would be used.
"""
version = version or VERSION
if len(version) > 2 and version[2] is not None:
if isinstance(version[2], int):
return '%d.%d.%d' % version
return '%d.%d-%s' % version
return '%d.%d' % version[:2]
|
<commit_before>from setman.lazy import LazySettings
__all__ = ('get_version', 'settings')
VERSION = (0, 1, 'beta')
settings = LazySettings()
def get_version(version=None):
"""
Return setman version number in human readable form.
You could call this function without args and in this case value from
``setman.VERSION`` would be used.
"""
version = version or VERSION
if len(version) > 2 and version[2] is not None:
if isinstance(version[2], int):
return '%d.%d.%d' % version
return '%d.%d-%s' % version
return '%d.%d' % version[:2]
<commit_msg>Fix installing ``django-setman`` via PIP.<commit_after>
|
try:
from setman.lazy import LazySettings
except ImportError:
# Do not care about "Settings cannot be imported, because environment
# variable DJANGO_SETTINGS_MODULE is undefined." errors
LazySettings = type('LazySettings', (object, ), {})
__all__ = ('get_version', 'settings')
VERSION = (0, 1, 'beta')
settings = LazySettings()
def get_version(version=None):
"""
Return setman version number in human readable form.
You could call this function without args and in this case value from
``setman.VERSION`` would be used.
"""
version = version or VERSION
if len(version) > 2 and version[2] is not None:
if isinstance(version[2], int):
return '%d.%d.%d' % version
return '%d.%d-%s' % version
return '%d.%d' % version[:2]
|
from setman.lazy import LazySettings
__all__ = ('get_version', 'settings')
VERSION = (0, 1, 'beta')
settings = LazySettings()
def get_version(version=None):
"""
Return setman version number in human readable form.
You could call this function without args and in this case value from
``setman.VERSION`` would be used.
"""
version = version or VERSION
if len(version) > 2 and version[2] is not None:
if isinstance(version[2], int):
return '%d.%d.%d' % version
return '%d.%d-%s' % version
return '%d.%d' % version[:2]
Fix installing ``django-setman`` via PIP.try:
from setman.lazy import LazySettings
except ImportError:
# Do not care about "Settings cannot be imported, because environment
# variable DJANGO_SETTINGS_MODULE is undefined." errors
LazySettings = type('LazySettings', (object, ), {})
__all__ = ('get_version', 'settings')
VERSION = (0, 1, 'beta')
settings = LazySettings()
def get_version(version=None):
"""
Return setman version number in human readable form.
You could call this function without args and in this case value from
``setman.VERSION`` would be used.
"""
version = version or VERSION
if len(version) > 2 and version[2] is not None:
if isinstance(version[2], int):
return '%d.%d.%d' % version
return '%d.%d-%s' % version
return '%d.%d' % version[:2]
|
<commit_before>from setman.lazy import LazySettings
__all__ = ('get_version', 'settings')
VERSION = (0, 1, 'beta')
settings = LazySettings()
def get_version(version=None):
"""
Return setman version number in human readable form.
You could call this function without args and in this case value from
``setman.VERSION`` would be used.
"""
version = version or VERSION
if len(version) > 2 and version[2] is not None:
if isinstance(version[2], int):
return '%d.%d.%d' % version
return '%d.%d-%s' % version
return '%d.%d' % version[:2]
<commit_msg>Fix installing ``django-setman`` via PIP.<commit_after>try:
from setman.lazy import LazySettings
except ImportError:
# Do not care about "Settings cannot be imported, because environment
# variable DJANGO_SETTINGS_MODULE is undefined." errors
LazySettings = type('LazySettings', (object, ), {})
__all__ = ('get_version', 'settings')
VERSION = (0, 1, 'beta')
settings = LazySettings()
def get_version(version=None):
"""
Return setman version number in human readable form.
You could call this function without args and in this case value from
``setman.VERSION`` would be used.
"""
version = version or VERSION
if len(version) > 2 and version[2] is not None:
if isinstance(version[2], int):
return '%d.%d.%d' % version
return '%d.%d-%s' % version
return '%d.%d' % version[:2]
|
0fd947625a0420970c7ed95114f73215d90de532
|
nimbus/apps/api/urls.py
|
nimbus/apps/api/urls.py
|
from django.conf.urls import url, include
from nimbus.apps import debug_urls
from . import views
urlpatterns = debug_urls()
urlpatterns += [
url(r"^$", views.api_root, name="api_root"),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^api-token-auth$', 'rest_framework.authtoken.views.obtain_auth_token'),
url(r"^media$", views.MediaList.as_view(), name="media_list"),
url(r"^media/filter_media_type/(?P<media_type>[A-Z]+)", views.TypeFilteredMediaList.as_view(), name="filter_media_api"),
url(r"^media/show$", views.MediaDetail.as_view(), name="media_detail"),
url(r"^media/add_file$", views.AddFile.as_view(), name="add_file"),
url(r"^media/add_link$", views.AddLink.as_view(), name="add_link"),
url(r"^media/delete$", views.delete_media, name="delete_media"),
]
|
from django.conf.urls import url, include
from nimbus.apps import debug_urls
from . import views
urlpatterns = debug_urls()
urlpatterns += [
url(r"^$", views.api_root, name="api_root"),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^api-token-auth$', 'rest_framework.authtoken.views.obtain_auth_token'),
url(r"^media/list$", views.MediaList.as_view(), name="media_list"),
url(r"^media/filter_media_type/(?P<media_type>[A-Z]+)", views.TypeFilteredMediaList.as_view(), name="filter_media_api"),
url(r"^media/show$", views.MediaDetail.as_view(), name="media_detail"),
url(r"^media/add_file$", views.AddFile.as_view(), name="add_file"),
url(r"^media/add_link$", views.AddLink.as_view(), name="add_link"),
url(r"^media/delete$", views.delete_media, name="delete_media"),
]
|
Change url for media list api
|
Change url for media list api
|
Python
|
mit
|
ethanal/Nimbus,ethanal/Nimbus,ethanal/Nimbus,ethanal/Nimbus
|
from django.conf.urls import url, include
from nimbus.apps import debug_urls
from . import views
urlpatterns = debug_urls()
urlpatterns += [
url(r"^$", views.api_root, name="api_root"),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^api-token-auth$', 'rest_framework.authtoken.views.obtain_auth_token'),
url(r"^media$", views.MediaList.as_view(), name="media_list"),
url(r"^media/filter_media_type/(?P<media_type>[A-Z]+)", views.TypeFilteredMediaList.as_view(), name="filter_media_api"),
url(r"^media/show$", views.MediaDetail.as_view(), name="media_detail"),
url(r"^media/add_file$", views.AddFile.as_view(), name="add_file"),
url(r"^media/add_link$", views.AddLink.as_view(), name="add_link"),
url(r"^media/delete$", views.delete_media, name="delete_media"),
]
Change url for media list api
|
from django.conf.urls import url, include
from nimbus.apps import debug_urls
from . import views
urlpatterns = debug_urls()
urlpatterns += [
url(r"^$", views.api_root, name="api_root"),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^api-token-auth$', 'rest_framework.authtoken.views.obtain_auth_token'),
url(r"^media/list$", views.MediaList.as_view(), name="media_list"),
url(r"^media/filter_media_type/(?P<media_type>[A-Z]+)", views.TypeFilteredMediaList.as_view(), name="filter_media_api"),
url(r"^media/show$", views.MediaDetail.as_view(), name="media_detail"),
url(r"^media/add_file$", views.AddFile.as_view(), name="add_file"),
url(r"^media/add_link$", views.AddLink.as_view(), name="add_link"),
url(r"^media/delete$", views.delete_media, name="delete_media"),
]
|
<commit_before>from django.conf.urls import url, include
from nimbus.apps import debug_urls
from . import views
urlpatterns = debug_urls()
urlpatterns += [
url(r"^$", views.api_root, name="api_root"),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^api-token-auth$', 'rest_framework.authtoken.views.obtain_auth_token'),
url(r"^media$", views.MediaList.as_view(), name="media_list"),
url(r"^media/filter_media_type/(?P<media_type>[A-Z]+)", views.TypeFilteredMediaList.as_view(), name="filter_media_api"),
url(r"^media/show$", views.MediaDetail.as_view(), name="media_detail"),
url(r"^media/add_file$", views.AddFile.as_view(), name="add_file"),
url(r"^media/add_link$", views.AddLink.as_view(), name="add_link"),
url(r"^media/delete$", views.delete_media, name="delete_media"),
]
<commit_msg>Change url for media list api<commit_after>
|
from django.conf.urls import url, include
from nimbus.apps import debug_urls
from . import views
urlpatterns = debug_urls()
urlpatterns += [
url(r"^$", views.api_root, name="api_root"),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^api-token-auth$', 'rest_framework.authtoken.views.obtain_auth_token'),
url(r"^media/list$", views.MediaList.as_view(), name="media_list"),
url(r"^media/filter_media_type/(?P<media_type>[A-Z]+)", views.TypeFilteredMediaList.as_view(), name="filter_media_api"),
url(r"^media/show$", views.MediaDetail.as_view(), name="media_detail"),
url(r"^media/add_file$", views.AddFile.as_view(), name="add_file"),
url(r"^media/add_link$", views.AddLink.as_view(), name="add_link"),
url(r"^media/delete$", views.delete_media, name="delete_media"),
]
|
from django.conf.urls import url, include
from nimbus.apps import debug_urls
from . import views
urlpatterns = debug_urls()
urlpatterns += [
url(r"^$", views.api_root, name="api_root"),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^api-token-auth$', 'rest_framework.authtoken.views.obtain_auth_token'),
url(r"^media$", views.MediaList.as_view(), name="media_list"),
url(r"^media/filter_media_type/(?P<media_type>[A-Z]+)", views.TypeFilteredMediaList.as_view(), name="filter_media_api"),
url(r"^media/show$", views.MediaDetail.as_view(), name="media_detail"),
url(r"^media/add_file$", views.AddFile.as_view(), name="add_file"),
url(r"^media/add_link$", views.AddLink.as_view(), name="add_link"),
url(r"^media/delete$", views.delete_media, name="delete_media"),
]
Change url for media list apifrom django.conf.urls import url, include
from nimbus.apps import debug_urls
from . import views
urlpatterns = debug_urls()
urlpatterns += [
url(r"^$", views.api_root, name="api_root"),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^api-token-auth$', 'rest_framework.authtoken.views.obtain_auth_token'),
url(r"^media/list$", views.MediaList.as_view(), name="media_list"),
url(r"^media/filter_media_type/(?P<media_type>[A-Z]+)", views.TypeFilteredMediaList.as_view(), name="filter_media_api"),
url(r"^media/show$", views.MediaDetail.as_view(), name="media_detail"),
url(r"^media/add_file$", views.AddFile.as_view(), name="add_file"),
url(r"^media/add_link$", views.AddLink.as_view(), name="add_link"),
url(r"^media/delete$", views.delete_media, name="delete_media"),
]
|
<commit_before>from django.conf.urls import url, include
from nimbus.apps import debug_urls
from . import views
urlpatterns = debug_urls()
urlpatterns += [
url(r"^$", views.api_root, name="api_root"),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^api-token-auth$', 'rest_framework.authtoken.views.obtain_auth_token'),
url(r"^media$", views.MediaList.as_view(), name="media_list"),
url(r"^media/filter_media_type/(?P<media_type>[A-Z]+)", views.TypeFilteredMediaList.as_view(), name="filter_media_api"),
url(r"^media/show$", views.MediaDetail.as_view(), name="media_detail"),
url(r"^media/add_file$", views.AddFile.as_view(), name="add_file"),
url(r"^media/add_link$", views.AddLink.as_view(), name="add_link"),
url(r"^media/delete$", views.delete_media, name="delete_media"),
]
<commit_msg>Change url for media list api<commit_after>from django.conf.urls import url, include
from nimbus.apps import debug_urls
from . import views
urlpatterns = debug_urls()
urlpatterns += [
url(r"^$", views.api_root, name="api_root"),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^api-token-auth$', 'rest_framework.authtoken.views.obtain_auth_token'),
url(r"^media/list$", views.MediaList.as_view(), name="media_list"),
url(r"^media/filter_media_type/(?P<media_type>[A-Z]+)", views.TypeFilteredMediaList.as_view(), name="filter_media_api"),
url(r"^media/show$", views.MediaDetail.as_view(), name="media_detail"),
url(r"^media/add_file$", views.AddFile.as_view(), name="add_file"),
url(r"^media/add_link$", views.AddLink.as_view(), name="add_link"),
url(r"^media/delete$", views.delete_media, name="delete_media"),
]
|
61ca58480a16c1300906932151a986b2f8fd3c79
|
nuts/src/application/TestController.py
|
nuts/src/application/TestController.py
|
from src.data.TestSuite import TestSuite
from src.service.FileHandler import FileHandler
from src.service.Runner import Runner
from src.service.Evaluator import Evaluator
from src.service.salt_api_wrapper import SaltApi
class TestController:
def __init__(self, test_file, max_iterations):
self.test_suite = TestSuite("SuiteName")
self.test_file = test_file
self.file_handler = FileHandler(self.test_suite, self.test_file)
self.runner = Runner(self.test_suite, SaltApi(), max_iterations=max_iterations)
self.evaluator = Evaluator(self.test_suite)
def logic(self):
self.file_handler.read_file(self.test_file)
self.run_tests()
def run_tests(self):
self.test_suite.print_all_test_cases()
self.runner.run_all()
self.evaluator.validate_all_results()
if(self.test_suite.has_failed_tests()):
if(self.check_re_run_failed_tests()):
self.re_run_failed_tests()
def re_run_failed_tests(self):
self.test_suite.prepare_re_run()
self.run_tests()
def check_re_run_failed_tests(self):
input_var = raw_input('Do you want to re-run the failed tests? \n' +
'Enter yes or y \n')
return input_var.lower() == 'yes' or input_var.lower() == 'y'
|
from src.data.TestSuite import TestSuite
from src.service.FileHandler import FileHandler
from src.service.Runner import Runner
from src.service.Evaluator import Evaluator
from src.service.salt_api_wrapper import SaltApi
class TestController:
def __init__(self, test_file, max_iterations=25):
self.test_suite = TestSuite("SuiteName")
self.test_file = test_file
self.file_handler = FileHandler(self.test_suite, self.test_file)
self.runner = Runner(self.test_suite, SaltApi(), max_iterations=max_iterations)
self.evaluator = Evaluator(self.test_suite)
def logic(self):
self.file_handler.read_file(self.test_file)
self.run_tests()
def run_tests(self):
self.test_suite.print_all_test_cases()
self.runner.run_all()
self.evaluator.validate_all_results()
if(self.test_suite.has_failed_tests()):
if(self.check_re_run_failed_tests()):
self.re_run_failed_tests()
def re_run_failed_tests(self):
self.test_suite.prepare_re_run()
self.run_tests()
def check_re_run_failed_tests(self):
input_var = raw_input('Do you want to re-run the failed tests? \n' +
'Enter yes or y \n')
return input_var.lower() == 'yes' or input_var.lower() == 'y'
|
Set default-value in the testcontroller
|
Set default-value in the testcontroller
|
Python
|
mit
|
HSRNetwork/Nuts
|
from src.data.TestSuite import TestSuite
from src.service.FileHandler import FileHandler
from src.service.Runner import Runner
from src.service.Evaluator import Evaluator
from src.service.salt_api_wrapper import SaltApi
class TestController:
def __init__(self, test_file, max_iterations):
self.test_suite = TestSuite("SuiteName")
self.test_file = test_file
self.file_handler = FileHandler(self.test_suite, self.test_file)
self.runner = Runner(self.test_suite, SaltApi(), max_iterations=max_iterations)
self.evaluator = Evaluator(self.test_suite)
def logic(self):
self.file_handler.read_file(self.test_file)
self.run_tests()
def run_tests(self):
self.test_suite.print_all_test_cases()
self.runner.run_all()
self.evaluator.validate_all_results()
if(self.test_suite.has_failed_tests()):
if(self.check_re_run_failed_tests()):
self.re_run_failed_tests()
def re_run_failed_tests(self):
self.test_suite.prepare_re_run()
self.run_tests()
def check_re_run_failed_tests(self):
input_var = raw_input('Do you want to re-run the failed tests? \n' +
'Enter yes or y \n')
return input_var.lower() == 'yes' or input_var.lower() == 'y'
Set default-value in the testcontroller
|
from src.data.TestSuite import TestSuite
from src.service.FileHandler import FileHandler
from src.service.Runner import Runner
from src.service.Evaluator import Evaluator
from src.service.salt_api_wrapper import SaltApi
class TestController:
def __init__(self, test_file, max_iterations=25):
self.test_suite = TestSuite("SuiteName")
self.test_file = test_file
self.file_handler = FileHandler(self.test_suite, self.test_file)
self.runner = Runner(self.test_suite, SaltApi(), max_iterations=max_iterations)
self.evaluator = Evaluator(self.test_suite)
def logic(self):
self.file_handler.read_file(self.test_file)
self.run_tests()
def run_tests(self):
self.test_suite.print_all_test_cases()
self.runner.run_all()
self.evaluator.validate_all_results()
if(self.test_suite.has_failed_tests()):
if(self.check_re_run_failed_tests()):
self.re_run_failed_tests()
def re_run_failed_tests(self):
self.test_suite.prepare_re_run()
self.run_tests()
def check_re_run_failed_tests(self):
input_var = raw_input('Do you want to re-run the failed tests? \n' +
'Enter yes or y \n')
return input_var.lower() == 'yes' or input_var.lower() == 'y'
|
<commit_before>from src.data.TestSuite import TestSuite
from src.service.FileHandler import FileHandler
from src.service.Runner import Runner
from src.service.Evaluator import Evaluator
from src.service.salt_api_wrapper import SaltApi
class TestController:
def __init__(self, test_file, max_iterations):
self.test_suite = TestSuite("SuiteName")
self.test_file = test_file
self.file_handler = FileHandler(self.test_suite, self.test_file)
self.runner = Runner(self.test_suite, SaltApi(), max_iterations=max_iterations)
self.evaluator = Evaluator(self.test_suite)
def logic(self):
self.file_handler.read_file(self.test_file)
self.run_tests()
def run_tests(self):
self.test_suite.print_all_test_cases()
self.runner.run_all()
self.evaluator.validate_all_results()
if(self.test_suite.has_failed_tests()):
if(self.check_re_run_failed_tests()):
self.re_run_failed_tests()
def re_run_failed_tests(self):
self.test_suite.prepare_re_run()
self.run_tests()
def check_re_run_failed_tests(self):
input_var = raw_input('Do you want to re-run the failed tests? \n' +
'Enter yes or y \n')
return input_var.lower() == 'yes' or input_var.lower() == 'y'
<commit_msg>Set default-value in the testcontroller<commit_after>
|
from src.data.TestSuite import TestSuite
from src.service.FileHandler import FileHandler
from src.service.Runner import Runner
from src.service.Evaluator import Evaluator
from src.service.salt_api_wrapper import SaltApi
class TestController:
def __init__(self, test_file, max_iterations=25):
self.test_suite = TestSuite("SuiteName")
self.test_file = test_file
self.file_handler = FileHandler(self.test_suite, self.test_file)
self.runner = Runner(self.test_suite, SaltApi(), max_iterations=max_iterations)
self.evaluator = Evaluator(self.test_suite)
def logic(self):
self.file_handler.read_file(self.test_file)
self.run_tests()
def run_tests(self):
self.test_suite.print_all_test_cases()
self.runner.run_all()
self.evaluator.validate_all_results()
if(self.test_suite.has_failed_tests()):
if(self.check_re_run_failed_tests()):
self.re_run_failed_tests()
def re_run_failed_tests(self):
self.test_suite.prepare_re_run()
self.run_tests()
def check_re_run_failed_tests(self):
input_var = raw_input('Do you want to re-run the failed tests? \n' +
'Enter yes or y \n')
return input_var.lower() == 'yes' or input_var.lower() == 'y'
|
from src.data.TestSuite import TestSuite
from src.service.FileHandler import FileHandler
from src.service.Runner import Runner
from src.service.Evaluator import Evaluator
from src.service.salt_api_wrapper import SaltApi
class TestController:
def __init__(self, test_file, max_iterations):
self.test_suite = TestSuite("SuiteName")
self.test_file = test_file
self.file_handler = FileHandler(self.test_suite, self.test_file)
self.runner = Runner(self.test_suite, SaltApi(), max_iterations=max_iterations)
self.evaluator = Evaluator(self.test_suite)
def logic(self):
self.file_handler.read_file(self.test_file)
self.run_tests()
def run_tests(self):
self.test_suite.print_all_test_cases()
self.runner.run_all()
self.evaluator.validate_all_results()
if(self.test_suite.has_failed_tests()):
if(self.check_re_run_failed_tests()):
self.re_run_failed_tests()
def re_run_failed_tests(self):
self.test_suite.prepare_re_run()
self.run_tests()
def check_re_run_failed_tests(self):
input_var = raw_input('Do you want to re-run the failed tests? \n' +
'Enter yes or y \n')
return input_var.lower() == 'yes' or input_var.lower() == 'y'
Set default-value in the testcontrollerfrom src.data.TestSuite import TestSuite
from src.service.FileHandler import FileHandler
from src.service.Runner import Runner
from src.service.Evaluator import Evaluator
from src.service.salt_api_wrapper import SaltApi
class TestController:
def __init__(self, test_file, max_iterations=25):
self.test_suite = TestSuite("SuiteName")
self.test_file = test_file
self.file_handler = FileHandler(self.test_suite, self.test_file)
self.runner = Runner(self.test_suite, SaltApi(), max_iterations=max_iterations)
self.evaluator = Evaluator(self.test_suite)
def logic(self):
self.file_handler.read_file(self.test_file)
self.run_tests()
def run_tests(self):
self.test_suite.print_all_test_cases()
self.runner.run_all()
self.evaluator.validate_all_results()
if(self.test_suite.has_failed_tests()):
if(self.check_re_run_failed_tests()):
self.re_run_failed_tests()
def re_run_failed_tests(self):
self.test_suite.prepare_re_run()
self.run_tests()
def check_re_run_failed_tests(self):
input_var = raw_input('Do you want to re-run the failed tests? \n' +
'Enter yes or y \n')
return input_var.lower() == 'yes' or input_var.lower() == 'y'
|
<commit_before>from src.data.TestSuite import TestSuite
from src.service.FileHandler import FileHandler
from src.service.Runner import Runner
from src.service.Evaluator import Evaluator
from src.service.salt_api_wrapper import SaltApi
class TestController:
def __init__(self, test_file, max_iterations):
self.test_suite = TestSuite("SuiteName")
self.test_file = test_file
self.file_handler = FileHandler(self.test_suite, self.test_file)
self.runner = Runner(self.test_suite, SaltApi(), max_iterations=max_iterations)
self.evaluator = Evaluator(self.test_suite)
def logic(self):
self.file_handler.read_file(self.test_file)
self.run_tests()
def run_tests(self):
self.test_suite.print_all_test_cases()
self.runner.run_all()
self.evaluator.validate_all_results()
if(self.test_suite.has_failed_tests()):
if(self.check_re_run_failed_tests()):
self.re_run_failed_tests()
def re_run_failed_tests(self):
self.test_suite.prepare_re_run()
self.run_tests()
def check_re_run_failed_tests(self):
input_var = raw_input('Do you want to re-run the failed tests? \n' +
'Enter yes or y \n')
return input_var.lower() == 'yes' or input_var.lower() == 'y'
<commit_msg>Set default-value in the testcontroller<commit_after>from src.data.TestSuite import TestSuite
from src.service.FileHandler import FileHandler
from src.service.Runner import Runner
from src.service.Evaluator import Evaluator
from src.service.salt_api_wrapper import SaltApi
class TestController:
def __init__(self, test_file, max_iterations=25):
self.test_suite = TestSuite("SuiteName")
self.test_file = test_file
self.file_handler = FileHandler(self.test_suite, self.test_file)
self.runner = Runner(self.test_suite, SaltApi(), max_iterations=max_iterations)
self.evaluator = Evaluator(self.test_suite)
def logic(self):
self.file_handler.read_file(self.test_file)
self.run_tests()
def run_tests(self):
self.test_suite.print_all_test_cases()
self.runner.run_all()
self.evaluator.validate_all_results()
if(self.test_suite.has_failed_tests()):
if(self.check_re_run_failed_tests()):
self.re_run_failed_tests()
def re_run_failed_tests(self):
self.test_suite.prepare_re_run()
self.run_tests()
def check_re_run_failed_tests(self):
input_var = raw_input('Do you want to re-run the failed tests? \n' +
'Enter yes or y \n')
return input_var.lower() == 'yes' or input_var.lower() == 'y'
|
35d84021736f5509dc37f12ca92a05693cff5d47
|
twython/helpers.py
|
twython/helpers.py
|
from .compat import basestring
def _transparent_params(_params):
params = {}
files = {}
for k, v in _params.items():
if hasattr(v, 'read') and callable(v.read):
files[k] = v
elif isinstance(v, bool):
if v:
params[k] = 'true'
else:
params[k] = 'false'
elif isinstance(v, basestring):
params[k] = v
else:
continue
return params, files
|
from .compat import basestring
def _transparent_params(_params):
params = {}
files = {}
for k, v in _params.items():
if hasattr(v, 'read') and callable(v.read):
files[k] = v
elif isinstance(v, bool):
if v:
params[k] = 'true'
else:
params[k] = 'false'
elif isinstance(v, basestring) or isinstance(v, int):
params[k] = v
else:
continue
return params, files
|
Include ints in params too
|
Include ints in params too
Oops ;P
|
Python
|
mit
|
vivek8943/twython,ping/twython,akarambir/twython,Fueled/twython,fibears/twython,Hasimir/twython,Devyani-Divs/twython,Oire/twython,joebos/twython,ryanmcgrath/twython
|
from .compat import basestring
def _transparent_params(_params):
params = {}
files = {}
for k, v in _params.items():
if hasattr(v, 'read') and callable(v.read):
files[k] = v
elif isinstance(v, bool):
if v:
params[k] = 'true'
else:
params[k] = 'false'
elif isinstance(v, basestring):
params[k] = v
else:
continue
return params, files
Include ints in params too
Oops ;P
|
from .compat import basestring
def _transparent_params(_params):
params = {}
files = {}
for k, v in _params.items():
if hasattr(v, 'read') and callable(v.read):
files[k] = v
elif isinstance(v, bool):
if v:
params[k] = 'true'
else:
params[k] = 'false'
elif isinstance(v, basestring) or isinstance(v, int):
params[k] = v
else:
continue
return params, files
|
<commit_before>from .compat import basestring
def _transparent_params(_params):
params = {}
files = {}
for k, v in _params.items():
if hasattr(v, 'read') and callable(v.read):
files[k] = v
elif isinstance(v, bool):
if v:
params[k] = 'true'
else:
params[k] = 'false'
elif isinstance(v, basestring):
params[k] = v
else:
continue
return params, files
<commit_msg>Include ints in params too
Oops ;P<commit_after>
|
from .compat import basestring
def _transparent_params(_params):
params = {}
files = {}
for k, v in _params.items():
if hasattr(v, 'read') and callable(v.read):
files[k] = v
elif isinstance(v, bool):
if v:
params[k] = 'true'
else:
params[k] = 'false'
elif isinstance(v, basestring) or isinstance(v, int):
params[k] = v
else:
continue
return params, files
|
from .compat import basestring
def _transparent_params(_params):
params = {}
files = {}
for k, v in _params.items():
if hasattr(v, 'read') and callable(v.read):
files[k] = v
elif isinstance(v, bool):
if v:
params[k] = 'true'
else:
params[k] = 'false'
elif isinstance(v, basestring):
params[k] = v
else:
continue
return params, files
Include ints in params too
Oops ;Pfrom .compat import basestring
def _transparent_params(_params):
params = {}
files = {}
for k, v in _params.items():
if hasattr(v, 'read') and callable(v.read):
files[k] = v
elif isinstance(v, bool):
if v:
params[k] = 'true'
else:
params[k] = 'false'
elif isinstance(v, basestring) or isinstance(v, int):
params[k] = v
else:
continue
return params, files
|
<commit_before>from .compat import basestring
def _transparent_params(_params):
params = {}
files = {}
for k, v in _params.items():
if hasattr(v, 'read') and callable(v.read):
files[k] = v
elif isinstance(v, bool):
if v:
params[k] = 'true'
else:
params[k] = 'false'
elif isinstance(v, basestring):
params[k] = v
else:
continue
return params, files
<commit_msg>Include ints in params too
Oops ;P<commit_after>from .compat import basestring
def _transparent_params(_params):
params = {}
files = {}
for k, v in _params.items():
if hasattr(v, 'read') and callable(v.read):
files[k] = v
elif isinstance(v, bool):
if v:
params[k] = 'true'
else:
params[k] = 'false'
elif isinstance(v, basestring) or isinstance(v, int):
params[k] = v
else:
continue
return params, files
|
778df70d5e755d0681636cb401bbf33f17f247bc
|
uniqueids/admin.py
|
uniqueids/admin.py
|
from django.contrib import admin
from .models import Record
from .tasks import send_personnel_code
class RecordAdmin(admin.ModelAdmin):
list_display = [
"id", "identity", "write_to", "created_at", "updated_at"]
list_filter = ["write_to", "created_at"]
search_fields = ["identity", "write_to"]
actions = ["resend_personnel_code"]
def resend_personnel_code(self, request, queryset):
created = 0
skipped = 0
for record in queryset:
if record.write_to != "personnel_code":
skipped += 1
continue
send_personnel_code.apply_async(kwargs={
"identity": str(record.identity),
"personnel_code": record.id})
created += 1
if created == 1:
created_text = "%s Record was" % created
else:
created_text = "%s Records were" % created
if skipped == 1:
skipped_text = "%s Record was" % skipped
else:
skipped_text = "%s Records were" % skipped
self.message_user(
request, "%s successfully changed. %s skipped because they are "
"not a HCW." % (created_text, skipped_text))
resend_personnel_code.short_description = "Send code by SMS (personnel "\
"code only)"
admin.site.register(Record, RecordAdmin)
|
from django.contrib import admin
from .models import Record
from .tasks import send_personnel_code
class RecordAdmin(admin.ModelAdmin):
list_display = [
"id", "identity", "write_to", "created_at", "updated_at"]
list_filter = ["write_to", "created_at"]
search_fields = ["identity", "write_to"]
actions = ["resend_personnel_code"]
def resend_personnel_code(self, request, queryset):
created = 0
skipped = 0
for record in queryset.iterator():
if record.write_to != "personnel_code":
skipped += 1
continue
send_personnel_code.apply_async(kwargs={
"identity": str(record.identity),
"personnel_code": record.id})
created += 1
if created == 1:
created_text = "%s Record was" % created
else:
created_text = "%s Records were" % created
if skipped == 1:
skipped_text = "%s Record was" % skipped
else:
skipped_text = "%s Records were" % skipped
self.message_user(
request, "%s successfully changed. %s skipped because they are "
"not a HCW." % (created_text, skipped_text))
resend_personnel_code.short_description = "Send code by SMS (personnel "\
"code only)"
admin.site.register(Record, RecordAdmin)
|
Use Iterator to iterate through records
|
Use Iterator to iterate through records
|
Python
|
bsd-3-clause
|
praekelt/hellomama-registration,praekelt/hellomama-registration
|
from django.contrib import admin
from .models import Record
from .tasks import send_personnel_code
class RecordAdmin(admin.ModelAdmin):
list_display = [
"id", "identity", "write_to", "created_at", "updated_at"]
list_filter = ["write_to", "created_at"]
search_fields = ["identity", "write_to"]
actions = ["resend_personnel_code"]
def resend_personnel_code(self, request, queryset):
created = 0
skipped = 0
for record in queryset:
if record.write_to != "personnel_code":
skipped += 1
continue
send_personnel_code.apply_async(kwargs={
"identity": str(record.identity),
"personnel_code": record.id})
created += 1
if created == 1:
created_text = "%s Record was" % created
else:
created_text = "%s Records were" % created
if skipped == 1:
skipped_text = "%s Record was" % skipped
else:
skipped_text = "%s Records were" % skipped
self.message_user(
request, "%s successfully changed. %s skipped because they are "
"not a HCW." % (created_text, skipped_text))
resend_personnel_code.short_description = "Send code by SMS (personnel "\
"code only)"
admin.site.register(Record, RecordAdmin)
Use Iterator to iterate through records
|
from django.contrib import admin
from .models import Record
from .tasks import send_personnel_code
class RecordAdmin(admin.ModelAdmin):
list_display = [
"id", "identity", "write_to", "created_at", "updated_at"]
list_filter = ["write_to", "created_at"]
search_fields = ["identity", "write_to"]
actions = ["resend_personnel_code"]
def resend_personnel_code(self, request, queryset):
created = 0
skipped = 0
for record in queryset.iterator():
if record.write_to != "personnel_code":
skipped += 1
continue
send_personnel_code.apply_async(kwargs={
"identity": str(record.identity),
"personnel_code": record.id})
created += 1
if created == 1:
created_text = "%s Record was" % created
else:
created_text = "%s Records were" % created
if skipped == 1:
skipped_text = "%s Record was" % skipped
else:
skipped_text = "%s Records were" % skipped
self.message_user(
request, "%s successfully changed. %s skipped because they are "
"not a HCW." % (created_text, skipped_text))
resend_personnel_code.short_description = "Send code by SMS (personnel "\
"code only)"
admin.site.register(Record, RecordAdmin)
|
<commit_before>from django.contrib import admin
from .models import Record
from .tasks import send_personnel_code
class RecordAdmin(admin.ModelAdmin):
list_display = [
"id", "identity", "write_to", "created_at", "updated_at"]
list_filter = ["write_to", "created_at"]
search_fields = ["identity", "write_to"]
actions = ["resend_personnel_code"]
def resend_personnel_code(self, request, queryset):
created = 0
skipped = 0
for record in queryset:
if record.write_to != "personnel_code":
skipped += 1
continue
send_personnel_code.apply_async(kwargs={
"identity": str(record.identity),
"personnel_code": record.id})
created += 1
if created == 1:
created_text = "%s Record was" % created
else:
created_text = "%s Records were" % created
if skipped == 1:
skipped_text = "%s Record was" % skipped
else:
skipped_text = "%s Records were" % skipped
self.message_user(
request, "%s successfully changed. %s skipped because they are "
"not a HCW." % (created_text, skipped_text))
resend_personnel_code.short_description = "Send code by SMS (personnel "\
"code only)"
admin.site.register(Record, RecordAdmin)
<commit_msg>Use Iterator to iterate through records<commit_after>
|
from django.contrib import admin
from .models import Record
from .tasks import send_personnel_code
class RecordAdmin(admin.ModelAdmin):
list_display = [
"id", "identity", "write_to", "created_at", "updated_at"]
list_filter = ["write_to", "created_at"]
search_fields = ["identity", "write_to"]
actions = ["resend_personnel_code"]
def resend_personnel_code(self, request, queryset):
created = 0
skipped = 0
for record in queryset.iterator():
if record.write_to != "personnel_code":
skipped += 1
continue
send_personnel_code.apply_async(kwargs={
"identity": str(record.identity),
"personnel_code": record.id})
created += 1
if created == 1:
created_text = "%s Record was" % created
else:
created_text = "%s Records were" % created
if skipped == 1:
skipped_text = "%s Record was" % skipped
else:
skipped_text = "%s Records were" % skipped
self.message_user(
request, "%s successfully changed. %s skipped because they are "
"not a HCW." % (created_text, skipped_text))
resend_personnel_code.short_description = "Send code by SMS (personnel "\
"code only)"
admin.site.register(Record, RecordAdmin)
|
from django.contrib import admin
from .models import Record
from .tasks import send_personnel_code
class RecordAdmin(admin.ModelAdmin):
list_display = [
"id", "identity", "write_to", "created_at", "updated_at"]
list_filter = ["write_to", "created_at"]
search_fields = ["identity", "write_to"]
actions = ["resend_personnel_code"]
def resend_personnel_code(self, request, queryset):
created = 0
skipped = 0
for record in queryset:
if record.write_to != "personnel_code":
skipped += 1
continue
send_personnel_code.apply_async(kwargs={
"identity": str(record.identity),
"personnel_code": record.id})
created += 1
if created == 1:
created_text = "%s Record was" % created
else:
created_text = "%s Records were" % created
if skipped == 1:
skipped_text = "%s Record was" % skipped
else:
skipped_text = "%s Records were" % skipped
self.message_user(
request, "%s successfully changed. %s skipped because they are "
"not a HCW." % (created_text, skipped_text))
resend_personnel_code.short_description = "Send code by SMS (personnel "\
"code only)"
admin.site.register(Record, RecordAdmin)
Use Iterator to iterate through recordsfrom django.contrib import admin
from .models import Record
from .tasks import send_personnel_code
class RecordAdmin(admin.ModelAdmin):
list_display = [
"id", "identity", "write_to", "created_at", "updated_at"]
list_filter = ["write_to", "created_at"]
search_fields = ["identity", "write_to"]
actions = ["resend_personnel_code"]
def resend_personnel_code(self, request, queryset):
created = 0
skipped = 0
for record in queryset.iterator():
if record.write_to != "personnel_code":
skipped += 1
continue
send_personnel_code.apply_async(kwargs={
"identity": str(record.identity),
"personnel_code": record.id})
created += 1
if created == 1:
created_text = "%s Record was" % created
else:
created_text = "%s Records were" % created
if skipped == 1:
skipped_text = "%s Record was" % skipped
else:
skipped_text = "%s Records were" % skipped
self.message_user(
request, "%s successfully changed. %s skipped because they are "
"not a HCW." % (created_text, skipped_text))
resend_personnel_code.short_description = "Send code by SMS (personnel "\
"code only)"
admin.site.register(Record, RecordAdmin)
|
<commit_before>from django.contrib import admin
from .models import Record
from .tasks import send_personnel_code
class RecordAdmin(admin.ModelAdmin):
list_display = [
"id", "identity", "write_to", "created_at", "updated_at"]
list_filter = ["write_to", "created_at"]
search_fields = ["identity", "write_to"]
actions = ["resend_personnel_code"]
def resend_personnel_code(self, request, queryset):
created = 0
skipped = 0
for record in queryset:
if record.write_to != "personnel_code":
skipped += 1
continue
send_personnel_code.apply_async(kwargs={
"identity": str(record.identity),
"personnel_code": record.id})
created += 1
if created == 1:
created_text = "%s Record was" % created
else:
created_text = "%s Records were" % created
if skipped == 1:
skipped_text = "%s Record was" % skipped
else:
skipped_text = "%s Records were" % skipped
self.message_user(
request, "%s successfully changed. %s skipped because they are "
"not a HCW." % (created_text, skipped_text))
resend_personnel_code.short_description = "Send code by SMS (personnel "\
"code only)"
admin.site.register(Record, RecordAdmin)
<commit_msg>Use Iterator to iterate through records<commit_after>from django.contrib import admin
from .models import Record
from .tasks import send_personnel_code
class RecordAdmin(admin.ModelAdmin):
list_display = [
"id", "identity", "write_to", "created_at", "updated_at"]
list_filter = ["write_to", "created_at"]
search_fields = ["identity", "write_to"]
actions = ["resend_personnel_code"]
def resend_personnel_code(self, request, queryset):
created = 0
skipped = 0
for record in queryset.iterator():
if record.write_to != "personnel_code":
skipped += 1
continue
send_personnel_code.apply_async(kwargs={
"identity": str(record.identity),
"personnel_code": record.id})
created += 1
if created == 1:
created_text = "%s Record was" % created
else:
created_text = "%s Records were" % created
if skipped == 1:
skipped_text = "%s Record was" % skipped
else:
skipped_text = "%s Records were" % skipped
self.message_user(
request, "%s successfully changed. %s skipped because they are "
"not a HCW." % (created_text, skipped_text))
resend_personnel_code.short_description = "Send code by SMS (personnel "\
"code only)"
admin.site.register(Record, RecordAdmin)
|
978e09882f4fb19a8d31a9b91b0258751f745c21
|
mods/FleetAutoTarget/AutoTarget.py
|
mods/FleetAutoTarget/AutoTarget.py
|
import logmodule
from eve.client.script.ui.shared.fleet.fleetbroadcast import FleetBroadcastView
def PatchFn(fn):
def wrapper(self):
try:
br = sm.GetService('fleet').GetBroadcastHistory()[0]
logmodule.general.Log("GetBroadcastListEntry invoked: %s %d %d" % (br.name, br.charID, br.itemID), logmodule.LGNOTICE)
if br.name in ("Target", "HealArmor", "HealShield"):
sm.GetService('target').TryLockTarget(br.itemID)
except:
pass
return fn(self)
return wrapper
def RunPatch():
FleetBroadcastView.LoadBroadcastHistory = PatchFn(FleetBroadcastView.LoadBroadcastHistory)
logmodule.general.Log("Code Injected", logmodule.LGNOTICE)
|
import logmodule
from eve.client.script.ui.shared.fleet.fleetbroadcast import FleetBroadcastView
def PatchFn(fn):
def wrapper(self):
ret = fn(self)
try:
br = sm.GetService('fleet').GetBroadcastHistory()[0]
logmodule.general.Log("GetBroadcastListEntry invoked: %s %d %d" % (br.name, br.charID, br.itemID), logmodule.LGNOTICE)
if br.name in ("Target", "HealArmor", "HealShield"):
sm.GetService('target').TryLockTarget(br.itemID)
except:
pass
return ret
return wrapper
def RunPatch():
FleetBroadcastView.LoadBroadcastHistory = PatchFn(FleetBroadcastView.LoadBroadcastHistory)
logmodule.general.Log("Code Injected", logmodule.LGNOTICE)
|
Adjust the order to reduce latency
|
Adjust the order to reduce latency
|
Python
|
mit
|
EVEModX/Mods
|
import logmodule
from eve.client.script.ui.shared.fleet.fleetbroadcast import FleetBroadcastView
def PatchFn(fn):
def wrapper(self):
try:
br = sm.GetService('fleet').GetBroadcastHistory()[0]
logmodule.general.Log("GetBroadcastListEntry invoked: %s %d %d" % (br.name, br.charID, br.itemID), logmodule.LGNOTICE)
if br.name in ("Target", "HealArmor", "HealShield"):
sm.GetService('target').TryLockTarget(br.itemID)
except:
pass
return fn(self)
return wrapper
def RunPatch():
FleetBroadcastView.LoadBroadcastHistory = PatchFn(FleetBroadcastView.LoadBroadcastHistory)
logmodule.general.Log("Code Injected", logmodule.LGNOTICE)
Adjust the order to reduce latency
|
import logmodule
from eve.client.script.ui.shared.fleet.fleetbroadcast import FleetBroadcastView
def PatchFn(fn):
def wrapper(self):
ret = fn(self)
try:
br = sm.GetService('fleet').GetBroadcastHistory()[0]
logmodule.general.Log("GetBroadcastListEntry invoked: %s %d %d" % (br.name, br.charID, br.itemID), logmodule.LGNOTICE)
if br.name in ("Target", "HealArmor", "HealShield"):
sm.GetService('target').TryLockTarget(br.itemID)
except:
pass
return ret
return wrapper
def RunPatch():
FleetBroadcastView.LoadBroadcastHistory = PatchFn(FleetBroadcastView.LoadBroadcastHistory)
logmodule.general.Log("Code Injected", logmodule.LGNOTICE)
|
<commit_before>import logmodule
from eve.client.script.ui.shared.fleet.fleetbroadcast import FleetBroadcastView
def PatchFn(fn):
def wrapper(self):
try:
br = sm.GetService('fleet').GetBroadcastHistory()[0]
logmodule.general.Log("GetBroadcastListEntry invoked: %s %d %d" % (br.name, br.charID, br.itemID), logmodule.LGNOTICE)
if br.name in ("Target", "HealArmor", "HealShield"):
sm.GetService('target').TryLockTarget(br.itemID)
except:
pass
return fn(self)
return wrapper
def RunPatch():
FleetBroadcastView.LoadBroadcastHistory = PatchFn(FleetBroadcastView.LoadBroadcastHistory)
logmodule.general.Log("Code Injected", logmodule.LGNOTICE)
<commit_msg>Adjust the order to reduce latency<commit_after>
|
import logmodule
from eve.client.script.ui.shared.fleet.fleetbroadcast import FleetBroadcastView
def PatchFn(fn):
def wrapper(self):
ret = fn(self)
try:
br = sm.GetService('fleet').GetBroadcastHistory()[0]
logmodule.general.Log("GetBroadcastListEntry invoked: %s %d %d" % (br.name, br.charID, br.itemID), logmodule.LGNOTICE)
if br.name in ("Target", "HealArmor", "HealShield"):
sm.GetService('target').TryLockTarget(br.itemID)
except:
pass
return ret
return wrapper
def RunPatch():
FleetBroadcastView.LoadBroadcastHistory = PatchFn(FleetBroadcastView.LoadBroadcastHistory)
logmodule.general.Log("Code Injected", logmodule.LGNOTICE)
|
import logmodule
from eve.client.script.ui.shared.fleet.fleetbroadcast import FleetBroadcastView
def PatchFn(fn):
def wrapper(self):
try:
br = sm.GetService('fleet').GetBroadcastHistory()[0]
logmodule.general.Log("GetBroadcastListEntry invoked: %s %d %d" % (br.name, br.charID, br.itemID), logmodule.LGNOTICE)
if br.name in ("Target", "HealArmor", "HealShield"):
sm.GetService('target').TryLockTarget(br.itemID)
except:
pass
return fn(self)
return wrapper
def RunPatch():
FleetBroadcastView.LoadBroadcastHistory = PatchFn(FleetBroadcastView.LoadBroadcastHistory)
logmodule.general.Log("Code Injected", logmodule.LGNOTICE)
Adjust the order to reduce latencyimport logmodule
from eve.client.script.ui.shared.fleet.fleetbroadcast import FleetBroadcastView
def PatchFn(fn):
def wrapper(self):
ret = fn(self)
try:
br = sm.GetService('fleet').GetBroadcastHistory()[0]
logmodule.general.Log("GetBroadcastListEntry invoked: %s %d %d" % (br.name, br.charID, br.itemID), logmodule.LGNOTICE)
if br.name in ("Target", "HealArmor", "HealShield"):
sm.GetService('target').TryLockTarget(br.itemID)
except:
pass
return ret
return wrapper
def RunPatch():
FleetBroadcastView.LoadBroadcastHistory = PatchFn(FleetBroadcastView.LoadBroadcastHistory)
logmodule.general.Log("Code Injected", logmodule.LGNOTICE)
|
<commit_before>import logmodule
from eve.client.script.ui.shared.fleet.fleetbroadcast import FleetBroadcastView
def PatchFn(fn):
def wrapper(self):
try:
br = sm.GetService('fleet').GetBroadcastHistory()[0]
logmodule.general.Log("GetBroadcastListEntry invoked: %s %d %d" % (br.name, br.charID, br.itemID), logmodule.LGNOTICE)
if br.name in ("Target", "HealArmor", "HealShield"):
sm.GetService('target').TryLockTarget(br.itemID)
except:
pass
return fn(self)
return wrapper
def RunPatch():
FleetBroadcastView.LoadBroadcastHistory = PatchFn(FleetBroadcastView.LoadBroadcastHistory)
logmodule.general.Log("Code Injected", logmodule.LGNOTICE)
<commit_msg>Adjust the order to reduce latency<commit_after>import logmodule
from eve.client.script.ui.shared.fleet.fleetbroadcast import FleetBroadcastView
def PatchFn(fn):
def wrapper(self):
ret = fn(self)
try:
br = sm.GetService('fleet').GetBroadcastHistory()[0]
logmodule.general.Log("GetBroadcastListEntry invoked: %s %d %d" % (br.name, br.charID, br.itemID), logmodule.LGNOTICE)
if br.name in ("Target", "HealArmor", "HealShield"):
sm.GetService('target').TryLockTarget(br.itemID)
except:
pass
return ret
return wrapper
def RunPatch():
FleetBroadcastView.LoadBroadcastHistory = PatchFn(FleetBroadcastView.LoadBroadcastHistory)
logmodule.general.Log("Code Injected", logmodule.LGNOTICE)
|
1c814acdb58e30ccfaa6ea80aa8cb3080d90b2e2
|
project_fish/whats_fresh/tests/test_preparation_model.py
|
project_fish/whats_fresh/tests/test_preparation_model.py
|
from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
import os
import time
import sys
import datetime
class PreparationsTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'name': models.TextField,
'description': models.TextField,
'additional_info': models.TextField,
u'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'Preparation')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = Vendor._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
|
from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
import os
import time
import sys
import datetime
class PreparationsTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'name': models.TextField,
'description': models.TextField,
'additional_info': models.TextField,
'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'Preparation')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = Vendor._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
|
Change unicode test string to ascii
|
Change unicode test string to ascii
|
Python
|
apache-2.0
|
iCHAIT/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,osu-cass/whats-fresh-api
|
from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
import os
import time
import sys
import datetime
class PreparationsTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'name': models.TextField,
'description': models.TextField,
'additional_info': models.TextField,
u'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'Preparation')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = Vendor._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
Change unicode test string to ascii
|
from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
import os
import time
import sys
import datetime
class PreparationsTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'name': models.TextField,
'description': models.TextField,
'additional_info': models.TextField,
'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'Preparation')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = Vendor._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
|
<commit_before>from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
import os
import time
import sys
import datetime
class PreparationsTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'name': models.TextField,
'description': models.TextField,
'additional_info': models.TextField,
u'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'Preparation')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = Vendor._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
<commit_msg>Change unicode test string to ascii<commit_after>
|
from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
import os
import time
import sys
import datetime
class PreparationsTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'name': models.TextField,
'description': models.TextField,
'additional_info': models.TextField,
'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'Preparation')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = Vendor._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
|
from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
import os
import time
import sys
import datetime
class PreparationsTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'name': models.TextField,
'description': models.TextField,
'additional_info': models.TextField,
u'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'Preparation')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = Vendor._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
Change unicode test string to asciifrom django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
import os
import time
import sys
import datetime
class PreparationsTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'name': models.TextField,
'description': models.TextField,
'additional_info': models.TextField,
'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'Preparation')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = Vendor._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
|
<commit_before>from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
import os
import time
import sys
import datetime
class PreparationsTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'name': models.TextField,
'description': models.TextField,
'additional_info': models.TextField,
u'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'Preparation')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = Vendor._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
<commit_msg>Change unicode test string to ascii<commit_after>from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
import os
import time
import sys
import datetime
class PreparationsTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'name': models.TextField,
'description': models.TextField,
'additional_info': models.TextField,
'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'Preparation')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = Vendor._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
|
d2e41e3c03e71919aeeaa72766f6c4037424d3c1
|
tests/factories.py
|
tests/factories.py
|
from django.contrib.auth.models import User
import factory
class UserFactory(factory.DjangoModelFactory):
username = factory.Sequence('User {}'.format)
class Meta:
model = User
@factory.post_generation
def password(self, create, extracted, **kwargs):
self.raw_password = 'default_password' if extracted is None else extracted
self.set_password(self.raw_password)
if create:
self.save()
|
from django.contrib.auth.models import User
import factory
class UserFactory(factory.DjangoModelFactory):
username = factory.Sequence('User {}'.format)
class Meta:
model = User
@factory.post_generation
def password(self, create, extracted, **kwargs):
# By using this method password can never be set to `None`!
self.raw_password = 'default_password' if extracted is None else extracted
self.set_password(self.raw_password)
if create:
self.save()
|
Add comment about User factory post_generation
|
Add comment about User factory post_generation
|
Python
|
bsd-2-clause
|
incuna/incuna-test-utils,incuna/incuna-test-utils
|
from django.contrib.auth.models import User
import factory
class UserFactory(factory.DjangoModelFactory):
username = factory.Sequence('User {}'.format)
class Meta:
model = User
@factory.post_generation
def password(self, create, extracted, **kwargs):
self.raw_password = 'default_password' if extracted is None else extracted
self.set_password(self.raw_password)
if create:
self.save()
Add comment about User factory post_generation
|
from django.contrib.auth.models import User
import factory
class UserFactory(factory.DjangoModelFactory):
username = factory.Sequence('User {}'.format)
class Meta:
model = User
@factory.post_generation
def password(self, create, extracted, **kwargs):
# By using this method password can never be set to `None`!
self.raw_password = 'default_password' if extracted is None else extracted
self.set_password(self.raw_password)
if create:
self.save()
|
<commit_before>from django.contrib.auth.models import User
import factory
class UserFactory(factory.DjangoModelFactory):
username = factory.Sequence('User {}'.format)
class Meta:
model = User
@factory.post_generation
def password(self, create, extracted, **kwargs):
self.raw_password = 'default_password' if extracted is None else extracted
self.set_password(self.raw_password)
if create:
self.save()
<commit_msg>Add comment about User factory post_generation<commit_after>
|
from django.contrib.auth.models import User
import factory
class UserFactory(factory.DjangoModelFactory):
username = factory.Sequence('User {}'.format)
class Meta:
model = User
@factory.post_generation
def password(self, create, extracted, **kwargs):
# By using this method password can never be set to `None`!
self.raw_password = 'default_password' if extracted is None else extracted
self.set_password(self.raw_password)
if create:
self.save()
|
from django.contrib.auth.models import User
import factory
class UserFactory(factory.DjangoModelFactory):
username = factory.Sequence('User {}'.format)
class Meta:
model = User
@factory.post_generation
def password(self, create, extracted, **kwargs):
self.raw_password = 'default_password' if extracted is None else extracted
self.set_password(self.raw_password)
if create:
self.save()
Add comment about User factory post_generationfrom django.contrib.auth.models import User
import factory
class UserFactory(factory.DjangoModelFactory):
username = factory.Sequence('User {}'.format)
class Meta:
model = User
@factory.post_generation
def password(self, create, extracted, **kwargs):
# By using this method password can never be set to `None`!
self.raw_password = 'default_password' if extracted is None else extracted
self.set_password(self.raw_password)
if create:
self.save()
|
<commit_before>from django.contrib.auth.models import User
import factory
class UserFactory(factory.DjangoModelFactory):
username = factory.Sequence('User {}'.format)
class Meta:
model = User
@factory.post_generation
def password(self, create, extracted, **kwargs):
self.raw_password = 'default_password' if extracted is None else extracted
self.set_password(self.raw_password)
if create:
self.save()
<commit_msg>Add comment about User factory post_generation<commit_after>from django.contrib.auth.models import User
import factory
class UserFactory(factory.DjangoModelFactory):
username = factory.Sequence('User {}'.format)
class Meta:
model = User
@factory.post_generation
def password(self, create, extracted, **kwargs):
# By using this method password can never be set to `None`!
self.raw_password = 'default_password' if extracted is None else extracted
self.set_password(self.raw_password)
if create:
self.save()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.