commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c4dae009a376f5ce4f707595c860e6d92f9953ea
|
web/webViews/dockletrequest.py
|
web/webViews/dockletrequest.py
|
import requests
from flask import abort, session
from webViews.log import logger
endpoint = "http://0.0.0.0:9000"
class dockletRequest():
@classmethod
def post(self, url = '/', data = {}):
#try:
data = dict(data)
data['token'] = session['token']
logger.info ("Docklet Request: user = %s data = %s, url = %s"%(session['username'], data, url))
result = requests.post(endpoint + url, data = data).json()
if (result.get('success', None) == "false" and (result.get('reason', None) == "Unauthorized Action" or result.get('Unauthorized', None) == 'True')):
abort(401)
logger.info ("Docklet Response: user = %s result = %s, url = %s"%(session['username'], result, url))
return result
#except:
#abort(500)
@classmethod
def unauthorizedpost(self, url = '/', data = None):
logger.info("Docklet Unauthorized Request: data = %s, url = %s" % (data, url))
result = requests.post(endpoint + url, data = data).json()
logger.info("Docklet Unauthorized Response: result = %s, url = %s"%(result, url))
return result
|
import requests
from flask import abort, session
from webViews.log import logger
endpoint = "http://0.0.0.0:9000"
class dockletRequest():
@classmethod
def post(self, url = '/', data = {}):
#try:
data = dict(data)
data['token'] = session['token']
logger.info ("Docklet Request: user = %s data = %s, url = %s"%(session['username'], data, url))
result = requests.post(endpoint + url, data = data).json()
if (result.get('success', None) == "false" and (result.get('reason', None) == "Unauthorized Action" or result.get('Unauthorized', None) == 'True')):
abort(401)
logger.info ("Docklet Response: user = %s result = %s, url = %s"%(session['username'], result, url))
return result
#except:
#abort(500)
@classmethod
def unauthorizedpost(self, url = '/', data = None):
data = dict(data)
data_log = {'user': data.get('user', 'external')}
logger.info("Docklet Unauthorized Request: data = %s, url = %s" % (data_log, url))
result = requests.post(endpoint + url, data = data).json()
logger.info("Docklet Unauthorized Response: result = %s, url = %s"%(result, url))
return result
|
Fix a bug that will lead to error when external_login() is called
|
Fix a bug that will lead to error when external_login() is called
|
Python
|
bsd-3-clause
|
FirmlyReality/docklet,scorpionis/docklet,caodg/docklet,caodg/docklet,scorpionis/docklet,caodg/docklet,FirmlyReality/docklet,caodg/docklet,FirmlyReality/docklet,FirmlyReality/docklet
|
import requests
from flask import abort, session
from webViews.log import logger
endpoint = "http://0.0.0.0:9000"
class dockletRequest():
@classmethod
def post(self, url = '/', data = {}):
#try:
data = dict(data)
data['token'] = session['token']
logger.info ("Docklet Request: user = %s data = %s, url = %s"%(session['username'], data, url))
result = requests.post(endpoint + url, data = data).json()
if (result.get('success', None) == "false" and (result.get('reason', None) == "Unauthorized Action" or result.get('Unauthorized', None) == 'True')):
abort(401)
logger.info ("Docklet Response: user = %s result = %s, url = %s"%(session['username'], result, url))
return result
#except:
#abort(500)
@classmethod
def unauthorizedpost(self, url = '/', data = None):
logger.info("Docklet Unauthorized Request: data = %s, url = %s" % (data, url))
result = requests.post(endpoint + url, data = data).json()
logger.info("Docklet Unauthorized Response: result = %s, url = %s"%(result, url))
return result
Fix a bug that will lead to error when external_login() is called
|
import requests
from flask import abort, session
from webViews.log import logger
endpoint = "http://0.0.0.0:9000"
class dockletRequest():
@classmethod
def post(self, url = '/', data = {}):
#try:
data = dict(data)
data['token'] = session['token']
logger.info ("Docklet Request: user = %s data = %s, url = %s"%(session['username'], data, url))
result = requests.post(endpoint + url, data = data).json()
if (result.get('success', None) == "false" and (result.get('reason', None) == "Unauthorized Action" or result.get('Unauthorized', None) == 'True')):
abort(401)
logger.info ("Docklet Response: user = %s result = %s, url = %s"%(session['username'], result, url))
return result
#except:
#abort(500)
@classmethod
def unauthorizedpost(self, url = '/', data = None):
data = dict(data)
data_log = {'user': data.get('user', 'external')}
logger.info("Docklet Unauthorized Request: data = %s, url = %s" % (data_log, url))
result = requests.post(endpoint + url, data = data).json()
logger.info("Docklet Unauthorized Response: result = %s, url = %s"%(result, url))
return result
|
<commit_before>import requests
from flask import abort, session
from webViews.log import logger
endpoint = "http://0.0.0.0:9000"
class dockletRequest():
@classmethod
def post(self, url = '/', data = {}):
#try:
data = dict(data)
data['token'] = session['token']
logger.info ("Docklet Request: user = %s data = %s, url = %s"%(session['username'], data, url))
result = requests.post(endpoint + url, data = data).json()
if (result.get('success', None) == "false" and (result.get('reason', None) == "Unauthorized Action" or result.get('Unauthorized', None) == 'True')):
abort(401)
logger.info ("Docklet Response: user = %s result = %s, url = %s"%(session['username'], result, url))
return result
#except:
#abort(500)
@classmethod
def unauthorizedpost(self, url = '/', data = None):
logger.info("Docklet Unauthorized Request: data = %s, url = %s" % (data, url))
result = requests.post(endpoint + url, data = data).json()
logger.info("Docklet Unauthorized Response: result = %s, url = %s"%(result, url))
return result
<commit_msg>Fix a bug that will lead to error when external_login() is called<commit_after>
|
import requests
from flask import abort, session
from webViews.log import logger
endpoint = "http://0.0.0.0:9000"
class dockletRequest():
@classmethod
def post(self, url = '/', data = {}):
#try:
data = dict(data)
data['token'] = session['token']
logger.info ("Docklet Request: user = %s data = %s, url = %s"%(session['username'], data, url))
result = requests.post(endpoint + url, data = data).json()
if (result.get('success', None) == "false" and (result.get('reason', None) == "Unauthorized Action" or result.get('Unauthorized', None) == 'True')):
abort(401)
logger.info ("Docklet Response: user = %s result = %s, url = %s"%(session['username'], result, url))
return result
#except:
#abort(500)
@classmethod
def unauthorizedpost(self, url = '/', data = None):
data = dict(data)
data_log = {'user': data.get('user', 'external')}
logger.info("Docklet Unauthorized Request: data = %s, url = %s" % (data_log, url))
result = requests.post(endpoint + url, data = data).json()
logger.info("Docklet Unauthorized Response: result = %s, url = %s"%(result, url))
return result
|
import requests
from flask import abort, session
from webViews.log import logger
endpoint = "http://0.0.0.0:9000"
class dockletRequest():
@classmethod
def post(self, url = '/', data = {}):
#try:
data = dict(data)
data['token'] = session['token']
logger.info ("Docklet Request: user = %s data = %s, url = %s"%(session['username'], data, url))
result = requests.post(endpoint + url, data = data).json()
if (result.get('success', None) == "false" and (result.get('reason', None) == "Unauthorized Action" or result.get('Unauthorized', None) == 'True')):
abort(401)
logger.info ("Docklet Response: user = %s result = %s, url = %s"%(session['username'], result, url))
return result
#except:
#abort(500)
@classmethod
def unauthorizedpost(self, url = '/', data = None):
logger.info("Docklet Unauthorized Request: data = %s, url = %s" % (data, url))
result = requests.post(endpoint + url, data = data).json()
logger.info("Docklet Unauthorized Response: result = %s, url = %s"%(result, url))
return result
Fix a bug that will lead to error when external_login() is calledimport requests
from flask import abort, session
from webViews.log import logger
endpoint = "http://0.0.0.0:9000"
class dockletRequest():
@classmethod
def post(self, url = '/', data = {}):
#try:
data = dict(data)
data['token'] = session['token']
logger.info ("Docklet Request: user = %s data = %s, url = %s"%(session['username'], data, url))
result = requests.post(endpoint + url, data = data).json()
if (result.get('success', None) == "false" and (result.get('reason', None) == "Unauthorized Action" or result.get('Unauthorized', None) == 'True')):
abort(401)
logger.info ("Docklet Response: user = %s result = %s, url = %s"%(session['username'], result, url))
return result
#except:
#abort(500)
@classmethod
def unauthorizedpost(self, url = '/', data = None):
data = dict(data)
data_log = {'user': data.get('user', 'external')}
logger.info("Docklet Unauthorized Request: data = %s, url = %s" % (data_log, url))
result = requests.post(endpoint + url, data = data).json()
logger.info("Docklet Unauthorized Response: result = %s, url = %s"%(result, url))
return result
|
<commit_before>import requests
from flask import abort, session
from webViews.log import logger
endpoint = "http://0.0.0.0:9000"
class dockletRequest():
@classmethod
def post(self, url = '/', data = {}):
#try:
data = dict(data)
data['token'] = session['token']
logger.info ("Docklet Request: user = %s data = %s, url = %s"%(session['username'], data, url))
result = requests.post(endpoint + url, data = data).json()
if (result.get('success', None) == "false" and (result.get('reason', None) == "Unauthorized Action" or result.get('Unauthorized', None) == 'True')):
abort(401)
logger.info ("Docklet Response: user = %s result = %s, url = %s"%(session['username'], result, url))
return result
#except:
#abort(500)
@classmethod
def unauthorizedpost(self, url = '/', data = None):
logger.info("Docklet Unauthorized Request: data = %s, url = %s" % (data, url))
result = requests.post(endpoint + url, data = data).json()
logger.info("Docklet Unauthorized Response: result = %s, url = %s"%(result, url))
return result
<commit_msg>Fix a bug that will lead to error when external_login() is called<commit_after>import requests
from flask import abort, session
from webViews.log import logger
endpoint = "http://0.0.0.0:9000"
class dockletRequest():
@classmethod
def post(self, url = '/', data = {}):
#try:
data = dict(data)
data['token'] = session['token']
logger.info ("Docklet Request: user = %s data = %s, url = %s"%(session['username'], data, url))
result = requests.post(endpoint + url, data = data).json()
if (result.get('success', None) == "false" and (result.get('reason', None) == "Unauthorized Action" or result.get('Unauthorized', None) == 'True')):
abort(401)
logger.info ("Docklet Response: user = %s result = %s, url = %s"%(session['username'], result, url))
return result
#except:
#abort(500)
@classmethod
def unauthorizedpost(self, url = '/', data = None):
data = dict(data)
data_log = {'user': data.get('user', 'external')}
logger.info("Docklet Unauthorized Request: data = %s, url = %s" % (data_log, url))
result = requests.post(endpoint + url, data = data).json()
logger.info("Docklet Unauthorized Response: result = %s, url = %s"%(result, url))
return result
|
aa07f5afe1b976e6b7f503056387e184ec0b64c3
|
phileo/models.py
|
phileo/models.py
|
import datetime
from django.db import models
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
class Like(models.Model):
sender = models.ForeignKey(User, related_name="liking")
receiver_content_type = models.ForeignKey(ContentType)
receiver_object_id = models.PositiveIntegerField()
receiver = generic.GenericForeignKey(
ct_field="receiver_content_type",
fk_field="receiver_object_id"
)
timestamp = models.DateTimeField(default=datetime.datetime.now)
class Meta:
unique_together = (
("sender", "receiver_content_type", "receiver_object_id"),
)
def __unicode__(self):
return "%s likes %s" % (self.sender, self.receiver)
|
from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
class Like(models.Model):
sender = models.ForeignKey(User, related_name="liking")
receiver_content_type = models.ForeignKey(ContentType)
receiver_object_id = models.PositiveIntegerField()
receiver = generic.GenericForeignKey(
ct_field="receiver_content_type",
fk_field="receiver_object_id"
)
timestamp = models.DateTimeField(default=timezone.now)
class Meta:
unique_together = (
("sender", "receiver_content_type", "receiver_object_id"),
)
def __unicode__(self):
return "%s likes %s" % (self.sender, self.receiver)
|
Update the use of now()
|
Update the use of now()
|
Python
|
mit
|
rizumu/pinax-likes,pinax/pinax-likes,jacobwegner/phileo,pinax/phileo,rizumu/pinax-likes,pinax/phileo,jacobwegner/phileo
|
import datetime
from django.db import models
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
class Like(models.Model):
sender = models.ForeignKey(User, related_name="liking")
receiver_content_type = models.ForeignKey(ContentType)
receiver_object_id = models.PositiveIntegerField()
receiver = generic.GenericForeignKey(
ct_field="receiver_content_type",
fk_field="receiver_object_id"
)
timestamp = models.DateTimeField(default=datetime.datetime.now)
class Meta:
unique_together = (
("sender", "receiver_content_type", "receiver_object_id"),
)
def __unicode__(self):
return "%s likes %s" % (self.sender, self.receiver)
Update the use of now()
|
from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
class Like(models.Model):
sender = models.ForeignKey(User, related_name="liking")
receiver_content_type = models.ForeignKey(ContentType)
receiver_object_id = models.PositiveIntegerField()
receiver = generic.GenericForeignKey(
ct_field="receiver_content_type",
fk_field="receiver_object_id"
)
timestamp = models.DateTimeField(default=timezone.now)
class Meta:
unique_together = (
("sender", "receiver_content_type", "receiver_object_id"),
)
def __unicode__(self):
return "%s likes %s" % (self.sender, self.receiver)
|
<commit_before>import datetime
from django.db import models
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
class Like(models.Model):
sender = models.ForeignKey(User, related_name="liking")
receiver_content_type = models.ForeignKey(ContentType)
receiver_object_id = models.PositiveIntegerField()
receiver = generic.GenericForeignKey(
ct_field="receiver_content_type",
fk_field="receiver_object_id"
)
timestamp = models.DateTimeField(default=datetime.datetime.now)
class Meta:
unique_together = (
("sender", "receiver_content_type", "receiver_object_id"),
)
def __unicode__(self):
return "%s likes %s" % (self.sender, self.receiver)
<commit_msg>Update the use of now()<commit_after>
|
from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
class Like(models.Model):
sender = models.ForeignKey(User, related_name="liking")
receiver_content_type = models.ForeignKey(ContentType)
receiver_object_id = models.PositiveIntegerField()
receiver = generic.GenericForeignKey(
ct_field="receiver_content_type",
fk_field="receiver_object_id"
)
timestamp = models.DateTimeField(default=timezone.now)
class Meta:
unique_together = (
("sender", "receiver_content_type", "receiver_object_id"),
)
def __unicode__(self):
return "%s likes %s" % (self.sender, self.receiver)
|
import datetime
from django.db import models
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
class Like(models.Model):
sender = models.ForeignKey(User, related_name="liking")
receiver_content_type = models.ForeignKey(ContentType)
receiver_object_id = models.PositiveIntegerField()
receiver = generic.GenericForeignKey(
ct_field="receiver_content_type",
fk_field="receiver_object_id"
)
timestamp = models.DateTimeField(default=datetime.datetime.now)
class Meta:
unique_together = (
("sender", "receiver_content_type", "receiver_object_id"),
)
def __unicode__(self):
return "%s likes %s" % (self.sender, self.receiver)
Update the use of now()from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
class Like(models.Model):
sender = models.ForeignKey(User, related_name="liking")
receiver_content_type = models.ForeignKey(ContentType)
receiver_object_id = models.PositiveIntegerField()
receiver = generic.GenericForeignKey(
ct_field="receiver_content_type",
fk_field="receiver_object_id"
)
timestamp = models.DateTimeField(default=timezone.now)
class Meta:
unique_together = (
("sender", "receiver_content_type", "receiver_object_id"),
)
def __unicode__(self):
return "%s likes %s" % (self.sender, self.receiver)
|
<commit_before>import datetime
from django.db import models
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
class Like(models.Model):
sender = models.ForeignKey(User, related_name="liking")
receiver_content_type = models.ForeignKey(ContentType)
receiver_object_id = models.PositiveIntegerField()
receiver = generic.GenericForeignKey(
ct_field="receiver_content_type",
fk_field="receiver_object_id"
)
timestamp = models.DateTimeField(default=datetime.datetime.now)
class Meta:
unique_together = (
("sender", "receiver_content_type", "receiver_object_id"),
)
def __unicode__(self):
return "%s likes %s" % (self.sender, self.receiver)
<commit_msg>Update the use of now()<commit_after>from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
class Like(models.Model):
sender = models.ForeignKey(User, related_name="liking")
receiver_content_type = models.ForeignKey(ContentType)
receiver_object_id = models.PositiveIntegerField()
receiver = generic.GenericForeignKey(
ct_field="receiver_content_type",
fk_field="receiver_object_id"
)
timestamp = models.DateTimeField(default=timezone.now)
class Meta:
unique_together = (
("sender", "receiver_content_type", "receiver_object_id"),
)
def __unicode__(self):
return "%s likes %s" % (self.sender, self.receiver)
|
6dab43543e1b6a1e1e8119db9b38cc685dd81f82
|
ckanext/qa/controllers/base.py
|
ckanext/qa/controllers/base.py
|
from ckan.lib.base import BaseController
from pylons import config
class QAController(BaseController):
def __init__(self, *args, **kwargs):
super(QAController, self).__init(*args, **kwargs)
|
from ckan.lib.base import BaseController
from pylons import config
class QAController(BaseController):
pass
|
Fix typo in constructor. Seems unnecessary anyway.
|
Fix typo in constructor. Seems unnecessary anyway.
|
Python
|
mit
|
ckan/ckanext-qa,ckan/ckanext-qa,ckan/ckanext-qa
|
from ckan.lib.base import BaseController
from pylons import config
class QAController(BaseController):
def __init__(self, *args, **kwargs):
super(QAController, self).__init(*args, **kwargs)Fix typo in constructor. Seems unnecessary anyway.
|
from ckan.lib.base import BaseController
from pylons import config
class QAController(BaseController):
pass
|
<commit_before>from ckan.lib.base import BaseController
from pylons import config
class QAController(BaseController):
def __init__(self, *args, **kwargs):
super(QAController, self).__init(*args, **kwargs)<commit_msg>Fix typo in constructor. Seems unnecessary anyway.<commit_after>
|
from ckan.lib.base import BaseController
from pylons import config
class QAController(BaseController):
pass
|
from ckan.lib.base import BaseController
from pylons import config
class QAController(BaseController):
def __init__(self, *args, **kwargs):
super(QAController, self).__init(*args, **kwargs)Fix typo in constructor. Seems unnecessary anyway.from ckan.lib.base import BaseController
from pylons import config
class QAController(BaseController):
pass
|
<commit_before>from ckan.lib.base import BaseController
from pylons import config
class QAController(BaseController):
def __init__(self, *args, **kwargs):
super(QAController, self).__init(*args, **kwargs)<commit_msg>Fix typo in constructor. Seems unnecessary anyway.<commit_after>from ckan.lib.base import BaseController
from pylons import config
class QAController(BaseController):
pass
|
461f23a52569067a26c18dbf70a830c0494c0342
|
deepchem/models/torch_models/__init__.py
|
deepchem/models/torch_models/__init__.py
|
# flake8:noqa
from deepchem.models.torch_models.torch_model import TorchModel
from deepchem.models.torch_models.attentivefp import AttentiveFP, AttentiveFPModel
from deepchem.models.torch_models.cgcnn import CGCNN, CGCNNModel
from deepchem.models.torch_models.gat import GAT, GATModel
from deepchem.models.torch_models.gcn import GCN, GCNModel
from deepchem.models.torch_models.mpnn import MPNN, MPNNModel
from deepchem.models.torch_models.lcnn import LCNN, LCNNModel
from deepchem.models.torch_models.pagtn import Pagtn, PagtnModel
from deepchem.models.torch_models.mat import MAT, MATModel
from deepchem.models.torch_models.megnet import MEGNetModel
from deepchem.models.torch_models.layers import CNNModule
from deepchem.models.torch_models.cnn import CNN
|
# flake8:noqa
from deepchem.models.torch_models.torch_model import TorchModel
from deepchem.models.torch_models.attentivefp import AttentiveFP, AttentiveFPModel
from deepchem.models.torch_models.cgcnn import CGCNN, CGCNNModel
from deepchem.models.torch_models.gat import GAT, GATModel
from deepchem.models.torch_models.gcn import GCN, GCNModel
from deepchem.models.torch_models.mpnn import MPNN, MPNNModel
from deepchem.models.torch_models.lcnn import LCNN, LCNNModel
from deepchem.models.torch_models.pagtn import Pagtn, PagtnModel
from deepchem.models.torch_models.mat import MAT, MATModel
from deepchem.models.torch_models.megnet import MEGNetModel
from deepchem.models.torch_models.layers import CNNModule, NeighborList
from deepchem.models.torch_models.cnn import CNN
|
Add layer to module imports
|
Add layer to module imports
|
Python
|
mit
|
deepchem/deepchem,deepchem/deepchem
|
# flake8:noqa
from deepchem.models.torch_models.torch_model import TorchModel
from deepchem.models.torch_models.attentivefp import AttentiveFP, AttentiveFPModel
from deepchem.models.torch_models.cgcnn import CGCNN, CGCNNModel
from deepchem.models.torch_models.gat import GAT, GATModel
from deepchem.models.torch_models.gcn import GCN, GCNModel
from deepchem.models.torch_models.mpnn import MPNN, MPNNModel
from deepchem.models.torch_models.lcnn import LCNN, LCNNModel
from deepchem.models.torch_models.pagtn import Pagtn, PagtnModel
from deepchem.models.torch_models.mat import MAT, MATModel
from deepchem.models.torch_models.megnet import MEGNetModel
from deepchem.models.torch_models.layers import CNNModule
from deepchem.models.torch_models.cnn import CNN
Add layer to module imports
|
# flake8:noqa
from deepchem.models.torch_models.torch_model import TorchModel
from deepchem.models.torch_models.attentivefp import AttentiveFP, AttentiveFPModel
from deepchem.models.torch_models.cgcnn import CGCNN, CGCNNModel
from deepchem.models.torch_models.gat import GAT, GATModel
from deepchem.models.torch_models.gcn import GCN, GCNModel
from deepchem.models.torch_models.mpnn import MPNN, MPNNModel
from deepchem.models.torch_models.lcnn import LCNN, LCNNModel
from deepchem.models.torch_models.pagtn import Pagtn, PagtnModel
from deepchem.models.torch_models.mat import MAT, MATModel
from deepchem.models.torch_models.megnet import MEGNetModel
from deepchem.models.torch_models.layers import CNNModule, NeighborList
from deepchem.models.torch_models.cnn import CNN
|
<commit_before># flake8:noqa
from deepchem.models.torch_models.torch_model import TorchModel
from deepchem.models.torch_models.attentivefp import AttentiveFP, AttentiveFPModel
from deepchem.models.torch_models.cgcnn import CGCNN, CGCNNModel
from deepchem.models.torch_models.gat import GAT, GATModel
from deepchem.models.torch_models.gcn import GCN, GCNModel
from deepchem.models.torch_models.mpnn import MPNN, MPNNModel
from deepchem.models.torch_models.lcnn import LCNN, LCNNModel
from deepchem.models.torch_models.pagtn import Pagtn, PagtnModel
from deepchem.models.torch_models.mat import MAT, MATModel
from deepchem.models.torch_models.megnet import MEGNetModel
from deepchem.models.torch_models.layers import CNNModule
from deepchem.models.torch_models.cnn import CNN
<commit_msg>Add layer to module imports<commit_after>
|
# flake8:noqa
from deepchem.models.torch_models.torch_model import TorchModel
from deepchem.models.torch_models.attentivefp import AttentiveFP, AttentiveFPModel
from deepchem.models.torch_models.cgcnn import CGCNN, CGCNNModel
from deepchem.models.torch_models.gat import GAT, GATModel
from deepchem.models.torch_models.gcn import GCN, GCNModel
from deepchem.models.torch_models.mpnn import MPNN, MPNNModel
from deepchem.models.torch_models.lcnn import LCNN, LCNNModel
from deepchem.models.torch_models.pagtn import Pagtn, PagtnModel
from deepchem.models.torch_models.mat import MAT, MATModel
from deepchem.models.torch_models.megnet import MEGNetModel
from deepchem.models.torch_models.layers import CNNModule, NeighborList
from deepchem.models.torch_models.cnn import CNN
|
# flake8:noqa
from deepchem.models.torch_models.torch_model import TorchModel
from deepchem.models.torch_models.attentivefp import AttentiveFP, AttentiveFPModel
from deepchem.models.torch_models.cgcnn import CGCNN, CGCNNModel
from deepchem.models.torch_models.gat import GAT, GATModel
from deepchem.models.torch_models.gcn import GCN, GCNModel
from deepchem.models.torch_models.mpnn import MPNN, MPNNModel
from deepchem.models.torch_models.lcnn import LCNN, LCNNModel
from deepchem.models.torch_models.pagtn import Pagtn, PagtnModel
from deepchem.models.torch_models.mat import MAT, MATModel
from deepchem.models.torch_models.megnet import MEGNetModel
from deepchem.models.torch_models.layers import CNNModule
from deepchem.models.torch_models.cnn import CNN
Add layer to module imports# flake8:noqa
from deepchem.models.torch_models.torch_model import TorchModel
from deepchem.models.torch_models.attentivefp import AttentiveFP, AttentiveFPModel
from deepchem.models.torch_models.cgcnn import CGCNN, CGCNNModel
from deepchem.models.torch_models.gat import GAT, GATModel
from deepchem.models.torch_models.gcn import GCN, GCNModel
from deepchem.models.torch_models.mpnn import MPNN, MPNNModel
from deepchem.models.torch_models.lcnn import LCNN, LCNNModel
from deepchem.models.torch_models.pagtn import Pagtn, PagtnModel
from deepchem.models.torch_models.mat import MAT, MATModel
from deepchem.models.torch_models.megnet import MEGNetModel
from deepchem.models.torch_models.layers import CNNModule, NeighborList
from deepchem.models.torch_models.cnn import CNN
|
<commit_before># flake8:noqa
from deepchem.models.torch_models.torch_model import TorchModel
from deepchem.models.torch_models.attentivefp import AttentiveFP, AttentiveFPModel
from deepchem.models.torch_models.cgcnn import CGCNN, CGCNNModel
from deepchem.models.torch_models.gat import GAT, GATModel
from deepchem.models.torch_models.gcn import GCN, GCNModel
from deepchem.models.torch_models.mpnn import MPNN, MPNNModel
from deepchem.models.torch_models.lcnn import LCNN, LCNNModel
from deepchem.models.torch_models.pagtn import Pagtn, PagtnModel
from deepchem.models.torch_models.mat import MAT, MATModel
from deepchem.models.torch_models.megnet import MEGNetModel
from deepchem.models.torch_models.layers import CNNModule
from deepchem.models.torch_models.cnn import CNN
<commit_msg>Add layer to module imports<commit_after># flake8:noqa
from deepchem.models.torch_models.torch_model import TorchModel
from deepchem.models.torch_models.attentivefp import AttentiveFP, AttentiveFPModel
from deepchem.models.torch_models.cgcnn import CGCNN, CGCNNModel
from deepchem.models.torch_models.gat import GAT, GATModel
from deepchem.models.torch_models.gcn import GCN, GCNModel
from deepchem.models.torch_models.mpnn import MPNN, MPNNModel
from deepchem.models.torch_models.lcnn import LCNN, LCNNModel
from deepchem.models.torch_models.pagtn import Pagtn, PagtnModel
from deepchem.models.torch_models.mat import MAT, MATModel
from deepchem.models.torch_models.megnet import MEGNetModel
from deepchem.models.torch_models.layers import CNNModule, NeighborList
from deepchem.models.torch_models.cnn import CNN
|
93bf6cafa078978890df74a75355a48345f40534
|
django_bootstrap_calendar/serializers.py
|
django_bootstrap_calendar/serializers.py
|
# -*- coding: utf-8 -*-
__author__ = 'sandlbn'
from django.utils import simplejson
from django.db.models.query import QuerySet
def event_serializer(events):
"""
serialize event model
"""
objects_body = []
if isinstance(events, QuerySet):
for event in events:
field = {
"id": event.pk,
"title": event.title,
"url": event.url,
"class": event.css_class,
"start": event.start_timestamp,
"end": event.end_timestamp
}
objects_body.append(field)
objects_head = {"success": 1}
objects_head["result"] = objects_body
return simplejson.dumps(objects_head, encoding='utf-8')
|
# -*- coding: utf-8 -*-
__author__ = 'sandlbn'
import json
from django.db.models.query import QuerySet
def event_serializer(events):
"""
serialize event model
"""
objects_body = []
if isinstance(events, QuerySet):
for event in events:
field = {
"id": event.pk,
"title": event.title,
"url": event.url,
"class": event.css_class,
"start": event.start_timestamp,
"end": event.end_timestamp
}
objects_body.append(field)
objects_head = {"success": 1}
objects_head["result"] = objects_body
return json.dumps(objects_head)
|
Use json (from stdlib) instead of simplejson from django utils
|
Use json (from stdlib) instead of simplejson from django utils
|
Python
|
bsd-3-clause
|
sandlbn/django-bootstrap-calendar,mfmarlonferrari/django-bootstrap-calendar,arbitrahj/django-bootstrap-calendar,arbitrahj/django-bootstrap-calendar,dannybrowne86/django-bootstrap-calendar,tiagovaz/django-bootstrap-calendar,mfmarlonferrari/django-bootstrap-calendar,sandlbn/django-bootstrap-calendar,mfmarlonferrari/django-bootstrap-calendar,tiagovaz/django-bootstrap-calendar,dannybrowne86/django-bootstrap-calendar,sandlbn/django-bootstrap-calendar,dannybrowne86/django-bootstrap-calendar,arbitrahj/django-bootstrap-calendar,tiagovaz/django-bootstrap-calendar
|
# -*- coding: utf-8 -*-
__author__ = 'sandlbn'
from django.utils import simplejson
from django.db.models.query import QuerySet
def event_serializer(events):
"""
serialize event model
"""
objects_body = []
if isinstance(events, QuerySet):
for event in events:
field = {
"id": event.pk,
"title": event.title,
"url": event.url,
"class": event.css_class,
"start": event.start_timestamp,
"end": event.end_timestamp
}
objects_body.append(field)
objects_head = {"success": 1}
objects_head["result"] = objects_body
return simplejson.dumps(objects_head, encoding='utf-8')
Use json (from stdlib) instead of simplejson from django utils
|
# -*- coding: utf-8 -*-
__author__ = 'sandlbn'
import json
from django.db.models.query import QuerySet
def event_serializer(events):
"""
serialize event model
"""
objects_body = []
if isinstance(events, QuerySet):
for event in events:
field = {
"id": event.pk,
"title": event.title,
"url": event.url,
"class": event.css_class,
"start": event.start_timestamp,
"end": event.end_timestamp
}
objects_body.append(field)
objects_head = {"success": 1}
objects_head["result"] = objects_body
return json.dumps(objects_head)
|
<commit_before># -*- coding: utf-8 -*-
__author__ = 'sandlbn'
from django.utils import simplejson
from django.db.models.query import QuerySet
def event_serializer(events):
"""
serialize event model
"""
objects_body = []
if isinstance(events, QuerySet):
for event in events:
field = {
"id": event.pk,
"title": event.title,
"url": event.url,
"class": event.css_class,
"start": event.start_timestamp,
"end": event.end_timestamp
}
objects_body.append(field)
objects_head = {"success": 1}
objects_head["result"] = objects_body
return simplejson.dumps(objects_head, encoding='utf-8')
<commit_msg>Use json (from stdlib) instead of simplejson from django utils<commit_after>
|
# -*- coding: utf-8 -*-
__author__ = 'sandlbn'
import json
from django.db.models.query import QuerySet
def event_serializer(events):
"""
serialize event model
"""
objects_body = []
if isinstance(events, QuerySet):
for event in events:
field = {
"id": event.pk,
"title": event.title,
"url": event.url,
"class": event.css_class,
"start": event.start_timestamp,
"end": event.end_timestamp
}
objects_body.append(field)
objects_head = {"success": 1}
objects_head["result"] = objects_body
return json.dumps(objects_head)
|
# -*- coding: utf-8 -*-
__author__ = 'sandlbn'
from django.utils import simplejson
from django.db.models.query import QuerySet
def event_serializer(events):
"""
serialize event model
"""
objects_body = []
if isinstance(events, QuerySet):
for event in events:
field = {
"id": event.pk,
"title": event.title,
"url": event.url,
"class": event.css_class,
"start": event.start_timestamp,
"end": event.end_timestamp
}
objects_body.append(field)
objects_head = {"success": 1}
objects_head["result"] = objects_body
return simplejson.dumps(objects_head, encoding='utf-8')
Use json (from stdlib) instead of simplejson from django utils# -*- coding: utf-8 -*-
__author__ = 'sandlbn'
import json
from django.db.models.query import QuerySet
def event_serializer(events):
"""
serialize event model
"""
objects_body = []
if isinstance(events, QuerySet):
for event in events:
field = {
"id": event.pk,
"title": event.title,
"url": event.url,
"class": event.css_class,
"start": event.start_timestamp,
"end": event.end_timestamp
}
objects_body.append(field)
objects_head = {"success": 1}
objects_head["result"] = objects_body
return json.dumps(objects_head)
|
<commit_before># -*- coding: utf-8 -*-
__author__ = 'sandlbn'
from django.utils import simplejson
from django.db.models.query import QuerySet
def event_serializer(events):
"""
serialize event model
"""
objects_body = []
if isinstance(events, QuerySet):
for event in events:
field = {
"id": event.pk,
"title": event.title,
"url": event.url,
"class": event.css_class,
"start": event.start_timestamp,
"end": event.end_timestamp
}
objects_body.append(field)
objects_head = {"success": 1}
objects_head["result"] = objects_body
return simplejson.dumps(objects_head, encoding='utf-8')
<commit_msg>Use json (from stdlib) instead of simplejson from django utils<commit_after># -*- coding: utf-8 -*-
__author__ = 'sandlbn'
import json
from django.db.models.query import QuerySet
def event_serializer(events):
"""
serialize event model
"""
objects_body = []
if isinstance(events, QuerySet):
for event in events:
field = {
"id": event.pk,
"title": event.title,
"url": event.url,
"class": event.css_class,
"start": event.start_timestamp,
"end": event.end_timestamp
}
objects_body.append(field)
objects_head = {"success": 1}
objects_head["result"] = objects_body
return json.dumps(objects_head)
|
c9ffe4fb86ccd39d199c953c860a9076cb309e0c
|
labonneboite/importer/jobs/check_etablissements.py
|
labonneboite/importer/jobs/check_etablissements.py
|
import sys
from labonneboite.importer import util as import_util
from labonneboite.importer import settings
if __name__ == "__main__":
filename = import_util.detect_runnable_file("etablissements")
if filename:
with open(settings.JENKINS_ETAB_PROPERTIES_FILENAME, "w") as f:
f.write("LBB_ETABLISSEMENT_INPUT_FILE=%s\n" % filename)
sys.exit(0)
else:
sys.exit(-1)
|
import sys
from labonneboite.importer import util as import_util
from labonneboite.importer import settings
def run():
filename = import_util.detect_runnable_file("etablissements")
if filename:
with open(settings.JENKINS_ETAB_PROPERTIES_FILENAME, "w") as f:
f.write("LBB_ETABLISSEMENT_INPUT_FILE=%s\n" % filename)
sys.exit(0)
else:
sys.exit(-1)
if __name__ == '__main__':
run()
|
Add a run method for the entry point
|
Add a run method for the entry point
|
Python
|
agpl-3.0
|
StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite
|
import sys
from labonneboite.importer import util as import_util
from labonneboite.importer import settings
if __name__ == "__main__":
filename = import_util.detect_runnable_file("etablissements")
if filename:
with open(settings.JENKINS_ETAB_PROPERTIES_FILENAME, "w") as f:
f.write("LBB_ETABLISSEMENT_INPUT_FILE=%s\n" % filename)
sys.exit(0)
else:
sys.exit(-1)
Add a run method for the entry point
|
import sys
from labonneboite.importer import util as import_util
from labonneboite.importer import settings
def run():
filename = import_util.detect_runnable_file("etablissements")
if filename:
with open(settings.JENKINS_ETAB_PROPERTIES_FILENAME, "w") as f:
f.write("LBB_ETABLISSEMENT_INPUT_FILE=%s\n" % filename)
sys.exit(0)
else:
sys.exit(-1)
if __name__ == '__main__':
run()
|
<commit_before>import sys
from labonneboite.importer import util as import_util
from labonneboite.importer import settings
if __name__ == "__main__":
filename = import_util.detect_runnable_file("etablissements")
if filename:
with open(settings.JENKINS_ETAB_PROPERTIES_FILENAME, "w") as f:
f.write("LBB_ETABLISSEMENT_INPUT_FILE=%s\n" % filename)
sys.exit(0)
else:
sys.exit(-1)
<commit_msg>Add a run method for the entry point<commit_after>
|
import sys
from labonneboite.importer import util as import_util
from labonneboite.importer import settings
def run():
filename = import_util.detect_runnable_file("etablissements")
if filename:
with open(settings.JENKINS_ETAB_PROPERTIES_FILENAME, "w") as f:
f.write("LBB_ETABLISSEMENT_INPUT_FILE=%s\n" % filename)
sys.exit(0)
else:
sys.exit(-1)
if __name__ == '__main__':
run()
|
import sys
from labonneboite.importer import util as import_util
from labonneboite.importer import settings
if __name__ == "__main__":
filename = import_util.detect_runnable_file("etablissements")
if filename:
with open(settings.JENKINS_ETAB_PROPERTIES_FILENAME, "w") as f:
f.write("LBB_ETABLISSEMENT_INPUT_FILE=%s\n" % filename)
sys.exit(0)
else:
sys.exit(-1)
Add a run method for the entry pointimport sys
from labonneboite.importer import util as import_util
from labonneboite.importer import settings
def run():
filename = import_util.detect_runnable_file("etablissements")
if filename:
with open(settings.JENKINS_ETAB_PROPERTIES_FILENAME, "w") as f:
f.write("LBB_ETABLISSEMENT_INPUT_FILE=%s\n" % filename)
sys.exit(0)
else:
sys.exit(-1)
if __name__ == '__main__':
run()
|
<commit_before>import sys
from labonneboite.importer import util as import_util
from labonneboite.importer import settings
if __name__ == "__main__":
filename = import_util.detect_runnable_file("etablissements")
if filename:
with open(settings.JENKINS_ETAB_PROPERTIES_FILENAME, "w") as f:
f.write("LBB_ETABLISSEMENT_INPUT_FILE=%s\n" % filename)
sys.exit(0)
else:
sys.exit(-1)
<commit_msg>Add a run method for the entry point<commit_after>import sys
from labonneboite.importer import util as import_util
from labonneboite.importer import settings
def run():
filename = import_util.detect_runnable_file("etablissements")
if filename:
with open(settings.JENKINS_ETAB_PROPERTIES_FILENAME, "w") as f:
f.write("LBB_ETABLISSEMENT_INPUT_FILE=%s\n" % filename)
sys.exit(0)
else:
sys.exit(-1)
if __name__ == '__main__':
run()
|
25da28f685b9cffa86b9400957089bdd7b5513de
|
kaptan/handlers/yaml_handler.py
|
kaptan/handlers/yaml_handler.py
|
# -*- coding: utf8 -*-
"""
kaptan.handlers.yaml_handler
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2013 by the authors and contributors (See AUTHORS file).
:license: BSD, see LICENSE for more details.
"""
from __future__ import print_function, unicode_literals
import yaml
from . import BaseHandler
class YamlHandler(BaseHandler):
def load(self, data):
return yaml.load(data)
def dump(self, data, safe=False, **kwargs):
if not safe:
return yaml.dump(data, **kwargs)
else:
return yaml.safe_dump(data, **kwargs)
|
# -*- coding: utf8 -*-
"""
kaptan.handlers.yaml_handler
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2013 by the authors and contributors (See AUTHORS file).
:license: BSD, see LICENSE for more details.
"""
from __future__ import print_function, unicode_literals
import yaml
from . import BaseHandler
class YamlHandler(BaseHandler):
def load(self, data, safe=True):
if safe:
func = yaml.safe_load
else:
func = yaml.load
return func(data)
def dump(self, data, safe=True, **kwargs):
if safe:
func = yaml.safe_dump
else:
func = yaml.dump
return func(data, **kwargs)
|
Make YAML handler safe by default
|
Make YAML handler safe by default
Fixes #43
|
Python
|
bsd-3-clause
|
emre/kaptan
|
# -*- coding: utf8 -*-
"""
kaptan.handlers.yaml_handler
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2013 by the authors and contributors (See AUTHORS file).
:license: BSD, see LICENSE for more details.
"""
from __future__ import print_function, unicode_literals
import yaml
from . import BaseHandler
class YamlHandler(BaseHandler):
def load(self, data):
return yaml.load(data)
def dump(self, data, safe=False, **kwargs):
if not safe:
return yaml.dump(data, **kwargs)
else:
return yaml.safe_dump(data, **kwargs)
Make YAML handler safe by default
Fixes #43
|
# -*- coding: utf8 -*-
"""
kaptan.handlers.yaml_handler
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2013 by the authors and contributors (See AUTHORS file).
:license: BSD, see LICENSE for more details.
"""
from __future__ import print_function, unicode_literals
import yaml
from . import BaseHandler
class YamlHandler(BaseHandler):
def load(self, data, safe=True):
if safe:
func = yaml.safe_load
else:
func = yaml.load
return func(data)
def dump(self, data, safe=True, **kwargs):
if safe:
func = yaml.safe_dump
else:
func = yaml.dump
return func(data, **kwargs)
|
<commit_before># -*- coding: utf8 -*-
"""
kaptan.handlers.yaml_handler
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2013 by the authors and contributors (See AUTHORS file).
:license: BSD, see LICENSE for more details.
"""
from __future__ import print_function, unicode_literals
import yaml
from . import BaseHandler
class YamlHandler(BaseHandler):
def load(self, data):
return yaml.load(data)
def dump(self, data, safe=False, **kwargs):
if not safe:
return yaml.dump(data, **kwargs)
else:
return yaml.safe_dump(data, **kwargs)
<commit_msg>Make YAML handler safe by default
Fixes #43<commit_after>
|
# -*- coding: utf8 -*-
"""
kaptan.handlers.yaml_handler
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2013 by the authors and contributors (See AUTHORS file).
:license: BSD, see LICENSE for more details.
"""
from __future__ import print_function, unicode_literals
import yaml
from . import BaseHandler
class YamlHandler(BaseHandler):
def load(self, data, safe=True):
if safe:
func = yaml.safe_load
else:
func = yaml.load
return func(data)
def dump(self, data, safe=True, **kwargs):
if safe:
func = yaml.safe_dump
else:
func = yaml.dump
return func(data, **kwargs)
|
# -*- coding: utf8 -*-
"""
kaptan.handlers.yaml_handler
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2013 by the authors and contributors (See AUTHORS file).
:license: BSD, see LICENSE for more details.
"""
from __future__ import print_function, unicode_literals
import yaml
from . import BaseHandler
class YamlHandler(BaseHandler):
def load(self, data):
return yaml.load(data)
def dump(self, data, safe=False, **kwargs):
if not safe:
return yaml.dump(data, **kwargs)
else:
return yaml.safe_dump(data, **kwargs)
Make YAML handler safe by default
Fixes #43# -*- coding: utf8 -*-
"""
kaptan.handlers.yaml_handler
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2013 by the authors and contributors (See AUTHORS file).
:license: BSD, see LICENSE for more details.
"""
from __future__ import print_function, unicode_literals
import yaml
from . import BaseHandler
class YamlHandler(BaseHandler):
def load(self, data, safe=True):
if safe:
func = yaml.safe_load
else:
func = yaml.load
return func(data)
def dump(self, data, safe=True, **kwargs):
if safe:
func = yaml.safe_dump
else:
func = yaml.dump
return func(data, **kwargs)
|
<commit_before># -*- coding: utf8 -*-
"""
kaptan.handlers.yaml_handler
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2013 by the authors and contributors (See AUTHORS file).
:license: BSD, see LICENSE for more details.
"""
from __future__ import print_function, unicode_literals
import yaml
from . import BaseHandler
class YamlHandler(BaseHandler):
def load(self, data):
return yaml.load(data)
def dump(self, data, safe=False, **kwargs):
if not safe:
return yaml.dump(data, **kwargs)
else:
return yaml.safe_dump(data, **kwargs)
<commit_msg>Make YAML handler safe by default
Fixes #43<commit_after># -*- coding: utf8 -*-
"""
kaptan.handlers.yaml_handler
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2013 by the authors and contributors (See AUTHORS file).
:license: BSD, see LICENSE for more details.
"""
from __future__ import print_function, unicode_literals
import yaml
from . import BaseHandler
class YamlHandler(BaseHandler):
def load(self, data, safe=True):
if safe:
func = yaml.safe_load
else:
func = yaml.load
return func(data)
def dump(self, data, safe=True, **kwargs):
if safe:
func = yaml.safe_dump
else:
func = yaml.dump
return func(data, **kwargs)
|
15d87fb06b3882334f48fd71b258e915dbefa6e1
|
koans/about_list_assignments.py
|
koans/about_list_assignments.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutArrayAssignments in the Ruby Koans
#
from runner.koan import *
class AboutListAssignments(Koan):
def test_non_parallel_assignment(self):
names = ["John", "Smith"]
self.assertEqual(__, names)
def test_parallel_assignments(self):
first_name, last_name = ["John", "Smith"]
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
def test_parallel_assignments_with_extra_values(self):
title, *first_names, last_name = ["Sir", "Ricky", "Bobby", "Worthington"]
self.assertEqual(__, title)
self.assertEqual(__, first_names)
self.assertEqual(__, last_name)
def test_parallel_assignments_with_sublists(self):
first_name, last_name = [["Willie", "Rae"], "Johnson"]
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
def test_swapping_with_parallel_assignment(self):
first_name = "Roy"
last_name = "Rob"
first_name, last_name = last_name, first_name
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutArrayAssignments in the Ruby Koans
#
from runner.koan import *
class AboutListAssignments(Koan):
def test_non_parallel_assignment(self):
names = ["John", "Smith"]
self.assertEqual(["John", "Smith"], names)
def test_parallel_assignments(self):
first_name, last_name = ["John", "Smith"]
self.assertEqual("John", first_name)
self.assertEqual("Smith", last_name)
def test_parallel_assignments_with_extra_values(self):
title, *first_names, last_name = ["Sir", "Ricky", "Bobby", "Worthington"]
self.assertEqual("Sir", title)
self.assertEqual(["Ricky", "Bobby"], first_names)
self.assertEqual("Worthington", last_name)
def test_parallel_assignments_with_sublists(self):
first_name, last_name = [["Willie", "Rae"], "Johnson"]
self.assertEqual(["Willie", "Rae"], first_name)
self.assertEqual("Johnson", last_name)
def test_swapping_with_parallel_assignment(self):
first_name = "Roy"
last_name = "Rob"
first_name, last_name = last_name, first_name
self.assertEqual("Rob", first_name)
self.assertEqual("Roy", last_name)
|
Add completed list assignments koan.
|
Add completed list assignments koan.
|
Python
|
mit
|
javierjulio/python-koans-completed,javierjulio/python-koans-completed
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutArrayAssignments in the Ruby Koans
#
from runner.koan import *
class AboutListAssignments(Koan):
def test_non_parallel_assignment(self):
names = ["John", "Smith"]
self.assertEqual(__, names)
def test_parallel_assignments(self):
first_name, last_name = ["John", "Smith"]
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
def test_parallel_assignments_with_extra_values(self):
title, *first_names, last_name = ["Sir", "Ricky", "Bobby", "Worthington"]
self.assertEqual(__, title)
self.assertEqual(__, first_names)
self.assertEqual(__, last_name)
def test_parallel_assignments_with_sublists(self):
first_name, last_name = [["Willie", "Rae"], "Johnson"]
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
def test_swapping_with_parallel_assignment(self):
first_name = "Roy"
last_name = "Rob"
first_name, last_name = last_name, first_name
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
Add completed list assignments koan.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutArrayAssignments in the Ruby Koans
#
from runner.koan import *
class AboutListAssignments(Koan):
def test_non_parallel_assignment(self):
names = ["John", "Smith"]
self.assertEqual(["John", "Smith"], names)
def test_parallel_assignments(self):
first_name, last_name = ["John", "Smith"]
self.assertEqual("John", first_name)
self.assertEqual("Smith", last_name)
def test_parallel_assignments_with_extra_values(self):
title, *first_names, last_name = ["Sir", "Ricky", "Bobby", "Worthington"]
self.assertEqual("Sir", title)
self.assertEqual(["Ricky", "Bobby"], first_names)
self.assertEqual("Worthington", last_name)
def test_parallel_assignments_with_sublists(self):
first_name, last_name = [["Willie", "Rae"], "Johnson"]
self.assertEqual(["Willie", "Rae"], first_name)
self.assertEqual("Johnson", last_name)
def test_swapping_with_parallel_assignment(self):
first_name = "Roy"
last_name = "Rob"
first_name, last_name = last_name, first_name
self.assertEqual("Rob", first_name)
self.assertEqual("Roy", last_name)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutArrayAssignments in the Ruby Koans
#
from runner.koan import *
class AboutListAssignments(Koan):
def test_non_parallel_assignment(self):
names = ["John", "Smith"]
self.assertEqual(__, names)
def test_parallel_assignments(self):
first_name, last_name = ["John", "Smith"]
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
def test_parallel_assignments_with_extra_values(self):
title, *first_names, last_name = ["Sir", "Ricky", "Bobby", "Worthington"]
self.assertEqual(__, title)
self.assertEqual(__, first_names)
self.assertEqual(__, last_name)
def test_parallel_assignments_with_sublists(self):
first_name, last_name = [["Willie", "Rae"], "Johnson"]
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
def test_swapping_with_parallel_assignment(self):
first_name = "Roy"
last_name = "Rob"
first_name, last_name = last_name, first_name
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
<commit_msg>Add completed list assignments koan.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutArrayAssignments in the Ruby Koans
#
from runner.koan import *
class AboutListAssignments(Koan):
def test_non_parallel_assignment(self):
names = ["John", "Smith"]
self.assertEqual(["John", "Smith"], names)
def test_parallel_assignments(self):
first_name, last_name = ["John", "Smith"]
self.assertEqual("John", first_name)
self.assertEqual("Smith", last_name)
def test_parallel_assignments_with_extra_values(self):
title, *first_names, last_name = ["Sir", "Ricky", "Bobby", "Worthington"]
self.assertEqual("Sir", title)
self.assertEqual(["Ricky", "Bobby"], first_names)
self.assertEqual("Worthington", last_name)
def test_parallel_assignments_with_sublists(self):
first_name, last_name = [["Willie", "Rae"], "Johnson"]
self.assertEqual(["Willie", "Rae"], first_name)
self.assertEqual("Johnson", last_name)
def test_swapping_with_parallel_assignment(self):
first_name = "Roy"
last_name = "Rob"
first_name, last_name = last_name, first_name
self.assertEqual("Rob", first_name)
self.assertEqual("Roy", last_name)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutArrayAssignments in the Ruby Koans
#
from runner.koan import *
class AboutListAssignments(Koan):
def test_non_parallel_assignment(self):
names = ["John", "Smith"]
self.assertEqual(__, names)
def test_parallel_assignments(self):
first_name, last_name = ["John", "Smith"]
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
def test_parallel_assignments_with_extra_values(self):
title, *first_names, last_name = ["Sir", "Ricky", "Bobby", "Worthington"]
self.assertEqual(__, title)
self.assertEqual(__, first_names)
self.assertEqual(__, last_name)
def test_parallel_assignments_with_sublists(self):
first_name, last_name = [["Willie", "Rae"], "Johnson"]
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
def test_swapping_with_parallel_assignment(self):
first_name = "Roy"
last_name = "Rob"
first_name, last_name = last_name, first_name
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
Add completed list assignments koan.#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutArrayAssignments in the Ruby Koans
#
from runner.koan import *
class AboutListAssignments(Koan):
def test_non_parallel_assignment(self):
names = ["John", "Smith"]
self.assertEqual(["John", "Smith"], names)
def test_parallel_assignments(self):
first_name, last_name = ["John", "Smith"]
self.assertEqual("John", first_name)
self.assertEqual("Smith", last_name)
def test_parallel_assignments_with_extra_values(self):
title, *first_names, last_name = ["Sir", "Ricky", "Bobby", "Worthington"]
self.assertEqual("Sir", title)
self.assertEqual(["Ricky", "Bobby"], first_names)
self.assertEqual("Worthington", last_name)
def test_parallel_assignments_with_sublists(self):
first_name, last_name = [["Willie", "Rae"], "Johnson"]
self.assertEqual(["Willie", "Rae"], first_name)
self.assertEqual("Johnson", last_name)
def test_swapping_with_parallel_assignment(self):
first_name = "Roy"
last_name = "Rob"
first_name, last_name = last_name, first_name
self.assertEqual("Rob", first_name)
self.assertEqual("Roy", last_name)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutArrayAssignments in the Ruby Koans
#
from runner.koan import *
class AboutListAssignments(Koan):
def test_non_parallel_assignment(self):
names = ["John", "Smith"]
self.assertEqual(__, names)
def test_parallel_assignments(self):
first_name, last_name = ["John", "Smith"]
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
def test_parallel_assignments_with_extra_values(self):
title, *first_names, last_name = ["Sir", "Ricky", "Bobby", "Worthington"]
self.assertEqual(__, title)
self.assertEqual(__, first_names)
self.assertEqual(__, last_name)
def test_parallel_assignments_with_sublists(self):
first_name, last_name = [["Willie", "Rae"], "Johnson"]
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
def test_swapping_with_parallel_assignment(self):
first_name = "Roy"
last_name = "Rob"
first_name, last_name = last_name, first_name
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
<commit_msg>Add completed list assignments koan.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutArrayAssignments in the Ruby Koans
#
from runner.koan import *
class AboutListAssignments(Koan):
def test_non_parallel_assignment(self):
names = ["John", "Smith"]
self.assertEqual(["John", "Smith"], names)
def test_parallel_assignments(self):
first_name, last_name = ["John", "Smith"]
self.assertEqual("John", first_name)
self.assertEqual("Smith", last_name)
def test_parallel_assignments_with_extra_values(self):
title, *first_names, last_name = ["Sir", "Ricky", "Bobby", "Worthington"]
self.assertEqual("Sir", title)
self.assertEqual(["Ricky", "Bobby"], first_names)
self.assertEqual("Worthington", last_name)
def test_parallel_assignments_with_sublists(self):
first_name, last_name = [["Willie", "Rae"], "Johnson"]
self.assertEqual(["Willie", "Rae"], first_name)
self.assertEqual("Johnson", last_name)
def test_swapping_with_parallel_assignment(self):
first_name = "Roy"
last_name = "Rob"
first_name, last_name = last_name, first_name
self.assertEqual("Rob", first_name)
self.assertEqual("Roy", last_name)
|
56e660ea4d42790fe0007100066d2add41c734f5
|
csunplugged/config/__init__.py
|
csunplugged/config/__init__.py
|
"""Module for Django system configuration."""
__version__ = "2.0.0-alpha.3"
__version_english__ = "2.0 Alpha 3"
|
"""Module for Django system configuration."""
__version__ = "2.0.0-alpha.4"
__version_english__ = "2.0 Alpha 4"
|
Increment version number to 2.0.0-alpha.4
|
Increment version number to 2.0.0-alpha.4
|
Python
|
mit
|
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
|
"""Module for Django system configuration."""
__version__ = "2.0.0-alpha.3"
__version_english__ = "2.0 Alpha 3"
Increment version number to 2.0.0-alpha.4
|
"""Module for Django system configuration."""
__version__ = "2.0.0-alpha.4"
__version_english__ = "2.0 Alpha 4"
|
<commit_before>"""Module for Django system configuration."""
__version__ = "2.0.0-alpha.3"
__version_english__ = "2.0 Alpha 3"
<commit_msg>Increment version number to 2.0.0-alpha.4<commit_after>
|
"""Module for Django system configuration."""
__version__ = "2.0.0-alpha.4"
__version_english__ = "2.0 Alpha 4"
|
"""Module for Django system configuration."""
__version__ = "2.0.0-alpha.3"
__version_english__ = "2.0 Alpha 3"
Increment version number to 2.0.0-alpha.4"""Module for Django system configuration."""
__version__ = "2.0.0-alpha.4"
__version_english__ = "2.0 Alpha 4"
|
<commit_before>"""Module for Django system configuration."""
__version__ = "2.0.0-alpha.3"
__version_english__ = "2.0 Alpha 3"
<commit_msg>Increment version number to 2.0.0-alpha.4<commit_after>"""Module for Django system configuration."""
__version__ = "2.0.0-alpha.4"
__version_english__ = "2.0 Alpha 4"
|
4819b35205c1ef17909e825ea75f9e91f8883ee5
|
picoCTF-problems/Examples/Cryptography/ecb1/ecb.py
|
picoCTF-problems/Examples/Cryptography/ecb1/ecb.py
|
#!/usr/bin/python2
from Crypto.Cipher import AES
import os, sys
flag = open("flag", "r").read()
key = open("key", "r").read().strip()
welcome = """
{{welcome_message}}
"""
def encrypt():
cipher = AES.new(key.decode('hex'), AES.MODE_ECB)
return cipher.encrypt(flag).encode("hex")
# flush output immediately
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
print welcome
print "KEY: " + key
print "MESSAGE: " + encrypt()
|
#!/usr/bin/python2
from Crypto.Cipher import AES
import os, sys
flag = open("flag", "r").read().strip()
key = open("key", "r").read().strip()
welcome = """
{{welcome_message}}
"""
def encrypt():
cipher = AES.new(key.decode('hex'), AES.MODE_ECB)
return cipher.encrypt(flag).encode("hex")
# flush output immediately
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
print welcome
print "KEY: " + key
print "MESSAGE: " + encrypt()
|
Fix bug in problem caused by flag file having new lines
|
Fix bug in problem caused by flag file having new lines
|
Python
|
mit
|
picoCTF/picoCTF,picoCTF/picoCTF,royragsdale/picoCTF,picoCTF/picoCTF,royragsdale/picoCTF,royragsdale/picoCTF,picoCTF/picoCTF,picoCTF/picoCTF,picoCTF/picoCTF,royragsdale/picoCTF,royragsdale/picoCTF,royragsdale/picoCTF,royragsdale/picoCTF
|
#!/usr/bin/python2
from Crypto.Cipher import AES
import os, sys
flag = open("flag", "r").read()
key = open("key", "r").read().strip()
welcome = """
{{welcome_message}}
"""
def encrypt():
cipher = AES.new(key.decode('hex'), AES.MODE_ECB)
return cipher.encrypt(flag).encode("hex")
# flush output immediately
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
print welcome
print "KEY: " + key
print "MESSAGE: " + encrypt()
Fix bug in problem caused by flag file having new lines
|
#!/usr/bin/python2
from Crypto.Cipher import AES
import os, sys
flag = open("flag", "r").read().strip()
key = open("key", "r").read().strip()
welcome = """
{{welcome_message}}
"""
def encrypt():
cipher = AES.new(key.decode('hex'), AES.MODE_ECB)
return cipher.encrypt(flag).encode("hex")
# flush output immediately
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
print welcome
print "KEY: " + key
print "MESSAGE: " + encrypt()
|
<commit_before>#!/usr/bin/python2
from Crypto.Cipher import AES
import os, sys
flag = open("flag", "r").read()
key = open("key", "r").read().strip()
welcome = """
{{welcome_message}}
"""
def encrypt():
cipher = AES.new(key.decode('hex'), AES.MODE_ECB)
return cipher.encrypt(flag).encode("hex")
# flush output immediately
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
print welcome
print "KEY: " + key
print "MESSAGE: " + encrypt()
<commit_msg>Fix bug in problem caused by flag file having new lines<commit_after>
|
#!/usr/bin/python2
from Crypto.Cipher import AES
import os, sys
flag = open("flag", "r").read().strip()
key = open("key", "r").read().strip()
welcome = """
{{welcome_message}}
"""
def encrypt():
cipher = AES.new(key.decode('hex'), AES.MODE_ECB)
return cipher.encrypt(flag).encode("hex")
# flush output immediately
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
print welcome
print "KEY: " + key
print "MESSAGE: " + encrypt()
|
#!/usr/bin/python2
from Crypto.Cipher import AES
import os, sys
flag = open("flag", "r").read()
key = open("key", "r").read().strip()
welcome = """
{{welcome_message}}
"""
def encrypt():
cipher = AES.new(key.decode('hex'), AES.MODE_ECB)
return cipher.encrypt(flag).encode("hex")
# flush output immediately
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
print welcome
print "KEY: " + key
print "MESSAGE: " + encrypt()
Fix bug in problem caused by flag file having new lines#!/usr/bin/python2
from Crypto.Cipher import AES
import os, sys
flag = open("flag", "r").read().strip()
key = open("key", "r").read().strip()
welcome = """
{{welcome_message}}
"""
def encrypt():
cipher = AES.new(key.decode('hex'), AES.MODE_ECB)
return cipher.encrypt(flag).encode("hex")
# flush output immediately
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
print welcome
print "KEY: " + key
print "MESSAGE: " + encrypt()
|
<commit_before>#!/usr/bin/python2
from Crypto.Cipher import AES
import os, sys
flag = open("flag", "r").read()
key = open("key", "r").read().strip()
welcome = """
{{welcome_message}}
"""
def encrypt():
cipher = AES.new(key.decode('hex'), AES.MODE_ECB)
return cipher.encrypt(flag).encode("hex")
# flush output immediately
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
print welcome
print "KEY: " + key
print "MESSAGE: " + encrypt()
<commit_msg>Fix bug in problem caused by flag file having new lines<commit_after>#!/usr/bin/python2
from Crypto.Cipher import AES
import os, sys
flag = open("flag", "r").read().strip()
key = open("key", "r").read().strip()
welcome = """
{{welcome_message}}
"""
def encrypt():
cipher = AES.new(key.decode('hex'), AES.MODE_ECB)
return cipher.encrypt(flag).encode("hex")
# flush output immediately
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
print welcome
print "KEY: " + key
print "MESSAGE: " + encrypt()
|
75edc4ff2dfc244f79504eea9770e072fceb5df9
|
ppci/__main__.py
|
ppci/__main__.py
|
""" Main entry point """
# from ppci.cli import main
import sys
import importlib
valid_programs = [
"archive",
"asm",
"build",
"c3c",
"cc",
"disasm",
"hexdump",
"hexutil",
"java",
"link",
"llc",
"mkuimage",
"objcopy",
"objdump",
"ocaml",
"opt",
"pascal",
"pedump",
"pycompile",
"readelf",
"wabt",
"wasm2wat",
"wasmcompile",
"wat2wasm",
"yacc",
]
def main():
if len(sys.argv) < 2:
print_help_message()
else:
subcommand = sys.argv[1]
cmd_args = sys.argv[2:]
if subcommand in valid_programs:
m = importlib.import_module("ppci.cli." + subcommand)
func = getattr(m, "main", None) or getattr(m, subcommand)
func(cmd_args)
else:
print_help_message()
def print_help_message():
print("Welcome to PPCI command line!")
print()
print("Please use one of the subcommands below:")
for cmd in valid_programs:
print(" $ python -m ppci {} -h".format(cmd))
print()
if __name__ == "__main__":
main()
|
""" Main entry point """
# from ppci.cli import main
import sys
import importlib
valid_programs = [
"archive",
"asm",
"build",
"c3c",
"cc",
"disasm",
"hexdump",
"hexutil",
"java",
"ld",
"link",
"llc",
"mkuimage",
"objcopy",
"objdump",
"ocaml",
"opt",
"pascal",
"pedump",
"pycompile",
"readelf",
"wabt",
"wasm2wat",
"wasmcompile",
"wat2wasm",
"yacc",
]
aliases = {
'ld': 'link'
}
def main():
if len(sys.argv) < 2:
print_help_message()
else:
subcommand = sys.argv[1]
subcommand = aliases.get(subcommand, subcommand)
cmd_args = sys.argv[2:]
if subcommand in valid_programs:
m = importlib.import_module("ppci.cli." + subcommand)
func = getattr(m, "main", None) or getattr(m, subcommand)
func(cmd_args)
else:
print_help_message()
def print_help_message():
print("Welcome to PPCI command line!")
print()
print("Please use one of the subcommands below:")
for cmd in valid_programs:
print(" $ python -m ppci {} -h".format(cmd))
print()
if __name__ == "__main__":
main()
|
Add ld alias for the link subcommand.
|
Add ld alias for the link subcommand.
|
Python
|
bsd-2-clause
|
windelbouwman/ppci-mirror,windelbouwman/ppci-mirror,windelbouwman/ppci-mirror,windelbouwman/ppci-mirror,windelbouwman/ppci-mirror,windelbouwman/ppci-mirror
|
""" Main entry point """
# from ppci.cli import main
import sys
import importlib
valid_programs = [
"archive",
"asm",
"build",
"c3c",
"cc",
"disasm",
"hexdump",
"hexutil",
"java",
"link",
"llc",
"mkuimage",
"objcopy",
"objdump",
"ocaml",
"opt",
"pascal",
"pedump",
"pycompile",
"readelf",
"wabt",
"wasm2wat",
"wasmcompile",
"wat2wasm",
"yacc",
]
def main():
if len(sys.argv) < 2:
print_help_message()
else:
subcommand = sys.argv[1]
cmd_args = sys.argv[2:]
if subcommand in valid_programs:
m = importlib.import_module("ppci.cli." + subcommand)
func = getattr(m, "main", None) or getattr(m, subcommand)
func(cmd_args)
else:
print_help_message()
def print_help_message():
print("Welcome to PPCI command line!")
print()
print("Please use one of the subcommands below:")
for cmd in valid_programs:
print(" $ python -m ppci {} -h".format(cmd))
print()
if __name__ == "__main__":
main()
Add ld alias for the link subcommand.
|
""" Main entry point """
# from ppci.cli import main
import sys
import importlib
valid_programs = [
"archive",
"asm",
"build",
"c3c",
"cc",
"disasm",
"hexdump",
"hexutil",
"java",
"ld",
"link",
"llc",
"mkuimage",
"objcopy",
"objdump",
"ocaml",
"opt",
"pascal",
"pedump",
"pycompile",
"readelf",
"wabt",
"wasm2wat",
"wasmcompile",
"wat2wasm",
"yacc",
]
aliases = {
'ld': 'link'
}
def main():
if len(sys.argv) < 2:
print_help_message()
else:
subcommand = sys.argv[1]
subcommand = aliases.get(subcommand, subcommand)
cmd_args = sys.argv[2:]
if subcommand in valid_programs:
m = importlib.import_module("ppci.cli." + subcommand)
func = getattr(m, "main", None) or getattr(m, subcommand)
func(cmd_args)
else:
print_help_message()
def print_help_message():
print("Welcome to PPCI command line!")
print()
print("Please use one of the subcommands below:")
for cmd in valid_programs:
print(" $ python -m ppci {} -h".format(cmd))
print()
if __name__ == "__main__":
main()
|
<commit_before>""" Main entry point """
# from ppci.cli import main
import sys
import importlib
valid_programs = [
"archive",
"asm",
"build",
"c3c",
"cc",
"disasm",
"hexdump",
"hexutil",
"java",
"link",
"llc",
"mkuimage",
"objcopy",
"objdump",
"ocaml",
"opt",
"pascal",
"pedump",
"pycompile",
"readelf",
"wabt",
"wasm2wat",
"wasmcompile",
"wat2wasm",
"yacc",
]
def main():
if len(sys.argv) < 2:
print_help_message()
else:
subcommand = sys.argv[1]
cmd_args = sys.argv[2:]
if subcommand in valid_programs:
m = importlib.import_module("ppci.cli." + subcommand)
func = getattr(m, "main", None) or getattr(m, subcommand)
func(cmd_args)
else:
print_help_message()
def print_help_message():
print("Welcome to PPCI command line!")
print()
print("Please use one of the subcommands below:")
for cmd in valid_programs:
print(" $ python -m ppci {} -h".format(cmd))
print()
if __name__ == "__main__":
main()
<commit_msg>Add ld alias for the link subcommand.<commit_after>
|
""" Main entry point """
# from ppci.cli import main
import sys
import importlib
valid_programs = [
"archive",
"asm",
"build",
"c3c",
"cc",
"disasm",
"hexdump",
"hexutil",
"java",
"ld",
"link",
"llc",
"mkuimage",
"objcopy",
"objdump",
"ocaml",
"opt",
"pascal",
"pedump",
"pycompile",
"readelf",
"wabt",
"wasm2wat",
"wasmcompile",
"wat2wasm",
"yacc",
]
aliases = {
'ld': 'link'
}
def main():
if len(sys.argv) < 2:
print_help_message()
else:
subcommand = sys.argv[1]
subcommand = aliases.get(subcommand, subcommand)
cmd_args = sys.argv[2:]
if subcommand in valid_programs:
m = importlib.import_module("ppci.cli." + subcommand)
func = getattr(m, "main", None) or getattr(m, subcommand)
func(cmd_args)
else:
print_help_message()
def print_help_message():
print("Welcome to PPCI command line!")
print()
print("Please use one of the subcommands below:")
for cmd in valid_programs:
print(" $ python -m ppci {} -h".format(cmd))
print()
if __name__ == "__main__":
main()
|
""" Main entry point """
# from ppci.cli import main
import sys
import importlib
valid_programs = [
"archive",
"asm",
"build",
"c3c",
"cc",
"disasm",
"hexdump",
"hexutil",
"java",
"link",
"llc",
"mkuimage",
"objcopy",
"objdump",
"ocaml",
"opt",
"pascal",
"pedump",
"pycompile",
"readelf",
"wabt",
"wasm2wat",
"wasmcompile",
"wat2wasm",
"yacc",
]
def main():
if len(sys.argv) < 2:
print_help_message()
else:
subcommand = sys.argv[1]
cmd_args = sys.argv[2:]
if subcommand in valid_programs:
m = importlib.import_module("ppci.cli." + subcommand)
func = getattr(m, "main", None) or getattr(m, subcommand)
func(cmd_args)
else:
print_help_message()
def print_help_message():
print("Welcome to PPCI command line!")
print()
print("Please use one of the subcommands below:")
for cmd in valid_programs:
print(" $ python -m ppci {} -h".format(cmd))
print()
if __name__ == "__main__":
main()
Add ld alias for the link subcommand.""" Main entry point """
# from ppci.cli import main
import sys
import importlib
valid_programs = [
"archive",
"asm",
"build",
"c3c",
"cc",
"disasm",
"hexdump",
"hexutil",
"java",
"ld",
"link",
"llc",
"mkuimage",
"objcopy",
"objdump",
"ocaml",
"opt",
"pascal",
"pedump",
"pycompile",
"readelf",
"wabt",
"wasm2wat",
"wasmcompile",
"wat2wasm",
"yacc",
]
aliases = {
'ld': 'link'
}
def main():
if len(sys.argv) < 2:
print_help_message()
else:
subcommand = sys.argv[1]
subcommand = aliases.get(subcommand, subcommand)
cmd_args = sys.argv[2:]
if subcommand in valid_programs:
m = importlib.import_module("ppci.cli." + subcommand)
func = getattr(m, "main", None) or getattr(m, subcommand)
func(cmd_args)
else:
print_help_message()
def print_help_message():
print("Welcome to PPCI command line!")
print()
print("Please use one of the subcommands below:")
for cmd in valid_programs:
print(" $ python -m ppci {} -h".format(cmd))
print()
if __name__ == "__main__":
main()
|
<commit_before>""" Main entry point """
# from ppci.cli import main
import sys
import importlib
valid_programs = [
"archive",
"asm",
"build",
"c3c",
"cc",
"disasm",
"hexdump",
"hexutil",
"java",
"link",
"llc",
"mkuimage",
"objcopy",
"objdump",
"ocaml",
"opt",
"pascal",
"pedump",
"pycompile",
"readelf",
"wabt",
"wasm2wat",
"wasmcompile",
"wat2wasm",
"yacc",
]
def main():
if len(sys.argv) < 2:
print_help_message()
else:
subcommand = sys.argv[1]
cmd_args = sys.argv[2:]
if subcommand in valid_programs:
m = importlib.import_module("ppci.cli." + subcommand)
func = getattr(m, "main", None) or getattr(m, subcommand)
func(cmd_args)
else:
print_help_message()
def print_help_message():
print("Welcome to PPCI command line!")
print()
print("Please use one of the subcommands below:")
for cmd in valid_programs:
print(" $ python -m ppci {} -h".format(cmd))
print()
if __name__ == "__main__":
main()
<commit_msg>Add ld alias for the link subcommand.<commit_after>""" Main entry point """
# from ppci.cli import main
import sys
import importlib
valid_programs = [
"archive",
"asm",
"build",
"c3c",
"cc",
"disasm",
"hexdump",
"hexutil",
"java",
"ld",
"link",
"llc",
"mkuimage",
"objcopy",
"objdump",
"ocaml",
"opt",
"pascal",
"pedump",
"pycompile",
"readelf",
"wabt",
"wasm2wat",
"wasmcompile",
"wat2wasm",
"yacc",
]
aliases = {
'ld': 'link'
}
def main():
if len(sys.argv) < 2:
print_help_message()
else:
subcommand = sys.argv[1]
subcommand = aliases.get(subcommand, subcommand)
cmd_args = sys.argv[2:]
if subcommand in valid_programs:
m = importlib.import_module("ppci.cli." + subcommand)
func = getattr(m, "main", None) or getattr(m, subcommand)
func(cmd_args)
else:
print_help_message()
def print_help_message():
print("Welcome to PPCI command line!")
print()
print("Please use one of the subcommands below:")
for cmd in valid_programs:
print(" $ python -m ppci {} -h".format(cmd))
print()
if __name__ == "__main__":
main()
|
9fa7f06319ef5dcdb349438db63e2ead89bad455
|
command_line/ispyb_xml.py
|
command_line/ispyb_xml.py
|
import os
import sys
import iotbx.phil
import xia2.Handlers.Streams
from xia2.Interfaces.ISPyB.ISPyBXmlHandler import ISPyBXmlHandler
from xia2.Schema.XProject import XProject
def ispyb_xml(xml_out):
assert os.path.exists("xia2.json")
assert os.path.exists("xia2.txt")
assert os.path.exists("xia2-working.phil")
command_line = ""
for record in open("xia2.txt"):
if record.startswith("Command line:"):
command_line = record.replace("Command line:", "").strip()
with open("xia2-working.phil", "rb") as f:
working_phil = iotbx.phil.parse(f.read())
xinfo = XProject.from_json(filename="xia2.json")
crystals = xinfo.get_crystals()
assert len(crystals) == 1
crystal = next(iter(crystals.values()))
ispyb_hdl = ISPyBXmlHandler(xinfo)
ispyb_hdl.add_xcrystal(crystal)
ispyb_hdl.write_xml(xml_out, command_line, working_phil=working_phil)
if __name__ == "__main__":
xia2.Handlers.Streams.setup_logging()
if len(sys.argv) >= 2:
ispyb_xml(sys.argv[1])
else:
ispyb_xml("ispyb.xml")
|
import os
import sys
import iotbx.phil
import xia2.Handlers.Streams
from xia2.Interfaces.ISPyB.ISPyBXmlHandler import ISPyBXmlHandler
from xia2.Schema.XProject import XProject
def ispyb_xml(xml_out):
assert os.path.exists("xia2.json")
assert os.path.exists("xia2.txt")
assert os.path.exists("xia2-working.phil")
command_line = ""
for record in open("xia2.txt"):
if record.startswith("Command line:"):
command_line = record.replace("Command line:", "").strip()
with open("xia2-working.phil", "r") as f:
working_phil = iotbx.phil.parse(f.read())
xinfo = XProject.from_json(filename="xia2.json")
crystals = xinfo.get_crystals()
assert len(crystals) == 1
crystal = next(iter(crystals.values()))
ispyb_hdl = ISPyBXmlHandler(xinfo)
ispyb_hdl.add_xcrystal(crystal)
ispyb_hdl.write_xml(xml_out, command_line, working_phil=working_phil)
if __name__ == "__main__":
xia2.Handlers.Streams.setup_logging()
if len(sys.argv) >= 2:
ispyb_xml(sys.argv[1])
else:
ispyb_xml("ispyb.xml")
|
Read file as text not binary
|
Read file as text not binary
Reads the working PHIL file, but reading this as binary breaks the phil
parsing as the data are a byte array on Python 3 - open as text seems to
fix this - fixes #483
|
Python
|
bsd-3-clause
|
xia2/xia2,xia2/xia2
|
import os
import sys
import iotbx.phil
import xia2.Handlers.Streams
from xia2.Interfaces.ISPyB.ISPyBXmlHandler import ISPyBXmlHandler
from xia2.Schema.XProject import XProject
def ispyb_xml(xml_out):
assert os.path.exists("xia2.json")
assert os.path.exists("xia2.txt")
assert os.path.exists("xia2-working.phil")
command_line = ""
for record in open("xia2.txt"):
if record.startswith("Command line:"):
command_line = record.replace("Command line:", "").strip()
with open("xia2-working.phil", "rb") as f:
working_phil = iotbx.phil.parse(f.read())
xinfo = XProject.from_json(filename="xia2.json")
crystals = xinfo.get_crystals()
assert len(crystals) == 1
crystal = next(iter(crystals.values()))
ispyb_hdl = ISPyBXmlHandler(xinfo)
ispyb_hdl.add_xcrystal(crystal)
ispyb_hdl.write_xml(xml_out, command_line, working_phil=working_phil)
if __name__ == "__main__":
xia2.Handlers.Streams.setup_logging()
if len(sys.argv) >= 2:
ispyb_xml(sys.argv[1])
else:
ispyb_xml("ispyb.xml")
Read file as text not binary
Reads the working PHIL file, but reading this as binary breaks the phil
parsing as the data are a byte array on Python 3 - open as text seems to
fix this - fixes #483
|
import os
import sys
import iotbx.phil
import xia2.Handlers.Streams
from xia2.Interfaces.ISPyB.ISPyBXmlHandler import ISPyBXmlHandler
from xia2.Schema.XProject import XProject
def ispyb_xml(xml_out):
assert os.path.exists("xia2.json")
assert os.path.exists("xia2.txt")
assert os.path.exists("xia2-working.phil")
command_line = ""
for record in open("xia2.txt"):
if record.startswith("Command line:"):
command_line = record.replace("Command line:", "").strip()
with open("xia2-working.phil", "r") as f:
working_phil = iotbx.phil.parse(f.read())
xinfo = XProject.from_json(filename="xia2.json")
crystals = xinfo.get_crystals()
assert len(crystals) == 1
crystal = next(iter(crystals.values()))
ispyb_hdl = ISPyBXmlHandler(xinfo)
ispyb_hdl.add_xcrystal(crystal)
ispyb_hdl.write_xml(xml_out, command_line, working_phil=working_phil)
if __name__ == "__main__":
xia2.Handlers.Streams.setup_logging()
if len(sys.argv) >= 2:
ispyb_xml(sys.argv[1])
else:
ispyb_xml("ispyb.xml")
|
<commit_before>import os
import sys
import iotbx.phil
import xia2.Handlers.Streams
from xia2.Interfaces.ISPyB.ISPyBXmlHandler import ISPyBXmlHandler
from xia2.Schema.XProject import XProject
def ispyb_xml(xml_out):
assert os.path.exists("xia2.json")
assert os.path.exists("xia2.txt")
assert os.path.exists("xia2-working.phil")
command_line = ""
for record in open("xia2.txt"):
if record.startswith("Command line:"):
command_line = record.replace("Command line:", "").strip()
with open("xia2-working.phil", "rb") as f:
working_phil = iotbx.phil.parse(f.read())
xinfo = XProject.from_json(filename="xia2.json")
crystals = xinfo.get_crystals()
assert len(crystals) == 1
crystal = next(iter(crystals.values()))
ispyb_hdl = ISPyBXmlHandler(xinfo)
ispyb_hdl.add_xcrystal(crystal)
ispyb_hdl.write_xml(xml_out, command_line, working_phil=working_phil)
if __name__ == "__main__":
xia2.Handlers.Streams.setup_logging()
if len(sys.argv) >= 2:
ispyb_xml(sys.argv[1])
else:
ispyb_xml("ispyb.xml")
<commit_msg>Read file as text not binary
Reads the working PHIL file, but reading this as binary breaks the phil
parsing as the data are a byte array on Python 3 - open as text seems to
fix this - fixes #483<commit_after>
|
import os
import sys
import iotbx.phil
import xia2.Handlers.Streams
from xia2.Interfaces.ISPyB.ISPyBXmlHandler import ISPyBXmlHandler
from xia2.Schema.XProject import XProject
def ispyb_xml(xml_out):
assert os.path.exists("xia2.json")
assert os.path.exists("xia2.txt")
assert os.path.exists("xia2-working.phil")
command_line = ""
for record in open("xia2.txt"):
if record.startswith("Command line:"):
command_line = record.replace("Command line:", "").strip()
with open("xia2-working.phil", "r") as f:
working_phil = iotbx.phil.parse(f.read())
xinfo = XProject.from_json(filename="xia2.json")
crystals = xinfo.get_crystals()
assert len(crystals) == 1
crystal = next(iter(crystals.values()))
ispyb_hdl = ISPyBXmlHandler(xinfo)
ispyb_hdl.add_xcrystal(crystal)
ispyb_hdl.write_xml(xml_out, command_line, working_phil=working_phil)
if __name__ == "__main__":
xia2.Handlers.Streams.setup_logging()
if len(sys.argv) >= 2:
ispyb_xml(sys.argv[1])
else:
ispyb_xml("ispyb.xml")
|
import os
import sys
import iotbx.phil
import xia2.Handlers.Streams
from xia2.Interfaces.ISPyB.ISPyBXmlHandler import ISPyBXmlHandler
from xia2.Schema.XProject import XProject
def ispyb_xml(xml_out):
assert os.path.exists("xia2.json")
assert os.path.exists("xia2.txt")
assert os.path.exists("xia2-working.phil")
command_line = ""
for record in open("xia2.txt"):
if record.startswith("Command line:"):
command_line = record.replace("Command line:", "").strip()
with open("xia2-working.phil", "rb") as f:
working_phil = iotbx.phil.parse(f.read())
xinfo = XProject.from_json(filename="xia2.json")
crystals = xinfo.get_crystals()
assert len(crystals) == 1
crystal = next(iter(crystals.values()))
ispyb_hdl = ISPyBXmlHandler(xinfo)
ispyb_hdl.add_xcrystal(crystal)
ispyb_hdl.write_xml(xml_out, command_line, working_phil=working_phil)
if __name__ == "__main__":
xia2.Handlers.Streams.setup_logging()
if len(sys.argv) >= 2:
ispyb_xml(sys.argv[1])
else:
ispyb_xml("ispyb.xml")
Read file as text not binary
Reads the working PHIL file, but reading this as binary breaks the phil
parsing as the data are a byte array on Python 3 - open as text seems to
fix this - fixes #483import os
import sys
import iotbx.phil
import xia2.Handlers.Streams
from xia2.Interfaces.ISPyB.ISPyBXmlHandler import ISPyBXmlHandler
from xia2.Schema.XProject import XProject
def ispyb_xml(xml_out):
assert os.path.exists("xia2.json")
assert os.path.exists("xia2.txt")
assert os.path.exists("xia2-working.phil")
command_line = ""
for record in open("xia2.txt"):
if record.startswith("Command line:"):
command_line = record.replace("Command line:", "").strip()
with open("xia2-working.phil", "r") as f:
working_phil = iotbx.phil.parse(f.read())
xinfo = XProject.from_json(filename="xia2.json")
crystals = xinfo.get_crystals()
assert len(crystals) == 1
crystal = next(iter(crystals.values()))
ispyb_hdl = ISPyBXmlHandler(xinfo)
ispyb_hdl.add_xcrystal(crystal)
ispyb_hdl.write_xml(xml_out, command_line, working_phil=working_phil)
if __name__ == "__main__":
xia2.Handlers.Streams.setup_logging()
if len(sys.argv) >= 2:
ispyb_xml(sys.argv[1])
else:
ispyb_xml("ispyb.xml")
|
<commit_before>import os
import sys
import iotbx.phil
import xia2.Handlers.Streams
from xia2.Interfaces.ISPyB.ISPyBXmlHandler import ISPyBXmlHandler
from xia2.Schema.XProject import XProject
def ispyb_xml(xml_out):
assert os.path.exists("xia2.json")
assert os.path.exists("xia2.txt")
assert os.path.exists("xia2-working.phil")
command_line = ""
for record in open("xia2.txt"):
if record.startswith("Command line:"):
command_line = record.replace("Command line:", "").strip()
with open("xia2-working.phil", "rb") as f:
working_phil = iotbx.phil.parse(f.read())
xinfo = XProject.from_json(filename="xia2.json")
crystals = xinfo.get_crystals()
assert len(crystals) == 1
crystal = next(iter(crystals.values()))
ispyb_hdl = ISPyBXmlHandler(xinfo)
ispyb_hdl.add_xcrystal(crystal)
ispyb_hdl.write_xml(xml_out, command_line, working_phil=working_phil)
if __name__ == "__main__":
xia2.Handlers.Streams.setup_logging()
if len(sys.argv) >= 2:
ispyb_xml(sys.argv[1])
else:
ispyb_xml("ispyb.xml")
<commit_msg>Read file as text not binary
Reads the working PHIL file, but reading this as binary breaks the phil
parsing as the data are a byte array on Python 3 - open as text seems to
fix this - fixes #483<commit_after>import os
import sys
import iotbx.phil
import xia2.Handlers.Streams
from xia2.Interfaces.ISPyB.ISPyBXmlHandler import ISPyBXmlHandler
from xia2.Schema.XProject import XProject
def ispyb_xml(xml_out):
assert os.path.exists("xia2.json")
assert os.path.exists("xia2.txt")
assert os.path.exists("xia2-working.phil")
command_line = ""
for record in open("xia2.txt"):
if record.startswith("Command line:"):
command_line = record.replace("Command line:", "").strip()
with open("xia2-working.phil", "r") as f:
working_phil = iotbx.phil.parse(f.read())
xinfo = XProject.from_json(filename="xia2.json")
crystals = xinfo.get_crystals()
assert len(crystals) == 1
crystal = next(iter(crystals.values()))
ispyb_hdl = ISPyBXmlHandler(xinfo)
ispyb_hdl.add_xcrystal(crystal)
ispyb_hdl.write_xml(xml_out, command_line, working_phil=working_phil)
if __name__ == "__main__":
xia2.Handlers.Streams.setup_logging()
if len(sys.argv) >= 2:
ispyb_xml(sys.argv[1])
else:
ispyb_xml("ispyb.xml")
|
0463d8937f9efd571f3ad6846f6d1f351fcfe8e1
|
px/px_cpuinfo.py
|
px/px_cpuinfo.py
|
def get_core_count():
"""
Count the number of cores in the system.
Returns a tuple (physical, logical) with counts of physical and logical
cores.
"""
pass
def get_core_count_from_proc_cpuinfo(proc_cpuinfo="/proc/cpuinfo"):
pass
def get_core_count_from_sysctl():
pass
|
import os
import errno
import subprocess
def get_core_count():
"""
Count the number of cores in the system.
Returns a tuple (physical, logical) with counts of physical and logical
cores.
"""
pass
def get_core_count_from_proc_cpuinfo(proc_cpuinfo="/proc/cpuinfo"):
pass
def get_core_count_from_sysctl():
env = os.environ.copy()
if "LANG" in env:
del env["LANG"]
try:
sysctl = subprocess.Popen(["sysctl", 'hw'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
env=env)
except OSError as e:
if e.errno == errno.ENOENT:
# sysctl not found, we're probably not on OSX
return None
raise
sysctl_stdout = sysctl.communicate()[0].decode('utf-8')
sysctl_lines = sysctl_stdout.split('\n')
# Note the ending spaces, they must be there for number extraction to work!
PHYSICAL_PREFIX = 'hw.physicalcpu: '
LOGICAL_PREFIX = 'hw.logicalcpu: '
physical = None
logical = None
for line in sysctl_lines:
if line.startswith(PHYSICAL_PREFIX):
physical = int(line[len(PHYSICAL_PREFIX):])
elif line.startswith(LOGICAL_PREFIX):
logical = int(line[len(LOGICAL_PREFIX)])
return (physical, logical)
|
Implement core counting of OS X
|
Implement core counting of OS X
|
Python
|
mit
|
walles/px,walles/px
|
def get_core_count():
"""
Count the number of cores in the system.
Returns a tuple (physical, logical) with counts of physical and logical
cores.
"""
pass
def get_core_count_from_proc_cpuinfo(proc_cpuinfo="/proc/cpuinfo"):
pass
def get_core_count_from_sysctl():
pass
Implement core counting of OS X
|
import os
import errno
import subprocess
def get_core_count():
"""
Count the number of cores in the system.
Returns a tuple (physical, logical) with counts of physical and logical
cores.
"""
pass
def get_core_count_from_proc_cpuinfo(proc_cpuinfo="/proc/cpuinfo"):
pass
def get_core_count_from_sysctl():
env = os.environ.copy()
if "LANG" in env:
del env["LANG"]
try:
sysctl = subprocess.Popen(["sysctl", 'hw'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
env=env)
except OSError as e:
if e.errno == errno.ENOENT:
# sysctl not found, we're probably not on OSX
return None
raise
sysctl_stdout = sysctl.communicate()[0].decode('utf-8')
sysctl_lines = sysctl_stdout.split('\n')
# Note the ending spaces, they must be there for number extraction to work!
PHYSICAL_PREFIX = 'hw.physicalcpu: '
LOGICAL_PREFIX = 'hw.logicalcpu: '
physical = None
logical = None
for line in sysctl_lines:
if line.startswith(PHYSICAL_PREFIX):
physical = int(line[len(PHYSICAL_PREFIX):])
elif line.startswith(LOGICAL_PREFIX):
logical = int(line[len(LOGICAL_PREFIX)])
return (physical, logical)
|
<commit_before>def get_core_count():
"""
Count the number of cores in the system.
Returns a tuple (physical, logical) with counts of physical and logical
cores.
"""
pass
def get_core_count_from_proc_cpuinfo(proc_cpuinfo="/proc/cpuinfo"):
pass
def get_core_count_from_sysctl():
pass
<commit_msg>Implement core counting of OS X<commit_after>
|
import os
import errno
import subprocess
def get_core_count():
"""
Count the number of cores in the system.
Returns a tuple (physical, logical) with counts of physical and logical
cores.
"""
pass
def get_core_count_from_proc_cpuinfo(proc_cpuinfo="/proc/cpuinfo"):
pass
def get_core_count_from_sysctl():
env = os.environ.copy()
if "LANG" in env:
del env["LANG"]
try:
sysctl = subprocess.Popen(["sysctl", 'hw'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
env=env)
except OSError as e:
if e.errno == errno.ENOENT:
# sysctl not found, we're probably not on OSX
return None
raise
sysctl_stdout = sysctl.communicate()[0].decode('utf-8')
sysctl_lines = sysctl_stdout.split('\n')
# Note the ending spaces, they must be there for number extraction to work!
PHYSICAL_PREFIX = 'hw.physicalcpu: '
LOGICAL_PREFIX = 'hw.logicalcpu: '
physical = None
logical = None
for line in sysctl_lines:
if line.startswith(PHYSICAL_PREFIX):
physical = int(line[len(PHYSICAL_PREFIX):])
elif line.startswith(LOGICAL_PREFIX):
logical = int(line[len(LOGICAL_PREFIX)])
return (physical, logical)
|
def get_core_count():
"""
Count the number of cores in the system.
Returns a tuple (physical, logical) with counts of physical and logical
cores.
"""
pass
def get_core_count_from_proc_cpuinfo(proc_cpuinfo="/proc/cpuinfo"):
pass
def get_core_count_from_sysctl():
pass
Implement core counting of OS Ximport os
import errno
import subprocess
def get_core_count():
"""
Count the number of cores in the system.
Returns a tuple (physical, logical) with counts of physical and logical
cores.
"""
pass
def get_core_count_from_proc_cpuinfo(proc_cpuinfo="/proc/cpuinfo"):
pass
def get_core_count_from_sysctl():
env = os.environ.copy()
if "LANG" in env:
del env["LANG"]
try:
sysctl = subprocess.Popen(["sysctl", 'hw'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
env=env)
except OSError as e:
if e.errno == errno.ENOENT:
# sysctl not found, we're probably not on OSX
return None
raise
sysctl_stdout = sysctl.communicate()[0].decode('utf-8')
sysctl_lines = sysctl_stdout.split('\n')
# Note the ending spaces, they must be there for number extraction to work!
PHYSICAL_PREFIX = 'hw.physicalcpu: '
LOGICAL_PREFIX = 'hw.logicalcpu: '
physical = None
logical = None
for line in sysctl_lines:
if line.startswith(PHYSICAL_PREFIX):
physical = int(line[len(PHYSICAL_PREFIX):])
elif line.startswith(LOGICAL_PREFIX):
logical = int(line[len(LOGICAL_PREFIX)])
return (physical, logical)
|
<commit_before>def get_core_count():
"""
Count the number of cores in the system.
Returns a tuple (physical, logical) with counts of physical and logical
cores.
"""
pass
def get_core_count_from_proc_cpuinfo(proc_cpuinfo="/proc/cpuinfo"):
pass
def get_core_count_from_sysctl():
pass
<commit_msg>Implement core counting of OS X<commit_after>import os
import errno
import subprocess
def get_core_count():
"""
Count the number of cores in the system.
Returns a tuple (physical, logical) with counts of physical and logical
cores.
"""
pass
def get_core_count_from_proc_cpuinfo(proc_cpuinfo="/proc/cpuinfo"):
pass
def get_core_count_from_sysctl():
env = os.environ.copy()
if "LANG" in env:
del env["LANG"]
try:
sysctl = subprocess.Popen(["sysctl", 'hw'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
env=env)
except OSError as e:
if e.errno == errno.ENOENT:
# sysctl not found, we're probably not on OSX
return None
raise
sysctl_stdout = sysctl.communicate()[0].decode('utf-8')
sysctl_lines = sysctl_stdout.split('\n')
# Note the ending spaces, they must be there for number extraction to work!
PHYSICAL_PREFIX = 'hw.physicalcpu: '
LOGICAL_PREFIX = 'hw.logicalcpu: '
physical = None
logical = None
for line in sysctl_lines:
if line.startswith(PHYSICAL_PREFIX):
physical = int(line[len(PHYSICAL_PREFIX):])
elif line.startswith(LOGICAL_PREFIX):
logical = int(line[len(LOGICAL_PREFIX)])
return (physical, logical)
|
d7b2f41c5cd4602aa45c7c54f964e9bec5ce6190
|
pyof/__init__.py
|
pyof/__init__.py
|
"""The ofx parser package. A package to parse OpenFlow messages.
This package is a library that parses and creates OpenFlow Messages.
It contains all implemented versions of OpenFlow protocol
"""
__version__ = '1.1.0a2'
|
"""The ofx parser package. A package to parse OpenFlow messages.
This package is a library that parses and creates OpenFlow Messages.
It contains all implemented versions of OpenFlow protocol
"""
__version__ = '1.1.0a2.post2'
|
Increase lib version to make all projects compatible
|
Increase lib version to make all projects compatible
Please, publish this version on pypi
|
Python
|
mit
|
cemsbr/python-openflow,kytos/python-openflow
|
"""The ofx parser package. A package to parse OpenFlow messages.
This package is a library that parses and creates OpenFlow Messages.
It contains all implemented versions of OpenFlow protocol
"""
__version__ = '1.1.0a2'
Increase lib version to make all projects compatible
Please, publish this version on pypi
|
"""The ofx parser package. A package to parse OpenFlow messages.
This package is a library that parses and creates OpenFlow Messages.
It contains all implemented versions of OpenFlow protocol
"""
__version__ = '1.1.0a2.post2'
|
<commit_before>"""The ofx parser package. A package to parse OpenFlow messages.
This package is a library that parses and creates OpenFlow Messages.
It contains all implemented versions of OpenFlow protocol
"""
__version__ = '1.1.0a2'
<commit_msg>Increase lib version to make all projects compatible
Please, publish this version on pypi<commit_after>
|
"""The ofx parser package. A package to parse OpenFlow messages.
This package is a library that parses and creates OpenFlow Messages.
It contains all implemented versions of OpenFlow protocol
"""
__version__ = '1.1.0a2.post2'
|
"""The ofx parser package. A package to parse OpenFlow messages.
This package is a library that parses and creates OpenFlow Messages.
It contains all implemented versions of OpenFlow protocol
"""
__version__ = '1.1.0a2'
Increase lib version to make all projects compatible
Please, publish this version on pypi"""The ofx parser package. A package to parse OpenFlow messages.
This package is a library that parses and creates OpenFlow Messages.
It contains all implemented versions of OpenFlow protocol
"""
__version__ = '1.1.0a2.post2'
|
<commit_before>"""The ofx parser package. A package to parse OpenFlow messages.
This package is a library that parses and creates OpenFlow Messages.
It contains all implemented versions of OpenFlow protocol
"""
__version__ = '1.1.0a2'
<commit_msg>Increase lib version to make all projects compatible
Please, publish this version on pypi<commit_after>"""The ofx parser package. A package to parse OpenFlow messages.
This package is a library that parses and creates OpenFlow Messages.
It contains all implemented versions of OpenFlow protocol
"""
__version__ = '1.1.0a2.post2'
|
d53499ed11b3592e4256e78f9a7186544760cf02
|
app/main/views/sub_navigation_dictionaries.py
|
app/main/views/sub_navigation_dictionaries.py
|
def features_nav():
return [
{
"name": "Features",
"link": "main.features",
},
{
"name": "Emails",
"link": "main.features_email",
},
{
"name": "Text messages",
"link": "main.features_sms",
},
{
"name": "Letters",
"link": "main.features_letters",
},
{
"name": "Roadmap",
"link": "main.roadmap",
},
{
"name": "Trial mode",
"link": "main.trial_mode_new",
},
{
"name": "Message statuses",
"link": "main.message_status",
},
{
"name": "Security",
"link": "main.security",
},
{
"name": "Terms of use",
"link": "main.terms",
},
{
"name": "Using Notify",
"link": "main.using_notify",
},
]
|
def features_nav():
return [
{
"name": "Features",
"link": "main.features",
},
{
"name": "Emails",
"link": "main.features_email",
},
{
"name": "Text messages",
"link": "main.features_sms",
},
{
"name": "Letters",
"link": "main.features_letters",
},
{
"name": "Roadmap",
"link": "main.roadmap",
},
{
"name": "Trial mode",
"link": "main.trial_mode_new",
},
{
"name": "Message status",
"link": "main.message_status",
},
{
"name": "Security",
"link": "main.security",
},
{
"name": "Terms of use",
"link": "main.terms",
},
{
"name": "Using Notify",
"link": "main.using_notify",
},
]
|
Correct 'message status' nav label
|
Correct 'message status' nav label
|
Python
|
mit
|
alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin
|
def features_nav():
return [
{
"name": "Features",
"link": "main.features",
},
{
"name": "Emails",
"link": "main.features_email",
},
{
"name": "Text messages",
"link": "main.features_sms",
},
{
"name": "Letters",
"link": "main.features_letters",
},
{
"name": "Roadmap",
"link": "main.roadmap",
},
{
"name": "Trial mode",
"link": "main.trial_mode_new",
},
{
"name": "Message statuses",
"link": "main.message_status",
},
{
"name": "Security",
"link": "main.security",
},
{
"name": "Terms of use",
"link": "main.terms",
},
{
"name": "Using Notify",
"link": "main.using_notify",
},
]
Correct 'message status' nav label
|
def features_nav():
return [
{
"name": "Features",
"link": "main.features",
},
{
"name": "Emails",
"link": "main.features_email",
},
{
"name": "Text messages",
"link": "main.features_sms",
},
{
"name": "Letters",
"link": "main.features_letters",
},
{
"name": "Roadmap",
"link": "main.roadmap",
},
{
"name": "Trial mode",
"link": "main.trial_mode_new",
},
{
"name": "Message status",
"link": "main.message_status",
},
{
"name": "Security",
"link": "main.security",
},
{
"name": "Terms of use",
"link": "main.terms",
},
{
"name": "Using Notify",
"link": "main.using_notify",
},
]
|
<commit_before>def features_nav():
return [
{
"name": "Features",
"link": "main.features",
},
{
"name": "Emails",
"link": "main.features_email",
},
{
"name": "Text messages",
"link": "main.features_sms",
},
{
"name": "Letters",
"link": "main.features_letters",
},
{
"name": "Roadmap",
"link": "main.roadmap",
},
{
"name": "Trial mode",
"link": "main.trial_mode_new",
},
{
"name": "Message statuses",
"link": "main.message_status",
},
{
"name": "Security",
"link": "main.security",
},
{
"name": "Terms of use",
"link": "main.terms",
},
{
"name": "Using Notify",
"link": "main.using_notify",
},
]
<commit_msg>Correct 'message status' nav label<commit_after>
|
def features_nav():
return [
{
"name": "Features",
"link": "main.features",
},
{
"name": "Emails",
"link": "main.features_email",
},
{
"name": "Text messages",
"link": "main.features_sms",
},
{
"name": "Letters",
"link": "main.features_letters",
},
{
"name": "Roadmap",
"link": "main.roadmap",
},
{
"name": "Trial mode",
"link": "main.trial_mode_new",
},
{
"name": "Message status",
"link": "main.message_status",
},
{
"name": "Security",
"link": "main.security",
},
{
"name": "Terms of use",
"link": "main.terms",
},
{
"name": "Using Notify",
"link": "main.using_notify",
},
]
|
def features_nav():
return [
{
"name": "Features",
"link": "main.features",
},
{
"name": "Emails",
"link": "main.features_email",
},
{
"name": "Text messages",
"link": "main.features_sms",
},
{
"name": "Letters",
"link": "main.features_letters",
},
{
"name": "Roadmap",
"link": "main.roadmap",
},
{
"name": "Trial mode",
"link": "main.trial_mode_new",
},
{
"name": "Message statuses",
"link": "main.message_status",
},
{
"name": "Security",
"link": "main.security",
},
{
"name": "Terms of use",
"link": "main.terms",
},
{
"name": "Using Notify",
"link": "main.using_notify",
},
]
Correct 'message status' nav labeldef features_nav():
return [
{
"name": "Features",
"link": "main.features",
},
{
"name": "Emails",
"link": "main.features_email",
},
{
"name": "Text messages",
"link": "main.features_sms",
},
{
"name": "Letters",
"link": "main.features_letters",
},
{
"name": "Roadmap",
"link": "main.roadmap",
},
{
"name": "Trial mode",
"link": "main.trial_mode_new",
},
{
"name": "Message status",
"link": "main.message_status",
},
{
"name": "Security",
"link": "main.security",
},
{
"name": "Terms of use",
"link": "main.terms",
},
{
"name": "Using Notify",
"link": "main.using_notify",
},
]
|
<commit_before>def features_nav():
return [
{
"name": "Features",
"link": "main.features",
},
{
"name": "Emails",
"link": "main.features_email",
},
{
"name": "Text messages",
"link": "main.features_sms",
},
{
"name": "Letters",
"link": "main.features_letters",
},
{
"name": "Roadmap",
"link": "main.roadmap",
},
{
"name": "Trial mode",
"link": "main.trial_mode_new",
},
{
"name": "Message statuses",
"link": "main.message_status",
},
{
"name": "Security",
"link": "main.security",
},
{
"name": "Terms of use",
"link": "main.terms",
},
{
"name": "Using Notify",
"link": "main.using_notify",
},
]
<commit_msg>Correct 'message status' nav label<commit_after>def features_nav():
return [
{
"name": "Features",
"link": "main.features",
},
{
"name": "Emails",
"link": "main.features_email",
},
{
"name": "Text messages",
"link": "main.features_sms",
},
{
"name": "Letters",
"link": "main.features_letters",
},
{
"name": "Roadmap",
"link": "main.roadmap",
},
{
"name": "Trial mode",
"link": "main.trial_mode_new",
},
{
"name": "Message status",
"link": "main.message_status",
},
{
"name": "Security",
"link": "main.security",
},
{
"name": "Terms of use",
"link": "main.terms",
},
{
"name": "Using Notify",
"link": "main.using_notify",
},
]
|
1f2a30c4316c6da714b7cbda1d6052e6e5040312
|
rasterio/tool.py
|
rasterio/tool.py
|
import code
import collections
import logging
import sys
import numpy
import rasterio
logger = logging.getLogger('rasterio')
Stats = collections.namedtuple('Stats', ['min', 'max', 'mean'])
def main(banner, dataset):
def show(source, cmap='gray'):
"""Show a raster using matplotlib.
The raster may be either an ndarray or a (dataset, bidx)
tuple.
"""
import matplotlib.pyplot as plt
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
plt.imshow(arr, cmap=cmap)
plt.show()
def stats(source):
"""Return a tuple with raster min, max, and mean.
"""
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
return Stats(numpy.min(arr), numpy.max(arr), numpy.mean(arr))
code.interact(
banner, local=dict(locals(), src=dataset, np=numpy, rio=rasterio))
return 0
|
import code
import collections
import logging
import sys
import matplotlib.pyplot as plt
import numpy
import rasterio
logger = logging.getLogger('rasterio')
Stats = collections.namedtuple('Stats', ['min', 'max', 'mean'])
def main(banner, dataset):
def show(source, cmap='gray'):
"""Show a raster using matplotlib.
The raster may be either an ndarray or a (dataset, bidx)
tuple.
"""
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
plt.imshow(arr, cmap=cmap)
plt.show()
def stats(source):
"""Return a tuple with raster min, max, and mean.
"""
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
return Stats(numpy.min(arr), numpy.max(arr), numpy.mean(arr))
code.interact(
banner,
local=dict(
locals(), src=dataset, np=numpy, rio=rasterio, plt=plt))
return 0
|
Add plt to rio_insp locals.
|
Add plt to rio_insp locals.
|
Python
|
bsd-3-clause
|
johanvdw/rasterio,perrygeo/rasterio,youngpm/rasterio,youngpm/rasterio,clembou/rasterio,sgillies/rasterio,youngpm/rasterio,kapadia/rasterio,clembou/rasterio,kapadia/rasterio,kapadia/rasterio,brendan-ward/rasterio,brendan-ward/rasterio,perrygeo/rasterio,johanvdw/rasterio,njwilson23/rasterio,snorfalorpagus/rasterio,njwilson23/rasterio,perrygeo/rasterio,brendan-ward/rasterio,njwilson23/rasterio,johanvdw/rasterio,clembou/rasterio
|
import code
import collections
import logging
import sys
import numpy
import rasterio
logger = logging.getLogger('rasterio')
Stats = collections.namedtuple('Stats', ['min', 'max', 'mean'])
def main(banner, dataset):
def show(source, cmap='gray'):
"""Show a raster using matplotlib.
The raster may be either an ndarray or a (dataset, bidx)
tuple.
"""
import matplotlib.pyplot as plt
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
plt.imshow(arr, cmap=cmap)
plt.show()
def stats(source):
"""Return a tuple with raster min, max, and mean.
"""
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
return Stats(numpy.min(arr), numpy.max(arr), numpy.mean(arr))
code.interact(
banner, local=dict(locals(), src=dataset, np=numpy, rio=rasterio))
return 0
Add plt to rio_insp locals.
|
import code
import collections
import logging
import sys
import matplotlib.pyplot as plt
import numpy
import rasterio
logger = logging.getLogger('rasterio')
Stats = collections.namedtuple('Stats', ['min', 'max', 'mean'])
def main(banner, dataset):
def show(source, cmap='gray'):
"""Show a raster using matplotlib.
The raster may be either an ndarray or a (dataset, bidx)
tuple.
"""
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
plt.imshow(arr, cmap=cmap)
plt.show()
def stats(source):
"""Return a tuple with raster min, max, and mean.
"""
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
return Stats(numpy.min(arr), numpy.max(arr), numpy.mean(arr))
code.interact(
banner,
local=dict(
locals(), src=dataset, np=numpy, rio=rasterio, plt=plt))
return 0
|
<commit_before>
import code
import collections
import logging
import sys
import numpy
import rasterio
logger = logging.getLogger('rasterio')
Stats = collections.namedtuple('Stats', ['min', 'max', 'mean'])
def main(banner, dataset):
def show(source, cmap='gray'):
"""Show a raster using matplotlib.
The raster may be either an ndarray or a (dataset, bidx)
tuple.
"""
import matplotlib.pyplot as plt
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
plt.imshow(arr, cmap=cmap)
plt.show()
def stats(source):
"""Return a tuple with raster min, max, and mean.
"""
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
return Stats(numpy.min(arr), numpy.max(arr), numpy.mean(arr))
code.interact(
banner, local=dict(locals(), src=dataset, np=numpy, rio=rasterio))
return 0
<commit_msg>Add plt to rio_insp locals.<commit_after>
|
import code
import collections
import logging
import sys
import matplotlib.pyplot as plt
import numpy
import rasterio
logger = logging.getLogger('rasterio')
Stats = collections.namedtuple('Stats', ['min', 'max', 'mean'])
def main(banner, dataset):
def show(source, cmap='gray'):
"""Show a raster using matplotlib.
The raster may be either an ndarray or a (dataset, bidx)
tuple.
"""
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
plt.imshow(arr, cmap=cmap)
plt.show()
def stats(source):
"""Return a tuple with raster min, max, and mean.
"""
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
return Stats(numpy.min(arr), numpy.max(arr), numpy.mean(arr))
code.interact(
banner,
local=dict(
locals(), src=dataset, np=numpy, rio=rasterio, plt=plt))
return 0
|
import code
import collections
import logging
import sys
import numpy
import rasterio
logger = logging.getLogger('rasterio')
Stats = collections.namedtuple('Stats', ['min', 'max', 'mean'])
def main(banner, dataset):
def show(source, cmap='gray'):
"""Show a raster using matplotlib.
The raster may be either an ndarray or a (dataset, bidx)
tuple.
"""
import matplotlib.pyplot as plt
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
plt.imshow(arr, cmap=cmap)
plt.show()
def stats(source):
"""Return a tuple with raster min, max, and mean.
"""
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
return Stats(numpy.min(arr), numpy.max(arr), numpy.mean(arr))
code.interact(
banner, local=dict(locals(), src=dataset, np=numpy, rio=rasterio))
return 0
Add plt to rio_insp locals.
import code
import collections
import logging
import sys
import matplotlib.pyplot as plt
import numpy
import rasterio
logger = logging.getLogger('rasterio')
Stats = collections.namedtuple('Stats', ['min', 'max', 'mean'])
def main(banner, dataset):
def show(source, cmap='gray'):
"""Show a raster using matplotlib.
The raster may be either an ndarray or a (dataset, bidx)
tuple.
"""
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
plt.imshow(arr, cmap=cmap)
plt.show()
def stats(source):
"""Return a tuple with raster min, max, and mean.
"""
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
return Stats(numpy.min(arr), numpy.max(arr), numpy.mean(arr))
code.interact(
banner,
local=dict(
locals(), src=dataset, np=numpy, rio=rasterio, plt=plt))
return 0
|
<commit_before>
import code
import collections
import logging
import sys
import numpy
import rasterio
logger = logging.getLogger('rasterio')
Stats = collections.namedtuple('Stats', ['min', 'max', 'mean'])
def main(banner, dataset):
def show(source, cmap='gray'):
"""Show a raster using matplotlib.
The raster may be either an ndarray or a (dataset, bidx)
tuple.
"""
import matplotlib.pyplot as plt
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
plt.imshow(arr, cmap=cmap)
plt.show()
def stats(source):
"""Return a tuple with raster min, max, and mean.
"""
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
return Stats(numpy.min(arr), numpy.max(arr), numpy.mean(arr))
code.interact(
banner, local=dict(locals(), src=dataset, np=numpy, rio=rasterio))
return 0
<commit_msg>Add plt to rio_insp locals.<commit_after>
import code
import collections
import logging
import sys
import matplotlib.pyplot as plt
import numpy
import rasterio
logger = logging.getLogger('rasterio')
Stats = collections.namedtuple('Stats', ['min', 'max', 'mean'])
def main(banner, dataset):
def show(source, cmap='gray'):
"""Show a raster using matplotlib.
The raster may be either an ndarray or a (dataset, bidx)
tuple.
"""
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
plt.imshow(arr, cmap=cmap)
plt.show()
def stats(source):
"""Return a tuple with raster min, max, and mean.
"""
if isinstance(source, tuple):
arr = source[0].read_band(source[1])
else:
arr = source
return Stats(numpy.min(arr), numpy.max(arr), numpy.mean(arr))
code.interact(
banner,
local=dict(
locals(), src=dataset, np=numpy, rio=rasterio, plt=plt))
return 0
|
8d544103d08b17a48dc9d424db4498184e10d8a3
|
tweepy/asynchronous/__init__.py
|
tweepy/asynchronous/__init__.py
|
# Tweepy
# Copyright 2009-2022 Joshua Roesslein
# See LICENSE for details.
"""
Tweepy.asynchronoous
Asynchronous interfaces with the Twitter API
"""
try:
import aiohttp
import oauthlib
except ModuleNotFoundError:
from tweepy.errors import TweepyException
raise TweepyException(
"tweepy.asynchronous requires aiohttp and oauthlib to be installed"
)
from tweepy.asynchronous.streaming import AsyncStream
from tweepy.asynchronous.client import AsyncClient
|
# Tweepy
# Copyright 2009-2022 Joshua Roesslein
# See LICENSE for details.
"""
Tweepy.asynchronoous
Asynchronous interfaces with the Twitter API
"""
try:
import aiohttp
import async_lru
import oauthlib
except ModuleNotFoundError:
from tweepy.errors import TweepyException
raise TweepyException(
"tweepy.asynchronous requires aiohttp, async_lru, and oauthlib to be "
"installed"
)
from tweepy.asynchronous.streaming import AsyncStream
from tweepy.asynchronous.client import AsyncClient
|
Check for async_lru when importing asynchronous subpackage
|
Check for async_lru when importing asynchronous subpackage
|
Python
|
mit
|
svven/tweepy,tweepy/tweepy
|
# Tweepy
# Copyright 2009-2022 Joshua Roesslein
# See LICENSE for details.
"""
Tweepy.asynchronoous
Asynchronous interfaces with the Twitter API
"""
try:
import aiohttp
import oauthlib
except ModuleNotFoundError:
from tweepy.errors import TweepyException
raise TweepyException(
"tweepy.asynchronous requires aiohttp and oauthlib to be installed"
)
from tweepy.asynchronous.streaming import AsyncStream
from tweepy.asynchronous.client import AsyncClient
Check for async_lru when importing asynchronous subpackage
|
# Tweepy
# Copyright 2009-2022 Joshua Roesslein
# See LICENSE for details.
"""
Tweepy.asynchronoous
Asynchronous interfaces with the Twitter API
"""
try:
import aiohttp
import async_lru
import oauthlib
except ModuleNotFoundError:
from tweepy.errors import TweepyException
raise TweepyException(
"tweepy.asynchronous requires aiohttp, async_lru, and oauthlib to be "
"installed"
)
from tweepy.asynchronous.streaming import AsyncStream
from tweepy.asynchronous.client import AsyncClient
|
<commit_before># Tweepy
# Copyright 2009-2022 Joshua Roesslein
# See LICENSE for details.
"""
Tweepy.asynchronoous
Asynchronous interfaces with the Twitter API
"""
try:
import aiohttp
import oauthlib
except ModuleNotFoundError:
from tweepy.errors import TweepyException
raise TweepyException(
"tweepy.asynchronous requires aiohttp and oauthlib to be installed"
)
from tweepy.asynchronous.streaming import AsyncStream
from tweepy.asynchronous.client import AsyncClient
<commit_msg>Check for async_lru when importing asynchronous subpackage<commit_after>
|
# Tweepy
# Copyright 2009-2022 Joshua Roesslein
# See LICENSE for details.
"""
Tweepy.asynchronoous
Asynchronous interfaces with the Twitter API
"""
try:
import aiohttp
import async_lru
import oauthlib
except ModuleNotFoundError:
from tweepy.errors import TweepyException
raise TweepyException(
"tweepy.asynchronous requires aiohttp, async_lru, and oauthlib to be "
"installed"
)
from tweepy.asynchronous.streaming import AsyncStream
from tweepy.asynchronous.client import AsyncClient
|
# Tweepy
# Copyright 2009-2022 Joshua Roesslein
# See LICENSE for details.
"""
Tweepy.asynchronoous
Asynchronous interfaces with the Twitter API
"""
try:
import aiohttp
import oauthlib
except ModuleNotFoundError:
from tweepy.errors import TweepyException
raise TweepyException(
"tweepy.asynchronous requires aiohttp and oauthlib to be installed"
)
from tweepy.asynchronous.streaming import AsyncStream
from tweepy.asynchronous.client import AsyncClient
Check for async_lru when importing asynchronous subpackage# Tweepy
# Copyright 2009-2022 Joshua Roesslein
# See LICENSE for details.
"""
Tweepy.asynchronoous
Asynchronous interfaces with the Twitter API
"""
try:
import aiohttp
import async_lru
import oauthlib
except ModuleNotFoundError:
from tweepy.errors import TweepyException
raise TweepyException(
"tweepy.asynchronous requires aiohttp, async_lru, and oauthlib to be "
"installed"
)
from tweepy.asynchronous.streaming import AsyncStream
from tweepy.asynchronous.client import AsyncClient
|
<commit_before># Tweepy
# Copyright 2009-2022 Joshua Roesslein
# See LICENSE for details.
"""
Tweepy.asynchronoous
Asynchronous interfaces with the Twitter API
"""
try:
import aiohttp
import oauthlib
except ModuleNotFoundError:
from tweepy.errors import TweepyException
raise TweepyException(
"tweepy.asynchronous requires aiohttp and oauthlib to be installed"
)
from tweepy.asynchronous.streaming import AsyncStream
from tweepy.asynchronous.client import AsyncClient
<commit_msg>Check for async_lru when importing asynchronous subpackage<commit_after># Tweepy
# Copyright 2009-2022 Joshua Roesslein
# See LICENSE for details.
"""
Tweepy.asynchronoous
Asynchronous interfaces with the Twitter API
"""
try:
import aiohttp
import async_lru
import oauthlib
except ModuleNotFoundError:
from tweepy.errors import TweepyException
raise TweepyException(
"tweepy.asynchronous requires aiohttp, async_lru, and oauthlib to be "
"installed"
)
from tweepy.asynchronous.streaming import AsyncStream
from tweepy.asynchronous.client import AsyncClient
|
679ca238e6a87c8178705b0715c0015224d2c00a
|
tests/adjust_unittesting_config_for_ci.py
|
tests/adjust_unittesting_config_for_ci.py
|
from os.path import abspath, dirname, join
import json
if __name__ == '__main__':
file = abspath(join(dirname(__file__), '..', 'unittesting.json'))
with open(file, 'w') as fp:
config = {
"deferred": True,
"verbosity": 2,
"capture_console": False,
"failfast": False,
"reload_package_on_testing": False,
"start_coverage_after_reload": False,
"show_reload_progress": False,
"output": None,
"generate_html_report": False
}
json.dump(config, fp, indent=4)
|
from os.path import abspath, dirname, join
import json
if __name__ == '__main__':
file = abspath(join(dirname(__file__), '..', 'unittesting.json'))
with open(file, 'w') as fp:
config = {
"deferred": True,
"verbosity": 0,
"capture_console": True,
"failfast": True,
"reload_package_on_testing": False,
"start_coverage_after_reload": False,
"show_reload_progress": False,
"output": None,
"generate_html_report": False
}
json.dump(config, fp, indent=4)
|
Adjust unittesting config for CI
|
Adjust unittesting config for CI
|
Python
|
mit
|
tomv564/LSP
|
from os.path import abspath, dirname, join
import json
if __name__ == '__main__':
file = abspath(join(dirname(__file__), '..', 'unittesting.json'))
with open(file, 'w') as fp:
config = {
"deferred": True,
"verbosity": 2,
"capture_console": False,
"failfast": False,
"reload_package_on_testing": False,
"start_coverage_after_reload": False,
"show_reload_progress": False,
"output": None,
"generate_html_report": False
}
json.dump(config, fp, indent=4)
Adjust unittesting config for CI
|
from os.path import abspath, dirname, join
import json
if __name__ == '__main__':
file = abspath(join(dirname(__file__), '..', 'unittesting.json'))
with open(file, 'w') as fp:
config = {
"deferred": True,
"verbosity": 0,
"capture_console": True,
"failfast": True,
"reload_package_on_testing": False,
"start_coverage_after_reload": False,
"show_reload_progress": False,
"output": None,
"generate_html_report": False
}
json.dump(config, fp, indent=4)
|
<commit_before>from os.path import abspath, dirname, join
import json
if __name__ == '__main__':
file = abspath(join(dirname(__file__), '..', 'unittesting.json'))
with open(file, 'w') as fp:
config = {
"deferred": True,
"verbosity": 2,
"capture_console": False,
"failfast": False,
"reload_package_on_testing": False,
"start_coverage_after_reload": False,
"show_reload_progress": False,
"output": None,
"generate_html_report": False
}
json.dump(config, fp, indent=4)
<commit_msg>Adjust unittesting config for CI<commit_after>
|
from os.path import abspath, dirname, join
import json
if __name__ == '__main__':
file = abspath(join(dirname(__file__), '..', 'unittesting.json'))
with open(file, 'w') as fp:
config = {
"deferred": True,
"verbosity": 0,
"capture_console": True,
"failfast": True,
"reload_package_on_testing": False,
"start_coverage_after_reload": False,
"show_reload_progress": False,
"output": None,
"generate_html_report": False
}
json.dump(config, fp, indent=4)
|
from os.path import abspath, dirname, join
import json
if __name__ == '__main__':
file = abspath(join(dirname(__file__), '..', 'unittesting.json'))
with open(file, 'w') as fp:
config = {
"deferred": True,
"verbosity": 2,
"capture_console": False,
"failfast": False,
"reload_package_on_testing": False,
"start_coverage_after_reload": False,
"show_reload_progress": False,
"output": None,
"generate_html_report": False
}
json.dump(config, fp, indent=4)
Adjust unittesting config for CIfrom os.path import abspath, dirname, join
import json
if __name__ == '__main__':
file = abspath(join(dirname(__file__), '..', 'unittesting.json'))
with open(file, 'w') as fp:
config = {
"deferred": True,
"verbosity": 0,
"capture_console": True,
"failfast": True,
"reload_package_on_testing": False,
"start_coverage_after_reload": False,
"show_reload_progress": False,
"output": None,
"generate_html_report": False
}
json.dump(config, fp, indent=4)
|
<commit_before>from os.path import abspath, dirname, join
import json
if __name__ == '__main__':
file = abspath(join(dirname(__file__), '..', 'unittesting.json'))
with open(file, 'w') as fp:
config = {
"deferred": True,
"verbosity": 2,
"capture_console": False,
"failfast": False,
"reload_package_on_testing": False,
"start_coverage_after_reload": False,
"show_reload_progress": False,
"output": None,
"generate_html_report": False
}
json.dump(config, fp, indent=4)
<commit_msg>Adjust unittesting config for CI<commit_after>from os.path import abspath, dirname, join
import json
if __name__ == '__main__':
file = abspath(join(dirname(__file__), '..', 'unittesting.json'))
with open(file, 'w') as fp:
config = {
"deferred": True,
"verbosity": 0,
"capture_console": True,
"failfast": True,
"reload_package_on_testing": False,
"start_coverage_after_reload": False,
"show_reload_progress": False,
"output": None,
"generate_html_report": False
}
json.dump(config, fp, indent=4)
|
4eae42a542c67e4b47e4b7fffc0b746fdb934f51
|
librarypaste/mongostore.py
|
librarypaste/mongostore.py
|
import pymongo
import bson
from datastore import DataStore
class MongoDBDataStore(pymongo.Connection, DataStore):
def _store(self, uid, content, data):
"""Store the given dict of content at uid. Nothing returned."""
doc = dict(uid=uid, data=bson.Binary(data))
doc.update(content)
self.librarypaste.pastes.save(doc)
def _storeLog(self, nick, time, uid):
"""Adds the nick & uid to the log for a given time/order. No return."""
query = dict(uid=uid)
update = {'$set': dict(nick=nick, time=time)}
self.librarypaste.pastes.update(query, update)
def _retrieve(self, uid):
"""Return a dict with the contents of the paste, including the raw
data, if any, as the key 'data'. Must pass in uid, not shortid."""
query = dict(uid=uid)
return self.librarypaste.pastes.find_one(query)
def lookup(self, nick):
"""Looks for the most recent paste by a given nick.
Returns the uid or None"""
query = dict(nick=nick)
order = dict(time=pymongo.DESCENDING)
recs = self.librarypaste.pastes.find(query).order(order).limit(1)
try:
return next(recs)['uid']
except StopIteration:
pass
def _lookupUid(self, shortid):
query = dict(shortid=shortid)
rec = self.librarypaste.pastes.find_one(query)
return rec['uid']
|
import pymongo
import bson
from datastore import DataStore
class MongoDBDataStore(pymongo.Connection, DataStore):
def _store(self, uid, content, data=None):
"""Store the given dict of content at uid. Nothing returned."""
doc = dict(uid=uid)
if data:
doc.update(data=bson.Binary(data))
doc.update(content)
self.librarypaste.pastes.save(doc)
def _storeLog(self, nick, time, uid):
"""Adds the nick & uid to the log for a given time/order. No return."""
query = dict(uid=uid)
update = {'$set': dict(nick=nick, time=time)}
self.librarypaste.pastes.update(query, update)
def _retrieve(self, uid):
"""Return a dict with the contents of the paste, including the raw
data, if any, as the key 'data'. Must pass in uid, not shortid."""
query = dict(uid=uid)
return self.librarypaste.pastes.find_one(query)
def lookup(self, nick):
"""Looks for the most recent paste by a given nick.
Returns the uid or None"""
query = dict(nick=nick)
order = dict(time=pymongo.DESCENDING)
recs = self.librarypaste.pastes.find(query).order(order).limit(1)
try:
return next(recs)['uid']
except StopIteration:
pass
def _lookupUid(self, shortid):
query = dict(shortid=shortid)
rec = self.librarypaste.pastes.find_one(query)
return rec['uid']
|
Allow data=None, even though the spec doesn't allow it
|
Allow data=None, even though the spec doesn't allow it
|
Python
|
mit
|
yougov/librarypaste,yougov/librarypaste
|
import pymongo
import bson
from datastore import DataStore
class MongoDBDataStore(pymongo.Connection, DataStore):
def _store(self, uid, content, data):
"""Store the given dict of content at uid. Nothing returned."""
doc = dict(uid=uid, data=bson.Binary(data))
doc.update(content)
self.librarypaste.pastes.save(doc)
def _storeLog(self, nick, time, uid):
"""Adds the nick & uid to the log for a given time/order. No return."""
query = dict(uid=uid)
update = {'$set': dict(nick=nick, time=time)}
self.librarypaste.pastes.update(query, update)
def _retrieve(self, uid):
"""Return a dict with the contents of the paste, including the raw
data, if any, as the key 'data'. Must pass in uid, not shortid."""
query = dict(uid=uid)
return self.librarypaste.pastes.find_one(query)
def lookup(self, nick):
"""Looks for the most recent paste by a given nick.
Returns the uid or None"""
query = dict(nick=nick)
order = dict(time=pymongo.DESCENDING)
recs = self.librarypaste.pastes.find(query).order(order).limit(1)
try:
return next(recs)['uid']
except StopIteration:
pass
def _lookupUid(self, shortid):
query = dict(shortid=shortid)
rec = self.librarypaste.pastes.find_one(query)
return rec['uid']
Allow data=None, even though the spec doesn't allow it
|
import pymongo
import bson
from datastore import DataStore
class MongoDBDataStore(pymongo.Connection, DataStore):
def _store(self, uid, content, data=None):
"""Store the given dict of content at uid. Nothing returned."""
doc = dict(uid=uid)
if data:
doc.update(data=bson.Binary(data))
doc.update(content)
self.librarypaste.pastes.save(doc)
def _storeLog(self, nick, time, uid):
"""Adds the nick & uid to the log for a given time/order. No return."""
query = dict(uid=uid)
update = {'$set': dict(nick=nick, time=time)}
self.librarypaste.pastes.update(query, update)
def _retrieve(self, uid):
"""Return a dict with the contents of the paste, including the raw
data, if any, as the key 'data'. Must pass in uid, not shortid."""
query = dict(uid=uid)
return self.librarypaste.pastes.find_one(query)
def lookup(self, nick):
"""Looks for the most recent paste by a given nick.
Returns the uid or None"""
query = dict(nick=nick)
order = dict(time=pymongo.DESCENDING)
recs = self.librarypaste.pastes.find(query).order(order).limit(1)
try:
return next(recs)['uid']
except StopIteration:
pass
def _lookupUid(self, shortid):
query = dict(shortid=shortid)
rec = self.librarypaste.pastes.find_one(query)
return rec['uid']
|
<commit_before>import pymongo
import bson
from datastore import DataStore
class MongoDBDataStore(pymongo.Connection, DataStore):
def _store(self, uid, content, data):
"""Store the given dict of content at uid. Nothing returned."""
doc = dict(uid=uid, data=bson.Binary(data))
doc.update(content)
self.librarypaste.pastes.save(doc)
def _storeLog(self, nick, time, uid):
"""Adds the nick & uid to the log for a given time/order. No return."""
query = dict(uid=uid)
update = {'$set': dict(nick=nick, time=time)}
self.librarypaste.pastes.update(query, update)
def _retrieve(self, uid):
"""Return a dict with the contents of the paste, including the raw
data, if any, as the key 'data'. Must pass in uid, not shortid."""
query = dict(uid=uid)
return self.librarypaste.pastes.find_one(query)
def lookup(self, nick):
"""Looks for the most recent paste by a given nick.
Returns the uid or None"""
query = dict(nick=nick)
order = dict(time=pymongo.DESCENDING)
recs = self.librarypaste.pastes.find(query).order(order).limit(1)
try:
return next(recs)['uid']
except StopIteration:
pass
def _lookupUid(self, shortid):
query = dict(shortid=shortid)
rec = self.librarypaste.pastes.find_one(query)
return rec['uid']
<commit_msg>Allow data=None, even though the spec doesn't allow it<commit_after>
|
import pymongo
import bson
from datastore import DataStore
class MongoDBDataStore(pymongo.Connection, DataStore):
def _store(self, uid, content, data=None):
"""Store the given dict of content at uid. Nothing returned."""
doc = dict(uid=uid)
if data:
doc.update(data=bson.Binary(data))
doc.update(content)
self.librarypaste.pastes.save(doc)
def _storeLog(self, nick, time, uid):
"""Adds the nick & uid to the log for a given time/order. No return."""
query = dict(uid=uid)
update = {'$set': dict(nick=nick, time=time)}
self.librarypaste.pastes.update(query, update)
def _retrieve(self, uid):
"""Return a dict with the contents of the paste, including the raw
data, if any, as the key 'data'. Must pass in uid, not shortid."""
query = dict(uid=uid)
return self.librarypaste.pastes.find_one(query)
def lookup(self, nick):
"""Looks for the most recent paste by a given nick.
Returns the uid or None"""
query = dict(nick=nick)
order = dict(time=pymongo.DESCENDING)
recs = self.librarypaste.pastes.find(query).order(order).limit(1)
try:
return next(recs)['uid']
except StopIteration:
pass
def _lookupUid(self, shortid):
query = dict(shortid=shortid)
rec = self.librarypaste.pastes.find_one(query)
return rec['uid']
|
import pymongo
import bson
from datastore import DataStore
class MongoDBDataStore(pymongo.Connection, DataStore):
def _store(self, uid, content, data):
"""Store the given dict of content at uid. Nothing returned."""
doc = dict(uid=uid, data=bson.Binary(data))
doc.update(content)
self.librarypaste.pastes.save(doc)
def _storeLog(self, nick, time, uid):
"""Adds the nick & uid to the log for a given time/order. No return."""
query = dict(uid=uid)
update = {'$set': dict(nick=nick, time=time)}
self.librarypaste.pastes.update(query, update)
def _retrieve(self, uid):
"""Return a dict with the contents of the paste, including the raw
data, if any, as the key 'data'. Must pass in uid, not shortid."""
query = dict(uid=uid)
return self.librarypaste.pastes.find_one(query)
def lookup(self, nick):
"""Looks for the most recent paste by a given nick.
Returns the uid or None"""
query = dict(nick=nick)
order = dict(time=pymongo.DESCENDING)
recs = self.librarypaste.pastes.find(query).order(order).limit(1)
try:
return next(recs)['uid']
except StopIteration:
pass
def _lookupUid(self, shortid):
query = dict(shortid=shortid)
rec = self.librarypaste.pastes.find_one(query)
return rec['uid']
Allow data=None, even though the spec doesn't allow itimport pymongo
import bson
from datastore import DataStore
class MongoDBDataStore(pymongo.Connection, DataStore):
def _store(self, uid, content, data=None):
"""Store the given dict of content at uid. Nothing returned."""
doc = dict(uid=uid)
if data:
doc.update(data=bson.Binary(data))
doc.update(content)
self.librarypaste.pastes.save(doc)
def _storeLog(self, nick, time, uid):
"""Adds the nick & uid to the log for a given time/order. No return."""
query = dict(uid=uid)
update = {'$set': dict(nick=nick, time=time)}
self.librarypaste.pastes.update(query, update)
def _retrieve(self, uid):
"""Return a dict with the contents of the paste, including the raw
data, if any, as the key 'data'. Must pass in uid, not shortid."""
query = dict(uid=uid)
return self.librarypaste.pastes.find_one(query)
def lookup(self, nick):
"""Looks for the most recent paste by a given nick.
Returns the uid or None"""
query = dict(nick=nick)
order = dict(time=pymongo.DESCENDING)
recs = self.librarypaste.pastes.find(query).order(order).limit(1)
try:
return next(recs)['uid']
except StopIteration:
pass
def _lookupUid(self, shortid):
query = dict(shortid=shortid)
rec = self.librarypaste.pastes.find_one(query)
return rec['uid']
|
<commit_before>import pymongo
import bson
from datastore import DataStore
class MongoDBDataStore(pymongo.Connection, DataStore):
def _store(self, uid, content, data):
"""Store the given dict of content at uid. Nothing returned."""
doc = dict(uid=uid, data=bson.Binary(data))
doc.update(content)
self.librarypaste.pastes.save(doc)
def _storeLog(self, nick, time, uid):
"""Adds the nick & uid to the log for a given time/order. No return."""
query = dict(uid=uid)
update = {'$set': dict(nick=nick, time=time)}
self.librarypaste.pastes.update(query, update)
def _retrieve(self, uid):
"""Return a dict with the contents of the paste, including the raw
data, if any, as the key 'data'. Must pass in uid, not shortid."""
query = dict(uid=uid)
return self.librarypaste.pastes.find_one(query)
def lookup(self, nick):
"""Looks for the most recent paste by a given nick.
Returns the uid or None"""
query = dict(nick=nick)
order = dict(time=pymongo.DESCENDING)
recs = self.librarypaste.pastes.find(query).order(order).limit(1)
try:
return next(recs)['uid']
except StopIteration:
pass
def _lookupUid(self, shortid):
query = dict(shortid=shortid)
rec = self.librarypaste.pastes.find_one(query)
return rec['uid']
<commit_msg>Allow data=None, even though the spec doesn't allow it<commit_after>import pymongo
import bson
from datastore import DataStore
class MongoDBDataStore(pymongo.Connection, DataStore):
def _store(self, uid, content, data=None):
"""Store the given dict of content at uid. Nothing returned."""
doc = dict(uid=uid)
if data:
doc.update(data=bson.Binary(data))
doc.update(content)
self.librarypaste.pastes.save(doc)
def _storeLog(self, nick, time, uid):
"""Adds the nick & uid to the log for a given time/order. No return."""
query = dict(uid=uid)
update = {'$set': dict(nick=nick, time=time)}
self.librarypaste.pastes.update(query, update)
def _retrieve(self, uid):
"""Return a dict with the contents of the paste, including the raw
data, if any, as the key 'data'. Must pass in uid, not shortid."""
query = dict(uid=uid)
return self.librarypaste.pastes.find_one(query)
def lookup(self, nick):
"""Looks for the most recent paste by a given nick.
Returns the uid or None"""
query = dict(nick=nick)
order = dict(time=pymongo.DESCENDING)
recs = self.librarypaste.pastes.find(query).order(order).limit(1)
try:
return next(recs)['uid']
except StopIteration:
pass
def _lookupUid(self, shortid):
query = dict(shortid=shortid)
rec = self.librarypaste.pastes.find_one(query)
return rec['uid']
|
741b61eb6fbd5dede3d80801bdf09c9bea1fe755
|
tests/sentry/runner/commands/test_init.py
|
tests/sentry/runner/commands/test_init.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
from sentry.testutils import CliTestCase
from sentry.runner.commands.init import init
class InitTest(CliTestCase):
def test_simple(self):
with self.runner.isolated_filesystem():
rv = self.runner.invoke(init, ['config'], obj={})
assert rv.exit_code == 0, rv.output
contents = os.listdir('config')
assert set(contents) == {'sentry.conf.py', 'config.yml'}
# Make sure the python file is valid
ctx = {'__file__': 'sentry.conf.py'}
execfile('config/sentry.conf.py', ctx)
assert 'DEBUG' in ctx
# Make sure the yaml file is valid
from sentry.utils.yaml import safe_load
with open('config/config.yml', 'rb') as fp:
ctx = safe_load(fp)
assert 'system.secret-key' in ctx
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
from sentry.testutils import CliTestCase
from sentry.runner.commands.init import init
class InitTest(CliTestCase):
command = init
def test_simple(self):
with self.runner.isolated_filesystem():
rv = self.invoke('config')
assert rv.exit_code == 0, rv.output
contents = os.listdir('config')
assert set(contents) == {'sentry.conf.py', 'config.yml'}
# Make sure the python file is valid
ctx = {'__file__': 'sentry.conf.py'}
execfile('config/sentry.conf.py', ctx)
assert 'DEBUG' in ctx
# Make sure the yaml file is valid
from sentry.utils.yaml import safe_load
with open('config/config.yml', 'rb') as fp:
ctx = safe_load(fp)
assert 'system.secret-key' in ctx
def test_no_directory(self):
rv = self.invoke('sentry.conf.py')
assert rv.exit_code != 0, rv.output
|
Test `sentry init` for directory branch
|
Test `sentry init` for directory branch
|
Python
|
bsd-3-clause
|
JamesMura/sentry,zenefits/sentry,daevaorn/sentry,ifduyue/sentry,nicholasserra/sentry,mitsuhiko/sentry,JamesMura/sentry,daevaorn/sentry,nicholasserra/sentry,mvaled/sentry,mvaled/sentry,ifduyue/sentry,jean/sentry,looker/sentry,daevaorn/sentry,fotinakis/sentry,mvaled/sentry,jean/sentry,gencer/sentry,fotinakis/sentry,zenefits/sentry,JackDanger/sentry,jean/sentry,zenefits/sentry,looker/sentry,daevaorn/sentry,JackDanger/sentry,gencer/sentry,JamesMura/sentry,alexm92/sentry,gencer/sentry,mitsuhiko/sentry,gencer/sentry,fotinakis/sentry,JamesMura/sentry,mvaled/sentry,ifduyue/sentry,mvaled/sentry,alexm92/sentry,alexm92/sentry,ifduyue/sentry,looker/sentry,zenefits/sentry,BuildingLink/sentry,gencer/sentry,BuildingLink/sentry,looker/sentry,jean/sentry,BuildingLink/sentry,beeftornado/sentry,looker/sentry,BuildingLink/sentry,jean/sentry,beeftornado/sentry,JamesMura/sentry,ifduyue/sentry,fotinakis/sentry,zenefits/sentry,nicholasserra/sentry,mvaled/sentry,beeftornado/sentry,BuildingLink/sentry,JackDanger/sentry
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
from sentry.testutils import CliTestCase
from sentry.runner.commands.init import init
class InitTest(CliTestCase):
def test_simple(self):
with self.runner.isolated_filesystem():
rv = self.runner.invoke(init, ['config'], obj={})
assert rv.exit_code == 0, rv.output
contents = os.listdir('config')
assert set(contents) == {'sentry.conf.py', 'config.yml'}
# Make sure the python file is valid
ctx = {'__file__': 'sentry.conf.py'}
execfile('config/sentry.conf.py', ctx)
assert 'DEBUG' in ctx
# Make sure the yaml file is valid
from sentry.utils.yaml import safe_load
with open('config/config.yml', 'rb') as fp:
ctx = safe_load(fp)
assert 'system.secret-key' in ctx
Test `sentry init` for directory branch
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
from sentry.testutils import CliTestCase
from sentry.runner.commands.init import init
class InitTest(CliTestCase):
command = init
def test_simple(self):
with self.runner.isolated_filesystem():
rv = self.invoke('config')
assert rv.exit_code == 0, rv.output
contents = os.listdir('config')
assert set(contents) == {'sentry.conf.py', 'config.yml'}
# Make sure the python file is valid
ctx = {'__file__': 'sentry.conf.py'}
execfile('config/sentry.conf.py', ctx)
assert 'DEBUG' in ctx
# Make sure the yaml file is valid
from sentry.utils.yaml import safe_load
with open('config/config.yml', 'rb') as fp:
ctx = safe_load(fp)
assert 'system.secret-key' in ctx
def test_no_directory(self):
rv = self.invoke('sentry.conf.py')
assert rv.exit_code != 0, rv.output
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
from sentry.testutils import CliTestCase
from sentry.runner.commands.init import init
class InitTest(CliTestCase):
def test_simple(self):
with self.runner.isolated_filesystem():
rv = self.runner.invoke(init, ['config'], obj={})
assert rv.exit_code == 0, rv.output
contents = os.listdir('config')
assert set(contents) == {'sentry.conf.py', 'config.yml'}
# Make sure the python file is valid
ctx = {'__file__': 'sentry.conf.py'}
execfile('config/sentry.conf.py', ctx)
assert 'DEBUG' in ctx
# Make sure the yaml file is valid
from sentry.utils.yaml import safe_load
with open('config/config.yml', 'rb') as fp:
ctx = safe_load(fp)
assert 'system.secret-key' in ctx
<commit_msg>Test `sentry init` for directory branch<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
from sentry.testutils import CliTestCase
from sentry.runner.commands.init import init
class InitTest(CliTestCase):
command = init
def test_simple(self):
with self.runner.isolated_filesystem():
rv = self.invoke('config')
assert rv.exit_code == 0, rv.output
contents = os.listdir('config')
assert set(contents) == {'sentry.conf.py', 'config.yml'}
# Make sure the python file is valid
ctx = {'__file__': 'sentry.conf.py'}
execfile('config/sentry.conf.py', ctx)
assert 'DEBUG' in ctx
# Make sure the yaml file is valid
from sentry.utils.yaml import safe_load
with open('config/config.yml', 'rb') as fp:
ctx = safe_load(fp)
assert 'system.secret-key' in ctx
def test_no_directory(self):
rv = self.invoke('sentry.conf.py')
assert rv.exit_code != 0, rv.output
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
from sentry.testutils import CliTestCase
from sentry.runner.commands.init import init
class InitTest(CliTestCase):
def test_simple(self):
with self.runner.isolated_filesystem():
rv = self.runner.invoke(init, ['config'], obj={})
assert rv.exit_code == 0, rv.output
contents = os.listdir('config')
assert set(contents) == {'sentry.conf.py', 'config.yml'}
# Make sure the python file is valid
ctx = {'__file__': 'sentry.conf.py'}
execfile('config/sentry.conf.py', ctx)
assert 'DEBUG' in ctx
# Make sure the yaml file is valid
from sentry.utils.yaml import safe_load
with open('config/config.yml', 'rb') as fp:
ctx = safe_load(fp)
assert 'system.secret-key' in ctx
Test `sentry init` for directory branch# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
from sentry.testutils import CliTestCase
from sentry.runner.commands.init import init
class InitTest(CliTestCase):
command = init
def test_simple(self):
with self.runner.isolated_filesystem():
rv = self.invoke('config')
assert rv.exit_code == 0, rv.output
contents = os.listdir('config')
assert set(contents) == {'sentry.conf.py', 'config.yml'}
# Make sure the python file is valid
ctx = {'__file__': 'sentry.conf.py'}
execfile('config/sentry.conf.py', ctx)
assert 'DEBUG' in ctx
# Make sure the yaml file is valid
from sentry.utils.yaml import safe_load
with open('config/config.yml', 'rb') as fp:
ctx = safe_load(fp)
assert 'system.secret-key' in ctx
def test_no_directory(self):
rv = self.invoke('sentry.conf.py')
assert rv.exit_code != 0, rv.output
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
from sentry.testutils import CliTestCase
from sentry.runner.commands.init import init
class InitTest(CliTestCase):
def test_simple(self):
with self.runner.isolated_filesystem():
rv = self.runner.invoke(init, ['config'], obj={})
assert rv.exit_code == 0, rv.output
contents = os.listdir('config')
assert set(contents) == {'sentry.conf.py', 'config.yml'}
# Make sure the python file is valid
ctx = {'__file__': 'sentry.conf.py'}
execfile('config/sentry.conf.py', ctx)
assert 'DEBUG' in ctx
# Make sure the yaml file is valid
from sentry.utils.yaml import safe_load
with open('config/config.yml', 'rb') as fp:
ctx = safe_load(fp)
assert 'system.secret-key' in ctx
<commit_msg>Test `sentry init` for directory branch<commit_after># -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
from sentry.testutils import CliTestCase
from sentry.runner.commands.init import init
class InitTest(CliTestCase):
command = init
def test_simple(self):
with self.runner.isolated_filesystem():
rv = self.invoke('config')
assert rv.exit_code == 0, rv.output
contents = os.listdir('config')
assert set(contents) == {'sentry.conf.py', 'config.yml'}
# Make sure the python file is valid
ctx = {'__file__': 'sentry.conf.py'}
execfile('config/sentry.conf.py', ctx)
assert 'DEBUG' in ctx
# Make sure the yaml file is valid
from sentry.utils.yaml import safe_load
with open('config/config.yml', 'rb') as fp:
ctx = safe_load(fp)
assert 'system.secret-key' in ctx
def test_no_directory(self):
rv = self.invoke('sentry.conf.py')
assert rv.exit_code != 0, rv.output
|
7dd8e3339d5e29f5be4e84f949ac607c9ebddb97
|
main.py
|
main.py
|
import time
import os
from tweepy import API
from tweepy import Stream
from tweepy import OAuthHandler
from tweepy.streaming import StreamListener
from credentials import *
class listener(StreamListener):
def __init__(self, api, start_time, time_limit=60):
self.time = start_time
self.limit = time_limit
self.tweet_data = []
self.api = api
def on_error(self, error):
print("Returned error code %s" % error)
return False
def on_status(self, status):
print(status.text)
if __name__ == "__main__":
start_time = time.time() # grabs the system time
auth = OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = API(auth)
twitterStream = Stream(auth, listener(api, start_time, time_limit=20)) # initialize Stream object with a time out limit
twitterStream.filter(follow=['25073877'],async=True)
|
from time import ctime
from tweepy import API
from tweepy import Stream
from tweepy import OAuthHandler
from tweepy.streaming import StreamListener
from credentials import *
from tweepy.utils import import_simplejson
json = import_simplejson()
class listener(StreamListener):
def __init__(self, api, followed_user):
self.tweet_data = []
self.api = api
self.followed_user = followed_user
def on_error(self, error):
print("Returned error code %s" % error)
return False
def on_status(self, status):
if status.user.id == self.followed_user:
print("Tweeting at %s" % ctime())
if __name__ == "__main__":
auth = OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = API(auth)
followed_user = 25073877
twitterStream = Stream(auth, listener(api, followed_user))
twitterStream.filter(follow=[str(followed_user)], async=True)
|
Remove unused code + follow only specific user status
|
Remove unused code + follow only specific user status
|
Python
|
mit
|
vishoo7/TwitterAutoReply
|
import time
import os
from tweepy import API
from tweepy import Stream
from tweepy import OAuthHandler
from tweepy.streaming import StreamListener
from credentials import *
class listener(StreamListener):
def __init__(self, api, start_time, time_limit=60):
self.time = start_time
self.limit = time_limit
self.tweet_data = []
self.api = api
def on_error(self, error):
print("Returned error code %s" % error)
return False
def on_status(self, status):
print(status.text)
if __name__ == "__main__":
start_time = time.time() # grabs the system time
auth = OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = API(auth)
twitterStream = Stream(auth, listener(api, start_time, time_limit=20)) # initialize Stream object with a time out limit
twitterStream.filter(follow=['25073877'],async=True)
Remove unused code + follow only specific user status
|
from time import ctime
from tweepy import API
from tweepy import Stream
from tweepy import OAuthHandler
from tweepy.streaming import StreamListener
from credentials import *
from tweepy.utils import import_simplejson
json = import_simplejson()
class listener(StreamListener):
def __init__(self, api, followed_user):
self.tweet_data = []
self.api = api
self.followed_user = followed_user
def on_error(self, error):
print("Returned error code %s" % error)
return False
def on_status(self, status):
if status.user.id == self.followed_user:
print("Tweeting at %s" % ctime())
if __name__ == "__main__":
auth = OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = API(auth)
followed_user = 25073877
twitterStream = Stream(auth, listener(api, followed_user))
twitterStream.filter(follow=[str(followed_user)], async=True)
|
<commit_before>
import time
import os
from tweepy import API
from tweepy import Stream
from tweepy import OAuthHandler
from tweepy.streaming import StreamListener
from credentials import *
class listener(StreamListener):
def __init__(self, api, start_time, time_limit=60):
self.time = start_time
self.limit = time_limit
self.tweet_data = []
self.api = api
def on_error(self, error):
print("Returned error code %s" % error)
return False
def on_status(self, status):
print(status.text)
if __name__ == "__main__":
start_time = time.time() # grabs the system time
auth = OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = API(auth)
twitterStream = Stream(auth, listener(api, start_time, time_limit=20)) # initialize Stream object with a time out limit
twitterStream.filter(follow=['25073877'],async=True)
<commit_msg>Remove unused code + follow only specific user status<commit_after>
|
from time import ctime
from tweepy import API
from tweepy import Stream
from tweepy import OAuthHandler
from tweepy.streaming import StreamListener
from credentials import *
from tweepy.utils import import_simplejson
json = import_simplejson()
class listener(StreamListener):
def __init__(self, api, followed_user):
self.tweet_data = []
self.api = api
self.followed_user = followed_user
def on_error(self, error):
print("Returned error code %s" % error)
return False
def on_status(self, status):
if status.user.id == self.followed_user:
print("Tweeting at %s" % ctime())
if __name__ == "__main__":
auth = OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = API(auth)
followed_user = 25073877
twitterStream = Stream(auth, listener(api, followed_user))
twitterStream.filter(follow=[str(followed_user)], async=True)
|
import time
import os
from tweepy import API
from tweepy import Stream
from tweepy import OAuthHandler
from tweepy.streaming import StreamListener
from credentials import *
class listener(StreamListener):
def __init__(self, api, start_time, time_limit=60):
self.time = start_time
self.limit = time_limit
self.tweet_data = []
self.api = api
def on_error(self, error):
print("Returned error code %s" % error)
return False
def on_status(self, status):
print(status.text)
if __name__ == "__main__":
start_time = time.time() # grabs the system time
auth = OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = API(auth)
twitterStream = Stream(auth, listener(api, start_time, time_limit=20)) # initialize Stream object with a time out limit
twitterStream.filter(follow=['25073877'],async=True)
Remove unused code + follow only specific user status
from time import ctime
from tweepy import API
from tweepy import Stream
from tweepy import OAuthHandler
from tweepy.streaming import StreamListener
from credentials import *
from tweepy.utils import import_simplejson
json = import_simplejson()
class listener(StreamListener):
def __init__(self, api, followed_user):
self.tweet_data = []
self.api = api
self.followed_user = followed_user
def on_error(self, error):
print("Returned error code %s" % error)
return False
def on_status(self, status):
if status.user.id == self.followed_user:
print("Tweeting at %s" % ctime())
if __name__ == "__main__":
auth = OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = API(auth)
followed_user = 25073877
twitterStream = Stream(auth, listener(api, followed_user))
twitterStream.filter(follow=[str(followed_user)], async=True)
|
<commit_before>
import time
import os
from tweepy import API
from tweepy import Stream
from tweepy import OAuthHandler
from tweepy.streaming import StreamListener
from credentials import *
class listener(StreamListener):
def __init__(self, api, start_time, time_limit=60):
self.time = start_time
self.limit = time_limit
self.tweet_data = []
self.api = api
def on_error(self, error):
print("Returned error code %s" % error)
return False
def on_status(self, status):
print(status.text)
if __name__ == "__main__":
start_time = time.time() # grabs the system time
auth = OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = API(auth)
twitterStream = Stream(auth, listener(api, start_time, time_limit=20)) # initialize Stream object with a time out limit
twitterStream.filter(follow=['25073877'],async=True)
<commit_msg>Remove unused code + follow only specific user status<commit_after>
from time import ctime
from tweepy import API
from tweepy import Stream
from tweepy import OAuthHandler
from tweepy.streaming import StreamListener
from credentials import *
from tweepy.utils import import_simplejson
json = import_simplejson()
class listener(StreamListener):
def __init__(self, api, followed_user):
self.tweet_data = []
self.api = api
self.followed_user = followed_user
def on_error(self, error):
print("Returned error code %s" % error)
return False
def on_status(self, status):
if status.user.id == self.followed_user:
print("Tweeting at %s" % ctime())
if __name__ == "__main__":
auth = OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = API(auth)
followed_user = 25073877
twitterStream = Stream(auth, listener(api, followed_user))
twitterStream.filter(follow=[str(followed_user)], async=True)
|
cd174416301e03c0beea260925d6227c38444c73
|
shapely/geometry/__init__.py
|
shapely/geometry/__init__.py
|
"""Geometry classes and factories
"""
from geo import box, shape, asShape, mapping
from point import Point, asPoint
from linestring import LineString, asLineString
from polygon import Polygon, asPolygon
from multipoint import MultiPoint, asMultiPoint
from multilinestring import MultiLineString, asMultiLineString
from multipolygon import MultiPolygon, asMultiPolygon
from collection import GeometryCollection
__all__ = [
'box', 'shape', 'asShape', 'Point', 'asPoint', 'LineString', 'asLineString',
'Polygon', 'asPolygon', 'MultiPoint', 'asMultiPoint',
'MultiLineString', 'asMultiLineString', 'MultiPolygon', 'asMultiPolygon',
'GeometryCollection', 'mapping'
]
|
"""Geometry classes and factories
"""
from base import CAP_STYLE, JOIN_STYLE
from geo import box, shape, asShape, mapping
from point import Point, asPoint
from linestring import LineString, asLineString
from polygon import Polygon, asPolygon
from multipoint import MultiPoint, asMultiPoint
from multilinestring import MultiLineString, asMultiLineString
from multipolygon import MultiPolygon, asMultiPolygon
from collection import GeometryCollection
__all__ = [
'box', 'shape', 'asShape', 'Point', 'asPoint', 'LineString', 'asLineString',
'Polygon', 'asPolygon', 'MultiPoint', 'asMultiPoint',
'MultiLineString', 'asMultiLineString', 'MultiPolygon', 'asMultiPolygon',
'GeometryCollection', 'mapping', 'CAP_STYLE', 'JOIN_STYLE'
]
|
Add missing cap and join style imports
|
Add missing cap and join style imports
|
Python
|
bsd-3-clause
|
jdmcbr/Shapely,jdmcbr/Shapely,mouadino/Shapely,abali96/Shapely,mindw/shapely,mindw/shapely,mouadino/Shapely,abali96/Shapely
|
"""Geometry classes and factories
"""
from geo import box, shape, asShape, mapping
from point import Point, asPoint
from linestring import LineString, asLineString
from polygon import Polygon, asPolygon
from multipoint import MultiPoint, asMultiPoint
from multilinestring import MultiLineString, asMultiLineString
from multipolygon import MultiPolygon, asMultiPolygon
from collection import GeometryCollection
__all__ = [
'box', 'shape', 'asShape', 'Point', 'asPoint', 'LineString', 'asLineString',
'Polygon', 'asPolygon', 'MultiPoint', 'asMultiPoint',
'MultiLineString', 'asMultiLineString', 'MultiPolygon', 'asMultiPolygon',
'GeometryCollection', 'mapping'
]
Add missing cap and join style imports
|
"""Geometry classes and factories
"""
from base import CAP_STYLE, JOIN_STYLE
from geo import box, shape, asShape, mapping
from point import Point, asPoint
from linestring import LineString, asLineString
from polygon import Polygon, asPolygon
from multipoint import MultiPoint, asMultiPoint
from multilinestring import MultiLineString, asMultiLineString
from multipolygon import MultiPolygon, asMultiPolygon
from collection import GeometryCollection
__all__ = [
'box', 'shape', 'asShape', 'Point', 'asPoint', 'LineString', 'asLineString',
'Polygon', 'asPolygon', 'MultiPoint', 'asMultiPoint',
'MultiLineString', 'asMultiLineString', 'MultiPolygon', 'asMultiPolygon',
'GeometryCollection', 'mapping', 'CAP_STYLE', 'JOIN_STYLE'
]
|
<commit_before>"""Geometry classes and factories
"""
from geo import box, shape, asShape, mapping
from point import Point, asPoint
from linestring import LineString, asLineString
from polygon import Polygon, asPolygon
from multipoint import MultiPoint, asMultiPoint
from multilinestring import MultiLineString, asMultiLineString
from multipolygon import MultiPolygon, asMultiPolygon
from collection import GeometryCollection
__all__ = [
'box', 'shape', 'asShape', 'Point', 'asPoint', 'LineString', 'asLineString',
'Polygon', 'asPolygon', 'MultiPoint', 'asMultiPoint',
'MultiLineString', 'asMultiLineString', 'MultiPolygon', 'asMultiPolygon',
'GeometryCollection', 'mapping'
]
<commit_msg>Add missing cap and join style imports<commit_after>
|
"""Geometry classes and factories
"""
from base import CAP_STYLE, JOIN_STYLE
from geo import box, shape, asShape, mapping
from point import Point, asPoint
from linestring import LineString, asLineString
from polygon import Polygon, asPolygon
from multipoint import MultiPoint, asMultiPoint
from multilinestring import MultiLineString, asMultiLineString
from multipolygon import MultiPolygon, asMultiPolygon
from collection import GeometryCollection
__all__ = [
'box', 'shape', 'asShape', 'Point', 'asPoint', 'LineString', 'asLineString',
'Polygon', 'asPolygon', 'MultiPoint', 'asMultiPoint',
'MultiLineString', 'asMultiLineString', 'MultiPolygon', 'asMultiPolygon',
'GeometryCollection', 'mapping', 'CAP_STYLE', 'JOIN_STYLE'
]
|
"""Geometry classes and factories
"""
from geo import box, shape, asShape, mapping
from point import Point, asPoint
from linestring import LineString, asLineString
from polygon import Polygon, asPolygon
from multipoint import MultiPoint, asMultiPoint
from multilinestring import MultiLineString, asMultiLineString
from multipolygon import MultiPolygon, asMultiPolygon
from collection import GeometryCollection
__all__ = [
'box', 'shape', 'asShape', 'Point', 'asPoint', 'LineString', 'asLineString',
'Polygon', 'asPolygon', 'MultiPoint', 'asMultiPoint',
'MultiLineString', 'asMultiLineString', 'MultiPolygon', 'asMultiPolygon',
'GeometryCollection', 'mapping'
]
Add missing cap and join style imports"""Geometry classes and factories
"""
from base import CAP_STYLE, JOIN_STYLE
from geo import box, shape, asShape, mapping
from point import Point, asPoint
from linestring import LineString, asLineString
from polygon import Polygon, asPolygon
from multipoint import MultiPoint, asMultiPoint
from multilinestring import MultiLineString, asMultiLineString
from multipolygon import MultiPolygon, asMultiPolygon
from collection import GeometryCollection
__all__ = [
'box', 'shape', 'asShape', 'Point', 'asPoint', 'LineString', 'asLineString',
'Polygon', 'asPolygon', 'MultiPoint', 'asMultiPoint',
'MultiLineString', 'asMultiLineString', 'MultiPolygon', 'asMultiPolygon',
'GeometryCollection', 'mapping', 'CAP_STYLE', 'JOIN_STYLE'
]
|
<commit_before>"""Geometry classes and factories
"""
from geo import box, shape, asShape, mapping
from point import Point, asPoint
from linestring import LineString, asLineString
from polygon import Polygon, asPolygon
from multipoint import MultiPoint, asMultiPoint
from multilinestring import MultiLineString, asMultiLineString
from multipolygon import MultiPolygon, asMultiPolygon
from collection import GeometryCollection
__all__ = [
'box', 'shape', 'asShape', 'Point', 'asPoint', 'LineString', 'asLineString',
'Polygon', 'asPolygon', 'MultiPoint', 'asMultiPoint',
'MultiLineString', 'asMultiLineString', 'MultiPolygon', 'asMultiPolygon',
'GeometryCollection', 'mapping'
]
<commit_msg>Add missing cap and join style imports<commit_after>"""Geometry classes and factories
"""
from base import CAP_STYLE, JOIN_STYLE
from geo import box, shape, asShape, mapping
from point import Point, asPoint
from linestring import LineString, asLineString
from polygon import Polygon, asPolygon
from multipoint import MultiPoint, asMultiPoint
from multilinestring import MultiLineString, asMultiLineString
from multipolygon import MultiPolygon, asMultiPolygon
from collection import GeometryCollection
__all__ = [
'box', 'shape', 'asShape', 'Point', 'asPoint', 'LineString', 'asLineString',
'Polygon', 'asPolygon', 'MultiPoint', 'asMultiPoint',
'MultiLineString', 'asMultiLineString', 'MultiPolygon', 'asMultiPolygon',
'GeometryCollection', 'mapping', 'CAP_STYLE', 'JOIN_STYLE'
]
|
29e01ab226f5451e22ba3291e81bbaff13ce1867
|
greenmine/settings/__init__.py
|
greenmine/settings/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
try:
print "Trying import local.py settings..."
from .local import *
except ImportError:
print "Trying import development.py settings..."
from .development import *
|
# -*- coding: utf-8 -*-
from __future__ import (
absolute_import,
print_function
)
import os, sys
try:
print("Trying import local.py settings...", file=sys.stderr)
from .local import *
except ImportError:
print("Trying import development.py settings...", file=sys.stderr)
from .development import *
|
Send more print message to sys.stderr
|
Smallfix: Send more print message to sys.stderr
|
Python
|
agpl-3.0
|
Zaneh-/bearded-tribble-back,astagi/taiga-back,astronaut1712/taiga-back,dayatz/taiga-back,bdang2012/taiga-back-casting,jeffdwyatt/taiga-back,crr0004/taiga-back,seanchen/taiga-back,coopsource/taiga-back,EvgeneOskin/taiga-back,frt-arch/taiga-back,Rademade/taiga-back,obimod/taiga-back,dycodedev/taiga-back,Tigerwhit4/taiga-back,19kestier/taiga-back,gauravjns/taiga-back,jeffdwyatt/taiga-back,taigaio/taiga-back,gam-phon/taiga-back,taigaio/taiga-back,obimod/taiga-back,forging2012/taiga-back,WALR/taiga-back,CoolCloud/taiga-back,coopsource/taiga-back,dayatz/taiga-back,EvgeneOskin/taiga-back,astagi/taiga-back,bdang2012/taiga-back-casting,dycodedev/taiga-back,19kestier/taiga-back,gam-phon/taiga-back,obimod/taiga-back,rajiteh/taiga-back,coopsource/taiga-back,crr0004/taiga-back,Tigerwhit4/taiga-back,EvgeneOskin/taiga-back,CoolCloud/taiga-back,joshisa/taiga-back,WALR/taiga-back,dayatz/taiga-back,coopsource/taiga-back,CoolCloud/taiga-back,CMLL/taiga-back,forging2012/taiga-back,seanchen/taiga-back,astronaut1712/taiga-back,Tigerwhit4/taiga-back,seanchen/taiga-back,forging2012/taiga-back,gauravjns/taiga-back,gauravjns/taiga-back,CoolCloud/taiga-back,Zaneh-/bearded-tribble-back,Tigerwhit4/taiga-back,jeffdwyatt/taiga-back,rajiteh/taiga-back,frt-arch/taiga-back,rajiteh/taiga-back,dycodedev/taiga-back,seanchen/taiga-back,forging2012/taiga-back,xdevelsistemas/taiga-back-community,WALR/taiga-back,gam-phon/taiga-back,CMLL/taiga-back,Rademade/taiga-back,CMLL/taiga-back,gauravjns/taiga-back,Rademade/taiga-back,astagi/taiga-back,joshisa/taiga-back,Zaneh-/bearded-tribble-back,xdevelsistemas/taiga-back-community,19kestier/taiga-back,taigaio/taiga-back,jeffdwyatt/taiga-back,crr0004/taiga-back,astronaut1712/taiga-back,joshisa/taiga-back,rajiteh/taiga-back,bdang2012/taiga-back-casting,crr0004/taiga-back,CMLL/taiga-back,astagi/taiga-back,WALR/taiga-back,EvgeneOskin/taiga-back,astronaut1712/taiga-back,Rademade/taiga-back,obimod/taiga-back,xdevelsistemas/taiga-back-community,dycodedev/taiga-back,gam-ph
on/taiga-back,bdang2012/taiga-back-casting,frt-arch/taiga-back,joshisa/taiga-back,Rademade/taiga-back
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
try:
print "Trying import local.py settings..."
from .local import *
except ImportError:
print "Trying import development.py settings..."
from .development import *
Smallfix: Send more print message to sys.stderr
|
# -*- coding: utf-8 -*-
from __future__ import (
absolute_import,
print_function
)
import os, sys
try:
print("Trying import local.py settings...", file=sys.stderr)
from .local import *
except ImportError:
print("Trying import development.py settings...", file=sys.stderr)
from .development import *
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
try:
print "Trying import local.py settings..."
from .local import *
except ImportError:
print "Trying import development.py settings..."
from .development import *
<commit_msg>Smallfix: Send more print message to sys.stderr<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import (
absolute_import,
print_function
)
import os, sys
try:
print("Trying import local.py settings...", file=sys.stderr)
from .local import *
except ImportError:
print("Trying import development.py settings...", file=sys.stderr)
from .development import *
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
try:
print "Trying import local.py settings..."
from .local import *
except ImportError:
print "Trying import development.py settings..."
from .development import *
Smallfix: Send more print message to sys.stderr# -*- coding: utf-8 -*-
from __future__ import (
absolute_import,
print_function
)
import os, sys
try:
print("Trying import local.py settings...", file=sys.stderr)
from .local import *
except ImportError:
print("Trying import development.py settings...", file=sys.stderr)
from .development import *
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
try:
print "Trying import local.py settings..."
from .local import *
except ImportError:
print "Trying import development.py settings..."
from .development import *
<commit_msg>Smallfix: Send more print message to sys.stderr<commit_after># -*- coding: utf-8 -*-
from __future__ import (
absolute_import,
print_function
)
import os, sys
try:
print("Trying import local.py settings...", file=sys.stderr)
from .local import *
except ImportError:
print("Trying import development.py settings...", file=sys.stderr)
from .development import *
|
a993ec7f6af7bd543c1084084117042e8a10be51
|
reports/tests.py
|
reports/tests.py
|
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
def test_basic_addition(self):
"""
Tests that 1 + 1 always equals 2.
"""
self.assertEqual(1 + 1, 2)
|
from datetime import time
from django.test import client, TestCase
from members.models import User
from .models import Report
from protocols.models import Topic
client = client.Client()
class ReportTest(TestCase):
def setUp(self):
self.kril = User.objects.create(
username='Kril',
faculty_number='61277',
email='kril@gmail.com',)
self.kril.set_password('kril')
self.kril.save()
self.topic1 = Topic.objects.create(
name='1',
description='first',
voted_for=13,
voted_against=4,
voted_abstain=5)
self.topic2 = Topic.objects.create(
name='2',
description='second',
voted_for=13,
voted_against=4,
voted_abstain=5)
self.topic3 = Topic.objects.create(
name='3',
description='third',
voted_for=13,
voted_against=4,
voted_abstain=5)
def test_add_report(self):
client.login(username='Kril', password='kril')
response = client.post('/reports/add/', {
"addressed_to": "Hackfmi",
"reported_from": self.kril.pk,
"content": "This is a report test",
"copies": [self.topic1.pk, self.topic2.pk, self.topic3.pk],})
import ipdb; ipdb.set_trace()
self.assertEqual(200, response.status_code)
self.assertEqual(1, len(Report.objects.all()))
|
Add test for adding report
|
Add test for adding report
|
Python
|
mit
|
Hackfmi/Diaphanum,Hackfmi/Diaphanum
|
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
def test_basic_addition(self):
"""
Tests that 1 + 1 always equals 2.
"""
self.assertEqual(1 + 1, 2)
Add test for adding report
|
from datetime import time
from django.test import client, TestCase
from members.models import User
from .models import Report
from protocols.models import Topic
client = client.Client()
class ReportTest(TestCase):
def setUp(self):
self.kril = User.objects.create(
username='Kril',
faculty_number='61277',
email='kril@gmail.com',)
self.kril.set_password('kril')
self.kril.save()
self.topic1 = Topic.objects.create(
name='1',
description='first',
voted_for=13,
voted_against=4,
voted_abstain=5)
self.topic2 = Topic.objects.create(
name='2',
description='second',
voted_for=13,
voted_against=4,
voted_abstain=5)
self.topic3 = Topic.objects.create(
name='3',
description='third',
voted_for=13,
voted_against=4,
voted_abstain=5)
def test_add_report(self):
client.login(username='Kril', password='kril')
response = client.post('/reports/add/', {
"addressed_to": "Hackfmi",
"reported_from": self.kril.pk,
"content": "This is a report test",
"copies": [self.topic1.pk, self.topic2.pk, self.topic3.pk],})
import ipdb; ipdb.set_trace()
self.assertEqual(200, response.status_code)
self.assertEqual(1, len(Report.objects.all()))
|
<commit_before>"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
def test_basic_addition(self):
"""
Tests that 1 + 1 always equals 2.
"""
self.assertEqual(1 + 1, 2)
<commit_msg>Add test for adding report<commit_after>
|
from datetime import time
from django.test import client, TestCase
from members.models import User
from .models import Report
from protocols.models import Topic
client = client.Client()
class ReportTest(TestCase):
def setUp(self):
self.kril = User.objects.create(
username='Kril',
faculty_number='61277',
email='kril@gmail.com',)
self.kril.set_password('kril')
self.kril.save()
self.topic1 = Topic.objects.create(
name='1',
description='first',
voted_for=13,
voted_against=4,
voted_abstain=5)
self.topic2 = Topic.objects.create(
name='2',
description='second',
voted_for=13,
voted_against=4,
voted_abstain=5)
self.topic3 = Topic.objects.create(
name='3',
description='third',
voted_for=13,
voted_against=4,
voted_abstain=5)
def test_add_report(self):
client.login(username='Kril', password='kril')
response = client.post('/reports/add/', {
"addressed_to": "Hackfmi",
"reported_from": self.kril.pk,
"content": "This is a report test",
"copies": [self.topic1.pk, self.topic2.pk, self.topic3.pk],})
import ipdb; ipdb.set_trace()
self.assertEqual(200, response.status_code)
self.assertEqual(1, len(Report.objects.all()))
|
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
def test_basic_addition(self):
"""
Tests that 1 + 1 always equals 2.
"""
self.assertEqual(1 + 1, 2)
Add test for adding reportfrom datetime import time
from django.test import client, TestCase
from members.models import User
from .models import Report
from protocols.models import Topic
client = client.Client()
class ReportTest(TestCase):
def setUp(self):
self.kril = User.objects.create(
username='Kril',
faculty_number='61277',
email='kril@gmail.com',)
self.kril.set_password('kril')
self.kril.save()
self.topic1 = Topic.objects.create(
name='1',
description='first',
voted_for=13,
voted_against=4,
voted_abstain=5)
self.topic2 = Topic.objects.create(
name='2',
description='second',
voted_for=13,
voted_against=4,
voted_abstain=5)
self.topic3 = Topic.objects.create(
name='3',
description='third',
voted_for=13,
voted_against=4,
voted_abstain=5)
def test_add_report(self):
client.login(username='Kril', password='kril')
response = client.post('/reports/add/', {
"addressed_to": "Hackfmi",
"reported_from": self.kril.pk,
"content": "This is a report test",
"copies": [self.topic1.pk, self.topic2.pk, self.topic3.pk],})
import ipdb; ipdb.set_trace()
self.assertEqual(200, response.status_code)
self.assertEqual(1, len(Report.objects.all()))
|
<commit_before>"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
def test_basic_addition(self):
"""
Tests that 1 + 1 always equals 2.
"""
self.assertEqual(1 + 1, 2)
<commit_msg>Add test for adding report<commit_after>from datetime import time
from django.test import client, TestCase
from members.models import User
from .models import Report
from protocols.models import Topic
client = client.Client()
class ReportTest(TestCase):
def setUp(self):
self.kril = User.objects.create(
username='Kril',
faculty_number='61277',
email='kril@gmail.com',)
self.kril.set_password('kril')
self.kril.save()
self.topic1 = Topic.objects.create(
name='1',
description='first',
voted_for=13,
voted_against=4,
voted_abstain=5)
self.topic2 = Topic.objects.create(
name='2',
description='second',
voted_for=13,
voted_against=4,
voted_abstain=5)
self.topic3 = Topic.objects.create(
name='3',
description='third',
voted_for=13,
voted_against=4,
voted_abstain=5)
def test_add_report(self):
client.login(username='Kril', password='kril')
response = client.post('/reports/add/', {
"addressed_to": "Hackfmi",
"reported_from": self.kril.pk,
"content": "This is a report test",
"copies": [self.topic1.pk, self.topic2.pk, self.topic3.pk],})
import ipdb; ipdb.set_trace()
self.assertEqual(200, response.status_code)
self.assertEqual(1, len(Report.objects.all()))
|
0566fc979f582341f968b5fb17b064a41619e6f3
|
bifrost/tests/unit/test_inventory.py
|
bifrost/tests/unit/test_inventory.py
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_inventory
----------------------------------
Tests for `inventory` module.
"""
from bifrost import inventory
from bifrost.tests import base
class TestBifrostInventoryUnit(base.TestCase):
def test_inventory_preparation(self):
(groups, hostvars) = inventory._prepare_inventory()
self.assertIn("baremetal", groups)
self.assertIn("localhost", groups)
self.assertDictEqual(hostvars, {})
localhost_value = dict(hosts=["127.0.0.1"])
self.assertDictEqual(localhost_value, groups['localhost'])
def test__val_or_none(self):
array = ['no', '', 'yes']
self.assertEqual('no', inventory._val_or_none(array, 0))
self.assertEqual(None, inventory._val_or_none(array, 1))
self.assertEqual('yes', inventory._val_or_none(array, 2))
self.assertEqual(None, inventory._val_or_none(array, 4))
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_inventory
----------------------------------
Tests for `inventory` module.
"""
from bifrost import inventory
from bifrost.tests import base
class TestBifrostInventoryUnit(base.TestCase):
def test_inventory_preparation(self):
(groups, hostvars) = inventory._prepare_inventory()
self.assertIn("baremetal", groups)
self.assertIn("localhost", groups)
self.assertDictEqual(hostvars, {})
localhost_value = dict(hosts=["127.0.0.1"])
self.assertDictEqual(localhost_value, groups['localhost'])
def test__val_or_none(self):
array = ['no', '', 'yes']
self.assertEqual('no', inventory._val_or_none(array, 0))
self.assertIsNone(inventory._val_or_none(array, 1))
self.assertEqual('yes', inventory._val_or_none(array, 2))
self.assertIsNone(inventory._val_or_none(array, 4))
|
Replace assertEqual(None, *) with assertIsNone
|
Replace assertEqual(None, *) with assertIsNone
Replace assertEqual(None, *) with assertIsNone in tests
Change-Id: I257c479b7a23e39178d292c347d04ad979c48f0f
Closes-bug: #1280522
|
Python
|
apache-2.0
|
bcornec/bifrost,openstack/bifrost,openstack/bifrost,EntropyWorks/bifrost,bcornec/bifrost,EntropyWorks/bifrost
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_inventory
----------------------------------
Tests for `inventory` module.
"""
from bifrost import inventory
from bifrost.tests import base
class TestBifrostInventoryUnit(base.TestCase):
def test_inventory_preparation(self):
(groups, hostvars) = inventory._prepare_inventory()
self.assertIn("baremetal", groups)
self.assertIn("localhost", groups)
self.assertDictEqual(hostvars, {})
localhost_value = dict(hosts=["127.0.0.1"])
self.assertDictEqual(localhost_value, groups['localhost'])
def test__val_or_none(self):
array = ['no', '', 'yes']
self.assertEqual('no', inventory._val_or_none(array, 0))
self.assertEqual(None, inventory._val_or_none(array, 1))
self.assertEqual('yes', inventory._val_or_none(array, 2))
self.assertEqual(None, inventory._val_or_none(array, 4))
Replace assertEqual(None, *) with assertIsNone
Replace assertEqual(None, *) with assertIsNone in tests
Change-Id: I257c479b7a23e39178d292c347d04ad979c48f0f
Closes-bug: #1280522
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_inventory
----------------------------------
Tests for `inventory` module.
"""
from bifrost import inventory
from bifrost.tests import base
class TestBifrostInventoryUnit(base.TestCase):
def test_inventory_preparation(self):
(groups, hostvars) = inventory._prepare_inventory()
self.assertIn("baremetal", groups)
self.assertIn("localhost", groups)
self.assertDictEqual(hostvars, {})
localhost_value = dict(hosts=["127.0.0.1"])
self.assertDictEqual(localhost_value, groups['localhost'])
def test__val_or_none(self):
array = ['no', '', 'yes']
self.assertEqual('no', inventory._val_or_none(array, 0))
self.assertIsNone(inventory._val_or_none(array, 1))
self.assertEqual('yes', inventory._val_or_none(array, 2))
self.assertIsNone(inventory._val_or_none(array, 4))
|
<commit_before># -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_inventory
----------------------------------
Tests for `inventory` module.
"""
from bifrost import inventory
from bifrost.tests import base
class TestBifrostInventoryUnit(base.TestCase):
def test_inventory_preparation(self):
(groups, hostvars) = inventory._prepare_inventory()
self.assertIn("baremetal", groups)
self.assertIn("localhost", groups)
self.assertDictEqual(hostvars, {})
localhost_value = dict(hosts=["127.0.0.1"])
self.assertDictEqual(localhost_value, groups['localhost'])
def test__val_or_none(self):
array = ['no', '', 'yes']
self.assertEqual('no', inventory._val_or_none(array, 0))
self.assertEqual(None, inventory._val_or_none(array, 1))
self.assertEqual('yes', inventory._val_or_none(array, 2))
self.assertEqual(None, inventory._val_or_none(array, 4))
<commit_msg>Replace assertEqual(None, *) with assertIsNone
Replace assertEqual(None, *) with assertIsNone in tests
Change-Id: I257c479b7a23e39178d292c347d04ad979c48f0f
Closes-bug: #1280522<commit_after>
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_inventory
----------------------------------
Tests for `inventory` module.
"""
from bifrost import inventory
from bifrost.tests import base
class TestBifrostInventoryUnit(base.TestCase):
def test_inventory_preparation(self):
(groups, hostvars) = inventory._prepare_inventory()
self.assertIn("baremetal", groups)
self.assertIn("localhost", groups)
self.assertDictEqual(hostvars, {})
localhost_value = dict(hosts=["127.0.0.1"])
self.assertDictEqual(localhost_value, groups['localhost'])
def test__val_or_none(self):
array = ['no', '', 'yes']
self.assertEqual('no', inventory._val_or_none(array, 0))
self.assertIsNone(inventory._val_or_none(array, 1))
self.assertEqual('yes', inventory._val_or_none(array, 2))
self.assertIsNone(inventory._val_or_none(array, 4))
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_inventory
----------------------------------
Tests for `inventory` module.
"""
from bifrost import inventory
from bifrost.tests import base
class TestBifrostInventoryUnit(base.TestCase):
def test_inventory_preparation(self):
(groups, hostvars) = inventory._prepare_inventory()
self.assertIn("baremetal", groups)
self.assertIn("localhost", groups)
self.assertDictEqual(hostvars, {})
localhost_value = dict(hosts=["127.0.0.1"])
self.assertDictEqual(localhost_value, groups['localhost'])
def test__val_or_none(self):
array = ['no', '', 'yes']
self.assertEqual('no', inventory._val_or_none(array, 0))
self.assertEqual(None, inventory._val_or_none(array, 1))
self.assertEqual('yes', inventory._val_or_none(array, 2))
self.assertEqual(None, inventory._val_or_none(array, 4))
Replace assertEqual(None, *) with assertIsNone
Replace assertEqual(None, *) with assertIsNone in tests
Change-Id: I257c479b7a23e39178d292c347d04ad979c48f0f
Closes-bug: #1280522# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_inventory
----------------------------------
Tests for `inventory` module.
"""
from bifrost import inventory
from bifrost.tests import base
class TestBifrostInventoryUnit(base.TestCase):
def test_inventory_preparation(self):
(groups, hostvars) = inventory._prepare_inventory()
self.assertIn("baremetal", groups)
self.assertIn("localhost", groups)
self.assertDictEqual(hostvars, {})
localhost_value = dict(hosts=["127.0.0.1"])
self.assertDictEqual(localhost_value, groups['localhost'])
def test__val_or_none(self):
array = ['no', '', 'yes']
self.assertEqual('no', inventory._val_or_none(array, 0))
self.assertIsNone(inventory._val_or_none(array, 1))
self.assertEqual('yes', inventory._val_or_none(array, 2))
self.assertIsNone(inventory._val_or_none(array, 4))
|
<commit_before># -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_inventory
----------------------------------
Tests for `inventory` module.
"""
from bifrost import inventory
from bifrost.tests import base
class TestBifrostInventoryUnit(base.TestCase):
def test_inventory_preparation(self):
(groups, hostvars) = inventory._prepare_inventory()
self.assertIn("baremetal", groups)
self.assertIn("localhost", groups)
self.assertDictEqual(hostvars, {})
localhost_value = dict(hosts=["127.0.0.1"])
self.assertDictEqual(localhost_value, groups['localhost'])
def test__val_or_none(self):
array = ['no', '', 'yes']
self.assertEqual('no', inventory._val_or_none(array, 0))
self.assertEqual(None, inventory._val_or_none(array, 1))
self.assertEqual('yes', inventory._val_or_none(array, 2))
self.assertEqual(None, inventory._val_or_none(array, 4))
<commit_msg>Replace assertEqual(None, *) with assertIsNone
Replace assertEqual(None, *) with assertIsNone in tests
Change-Id: I257c479b7a23e39178d292c347d04ad979c48f0f
Closes-bug: #1280522<commit_after># -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_inventory
----------------------------------
Tests for `inventory` module.
"""
from bifrost import inventory
from bifrost.tests import base
class TestBifrostInventoryUnit(base.TestCase):
def test_inventory_preparation(self):
(groups, hostvars) = inventory._prepare_inventory()
self.assertIn("baremetal", groups)
self.assertIn("localhost", groups)
self.assertDictEqual(hostvars, {})
localhost_value = dict(hosts=["127.0.0.1"])
self.assertDictEqual(localhost_value, groups['localhost'])
def test__val_or_none(self):
array = ['no', '', 'yes']
self.assertEqual('no', inventory._val_or_none(array, 0))
self.assertIsNone(inventory._val_or_none(array, 1))
self.assertEqual('yes', inventory._val_or_none(array, 2))
self.assertIsNone(inventory._val_or_none(array, 4))
|
74c7f22cfdd14761932fb9c138435671d1490dfa
|
partner_industry_secondary/models/res_partner.py
|
partner_industry_secondary/models/res_partner.py
|
# Copyright 2015 Antiun Ingenieria S.L. - Javier Iniesta
# Copyright 2016 Tecnativa S.L. - Vicent Cubells
# Copyright 2018 Eficent Business and IT Consulting Services, S.L.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, exceptions, fields, models, _
class ResPartner(models.Model):
_inherit = 'res.partner'
industry_id = fields.Many2one(string='Main Industry')
secondary_industry_ids = fields.Many2many(
comodel_name='res.partner.industry', string="Secondary Industries",
domain="[('id', '!=', industry_id)]")
@api.constrains('industry_id', 'secondary_industry_ids')
def _check_industries(self):
if self.industry_id in self.secondary_industry_ids:
raise exceptions.ValidationError(
_('The main industry must be different '
'from the secondary industries.'))
|
# Copyright 2015 Antiun Ingenieria S.L. - Javier Iniesta
# Copyright 2016 Tecnativa S.L. - Vicent Cubells
# Copyright 2018 Eficent Business and IT Consulting Services, S.L.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, exceptions, fields, models, _
class ResPartner(models.Model):
_inherit = 'res.partner'
industry_id = fields.Many2one(string='Main Industry')
secondary_industry_ids = fields.Many2many(
comodel_name='res.partner.industry', string="Secondary Industries",
domain="[('id', '!=', industry_id)]")
@api.constrains('industry_id', 'secondary_industry_ids')
def _check_industries(self):
for partner in self:
if partner.industry_id in partner.secondary_industry_ids:
raise exceptions.ValidationError(
_('The main industry must be different '
'from the secondary industries.'))
|
Make api constrains multi to avoid error when create a company with 2 contacts
|
partner_industry_Secondary: Make api constrains multi to avoid error when create a company with 2 contacts
|
Python
|
agpl-3.0
|
syci/partner-contact,syci/partner-contact
|
# Copyright 2015 Antiun Ingenieria S.L. - Javier Iniesta
# Copyright 2016 Tecnativa S.L. - Vicent Cubells
# Copyright 2018 Eficent Business and IT Consulting Services, S.L.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, exceptions, fields, models, _
class ResPartner(models.Model):
_inherit = 'res.partner'
industry_id = fields.Many2one(string='Main Industry')
secondary_industry_ids = fields.Many2many(
comodel_name='res.partner.industry', string="Secondary Industries",
domain="[('id', '!=', industry_id)]")
@api.constrains('industry_id', 'secondary_industry_ids')
def _check_industries(self):
if self.industry_id in self.secondary_industry_ids:
raise exceptions.ValidationError(
_('The main industry must be different '
'from the secondary industries.'))
partner_industry_Secondary: Make api constrains multi to avoid error when create a company with 2 contacts
|
# Copyright 2015 Antiun Ingenieria S.L. - Javier Iniesta
# Copyright 2016 Tecnativa S.L. - Vicent Cubells
# Copyright 2018 Eficent Business and IT Consulting Services, S.L.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, exceptions, fields, models, _
class ResPartner(models.Model):
_inherit = 'res.partner'
industry_id = fields.Many2one(string='Main Industry')
secondary_industry_ids = fields.Many2many(
comodel_name='res.partner.industry', string="Secondary Industries",
domain="[('id', '!=', industry_id)]")
@api.constrains('industry_id', 'secondary_industry_ids')
def _check_industries(self):
for partner in self:
if partner.industry_id in partner.secondary_industry_ids:
raise exceptions.ValidationError(
_('The main industry must be different '
'from the secondary industries.'))
|
<commit_before># Copyright 2015 Antiun Ingenieria S.L. - Javier Iniesta
# Copyright 2016 Tecnativa S.L. - Vicent Cubells
# Copyright 2018 Eficent Business and IT Consulting Services, S.L.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, exceptions, fields, models, _
class ResPartner(models.Model):
_inherit = 'res.partner'
industry_id = fields.Many2one(string='Main Industry')
secondary_industry_ids = fields.Many2many(
comodel_name='res.partner.industry', string="Secondary Industries",
domain="[('id', '!=', industry_id)]")
@api.constrains('industry_id', 'secondary_industry_ids')
def _check_industries(self):
if self.industry_id in self.secondary_industry_ids:
raise exceptions.ValidationError(
_('The main industry must be different '
'from the secondary industries.'))
<commit_msg>partner_industry_Secondary: Make api constrains multi to avoid error when create a company with 2 contacts<commit_after>
|
# Copyright 2015 Antiun Ingenieria S.L. - Javier Iniesta
# Copyright 2016 Tecnativa S.L. - Vicent Cubells
# Copyright 2018 Eficent Business and IT Consulting Services, S.L.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, exceptions, fields, models, _
class ResPartner(models.Model):
_inherit = 'res.partner'
industry_id = fields.Many2one(string='Main Industry')
secondary_industry_ids = fields.Many2many(
comodel_name='res.partner.industry', string="Secondary Industries",
domain="[('id', '!=', industry_id)]")
@api.constrains('industry_id', 'secondary_industry_ids')
def _check_industries(self):
for partner in self:
if partner.industry_id in partner.secondary_industry_ids:
raise exceptions.ValidationError(
_('The main industry must be different '
'from the secondary industries.'))
|
# Copyright 2015 Antiun Ingenieria S.L. - Javier Iniesta
# Copyright 2016 Tecnativa S.L. - Vicent Cubells
# Copyright 2018 Eficent Business and IT Consulting Services, S.L.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, exceptions, fields, models, _
class ResPartner(models.Model):
_inherit = 'res.partner'
industry_id = fields.Many2one(string='Main Industry')
secondary_industry_ids = fields.Many2many(
comodel_name='res.partner.industry', string="Secondary Industries",
domain="[('id', '!=', industry_id)]")
@api.constrains('industry_id', 'secondary_industry_ids')
def _check_industries(self):
if self.industry_id in self.secondary_industry_ids:
raise exceptions.ValidationError(
_('The main industry must be different '
'from the secondary industries.'))
partner_industry_Secondary: Make api constrains multi to avoid error when create a company with 2 contacts# Copyright 2015 Antiun Ingenieria S.L. - Javier Iniesta
# Copyright 2016 Tecnativa S.L. - Vicent Cubells
# Copyright 2018 Eficent Business and IT Consulting Services, S.L.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, exceptions, fields, models, _
class ResPartner(models.Model):
_inherit = 'res.partner'
industry_id = fields.Many2one(string='Main Industry')
secondary_industry_ids = fields.Many2many(
comodel_name='res.partner.industry', string="Secondary Industries",
domain="[('id', '!=', industry_id)]")
@api.constrains('industry_id', 'secondary_industry_ids')
def _check_industries(self):
for partner in self:
if partner.industry_id in partner.secondary_industry_ids:
raise exceptions.ValidationError(
_('The main industry must be different '
'from the secondary industries.'))
|
<commit_before># Copyright 2015 Antiun Ingenieria S.L. - Javier Iniesta
# Copyright 2016 Tecnativa S.L. - Vicent Cubells
# Copyright 2018 Eficent Business and IT Consulting Services, S.L.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, exceptions, fields, models, _
class ResPartner(models.Model):
_inherit = 'res.partner'
industry_id = fields.Many2one(string='Main Industry')
secondary_industry_ids = fields.Many2many(
comodel_name='res.partner.industry', string="Secondary Industries",
domain="[('id', '!=', industry_id)]")
@api.constrains('industry_id', 'secondary_industry_ids')
def _check_industries(self):
if self.industry_id in self.secondary_industry_ids:
raise exceptions.ValidationError(
_('The main industry must be different '
'from the secondary industries.'))
<commit_msg>partner_industry_Secondary: Make api constrains multi to avoid error when create a company with 2 contacts<commit_after># Copyright 2015 Antiun Ingenieria S.L. - Javier Iniesta
# Copyright 2016 Tecnativa S.L. - Vicent Cubells
# Copyright 2018 Eficent Business and IT Consulting Services, S.L.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, exceptions, fields, models, _
class ResPartner(models.Model):
_inherit = 'res.partner'
industry_id = fields.Many2one(string='Main Industry')
secondary_industry_ids = fields.Many2many(
comodel_name='res.partner.industry', string="Secondary Industries",
domain="[('id', '!=', industry_id)]")
@api.constrains('industry_id', 'secondary_industry_ids')
def _check_industries(self):
for partner in self:
if partner.industry_id in partner.secondary_industry_ids:
raise exceptions.ValidationError(
_('The main industry must be different '
'from the secondary industries.'))
|
eed276146fe06e5d8191462cc7ef81a65c4bbdbb
|
pdf_generator/styles.py
|
pdf_generator/styles.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from reportlab.platypus import (
Paragraph as BaseParagraph,
Image as BaseImage,
Spacer,
)
from reportlab.lib.styles import ParagraphStyle, getSampleStyleSheet
styles = getSampleStyleSheet()
snormal = ParagraphStyle('normal')
def Paragraph(text, style=snormal, **kw):
if isinstance(style, basestring):
style = styles[style]
if kw:
style = ParagraphStyle('style', parent=style, **kw)
return BaseParagraph(text, style)
def HSpacer(width):
return Spacer(0, width)
def Image(path, width=None, height=None, ratio=None):
if width and ratio:
height = width / ratio
elif height and ratio:
width = height * ratio
return BaseImage(path, width, height)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from reportlab.platypus import (
Paragraph as BaseParagraph,
Image as BaseImage,
Spacer,
)
from reportlab.lib.styles import ParagraphStyle, getSampleStyleSheet
styles = getSampleStyleSheet()
snormal = ParagraphStyle('normal')
def Paragraph(text, style=snormal, **kw):
if isinstance(style, basestring):
style = styles[style]
if kw:
style = ParagraphStyle('style', parent=style, **kw)
return BaseParagraph(text, style)
def bold(string, *args, **kw):
"""
Return string as a :class:`Paragraph` in bold
"""
return Paragraph(u'<b>{}</b>'.format(string), *args, **kw)
def italic(string, *args, **kw):
"""
Return string as a :class:`Paragraph` in italic
"""
return Paragraph(u'<i>{}</i>'.format(string), *args, **kw)
def HSpacer(width):
return Spacer(0, width)
def Image(path, width=None, height=None, ratio=None):
if width and ratio:
height = width / ratio
elif height and ratio:
width = height * ratio
return BaseImage(path, width, height)
|
Add bold and italic helpers
|
Add bold and italic helpers
|
Python
|
mit
|
cecedille1/PDF_generator
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from reportlab.platypus import (
Paragraph as BaseParagraph,
Image as BaseImage,
Spacer,
)
from reportlab.lib.styles import ParagraphStyle, getSampleStyleSheet
styles = getSampleStyleSheet()
snormal = ParagraphStyle('normal')
def Paragraph(text, style=snormal, **kw):
if isinstance(style, basestring):
style = styles[style]
if kw:
style = ParagraphStyle('style', parent=style, **kw)
return BaseParagraph(text, style)
def HSpacer(width):
return Spacer(0, width)
def Image(path, width=None, height=None, ratio=None):
if width and ratio:
height = width / ratio
elif height and ratio:
width = height * ratio
return BaseImage(path, width, height)
Add bold and italic helpers
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from reportlab.platypus import (
Paragraph as BaseParagraph,
Image as BaseImage,
Spacer,
)
from reportlab.lib.styles import ParagraphStyle, getSampleStyleSheet
styles = getSampleStyleSheet()
snormal = ParagraphStyle('normal')
def Paragraph(text, style=snormal, **kw):
if isinstance(style, basestring):
style = styles[style]
if kw:
style = ParagraphStyle('style', parent=style, **kw)
return BaseParagraph(text, style)
def bold(string, *args, **kw):
"""
Return string as a :class:`Paragraph` in bold
"""
return Paragraph(u'<b>{}</b>'.format(string), *args, **kw)
def italic(string, *args, **kw):
"""
Return string as a :class:`Paragraph` in italic
"""
return Paragraph(u'<i>{}</i>'.format(string), *args, **kw)
def HSpacer(width):
return Spacer(0, width)
def Image(path, width=None, height=None, ratio=None):
if width and ratio:
height = width / ratio
elif height and ratio:
width = height * ratio
return BaseImage(path, width, height)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from reportlab.platypus import (
Paragraph as BaseParagraph,
Image as BaseImage,
Spacer,
)
from reportlab.lib.styles import ParagraphStyle, getSampleStyleSheet
styles = getSampleStyleSheet()
snormal = ParagraphStyle('normal')
def Paragraph(text, style=snormal, **kw):
if isinstance(style, basestring):
style = styles[style]
if kw:
style = ParagraphStyle('style', parent=style, **kw)
return BaseParagraph(text, style)
def HSpacer(width):
return Spacer(0, width)
def Image(path, width=None, height=None, ratio=None):
if width and ratio:
height = width / ratio
elif height and ratio:
width = height * ratio
return BaseImage(path, width, height)
<commit_msg>Add bold and italic helpers<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from reportlab.platypus import (
Paragraph as BaseParagraph,
Image as BaseImage,
Spacer,
)
from reportlab.lib.styles import ParagraphStyle, getSampleStyleSheet
styles = getSampleStyleSheet()
snormal = ParagraphStyle('normal')
def Paragraph(text, style=snormal, **kw):
if isinstance(style, basestring):
style = styles[style]
if kw:
style = ParagraphStyle('style', parent=style, **kw)
return BaseParagraph(text, style)
def bold(string, *args, **kw):
"""
Return string as a :class:`Paragraph` in bold
"""
return Paragraph(u'<b>{}</b>'.format(string), *args, **kw)
def italic(string, *args, **kw):
"""
Return string as a :class:`Paragraph` in italic
"""
return Paragraph(u'<i>{}</i>'.format(string), *args, **kw)
def HSpacer(width):
return Spacer(0, width)
def Image(path, width=None, height=None, ratio=None):
if width and ratio:
height = width / ratio
elif height and ratio:
width = height * ratio
return BaseImage(path, width, height)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from reportlab.platypus import (
Paragraph as BaseParagraph,
Image as BaseImage,
Spacer,
)
from reportlab.lib.styles import ParagraphStyle, getSampleStyleSheet
styles = getSampleStyleSheet()
snormal = ParagraphStyle('normal')
def Paragraph(text, style=snormal, **kw):
if isinstance(style, basestring):
style = styles[style]
if kw:
style = ParagraphStyle('style', parent=style, **kw)
return BaseParagraph(text, style)
def HSpacer(width):
return Spacer(0, width)
def Image(path, width=None, height=None, ratio=None):
if width and ratio:
height = width / ratio
elif height and ratio:
width = height * ratio
return BaseImage(path, width, height)
Add bold and italic helpers#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from reportlab.platypus import (
Paragraph as BaseParagraph,
Image as BaseImage,
Spacer,
)
from reportlab.lib.styles import ParagraphStyle, getSampleStyleSheet
styles = getSampleStyleSheet()
snormal = ParagraphStyle('normal')
def Paragraph(text, style=snormal, **kw):
if isinstance(style, basestring):
style = styles[style]
if kw:
style = ParagraphStyle('style', parent=style, **kw)
return BaseParagraph(text, style)
def bold(string, *args, **kw):
"""
Return string as a :class:`Paragraph` in bold
"""
return Paragraph(u'<b>{}</b>'.format(string), *args, **kw)
def italic(string, *args, **kw):
"""
Return string as a :class:`Paragraph` in italic
"""
return Paragraph(u'<i>{}</i>'.format(string), *args, **kw)
def HSpacer(width):
return Spacer(0, width)
def Image(path, width=None, height=None, ratio=None):
if width and ratio:
height = width / ratio
elif height and ratio:
width = height * ratio
return BaseImage(path, width, height)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from reportlab.platypus import (
Paragraph as BaseParagraph,
Image as BaseImage,
Spacer,
)
from reportlab.lib.styles import ParagraphStyle, getSampleStyleSheet
styles = getSampleStyleSheet()
snormal = ParagraphStyle('normal')
def Paragraph(text, style=snormal, **kw):
if isinstance(style, basestring):
style = styles[style]
if kw:
style = ParagraphStyle('style', parent=style, **kw)
return BaseParagraph(text, style)
def HSpacer(width):
return Spacer(0, width)
def Image(path, width=None, height=None, ratio=None):
if width and ratio:
height = width / ratio
elif height and ratio:
width = height * ratio
return BaseImage(path, width, height)
<commit_msg>Add bold and italic helpers<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from reportlab.platypus import (
Paragraph as BaseParagraph,
Image as BaseImage,
Spacer,
)
from reportlab.lib.styles import ParagraphStyle, getSampleStyleSheet
styles = getSampleStyleSheet()
snormal = ParagraphStyle('normal')
def Paragraph(text, style=snormal, **kw):
if isinstance(style, basestring):
style = styles[style]
if kw:
style = ParagraphStyle('style', parent=style, **kw)
return BaseParagraph(text, style)
def bold(string, *args, **kw):
"""
Return string as a :class:`Paragraph` in bold
"""
return Paragraph(u'<b>{}</b>'.format(string), *args, **kw)
def italic(string, *args, **kw):
"""
Return string as a :class:`Paragraph` in italic
"""
return Paragraph(u'<i>{}</i>'.format(string), *args, **kw)
def HSpacer(width):
return Spacer(0, width)
def Image(path, width=None, height=None, ratio=None):
if width and ratio:
height = width / ratio
elif height and ratio:
width = height * ratio
return BaseImage(path, width, height)
|
ee7147e6d781a92d0ded0e094cc01a187fcb64ae
|
openstates/people.py
|
openstates/people.py
|
from pupa.scrape import Legislator
from .base import OpenstatesBaseScraper
class OpenstatesPersonScraper(OpenstatesBaseScraper):
def scrape_legislator(self, legislator_id):
old = self.api('legislators/' + legislator_id + '?')
old.pop('country', None)
old.pop('level', None)
new = Legislator(name=old['full_name'], image=old['photo_url'])
return new
def scrape(self, apikey=None):
if apikey:
self.apikey = apikey
if not self.apikey:
print('apikey not set')
return
# TODO: change this to just get ids, then scrape legislator can take an id
# and get the data it it leaving behind here
method = 'legislators/?state={}&fields=id'.format(self.state)
for result in self.api(method):
yield self.scrape_legislator(result['id'])
|
from pupa.scrape import Legislator
from .base import OpenstatesBaseScraper
class OpenstatesPersonScraper(OpenstatesBaseScraper):
def scrape_legislator(self, legislator_id):
old = self.api('legislators/' + legislator_id + '?')
old.pop('country', None)
old.pop('level', None)
new = Legislator(name=old['full_name'], image=old['photo_url'])
return new
def scrape(self):
method = 'legislators/?state={}&fields=id'.format(self.state)
for result in self.api(method):
yield self.scrape_legislator(result['id'])
|
Move the APIKey bits out to the init
|
Move the APIKey bits out to the init
|
Python
|
bsd-3-clause
|
openstates/billy,openstates/billy,sunlightlabs/billy,openstates/billy,sunlightlabs/billy,sunlightlabs/billy
|
from pupa.scrape import Legislator
from .base import OpenstatesBaseScraper
class OpenstatesPersonScraper(OpenstatesBaseScraper):
def scrape_legislator(self, legislator_id):
old = self.api('legislators/' + legislator_id + '?')
old.pop('country', None)
old.pop('level', None)
new = Legislator(name=old['full_name'], image=old['photo_url'])
return new
def scrape(self, apikey=None):
if apikey:
self.apikey = apikey
if not self.apikey:
print('apikey not set')
return
# TODO: change this to just get ids, then scrape legislator can take an id
# and get the data it it leaving behind here
method = 'legislators/?state={}&fields=id'.format(self.state)
for result in self.api(method):
yield self.scrape_legislator(result['id'])
Move the APIKey bits out to the init
|
from pupa.scrape import Legislator
from .base import OpenstatesBaseScraper
class OpenstatesPersonScraper(OpenstatesBaseScraper):
def scrape_legislator(self, legislator_id):
old = self.api('legislators/' + legislator_id + '?')
old.pop('country', None)
old.pop('level', None)
new = Legislator(name=old['full_name'], image=old['photo_url'])
return new
def scrape(self):
method = 'legislators/?state={}&fields=id'.format(self.state)
for result in self.api(method):
yield self.scrape_legislator(result['id'])
|
<commit_before>from pupa.scrape import Legislator
from .base import OpenstatesBaseScraper
class OpenstatesPersonScraper(OpenstatesBaseScraper):
def scrape_legislator(self, legislator_id):
old = self.api('legislators/' + legislator_id + '?')
old.pop('country', None)
old.pop('level', None)
new = Legislator(name=old['full_name'], image=old['photo_url'])
return new
def scrape(self, apikey=None):
if apikey:
self.apikey = apikey
if not self.apikey:
print('apikey not set')
return
# TODO: change this to just get ids, then scrape legislator can take an id
# and get the data it it leaving behind here
method = 'legislators/?state={}&fields=id'.format(self.state)
for result in self.api(method):
yield self.scrape_legislator(result['id'])
<commit_msg>Move the APIKey bits out to the init<commit_after>
|
from pupa.scrape import Legislator
from .base import OpenstatesBaseScraper
class OpenstatesPersonScraper(OpenstatesBaseScraper):
def scrape_legislator(self, legislator_id):
old = self.api('legislators/' + legislator_id + '?')
old.pop('country', None)
old.pop('level', None)
new = Legislator(name=old['full_name'], image=old['photo_url'])
return new
def scrape(self):
method = 'legislators/?state={}&fields=id'.format(self.state)
for result in self.api(method):
yield self.scrape_legislator(result['id'])
|
from pupa.scrape import Legislator
from .base import OpenstatesBaseScraper
class OpenstatesPersonScraper(OpenstatesBaseScraper):
def scrape_legislator(self, legislator_id):
old = self.api('legislators/' + legislator_id + '?')
old.pop('country', None)
old.pop('level', None)
new = Legislator(name=old['full_name'], image=old['photo_url'])
return new
def scrape(self, apikey=None):
if apikey:
self.apikey = apikey
if not self.apikey:
print('apikey not set')
return
# TODO: change this to just get ids, then scrape legislator can take an id
# and get the data it it leaving behind here
method = 'legislators/?state={}&fields=id'.format(self.state)
for result in self.api(method):
yield self.scrape_legislator(result['id'])
Move the APIKey bits out to the initfrom pupa.scrape import Legislator
from .base import OpenstatesBaseScraper
class OpenstatesPersonScraper(OpenstatesBaseScraper):
def scrape_legislator(self, legislator_id):
old = self.api('legislators/' + legislator_id + '?')
old.pop('country', None)
old.pop('level', None)
new = Legislator(name=old['full_name'], image=old['photo_url'])
return new
def scrape(self):
method = 'legislators/?state={}&fields=id'.format(self.state)
for result in self.api(method):
yield self.scrape_legislator(result['id'])
|
<commit_before>from pupa.scrape import Legislator
from .base import OpenstatesBaseScraper
class OpenstatesPersonScraper(OpenstatesBaseScraper):
def scrape_legislator(self, legislator_id):
old = self.api('legislators/' + legislator_id + '?')
old.pop('country', None)
old.pop('level', None)
new = Legislator(name=old['full_name'], image=old['photo_url'])
return new
def scrape(self, apikey=None):
if apikey:
self.apikey = apikey
if not self.apikey:
print('apikey not set')
return
# TODO: change this to just get ids, then scrape legislator can take an id
# and get the data it it leaving behind here
method = 'legislators/?state={}&fields=id'.format(self.state)
for result in self.api(method):
yield self.scrape_legislator(result['id'])
<commit_msg>Move the APIKey bits out to the init<commit_after>from pupa.scrape import Legislator
from .base import OpenstatesBaseScraper
class OpenstatesPersonScraper(OpenstatesBaseScraper):
def scrape_legislator(self, legislator_id):
old = self.api('legislators/' + legislator_id + '?')
old.pop('country', None)
old.pop('level', None)
new = Legislator(name=old['full_name'], image=old['photo_url'])
return new
def scrape(self):
method = 'legislators/?state={}&fields=id'.format(self.state)
for result in self.api(method):
yield self.scrape_legislator(result['id'])
|
76ceb2f7c39d6cd82710e8e02df7a7a4b7d6360a
|
spitfire/runtime/repeater.py
|
spitfire/runtime/repeater.py
|
class RepeatTracker(object):
def __init__(self):
self.repeater_map = {}
def __setitem__(self, key, value):
try:
self.repeater_map[key].index = value
except KeyError, e:
self.repeater_map[key] = Repeater(value)
def __getitem__(self, key):
return self.repeater_map[key]
class Repeater(object):
def __init__(self, index=0, length=None):
self.index = index
self.length = length
@property
def number(self):
return self.index + 1
@property
def even(self):
return not (self.index % 2)
@property
def odd(self):
return (self.index % 2)
@property
def first(self):
return (self.index == 0)
@property
def last(self):
return (self.index == (self.length - 1))
def reiterate(iterable):
try:
length = len(iterable)
except TypeError:
# if the iterable is a generator, then we have no length
length = None
for index, item in enumerate(iterable):
yield (Repeater(index, length), item)
|
class RepeatTracker(object):
def __init__(self):
self.repeater_map = {}
def __setitem__(self, key, value):
try:
self.repeater_map[key].index = value
except KeyError, e:
self.repeater_map[key] = Repeater(value)
def __getitem__(self, key):
return self.repeater_map[key]
class Repeater(object):
def __init__(self, index=0, item=None, length=None):
self.index = index
self.item = item
self.length = length
@property
def number(self):
return self.index + 1
@property
def even(self):
return not (self.index % 2)
@property
def odd(self):
return (self.index % 2)
@property
def first(self):
return (self.index == 0)
@property
def last(self):
return (self.index == (self.length - 1))
class RepeatIterator(object):
def __init__(self, iterable):
self.src_iterable = iterable
self.src_iterator = enumerate(iterable)
try:
self.length = len(iterable)
except TypeError:
# if the iterable is a generator, then we have no length
self.length = None
def __iter__(self):
return self
def next(self):
index, item = self.src_iterator.next()
return Repeater(index, item, self.length)
|
Revert last change (breaks XSPT)
|
Revert last change (breaks XSPT)
|
Python
|
bsd-3-clause
|
ahmedissa/spitfire,tkisme/spitfire,infin8/spitfire,infin8/spitfire,funic/spitfire,ahmedissa/spitfire,YifanCao/spitfire,YifanCao/spitfire,infin8/spitfire,TheProjecter/spitfire,YifanCao/spitfire,ahmedissa/spitfire,lefay1982/spitfire,funic/spitfire,tkisme/spitfire,coverband/spitfire,ahmedissa/spitfire,tkisme/spitfire,coverband/spitfire,TheProjecter/spitfire,tkisme/spitfire,TheProjecter/spitfire,YifanCao/spitfire,coverband/spitfire,infin8/spitfire,coverband/spitfire,funic/spitfire,lefay1982/spitfire,lefay1982/spitfire,lefay1982/spitfire,funic/spitfire,TheProjecter/spitfire
|
class RepeatTracker(object):
def __init__(self):
self.repeater_map = {}
def __setitem__(self, key, value):
try:
self.repeater_map[key].index = value
except KeyError, e:
self.repeater_map[key] = Repeater(value)
def __getitem__(self, key):
return self.repeater_map[key]
class Repeater(object):
def __init__(self, index=0, length=None):
self.index = index
self.length = length
@property
def number(self):
return self.index + 1
@property
def even(self):
return not (self.index % 2)
@property
def odd(self):
return (self.index % 2)
@property
def first(self):
return (self.index == 0)
@property
def last(self):
return (self.index == (self.length - 1))
def reiterate(iterable):
try:
length = len(iterable)
except TypeError:
# if the iterable is a generator, then we have no length
length = None
for index, item in enumerate(iterable):
yield (Repeater(index, length), item)
Revert last change (breaks XSPT)
|
class RepeatTracker(object):
def __init__(self):
self.repeater_map = {}
def __setitem__(self, key, value):
try:
self.repeater_map[key].index = value
except KeyError, e:
self.repeater_map[key] = Repeater(value)
def __getitem__(self, key):
return self.repeater_map[key]
class Repeater(object):
def __init__(self, index=0, item=None, length=None):
self.index = index
self.item = item
self.length = length
@property
def number(self):
return self.index + 1
@property
def even(self):
return not (self.index % 2)
@property
def odd(self):
return (self.index % 2)
@property
def first(self):
return (self.index == 0)
@property
def last(self):
return (self.index == (self.length - 1))
class RepeatIterator(object):
def __init__(self, iterable):
self.src_iterable = iterable
self.src_iterator = enumerate(iterable)
try:
self.length = len(iterable)
except TypeError:
# if the iterable is a generator, then we have no length
self.length = None
def __iter__(self):
return self
def next(self):
index, item = self.src_iterator.next()
return Repeater(index, item, self.length)
|
<commit_before>class RepeatTracker(object):
def __init__(self):
self.repeater_map = {}
def __setitem__(self, key, value):
try:
self.repeater_map[key].index = value
except KeyError, e:
self.repeater_map[key] = Repeater(value)
def __getitem__(self, key):
return self.repeater_map[key]
class Repeater(object):
def __init__(self, index=0, length=None):
self.index = index
self.length = length
@property
def number(self):
return self.index + 1
@property
def even(self):
return not (self.index % 2)
@property
def odd(self):
return (self.index % 2)
@property
def first(self):
return (self.index == 0)
@property
def last(self):
return (self.index == (self.length - 1))
def reiterate(iterable):
try:
length = len(iterable)
except TypeError:
# if the iterable is a generator, then we have no length
length = None
for index, item in enumerate(iterable):
yield (Repeater(index, length), item)
<commit_msg>Revert last change (breaks XSPT)<commit_after>
|
class RepeatTracker(object):
def __init__(self):
self.repeater_map = {}
def __setitem__(self, key, value):
try:
self.repeater_map[key].index = value
except KeyError, e:
self.repeater_map[key] = Repeater(value)
def __getitem__(self, key):
return self.repeater_map[key]
class Repeater(object):
def __init__(self, index=0, item=None, length=None):
self.index = index
self.item = item
self.length = length
@property
def number(self):
return self.index + 1
@property
def even(self):
return not (self.index % 2)
@property
def odd(self):
return (self.index % 2)
@property
def first(self):
return (self.index == 0)
@property
def last(self):
return (self.index == (self.length - 1))
class RepeatIterator(object):
def __init__(self, iterable):
self.src_iterable = iterable
self.src_iterator = enumerate(iterable)
try:
self.length = len(iterable)
except TypeError:
# if the iterable is a generator, then we have no length
self.length = None
def __iter__(self):
return self
def next(self):
index, item = self.src_iterator.next()
return Repeater(index, item, self.length)
|
class RepeatTracker(object):
def __init__(self):
self.repeater_map = {}
def __setitem__(self, key, value):
try:
self.repeater_map[key].index = value
except KeyError, e:
self.repeater_map[key] = Repeater(value)
def __getitem__(self, key):
return self.repeater_map[key]
class Repeater(object):
def __init__(self, index=0, length=None):
self.index = index
self.length = length
@property
def number(self):
return self.index + 1
@property
def even(self):
return not (self.index % 2)
@property
def odd(self):
return (self.index % 2)
@property
def first(self):
return (self.index == 0)
@property
def last(self):
return (self.index == (self.length - 1))
def reiterate(iterable):
try:
length = len(iterable)
except TypeError:
# if the iterable is a generator, then we have no length
length = None
for index, item in enumerate(iterable):
yield (Repeater(index, length), item)
Revert last change (breaks XSPT)class RepeatTracker(object):
def __init__(self):
self.repeater_map = {}
def __setitem__(self, key, value):
try:
self.repeater_map[key].index = value
except KeyError, e:
self.repeater_map[key] = Repeater(value)
def __getitem__(self, key):
return self.repeater_map[key]
class Repeater(object):
def __init__(self, index=0, item=None, length=None):
self.index = index
self.item = item
self.length = length
@property
def number(self):
return self.index + 1
@property
def even(self):
return not (self.index % 2)
@property
def odd(self):
return (self.index % 2)
@property
def first(self):
return (self.index == 0)
@property
def last(self):
return (self.index == (self.length - 1))
class RepeatIterator(object):
def __init__(self, iterable):
self.src_iterable = iterable
self.src_iterator = enumerate(iterable)
try:
self.length = len(iterable)
except TypeError:
# if the iterable is a generator, then we have no length
self.length = None
def __iter__(self):
return self
def next(self):
index, item = self.src_iterator.next()
return Repeater(index, item, self.length)
|
<commit_before>class RepeatTracker(object):
def __init__(self):
self.repeater_map = {}
def __setitem__(self, key, value):
try:
self.repeater_map[key].index = value
except KeyError, e:
self.repeater_map[key] = Repeater(value)
def __getitem__(self, key):
return self.repeater_map[key]
class Repeater(object):
def __init__(self, index=0, length=None):
self.index = index
self.length = length
@property
def number(self):
return self.index + 1
@property
def even(self):
return not (self.index % 2)
@property
def odd(self):
return (self.index % 2)
@property
def first(self):
return (self.index == 0)
@property
def last(self):
return (self.index == (self.length - 1))
def reiterate(iterable):
try:
length = len(iterable)
except TypeError:
# if the iterable is a generator, then we have no length
length = None
for index, item in enumerate(iterable):
yield (Repeater(index, length), item)
<commit_msg>Revert last change (breaks XSPT)<commit_after>class RepeatTracker(object):
def __init__(self):
self.repeater_map = {}
def __setitem__(self, key, value):
try:
self.repeater_map[key].index = value
except KeyError, e:
self.repeater_map[key] = Repeater(value)
def __getitem__(self, key):
return self.repeater_map[key]
class Repeater(object):
def __init__(self, index=0, item=None, length=None):
self.index = index
self.item = item
self.length = length
@property
def number(self):
return self.index + 1
@property
def even(self):
return not (self.index % 2)
@property
def odd(self):
return (self.index % 2)
@property
def first(self):
return (self.index == 0)
@property
def last(self):
return (self.index == (self.length - 1))
class RepeatIterator(object):
def __init__(self, iterable):
self.src_iterable = iterable
self.src_iterator = enumerate(iterable)
try:
self.length = len(iterable)
except TypeError:
# if the iterable is a generator, then we have no length
self.length = None
def __iter__(self):
return self
def next(self):
index, item = self.src_iterator.next()
return Repeater(index, item, self.length)
|
11f5b2a82da1fad974c4ed505b9cd4938414b859
|
sponsorship_compassion/model/project_compassion.py
|
sponsorship_compassion/model/project_compassion.py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def suspend_project(self, cr, uid, project_id,
start, context=None):
""" When a project is suspended, We update all contracts of
sponsored children in the project, so that we don't create invoices
during the period of suspension.
"""
project = self.browse(cr, uid, project_id, context)
contract_obj = self.pool.get('recurring.contract')
contract_ids = contract_obj.search(
cr, uid, [('child_code', 'like', project.code),
('state', 'in', ('active', 'waiting'))], context=context)
# For now, suspend the contract for 3 months
contract_obj.suspend_contract(cr, uid, contract_ids, start, 3, context)
return True
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def suspend_project(self, cr, uid, project_id,
start, context=None):
""" When a project is suspended, We update all contracts of
sponsored children in the project, so that we don't create invoices
during the period of suspension.
We also remove the children on internet.
"""
project = self.browse(cr, uid, project_id, context)
contract_obj = self.pool.get('recurring.contract')
contract_ids = contract_obj.search(cr, uid, [
('child_code', 'like', project.code),
('state', 'in', ('active', 'waiting'))], context=context)
# For now, suspend the contract for 3 months
contract_obj.suspend_contract(cr, uid, contract_ids, start, 3,
context)
# Children to remove from internet
child_obj = self.pool.get('compassion.child')
child_ids = child_obj.search(cr, uid, [
('code', 'like', project.code),
('state', '=', 'I')], context=context)
if child_ids:
child_obj.child_remove_from_typo3(cr, uid, child_ids, context)
return True
|
Remove children of a suspended project from internet.
|
Remove children of a suspended project from internet.
|
Python
|
agpl-3.0
|
eicher31/compassion-modules,Secheron/compassion-modules,ndtran/compassion-accounting,emgirardin/compassion-modules,CompassionCH/compassion-modules,MickSandoz/compassion-modules,maxime-beck/compassion-modules,maxime-beck/compassion-modules,CompassionCH/compassion-switzerland,ecino/compassion-switzerland,ecino/compassion-accounting,CompassionCH/compassion-accounting,CompassionCH/compassion-switzerland,ndtran/compassion-switzerland,ndtran/compassion-switzerland,Secheron/compassion-switzerland,ndtran/compassion-accounting,ecino/compassion-switzerland,MickSandoz/compassion-modules,eicher31/compassion-switzerland,Secheron/compassion-switzerland,MickSandoz/compassion-switzerland,eicher31/compassion-modules,Secheron/compassion-modules,CompassionCH/compassion-modules,ecino/compassion-switzerland,ecino/compassion-modules,maxime-beck/compassion-modules,maxime-beck/compassion-modules,ndtran/compassion-accounting,eicher31/compassion-modules,CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,ecino/compassion-modules,CompassionCH/compassion-modules,emgirardin/compassion-modules,MickSandoz/compassion-switzerland,emgirardin/compassion-modules,philippe89/compassion-modules,ndtran/compassion-modules,philippe89/compassion-modules,MickSandoz/compassion-modules,ecino/compassion-modules,ndtran/compassion-modules,ecino/compassion-modules,eicher31/compassion-modules,philippe89/compassion-modules,ndtran/compassion-modules,CompassionCH/compassion-modules,eicher31/compassion-modules,Secheron/compassion-modules,ecino/compassion-accounting,ecino/compassion-modules,eicher31/compassion-switzerland,CompassionCH/compassion-accounting
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def suspend_project(self, cr, uid, project_id,
start, context=None):
""" When a project is suspended, We update all contracts of
sponsored children in the project, so that we don't create invoices
during the period of suspension.
"""
project = self.browse(cr, uid, project_id, context)
contract_obj = self.pool.get('recurring.contract')
contract_ids = contract_obj.search(
cr, uid, [('child_code', 'like', project.code),
('state', 'in', ('active', 'waiting'))], context=context)
# For now, suspend the contract for 3 months
contract_obj.suspend_contract(cr, uid, contract_ids, start, 3, context)
return True
Remove children of a suspended project from internet.
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def suspend_project(self, cr, uid, project_id,
start, context=None):
""" When a project is suspended, We update all contracts of
sponsored children in the project, so that we don't create invoices
during the period of suspension.
We also remove the children on internet.
"""
project = self.browse(cr, uid, project_id, context)
contract_obj = self.pool.get('recurring.contract')
contract_ids = contract_obj.search(cr, uid, [
('child_code', 'like', project.code),
('state', 'in', ('active', 'waiting'))], context=context)
# For now, suspend the contract for 3 months
contract_obj.suspend_contract(cr, uid, contract_ids, start, 3,
context)
# Children to remove from internet
child_obj = self.pool.get('compassion.child')
child_ids = child_obj.search(cr, uid, [
('code', 'like', project.code),
('state', '=', 'I')], context=context)
if child_ids:
child_obj.child_remove_from_typo3(cr, uid, child_ids, context)
return True
|
<commit_before># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def suspend_project(self, cr, uid, project_id,
start, context=None):
""" When a project is suspended, We update all contracts of
sponsored children in the project, so that we don't create invoices
during the period of suspension.
"""
project = self.browse(cr, uid, project_id, context)
contract_obj = self.pool.get('recurring.contract')
contract_ids = contract_obj.search(
cr, uid, [('child_code', 'like', project.code),
('state', 'in', ('active', 'waiting'))], context=context)
# For now, suspend the contract for 3 months
contract_obj.suspend_contract(cr, uid, contract_ids, start, 3, context)
return True
<commit_msg>Remove children of a suspended project from internet.<commit_after>
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def suspend_project(self, cr, uid, project_id,
start, context=None):
""" When a project is suspended, We update all contracts of
sponsored children in the project, so that we don't create invoices
during the period of suspension.
We also remove the children on internet.
"""
project = self.browse(cr, uid, project_id, context)
contract_obj = self.pool.get('recurring.contract')
contract_ids = contract_obj.search(cr, uid, [
('child_code', 'like', project.code),
('state', 'in', ('active', 'waiting'))], context=context)
# For now, suspend the contract for 3 months
contract_obj.suspend_contract(cr, uid, contract_ids, start, 3,
context)
# Children to remove from internet
child_obj = self.pool.get('compassion.child')
child_ids = child_obj.search(cr, uid, [
('code', 'like', project.code),
('state', '=', 'I')], context=context)
if child_ids:
child_obj.child_remove_from_typo3(cr, uid, child_ids, context)
return True
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def suspend_project(self, cr, uid, project_id,
start, context=None):
""" When a project is suspended, We update all contracts of
sponsored children in the project, so that we don't create invoices
during the period of suspension.
"""
project = self.browse(cr, uid, project_id, context)
contract_obj = self.pool.get('recurring.contract')
contract_ids = contract_obj.search(
cr, uid, [('child_code', 'like', project.code),
('state', 'in', ('active', 'waiting'))], context=context)
# For now, suspend the contract for 3 months
contract_obj.suspend_contract(cr, uid, contract_ids, start, 3, context)
return True
Remove children of a suspended project from internet.# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def suspend_project(self, cr, uid, project_id,
start, context=None):
""" When a project is suspended, We update all contracts of
sponsored children in the project, so that we don't create invoices
during the period of suspension.
We also remove the children on internet.
"""
project = self.browse(cr, uid, project_id, context)
contract_obj = self.pool.get('recurring.contract')
contract_ids = contract_obj.search(cr, uid, [
('child_code', 'like', project.code),
('state', 'in', ('active', 'waiting'))], context=context)
# For now, suspend the contract for 3 months
contract_obj.suspend_contract(cr, uid, contract_ids, start, 3,
context)
# Children to remove from internet
child_obj = self.pool.get('compassion.child')
child_ids = child_obj.search(cr, uid, [
('code', 'like', project.code),
('state', '=', 'I')], context=context)
if child_ids:
child_obj.child_remove_from_typo3(cr, uid, child_ids, context)
return True
|
<commit_before># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def suspend_project(self, cr, uid, project_id,
start, context=None):
""" When a project is suspended, We update all contracts of
sponsored children in the project, so that we don't create invoices
during the period of suspension.
"""
project = self.browse(cr, uid, project_id, context)
contract_obj = self.pool.get('recurring.contract')
contract_ids = contract_obj.search(
cr, uid, [('child_code', 'like', project.code),
('state', 'in', ('active', 'waiting'))], context=context)
# For now, suspend the contract for 3 months
contract_obj.suspend_contract(cr, uid, contract_ids, start, 3, context)
return True
<commit_msg>Remove children of a suspended project from internet.<commit_after># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def suspend_project(self, cr, uid, project_id,
start, context=None):
""" When a project is suspended, We update all contracts of
sponsored children in the project, so that we don't create invoices
during the period of suspension.
We also remove the children on internet.
"""
project = self.browse(cr, uid, project_id, context)
contract_obj = self.pool.get('recurring.contract')
contract_ids = contract_obj.search(cr, uid, [
('child_code', 'like', project.code),
('state', 'in', ('active', 'waiting'))], context=context)
# For now, suspend the contract for 3 months
contract_obj.suspend_contract(cr, uid, contract_ids, start, 3,
context)
# Children to remove from internet
child_obj = self.pool.get('compassion.child')
child_ids = child_obj.search(cr, uid, [
('code', 'like', project.code),
('state', '=', 'I')], context=context)
if child_ids:
child_obj.child_remove_from_typo3(cr, uid, child_ids, context)
return True
|
29b5337132373d624f291af3f64bb3b05fd48e77
|
tests/test_list.py
|
tests/test_list.py
|
import os
import unittest
from carbonate.list import listMetrics
class ListTest(unittest.TestCase):
metrics_tree = ["foo",
"foo/sprockets.wsp",
"foo/widgets.wsp",
"ham",
"ham/bones.wsp",
"ham/hocks.wsp"]
expected_metrics = ["foo.sprockets",
"foo.widgets",
"ham.bones",
"ham.hocks"]
rootdir = os.path.join(os.curdir, 'test_storage')
@classmethod
def setUpClass(cls):
os.system("rm -rf %s" % cls.rootdir)
os.mkdir(cls.rootdir)
for f in cls.metrics_tree:
if f.endswith('wsp'):
open(os.path.join(cls.rootdir, f), 'w').close()
else:
os.mkdir(os.path.join(cls.rootdir, f))
def test_list(self):
res = list(listMetrics(self.rootdir))
self.assertEqual(res, self.expected_metrics)
def test_list_with_trailing_slash(self):
res = list(listMetrics(self.rootdir + '/'))
self.assertEqual(res, self.expected_metrics)
@classmethod
def tearDownClass(cls):
os.system("rm -rf %s" % cls.rootdir)
|
import os
import unittest
from carbonate.list import listMetrics
class ListTest(unittest.TestCase):
metrics_tree = ["foo",
"foo/sprockets.wsp",
"foo/widgets.wsp",
"ham",
"ham/bones.wsp",
"ham/hocks.wsp"]
expected_metrics = ["foo.sprockets",
"foo.widgets",
"ham.bones",
"ham.hocks"]
rootdir = os.path.join(os.curdir, 'test_storage')
@classmethod
def setUpClass(cls):
os.system("rm -rf %s" % cls.rootdir)
os.mkdir(cls.rootdir)
for f in cls.metrics_tree:
if f.endswith('wsp'):
open(os.path.join(cls.rootdir, f), 'w').close()
else:
os.mkdir(os.path.join(cls.rootdir, f))
def test_list(self):
res = sorted(list(listMetrics(self.rootdir)))
self.assertEqual(res, self.expected_metrics)
def test_list_with_trailing_slash(self):
res = sorted(list(listMetrics(self.rootdir + '/')))
self.assertEqual(res, self.expected_metrics)
@classmethod
def tearDownClass(cls):
os.system("rm -rf %s" % cls.rootdir)
|
Make sure we're sorting results
|
Make sure we're sorting results
|
Python
|
mit
|
skbkontur/carbonate,unbrice/carbonate,skbkontur/carbonate,ross/carbonate,ross/carbonate,graphite-project/carbonate,deniszh/carbonate,unbrice/carbonate,jssjr/carbonate,criteo-forks/carbonate,criteo-forks/carbonate,ross/carbonate,jssjr/carbonate,unbrice/carbonate,skbkontur/carbonate,jssjr/carbonate,graphite-project/carbonate,deniszh/carbonate,criteo-forks/carbonate,graphite-project/carbonate,deniszh/carbonate
|
import os
import unittest
from carbonate.list import listMetrics
class ListTest(unittest.TestCase):
metrics_tree = ["foo",
"foo/sprockets.wsp",
"foo/widgets.wsp",
"ham",
"ham/bones.wsp",
"ham/hocks.wsp"]
expected_metrics = ["foo.sprockets",
"foo.widgets",
"ham.bones",
"ham.hocks"]
rootdir = os.path.join(os.curdir, 'test_storage')
@classmethod
def setUpClass(cls):
os.system("rm -rf %s" % cls.rootdir)
os.mkdir(cls.rootdir)
for f in cls.metrics_tree:
if f.endswith('wsp'):
open(os.path.join(cls.rootdir, f), 'w').close()
else:
os.mkdir(os.path.join(cls.rootdir, f))
def test_list(self):
res = list(listMetrics(self.rootdir))
self.assertEqual(res, self.expected_metrics)
def test_list_with_trailing_slash(self):
res = list(listMetrics(self.rootdir + '/'))
self.assertEqual(res, self.expected_metrics)
@classmethod
def tearDownClass(cls):
os.system("rm -rf %s" % cls.rootdir)
Make sure we're sorting results
|
import os
import unittest
from carbonate.list import listMetrics
class ListTest(unittest.TestCase):
metrics_tree = ["foo",
"foo/sprockets.wsp",
"foo/widgets.wsp",
"ham",
"ham/bones.wsp",
"ham/hocks.wsp"]
expected_metrics = ["foo.sprockets",
"foo.widgets",
"ham.bones",
"ham.hocks"]
rootdir = os.path.join(os.curdir, 'test_storage')
@classmethod
def setUpClass(cls):
os.system("rm -rf %s" % cls.rootdir)
os.mkdir(cls.rootdir)
for f in cls.metrics_tree:
if f.endswith('wsp'):
open(os.path.join(cls.rootdir, f), 'w').close()
else:
os.mkdir(os.path.join(cls.rootdir, f))
def test_list(self):
res = sorted(list(listMetrics(self.rootdir)))
self.assertEqual(res, self.expected_metrics)
def test_list_with_trailing_slash(self):
res = sorted(list(listMetrics(self.rootdir + '/')))
self.assertEqual(res, self.expected_metrics)
@classmethod
def tearDownClass(cls):
os.system("rm -rf %s" % cls.rootdir)
|
<commit_before>import os
import unittest
from carbonate.list import listMetrics
class ListTest(unittest.TestCase):
metrics_tree = ["foo",
"foo/sprockets.wsp",
"foo/widgets.wsp",
"ham",
"ham/bones.wsp",
"ham/hocks.wsp"]
expected_metrics = ["foo.sprockets",
"foo.widgets",
"ham.bones",
"ham.hocks"]
rootdir = os.path.join(os.curdir, 'test_storage')
@classmethod
def setUpClass(cls):
os.system("rm -rf %s" % cls.rootdir)
os.mkdir(cls.rootdir)
for f in cls.metrics_tree:
if f.endswith('wsp'):
open(os.path.join(cls.rootdir, f), 'w').close()
else:
os.mkdir(os.path.join(cls.rootdir, f))
def test_list(self):
res = list(listMetrics(self.rootdir))
self.assertEqual(res, self.expected_metrics)
def test_list_with_trailing_slash(self):
res = list(listMetrics(self.rootdir + '/'))
self.assertEqual(res, self.expected_metrics)
@classmethod
def tearDownClass(cls):
os.system("rm -rf %s" % cls.rootdir)
<commit_msg>Make sure we're sorting results<commit_after>
|
import os
import unittest
from carbonate.list import listMetrics
class ListTest(unittest.TestCase):
metrics_tree = ["foo",
"foo/sprockets.wsp",
"foo/widgets.wsp",
"ham",
"ham/bones.wsp",
"ham/hocks.wsp"]
expected_metrics = ["foo.sprockets",
"foo.widgets",
"ham.bones",
"ham.hocks"]
rootdir = os.path.join(os.curdir, 'test_storage')
@classmethod
def setUpClass(cls):
os.system("rm -rf %s" % cls.rootdir)
os.mkdir(cls.rootdir)
for f in cls.metrics_tree:
if f.endswith('wsp'):
open(os.path.join(cls.rootdir, f), 'w').close()
else:
os.mkdir(os.path.join(cls.rootdir, f))
def test_list(self):
res = sorted(list(listMetrics(self.rootdir)))
self.assertEqual(res, self.expected_metrics)
def test_list_with_trailing_slash(self):
res = sorted(list(listMetrics(self.rootdir + '/')))
self.assertEqual(res, self.expected_metrics)
@classmethod
def tearDownClass(cls):
os.system("rm -rf %s" % cls.rootdir)
|
import os
import unittest
from carbonate.list import listMetrics
class ListTest(unittest.TestCase):
metrics_tree = ["foo",
"foo/sprockets.wsp",
"foo/widgets.wsp",
"ham",
"ham/bones.wsp",
"ham/hocks.wsp"]
expected_metrics = ["foo.sprockets",
"foo.widgets",
"ham.bones",
"ham.hocks"]
rootdir = os.path.join(os.curdir, 'test_storage')
@classmethod
def setUpClass(cls):
os.system("rm -rf %s" % cls.rootdir)
os.mkdir(cls.rootdir)
for f in cls.metrics_tree:
if f.endswith('wsp'):
open(os.path.join(cls.rootdir, f), 'w').close()
else:
os.mkdir(os.path.join(cls.rootdir, f))
def test_list(self):
res = list(listMetrics(self.rootdir))
self.assertEqual(res, self.expected_metrics)
def test_list_with_trailing_slash(self):
res = list(listMetrics(self.rootdir + '/'))
self.assertEqual(res, self.expected_metrics)
@classmethod
def tearDownClass(cls):
os.system("rm -rf %s" % cls.rootdir)
Make sure we're sorting resultsimport os
import unittest
from carbonate.list import listMetrics
class ListTest(unittest.TestCase):
metrics_tree = ["foo",
"foo/sprockets.wsp",
"foo/widgets.wsp",
"ham",
"ham/bones.wsp",
"ham/hocks.wsp"]
expected_metrics = ["foo.sprockets",
"foo.widgets",
"ham.bones",
"ham.hocks"]
rootdir = os.path.join(os.curdir, 'test_storage')
@classmethod
def setUpClass(cls):
os.system("rm -rf %s" % cls.rootdir)
os.mkdir(cls.rootdir)
for f in cls.metrics_tree:
if f.endswith('wsp'):
open(os.path.join(cls.rootdir, f), 'w').close()
else:
os.mkdir(os.path.join(cls.rootdir, f))
def test_list(self):
res = sorted(list(listMetrics(self.rootdir)))
self.assertEqual(res, self.expected_metrics)
def test_list_with_trailing_slash(self):
res = sorted(list(listMetrics(self.rootdir + '/')))
self.assertEqual(res, self.expected_metrics)
@classmethod
def tearDownClass(cls):
os.system("rm -rf %s" % cls.rootdir)
|
<commit_before>import os
import unittest
from carbonate.list import listMetrics
class ListTest(unittest.TestCase):
metrics_tree = ["foo",
"foo/sprockets.wsp",
"foo/widgets.wsp",
"ham",
"ham/bones.wsp",
"ham/hocks.wsp"]
expected_metrics = ["foo.sprockets",
"foo.widgets",
"ham.bones",
"ham.hocks"]
rootdir = os.path.join(os.curdir, 'test_storage')
@classmethod
def setUpClass(cls):
os.system("rm -rf %s" % cls.rootdir)
os.mkdir(cls.rootdir)
for f in cls.metrics_tree:
if f.endswith('wsp'):
open(os.path.join(cls.rootdir, f), 'w').close()
else:
os.mkdir(os.path.join(cls.rootdir, f))
def test_list(self):
res = list(listMetrics(self.rootdir))
self.assertEqual(res, self.expected_metrics)
def test_list_with_trailing_slash(self):
res = list(listMetrics(self.rootdir + '/'))
self.assertEqual(res, self.expected_metrics)
@classmethod
def tearDownClass(cls):
os.system("rm -rf %s" % cls.rootdir)
<commit_msg>Make sure we're sorting results<commit_after>import os
import unittest
from carbonate.list import listMetrics
class ListTest(unittest.TestCase):
metrics_tree = ["foo",
"foo/sprockets.wsp",
"foo/widgets.wsp",
"ham",
"ham/bones.wsp",
"ham/hocks.wsp"]
expected_metrics = ["foo.sprockets",
"foo.widgets",
"ham.bones",
"ham.hocks"]
rootdir = os.path.join(os.curdir, 'test_storage')
@classmethod
def setUpClass(cls):
os.system("rm -rf %s" % cls.rootdir)
os.mkdir(cls.rootdir)
for f in cls.metrics_tree:
if f.endswith('wsp'):
open(os.path.join(cls.rootdir, f), 'w').close()
else:
os.mkdir(os.path.join(cls.rootdir, f))
def test_list(self):
res = sorted(list(listMetrics(self.rootdir)))
self.assertEqual(res, self.expected_metrics)
def test_list_with_trailing_slash(self):
res = sorted(list(listMetrics(self.rootdir + '/')))
self.assertEqual(res, self.expected_metrics)
@classmethod
def tearDownClass(cls):
os.system("rm -rf %s" % cls.rootdir)
|
bf383d4425510a17bed0780fd80d2e3b1c741aa8
|
run-preglyphs.py
|
run-preglyphs.py
|
#MenuTitle: Run preglyphs
# -*- coding: utf-8 -*-
__doc__="""
Runs preglyphs from your chosen project folder then open the generated file
"""
__copyright__ = 'Copyright (c) 2019, SIL International (http://www.sil.org)'
__license__ = 'Released under the MIT License (http://opensource.org/licenses/MIT)'
__author__ = 'Nicolas Spalinger'
import GlyphsApp
from subprocess import Popen, PIPE
def runAppleScript(scpt, args=[]):
p = Popen(['osascript', '-'] + args, stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate(scpt)
if stderr:
print "AppleScript Error:"
print stderr.decode('utf-8')
return stdout
runpreglyphs = """
tell application "Finder"
activate
set frontmost to true
set projectRoot to quoted form of POSIX path of (choose folder with prompt "Please select the project folder root, e.g. font-gentium")
set sourcefolder to projectRoot & "source/"
tell application "Terminal"
activate
tell window 1
do script "cd " & projectRoot & "; ./preglyphs"
delay 25
do script "cd " & sourcefolder & "; open *.glyphs"
tell window 1 to quit
end tell
end tell
end tell
"""
save = runAppleScript( runpreglyphs )
|
#MenuTitle: Run preglyphs
# -*- coding: utf-8 -*-
__doc__="""
Runs preglyphs from your chosen project folder then open the generated file
"""
__copyright__ = 'Copyright (c) 2019, SIL International (http://www.sil.org)'
__license__ = 'Released under the MIT License (http://opensource.org/licenses/MIT)'
__author__ = 'Nicolas Spalinger'
import GlyphsApp
from subprocess import Popen, PIPE
def runAppleScript(scpt, args=[]):
p = Popen(['osascript', '-'] + args, stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate(scpt)
if stderr:
print "AppleScript Error:"
print stderr.decode('utf-8')
return stdout
runpreglyphs = """
tell application "Finder"
activate
set frontmost to true
set projectRoot to quoted form of POSIX path of (choose folder with prompt "Please select the project folder root, e.g. font-gentium")
set sourcefolder to projectRoot & "source/"
tell application "Terminal"
activate
tell window 1
do script "cd " & projectRoot & "; ./preglyphs"
delay 25
do script "cd " & sourcefolder & "; open *.glyphs masters/*.glyphs"
tell window 1 to quit
end tell
end tell
end tell
"""
save = runAppleScript( runpreglyphs )
|
Expand path for other folder structures.
|
Expand path for other folder structures.
|
Python
|
mit
|
n7s/scripts-for-glyphs,n7s/scripts-for-glyphs
|
#MenuTitle: Run preglyphs
# -*- coding: utf-8 -*-
__doc__="""
Runs preglyphs from your chosen project folder then open the generated file
"""
__copyright__ = 'Copyright (c) 2019, SIL International (http://www.sil.org)'
__license__ = 'Released under the MIT License (http://opensource.org/licenses/MIT)'
__author__ = 'Nicolas Spalinger'
import GlyphsApp
from subprocess import Popen, PIPE
def runAppleScript(scpt, args=[]):
p = Popen(['osascript', '-'] + args, stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate(scpt)
if stderr:
print "AppleScript Error:"
print stderr.decode('utf-8')
return stdout
runpreglyphs = """
tell application "Finder"
activate
set frontmost to true
set projectRoot to quoted form of POSIX path of (choose folder with prompt "Please select the project folder root, e.g. font-gentium")
set sourcefolder to projectRoot & "source/"
tell application "Terminal"
activate
tell window 1
do script "cd " & projectRoot & "; ./preglyphs"
delay 25
do script "cd " & sourcefolder & "; open *.glyphs"
tell window 1 to quit
end tell
end tell
end tell
"""
save = runAppleScript( runpreglyphs )
Expand path for other folder structures.
|
#MenuTitle: Run preglyphs
# -*- coding: utf-8 -*-
__doc__="""
Runs preglyphs from your chosen project folder then open the generated file
"""
__copyright__ = 'Copyright (c) 2019, SIL International (http://www.sil.org)'
__license__ = 'Released under the MIT License (http://opensource.org/licenses/MIT)'
__author__ = 'Nicolas Spalinger'
import GlyphsApp
from subprocess import Popen, PIPE
def runAppleScript(scpt, args=[]):
p = Popen(['osascript', '-'] + args, stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate(scpt)
if stderr:
print "AppleScript Error:"
print stderr.decode('utf-8')
return stdout
runpreglyphs = """
tell application "Finder"
activate
set frontmost to true
set projectRoot to quoted form of POSIX path of (choose folder with prompt "Please select the project folder root, e.g. font-gentium")
set sourcefolder to projectRoot & "source/"
tell application "Terminal"
activate
tell window 1
do script "cd " & projectRoot & "; ./preglyphs"
delay 25
do script "cd " & sourcefolder & "; open *.glyphs masters/*.glyphs"
tell window 1 to quit
end tell
end tell
end tell
"""
save = runAppleScript( runpreglyphs )
|
<commit_before>#MenuTitle: Run preglyphs
# -*- coding: utf-8 -*-
__doc__="""
Runs preglyphs from your chosen project folder then open the generated file
"""
__copyright__ = 'Copyright (c) 2019, SIL International (http://www.sil.org)'
__license__ = 'Released under the MIT License (http://opensource.org/licenses/MIT)'
__author__ = 'Nicolas Spalinger'
import GlyphsApp
from subprocess import Popen, PIPE
def runAppleScript(scpt, args=[]):
p = Popen(['osascript', '-'] + args, stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate(scpt)
if stderr:
print "AppleScript Error:"
print stderr.decode('utf-8')
return stdout
runpreglyphs = """
tell application "Finder"
activate
set frontmost to true
set projectRoot to quoted form of POSIX path of (choose folder with prompt "Please select the project folder root, e.g. font-gentium")
set sourcefolder to projectRoot & "source/"
tell application "Terminal"
activate
tell window 1
do script "cd " & projectRoot & "; ./preglyphs"
delay 25
do script "cd " & sourcefolder & "; open *.glyphs"
tell window 1 to quit
end tell
end tell
end tell
"""
save = runAppleScript( runpreglyphs )
<commit_msg>Expand path for other folder structures.<commit_after>
|
#MenuTitle: Run preglyphs
# -*- coding: utf-8 -*-
__doc__="""
Runs preglyphs from your chosen project folder then open the generated file
"""
__copyright__ = 'Copyright (c) 2019, SIL International (http://www.sil.org)'
__license__ = 'Released under the MIT License (http://opensource.org/licenses/MIT)'
__author__ = 'Nicolas Spalinger'
import GlyphsApp
from subprocess import Popen, PIPE
def runAppleScript(scpt, args=[]):
p = Popen(['osascript', '-'] + args, stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate(scpt)
if stderr:
print "AppleScript Error:"
print stderr.decode('utf-8')
return stdout
runpreglyphs = """
tell application "Finder"
activate
set frontmost to true
set projectRoot to quoted form of POSIX path of (choose folder with prompt "Please select the project folder root, e.g. font-gentium")
set sourcefolder to projectRoot & "source/"
tell application "Terminal"
activate
tell window 1
do script "cd " & projectRoot & "; ./preglyphs"
delay 25
do script "cd " & sourcefolder & "; open *.glyphs masters/*.glyphs"
tell window 1 to quit
end tell
end tell
end tell
"""
save = runAppleScript( runpreglyphs )
|
#MenuTitle: Run preglyphs
# -*- coding: utf-8 -*-
__doc__="""
Runs preglyphs from your chosen project folder then open the generated file
"""
__copyright__ = 'Copyright (c) 2019, SIL International (http://www.sil.org)'
__license__ = 'Released under the MIT License (http://opensource.org/licenses/MIT)'
__author__ = 'Nicolas Spalinger'
import GlyphsApp
from subprocess import Popen, PIPE
def runAppleScript(scpt, args=[]):
p = Popen(['osascript', '-'] + args, stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate(scpt)
if stderr:
print "AppleScript Error:"
print stderr.decode('utf-8')
return stdout
runpreglyphs = """
tell application "Finder"
activate
set frontmost to true
set projectRoot to quoted form of POSIX path of (choose folder with prompt "Please select the project folder root, e.g. font-gentium")
set sourcefolder to projectRoot & "source/"
tell application "Terminal"
activate
tell window 1
do script "cd " & projectRoot & "; ./preglyphs"
delay 25
do script "cd " & sourcefolder & "; open *.glyphs"
tell window 1 to quit
end tell
end tell
end tell
"""
save = runAppleScript( runpreglyphs )
Expand path for other folder structures.#MenuTitle: Run preglyphs
# -*- coding: utf-8 -*-
__doc__="""
Runs preglyphs from your chosen project folder then open the generated file
"""
__copyright__ = 'Copyright (c) 2019, SIL International (http://www.sil.org)'
__license__ = 'Released under the MIT License (http://opensource.org/licenses/MIT)'
__author__ = 'Nicolas Spalinger'
import GlyphsApp
from subprocess import Popen, PIPE
def runAppleScript(scpt, args=[]):
p = Popen(['osascript', '-'] + args, stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate(scpt)
if stderr:
print "AppleScript Error:"
print stderr.decode('utf-8')
return stdout
runpreglyphs = """
tell application "Finder"
activate
set frontmost to true
set projectRoot to quoted form of POSIX path of (choose folder with prompt "Please select the project folder root, e.g. font-gentium")
set sourcefolder to projectRoot & "source/"
tell application "Terminal"
activate
tell window 1
do script "cd " & projectRoot & "; ./preglyphs"
delay 25
do script "cd " & sourcefolder & "; open *.glyphs masters/*.glyphs"
tell window 1 to quit
end tell
end tell
end tell
"""
save = runAppleScript( runpreglyphs )
|
<commit_before>#MenuTitle: Run preglyphs
# -*- coding: utf-8 -*-
__doc__="""
Runs preglyphs from your chosen project folder then open the generated file
"""
__copyright__ = 'Copyright (c) 2019, SIL International (http://www.sil.org)'
__license__ = 'Released under the MIT License (http://opensource.org/licenses/MIT)'
__author__ = 'Nicolas Spalinger'
import GlyphsApp
from subprocess import Popen, PIPE
def runAppleScript(scpt, args=[]):
p = Popen(['osascript', '-'] + args, stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate(scpt)
if stderr:
print "AppleScript Error:"
print stderr.decode('utf-8')
return stdout
runpreglyphs = """
tell application "Finder"
activate
set frontmost to true
set projectRoot to quoted form of POSIX path of (choose folder with prompt "Please select the project folder root, e.g. font-gentium")
set sourcefolder to projectRoot & "source/"
tell application "Terminal"
activate
tell window 1
do script "cd " & projectRoot & "; ./preglyphs"
delay 25
do script "cd " & sourcefolder & "; open *.glyphs"
tell window 1 to quit
end tell
end tell
end tell
"""
save = runAppleScript( runpreglyphs )
<commit_msg>Expand path for other folder structures.<commit_after>#MenuTitle: Run preglyphs
# -*- coding: utf-8 -*-
__doc__="""
Runs preglyphs from your chosen project folder then open the generated file
"""
__copyright__ = 'Copyright (c) 2019, SIL International (http://www.sil.org)'
__license__ = 'Released under the MIT License (http://opensource.org/licenses/MIT)'
__author__ = 'Nicolas Spalinger'
import GlyphsApp
from subprocess import Popen, PIPE
def runAppleScript(scpt, args=[]):
p = Popen(['osascript', '-'] + args, stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate(scpt)
if stderr:
print "AppleScript Error:"
print stderr.decode('utf-8')
return stdout
runpreglyphs = """
tell application "Finder"
activate
set frontmost to true
set projectRoot to quoted form of POSIX path of (choose folder with prompt "Please select the project folder root, e.g. font-gentium")
set sourcefolder to projectRoot & "source/"
tell application "Terminal"
activate
tell window 1
do script "cd " & projectRoot & "; ./preglyphs"
delay 25
do script "cd " & sourcefolder & "; open *.glyphs masters/*.glyphs"
tell window 1 to quit
end tell
end tell
end tell
"""
save = runAppleScript( runpreglyphs )
|
5cd6ed09511fdd40714ebe647577cb77fd366f7f
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, WARNING
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
cmd = 'pug-lint ${temp_file} ${args}'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
tempfile_suffix = 'pug'
error_stream = util.STREAM_BOTH
defaults = {
'selector': 'text.pug, source.pypug, text.jade',
'--reporter=': 'inline'
}
default_type = WARNING
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, WARNING
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
cmd = 'pug-lint ${temp_file} ${args}'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
on_stderr = None
tempfile_suffix = 'pug'
defaults = {
'selector': 'text.pug, source.pypug, text.jade',
'--reporter=': 'inline'
}
default_type = WARNING
|
Remove `error_stream`; Add `on_stderr` to make it works
|
Remove `error_stream`; Add `on_stderr` to make it works
|
Python
|
mit
|
benedfit/SublimeLinter-contrib-pug-lint,benedfit/SublimeLinter-contrib-jade-lint
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, WARNING
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
cmd = 'pug-lint ${temp_file} ${args}'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
tempfile_suffix = 'pug'
error_stream = util.STREAM_BOTH
defaults = {
'selector': 'text.pug, source.pypug, text.jade',
'--reporter=': 'inline'
}
default_type = WARNING
Remove `error_stream`; Add `on_stderr` to make it works
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, WARNING
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
cmd = 'pug-lint ${temp_file} ${args}'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
on_stderr = None
tempfile_suffix = 'pug'
defaults = {
'selector': 'text.pug, source.pypug, text.jade',
'--reporter=': 'inline'
}
default_type = WARNING
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, WARNING
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
cmd = 'pug-lint ${temp_file} ${args}'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
tempfile_suffix = 'pug'
error_stream = util.STREAM_BOTH
defaults = {
'selector': 'text.pug, source.pypug, text.jade',
'--reporter=': 'inline'
}
default_type = WARNING
<commit_msg>Remove `error_stream`; Add `on_stderr` to make it works<commit_after>
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, WARNING
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
cmd = 'pug-lint ${temp_file} ${args}'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
on_stderr = None
tempfile_suffix = 'pug'
defaults = {
'selector': 'text.pug, source.pypug, text.jade',
'--reporter=': 'inline'
}
default_type = WARNING
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, WARNING
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
cmd = 'pug-lint ${temp_file} ${args}'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
tempfile_suffix = 'pug'
error_stream = util.STREAM_BOTH
defaults = {
'selector': 'text.pug, source.pypug, text.jade',
'--reporter=': 'inline'
}
default_type = WARNING
Remove `error_stream`; Add `on_stderr` to make it works#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, WARNING
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
cmd = 'pug-lint ${temp_file} ${args}'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
on_stderr = None
tempfile_suffix = 'pug'
defaults = {
'selector': 'text.pug, source.pypug, text.jade',
'--reporter=': 'inline'
}
default_type = WARNING
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, WARNING
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
cmd = 'pug-lint ${temp_file} ${args}'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
tempfile_suffix = 'pug'
error_stream = util.STREAM_BOTH
defaults = {
'selector': 'text.pug, source.pypug, text.jade',
'--reporter=': 'inline'
}
default_type = WARNING
<commit_msg>Remove `error_stream`; Add `on_stderr` to make it works<commit_after>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, WARNING
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
cmd = 'pug-lint ${temp_file} ${args}'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
on_stderr = None
tempfile_suffix = 'pug'
defaults = {
'selector': 'text.pug, source.pypug, text.jade',
'--reporter=': 'inline'
}
default_type = WARNING
|
9bc6ae4eef9d3ac133dec5310180503c9e37807b
|
allauth/socialaccount/providers/kakao/provider.py
|
allauth/socialaccount/providers/kakao/provider.py
|
from allauth.account.models import EmailAddress
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KakaoAccount(ProviderAccount):
@property
def properties(self):
return self.account.extra_data.get('properties')
def get_avatar_url(self):
return self.properties.get('profile_image')
def to_str(self):
dflt = super(KakaoAccount, self).to_str()
return self.properties.get('nickname', dflt)
class KakaoProvider(OAuth2Provider):
id = 'kakao'
name = 'Kakao'
account_class = KakaoAccount
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
email = data['kakao_account'].get('email')
nickname = data['properties'].get('nickname')
return dict(email=email, nickname=nickname)
def extract_email_addresses(self, data):
ret = []
data = data['kakao_account']
email = data.get('email')
if email:
verified = data.get('is_email_verified')
# data['is_email_verified'] imply the email address is
# verified
ret.append(EmailAddress(email=email,
verified=verified,
primary=True))
return ret
provider_classes = [KakaoProvider]
|
from allauth.account.models import EmailAddress
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KakaoAccount(ProviderAccount):
@property
def properties(self):
return self.account.extra_data.get('properties')
def get_avatar_url(self):
return self.properties.get('profile_image')
def to_str(self):
dflt = super(KakaoAccount, self).to_str()
return self.properties.get('nickname', dflt)
class KakaoProvider(OAuth2Provider):
id = 'kakao'
name = 'Kakao'
account_class = KakaoAccount
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
email = data['kakao_account'].get('email')
nickname = data['properties'].get('nickname')
return dict(email=email, username=nickname)
def extract_email_addresses(self, data):
ret = []
data = data['kakao_account']
email = data.get('email')
if email:
verified = data.get('is_email_verified')
# data['is_email_verified'] imply the email address is
# verified
ret.append(EmailAddress(email=email,
verified=verified,
primary=True))
return ret
provider_classes = [KakaoProvider]
|
Change field name from 'nickname' to 'username'
|
fix(kakao): Change field name from 'nickname' to 'username'
|
Python
|
mit
|
AltSchool/django-allauth,AltSchool/django-allauth,AltSchool/django-allauth
|
from allauth.account.models import EmailAddress
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KakaoAccount(ProviderAccount):
@property
def properties(self):
return self.account.extra_data.get('properties')
def get_avatar_url(self):
return self.properties.get('profile_image')
def to_str(self):
dflt = super(KakaoAccount, self).to_str()
return self.properties.get('nickname', dflt)
class KakaoProvider(OAuth2Provider):
id = 'kakao'
name = 'Kakao'
account_class = KakaoAccount
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
email = data['kakao_account'].get('email')
nickname = data['properties'].get('nickname')
return dict(email=email, nickname=nickname)
def extract_email_addresses(self, data):
ret = []
data = data['kakao_account']
email = data.get('email')
if email:
verified = data.get('is_email_verified')
# data['is_email_verified'] imply the email address is
# verified
ret.append(EmailAddress(email=email,
verified=verified,
primary=True))
return ret
provider_classes = [KakaoProvider]
fix(kakao): Change field name from 'nickname' to 'username'
|
from allauth.account.models import EmailAddress
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KakaoAccount(ProviderAccount):
@property
def properties(self):
return self.account.extra_data.get('properties')
def get_avatar_url(self):
return self.properties.get('profile_image')
def to_str(self):
dflt = super(KakaoAccount, self).to_str()
return self.properties.get('nickname', dflt)
class KakaoProvider(OAuth2Provider):
id = 'kakao'
name = 'Kakao'
account_class = KakaoAccount
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
email = data['kakao_account'].get('email')
nickname = data['properties'].get('nickname')
return dict(email=email, username=nickname)
def extract_email_addresses(self, data):
ret = []
data = data['kakao_account']
email = data.get('email')
if email:
verified = data.get('is_email_verified')
# data['is_email_verified'] imply the email address is
# verified
ret.append(EmailAddress(email=email,
verified=verified,
primary=True))
return ret
provider_classes = [KakaoProvider]
|
<commit_before>from allauth.account.models import EmailAddress
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KakaoAccount(ProviderAccount):
@property
def properties(self):
return self.account.extra_data.get('properties')
def get_avatar_url(self):
return self.properties.get('profile_image')
def to_str(self):
dflt = super(KakaoAccount, self).to_str()
return self.properties.get('nickname', dflt)
class KakaoProvider(OAuth2Provider):
id = 'kakao'
name = 'Kakao'
account_class = KakaoAccount
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
email = data['kakao_account'].get('email')
nickname = data['properties'].get('nickname')
return dict(email=email, nickname=nickname)
def extract_email_addresses(self, data):
ret = []
data = data['kakao_account']
email = data.get('email')
if email:
verified = data.get('is_email_verified')
# data['is_email_verified'] imply the email address is
# verified
ret.append(EmailAddress(email=email,
verified=verified,
primary=True))
return ret
provider_classes = [KakaoProvider]
<commit_msg>fix(kakao): Change field name from 'nickname' to 'username'<commit_after>
|
from allauth.account.models import EmailAddress
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KakaoAccount(ProviderAccount):
@property
def properties(self):
return self.account.extra_data.get('properties')
def get_avatar_url(self):
return self.properties.get('profile_image')
def to_str(self):
dflt = super(KakaoAccount, self).to_str()
return self.properties.get('nickname', dflt)
class KakaoProvider(OAuth2Provider):
id = 'kakao'
name = 'Kakao'
account_class = KakaoAccount
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
email = data['kakao_account'].get('email')
nickname = data['properties'].get('nickname')
return dict(email=email, username=nickname)
def extract_email_addresses(self, data):
ret = []
data = data['kakao_account']
email = data.get('email')
if email:
verified = data.get('is_email_verified')
# data['is_email_verified'] imply the email address is
# verified
ret.append(EmailAddress(email=email,
verified=verified,
primary=True))
return ret
provider_classes = [KakaoProvider]
|
from allauth.account.models import EmailAddress
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KakaoAccount(ProviderAccount):
@property
def properties(self):
return self.account.extra_data.get('properties')
def get_avatar_url(self):
return self.properties.get('profile_image')
def to_str(self):
dflt = super(KakaoAccount, self).to_str()
return self.properties.get('nickname', dflt)
class KakaoProvider(OAuth2Provider):
id = 'kakao'
name = 'Kakao'
account_class = KakaoAccount
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
email = data['kakao_account'].get('email')
nickname = data['properties'].get('nickname')
return dict(email=email, nickname=nickname)
def extract_email_addresses(self, data):
ret = []
data = data['kakao_account']
email = data.get('email')
if email:
verified = data.get('is_email_verified')
# data['is_email_verified'] imply the email address is
# verified
ret.append(EmailAddress(email=email,
verified=verified,
primary=True))
return ret
provider_classes = [KakaoProvider]
fix(kakao): Change field name from 'nickname' to 'username'from allauth.account.models import EmailAddress
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KakaoAccount(ProviderAccount):
@property
def properties(self):
return self.account.extra_data.get('properties')
def get_avatar_url(self):
return self.properties.get('profile_image')
def to_str(self):
dflt = super(KakaoAccount, self).to_str()
return self.properties.get('nickname', dflt)
class KakaoProvider(OAuth2Provider):
id = 'kakao'
name = 'Kakao'
account_class = KakaoAccount
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
email = data['kakao_account'].get('email')
nickname = data['properties'].get('nickname')
return dict(email=email, username=nickname)
def extract_email_addresses(self, data):
ret = []
data = data['kakao_account']
email = data.get('email')
if email:
verified = data.get('is_email_verified')
# data['is_email_verified'] imply the email address is
# verified
ret.append(EmailAddress(email=email,
verified=verified,
primary=True))
return ret
provider_classes = [KakaoProvider]
|
<commit_before>from allauth.account.models import EmailAddress
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KakaoAccount(ProviderAccount):
@property
def properties(self):
return self.account.extra_data.get('properties')
def get_avatar_url(self):
return self.properties.get('profile_image')
def to_str(self):
dflt = super(KakaoAccount, self).to_str()
return self.properties.get('nickname', dflt)
class KakaoProvider(OAuth2Provider):
id = 'kakao'
name = 'Kakao'
account_class = KakaoAccount
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
email = data['kakao_account'].get('email')
nickname = data['properties'].get('nickname')
return dict(email=email, nickname=nickname)
def extract_email_addresses(self, data):
ret = []
data = data['kakao_account']
email = data.get('email')
if email:
verified = data.get('is_email_verified')
# data['is_email_verified'] imply the email address is
# verified
ret.append(EmailAddress(email=email,
verified=verified,
primary=True))
return ret
provider_classes = [KakaoProvider]
<commit_msg>fix(kakao): Change field name from 'nickname' to 'username'<commit_after>from allauth.account.models import EmailAddress
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KakaoAccount(ProviderAccount):
@property
def properties(self):
return self.account.extra_data.get('properties')
def get_avatar_url(self):
return self.properties.get('profile_image')
def to_str(self):
dflt = super(KakaoAccount, self).to_str()
return self.properties.get('nickname', dflt)
class KakaoProvider(OAuth2Provider):
id = 'kakao'
name = 'Kakao'
account_class = KakaoAccount
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
email = data['kakao_account'].get('email')
nickname = data['properties'].get('nickname')
return dict(email=email, username=nickname)
def extract_email_addresses(self, data):
ret = []
data = data['kakao_account']
email = data.get('email')
if email:
verified = data.get('is_email_verified')
# data['is_email_verified'] imply the email address is
# verified
ret.append(EmailAddress(email=email,
verified=verified,
primary=True))
return ret
provider_classes = [KakaoProvider]
|
7bb5329abb41c16bb59b03cf467b4abec4d948bf
|
my_test_suite/test_case.py
|
my_test_suite/test_case.py
|
class TestResult(object):
def __init__(self):
self.runCount = 0
self.errorCount = 0
def summary(self):
return "{} run, {} failed".format(self.runCount, self.errorCount)
def testStarted(self):
self.runCount += 1
def testFailed(self):
self.errorCount += 1
class TestSuite(object):
def __init__(self):
self.tests = []
def add(self, test):
self.tests.append(test)
def run(self, result):
for test in self.tests:
test.run(result)
class TestCase(object):
def __init__(self, name):
self.name = name
def run(self, result):
result.testStarted()
self.setUp()
try:
method = getattr(self, self.name)
method()
except:
result.testFailed()
self.tearDown()
return result
def setUp(self):
pass
def tearDown(self):
pass
class WasRun(TestCase):
def __init__(self, name):
self.log = ""
TestCase.__init__(self, name)
def setUp(self):
self.log += "setUp "
def tearDown(self):
self.log += "tearDown "
def testMethod(self):
self.log += "testMethod "
def testBrokenMethod(self):
raise Exception
|
class TestResult(object):
def __init__(self):
self.runCount = 0
self.errorCount = 0
self.setUpErrorCount = 0
def summary(self):
return "{} run, {} failed, {} setups failed".format(self.runCount,
self.errorCount,
self.setUpErrorCount)
def testStarted(self):
self.runCount += 1
def testFailed(self):
self.errorCount += 1
def setUpFailed(self):
self.setUpErrorCount += 1
class TestSuite(object):
def __init__(self):
self.tests = []
def add(self, test):
self.tests.append(test)
def run(self, result):
for test in self.tests:
test.run(result)
class TestCase(object):
def __init__(self, name):
self.name = name
def run(self, result):
result.testStarted()
try:
self.setUp()
except Exception as e:
result.setUpFailed()
try:
method = getattr(self, self.name)
method()
except:
result.testFailed()
self.tearDown()
return result
def setUp(self):
pass
def tearDown(self):
pass
class WasRun(TestCase):
def __init__(self, name):
self.log = ""
TestCase.__init__(self, name)
def setUp(self):
self.log += "setUp "
def tearDown(self):
self.log += "tearDown "
def testMethod(self):
self.log += "testMethod "
def testBrokenMethod(self):
raise Exception
|
Add handling for failed setup
|
Add handling for failed setup
|
Python
|
mit
|
stephtzhang/tdd
|
class TestResult(object):
def __init__(self):
self.runCount = 0
self.errorCount = 0
def summary(self):
return "{} run, {} failed".format(self.runCount, self.errorCount)
def testStarted(self):
self.runCount += 1
def testFailed(self):
self.errorCount += 1
class TestSuite(object):
def __init__(self):
self.tests = []
def add(self, test):
self.tests.append(test)
def run(self, result):
for test in self.tests:
test.run(result)
class TestCase(object):
def __init__(self, name):
self.name = name
def run(self, result):
result.testStarted()
self.setUp()
try:
method = getattr(self, self.name)
method()
except:
result.testFailed()
self.tearDown()
return result
def setUp(self):
pass
def tearDown(self):
pass
class WasRun(TestCase):
def __init__(self, name):
self.log = ""
TestCase.__init__(self, name)
def setUp(self):
self.log += "setUp "
def tearDown(self):
self.log += "tearDown "
def testMethod(self):
self.log += "testMethod "
def testBrokenMethod(self):
raise ExceptionAdd handling for failed setup
|
class TestResult(object):
def __init__(self):
self.runCount = 0
self.errorCount = 0
self.setUpErrorCount = 0
def summary(self):
return "{} run, {} failed, {} setups failed".format(self.runCount,
self.errorCount,
self.setUpErrorCount)
def testStarted(self):
self.runCount += 1
def testFailed(self):
self.errorCount += 1
def setUpFailed(self):
self.setUpErrorCount += 1
class TestSuite(object):
def __init__(self):
self.tests = []
def add(self, test):
self.tests.append(test)
def run(self, result):
for test in self.tests:
test.run(result)
class TestCase(object):
def __init__(self, name):
self.name = name
def run(self, result):
result.testStarted()
try:
self.setUp()
except Exception as e:
result.setUpFailed()
try:
method = getattr(self, self.name)
method()
except:
result.testFailed()
self.tearDown()
return result
def setUp(self):
pass
def tearDown(self):
pass
class WasRun(TestCase):
def __init__(self, name):
self.log = ""
TestCase.__init__(self, name)
def setUp(self):
self.log += "setUp "
def tearDown(self):
self.log += "tearDown "
def testMethod(self):
self.log += "testMethod "
def testBrokenMethod(self):
raise Exception
|
<commit_before>class TestResult(object):
def __init__(self):
self.runCount = 0
self.errorCount = 0
def summary(self):
return "{} run, {} failed".format(self.runCount, self.errorCount)
def testStarted(self):
self.runCount += 1
def testFailed(self):
self.errorCount += 1
class TestSuite(object):
def __init__(self):
self.tests = []
def add(self, test):
self.tests.append(test)
def run(self, result):
for test in self.tests:
test.run(result)
class TestCase(object):
def __init__(self, name):
self.name = name
def run(self, result):
result.testStarted()
self.setUp()
try:
method = getattr(self, self.name)
method()
except:
result.testFailed()
self.tearDown()
return result
def setUp(self):
pass
def tearDown(self):
pass
class WasRun(TestCase):
def __init__(self, name):
self.log = ""
TestCase.__init__(self, name)
def setUp(self):
self.log += "setUp "
def tearDown(self):
self.log += "tearDown "
def testMethod(self):
self.log += "testMethod "
def testBrokenMethod(self):
raise Exception<commit_msg>Add handling for failed setup<commit_after>
|
class TestResult(object):
def __init__(self):
self.runCount = 0
self.errorCount = 0
self.setUpErrorCount = 0
def summary(self):
return "{} run, {} failed, {} setups failed".format(self.runCount,
self.errorCount,
self.setUpErrorCount)
def testStarted(self):
self.runCount += 1
def testFailed(self):
self.errorCount += 1
def setUpFailed(self):
self.setUpErrorCount += 1
class TestSuite(object):
def __init__(self):
self.tests = []
def add(self, test):
self.tests.append(test)
def run(self, result):
for test in self.tests:
test.run(result)
class TestCase(object):
def __init__(self, name):
self.name = name
def run(self, result):
result.testStarted()
try:
self.setUp()
except Exception as e:
result.setUpFailed()
try:
method = getattr(self, self.name)
method()
except:
result.testFailed()
self.tearDown()
return result
def setUp(self):
pass
def tearDown(self):
pass
class WasRun(TestCase):
def __init__(self, name):
self.log = ""
TestCase.__init__(self, name)
def setUp(self):
self.log += "setUp "
def tearDown(self):
self.log += "tearDown "
def testMethod(self):
self.log += "testMethod "
def testBrokenMethod(self):
raise Exception
|
class TestResult(object):
def __init__(self):
self.runCount = 0
self.errorCount = 0
def summary(self):
return "{} run, {} failed".format(self.runCount, self.errorCount)
def testStarted(self):
self.runCount += 1
def testFailed(self):
self.errorCount += 1
class TestSuite(object):
def __init__(self):
self.tests = []
def add(self, test):
self.tests.append(test)
def run(self, result):
for test in self.tests:
test.run(result)
class TestCase(object):
def __init__(self, name):
self.name = name
def run(self, result):
result.testStarted()
self.setUp()
try:
method = getattr(self, self.name)
method()
except:
result.testFailed()
self.tearDown()
return result
def setUp(self):
pass
def tearDown(self):
pass
class WasRun(TestCase):
def __init__(self, name):
self.log = ""
TestCase.__init__(self, name)
def setUp(self):
self.log += "setUp "
def tearDown(self):
self.log += "tearDown "
def testMethod(self):
self.log += "testMethod "
def testBrokenMethod(self):
raise ExceptionAdd handling for failed setupclass TestResult(object):
def __init__(self):
self.runCount = 0
self.errorCount = 0
self.setUpErrorCount = 0
def summary(self):
return "{} run, {} failed, {} setups failed".format(self.runCount,
self.errorCount,
self.setUpErrorCount)
def testStarted(self):
self.runCount += 1
def testFailed(self):
self.errorCount += 1
def setUpFailed(self):
self.setUpErrorCount += 1
class TestSuite(object):
def __init__(self):
self.tests = []
def add(self, test):
self.tests.append(test)
def run(self, result):
for test in self.tests:
test.run(result)
class TestCase(object):
def __init__(self, name):
self.name = name
def run(self, result):
result.testStarted()
try:
self.setUp()
except Exception as e:
result.setUpFailed()
try:
method = getattr(self, self.name)
method()
except:
result.testFailed()
self.tearDown()
return result
def setUp(self):
pass
def tearDown(self):
pass
class WasRun(TestCase):
def __init__(self, name):
self.log = ""
TestCase.__init__(self, name)
def setUp(self):
self.log += "setUp "
def tearDown(self):
self.log += "tearDown "
def testMethod(self):
self.log += "testMethod "
def testBrokenMethod(self):
raise Exception
|
<commit_before>class TestResult(object):
def __init__(self):
self.runCount = 0
self.errorCount = 0
def summary(self):
return "{} run, {} failed".format(self.runCount, self.errorCount)
def testStarted(self):
self.runCount += 1
def testFailed(self):
self.errorCount += 1
class TestSuite(object):
def __init__(self):
self.tests = []
def add(self, test):
self.tests.append(test)
def run(self, result):
for test in self.tests:
test.run(result)
class TestCase(object):
def __init__(self, name):
self.name = name
def run(self, result):
result.testStarted()
self.setUp()
try:
method = getattr(self, self.name)
method()
except:
result.testFailed()
self.tearDown()
return result
def setUp(self):
pass
def tearDown(self):
pass
class WasRun(TestCase):
def __init__(self, name):
self.log = ""
TestCase.__init__(self, name)
def setUp(self):
self.log += "setUp "
def tearDown(self):
self.log += "tearDown "
def testMethod(self):
self.log += "testMethod "
def testBrokenMethod(self):
raise Exception<commit_msg>Add handling for failed setup<commit_after>class TestResult(object):
def __init__(self):
self.runCount = 0
self.errorCount = 0
self.setUpErrorCount = 0
def summary(self):
return "{} run, {} failed, {} setups failed".format(self.runCount,
self.errorCount,
self.setUpErrorCount)
def testStarted(self):
self.runCount += 1
def testFailed(self):
self.errorCount += 1
def setUpFailed(self):
self.setUpErrorCount += 1
class TestSuite(object):
def __init__(self):
self.tests = []
def add(self, test):
self.tests.append(test)
def run(self, result):
for test in self.tests:
test.run(result)
class TestCase(object):
def __init__(self, name):
self.name = name
def run(self, result):
result.testStarted()
try:
self.setUp()
except Exception as e:
result.setUpFailed()
try:
method = getattr(self, self.name)
method()
except:
result.testFailed()
self.tearDown()
return result
def setUp(self):
pass
def tearDown(self):
pass
class WasRun(TestCase):
def __init__(self, name):
self.log = ""
TestCase.__init__(self, name)
def setUp(self):
self.log += "setUp "
def tearDown(self):
self.log += "tearDown "
def testMethod(self):
self.log += "testMethod "
def testBrokenMethod(self):
raise Exception
|
e28ba167fe0fafd9db5f2e582520b3237d1be36f
|
Python/Mac/sample_python_mac.py
|
Python/Mac/sample_python_mac.py
|
#using the pymssql driver
import pymssql
#Connect to your database.
#Replace server name, username, password, and database name with your credentials
conn = pymssql.connect(server='yourserver.database.windows.net',
user='yourusername@yourserver', password='yourpassword',
database='AdventureWorks')
cursor = conn.cursor()
#Execute a simple select statement.
#Replace schema name and table name with your own
cursor.execute('SELECT c.CustomerID, c.CompanyName,COUNT(soh.SalesOrderID) AS OrderCount FROM SalesLT.Customer AS c LEFT OUTER JOIN SalesLT.SalesOrderHeader AS soh ON c.CustomerID = soh.CustomerID GROUP BY c.CustomerID, c.CompanyName ORDER BY OrderCount DESC;')
row = cursor.fetchone()
#Print results from select statement.
while row:
print str(row[0]) + " " + str(row[1]) + " " + str(row[2])
row = cursor.fetchone()
#INSERT
#Execute an insert statement
cursor.execute("INSERT SalesLT.Product (Name, ProductNumber, StandardCost, ListPrice, SellStartDate) OUTPUT INSERTED.ProductID VALUES ('SQL Server Express', 'SQLEXPRESS', 0, 0, CURRENT_TIMESTAMP)")
row = cursor.fetchone()
#Print the ID of the inserted row.
while row:
print "Inserted Product ID : " +str(row[0])
row = cursor.fetchone()
|
#using the pymssql driver
import pymssql
#Connect to your database.
#Replace server name, username, password, and database name with your credentials
conn = pymssql.connect(server='yourserver.database.windows.net',
user='yourusername@yourserver', password='yourpassword',
database='AdventureWorks')
cursor = conn.cursor()
#Execute a simple select statement.
#Replace schema name and table name with your own
cursor.execute('SELECT c.CustomerID, c.CompanyName,COUNT(soh.SalesOrderID) AS OrderCount FROM SalesLT.Customer AS c LEFT OUTER JOIN SalesLT.SalesOrderHeader AS soh ON c.CustomerID = soh.CustomerID GROUP BY c.CustomerID, c.CompanyName ORDER BY OrderCount DESC;')
row = cursor.fetchone()
#Print results from select statement.
while row:
print str(row[0]) + " " + str(row[1]) + " " + str(row[2])
row = cursor.fetchone()
#INSERT
#Execute an insert statement
cursor.execute("INSERT SalesLT.Product (Name, ProductNumber, StandardCost, ListPrice, SellStartDate) OUTPUT INSERTED.ProductID VALUES ('SQL Server Express', 'SQLEXPRESS', 0, 0, CURRENT_TIMESTAMP)")
row = cursor.fetchone()
#Print the ID of the inserted row.
while row:
print "Inserted Product ID : " +str(row[0])
row = cursor.fetchone()
|
Fix white space in python example.
|
Fix white space in python example.
|
Python
|
mit
|
Azure/azure-sql-database-samples,Azure/azure-sql-database-samples,Azure/azure-sql-database-samples,Azure/azure-sql-database-samples,Azure/azure-sql-database-samples,Azure/azure-sql-database-samples,Azure/azure-sql-database-samples
|
#using the pymssql driver
import pymssql
#Connect to your database.
#Replace server name, username, password, and database name with your credentials
conn = pymssql.connect(server='yourserver.database.windows.net',
user='yourusername@yourserver', password='yourpassword',
database='AdventureWorks')
cursor = conn.cursor()
#Execute a simple select statement.
#Replace schema name and table name with your own
cursor.execute('SELECT c.CustomerID, c.CompanyName,COUNT(soh.SalesOrderID) AS OrderCount FROM SalesLT.Customer AS c LEFT OUTER JOIN SalesLT.SalesOrderHeader AS soh ON c.CustomerID = soh.CustomerID GROUP BY c.CustomerID, c.CompanyName ORDER BY OrderCount DESC;')
row = cursor.fetchone()
#Print results from select statement.
while row:
print str(row[0]) + " " + str(row[1]) + " " + str(row[2])
row = cursor.fetchone()
#INSERT
#Execute an insert statement
cursor.execute("INSERT SalesLT.Product (Name, ProductNumber, StandardCost, ListPrice, SellStartDate) OUTPUT INSERTED.ProductID VALUES ('SQL Server Express', 'SQLEXPRESS', 0, 0, CURRENT_TIMESTAMP)")
row = cursor.fetchone()
#Print the ID of the inserted row.
while row:
print "Inserted Product ID : " +str(row[0])
row = cursor.fetchone()Fix white space in python example.
|
#using the pymssql driver
import pymssql
#Connect to your database.
#Replace server name, username, password, and database name with your credentials
conn = pymssql.connect(server='yourserver.database.windows.net',
user='yourusername@yourserver', password='yourpassword',
database='AdventureWorks')
cursor = conn.cursor()
#Execute a simple select statement.
#Replace schema name and table name with your own
cursor.execute('SELECT c.CustomerID, c.CompanyName,COUNT(soh.SalesOrderID) AS OrderCount FROM SalesLT.Customer AS c LEFT OUTER JOIN SalesLT.SalesOrderHeader AS soh ON c.CustomerID = soh.CustomerID GROUP BY c.CustomerID, c.CompanyName ORDER BY OrderCount DESC;')
row = cursor.fetchone()
#Print results from select statement.
while row:
print str(row[0]) + " " + str(row[1]) + " " + str(row[2])
row = cursor.fetchone()
#INSERT
#Execute an insert statement
cursor.execute("INSERT SalesLT.Product (Name, ProductNumber, StandardCost, ListPrice, SellStartDate) OUTPUT INSERTED.ProductID VALUES ('SQL Server Express', 'SQLEXPRESS', 0, 0, CURRENT_TIMESTAMP)")
row = cursor.fetchone()
#Print the ID of the inserted row.
while row:
print "Inserted Product ID : " +str(row[0])
row = cursor.fetchone()
|
<commit_before>#using the pymssql driver
import pymssql
#Connect to your database.
#Replace server name, username, password, and database name with your credentials
conn = pymssql.connect(server='yourserver.database.windows.net',
user='yourusername@yourserver', password='yourpassword',
database='AdventureWorks')
cursor = conn.cursor()
#Execute a simple select statement.
#Replace schema name and table name with your own
cursor.execute('SELECT c.CustomerID, c.CompanyName,COUNT(soh.SalesOrderID) AS OrderCount FROM SalesLT.Customer AS c LEFT OUTER JOIN SalesLT.SalesOrderHeader AS soh ON c.CustomerID = soh.CustomerID GROUP BY c.CustomerID, c.CompanyName ORDER BY OrderCount DESC;')
row = cursor.fetchone()
#Print results from select statement.
while row:
print str(row[0]) + " " + str(row[1]) + " " + str(row[2])
row = cursor.fetchone()
#INSERT
#Execute an insert statement
cursor.execute("INSERT SalesLT.Product (Name, ProductNumber, StandardCost, ListPrice, SellStartDate) OUTPUT INSERTED.ProductID VALUES ('SQL Server Express', 'SQLEXPRESS', 0, 0, CURRENT_TIMESTAMP)")
row = cursor.fetchone()
#Print the ID of the inserted row.
while row:
print "Inserted Product ID : " +str(row[0])
row = cursor.fetchone()<commit_msg>Fix white space in python example.<commit_after>
|
#using the pymssql driver
import pymssql
#Connect to your database.
#Replace server name, username, password, and database name with your credentials
conn = pymssql.connect(server='yourserver.database.windows.net',
user='yourusername@yourserver', password='yourpassword',
database='AdventureWorks')
cursor = conn.cursor()
#Execute a simple select statement.
#Replace schema name and table name with your own
cursor.execute('SELECT c.CustomerID, c.CompanyName,COUNT(soh.SalesOrderID) AS OrderCount FROM SalesLT.Customer AS c LEFT OUTER JOIN SalesLT.SalesOrderHeader AS soh ON c.CustomerID = soh.CustomerID GROUP BY c.CustomerID, c.CompanyName ORDER BY OrderCount DESC;')
row = cursor.fetchone()
#Print results from select statement.
while row:
print str(row[0]) + " " + str(row[1]) + " " + str(row[2])
row = cursor.fetchone()
#INSERT
#Execute an insert statement
cursor.execute("INSERT SalesLT.Product (Name, ProductNumber, StandardCost, ListPrice, SellStartDate) OUTPUT INSERTED.ProductID VALUES ('SQL Server Express', 'SQLEXPRESS', 0, 0, CURRENT_TIMESTAMP)")
row = cursor.fetchone()
#Print the ID of the inserted row.
while row:
print "Inserted Product ID : " +str(row[0])
row = cursor.fetchone()
|
#using the pymssql driver
import pymssql
#Connect to your database.
#Replace server name, username, password, and database name with your credentials
conn = pymssql.connect(server='yourserver.database.windows.net',
user='yourusername@yourserver', password='yourpassword',
database='AdventureWorks')
cursor = conn.cursor()
#Execute a simple select statement.
#Replace schema name and table name with your own
cursor.execute('SELECT c.CustomerID, c.CompanyName,COUNT(soh.SalesOrderID) AS OrderCount FROM SalesLT.Customer AS c LEFT OUTER JOIN SalesLT.SalesOrderHeader AS soh ON c.CustomerID = soh.CustomerID GROUP BY c.CustomerID, c.CompanyName ORDER BY OrderCount DESC;')
row = cursor.fetchone()
#Print results from select statement.
while row:
print str(row[0]) + " " + str(row[1]) + " " + str(row[2])
row = cursor.fetchone()
#INSERT
#Execute an insert statement
cursor.execute("INSERT SalesLT.Product (Name, ProductNumber, StandardCost, ListPrice, SellStartDate) OUTPUT INSERTED.ProductID VALUES ('SQL Server Express', 'SQLEXPRESS', 0, 0, CURRENT_TIMESTAMP)")
row = cursor.fetchone()
#Print the ID of the inserted row.
while row:
print "Inserted Product ID : " +str(row[0])
row = cursor.fetchone()Fix white space in python example.#using the pymssql driver
import pymssql
#Connect to your database.
#Replace server name, username, password, and database name with your credentials
conn = pymssql.connect(server='yourserver.database.windows.net',
user='yourusername@yourserver', password='yourpassword',
database='AdventureWorks')
cursor = conn.cursor()
#Execute a simple select statement.
#Replace schema name and table name with your own
cursor.execute('SELECT c.CustomerID, c.CompanyName,COUNT(soh.SalesOrderID) AS OrderCount FROM SalesLT.Customer AS c LEFT OUTER JOIN SalesLT.SalesOrderHeader AS soh ON c.CustomerID = soh.CustomerID GROUP BY c.CustomerID, c.CompanyName ORDER BY OrderCount DESC;')
row = cursor.fetchone()
#Print results from select statement.
while row:
print str(row[0]) + " " + str(row[1]) + " " + str(row[2])
row = cursor.fetchone()
#INSERT
#Execute an insert statement
cursor.execute("INSERT SalesLT.Product (Name, ProductNumber, StandardCost, ListPrice, SellStartDate) OUTPUT INSERTED.ProductID VALUES ('SQL Server Express', 'SQLEXPRESS', 0, 0, CURRENT_TIMESTAMP)")
row = cursor.fetchone()
#Print the ID of the inserted row.
while row:
print "Inserted Product ID : " +str(row[0])
row = cursor.fetchone()
|
<commit_before>#using the pymssql driver
import pymssql
#Connect to your database.
#Replace server name, username, password, and database name with your credentials
conn = pymssql.connect(server='yourserver.database.windows.net',
user='yourusername@yourserver', password='yourpassword',
database='AdventureWorks')
cursor = conn.cursor()
#Execute a simple select statement.
#Replace schema name and table name with your own
cursor.execute('SELECT c.CustomerID, c.CompanyName,COUNT(soh.SalesOrderID) AS OrderCount FROM SalesLT.Customer AS c LEFT OUTER JOIN SalesLT.SalesOrderHeader AS soh ON c.CustomerID = soh.CustomerID GROUP BY c.CustomerID, c.CompanyName ORDER BY OrderCount DESC;')
row = cursor.fetchone()
#Print results from select statement.
while row:
print str(row[0]) + " " + str(row[1]) + " " + str(row[2])
row = cursor.fetchone()
#INSERT
#Execute an insert statement
cursor.execute("INSERT SalesLT.Product (Name, ProductNumber, StandardCost, ListPrice, SellStartDate) OUTPUT INSERTED.ProductID VALUES ('SQL Server Express', 'SQLEXPRESS', 0, 0, CURRENT_TIMESTAMP)")
row = cursor.fetchone()
#Print the ID of the inserted row.
while row:
print "Inserted Product ID : " +str(row[0])
row = cursor.fetchone()<commit_msg>Fix white space in python example.<commit_after>#using the pymssql driver
import pymssql
#Connect to your database.
#Replace server name, username, password, and database name with your credentials
conn = pymssql.connect(server='yourserver.database.windows.net',
user='yourusername@yourserver', password='yourpassword',
database='AdventureWorks')
cursor = conn.cursor()
#Execute a simple select statement.
#Replace schema name and table name with your own
cursor.execute('SELECT c.CustomerID, c.CompanyName,COUNT(soh.SalesOrderID) AS OrderCount FROM SalesLT.Customer AS c LEFT OUTER JOIN SalesLT.SalesOrderHeader AS soh ON c.CustomerID = soh.CustomerID GROUP BY c.CustomerID, c.CompanyName ORDER BY OrderCount DESC;')
row = cursor.fetchone()
#Print results from select statement.
while row:
print str(row[0]) + " " + str(row[1]) + " " + str(row[2])
row = cursor.fetchone()
#INSERT
#Execute an insert statement
cursor.execute("INSERT SalesLT.Product (Name, ProductNumber, StandardCost, ListPrice, SellStartDate) OUTPUT INSERTED.ProductID VALUES ('SQL Server Express', 'SQLEXPRESS', 0, 0, CURRENT_TIMESTAMP)")
row = cursor.fetchone()
#Print the ID of the inserted row.
while row:
print "Inserted Product ID : " +str(row[0])
row = cursor.fetchone()
|
0ac444affdff8db699684aa4cf04c2cb0daf0286
|
rplugin/python3/denite/source/workspaceSymbol.py
|
rplugin/python3/denite/source/workspaceSymbol.py
|
from os import path
import sys
from .base import Base
sys.path.insert(0, path.dirname(path.dirname(__file__)))
from common import ( # isort:skip # noqa: I100
convert_symbols_to_candidates,
SYMBOL_CANDIDATE_HIGHLIGHT_SYNTAX,
highlight_setup,
)
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.vim = vim
self.name = 'workspaceSymbol'
self.kind = 'file'
def highlight(self):
highlight_setup(self, SYMBOL_CANDIDATE_HIGHLIGHT_SYNTAX)
def gather_candidates(self, context):
result = self.vim.funcs.LanguageClient_runSync(
'LanguageClient#workspace_symbol', '', {}) or []
return convert_symbols_to_candidates(result,
pwd=self.vim.funcs.getcwd())
|
from os import path
import sys
from .base import Base
sys.path.insert(0, path.dirname(path.dirname(__file__)))
from common import ( # isort:skip # noqa: I100
convert_symbols_to_candidates,
SYMBOL_CANDIDATE_HIGHLIGHT_SYNTAX,
highlight_setup,
)
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.vim = vim
self.name = 'workspaceSymbol'
self.kind = 'file'
def highlight(self):
highlight_setup(self, SYMBOL_CANDIDATE_HIGHLIGHT_SYNTAX)
def gather_candidates(self, context):
context['is_interactive'] = True
prefix = context['input']
bufnr = context['bufnr']
# This a hack to get around the fact that LanguageClient APIs
# work in the context of the active buffer, when filtering results
# interactively, the denite buffer is the active buffer and it doesn't
# have a language server asscosiated with it.
# We just switch to the buffer that initiated the denite transaction
# and execute the command from it. This should be changed when we
# have a better way to run requests out of the buffer.
# See issue#674
current_buffer = self.vim.current.buffer.number
if current_buffer != bufnr:
self.vim.command("tabedit %")
self.vim.command(
"execute 'noautocmd keepalt buffer' {}".format(bufnr))
result = self.vim.funcs.LanguageClient_runSync(
'LanguageClient#workspace_symbol', prefix, {}) or []
if current_buffer != bufnr:
self.vim.command("tabclose")
candidates = convert_symbols_to_candidates(
result,
pwd=self.vim.funcs.getcwd())
return candidates
|
Make workspace symbols interactive in denite
|
Make workspace symbols interactive in denite
Some servers limit the amount of symbols they return.
Having an interactive implementation allows us to use the server instead
of the client which means we allways get the best results of the query.
|
Python
|
mit
|
autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim
|
from os import path
import sys
from .base import Base
sys.path.insert(0, path.dirname(path.dirname(__file__)))
from common import ( # isort:skip # noqa: I100
convert_symbols_to_candidates,
SYMBOL_CANDIDATE_HIGHLIGHT_SYNTAX,
highlight_setup,
)
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.vim = vim
self.name = 'workspaceSymbol'
self.kind = 'file'
def highlight(self):
highlight_setup(self, SYMBOL_CANDIDATE_HIGHLIGHT_SYNTAX)
def gather_candidates(self, context):
result = self.vim.funcs.LanguageClient_runSync(
'LanguageClient#workspace_symbol', '', {}) or []
return convert_symbols_to_candidates(result,
pwd=self.vim.funcs.getcwd())
Make workspace symbols interactive in denite
Some servers limit the amount of symbols they return.
Having an interactive implementation allows us to use the server instead
of the client which means we allways get the best results of the query.
|
from os import path
import sys
from .base import Base
sys.path.insert(0, path.dirname(path.dirname(__file__)))
from common import ( # isort:skip # noqa: I100
convert_symbols_to_candidates,
SYMBOL_CANDIDATE_HIGHLIGHT_SYNTAX,
highlight_setup,
)
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.vim = vim
self.name = 'workspaceSymbol'
self.kind = 'file'
def highlight(self):
highlight_setup(self, SYMBOL_CANDIDATE_HIGHLIGHT_SYNTAX)
def gather_candidates(self, context):
context['is_interactive'] = True
prefix = context['input']
bufnr = context['bufnr']
# This a hack to get around the fact that LanguageClient APIs
# work in the context of the active buffer, when filtering results
# interactively, the denite buffer is the active buffer and it doesn't
# have a language server asscosiated with it.
# We just switch to the buffer that initiated the denite transaction
# and execute the command from it. This should be changed when we
# have a better way to run requests out of the buffer.
# See issue#674
current_buffer = self.vim.current.buffer.number
if current_buffer != bufnr:
self.vim.command("tabedit %")
self.vim.command(
"execute 'noautocmd keepalt buffer' {}".format(bufnr))
result = self.vim.funcs.LanguageClient_runSync(
'LanguageClient#workspace_symbol', prefix, {}) or []
if current_buffer != bufnr:
self.vim.command("tabclose")
candidates = convert_symbols_to_candidates(
result,
pwd=self.vim.funcs.getcwd())
return candidates
|
<commit_before>from os import path
import sys
from .base import Base
sys.path.insert(0, path.dirname(path.dirname(__file__)))
from common import ( # isort:skip # noqa: I100
convert_symbols_to_candidates,
SYMBOL_CANDIDATE_HIGHLIGHT_SYNTAX,
highlight_setup,
)
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.vim = vim
self.name = 'workspaceSymbol'
self.kind = 'file'
def highlight(self):
highlight_setup(self, SYMBOL_CANDIDATE_HIGHLIGHT_SYNTAX)
def gather_candidates(self, context):
result = self.vim.funcs.LanguageClient_runSync(
'LanguageClient#workspace_symbol', '', {}) or []
return convert_symbols_to_candidates(result,
pwd=self.vim.funcs.getcwd())
<commit_msg>Make workspace symbols interactive in denite
Some servers limit the amount of symbols they return.
Having an interactive implementation allows us to use the server instead
of the client which means we allways get the best results of the query.<commit_after>
|
from os import path
import sys
from .base import Base
sys.path.insert(0, path.dirname(path.dirname(__file__)))
from common import ( # isort:skip # noqa: I100
convert_symbols_to_candidates,
SYMBOL_CANDIDATE_HIGHLIGHT_SYNTAX,
highlight_setup,
)
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.vim = vim
self.name = 'workspaceSymbol'
self.kind = 'file'
def highlight(self):
highlight_setup(self, SYMBOL_CANDIDATE_HIGHLIGHT_SYNTAX)
def gather_candidates(self, context):
context['is_interactive'] = True
prefix = context['input']
bufnr = context['bufnr']
# This a hack to get around the fact that LanguageClient APIs
# work in the context of the active buffer, when filtering results
# interactively, the denite buffer is the active buffer and it doesn't
# have a language server asscosiated with it.
# We just switch to the buffer that initiated the denite transaction
# and execute the command from it. This should be changed when we
# have a better way to run requests out of the buffer.
# See issue#674
current_buffer = self.vim.current.buffer.number
if current_buffer != bufnr:
self.vim.command("tabedit %")
self.vim.command(
"execute 'noautocmd keepalt buffer' {}".format(bufnr))
result = self.vim.funcs.LanguageClient_runSync(
'LanguageClient#workspace_symbol', prefix, {}) or []
if current_buffer != bufnr:
self.vim.command("tabclose")
candidates = convert_symbols_to_candidates(
result,
pwd=self.vim.funcs.getcwd())
return candidates
|
from os import path
import sys
from .base import Base
sys.path.insert(0, path.dirname(path.dirname(__file__)))
from common import ( # isort:skip # noqa: I100
convert_symbols_to_candidates,
SYMBOL_CANDIDATE_HIGHLIGHT_SYNTAX,
highlight_setup,
)
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.vim = vim
self.name = 'workspaceSymbol'
self.kind = 'file'
def highlight(self):
highlight_setup(self, SYMBOL_CANDIDATE_HIGHLIGHT_SYNTAX)
def gather_candidates(self, context):
result = self.vim.funcs.LanguageClient_runSync(
'LanguageClient#workspace_symbol', '', {}) or []
return convert_symbols_to_candidates(result,
pwd=self.vim.funcs.getcwd())
Make workspace symbols interactive in denite
Some servers limit the amount of symbols they return.
Having an interactive implementation allows us to use the server instead
of the client which means we allways get the best results of the query.from os import path
import sys
from .base import Base
sys.path.insert(0, path.dirname(path.dirname(__file__)))
from common import ( # isort:skip # noqa: I100
convert_symbols_to_candidates,
SYMBOL_CANDIDATE_HIGHLIGHT_SYNTAX,
highlight_setup,
)
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.vim = vim
self.name = 'workspaceSymbol'
self.kind = 'file'
def highlight(self):
highlight_setup(self, SYMBOL_CANDIDATE_HIGHLIGHT_SYNTAX)
def gather_candidates(self, context):
context['is_interactive'] = True
prefix = context['input']
bufnr = context['bufnr']
# This a hack to get around the fact that LanguageClient APIs
# work in the context of the active buffer, when filtering results
# interactively, the denite buffer is the active buffer and it doesn't
# have a language server asscosiated with it.
# We just switch to the buffer that initiated the denite transaction
# and execute the command from it. This should be changed when we
# have a better way to run requests out of the buffer.
# See issue#674
current_buffer = self.vim.current.buffer.number
if current_buffer != bufnr:
self.vim.command("tabedit %")
self.vim.command(
"execute 'noautocmd keepalt buffer' {}".format(bufnr))
result = self.vim.funcs.LanguageClient_runSync(
'LanguageClient#workspace_symbol', prefix, {}) or []
if current_buffer != bufnr:
self.vim.command("tabclose")
candidates = convert_symbols_to_candidates(
result,
pwd=self.vim.funcs.getcwd())
return candidates
|
<commit_before>from os import path
import sys
from .base import Base
sys.path.insert(0, path.dirname(path.dirname(__file__)))
from common import ( # isort:skip # noqa: I100
convert_symbols_to_candidates,
SYMBOL_CANDIDATE_HIGHLIGHT_SYNTAX,
highlight_setup,
)
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.vim = vim
self.name = 'workspaceSymbol'
self.kind = 'file'
def highlight(self):
highlight_setup(self, SYMBOL_CANDIDATE_HIGHLIGHT_SYNTAX)
def gather_candidates(self, context):
result = self.vim.funcs.LanguageClient_runSync(
'LanguageClient#workspace_symbol', '', {}) or []
return convert_symbols_to_candidates(result,
pwd=self.vim.funcs.getcwd())
<commit_msg>Make workspace symbols interactive in denite
Some servers limit the amount of symbols they return.
Having an interactive implementation allows us to use the server instead
of the client which means we allways get the best results of the query.<commit_after>from os import path
import sys
from .base import Base
sys.path.insert(0, path.dirname(path.dirname(__file__)))
from common import ( # isort:skip # noqa: I100
convert_symbols_to_candidates,
SYMBOL_CANDIDATE_HIGHLIGHT_SYNTAX,
highlight_setup,
)
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.vim = vim
self.name = 'workspaceSymbol'
self.kind = 'file'
def highlight(self):
highlight_setup(self, SYMBOL_CANDIDATE_HIGHLIGHT_SYNTAX)
def gather_candidates(self, context):
context['is_interactive'] = True
prefix = context['input']
bufnr = context['bufnr']
# This a hack to get around the fact that LanguageClient APIs
# work in the context of the active buffer, when filtering results
# interactively, the denite buffer is the active buffer and it doesn't
# have a language server asscosiated with it.
# We just switch to the buffer that initiated the denite transaction
# and execute the command from it. This should be changed when we
# have a better way to run requests out of the buffer.
# See issue#674
current_buffer = self.vim.current.buffer.number
if current_buffer != bufnr:
self.vim.command("tabedit %")
self.vim.command(
"execute 'noautocmd keepalt buffer' {}".format(bufnr))
result = self.vim.funcs.LanguageClient_runSync(
'LanguageClient#workspace_symbol', prefix, {}) or []
if current_buffer != bufnr:
self.vim.command("tabclose")
candidates = convert_symbols_to_candidates(
result,
pwd=self.vim.funcs.getcwd())
return candidates
|
831e09baadf3e7c426bc5558c04dae234b2902d2
|
account_companyweb/tests/__init__.py
|
account_companyweb/tests/__init__.py
|
# -*- coding: utf-8 -*-
#
##############################################################################
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_companyweb
|
# -*- coding: utf-8 -*-
#
##############################################################################
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_companyweb
checks = [
test_companyweb,
]
|
Add checks on init file
|
[ADD] Add checks on init file
|
Python
|
agpl-3.0
|
QANSEE/l10n-belgium,Niboo/l10n-belgium,QANSEE/l10n-belgium,Noviat/l10n-belgium,acsone/l10n-belgium,akretion/l10n-belgium,Noviat/l10n-belgium,Niboo/l10n-belgium,acsone/l10n-belgium,akretion/l10n-belgium,yvaucher/l10n-belgium
|
# -*- coding: utf-8 -*-
#
##############################################################################
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_companyweb
[ADD] Add checks on init file
|
# -*- coding: utf-8 -*-
#
##############################################################################
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_companyweb
checks = [
test_companyweb,
]
|
<commit_before># -*- coding: utf-8 -*-
#
##############################################################################
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_companyweb
<commit_msg>[ADD] Add checks on init file<commit_after>
|
# -*- coding: utf-8 -*-
#
##############################################################################
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_companyweb
checks = [
test_companyweb,
]
|
# -*- coding: utf-8 -*-
#
##############################################################################
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_companyweb
[ADD] Add checks on init file# -*- coding: utf-8 -*-
#
##############################################################################
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_companyweb
checks = [
test_companyweb,
]
|
<commit_before># -*- coding: utf-8 -*-
#
##############################################################################
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_companyweb
<commit_msg>[ADD] Add checks on init file<commit_after># -*- coding: utf-8 -*-
#
##############################################################################
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_companyweb
checks = [
test_companyweb,
]
|
be8ac3ac13fee7db684c931cdc15be98ca6a283c
|
ample/util/tests/test_mrbump_util.py
|
ample/util/tests/test_mrbump_util.py
|
"""Test functions for util.mrbump_util"""
import cPickle
import os
import unittest
from ample.constants import AMPLE_PKL, SHARE_DIR
from ample.util import mrbump_util
class Test(unittest.TestCase):
    """Unit tests for ample.util.mrbump_util result-summary helpers."""
    @classmethod
    def setUpClass(cls):
        # Resolve the shared test-data locations once for the whole TestCase.
        cls.thisd = os.path.abspath( os.path.dirname( __file__ ) )
        cls.ample_share = SHARE_DIR
        cls.testfiles_dir = os.path.join(cls.ample_share,'testfiles')
    def test_final_summary(self):
        """finalSummary() produces a non-None summary from a pickled results dict."""
        pkl = os.path.join(self.testfiles_dir, AMPLE_PKL)
        # Silently skip when the pickle fixture is absent — presumably not
        # shipped with every install layout; TODO confirm against packaging.
        if not os.path.isfile(pkl): return
        with open(pkl) as f: d = cPickle.load(f)
        summary = mrbump_util.finalSummary(d)
        self.assertIsNotNone(summary)
    def test_topfiles(self):
        """topFiles() returns exactly three entries; the third carries an 'info' key."""
        topf = mrbump_util.ResultsSummary(results_pkl=os.path.join(self.testfiles_dir, AMPLE_PKL)).topFiles()
        self.assertEqual(len(topf),3)
        self.assertIn('info',topf[2])
if __name__ == "__main__":
unittest.main()
|
"""Test functions for util.mrbump_util"""
import cPickle
import os
import unittest
from ample.constants import AMPLE_PKL, SHARE_DIR
from ample.util import mrbump_util
class Test(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.thisd = os.path.abspath( os.path.dirname( __file__ ) )
cls.ample_share = SHARE_DIR
cls.testfiles_dir = os.path.join(cls.ample_share,'testfiles')
def test_final_summary(self):
pkl = os.path.join(self.testfiles_dir, AMPLE_PKL)
if not os.path.isfile(pkl): return
with open(pkl) as f: d = cPickle.load(f)
summary = mrbump_util.finalSummary(d)
self.assertIsNotNone(summary)
def test_topfiles(self):
topf = mrbump_util.ResultsSummary(results_pkl=os.path.join(self.testfiles_dir, AMPLE_PKL)).topFiles()
self.assertEqual(len(topf),3)
self.assertEqual(topf[2]['source'],'SHELXE trace of MR result')
if __name__ == "__main__":
unittest.main()
|
Update unit test for changes to topf
|
Update unit test for changes to topf
|
Python
|
bsd-3-clause
|
rigdenlab/ample,rigdenlab/ample,linucks/ample,linucks/ample
|
"""Test functions for util.mrbump_util"""
import cPickle
import os
import unittest
from ample.constants import AMPLE_PKL, SHARE_DIR
from ample.util import mrbump_util
class Test(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.thisd = os.path.abspath( os.path.dirname( __file__ ) )
cls.ample_share = SHARE_DIR
cls.testfiles_dir = os.path.join(cls.ample_share,'testfiles')
def test_final_summary(self):
pkl = os.path.join(self.testfiles_dir, AMPLE_PKL)
if not os.path.isfile(pkl): return
with open(pkl) as f: d = cPickle.load(f)
summary = mrbump_util.finalSummary(d)
self.assertIsNotNone(summary)
def test_topfiles(self):
topf = mrbump_util.ResultsSummary(results_pkl=os.path.join(self.testfiles_dir, AMPLE_PKL)).topFiles()
self.assertEqual(len(topf),3)
self.assertIn('info',topf[2])
if __name__ == "__main__":
unittest.main()
Update unit test for changes to topf
|
"""Test functions for util.mrbump_util"""
import cPickle
import os
import unittest
from ample.constants import AMPLE_PKL, SHARE_DIR
from ample.util import mrbump_util
class Test(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.thisd = os.path.abspath( os.path.dirname( __file__ ) )
cls.ample_share = SHARE_DIR
cls.testfiles_dir = os.path.join(cls.ample_share,'testfiles')
def test_final_summary(self):
pkl = os.path.join(self.testfiles_dir, AMPLE_PKL)
if not os.path.isfile(pkl): return
with open(pkl) as f: d = cPickle.load(f)
summary = mrbump_util.finalSummary(d)
self.assertIsNotNone(summary)
def test_topfiles(self):
topf = mrbump_util.ResultsSummary(results_pkl=os.path.join(self.testfiles_dir, AMPLE_PKL)).topFiles()
self.assertEqual(len(topf),3)
self.assertEqual(topf[2]['source'],'SHELXE trace of MR result')
if __name__ == "__main__":
unittest.main()
|
<commit_before>"""Test functions for util.mrbump_util"""
import cPickle
import os
import unittest
from ample.constants import AMPLE_PKL, SHARE_DIR
from ample.util import mrbump_util
class Test(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.thisd = os.path.abspath( os.path.dirname( __file__ ) )
cls.ample_share = SHARE_DIR
cls.testfiles_dir = os.path.join(cls.ample_share,'testfiles')
def test_final_summary(self):
pkl = os.path.join(self.testfiles_dir, AMPLE_PKL)
if not os.path.isfile(pkl): return
with open(pkl) as f: d = cPickle.load(f)
summary = mrbump_util.finalSummary(d)
self.assertIsNotNone(summary)
def test_topfiles(self):
topf = mrbump_util.ResultsSummary(results_pkl=os.path.join(self.testfiles_dir, AMPLE_PKL)).topFiles()
self.assertEqual(len(topf),3)
self.assertIn('info',topf[2])
if __name__ == "__main__":
unittest.main()
<commit_msg>Update unit test for changes to topf<commit_after>
|
"""Test functions for util.mrbump_util"""
import cPickle
import os
import unittest
from ample.constants import AMPLE_PKL, SHARE_DIR
from ample.util import mrbump_util
class Test(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.thisd = os.path.abspath( os.path.dirname( __file__ ) )
cls.ample_share = SHARE_DIR
cls.testfiles_dir = os.path.join(cls.ample_share,'testfiles')
def test_final_summary(self):
pkl = os.path.join(self.testfiles_dir, AMPLE_PKL)
if not os.path.isfile(pkl): return
with open(pkl) as f: d = cPickle.load(f)
summary = mrbump_util.finalSummary(d)
self.assertIsNotNone(summary)
def test_topfiles(self):
topf = mrbump_util.ResultsSummary(results_pkl=os.path.join(self.testfiles_dir, AMPLE_PKL)).topFiles()
self.assertEqual(len(topf),3)
self.assertEqual(topf[2]['source'],'SHELXE trace of MR result')
if __name__ == "__main__":
unittest.main()
|
"""Test functions for util.mrbump_util"""
import cPickle
import os
import unittest
from ample.constants import AMPLE_PKL, SHARE_DIR
from ample.util import mrbump_util
class Test(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.thisd = os.path.abspath( os.path.dirname( __file__ ) )
cls.ample_share = SHARE_DIR
cls.testfiles_dir = os.path.join(cls.ample_share,'testfiles')
def test_final_summary(self):
pkl = os.path.join(self.testfiles_dir, AMPLE_PKL)
if not os.path.isfile(pkl): return
with open(pkl) as f: d = cPickle.load(f)
summary = mrbump_util.finalSummary(d)
self.assertIsNotNone(summary)
def test_topfiles(self):
topf = mrbump_util.ResultsSummary(results_pkl=os.path.join(self.testfiles_dir, AMPLE_PKL)).topFiles()
self.assertEqual(len(topf),3)
self.assertIn('info',topf[2])
if __name__ == "__main__":
unittest.main()
Update unit test for changes to topf"""Test functions for util.mrbump_util"""
import cPickle
import os
import unittest
from ample.constants import AMPLE_PKL, SHARE_DIR
from ample.util import mrbump_util
class Test(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.thisd = os.path.abspath( os.path.dirname( __file__ ) )
cls.ample_share = SHARE_DIR
cls.testfiles_dir = os.path.join(cls.ample_share,'testfiles')
def test_final_summary(self):
pkl = os.path.join(self.testfiles_dir, AMPLE_PKL)
if not os.path.isfile(pkl): return
with open(pkl) as f: d = cPickle.load(f)
summary = mrbump_util.finalSummary(d)
self.assertIsNotNone(summary)
def test_topfiles(self):
topf = mrbump_util.ResultsSummary(results_pkl=os.path.join(self.testfiles_dir, AMPLE_PKL)).topFiles()
self.assertEqual(len(topf),3)
self.assertEqual(topf[2]['source'],'SHELXE trace of MR result')
if __name__ == "__main__":
unittest.main()
|
<commit_before>"""Test functions for util.mrbump_util"""
import cPickle
import os
import unittest
from ample.constants import AMPLE_PKL, SHARE_DIR
from ample.util import mrbump_util
class Test(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.thisd = os.path.abspath( os.path.dirname( __file__ ) )
cls.ample_share = SHARE_DIR
cls.testfiles_dir = os.path.join(cls.ample_share,'testfiles')
def test_final_summary(self):
pkl = os.path.join(self.testfiles_dir, AMPLE_PKL)
if not os.path.isfile(pkl): return
with open(pkl) as f: d = cPickle.load(f)
summary = mrbump_util.finalSummary(d)
self.assertIsNotNone(summary)
def test_topfiles(self):
topf = mrbump_util.ResultsSummary(results_pkl=os.path.join(self.testfiles_dir, AMPLE_PKL)).topFiles()
self.assertEqual(len(topf),3)
self.assertIn('info',topf[2])
if __name__ == "__main__":
unittest.main()
<commit_msg>Update unit test for changes to topf<commit_after>"""Test functions for util.mrbump_util"""
import cPickle
import os
import unittest
from ample.constants import AMPLE_PKL, SHARE_DIR
from ample.util import mrbump_util
class Test(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.thisd = os.path.abspath( os.path.dirname( __file__ ) )
cls.ample_share = SHARE_DIR
cls.testfiles_dir = os.path.join(cls.ample_share,'testfiles')
def test_final_summary(self):
pkl = os.path.join(self.testfiles_dir, AMPLE_PKL)
if not os.path.isfile(pkl): return
with open(pkl) as f: d = cPickle.load(f)
summary = mrbump_util.finalSummary(d)
self.assertIsNotNone(summary)
def test_topfiles(self):
topf = mrbump_util.ResultsSummary(results_pkl=os.path.join(self.testfiles_dir, AMPLE_PKL)).topFiles()
self.assertEqual(len(topf),3)
self.assertEqual(topf[2]['source'],'SHELXE trace of MR result')
if __name__ == "__main__":
unittest.main()
|
cdb1b2b2a90010f6395abc813e27977560c659ba
|
numpy/distutils/msvccompiler.py
|
numpy/distutils/msvccompiler.py
|
import os
import distutils.msvccompiler
from distutils.msvccompiler import *
from .system_info import platform_bits
class MSVCCompiler(distutils.msvccompiler.MSVCCompiler):
    """MSVC compiler wrapper that preserves caller-supplied ``lib``/``include``
    environment variables across initialization and enables SSE2 on 32-bit
    builds."""

    def __init__(self, verbose=0, dry_run=0, force=0):
        distutils.msvccompiler.MSVCCompiler.__init__(self, verbose, dry_run, force)

    def initialize(self, plat_name=None):
        """Initialize the base compiler, then re-prepend any pre-existing
        ``lib``/``include`` values that the base class overwrote."""
        # Snapshot the variables before the base class rewrites os.environ.
        preset = {}
        for name in ('lib', 'include'):
            preset[name] = os.getenv(name)
        distutils.msvccompiler.MSVCCompiler.initialize(self, plat_name)
        # Prepend the saved values so caller-supplied paths win the search.
        for name in ('lib', 'include'):
            if preset[name] is not None:
                os.environ[name] = preset[name] + os.environ[name]
        if platform_bits == 32:
            for options in (self.compile_options, self.compile_options_debug):
                options.append('/arch:SSE2')
|
import os
import distutils.msvccompiler
from distutils.msvccompiler import *
from .system_info import platform_bits
class MSVCCompiler(distutils.msvccompiler.MSVCCompiler):
def __init__(self, verbose=0, dry_run=0, force=0):
distutils.msvccompiler.MSVCCompiler.__init__(self, verbose, dry_run, force)
def initialize(self, plat_name=None):
environ_lib = os.getenv('lib')
environ_include = os.getenv('include')
distutils.msvccompiler.MSVCCompiler.initialize(self, plat_name)
if environ_lib is not None:
os.environ['lib'] = environ_lib + os.environ['lib']
if environ_include is not None:
os.environ['include'] = environ_include + os.environ['include']
if platform_bits == 32:
# msvc9 building for 32 bits requires SSE2 to work around a
# compiler bug.
self.compile_options += ['/arch:SSE2']
self.compile_options_debug += ['/arch:SSE2']
|
Document the reason msvc requires SSE2 on 32 bit platforms.
|
DOC: Document the reason msvc requires SSE2 on 32 bit platforms.
|
Python
|
bsd-3-clause
|
ViralLeadership/numpy,ChristopherHogan/numpy,gfyoung/numpy,argriffing/numpy,abalkin/numpy,drasmuss/numpy,bertrand-l/numpy,numpy/numpy,mhvk/numpy,mattip/numpy,shoyer/numpy,abalkin/numpy,numpy/numpy,moreati/numpy,nbeaver/numpy,rgommers/numpy,SiccarPoint/numpy,mattip/numpy,ahaldane/numpy,chiffa/numpy,stuarteberg/numpy,argriffing/numpy,mwiebe/numpy,WillieMaddox/numpy,SiccarPoint/numpy,kiwifb/numpy,pizzathief/numpy,simongibbons/numpy,leifdenby/numpy,rherault-insa/numpy,groutr/numpy,endolith/numpy,WarrenWeckesser/numpy,ddasilva/numpy,endolith/numpy,gmcastil/numpy,solarjoe/numpy,rgommers/numpy,stuarteberg/numpy,joferkington/numpy,seberg/numpy,pdebuyl/numpy,ContinuumIO/numpy,endolith/numpy,pdebuyl/numpy,tacaswell/numpy,abalkin/numpy,charris/numpy,MSeifert04/numpy,ddasilva/numpy,Eric89GXL/numpy,WillieMaddox/numpy,chatcannon/numpy,pyparallel/numpy,jakirkham/numpy,stuarteberg/numpy,ssanderson/numpy,AustereCuriosity/numpy,grlee77/numpy,madphysicist/numpy,utke1/numpy,grlee77/numpy,MSeifert04/numpy,jonathanunderwood/numpy,rherault-insa/numpy,pizzathief/numpy,charris/numpy,ChristopherHogan/numpy,utke1/numpy,utke1/numpy,gfyoung/numpy,WarrenWeckesser/numpy,moreati/numpy,dwillmer/numpy,solarjoe/numpy,ssanderson/numpy,nbeaver/numpy,dwillmer/numpy,pizzathief/numpy,solarjoe/numpy,kiwifb/numpy,Dapid/numpy,seberg/numpy,WarrenWeckesser/numpy,ViralLeadership/numpy,AustereCuriosity/numpy,jakirkham/numpy,skwbc/numpy,bertrand-l/numpy,mattip/numpy,seberg/numpy,grlee77/numpy,shoyer/numpy,anntzer/numpy,pbrod/numpy,Eric89GXL/numpy,groutr/numpy,behzadnouri/numpy,SiccarPoint/numpy,mhvk/numpy,mhvk/numpy,pyparallel/numpy,mattip/numpy,maniteja123/numpy,WarrenWeckesser/numpy,shoyer/numpy,tynn/numpy,shoyer/numpy,simongibbons/numpy,maniteja123/numpy,madphysicist/numpy,tynn/numpy,pyparallel/numpy,nbeaver/numpy,mhvk/numpy,maniteja123/numpy,b-carter/numpy,kiwifb/numpy,gfyoung/numpy,joferkington/numpy,madphysicist/numpy,jorisvandenbossche/numpy,argriffing/numpy,tynn/numpy,ahaldane/numpy,ESSS/numpy,ContinuumIO
/numpy,mwiebe/numpy,jakirkham/numpy,Eric89GXL/numpy,ahaldane/numpy,anntzer/numpy,WillieMaddox/numpy,skwbc/numpy,stuarteberg/numpy,ahaldane/numpy,endolith/numpy,skwbc/numpy,bringingheavendown/numpy,mwiebe/numpy,ESSS/numpy,pdebuyl/numpy,bringingheavendown/numpy,pizzathief/numpy,jakirkham/numpy,seberg/numpy,numpy/numpy,behzadnouri/numpy,groutr/numpy,behzadnouri/numpy,ChristopherHogan/numpy,dwillmer/numpy,anntzer/numpy,tacaswell/numpy,Eric89GXL/numpy,simongibbons/numpy,pizzathief/numpy,charris/numpy,dwillmer/numpy,ddasilva/numpy,ahaldane/numpy,madphysicist/numpy,ContinuumIO/numpy,MSeifert04/numpy,njase/numpy,pbrod/numpy,ESSS/numpy,simongibbons/numpy,mhvk/numpy,b-carter/numpy,WarrenWeckesser/numpy,jorisvandenbossche/numpy,pbrod/numpy,moreati/numpy,rgommers/numpy,chatcannon/numpy,pdebuyl/numpy,pbrod/numpy,anntzer/numpy,bertrand-l/numpy,rgommers/numpy,jakirkham/numpy,jorisvandenbossche/numpy,gmcastil/numpy,pbrod/numpy,joferkington/numpy,jonathanunderwood/numpy,leifdenby/numpy,bringingheavendown/numpy,grlee77/numpy,b-carter/numpy,SiccarPoint/numpy,rherault-insa/numpy,AustereCuriosity/numpy,Dapid/numpy,jorisvandenbossche/numpy,drasmuss/numpy,leifdenby/numpy,gmcastil/numpy,grlee77/numpy,ssanderson/numpy,chiffa/numpy,jorisvandenbossche/numpy,numpy/numpy,njase/numpy,charris/numpy,simongibbons/numpy,drasmuss/numpy,madphysicist/numpy,njase/numpy,tacaswell/numpy,Dapid/numpy,joferkington/numpy,chatcannon/numpy,chiffa/numpy,shoyer/numpy,ViralLeadership/numpy,MSeifert04/numpy,MSeifert04/numpy,jonathanunderwood/numpy,ChristopherHogan/numpy
|
import os
import distutils.msvccompiler
from distutils.msvccompiler import *
from .system_info import platform_bits
class MSVCCompiler(distutils.msvccompiler.MSVCCompiler):
def __init__(self, verbose=0, dry_run=0, force=0):
distutils.msvccompiler.MSVCCompiler.__init__(self, verbose, dry_run, force)
def initialize(self, plat_name=None):
environ_lib = os.getenv('lib')
environ_include = os.getenv('include')
distutils.msvccompiler.MSVCCompiler.initialize(self, plat_name)
if environ_lib is not None:
os.environ['lib'] = environ_lib + os.environ['lib']
if environ_include is not None:
os.environ['include'] = environ_include + os.environ['include']
if platform_bits == 32:
self.compile_options += ['/arch:SSE2']
self.compile_options_debug += ['/arch:SSE2']
DOC: Document the reason msvc requires SSE2 on 32 bit platforms.
|
import os
import distutils.msvccompiler
from distutils.msvccompiler import *
from .system_info import platform_bits
class MSVCCompiler(distutils.msvccompiler.MSVCCompiler):
def __init__(self, verbose=0, dry_run=0, force=0):
distutils.msvccompiler.MSVCCompiler.__init__(self, verbose, dry_run, force)
def initialize(self, plat_name=None):
environ_lib = os.getenv('lib')
environ_include = os.getenv('include')
distutils.msvccompiler.MSVCCompiler.initialize(self, plat_name)
if environ_lib is not None:
os.environ['lib'] = environ_lib + os.environ['lib']
if environ_include is not None:
os.environ['include'] = environ_include + os.environ['include']
if platform_bits == 32:
# msvc9 building for 32 bits requires SSE2 to work around a
# compiler bug.
self.compile_options += ['/arch:SSE2']
self.compile_options_debug += ['/arch:SSE2']
|
<commit_before>import os
import distutils.msvccompiler
from distutils.msvccompiler import *
from .system_info import platform_bits
class MSVCCompiler(distutils.msvccompiler.MSVCCompiler):
def __init__(self, verbose=0, dry_run=0, force=0):
distutils.msvccompiler.MSVCCompiler.__init__(self, verbose, dry_run, force)
def initialize(self, plat_name=None):
environ_lib = os.getenv('lib')
environ_include = os.getenv('include')
distutils.msvccompiler.MSVCCompiler.initialize(self, plat_name)
if environ_lib is not None:
os.environ['lib'] = environ_lib + os.environ['lib']
if environ_include is not None:
os.environ['include'] = environ_include + os.environ['include']
if platform_bits == 32:
self.compile_options += ['/arch:SSE2']
self.compile_options_debug += ['/arch:SSE2']
<commit_msg>DOC: Document the reason msvc requires SSE2 on 32 bit platforms.<commit_after>
|
import os
import distutils.msvccompiler
from distutils.msvccompiler import *
from .system_info import platform_bits
class MSVCCompiler(distutils.msvccompiler.MSVCCompiler):
def __init__(self, verbose=0, dry_run=0, force=0):
distutils.msvccompiler.MSVCCompiler.__init__(self, verbose, dry_run, force)
def initialize(self, plat_name=None):
environ_lib = os.getenv('lib')
environ_include = os.getenv('include')
distutils.msvccompiler.MSVCCompiler.initialize(self, plat_name)
if environ_lib is not None:
os.environ['lib'] = environ_lib + os.environ['lib']
if environ_include is not None:
os.environ['include'] = environ_include + os.environ['include']
if platform_bits == 32:
# msvc9 building for 32 bits requires SSE2 to work around a
# compiler bug.
self.compile_options += ['/arch:SSE2']
self.compile_options_debug += ['/arch:SSE2']
|
import os
import distutils.msvccompiler
from distutils.msvccompiler import *
from .system_info import platform_bits
class MSVCCompiler(distutils.msvccompiler.MSVCCompiler):
def __init__(self, verbose=0, dry_run=0, force=0):
distutils.msvccompiler.MSVCCompiler.__init__(self, verbose, dry_run, force)
def initialize(self, plat_name=None):
environ_lib = os.getenv('lib')
environ_include = os.getenv('include')
distutils.msvccompiler.MSVCCompiler.initialize(self, plat_name)
if environ_lib is not None:
os.environ['lib'] = environ_lib + os.environ['lib']
if environ_include is not None:
os.environ['include'] = environ_include + os.environ['include']
if platform_bits == 32:
self.compile_options += ['/arch:SSE2']
self.compile_options_debug += ['/arch:SSE2']
DOC: Document the reason msvc requires SSE2 on 32 bit platforms.import os
import distutils.msvccompiler
from distutils.msvccompiler import *
from .system_info import platform_bits
class MSVCCompiler(distutils.msvccompiler.MSVCCompiler):
def __init__(self, verbose=0, dry_run=0, force=0):
distutils.msvccompiler.MSVCCompiler.__init__(self, verbose, dry_run, force)
def initialize(self, plat_name=None):
environ_lib = os.getenv('lib')
environ_include = os.getenv('include')
distutils.msvccompiler.MSVCCompiler.initialize(self, plat_name)
if environ_lib is not None:
os.environ['lib'] = environ_lib + os.environ['lib']
if environ_include is not None:
os.environ['include'] = environ_include + os.environ['include']
if platform_bits == 32:
# msvc9 building for 32 bits requires SSE2 to work around a
# compiler bug.
self.compile_options += ['/arch:SSE2']
self.compile_options_debug += ['/arch:SSE2']
|
<commit_before>import os
import distutils.msvccompiler
from distutils.msvccompiler import *
from .system_info import platform_bits
class MSVCCompiler(distutils.msvccompiler.MSVCCompiler):
def __init__(self, verbose=0, dry_run=0, force=0):
distutils.msvccompiler.MSVCCompiler.__init__(self, verbose, dry_run, force)
def initialize(self, plat_name=None):
environ_lib = os.getenv('lib')
environ_include = os.getenv('include')
distutils.msvccompiler.MSVCCompiler.initialize(self, plat_name)
if environ_lib is not None:
os.environ['lib'] = environ_lib + os.environ['lib']
if environ_include is not None:
os.environ['include'] = environ_include + os.environ['include']
if platform_bits == 32:
self.compile_options += ['/arch:SSE2']
self.compile_options_debug += ['/arch:SSE2']
<commit_msg>DOC: Document the reason msvc requires SSE2 on 32 bit platforms.<commit_after>import os
import distutils.msvccompiler
from distutils.msvccompiler import *
from .system_info import platform_bits
class MSVCCompiler(distutils.msvccompiler.MSVCCompiler):
def __init__(self, verbose=0, dry_run=0, force=0):
distutils.msvccompiler.MSVCCompiler.__init__(self, verbose, dry_run, force)
def initialize(self, plat_name=None):
environ_lib = os.getenv('lib')
environ_include = os.getenv('include')
distutils.msvccompiler.MSVCCompiler.initialize(self, plat_name)
if environ_lib is not None:
os.environ['lib'] = environ_lib + os.environ['lib']
if environ_include is not None:
os.environ['include'] = environ_include + os.environ['include']
if platform_bits == 32:
# msvc9 building for 32 bits requires SSE2 to work around a
# compiler bug.
self.compile_options += ['/arch:SSE2']
self.compile_options_debug += ['/arch:SSE2']
|
701a6b4a4ed8a4db9f1b961cf8d5a1a6ef5c48a1
|
gratipay/renderers/csv_dump.py
|
gratipay/renderers/csv_dump.py
|
from __future__ import absolute_import, division, print_function, unicode_literals
import csv
from io import BytesIO
from aspen import renderers
class Renderer(renderers.Renderer):
    """Aspen renderer that serialises the rows produced by a simplate page
    as CSV text."""

    def render_content(self, context):
        """Evaluate the compiled page body and return its rows as CSV.

        Emits a header row first when the rows are namedtuples (detected
        via the ``_fields`` attribute); returns '' for an empty result.
        """
        context['response'].headers['Content-Type'] = 'text/plain'
        rows = eval(self.compiled, globals(), context)
        if not rows:
            return ''
        buf = BytesIO()
        writer = csv.writer(buf)
        first = rows[0]
        # namedtuple rows expose their column names via _fields.
        if hasattr(first, '_fields'):
            writer.writerow(first._fields)
        writer.writerows(rows)
        return buf.getvalue()
class Factory(renderers.Factory):
    # Register Renderer as the renderer class this aspen factory produces.
    Renderer = Renderer
|
from __future__ import absolute_import, division, print_function, unicode_literals
import csv
from io import BytesIO
from aspen import renderers
class Renderer(renderers.Renderer):
def render_content(self, context):
rows = eval(self.compiled, globals(), context)
if not rows:
return ''
f = BytesIO()
w = csv.writer(f)
if hasattr(rows[0], '_fields'):
w.writerow(rows[0]._fields)
w.writerows(rows)
f.seek(0)
return f.read()
class Factory(renderers.Factory):
Renderer = Renderer
|
Remove line that sets content type text/plain
|
Remove line that sets content type text/plain
|
Python
|
mit
|
studio666/gratipay.com,eXcomm/gratipay.com,gratipay/gratipay.com,eXcomm/gratipay.com,studio666/gratipay.com,mccolgst/www.gittip.com,gratipay/gratipay.com,mccolgst/www.gittip.com,eXcomm/gratipay.com,gratipay/gratipay.com,studio666/gratipay.com,studio666/gratipay.com,eXcomm/gratipay.com,mccolgst/www.gittip.com,gratipay/gratipay.com,mccolgst/www.gittip.com
|
from __future__ import absolute_import, division, print_function, unicode_literals
import csv
from io import BytesIO
from aspen import renderers
class Renderer(renderers.Renderer):
def render_content(self, context):
context['response'].headers['Content-Type'] = 'text/plain'
rows = eval(self.compiled, globals(), context)
if not rows:
return ''
f = BytesIO()
w = csv.writer(f)
if hasattr(rows[0], '_fields'):
w.writerow(rows[0]._fields)
w.writerows(rows)
f.seek(0)
return f.read()
class Factory(renderers.Factory):
Renderer = Renderer
Remove line that sets content type text/plain
|
from __future__ import absolute_import, division, print_function, unicode_literals
import csv
from io import BytesIO
from aspen import renderers
class Renderer(renderers.Renderer):
def render_content(self, context):
rows = eval(self.compiled, globals(), context)
if not rows:
return ''
f = BytesIO()
w = csv.writer(f)
if hasattr(rows[0], '_fields'):
w.writerow(rows[0]._fields)
w.writerows(rows)
f.seek(0)
return f.read()
class Factory(renderers.Factory):
Renderer = Renderer
|
<commit_before>from __future__ import absolute_import, division, print_function, unicode_literals
import csv
from io import BytesIO
from aspen import renderers
class Renderer(renderers.Renderer):
def render_content(self, context):
context['response'].headers['Content-Type'] = 'text/plain'
rows = eval(self.compiled, globals(), context)
if not rows:
return ''
f = BytesIO()
w = csv.writer(f)
if hasattr(rows[0], '_fields'):
w.writerow(rows[0]._fields)
w.writerows(rows)
f.seek(0)
return f.read()
class Factory(renderers.Factory):
Renderer = Renderer
<commit_msg>Remove line that sets content type text/plain<commit_after>
|
from __future__ import absolute_import, division, print_function, unicode_literals
import csv
from io import BytesIO
from aspen import renderers
class Renderer(renderers.Renderer):
def render_content(self, context):
rows = eval(self.compiled, globals(), context)
if not rows:
return ''
f = BytesIO()
w = csv.writer(f)
if hasattr(rows[0], '_fields'):
w.writerow(rows[0]._fields)
w.writerows(rows)
f.seek(0)
return f.read()
class Factory(renderers.Factory):
Renderer = Renderer
|
from __future__ import absolute_import, division, print_function, unicode_literals
import csv
from io import BytesIO
from aspen import renderers
class Renderer(renderers.Renderer):
def render_content(self, context):
context['response'].headers['Content-Type'] = 'text/plain'
rows = eval(self.compiled, globals(), context)
if not rows:
return ''
f = BytesIO()
w = csv.writer(f)
if hasattr(rows[0], '_fields'):
w.writerow(rows[0]._fields)
w.writerows(rows)
f.seek(0)
return f.read()
class Factory(renderers.Factory):
Renderer = Renderer
Remove line that sets content type text/plainfrom __future__ import absolute_import, division, print_function, unicode_literals
import csv
from io import BytesIO
from aspen import renderers
class Renderer(renderers.Renderer):
def render_content(self, context):
rows = eval(self.compiled, globals(), context)
if not rows:
return ''
f = BytesIO()
w = csv.writer(f)
if hasattr(rows[0], '_fields'):
w.writerow(rows[0]._fields)
w.writerows(rows)
f.seek(0)
return f.read()
class Factory(renderers.Factory):
Renderer = Renderer
|
<commit_before>from __future__ import absolute_import, division, print_function, unicode_literals
import csv
from io import BytesIO
from aspen import renderers
class Renderer(renderers.Renderer):
def render_content(self, context):
context['response'].headers['Content-Type'] = 'text/plain'
rows = eval(self.compiled, globals(), context)
if not rows:
return ''
f = BytesIO()
w = csv.writer(f)
if hasattr(rows[0], '_fields'):
w.writerow(rows[0]._fields)
w.writerows(rows)
f.seek(0)
return f.read()
class Factory(renderers.Factory):
Renderer = Renderer
<commit_msg>Remove line that sets content type text/plain<commit_after>from __future__ import absolute_import, division, print_function, unicode_literals
import csv
from io import BytesIO
from aspen import renderers
class Renderer(renderers.Renderer):
def render_content(self, context):
rows = eval(self.compiled, globals(), context)
if not rows:
return ''
f = BytesIO()
w = csv.writer(f)
if hasattr(rows[0], '_fields'):
w.writerow(rows[0]._fields)
w.writerows(rows)
f.seek(0)
return f.read()
class Factory(renderers.Factory):
Renderer = Renderer
|
2bf8888b3c39b8d044b1bc7bd196e0bbe275c583
|
konstrukteur/HtmlParser.py
|
konstrukteur/HtmlParser.py
|
#
# Konstrukteur - Static website generator
# Copyright 2013 Sebastian Fastner
#
__all__ = ["parse"]
from jasy.env.State import session
from jasy.core import Console
from bs4 import BeautifulSoup
def parse(filename):
    """Parse a static HTML file into a Konstrukteur page dict.

    Returns a dict with "content" (the serialised markup inside <body>),
    "title" (the document title) and one lower-cased entry per named
    <meta> tag.
    """
    page = {}
    # Use a context manager so the file handle is closed deterministically
    # instead of leaking until garbage collection.
    with open(filename, "rt") as fileHandle:
        parsedContent = BeautifulSoup(fileHandle.read())
    page["content"] = "".join([str(tag) for tag in parsedContent.find("body").contents])
    page["title"] = parsedContent.title.string
    for meta in parsedContent.find_all("meta"):
        # HTML <meta> carries its value in the "content" attribute —
        # indexing "contents" raises KeyError in BeautifulSoup. Also skip
        # tags without a name/content pair (e.g. <meta charset=...>).
        if meta.has_attr("name") and meta.has_attr("content"):
            page[meta["name"].lower()] = meta["content"]
    return page
|
#
# Konstrukteur - Static website generator
# Copyright 2013 Sebastian Fastner
#
__all__ = ["parse"]
from jasy.env.State import session
from jasy.core import Console
from bs4 import BeautifulSoup
def parse(filename):
    """Parse a static HTML file into a Konstrukteur page dict.

    Returns a dict with "content" (the serialised markup inside <body>),
    "title", "summary" (text of the first paragraph, '' when the body has
    no <p>) and one lower-cased entry per named <meta> tag.
    """
    page = {}
    # Use a context manager so the file handle is closed deterministically
    # instead of leaking until garbage collection.
    with open(filename, "rt") as fileHandle:
        parsedContent = BeautifulSoup(fileHandle.read())
    body = parsedContent.find("body")
    page["content"] = "".join([str(tag) for tag in body.contents])
    page["title"] = parsedContent.title.string
    # body.p is None when the page has no paragraph; fall back to an empty
    # summary instead of raising AttributeError.
    page["summary"] = body.p.get_text() if body.p is not None else ""
    for meta in parsedContent.find_all("meta"):
        # HTML <meta> carries its value in the "content" attribute —
        # indexing "contents" raises KeyError in BeautifulSoup. Also skip
        # tags without a name/content pair (e.g. <meta charset=...>).
        if meta.has_attr("name") and meta.has_attr("content"):
            page[meta["name"].lower()] = meta["content"]
    return page
|
Add summary to html parser
|
Add summary to html parser
|
Python
|
mit
|
fastner/konstrukteur,fastner/konstrukteur,fastner/konstrukteur
|
#
# Konstrukteur - Static website generator
# Copyright 2013 Sebastian Fastner
#
__all__ = ["parse"]
from jasy.env.State import session
from jasy.core import Console
from bs4 import BeautifulSoup
def parse(filename):
""" HTML parser class for Konstrukteur """
page = {}
parsedContent = BeautifulSoup(open(filename, "rt").read())
page["content"] = "".join([str(tag) for tag in parsedContent.find("body").contents])
page["title"] = parsedContent.title.string
for meta in parsedContent.find_all("meta"):
page[meta["name"].lower()] = meta["contents"]
return pageAdd summary to html parser
|
#
# Konstrukteur - Static website generator
# Copyright 2013 Sebastian Fastner
#
__all__ = ["parse"]
from jasy.env.State import session
from jasy.core import Console
from bs4 import BeautifulSoup
def parse(filename):
""" HTML parser class for Konstrukteur """
page = {}
parsedContent = BeautifulSoup(open(filename, "rt").read())
body = parsedContent.find("body")
page["content"] = "".join([str(tag) for tag in body.contents])
page["title"] = parsedContent.title.string
page["summary"] = body.p.get_text()
for meta in parsedContent.find_all("meta"):
page[meta["name"].lower()] = meta["contents"]
return page
|
<commit_before>#
# Konstrukteur - Static website generator
# Copyright 2013 Sebastian Fastner
#
__all__ = ["parse"]
from jasy.env.State import session
from jasy.core import Console
from bs4 import BeautifulSoup
def parse(filename):
""" HTML parser class for Konstrukteur """
page = {}
parsedContent = BeautifulSoup(open(filename, "rt").read())
page["content"] = "".join([str(tag) for tag in parsedContent.find("body").contents])
page["title"] = parsedContent.title.string
for meta in parsedContent.find_all("meta"):
page[meta["name"].lower()] = meta["contents"]
return page<commit_msg>Add summary to html parser<commit_after>
|
#
# Konstrukteur - Static website generator
# Copyright 2013 Sebastian Fastner
#
__all__ = ["parse"]
from jasy.env.State import session
from jasy.core import Console
from bs4 import BeautifulSoup
def parse(filename):
""" HTML parser class for Konstrukteur """
page = {}
parsedContent = BeautifulSoup(open(filename, "rt").read())
body = parsedContent.find("body")
page["content"] = "".join([str(tag) for tag in body.contents])
page["title"] = parsedContent.title.string
page["summary"] = body.p.get_text()
for meta in parsedContent.find_all("meta"):
page[meta["name"].lower()] = meta["contents"]
return page
|
#
# Konstrukteur - Static website generator
# Copyright 2013 Sebastian Fastner
#
__all__ = ["parse"]
from jasy.env.State import session
from jasy.core import Console
from bs4 import BeautifulSoup
def parse(filename):
""" HTML parser class for Konstrukteur """
page = {}
parsedContent = BeautifulSoup(open(filename, "rt").read())
page["content"] = "".join([str(tag) for tag in parsedContent.find("body").contents])
page["title"] = parsedContent.title.string
for meta in parsedContent.find_all("meta"):
page[meta["name"].lower()] = meta["contents"]
return pageAdd summary to html parser#
# Konstrukteur - Static website generator
# Copyright 2013 Sebastian Fastner
#
__all__ = ["parse"]
from jasy.env.State import session
from jasy.core import Console
from bs4 import BeautifulSoup
def parse(filename):
""" HTML parser class for Konstrukteur """
page = {}
parsedContent = BeautifulSoup(open(filename, "rt").read())
body = parsedContent.find("body")
page["content"] = "".join([str(tag) for tag in body.contents])
page["title"] = parsedContent.title.string
page["summary"] = body.p.get_text()
for meta in parsedContent.find_all("meta"):
page[meta["name"].lower()] = meta["contents"]
return page
|
<commit_before>#
# Konstrukteur - Static website generator
# Copyright 2013 Sebastian Fastner
#
__all__ = ["parse"]
from jasy.env.State import session
from jasy.core import Console
from bs4 import BeautifulSoup
def parse(filename):
""" HTML parser class for Konstrukteur """
page = {}
parsedContent = BeautifulSoup(open(filename, "rt").read())
page["content"] = "".join([str(tag) for tag in parsedContent.find("body").contents])
page["title"] = parsedContent.title.string
for meta in parsedContent.find_all("meta"):
page[meta["name"].lower()] = meta["contents"]
return page<commit_msg>Add summary to html parser<commit_after>#
# Konstrukteur - Static website generator
# Copyright 2013 Sebastian Fastner
#
__all__ = ["parse"]
from jasy.env.State import session
from jasy.core import Console
from bs4 import BeautifulSoup
def parse(filename):
""" HTML parser class for Konstrukteur """
page = {}
parsedContent = BeautifulSoup(open(filename, "rt").read())
body = parsedContent.find("body")
page["content"] = "".join([str(tag) for tag in body.contents])
page["title"] = parsedContent.title.string
page["summary"] = body.p.get_text()
for meta in parsedContent.find_all("meta"):
page[meta["name"].lower()] = meta["contents"]
return page
|
2806517a37791b2b72e534a037bbc914cf33ba7c
|
fabfile.py
|
fabfile.py
|
from fabric.api import *
def update():
    """Pull the latest code on the remote host and restart the web server.

    Requires the ``code_root`` fabric env variable to be set.
    """
    require('code_root')
    git_pull()
    restart_web_server()
def git_pull():
    """Stash any local changes, then pull the latest commits in ``env.code_root``."""
    run('cd %s; git stash; git pull' % (env.code_root))
def restart_web_server():
    """Restart the Apache instance living next to the code checkout
    (``env.code_root_parent/apache2``)."""
    run('%s/apache2/bin/restart' % env.code_root_parent)
def migrate():
    """Run Django database migrations using ``env.settings_file`` as the settings module."""
    run('cd %s; python manage.py migrate --settings=%s' % (env.code_root, env.settings_file))
def collect_static():
    """Collect Django static files non-interactively (``--noinput``)."""
    run('cd %s; python manage.py collectstatic --settings=%s --noinput' % (env.code_root, env.settings_file))
def pip_install():
    """Install pinned requirements from ``requirements/frozen.txt`` on the remote host."""
    run('cd %s; pip install -r requirements/frozen.txt' % (env.code_root))
def publish_changes():
    """Full deploy: update code, install deps, migrate, collect static, restart.

    NOTE(review): ``update()`` already restarts the web server once; the final
    ``restart_web_server()`` restarts it a second time -- presumably so the
    freshly installed requirements and migrations take effect.  Confirm the
    double restart is intentional.
    """
    update()
    pip_install()
    migrate()
    collect_static()
    restart_web_server()
|
from fabric.api import *
def update():
"""Requires code_root env variable. Does a git pull and restarts the web server"""
require('code_root')
git_pull()
restart_web_server()
def git_pull():
"""Does a git stash then a git pull on the project"""
run('cd %s; git stash; git pull' % (env.code_root))
def restart_web_server():
"""Restart the web server"""
run('%s/apache2/bin/restart' % env.code_root_parent)
def migrate():
"""Runs python manage.py migrate"""
run('cd %s; python manage.py migrate --settings=%s' % (env.code_root, env.settings_file))
def collect_static():
"""Runs python manage.py collect_static --noinput"""
run('cd %s; python manage.py collectstatic --settings=%s --noinput' % (env.code_root, env.settings_file))
def pip_install():
"""Runs pip install -r requirements/frozen.txt (for example site)"""
run('cd %s; pip install -r requirements/frozen.txt' % (env.code_root))
def publish_changes():
"""Runs these functions in order (git_pull, pip_install, migrate, collect_static, restart_web_server)"""
git_pull()
pip_install()
migrate()
collect_static()
restart_web_server()
|
Add some info in the fab tasks
|
Add some info in the fab tasks
|
Python
|
mit
|
leominov/fabric-bolt,madazone/fabric-bolt,maximon93/fabric-bolt,fabric-bolt/fabric-bolt,paperreduction/fabric-bolt,naphthalene/fabric-bolt,yourilima/fabric-bolt,damoguyan8844/fabric-bolt,brajput24/fabric-bolt,brajput24/fabric-bolt,Hedde/fabric-bolt,paperreduction/fabric-bolt,jproffitt/fabric-bolt,leominov/fabric-bolt,maximon93/fabric-bolt,qdqmedia/fabric-bolt,brajput24/fabric-bolt,gvangool/fabric-bolt,maximon93/fabric-bolt,worthwhile/fabric-bolt,worthwhile/fabric-bolt,Hedde/fabric-bolt,paperreduction/fabric-bolt,lethe3000/fabric-bolt,lethe3000/fabric-bolt,jproffitt/fabric-bolt,gvangool/fabric-bolt,qdqmedia/fabric-bolt,naphthalene/fabric-bolt,damoguyan8844/fabric-bolt,worthwhile/fabric-bolt,jproffitt/fabric-bolt,npardington/fabric-bolt,leominov/fabric-bolt,fabric-bolt/fabric-bolt,lethe3000/fabric-bolt,gvangool/fabric-bolt,npardington/fabric-bolt,qdqmedia/fabric-bolt,damoguyan8844/fabric-bolt,fabric-bolt/fabric-bolt,npardington/fabric-bolt
|
from fabric.api import *
def update():
require('code_root')
git_pull()
restart_web_server()
def git_pull():
run('cd %s; git stash; git pull' % (env.code_root))
def restart_web_server():
"Restart the web server"
run('%s/apache2/bin/restart' % env.code_root_parent)
def migrate():
run('cd %s; python manage.py migrate --settings=%s' % (env.code_root, env.settings_file))
def collect_static():
run('cd %s; python manage.py collectstatic --settings=%s --noinput' % (env.code_root, env.settings_file))
def pip_install():
run('cd %s; pip install -r requirements/frozen.txt' % (env.code_root))
def publish_changes():
update()
pip_install()
migrate()
collect_static()
restart_web_server()Add some info in the fab tasks
|
from fabric.api import *
def update():
"""Requires code_root env variable. Does a git pull and restarts the web server"""
require('code_root')
git_pull()
restart_web_server()
def git_pull():
"""Does a git stash then a git pull on the project"""
run('cd %s; git stash; git pull' % (env.code_root))
def restart_web_server():
"""Restart the web server"""
run('%s/apache2/bin/restart' % env.code_root_parent)
def migrate():
"""Runs python manage.py migrate"""
run('cd %s; python manage.py migrate --settings=%s' % (env.code_root, env.settings_file))
def collect_static():
"""Runs python manage.py collect_static --noinput"""
run('cd %s; python manage.py collectstatic --settings=%s --noinput' % (env.code_root, env.settings_file))
def pip_install():
"""Runs pip install -r requirements/frozen.txt (for example site)"""
run('cd %s; pip install -r requirements/frozen.txt' % (env.code_root))
def publish_changes():
"""Runs these functions in order (git_pull, pip_install, migrate, collect_static, restart_web_server)"""
git_pull()
pip_install()
migrate()
collect_static()
restart_web_server()
|
<commit_before>from fabric.api import *
def update():
require('code_root')
git_pull()
restart_web_server()
def git_pull():
run('cd %s; git stash; git pull' % (env.code_root))
def restart_web_server():
"Restart the web server"
run('%s/apache2/bin/restart' % env.code_root_parent)
def migrate():
run('cd %s; python manage.py migrate --settings=%s' % (env.code_root, env.settings_file))
def collect_static():
run('cd %s; python manage.py collectstatic --settings=%s --noinput' % (env.code_root, env.settings_file))
def pip_install():
run('cd %s; pip install -r requirements/frozen.txt' % (env.code_root))
def publish_changes():
update()
pip_install()
migrate()
collect_static()
restart_web_server()<commit_msg>Add some info in the fab tasks<commit_after>
|
from fabric.api import *
def update():
"""Requires code_root env variable. Does a git pull and restarts the web server"""
require('code_root')
git_pull()
restart_web_server()
def git_pull():
"""Does a git stash then a git pull on the project"""
run('cd %s; git stash; git pull' % (env.code_root))
def restart_web_server():
"""Restart the web server"""
run('%s/apache2/bin/restart' % env.code_root_parent)
def migrate():
"""Runs python manage.py migrate"""
run('cd %s; python manage.py migrate --settings=%s' % (env.code_root, env.settings_file))
def collect_static():
"""Runs python manage.py collect_static --noinput"""
run('cd %s; python manage.py collectstatic --settings=%s --noinput' % (env.code_root, env.settings_file))
def pip_install():
"""Runs pip install -r requirements/frozen.txt (for example site)"""
run('cd %s; pip install -r requirements/frozen.txt' % (env.code_root))
def publish_changes():
"""Runs these functions in order (git_pull, pip_install, migrate, collect_static, restart_web_server)"""
git_pull()
pip_install()
migrate()
collect_static()
restart_web_server()
|
from fabric.api import *
def update():
require('code_root')
git_pull()
restart_web_server()
def git_pull():
run('cd %s; git stash; git pull' % (env.code_root))
def restart_web_server():
"Restart the web server"
run('%s/apache2/bin/restart' % env.code_root_parent)
def migrate():
run('cd %s; python manage.py migrate --settings=%s' % (env.code_root, env.settings_file))
def collect_static():
run('cd %s; python manage.py collectstatic --settings=%s --noinput' % (env.code_root, env.settings_file))
def pip_install():
run('cd %s; pip install -r requirements/frozen.txt' % (env.code_root))
def publish_changes():
update()
pip_install()
migrate()
collect_static()
restart_web_server()Add some info in the fab tasksfrom fabric.api import *
def update():
"""Requires code_root env variable. Does a git pull and restarts the web server"""
require('code_root')
git_pull()
restart_web_server()
def git_pull():
"""Does a git stash then a git pull on the project"""
run('cd %s; git stash; git pull' % (env.code_root))
def restart_web_server():
"""Restart the web server"""
run('%s/apache2/bin/restart' % env.code_root_parent)
def migrate():
"""Runs python manage.py migrate"""
run('cd %s; python manage.py migrate --settings=%s' % (env.code_root, env.settings_file))
def collect_static():
"""Runs python manage.py collect_static --noinput"""
run('cd %s; python manage.py collectstatic --settings=%s --noinput' % (env.code_root, env.settings_file))
def pip_install():
"""Runs pip install -r requirements/frozen.txt (for example site)"""
run('cd %s; pip install -r requirements/frozen.txt' % (env.code_root))
def publish_changes():
"""Runs these functions in order (git_pull, pip_install, migrate, collect_static, restart_web_server)"""
git_pull()
pip_install()
migrate()
collect_static()
restart_web_server()
|
<commit_before>from fabric.api import *
def update():
require('code_root')
git_pull()
restart_web_server()
def git_pull():
run('cd %s; git stash; git pull' % (env.code_root))
def restart_web_server():
"Restart the web server"
run('%s/apache2/bin/restart' % env.code_root_parent)
def migrate():
run('cd %s; python manage.py migrate --settings=%s' % (env.code_root, env.settings_file))
def collect_static():
run('cd %s; python manage.py collectstatic --settings=%s --noinput' % (env.code_root, env.settings_file))
def pip_install():
run('cd %s; pip install -r requirements/frozen.txt' % (env.code_root))
def publish_changes():
update()
pip_install()
migrate()
collect_static()
restart_web_server()<commit_msg>Add some info in the fab tasks<commit_after>from fabric.api import *
def update():
"""Requires code_root env variable. Does a git pull and restarts the web server"""
require('code_root')
git_pull()
restart_web_server()
def git_pull():
"""Does a git stash then a git pull on the project"""
run('cd %s; git stash; git pull' % (env.code_root))
def restart_web_server():
"""Restart the web server"""
run('%s/apache2/bin/restart' % env.code_root_parent)
def migrate():
"""Runs python manage.py migrate"""
run('cd %s; python manage.py migrate --settings=%s' % (env.code_root, env.settings_file))
def collect_static():
"""Runs python manage.py collect_static --noinput"""
run('cd %s; python manage.py collectstatic --settings=%s --noinput' % (env.code_root, env.settings_file))
def pip_install():
"""Runs pip install -r requirements/frozen.txt (for example site)"""
run('cd %s; pip install -r requirements/frozen.txt' % (env.code_root))
def publish_changes():
"""Runs these functions in order (git_pull, pip_install, migrate, collect_static, restart_web_server)"""
git_pull()
pip_install()
migrate()
collect_static()
restart_web_server()
|
47dc7edf8ff16ed27e1e6b50415c8141d5ec6eb6
|
src/diamond/handler/Handler.py
|
src/diamond/handler/Handler.py
|
# coding=utf-8
import logging
import threading
import traceback
class Handler(object):
"""
Handlers process metrics that are collected by Collectors.
"""
def __init__(self, config=None):
"""
Create a new instance of the Handler class
"""
# Initialize Log
self.log = logging.getLogger('diamond')
# Initialize Data
self.config = config
# Initialize Lock
self.lock = threading.Condition(threading.Lock())
def _process(self, metric):
"""
Decorator for processing handlers with a lock, catching exceptions
"""
try:
self.log.debug("Running Handler %s locked" % (self))
self.lock.acquire()
self.process(metric)
self.lock.release()
except Exception:
self.log.error(traceback.format_exc())
finally:
self.lock.release()
self.log.debug("Unlocked Handler %s" % (self))
def process(self, metric):
"""
Process a metric
Should be overridden in subclasses
"""
raise NotImplementedError
def flush(self):
"""
Flush metrics
Optional: Should be overridden in subclasses
"""
pass
|
# coding=utf-8
import logging
import threading
import traceback
class Handler(object):
"""
Handlers process metrics that are collected by Collectors.
"""
def __init__(self, config=None):
"""
Create a new instance of the Handler class
"""
# Initialize Log
self.log = logging.getLogger('diamond')
# Initialize Data
self.config = config
# Initialize Lock
self.lock = threading.Condition(threading.Lock())
def _process(self, metric):
"""
Decorator for processing handlers with a lock, catching exceptions
"""
try:
try:
self.log.debug("Running Handler %s locked" % (self))
self.lock.acquire()
self.process(metric)
self.lock.release()
except Exception:
self.log.error(traceback.format_exc())
finally:
self.lock.release()
self.log.debug("Unlocked Handler %s" % (self))
def process(self, metric):
"""
Process a metric
Should be overridden in subclasses
"""
raise NotImplementedError
def flush(self):
"""
Flush metrics
Optional: Should be overridden in subclasses
"""
pass
|
Fix try-except-finally statement for python 2.4 support
|
Fix try-except-finally statement for python 2.4 support
|
Python
|
mit
|
codepython/Diamond,Nihn/Diamond-1,EzyInsights/Diamond,acquia/Diamond,cannium/Diamond,TAKEALOT/Diamond,stuartbfox/Diamond,Basis/Diamond,russss/Diamond,tuenti/Diamond,Ssawa/Diamond,dcsquared13/Diamond,disqus/Diamond,janisz/Diamond-1,gg7/diamond,hamelg/Diamond,EzyInsights/Diamond,krbaker/Diamond,TAKEALOT/Diamond,cannium/Diamond,disqus/Diamond,MichaelDoyle/Diamond,bmhatfield/Diamond,Ormod/Diamond,eMerzh/Diamond-1,hvnsweeting/Diamond,janisz/Diamond-1,Clever/Diamond,Ormod/Diamond,actmd/Diamond,ceph/Diamond,acquia/Diamond,Ormod/Diamond,ceph/Diamond,tuenti/Diamond,mfriedenhagen/Diamond,MediaMath/Diamond,joel-airspring/Diamond,joel-airspring/Diamond,tellapart/Diamond,codepython/Diamond,russss/Diamond,krbaker/Diamond,sebbrandt87/Diamond,timchenxiaoyu/Diamond,rtoma/Diamond,skbkontur/Diamond,jaingaurav/Diamond,mzupan/Diamond,stuartbfox/Diamond,h00dy/Diamond,signalfx/Diamond,Ensighten/Diamond,jumping/Diamond,thardie/Diamond,bmhatfield/Diamond,metamx/Diamond,tellapart/Diamond,ceph/Diamond,szibis/Diamond,dcsquared13/Diamond,thardie/Diamond,socialwareinc/Diamond,szibis/Diamond,Netuitive/Diamond,MediaMath/Diamond,codepython/Diamond,Basis/Diamond,jriguera/Diamond,bmhatfield/Diamond,MediaMath/Diamond,Slach/Diamond,Netuitive/netuitive-diamond,Precis/Diamond,anandbhoraskar/Diamond,mzupan/Diamond,actmd/Diamond,szibis/Diamond,Basis/Diamond,hvnsweeting/Diamond,janisz/Diamond-1,python-diamond/Diamond,joel-airspring/Diamond,jumping/Diamond,eMerzh/Diamond-1,Ormod/Diamond,russss/Diamond,TinLe/Diamond,saucelabs/Diamond,sebbrandt87/Diamond,datafiniti/Diamond,Ensighten/Diamond,metamx/Diamond,Slach/Diamond,eMerzh/Diamond-1,disqus/Diamond,signalfx/Diamond,Precis/Diamond,Ssawa/Diamond,actmd/Diamond,tusharmakkar08/Diamond,TinLe/Diamond,MichaelDoyle/Diamond,CYBERBUGJR/Diamond,Ensighten/Diamond,works-mobile/Diamond,CYBERBUGJR/Diamond,joel-airspring/Diamond,datafiniti/Diamond,Ensighten/Diamond,CYBERBUGJR/Diamond,hamelg/Diamond,EzyInsights/Diamond,anandbhoraskar/Diamond,thardie/Diamond,socialwareinc/Diam
ond,russss/Diamond,cannium/Diamond,stuartbfox/Diamond,mzupan/Diamond,socialwareinc/Diamond,CYBERBUGJR/Diamond,Netuitive/netuitive-diamond,Netuitive/Diamond,hvnsweeting/Diamond,jriguera/Diamond,Ssawa/Diamond,Precis/Diamond,ramjothikumar/Diamond,h00dy/Diamond,gg7/diamond,gg7/diamond,mfriedenhagen/Diamond,jriguera/Diamond,janisz/Diamond-1,tuenti/Diamond,tellapart/Diamond,ramjothikumar/Diamond,mfriedenhagen/Diamond,bmhatfield/Diamond,jumping/Diamond,hvnsweeting/Diamond,zoidbergwill/Diamond,anandbhoraskar/Diamond,TAKEALOT/Diamond,skbkontur/Diamond,gg7/diamond,jumping/Diamond,Netuitive/netuitive-diamond,ramjothikumar/Diamond,szibis/Diamond,acquia/Diamond,sebbrandt87/Diamond,MichaelDoyle/Diamond,anandbhoraskar/Diamond,python-diamond/Diamond,rtoma/Diamond,tuenti/Diamond,Slach/Diamond,actmd/Diamond,metamx/Diamond,saucelabs/Diamond,signalfx/Diamond,saucelabs/Diamond,Ssawa/Diamond,jaingaurav/Diamond,Clever/Diamond,hamelg/Diamond,mfriedenhagen/Diamond,datafiniti/Diamond,timchenxiaoyu/Diamond,python-diamond/Diamond,Netuitive/netuitive-diamond,datafiniti/Diamond,krbaker/Diamond,timchenxiaoyu/Diamond,Nihn/Diamond-1,works-mobile/Diamond,codepython/Diamond,Clever/Diamond,tusharmakkar08/Diamond,tusharmakkar08/Diamond,skbkontur/Diamond,TinLe/Diamond,tusharmakkar08/Diamond,jriguera/Diamond,sebbrandt87/Diamond,Nihn/Diamond-1,Clever/Diamond,Nihn/Diamond-1,stuartbfox/Diamond,jaingaurav/Diamond,EzyInsights/Diamond,zoidbergwill/Diamond,Netuitive/Diamond,krbaker/Diamond,signalfx/Diamond,Precis/Diamond,Slach/Diamond,dcsquared13/Diamond,cannium/Diamond,Basis/Diamond,h00dy/Diamond,thardie/Diamond,rtoma/Diamond,ramjothikumar/Diamond,hamelg/Diamond,h00dy/Diamond,mzupan/Diamond,eMerzh/Diamond-1,dcsquared13/Diamond,MediaMath/Diamond,MichaelDoyle/Diamond,saucelabs/Diamond,zoidbergwill/Diamond,acquia/Diamond,Netuitive/Diamond,TinLe/Diamond,socialwareinc/Diamond,TAKEALOT/Diamond,rtoma/Diamond,skbkontur/Diamond,works-mobile/Diamond,zoidbergwill/Diamond,works-mobile/Diamond,ceph/Diamond,timchenxiaoyu/Di
amond,tellapart/Diamond,jaingaurav/Diamond
|
# coding=utf-8
import logging
import threading
import traceback
class Handler(object):
"""
Handlers process metrics that are collected by Collectors.
"""
def __init__(self, config=None):
"""
Create a new instance of the Handler class
"""
# Initialize Log
self.log = logging.getLogger('diamond')
# Initialize Data
self.config = config
# Initialize Lock
self.lock = threading.Condition(threading.Lock())
def _process(self, metric):
"""
Decorator for processing handlers with a lock, catching exceptions
"""
try:
self.log.debug("Running Handler %s locked" % (self))
self.lock.acquire()
self.process(metric)
self.lock.release()
except Exception:
self.log.error(traceback.format_exc())
finally:
self.lock.release()
self.log.debug("Unlocked Handler %s" % (self))
def process(self, metric):
"""
Process a metric
Should be overridden in subclasses
"""
raise NotImplementedError
def flush(self):
"""
Flush metrics
Optional: Should be overridden in subclasses
"""
pass
Fix try-except-finally statement for python 2.4 support
|
# coding=utf-8
import logging
import threading
import traceback
class Handler(object):
"""
Handlers process metrics that are collected by Collectors.
"""
def __init__(self, config=None):
"""
Create a new instance of the Handler class
"""
# Initialize Log
self.log = logging.getLogger('diamond')
# Initialize Data
self.config = config
# Initialize Lock
self.lock = threading.Condition(threading.Lock())
def _process(self, metric):
"""
Decorator for processing handlers with a lock, catching exceptions
"""
try:
try:
self.log.debug("Running Handler %s locked" % (self))
self.lock.acquire()
self.process(metric)
self.lock.release()
except Exception:
self.log.error(traceback.format_exc())
finally:
self.lock.release()
self.log.debug("Unlocked Handler %s" % (self))
def process(self, metric):
"""
Process a metric
Should be overridden in subclasses
"""
raise NotImplementedError
def flush(self):
"""
Flush metrics
Optional: Should be overridden in subclasses
"""
pass
|
<commit_before># coding=utf-8
import logging
import threading
import traceback
class Handler(object):
"""
Handlers process metrics that are collected by Collectors.
"""
def __init__(self, config=None):
"""
Create a new instance of the Handler class
"""
# Initialize Log
self.log = logging.getLogger('diamond')
# Initialize Data
self.config = config
# Initialize Lock
self.lock = threading.Condition(threading.Lock())
def _process(self, metric):
"""
Decorator for processing handlers with a lock, catching exceptions
"""
try:
self.log.debug("Running Handler %s locked" % (self))
self.lock.acquire()
self.process(metric)
self.lock.release()
except Exception:
self.log.error(traceback.format_exc())
finally:
self.lock.release()
self.log.debug("Unlocked Handler %s" % (self))
def process(self, metric):
"""
Process a metric
Should be overridden in subclasses
"""
raise NotImplementedError
def flush(self):
"""
Flush metrics
Optional: Should be overridden in subclasses
"""
pass
<commit_msg>Fix try-except-finally statement for python 2.4 support<commit_after>
|
# coding=utf-8
import logging
import threading
import traceback
class Handler(object):
    """
    Handlers process metrics that are collected by Collectors.
    """
    def __init__(self, config=None):
        """
        Create a new instance of the Handler class.

        config -- optional configuration object stored on the instance.
        """
        # Initialize Log
        self.log = logging.getLogger('diamond')
        # Initialize Data
        self.config = config
        # Initialize Lock
        self.lock = threading.Condition(threading.Lock())
    def _process(self, metric):
        """
        Process a metric under the instance lock, logging any exception
        raised by process() instead of letting it propagate.

        The nested try blocks keep python 2.4 compatibility: a single
        try/except/finally statement only exists since python 2.5.
        """
        try:
            try:
                self.log.debug("Running Handler %s locked" % (self))
                self.lock.acquire()
                self.process(metric)
            except Exception:
                self.log.error(traceback.format_exc())
        finally:
            # Release exactly once, here in finally.  The original code also
            # released inside the inner try, so a successful process() call
            # released the lock twice and the second release raised
            # RuntimeError from this finally block.
            self.lock.release()
            self.log.debug("Unlocked Handler %s" % (self))
    def process(self, metric):
        """
        Process a metric

        Should be overridden in subclasses
        """
        raise NotImplementedError
    def flush(self):
        """
        Flush metrics

        Optional: Should be overridden in subclasses
        """
        pass
|
# coding=utf-8
import logging
import threading
import traceback
class Handler(object):
"""
Handlers process metrics that are collected by Collectors.
"""
def __init__(self, config=None):
"""
Create a new instance of the Handler class
"""
# Initialize Log
self.log = logging.getLogger('diamond')
# Initialize Data
self.config = config
# Initialize Lock
self.lock = threading.Condition(threading.Lock())
def _process(self, metric):
"""
Decorator for processing handlers with a lock, catching exceptions
"""
try:
self.log.debug("Running Handler %s locked" % (self))
self.lock.acquire()
self.process(metric)
self.lock.release()
except Exception:
self.log.error(traceback.format_exc())
finally:
self.lock.release()
self.log.debug("Unlocked Handler %s" % (self))
def process(self, metric):
"""
Process a metric
Should be overridden in subclasses
"""
raise NotImplementedError
def flush(self):
"""
Flush metrics
Optional: Should be overridden in subclasses
"""
pass
Fix try-except-finally statement for python 2.4 support# coding=utf-8
import logging
import threading
import traceback
class Handler(object):
"""
Handlers process metrics that are collected by Collectors.
"""
def __init__(self, config=None):
"""
Create a new instance of the Handler class
"""
# Initialize Log
self.log = logging.getLogger('diamond')
# Initialize Data
self.config = config
# Initialize Lock
self.lock = threading.Condition(threading.Lock())
def _process(self, metric):
"""
Decorator for processing handlers with a lock, catching exceptions
"""
try:
try:
self.log.debug("Running Handler %s locked" % (self))
self.lock.acquire()
self.process(metric)
self.lock.release()
except Exception:
self.log.error(traceback.format_exc())
finally:
self.lock.release()
self.log.debug("Unlocked Handler %s" % (self))
def process(self, metric):
"""
Process a metric
Should be overridden in subclasses
"""
raise NotImplementedError
def flush(self):
"""
Flush metrics
Optional: Should be overridden in subclasses
"""
pass
|
<commit_before># coding=utf-8
import logging
import threading
import traceback
class Handler(object):
"""
Handlers process metrics that are collected by Collectors.
"""
def __init__(self, config=None):
"""
Create a new instance of the Handler class
"""
# Initialize Log
self.log = logging.getLogger('diamond')
# Initialize Data
self.config = config
# Initialize Lock
self.lock = threading.Condition(threading.Lock())
def _process(self, metric):
"""
Decorator for processing handlers with a lock, catching exceptions
"""
try:
self.log.debug("Running Handler %s locked" % (self))
self.lock.acquire()
self.process(metric)
self.lock.release()
except Exception:
self.log.error(traceback.format_exc())
finally:
self.lock.release()
self.log.debug("Unlocked Handler %s" % (self))
def process(self, metric):
"""
Process a metric
Should be overridden in subclasses
"""
raise NotImplementedError
def flush(self):
"""
Flush metrics
Optional: Should be overridden in subclasses
"""
pass
<commit_msg>Fix try-except-finally statement for python 2.4 support<commit_after># coding=utf-8
import logging
import threading
import traceback
class Handler(object):
"""
Handlers process metrics that are collected by Collectors.
"""
def __init__(self, config=None):
"""
Create a new instance of the Handler class
"""
# Initialize Log
self.log = logging.getLogger('diamond')
# Initialize Data
self.config = config
# Initialize Lock
self.lock = threading.Condition(threading.Lock())
def _process(self, metric):
"""
Decorator for processing handlers with a lock, catching exceptions
"""
try:
try:
self.log.debug("Running Handler %s locked" % (self))
self.lock.acquire()
self.process(metric)
self.lock.release()
except Exception:
self.log.error(traceback.format_exc())
finally:
self.lock.release()
self.log.debug("Unlocked Handler %s" % (self))
def process(self, metric):
"""
Process a metric
Should be overridden in subclasses
"""
raise NotImplementedError
def flush(self):
"""
Flush metrics
Optional: Should be overridden in subclasses
"""
pass
|
332a73c1d7f50cb336577921f0af218dc39d40e1
|
raiden/tests/unit/transfer/test_utils.py
|
raiden/tests/unit/transfer/test_utils.py
|
import pytest
from eth_utils import decode_hex
from raiden.constants import EMPTY_HASH, EMPTY_MERKLE_ROOT
from raiden.transfer.utils import hash_balance_data
@pytest.mark.parametrize(
"values,expected",
(
((0, 0, EMPTY_HASH), bytes(32)),
(
(1, 5, EMPTY_MERKLE_ROOT),
decode_hex("0xc6b26a4554afa01fb3409b3bd6e7605a1c1af45b7e644282c6ebf34eddb6f893"),
),
),
)
def test_hash_balance_data(values, expected):
assert hash_balance_data(values[0], values[1], values[2]) == expected
|
import pytest
from eth_utils import decode_hex
from raiden.constants import EMPTY_HASH, EMPTY_MERKLE_ROOT
from raiden.tests.utils import factories
from raiden.transfer.secret_registry import events_for_onchain_secretreveal
from raiden.transfer.state import TransactionExecutionStatus
from raiden.transfer.utils import hash_balance_data
@pytest.mark.parametrize(
    "values,expected",
    (
        ((0, 0, EMPTY_HASH), bytes(32)),
        (
            (1, 5, EMPTY_MERKLE_ROOT),
            decode_hex("0xc6b26a4554afa01fb3409b3bd6e7605a1c1af45b7e644282c6ebf34eddb6f893"),
        ),
    ),
)
def test_hash_balance_data(values, expected):
    # values is the (transferred_amount, locked_amount, locksroot) triple.
    # All-zero balance data must hash to 32 zero bytes; non-empty data must
    # produce the pinned digest (presumably keccak256 -- confirm against
    # hash_balance_data's implementation).
    assert hash_balance_data(values[0], values[1], values[2]) == expected
def test_events_for_onchain_secretreveal_with_unfit_channels():
    """No on-chain secret reveal events are produced for channels that can
    no longer be used (settled or failed-settlement channels)."""
    # Default TransactionExecutionStatusProperties -- presumably a completed
    # settle transaction, making the channel settled; confirm in factories.
    settle = factories.TransactionExecutionStatusProperties()
    settled = factories.create(factories.NettingChannelStateProperties(settle_transaction=settle))
    secret = factories.UNIT_SECRET
    block_hash = factories.make_block_hash()
    events = events_for_onchain_secretreveal(settled, secret, 10, block_hash)
    assert not events, "Secret reveal event should not be generated for settled channel"
    # A channel whose settle transaction FAILED is equally unusable.
    settle = factories.replace(settle, result=TransactionExecutionStatus.FAILURE)
    unusable = factories.create(factories.NettingChannelStateProperties(settle_transaction=settle))
    events = events_for_onchain_secretreveal(unusable, secret, 10, block_hash)
    assert not events, "Secret reveal event should not be generated for unusable channel."
def test_events_for_onchain_secretreveal_typechecks_secret():
    """Passing a plain string instead of a proper secret must raise ValueError."""
    channel = factories.create(factories.NettingChannelStateProperties())
    block_hash = factories.make_block_hash()
    with pytest.raises(ValueError):
        events_for_onchain_secretreveal(channel, "This is an invalid secret", 10, block_hash)
|
Add unit tests for transfer/secret_registry
|
Add unit tests for transfer/secret_registry
|
Python
|
mit
|
hackaugusto/raiden,hackaugusto/raiden
|
import pytest
from eth_utils import decode_hex
from raiden.constants import EMPTY_HASH, EMPTY_MERKLE_ROOT
from raiden.transfer.utils import hash_balance_data
@pytest.mark.parametrize(
"values,expected",
(
((0, 0, EMPTY_HASH), bytes(32)),
(
(1, 5, EMPTY_MERKLE_ROOT),
decode_hex("0xc6b26a4554afa01fb3409b3bd6e7605a1c1af45b7e644282c6ebf34eddb6f893"),
),
),
)
def test_hash_balance_data(values, expected):
assert hash_balance_data(values[0], values[1], values[2]) == expected
Add unit tests for transfer/secret_registry
|
import pytest
from eth_utils import decode_hex
from raiden.constants import EMPTY_HASH, EMPTY_MERKLE_ROOT
from raiden.tests.utils import factories
from raiden.transfer.secret_registry import events_for_onchain_secretreveal
from raiden.transfer.state import TransactionExecutionStatus
from raiden.transfer.utils import hash_balance_data
@pytest.mark.parametrize(
"values,expected",
(
((0, 0, EMPTY_HASH), bytes(32)),
(
(1, 5, EMPTY_MERKLE_ROOT),
decode_hex("0xc6b26a4554afa01fb3409b3bd6e7605a1c1af45b7e644282c6ebf34eddb6f893"),
),
),
)
def test_hash_balance_data(values, expected):
assert hash_balance_data(values[0], values[1], values[2]) == expected
def test_events_for_onchain_secretreveal_with_unfit_channels():
settle = factories.TransactionExecutionStatusProperties()
settled = factories.create(factories.NettingChannelStateProperties(settle_transaction=settle))
secret = factories.UNIT_SECRET
block_hash = factories.make_block_hash()
events = events_for_onchain_secretreveal(settled, secret, 10, block_hash)
assert not events, "Secret reveal event should not be generated for settled channel"
settle = factories.replace(settle, result=TransactionExecutionStatus.FAILURE)
unusable = factories.create(factories.NettingChannelStateProperties(settle_transaction=settle))
events = events_for_onchain_secretreveal(unusable, secret, 10, block_hash)
assert not events, "Secret reveal event should not be generated for unusable channel."
def test_events_for_onchain_secretreveal_typechecks_secret():
channel = factories.create(factories.NettingChannelStateProperties())
block_hash = factories.make_block_hash()
with pytest.raises(ValueError):
events_for_onchain_secretreveal(channel, "This is an invalid secret", 10, block_hash)
|
<commit_before>import pytest
from eth_utils import decode_hex
from raiden.constants import EMPTY_HASH, EMPTY_MERKLE_ROOT
from raiden.transfer.utils import hash_balance_data
@pytest.mark.parametrize(
"values,expected",
(
((0, 0, EMPTY_HASH), bytes(32)),
(
(1, 5, EMPTY_MERKLE_ROOT),
decode_hex("0xc6b26a4554afa01fb3409b3bd6e7605a1c1af45b7e644282c6ebf34eddb6f893"),
),
),
)
def test_hash_balance_data(values, expected):
assert hash_balance_data(values[0], values[1], values[2]) == expected
<commit_msg>Add unit tests for transfer/secret_registry<commit_after>
|
import pytest
from eth_utils import decode_hex
from raiden.constants import EMPTY_HASH, EMPTY_MERKLE_ROOT
from raiden.tests.utils import factories
from raiden.transfer.secret_registry import events_for_onchain_secretreveal
from raiden.transfer.state import TransactionExecutionStatus
from raiden.transfer.utils import hash_balance_data
@pytest.mark.parametrize(
"values,expected",
(
((0, 0, EMPTY_HASH), bytes(32)),
(
(1, 5, EMPTY_MERKLE_ROOT),
decode_hex("0xc6b26a4554afa01fb3409b3bd6e7605a1c1af45b7e644282c6ebf34eddb6f893"),
),
),
)
def test_hash_balance_data(values, expected):
assert hash_balance_data(values[0], values[1], values[2]) == expected
def test_events_for_onchain_secretreveal_with_unfit_channels():
settle = factories.TransactionExecutionStatusProperties()
settled = factories.create(factories.NettingChannelStateProperties(settle_transaction=settle))
secret = factories.UNIT_SECRET
block_hash = factories.make_block_hash()
events = events_for_onchain_secretreveal(settled, secret, 10, block_hash)
assert not events, "Secret reveal event should not be generated for settled channel"
settle = factories.replace(settle, result=TransactionExecutionStatus.FAILURE)
unusable = factories.create(factories.NettingChannelStateProperties(settle_transaction=settle))
events = events_for_onchain_secretreveal(unusable, secret, 10, block_hash)
assert not events, "Secret reveal event should not be generated for unusable channel."
def test_events_for_onchain_secretreveal_typechecks_secret():
channel = factories.create(factories.NettingChannelStateProperties())
block_hash = factories.make_block_hash()
with pytest.raises(ValueError):
events_for_onchain_secretreveal(channel, "This is an invalid secret", 10, block_hash)
|
import pytest
from eth_utils import decode_hex
from raiden.constants import EMPTY_HASH, EMPTY_MERKLE_ROOT
from raiden.transfer.utils import hash_balance_data
@pytest.mark.parametrize(
"values,expected",
(
((0, 0, EMPTY_HASH), bytes(32)),
(
(1, 5, EMPTY_MERKLE_ROOT),
decode_hex("0xc6b26a4554afa01fb3409b3bd6e7605a1c1af45b7e644282c6ebf34eddb6f893"),
),
),
)
def test_hash_balance_data(values, expected):
assert hash_balance_data(values[0], values[1], values[2]) == expected
Add unit tests for transfer/secret_registryimport pytest
from eth_utils import decode_hex
from raiden.constants import EMPTY_HASH, EMPTY_MERKLE_ROOT
from raiden.tests.utils import factories
from raiden.transfer.secret_registry import events_for_onchain_secretreveal
from raiden.transfer.state import TransactionExecutionStatus
from raiden.transfer.utils import hash_balance_data
@pytest.mark.parametrize(
"values,expected",
(
((0, 0, EMPTY_HASH), bytes(32)),
(
(1, 5, EMPTY_MERKLE_ROOT),
decode_hex("0xc6b26a4554afa01fb3409b3bd6e7605a1c1af45b7e644282c6ebf34eddb6f893"),
),
),
)
def test_hash_balance_data(values, expected):
assert hash_balance_data(values[0], values[1], values[2]) == expected
def test_events_for_onchain_secretreveal_with_unfit_channels():
settle = factories.TransactionExecutionStatusProperties()
settled = factories.create(factories.NettingChannelStateProperties(settle_transaction=settle))
secret = factories.UNIT_SECRET
block_hash = factories.make_block_hash()
events = events_for_onchain_secretreveal(settled, secret, 10, block_hash)
assert not events, "Secret reveal event should not be generated for settled channel"
settle = factories.replace(settle, result=TransactionExecutionStatus.FAILURE)
unusable = factories.create(factories.NettingChannelStateProperties(settle_transaction=settle))
events = events_for_onchain_secretreveal(unusable, secret, 10, block_hash)
assert not events, "Secret reveal event should not be generated for unusable channel."
def test_events_for_onchain_secretreveal_typechecks_secret():
channel = factories.create(factories.NettingChannelStateProperties())
block_hash = factories.make_block_hash()
with pytest.raises(ValueError):
events_for_onchain_secretreveal(channel, "This is an invalid secret", 10, block_hash)
|
<commit_before>import pytest
from eth_utils import decode_hex
from raiden.constants import EMPTY_HASH, EMPTY_MERKLE_ROOT
from raiden.transfer.utils import hash_balance_data
@pytest.mark.parametrize(
"values,expected",
(
((0, 0, EMPTY_HASH), bytes(32)),
(
(1, 5, EMPTY_MERKLE_ROOT),
decode_hex("0xc6b26a4554afa01fb3409b3bd6e7605a1c1af45b7e644282c6ebf34eddb6f893"),
),
),
)
def test_hash_balance_data(values, expected):
assert hash_balance_data(values[0], values[1], values[2]) == expected
<commit_msg>Add unit tests for transfer/secret_registry<commit_after>import pytest
from eth_utils import decode_hex
from raiden.constants import EMPTY_HASH, EMPTY_MERKLE_ROOT
from raiden.tests.utils import factories
from raiden.transfer.secret_registry import events_for_onchain_secretreveal
from raiden.transfer.state import TransactionExecutionStatus
from raiden.transfer.utils import hash_balance_data
@pytest.mark.parametrize(
"values,expected",
(
((0, 0, EMPTY_HASH), bytes(32)),
(
(1, 5, EMPTY_MERKLE_ROOT),
decode_hex("0xc6b26a4554afa01fb3409b3bd6e7605a1c1af45b7e644282c6ebf34eddb6f893"),
),
),
)
def test_hash_balance_data(values, expected):
assert hash_balance_data(values[0], values[1], values[2]) == expected
def test_events_for_onchain_secretreveal_with_unfit_channels():
settle = factories.TransactionExecutionStatusProperties()
settled = factories.create(factories.NettingChannelStateProperties(settle_transaction=settle))
secret = factories.UNIT_SECRET
block_hash = factories.make_block_hash()
events = events_for_onchain_secretreveal(settled, secret, 10, block_hash)
assert not events, "Secret reveal event should not be generated for settled channel"
settle = factories.replace(settle, result=TransactionExecutionStatus.FAILURE)
unusable = factories.create(factories.NettingChannelStateProperties(settle_transaction=settle))
events = events_for_onchain_secretreveal(unusable, secret, 10, block_hash)
assert not events, "Secret reveal event should not be generated for unusable channel."
def test_events_for_onchain_secretreveal_typechecks_secret():
channel = factories.create(factories.NettingChannelStateProperties())
block_hash = factories.make_block_hash()
with pytest.raises(ValueError):
events_for_onchain_secretreveal(channel, "This is an invalid secret", 10, block_hash)
|
f8ac907837e198ddac3d4ce9c5f72243c89b5ca1
|
config.py
|
config.py
|
host = 'http://mech-ai.appspot.com'
try:
from local_config import * # Override with config-local if exists
except ImportError:
pass
|
import os
host_envs = {
'prod': 'http://mech-ai.appspot.com',
'dev': 'http://127.0.0.1:8080',
}
environment = os.getenv('ENV', 'dev')
host = host_env.get('environment')
username = os.getenv('USER')
access_token = os.getenv('TOKEN')
try:
from local_config import * # Override with local settings if exists
except ImportError:
pass
|
Enable environment variables for settings
|
Enable environment variables for settings
|
Python
|
mit
|
supermitch/mech-ai,supermitch/mech-ai,supermitch/mech-ai
|
host = 'http://mech-ai.appspot.com'
try:
from local_config import * # Override with config-local if exists
except ImportError:
pass
Enable environment variables for settings
|
import os

# Map deployment environments to their API hosts.
host_envs = {
    'prod': 'http://mech-ai.appspot.com',
    'dev': 'http://127.0.0.1:8080',
}

# Select the host via the ENV variable; default to local development.
environment = os.getenv('ENV', 'dev')
# BUG FIX: this previously read `host_env.get('environment')`, which raised
# NameError (the dict is named `host_envs`) and looked up the literal string
# 'environment' instead of the variable's value. Unknown ENV values fall
# back to the dev host.
host = host_envs.get(environment, host_envs['dev'])

# Credentials are also sourced from the environment.
username = os.getenv('USER')
access_token = os.getenv('TOKEN')

try:
    from local_config import *  # Override with local settings if exists
except ImportError:
    pass
|
<commit_before>host = 'http://mech-ai.appspot.com'
try:
from local_config import * # Override with config-local if exists
except ImportError:
pass
<commit_msg>Enable environment variables for settings<commit_after>
|
import os

# Map deployment environments to their API hosts.
host_envs = {
    'prod': 'http://mech-ai.appspot.com',
    'dev': 'http://127.0.0.1:8080',
}

# Select the host via the ENV variable; default to local development.
environment = os.getenv('ENV', 'dev')
# BUG FIX: this previously read `host_env.get('environment')`, which raised
# NameError (the dict is named `host_envs`) and looked up the literal string
# 'environment' instead of the variable's value. Unknown ENV values fall
# back to the dev host.
host = host_envs.get(environment, host_envs['dev'])

# Credentials are also sourced from the environment.
username = os.getenv('USER')
access_token = os.getenv('TOKEN')

try:
    from local_config import *  # Override with local settings if exists
except ImportError:
    pass
|
host = 'http://mech-ai.appspot.com'
try:
from local_config import * # Override with config-local if exists
except ImportError:
pass
Enable environment variables for settingsimport os
host_envs = {
'prod': 'http://mech-ai.appspot.com',
'dev': 'http://127.0.0.1:8080',
}
environment = os.getenv('ENV', 'dev')
host = host_env.get('environment')
username = os.getenv('USER')
access_token = os.getenv('TOKEN')
try:
from local_config import * # Override with local settings if exists
except ImportError:
pass
|
<commit_before>host = 'http://mech-ai.appspot.com'
try:
from local_config import * # Override with config-local if exists
except ImportError:
pass
<commit_msg>Enable environment variables for settings<commit_after>import os
host_envs = {
'prod': 'http://mech-ai.appspot.com',
'dev': 'http://127.0.0.1:8080',
}
environment = os.getenv('ENV', 'dev')
host = host_env.get('environment')
username = os.getenv('USER')
access_token = os.getenv('TOKEN')
try:
from local_config import * # Override with local settings if exists
except ImportError:
pass
|
5e8d2e545fee83d942f6837dd43a59e92aad5cdb
|
fto/cli.py
|
fto/cli.py
|
# -*- coding: utf-8 -*-
"""
cli.py
===
User-facing command-line functions for :module:`fto`.
"""
import string
from .fto import print_exercise, MassUnit
def process_input(units='lbs'):
    """Guide user through weight calculations via CLI prompts."""
    # Collect the exercise details interactively.
    name = input("Please enter the exercise name: ").strip(string.whitespace)
    kgs = input("Kilograms? y or n: ").strip(string.whitespace)
    weight = int(input("Enter last week's max weight: "))
    week = int(input("Enter current training week: "))

    # The increment is only asked for on the first week of a cycle.
    increment = int(input("How much are we adding? ")) if week == 1 else 0

    # Translate the y/n answer into a mass unit.
    units = MassUnit.kgs if kgs == 'y' else MassUnit.lbs
    print_exercise(name, weight, week, units, increment)


if __name__ == '__main__':
    process_input()
|
# -*- coding: utf-8 -*-
"""
cli.py
===
User-facing command-line functions for :module:`fto`.
"""
import string
from fto.fto import print_exercise, MassUnit
def process_input(units='lbs'):
"""Guide user through weight calculations via CLI prompts."""
name = input("Please enter the exercise name: ")\
.strip(string.whitespace)
kgs = input("Kilograms? y or n: ").strip(string.whitespace)
weight = int(input("Enter last week's max weight: "))
week = int(input("Enter current training week: "))
if week == 1:
increment = int(input("How much are we adding? "))
else:
increment = 0
units = MassUnit.kgs if kgs == 'y' else MassUnit.lbs
print_exercise(name, weight, week, units, increment)
if __name__ == '__main__':
process_input()
|
Use absolute import for python3
|
Use absolute import for python3
|
Python
|
mit
|
jad-b/Crank
|
# -*- coding: utf-8 -*-
"""
cli.py
===
User-facing command-line functions for :module:`fto`.
"""
import string
from .fto import print_exercise, MassUnit
def process_input(units='lbs'):
"""Guide user through weight calculations via CLI prompts."""
name = input("Please enter the exercise name: ")\
.strip(string.whitespace)
kgs = input("Kilograms? y or n: ").strip(string.whitespace)
weight = int(input("Enter last week's max weight: "))
week = int(input("Enter current training week: "))
if week == 1:
increment = int(input("How much are we adding? "))
else:
increment = 0
units = MassUnit.kgs if kgs == 'y' else MassUnit.lbs
print_exercise(name, weight, week, units, increment)
if __name__ == '__main__':
process_input()
Use absolute import for python3
|
# -*- coding: utf-8 -*-
"""
cli.py
===
User-facing command-line functions for :module:`fto`.
"""
import string
from fto.fto import print_exercise, MassUnit
def process_input(units='lbs'):
"""Guide user through weight calculations via CLI prompts."""
name = input("Please enter the exercise name: ")\
.strip(string.whitespace)
kgs = input("Kilograms? y or n: ").strip(string.whitespace)
weight = int(input("Enter last week's max weight: "))
week = int(input("Enter current training week: "))
if week == 1:
increment = int(input("How much are we adding? "))
else:
increment = 0
units = MassUnit.kgs if kgs == 'y' else MassUnit.lbs
print_exercise(name, weight, week, units, increment)
if __name__ == '__main__':
process_input()
|
<commit_before># -*- coding: utf-8 -*-
"""
cli.py
===
User-facing command-line functions for :module:`fto`.
"""
import string
from .fto import print_exercise, MassUnit
def process_input(units='lbs'):
"""Guide user through weight calculations via CLI prompts."""
name = input("Please enter the exercise name: ")\
.strip(string.whitespace)
kgs = input("Kilograms? y or n: ").strip(string.whitespace)
weight = int(input("Enter last week's max weight: "))
week = int(input("Enter current training week: "))
if week == 1:
increment = int(input("How much are we adding? "))
else:
increment = 0
units = MassUnit.kgs if kgs == 'y' else MassUnit.lbs
print_exercise(name, weight, week, units, increment)
if __name__ == '__main__':
process_input()
<commit_msg>Use absolute import for python3<commit_after>
|
# -*- coding: utf-8 -*-
"""
cli.py
===
User-facing command-line functions for :module:`fto`.
"""
import string
from fto.fto import print_exercise, MassUnit
def process_input(units='lbs'):
"""Guide user through weight calculations via CLI prompts."""
name = input("Please enter the exercise name: ")\
.strip(string.whitespace)
kgs = input("Kilograms? y or n: ").strip(string.whitespace)
weight = int(input("Enter last week's max weight: "))
week = int(input("Enter current training week: "))
if week == 1:
increment = int(input("How much are we adding? "))
else:
increment = 0
units = MassUnit.kgs if kgs == 'y' else MassUnit.lbs
print_exercise(name, weight, week, units, increment)
if __name__ == '__main__':
process_input()
|
# -*- coding: utf-8 -*-
"""
cli.py
===
User-facing command-line functions for :module:`fto`.
"""
import string
from .fto import print_exercise, MassUnit
def process_input(units='lbs'):
"""Guide user through weight calculations via CLI prompts."""
name = input("Please enter the exercise name: ")\
.strip(string.whitespace)
kgs = input("Kilograms? y or n: ").strip(string.whitespace)
weight = int(input("Enter last week's max weight: "))
week = int(input("Enter current training week: "))
if week == 1:
increment = int(input("How much are we adding? "))
else:
increment = 0
units = MassUnit.kgs if kgs == 'y' else MassUnit.lbs
print_exercise(name, weight, week, units, increment)
if __name__ == '__main__':
process_input()
Use absolute import for python3# -*- coding: utf-8 -*-
"""
cli.py
===
User-facing command-line functions for :module:`fto`.
"""
import string
from fto.fto import print_exercise, MassUnit
def process_input(units='lbs'):
"""Guide user through weight calculations via CLI prompts."""
name = input("Please enter the exercise name: ")\
.strip(string.whitespace)
kgs = input("Kilograms? y or n: ").strip(string.whitespace)
weight = int(input("Enter last week's max weight: "))
week = int(input("Enter current training week: "))
if week == 1:
increment = int(input("How much are we adding? "))
else:
increment = 0
units = MassUnit.kgs if kgs == 'y' else MassUnit.lbs
print_exercise(name, weight, week, units, increment)
if __name__ == '__main__':
process_input()
|
<commit_before># -*- coding: utf-8 -*-
"""
cli.py
===
User-facing command-line functions for :module:`fto`.
"""
import string
from .fto import print_exercise, MassUnit
def process_input(units='lbs'):
"""Guide user through weight calculations via CLI prompts."""
name = input("Please enter the exercise name: ")\
.strip(string.whitespace)
kgs = input("Kilograms? y or n: ").strip(string.whitespace)
weight = int(input("Enter last week's max weight: "))
week = int(input("Enter current training week: "))
if week == 1:
increment = int(input("How much are we adding? "))
else:
increment = 0
units = MassUnit.kgs if kgs == 'y' else MassUnit.lbs
print_exercise(name, weight, week, units, increment)
if __name__ == '__main__':
process_input()
<commit_msg>Use absolute import for python3<commit_after># -*- coding: utf-8 -*-
"""
cli.py
===
User-facing command-line functions for :module:`fto`.
"""
import string
from fto.fto import print_exercise, MassUnit
def process_input(units='lbs'):
"""Guide user through weight calculations via CLI prompts."""
name = input("Please enter the exercise name: ")\
.strip(string.whitespace)
kgs = input("Kilograms? y or n: ").strip(string.whitespace)
weight = int(input("Enter last week's max weight: "))
week = int(input("Enter current training week: "))
if week == 1:
increment = int(input("How much are we adding? "))
else:
increment = 0
units = MassUnit.kgs if kgs == 'y' else MassUnit.lbs
print_exercise(name, weight, week, units, increment)
if __name__ == '__main__':
process_input()
|
bc411a7069386196abc6de6ae2314182efbda048
|
avalonstar/apps/subscribers/admin.py
|
avalonstar/apps/subscribers/admin.py
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Ticket
class TicketAdmin(admin.ModelAdmin):
    """Admin configuration for subscriber Tickets."""
    # Columns shown on the Ticket changelist page.
    list_display = ['name', 'display_name', 'created', 'updated', 'is_active', 'is_paid', 'twid']
    # Columns editable inline from the changelist.
    list_editable = ['created', 'updated', 'is_active', 'is_paid']
    # Most recently updated tickets first.
    ordering = ['-updated']
admin.site.register(Ticket, TicketAdmin)
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Ticket
class TicketAdmin(admin.ModelAdmin):
list_display = ['name', 'display_name', 'created', 'updated', 'streak', 'is_active', 'is_paid', 'twid']
list_editable = ['is_active', 'is_paid']
ordering = ['-updated']
admin.site.register(Ticket, TicketAdmin)
|
Add streak to list_display, remove created and updated from list_editable.
|
Add streak to list_display, remove created and updated from list_editable.
|
Python
|
apache-2.0
|
bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Ticket
class TicketAdmin(admin.ModelAdmin):
list_display = ['name', 'display_name', 'created', 'updated', 'is_active', 'is_paid', 'twid']
list_editable = ['created', 'updated', 'is_active', 'is_paid']
ordering = ['-updated']
admin.site.register(Ticket, TicketAdmin)
Add streak to list_display, remove created and updated from list_editable.
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Ticket
class TicketAdmin(admin.ModelAdmin):
list_display = ['name', 'display_name', 'created', 'updated', 'streak', 'is_active', 'is_paid', 'twid']
list_editable = ['is_active', 'is_paid']
ordering = ['-updated']
admin.site.register(Ticket, TicketAdmin)
|
<commit_before># -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Ticket
class TicketAdmin(admin.ModelAdmin):
list_display = ['name', 'display_name', 'created', 'updated', 'is_active', 'is_paid', 'twid']
list_editable = ['created', 'updated', 'is_active', 'is_paid']
ordering = ['-updated']
admin.site.register(Ticket, TicketAdmin)
<commit_msg>Add streak to list_display, remove created and updated from list_editable.<commit_after>
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Ticket
class TicketAdmin(admin.ModelAdmin):
list_display = ['name', 'display_name', 'created', 'updated', 'streak', 'is_active', 'is_paid', 'twid']
list_editable = ['is_active', 'is_paid']
ordering = ['-updated']
admin.site.register(Ticket, TicketAdmin)
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Ticket
class TicketAdmin(admin.ModelAdmin):
list_display = ['name', 'display_name', 'created', 'updated', 'is_active', 'is_paid', 'twid']
list_editable = ['created', 'updated', 'is_active', 'is_paid']
ordering = ['-updated']
admin.site.register(Ticket, TicketAdmin)
Add streak to list_display, remove created and updated from list_editable.# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Ticket
class TicketAdmin(admin.ModelAdmin):
list_display = ['name', 'display_name', 'created', 'updated', 'streak', 'is_active', 'is_paid', 'twid']
list_editable = ['is_active', 'is_paid']
ordering = ['-updated']
admin.site.register(Ticket, TicketAdmin)
|
<commit_before># -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Ticket
class TicketAdmin(admin.ModelAdmin):
list_display = ['name', 'display_name', 'created', 'updated', 'is_active', 'is_paid', 'twid']
list_editable = ['created', 'updated', 'is_active', 'is_paid']
ordering = ['-updated']
admin.site.register(Ticket, TicketAdmin)
<commit_msg>Add streak to list_display, remove created and updated from list_editable.<commit_after># -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Ticket
class TicketAdmin(admin.ModelAdmin):
list_display = ['name', 'display_name', 'created', 'updated', 'streak', 'is_active', 'is_paid', 'twid']
list_editable = ['is_active', 'is_paid']
ordering = ['-updated']
admin.site.register(Ticket, TicketAdmin)
|
1bbe01fb9cafcb2effd6e95f40ae5c9872469f08
|
exporter/mailer.py
|
exporter/mailer.py
|
import sendgrid
import config as config
from sendgrid.helpers.mail import Mail, Content
sg = sendgrid.SendGridAPIClient(apikey=config.SENDGRID_API_KEY)
from_mail = sendgrid.Email(config.SENDGRID_FROM_MAIL)
def send_download_link(to, link):
    """Email the export download *link* to the address *to* via SendGrid.

    Returns the raw SendGrid API response.
    """
    recipient = sendgrid.Email(to)
    body = Content(
        "text/html",
        "<html> Your download link is <a href='{0}'>{1}</a> </html>".format(link, link),
    )
    message = Mail(
        from_email=from_mail,
        subject='Your AchSo! video export is ready',
        to_email=recipient,
        content=body,
    )
    return sg.client.mail.send.post(request_body=message.get())
|
import sendgrid
import config as config
from sendgrid.helpers.mail import Mail, Content
sg = sendgrid.SendGridAPIClient(apikey=config.SENDGRID_API_KEY)
from_mail = sendgrid.Email(config.SENDGRID_FROM_MAIL)
def send_download_link(to, link):
to_mail = sendgrid.Email(to)
content = Content("text/html", "<html> <h1>Your videos are ready</h1> Hello! Your download link is <a href='{0}'>{1}</a> </html>".format(link, link))
message = Mail(from_email=from_mail, subject='Your AchSo! video export is ready',
to_email=to_mail, content=content)
resp = sg.client.mail.send.post(request_body=message.get())
return resp
|
Add heading to export mail
|
Add heading to export mail
|
Python
|
mit
|
melonmanchan/achso-video-exporter,melonmanchan/achso-video-exporter
|
import sendgrid
import config as config
from sendgrid.helpers.mail import Mail, Content
sg = sendgrid.SendGridAPIClient(apikey=config.SENDGRID_API_KEY)
from_mail = sendgrid.Email(config.SENDGRID_FROM_MAIL)
def send_download_link(to, link):
to_mail = sendgrid.Email(to)
content = Content("text/html", "<html> Your download link is <a href='{0}'>{1}</a> </html>".format(link, link))
message = Mail(from_email=from_mail, subject='Your AchSo! video export is ready',
to_email=to_mail, content=content)
resp = sg.client.mail.send.post(request_body=message.get())
return resp
Add heading to export mail
|
import sendgrid
import config as config
from sendgrid.helpers.mail import Mail, Content
sg = sendgrid.SendGridAPIClient(apikey=config.SENDGRID_API_KEY)
from_mail = sendgrid.Email(config.SENDGRID_FROM_MAIL)
def send_download_link(to, link):
to_mail = sendgrid.Email(to)
content = Content("text/html", "<html> <h1>Your videos are ready</h1> Hello! Your download link is <a href='{0}'>{1}</a> </html>".format(link, link))
message = Mail(from_email=from_mail, subject='Your AchSo! video export is ready',
to_email=to_mail, content=content)
resp = sg.client.mail.send.post(request_body=message.get())
return resp
|
<commit_before>import sendgrid
import config as config
from sendgrid.helpers.mail import Mail, Content
sg = sendgrid.SendGridAPIClient(apikey=config.SENDGRID_API_KEY)
from_mail = sendgrid.Email(config.SENDGRID_FROM_MAIL)
def send_download_link(to, link):
to_mail = sendgrid.Email(to)
content = Content("text/html", "<html> Your download link is <a href='{0}'>{1}</a> </html>".format(link, link))
message = Mail(from_email=from_mail, subject='Your AchSo! video export is ready',
to_email=to_mail, content=content)
resp = sg.client.mail.send.post(request_body=message.get())
return resp
<commit_msg>Add heading to export mail<commit_after>
|
import sendgrid
import config as config
from sendgrid.helpers.mail import Mail, Content
sg = sendgrid.SendGridAPIClient(apikey=config.SENDGRID_API_KEY)
from_mail = sendgrid.Email(config.SENDGRID_FROM_MAIL)
def send_download_link(to, link):
to_mail = sendgrid.Email(to)
content = Content("text/html", "<html> <h1>Your videos are ready</h1> Hello! Your download link is <a href='{0}'>{1}</a> </html>".format(link, link))
message = Mail(from_email=from_mail, subject='Your AchSo! video export is ready',
to_email=to_mail, content=content)
resp = sg.client.mail.send.post(request_body=message.get())
return resp
|
import sendgrid
import config as config
from sendgrid.helpers.mail import Mail, Content
sg = sendgrid.SendGridAPIClient(apikey=config.SENDGRID_API_KEY)
from_mail = sendgrid.Email(config.SENDGRID_FROM_MAIL)
def send_download_link(to, link):
to_mail = sendgrid.Email(to)
content = Content("text/html", "<html> Your download link is <a href='{0}'>{1}</a> </html>".format(link, link))
message = Mail(from_email=from_mail, subject='Your AchSo! video export is ready',
to_email=to_mail, content=content)
resp = sg.client.mail.send.post(request_body=message.get())
return resp
Add heading to export mailimport sendgrid
import config as config
from sendgrid.helpers.mail import Mail, Content
sg = sendgrid.SendGridAPIClient(apikey=config.SENDGRID_API_KEY)
from_mail = sendgrid.Email(config.SENDGRID_FROM_MAIL)
def send_download_link(to, link):
to_mail = sendgrid.Email(to)
content = Content("text/html", "<html> <h1>Your videos are ready</h1> Hello! Your download link is <a href='{0}'>{1}</a> </html>".format(link, link))
message = Mail(from_email=from_mail, subject='Your AchSo! video export is ready',
to_email=to_mail, content=content)
resp = sg.client.mail.send.post(request_body=message.get())
return resp
|
<commit_before>import sendgrid
import config as config
from sendgrid.helpers.mail import Mail, Content
sg = sendgrid.SendGridAPIClient(apikey=config.SENDGRID_API_KEY)
from_mail = sendgrid.Email(config.SENDGRID_FROM_MAIL)
def send_download_link(to, link):
to_mail = sendgrid.Email(to)
content = Content("text/html", "<html> Your download link is <a href='{0}'>{1}</a> </html>".format(link, link))
message = Mail(from_email=from_mail, subject='Your AchSo! video export is ready',
to_email=to_mail, content=content)
resp = sg.client.mail.send.post(request_body=message.get())
return resp
<commit_msg>Add heading to export mail<commit_after>import sendgrid
import config as config
from sendgrid.helpers.mail import Mail, Content
sg = sendgrid.SendGridAPIClient(apikey=config.SENDGRID_API_KEY)
from_mail = sendgrid.Email(config.SENDGRID_FROM_MAIL)
def send_download_link(to, link):
to_mail = sendgrid.Email(to)
content = Content("text/html", "<html> <h1>Your videos are ready</h1> Hello! Your download link is <a href='{0}'>{1}</a> </html>".format(link, link))
message = Mail(from_email=from_mail, subject='Your AchSo! video export is ready',
to_email=to_mail, content=content)
resp = sg.client.mail.send.post(request_body=message.get())
return resp
|
b0aa167c0d16b5262eceed9ff2af43643a987d47
|
learning_journal/models.py
|
learning_journal/models.py
|
import datetime
import psycopg2
from sqlalchemy import (
Column,
DateTime,
Integer,
Unicode,
UnicodeText,
)
from pyramid.security import Allow, Everyone
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import (
scoped_session,
sessionmaker,
)
from zope.sqlalchemy import ZopeTransactionExtension
DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
Base = declarative_base()
class Entry(Base):
    """Our Journal Entry class."""
    __tablename__ = 'entries'
    # Surrogate primary key.
    id = Column(Integer, primary_key=True)
    # Entry titles must be unique across the journal.
    title = Column(Unicode(128), unique=True)
    # Body of the journal entry.
    text = Column(UnicodeText)
    # Creation timestamp; defaults to the current UTC time at insert.
    created = Column(DateTime, default=datetime.datetime.utcnow)
    @property
    def __acl__(self):
        """Add permissions for specific instance of Entry object.
        self.author.username is the user who created this Entry instance.

        NOTE(review): this model defines no `author` attribute or
        relationship, so accessing `self.author` here will raise
        AttributeError — confirm an `author` relationship is added."""
        return [
            (Allow, Everyone, 'view'),
            (Allow, self.author.username, 'edit')
        ]
|
import datetime
import psycopg2
from sqlalchemy import (
Column,
DateTime,
Integer,
Unicode,
UnicodeText,
ForeignKey,
)
from pyramid.security import Allow, Everyone
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import (
scoped_session,
sessionmaker,
relationship,
)
from zope.sqlalchemy import ZopeTransactionExtension
DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
Base = declarative_base()
class Entry(Base):
"""Our Journal Entry class."""
__tablename__ = 'entries'
id = Column(Integer, primary_key=True)
title = Column(Unicode(128), unique=True)
text = Column(UnicodeText)
created = Column(DateTime, default=datetime.datetime.utcnow)
author_id = Column(Integer, ForeignKey('users.id'))
#Ties User model to Entry model
author = relationship('User', back_populates='entries')
@property
def __acl__(self):
"""Add permissions for specific instance of Entry object.
self.author.username is the user who created this Entry instance.
"""
return [
(Allow, Everyone, 'view'),
(Allow, self.author.username, 'edit')
]
|
Add bridge between Entry and User (which will be created in the next commit)
|
Add bridge between Entry and User(which will be created on the next commit)
|
Python
|
mit
|
DZwell/learning_journal,DZwell/learning_journal,DZwell/learning_journal
|
import datetime
import psycopg2
from sqlalchemy import (
Column,
DateTime,
Integer,
Unicode,
UnicodeText,
)
from pyramid.security import Allow, Everyone
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import (
scoped_session,
sessionmaker,
)
from zope.sqlalchemy import ZopeTransactionExtension
DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
Base = declarative_base()
class Entry(Base):
"""Our Journal Entry class."""
__tablename__ = 'entries'
id = Column(Integer, primary_key=True)
title = Column(Unicode(128), unique=True)
text = Column(UnicodeText)
created = Column(DateTime, default=datetime.datetime.utcnow)
@property
def __acl__(self):
"""Add permissions for specific instance of Entry object.
self.author.username is the user who created this Entry instance."""
return [
(Allow, Everyone, 'view'),
(Allow, self.author.username, 'edit')
]
Add bridge between Entry and User(which will be created on the next commit)
|
import datetime
import psycopg2
from sqlalchemy import (
Column,
DateTime,
Integer,
Unicode,
UnicodeText,
ForeignKey,
)
from pyramid.security import Allow, Everyone
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import (
scoped_session,
sessionmaker,
relationship,
)
from zope.sqlalchemy import ZopeTransactionExtension
DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
Base = declarative_base()
class Entry(Base):
"""Our Journal Entry class."""
__tablename__ = 'entries'
id = Column(Integer, primary_key=True)
title = Column(Unicode(128), unique=True)
text = Column(UnicodeText)
created = Column(DateTime, default=datetime.datetime.utcnow)
author_id = Column(Integer, ForeignKey('users.id'))
#Ties User model to Entry model
author = relationship('User', back_populates='entries')
@property
def __acl__(self):
"""Add permissions for specific instance of Entry object.
self.author.username is the user who created this Entry instance.
"""
return [
(Allow, Everyone, 'view'),
(Allow, self.author.username, 'edit')
]
|
<commit_before>import datetime
import psycopg2
from sqlalchemy import (
Column,
DateTime,
Integer,
Unicode,
UnicodeText,
)
from pyramid.security import Allow, Everyone
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import (
scoped_session,
sessionmaker,
)
from zope.sqlalchemy import ZopeTransactionExtension
DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
Base = declarative_base()
class Entry(Base):
"""Our Journal Entry class."""
__tablename__ = 'entries'
id = Column(Integer, primary_key=True)
title = Column(Unicode(128), unique=True)
text = Column(UnicodeText)
created = Column(DateTime, default=datetime.datetime.utcnow)
@property
def __acl__(self):
"""Add permissions for specific instance of Entry object.
self.author.username is the user who created this Entry instance."""
return [
(Allow, Everyone, 'view'),
(Allow, self.author.username, 'edit')
]
<commit_msg>Add bridge between Entry and User(which will be created on the next commit)<commit_after>
|
import datetime
import psycopg2
from sqlalchemy import (
Column,
DateTime,
Integer,
Unicode,
UnicodeText,
ForeignKey,
)
from pyramid.security import Allow, Everyone
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import (
scoped_session,
sessionmaker,
relationship,
)
from zope.sqlalchemy import ZopeTransactionExtension
DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
Base = declarative_base()
class Entry(Base):
"""Our Journal Entry class."""
__tablename__ = 'entries'
id = Column(Integer, primary_key=True)
title = Column(Unicode(128), unique=True)
text = Column(UnicodeText)
created = Column(DateTime, default=datetime.datetime.utcnow)
author_id = Column(Integer, ForeignKey('users.id'))
#Ties User model to Entry model
author = relationship('User', back_populates='entries')
@property
def __acl__(self):
"""Add permissions for specific instance of Entry object.
self.author.username is the user who created this Entry instance.
"""
return [
(Allow, Everyone, 'view'),
(Allow, self.author.username, 'edit')
]
|
import datetime
import psycopg2
from sqlalchemy import (
Column,
DateTime,
Integer,
Unicode,
UnicodeText,
)
from pyramid.security import Allow, Everyone
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import (
scoped_session,
sessionmaker,
)
from zope.sqlalchemy import ZopeTransactionExtension
DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
Base = declarative_base()
class Entry(Base):
"""Our Journal Entry class."""
__tablename__ = 'entries'
id = Column(Integer, primary_key=True)
title = Column(Unicode(128), unique=True)
text = Column(UnicodeText)
created = Column(DateTime, default=datetime.datetime.utcnow)
@property
def __acl__(self):
"""Add permissions for specific instance of Entry object.
self.author.username is the user who created this Entry instance."""
return [
(Allow, Everyone, 'view'),
(Allow, self.author.username, 'edit')
]
Add bridge between Entry and User(which will be created on the next commit)import datetime
import psycopg2
from sqlalchemy import (
Column,
DateTime,
Integer,
Unicode,
UnicodeText,
ForeignKey,
)
from pyramid.security import Allow, Everyone
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import (
scoped_session,
sessionmaker,
relationship,
)
from zope.sqlalchemy import ZopeTransactionExtension
DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
Base = declarative_base()
class Entry(Base):
"""Our Journal Entry class."""
__tablename__ = 'entries'
id = Column(Integer, primary_key=True)
title = Column(Unicode(128), unique=True)
text = Column(UnicodeText)
created = Column(DateTime, default=datetime.datetime.utcnow)
author_id = Column(Integer, ForeignKey('users.id'))
#Ties User model to Entry model
author = relationship('User', back_populates='entries')
@property
def __acl__(self):
"""Add permissions for specific instance of Entry object.
self.author.username is the user who created this Entry instance.
"""
return [
(Allow, Everyone, 'view'),
(Allow, self.author.username, 'edit')
]
|
<commit_before>import datetime
import psycopg2
from sqlalchemy import (
Column,
DateTime,
Integer,
Unicode,
UnicodeText,
)
from pyramid.security import Allow, Everyone
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import (
scoped_session,
sessionmaker,
)
from zope.sqlalchemy import ZopeTransactionExtension
DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
Base = declarative_base()
class Entry(Base):
"""Our Journal Entry class."""
__tablename__ = 'entries'
id = Column(Integer, primary_key=True)
title = Column(Unicode(128), unique=True)
text = Column(UnicodeText)
created = Column(DateTime, default=datetime.datetime.utcnow)
@property
def __acl__(self):
"""Add permissions for specific instance of Entry object.
self.author.username is the user who created this Entry instance."""
return [
(Allow, Everyone, 'view'),
(Allow, self.author.username, 'edit')
]
<commit_msg>Add bridge between Entry and User(which will be created on the next commit)<commit_after>import datetime
import psycopg2
from sqlalchemy import (
Column,
DateTime,
Integer,
Unicode,
UnicodeText,
ForeignKey,
)
from pyramid.security import Allow, Everyone
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import (
scoped_session,
sessionmaker,
relationship,
)
from zope.sqlalchemy import ZopeTransactionExtension
DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
Base = declarative_base()
class Entry(Base):
"""Our Journal Entry class."""
__tablename__ = 'entries'
id = Column(Integer, primary_key=True)
title = Column(Unicode(128), unique=True)
text = Column(UnicodeText)
created = Column(DateTime, default=datetime.datetime.utcnow)
author_id = Column(Integer, ForeignKey('users.id'))
#Ties User model to Entry model
author = relationship('User', back_populates='entries')
@property
def __acl__(self):
"""Add permissions for specific instance of Entry object.
self.author.username is the user who created this Entry instance.
"""
return [
(Allow, Everyone, 'view'),
(Allow, self.author.username, 'edit')
]
|
e8ee7ad6e2560a4fd0ca287adc55155f066eb815
|
akanda/horizon/routers/views.py
|
akanda/horizon/routers/views.py
|
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from openstack_dashboard import api
def get_interfaces_data(self):
try:
router_id = self.kwargs['router_id']
router = api.quantum.router_get(self.request, router_id)
# Note(rods): Filter off the port on the mgt network
ports = [api.quantum.Port(p) for p in router.ports
if p['device_owner'] != 'network:router_management']
except Exception:
ports = []
msg = _(
'Port list can not be retrieved for router ID %s' %
self.kwargs.get('router_id')
)
exceptions.handle(self.request, msg)
for p in ports:
p.set_id_as_name_if_empty()
return ports
|
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from openstack_dashboard import api
def get_interfaces_data(self):
try:
router_id = self.kwargs['router_id']
router = api.quantum.router_get(self.request, router_id)
# Note(rods): Filter off the port on the mgt network
ports = [api.neutron.Port(p) for p in router.ports
if p['device_owner'] != 'network:router_management']
except Exception:
ports = []
msg = _(
'Port list can not be retrieved for router ID %s' %
self.kwargs.get('router_id')
)
exceptions.handle(self.request, msg)
for p in ports:
p.set_id_as_name_if_empty()
return ports
|
Remove wrong reference to quantum
|
Remove wrong reference to quantum
Change-Id: Ic3d8b26e061e85c1d128a79b115fd2da4412e705
Signed-off-by: Rosario Di Somma <73b2fe5f91895aea2b4d0e8942a5edf9f18fa897@dreamhost.com>
|
Python
|
apache-2.0
|
dreamhost/akanda-horizon,dreamhost/akanda-horizon
|
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from openstack_dashboard import api
def get_interfaces_data(self):
try:
router_id = self.kwargs['router_id']
router = api.quantum.router_get(self.request, router_id)
# Note(rods): Filter off the port on the mgt network
ports = [api.quantum.Port(p) for p in router.ports
if p['device_owner'] != 'network:router_management']
except Exception:
ports = []
msg = _(
'Port list can not be retrieved for router ID %s' %
self.kwargs.get('router_id')
)
exceptions.handle(self.request, msg)
for p in ports:
p.set_id_as_name_if_empty()
return ports
Remove wrong reference to quantum
Change-Id: Ic3d8b26e061e85c1d128a79b115fd2da4412e705
Signed-off-by: Rosario Di Somma <73b2fe5f91895aea2b4d0e8942a5edf9f18fa897@dreamhost.com>
|
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from openstack_dashboard import api
def get_interfaces_data(self):
try:
router_id = self.kwargs['router_id']
router = api.quantum.router_get(self.request, router_id)
# Note(rods): Filter off the port on the mgt network
ports = [api.neutron.Port(p) for p in router.ports
if p['device_owner'] != 'network:router_management']
except Exception:
ports = []
msg = _(
'Port list can not be retrieved for router ID %s' %
self.kwargs.get('router_id')
)
exceptions.handle(self.request, msg)
for p in ports:
p.set_id_as_name_if_empty()
return ports
|
<commit_before>from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from openstack_dashboard import api
def get_interfaces_data(self):
try:
router_id = self.kwargs['router_id']
router = api.quantum.router_get(self.request, router_id)
# Note(rods): Filter off the port on the mgt network
ports = [api.quantum.Port(p) for p in router.ports
if p['device_owner'] != 'network:router_management']
except Exception:
ports = []
msg = _(
'Port list can not be retrieved for router ID %s' %
self.kwargs.get('router_id')
)
exceptions.handle(self.request, msg)
for p in ports:
p.set_id_as_name_if_empty()
return ports
<commit_msg>Remove wrong reference to quantum
Change-Id: Ic3d8b26e061e85c1d128a79b115fd2da4412e705
Signed-off-by: Rosario Di Somma <73b2fe5f91895aea2b4d0e8942a5edf9f18fa897@dreamhost.com><commit_after>
|
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from openstack_dashboard import api
def get_interfaces_data(self):
try:
router_id = self.kwargs['router_id']
router = api.quantum.router_get(self.request, router_id)
# Note(rods): Filter off the port on the mgt network
ports = [api.neutron.Port(p) for p in router.ports
if p['device_owner'] != 'network:router_management']
except Exception:
ports = []
msg = _(
'Port list can not be retrieved for router ID %s' %
self.kwargs.get('router_id')
)
exceptions.handle(self.request, msg)
for p in ports:
p.set_id_as_name_if_empty()
return ports
|
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from openstack_dashboard import api
def get_interfaces_data(self):
try:
router_id = self.kwargs['router_id']
router = api.quantum.router_get(self.request, router_id)
# Note(rods): Filter off the port on the mgt network
ports = [api.quantum.Port(p) for p in router.ports
if p['device_owner'] != 'network:router_management']
except Exception:
ports = []
msg = _(
'Port list can not be retrieved for router ID %s' %
self.kwargs.get('router_id')
)
exceptions.handle(self.request, msg)
for p in ports:
p.set_id_as_name_if_empty()
return ports
Remove wrong reference to quantum
Change-Id: Ic3d8b26e061e85c1d128a79b115fd2da4412e705
Signed-off-by: Rosario Di Somma <73b2fe5f91895aea2b4d0e8942a5edf9f18fa897@dreamhost.com>from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from openstack_dashboard import api
def get_interfaces_data(self):
try:
router_id = self.kwargs['router_id']
router = api.quantum.router_get(self.request, router_id)
# Note(rods): Filter off the port on the mgt network
ports = [api.neutron.Port(p) for p in router.ports
if p['device_owner'] != 'network:router_management']
except Exception:
ports = []
msg = _(
'Port list can not be retrieved for router ID %s' %
self.kwargs.get('router_id')
)
exceptions.handle(self.request, msg)
for p in ports:
p.set_id_as_name_if_empty()
return ports
|
<commit_before>from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from openstack_dashboard import api
def get_interfaces_data(self):
try:
router_id = self.kwargs['router_id']
router = api.quantum.router_get(self.request, router_id)
# Note(rods): Filter off the port on the mgt network
ports = [api.quantum.Port(p) for p in router.ports
if p['device_owner'] != 'network:router_management']
except Exception:
ports = []
msg = _(
'Port list can not be retrieved for router ID %s' %
self.kwargs.get('router_id')
)
exceptions.handle(self.request, msg)
for p in ports:
p.set_id_as_name_if_empty()
return ports
<commit_msg>Remove wrong reference to quantum
Change-Id: Ic3d8b26e061e85c1d128a79b115fd2da4412e705
Signed-off-by: Rosario Di Somma <73b2fe5f91895aea2b4d0e8942a5edf9f18fa897@dreamhost.com><commit_after>from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from openstack_dashboard import api
def get_interfaces_data(self):
try:
router_id = self.kwargs['router_id']
router = api.quantum.router_get(self.request, router_id)
# Note(rods): Filter off the port on the mgt network
ports = [api.neutron.Port(p) for p in router.ports
if p['device_owner'] != 'network:router_management']
except Exception:
ports = []
msg = _(
'Port list can not be retrieved for router ID %s' %
self.kwargs.get('router_id')
)
exceptions.handle(self.request, msg)
for p in ports:
p.set_id_as_name_if_empty()
return ports
|
35bb090dd926d4327fa046ee2da64c4cb5b38a47
|
app/notify_client/email_branding_client.py
|
app/notify_client/email_branding_client.py
|
from app.notify_client import NotifyAdminAPIClient, cache
class EmailBrandingClient(NotifyAdminAPIClient):
@cache.set("email_branding-{branding_id}")
def get_email_branding(self, branding_id):
return self.get(url="/email-branding/{}".format(branding_id))
@cache.set("email_branding")
def get_all_email_branding(self, sort_key=None):
brandings = self.get(url="/email-branding")["email_branding"]
if sort_key and sort_key in brandings[0]:
brandings.sort(key=lambda branding: branding[sort_key].lower())
return brandings
@cache.delete("email_branding")
def create_email_branding(self, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding", data=data)
@cache.delete("email_branding")
@cache.delete("email_branding-{branding_id}")
def update_email_branding(self, branding_id, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding/{}".format(branding_id), data=data)
email_branding_client = EmailBrandingClient()
|
from app.notify_client import NotifyAdminAPIClient, cache
class EmailBrandingClient(NotifyAdminAPIClient):
@cache.set("email_branding-{branding_id}")
def get_email_branding(self, branding_id):
return self.get(url="/email-branding/{}".format(branding_id))
@cache.set("email_branding")
def get_all_email_branding(self):
return self.get(url="/email-branding")["email_branding"]
@cache.delete("email_branding")
def create_email_branding(self, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding", data=data)
@cache.delete("email_branding")
@cache.delete("email_branding-{branding_id}")
def update_email_branding(self, branding_id, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding/{}".format(branding_id), data=data)
email_branding_client = EmailBrandingClient()
|
Remove old way of sorting
|
Remove old way of sorting
This is redundant since the model layer has built-in sorting.
It’s also not a good separation of concerns for something presentational
(sort order) to be in the API client layer.
|
Python
|
mit
|
alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin
|
from app.notify_client import NotifyAdminAPIClient, cache
class EmailBrandingClient(NotifyAdminAPIClient):
@cache.set("email_branding-{branding_id}")
def get_email_branding(self, branding_id):
return self.get(url="/email-branding/{}".format(branding_id))
@cache.set("email_branding")
def get_all_email_branding(self, sort_key=None):
brandings = self.get(url="/email-branding")["email_branding"]
if sort_key and sort_key in brandings[0]:
brandings.sort(key=lambda branding: branding[sort_key].lower())
return brandings
@cache.delete("email_branding")
def create_email_branding(self, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding", data=data)
@cache.delete("email_branding")
@cache.delete("email_branding-{branding_id}")
def update_email_branding(self, branding_id, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding/{}".format(branding_id), data=data)
email_branding_client = EmailBrandingClient()
Remove old way of sorting
This is redundant since the model layer has built-in sorting.
It’s also not a good separation of concerns for something presentational
(sort order) to be in the API client layer.
|
from app.notify_client import NotifyAdminAPIClient, cache
class EmailBrandingClient(NotifyAdminAPIClient):
@cache.set("email_branding-{branding_id}")
def get_email_branding(self, branding_id):
return self.get(url="/email-branding/{}".format(branding_id))
@cache.set("email_branding")
def get_all_email_branding(self):
return self.get(url="/email-branding")["email_branding"]
@cache.delete("email_branding")
def create_email_branding(self, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding", data=data)
@cache.delete("email_branding")
@cache.delete("email_branding-{branding_id}")
def update_email_branding(self, branding_id, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding/{}".format(branding_id), data=data)
email_branding_client = EmailBrandingClient()
|
<commit_before>from app.notify_client import NotifyAdminAPIClient, cache
class EmailBrandingClient(NotifyAdminAPIClient):
@cache.set("email_branding-{branding_id}")
def get_email_branding(self, branding_id):
return self.get(url="/email-branding/{}".format(branding_id))
@cache.set("email_branding")
def get_all_email_branding(self, sort_key=None):
brandings = self.get(url="/email-branding")["email_branding"]
if sort_key and sort_key in brandings[0]:
brandings.sort(key=lambda branding: branding[sort_key].lower())
return brandings
@cache.delete("email_branding")
def create_email_branding(self, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding", data=data)
@cache.delete("email_branding")
@cache.delete("email_branding-{branding_id}")
def update_email_branding(self, branding_id, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding/{}".format(branding_id), data=data)
email_branding_client = EmailBrandingClient()
<commit_msg>Remove old way of sorting
This is redundant since the model layer has built-in sorting.
It’s also not a good separation of concerns for something presentational
(sort order) to be in the API client layer.<commit_after>
|
from app.notify_client import NotifyAdminAPIClient, cache
class EmailBrandingClient(NotifyAdminAPIClient):
@cache.set("email_branding-{branding_id}")
def get_email_branding(self, branding_id):
return self.get(url="/email-branding/{}".format(branding_id))
@cache.set("email_branding")
def get_all_email_branding(self):
return self.get(url="/email-branding")["email_branding"]
@cache.delete("email_branding")
def create_email_branding(self, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding", data=data)
@cache.delete("email_branding")
@cache.delete("email_branding-{branding_id}")
def update_email_branding(self, branding_id, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding/{}".format(branding_id), data=data)
email_branding_client = EmailBrandingClient()
|
from app.notify_client import NotifyAdminAPIClient, cache
class EmailBrandingClient(NotifyAdminAPIClient):
@cache.set("email_branding-{branding_id}")
def get_email_branding(self, branding_id):
return self.get(url="/email-branding/{}".format(branding_id))
@cache.set("email_branding")
def get_all_email_branding(self, sort_key=None):
brandings = self.get(url="/email-branding")["email_branding"]
if sort_key and sort_key in brandings[0]:
brandings.sort(key=lambda branding: branding[sort_key].lower())
return brandings
@cache.delete("email_branding")
def create_email_branding(self, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding", data=data)
@cache.delete("email_branding")
@cache.delete("email_branding-{branding_id}")
def update_email_branding(self, branding_id, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding/{}".format(branding_id), data=data)
email_branding_client = EmailBrandingClient()
Remove old way of sorting
This is redundant since the model layer has built-in sorting.
It’s also not a good separation of concerns for something presentational
(sort order) to be in the API client layer.from app.notify_client import NotifyAdminAPIClient, cache
class EmailBrandingClient(NotifyAdminAPIClient):
@cache.set("email_branding-{branding_id}")
def get_email_branding(self, branding_id):
return self.get(url="/email-branding/{}".format(branding_id))
@cache.set("email_branding")
def get_all_email_branding(self):
return self.get(url="/email-branding")["email_branding"]
@cache.delete("email_branding")
def create_email_branding(self, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding", data=data)
@cache.delete("email_branding")
@cache.delete("email_branding-{branding_id}")
def update_email_branding(self, branding_id, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding/{}".format(branding_id), data=data)
email_branding_client = EmailBrandingClient()
|
<commit_before>from app.notify_client import NotifyAdminAPIClient, cache
class EmailBrandingClient(NotifyAdminAPIClient):
@cache.set("email_branding-{branding_id}")
def get_email_branding(self, branding_id):
return self.get(url="/email-branding/{}".format(branding_id))
@cache.set("email_branding")
def get_all_email_branding(self, sort_key=None):
brandings = self.get(url="/email-branding")["email_branding"]
if sort_key and sort_key in brandings[0]:
brandings.sort(key=lambda branding: branding[sort_key].lower())
return brandings
@cache.delete("email_branding")
def create_email_branding(self, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding", data=data)
@cache.delete("email_branding")
@cache.delete("email_branding-{branding_id}")
def update_email_branding(self, branding_id, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding/{}".format(branding_id), data=data)
email_branding_client = EmailBrandingClient()
<commit_msg>Remove old way of sorting
This is redundant since the model layer has built-in sorting.
It’s also not a good separation of concerns for something presentational
(sort order) to be in the API client layer.<commit_after>from app.notify_client import NotifyAdminAPIClient, cache
class EmailBrandingClient(NotifyAdminAPIClient):
@cache.set("email_branding-{branding_id}")
def get_email_branding(self, branding_id):
return self.get(url="/email-branding/{}".format(branding_id))
@cache.set("email_branding")
def get_all_email_branding(self):
return self.get(url="/email-branding")["email_branding"]
@cache.delete("email_branding")
def create_email_branding(self, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding", data=data)
@cache.delete("email_branding")
@cache.delete("email_branding-{branding_id}")
def update_email_branding(self, branding_id, logo, name, text, colour, brand_type):
data = {"logo": logo, "name": name, "text": text, "colour": colour, "brand_type": brand_type}
return self.post(url="/email-branding/{}".format(branding_id), data=data)
email_branding_client = EmailBrandingClient()
|
6c3a1d011ff51b99a7975ef186cff042aea086d4
|
poller.py
|
poller.py
|
#!/usr/bin/env python
import urllib2
import ssl
# Define the sites we want to poll and the timeout.
SITES = (
'https://redmine.codegrove.org',
'http://koodilehto.fi',
'http://vakiopaine.net',
)
TIMEOUT = 5
try:
import gntp.notifier
notification = gntp.notifier.mini
except ImportError:
try:
import pygtk
pygtk.require('2.0')
import pynotify
def gtk_out(data):
pynotify.Notification(
'Koodilehto Service Error',
data
)
notification = gtk_out
except ImportError:
def out(data):
print data
notification = out
def poll(sites, timeout, ok, error):
"""Checks if the given URLs are online."""
for site in sites:
ok('Polling ' + site)
try:
response = urllib2.urlopen(site, timeout=timeout)
response.read()
except urllib2.URLError as e:
code = str(e.code) if hasattr(e, 'code') else ''
error(site + ' ' + code)
except ssl.SSLError as e:
error(site + ' ' + e.message)
except Exception as e:
error(site + ' ' + e.message)
else:
ok('OK')
def empty(data):
pass
if __name__ == '__main__':
poll(SITES, timeout=TIMEOUT, ok=empty, error=notification)
|
#!/usr/bin/env python
import urllib2
import ssl
# Define the sites we want to poll and the timeout.
SITES = (
'https://redmine.codegrove.org',
'http://koodilehto.fi',
'http://vakiopaine.net',
)
TIMEOUT = 5
try:
import gntp.notifier
notification = gntp.notifier.mini
except ImportError:
try:
import pygtk
pygtk.require('2.0')
import pynotify
def gtk_out(data):
n = pynotify.Notification(
'Koodilehto Service Error',
data
)
n.show()
notification = gtk_out
except ImportError:
def out(data):
print data
notification = out
def poll(sites, timeout, ok, error):
"""Checks if the given URLs are online."""
for site in sites:
ok('Polling ' + site)
try:
response = urllib2.urlopen(site, timeout=timeout)
response.read()
except urllib2.URLError as e:
code = str(e.code) if hasattr(e, 'code') else ''
error(site + ' ' + code)
except ssl.SSLError as e:
error(site + ' ' + e.message)
except Exception as e:
error(site + ' ' + e.message)
else:
ok('OK')
def empty(data):
pass
if __name__ == '__main__':
poll(SITES, timeout=TIMEOUT, ok=empty, error=notification)
|
Add call to show at pynotify
|
Add call to show at pynotify
|
Python
|
mit
|
koodilehto/website-poller,koodilehto/website-poller
|
#!/usr/bin/env python
import urllib2
import ssl
# Define the sites we want to poll and the timeout.
SITES = (
'https://redmine.codegrove.org',
'http://koodilehto.fi',
'http://vakiopaine.net',
)
TIMEOUT = 5
try:
import gntp.notifier
notification = gntp.notifier.mini
except ImportError:
try:
import pygtk
pygtk.require('2.0')
import pynotify
def gtk_out(data):
pynotify.Notification(
'Koodilehto Service Error',
data
)
notification = gtk_out
except ImportError:
def out(data):
print data
notification = out
def poll(sites, timeout, ok, error):
"""Checks if the given URLs are online."""
for site in sites:
ok('Polling ' + site)
try:
response = urllib2.urlopen(site, timeout=timeout)
response.read()
except urllib2.URLError as e:
code = str(e.code) if hasattr(e, 'code') else ''
error(site + ' ' + code)
except ssl.SSLError as e:
error(site + ' ' + e.message)
except Exception as e:
error(site + ' ' + e.message)
else:
ok('OK')
def empty(data):
pass
if __name__ == '__main__':
poll(SITES, timeout=TIMEOUT, ok=empty, error=notification)
Add call to show at pynotify
|
#!/usr/bin/env python
import urllib2
import ssl
# Define the sites we want to poll and the timeout.
SITES = (
'https://redmine.codegrove.org',
'http://koodilehto.fi',
'http://vakiopaine.net',
)
TIMEOUT = 5
try:
import gntp.notifier
notification = gntp.notifier.mini
except ImportError:
try:
import pygtk
pygtk.require('2.0')
import pynotify
def gtk_out(data):
n = pynotify.Notification(
'Koodilehto Service Error',
data
)
n.show()
notification = gtk_out
except ImportError:
def out(data):
print data
notification = out
def poll(sites, timeout, ok, error):
"""Checks if the given URLs are online."""
for site in sites:
ok('Polling ' + site)
try:
response = urllib2.urlopen(site, timeout=timeout)
response.read()
except urllib2.URLError as e:
code = str(e.code) if hasattr(e, 'code') else ''
error(site + ' ' + code)
except ssl.SSLError as e:
error(site + ' ' + e.message)
except Exception as e:
error(site + ' ' + e.message)
else:
ok('OK')
def empty(data):
pass
if __name__ == '__main__':
poll(SITES, timeout=TIMEOUT, ok=empty, error=notification)
|
<commit_before>#!/usr/bin/env python
import urllib2
import ssl
# Define the sites we want to poll and the timeout.
SITES = (
'https://redmine.codegrove.org',
'http://koodilehto.fi',
'http://vakiopaine.net',
)
TIMEOUT = 5
try:
import gntp.notifier
notification = gntp.notifier.mini
except ImportError:
try:
import pygtk
pygtk.require('2.0')
import pynotify
def gtk_out(data):
pynotify.Notification(
'Koodilehto Service Error',
data
)
notification = gtk_out
except ImportError:
def out(data):
print data
notification = out
def poll(sites, timeout, ok, error):
"""Checks if the given URLs are online."""
for site in sites:
ok('Polling ' + site)
try:
response = urllib2.urlopen(site, timeout=timeout)
response.read()
except urllib2.URLError as e:
code = str(e.code) if hasattr(e, 'code') else ''
error(site + ' ' + code)
except ssl.SSLError as e:
error(site + ' ' + e.message)
except Exception as e:
error(site + ' ' + e.message)
else:
ok('OK')
def empty(data):
pass
if __name__ == '__main__':
poll(SITES, timeout=TIMEOUT, ok=empty, error=notification)
<commit_msg>Add call to show at pynotify<commit_after>
|
#!/usr/bin/env python
import urllib2
import ssl
# Define the sites we want to poll and the timeout.
SITES = (
'https://redmine.codegrove.org',
'http://koodilehto.fi',
'http://vakiopaine.net',
)
TIMEOUT = 5
try:
import gntp.notifier
notification = gntp.notifier.mini
except ImportError:
try:
import pygtk
pygtk.require('2.0')
import pynotify
def gtk_out(data):
n = pynotify.Notification(
'Koodilehto Service Error',
data
)
n.show()
notification = gtk_out
except ImportError:
def out(data):
print data
notification = out
def poll(sites, timeout, ok, error):
"""Checks if the given URLs are online."""
for site in sites:
ok('Polling ' + site)
try:
response = urllib2.urlopen(site, timeout=timeout)
response.read()
except urllib2.URLError as e:
code = str(e.code) if hasattr(e, 'code') else ''
error(site + ' ' + code)
except ssl.SSLError as e:
error(site + ' ' + e.message)
except Exception as e:
error(site + ' ' + e.message)
else:
ok('OK')
def empty(data):
pass
if __name__ == '__main__':
poll(SITES, timeout=TIMEOUT, ok=empty, error=notification)
|
#!/usr/bin/env python
import urllib2
import ssl
# Define the sites we want to poll and the timeout.
SITES = (
'https://redmine.codegrove.org',
'http://koodilehto.fi',
'http://vakiopaine.net',
)
TIMEOUT = 5
try:
import gntp.notifier
notification = gntp.notifier.mini
except ImportError:
try:
import pygtk
pygtk.require('2.0')
import pynotify
def gtk_out(data):
pynotify.Notification(
'Koodilehto Service Error',
data
)
notification = gtk_out
except ImportError:
def out(data):
print data
notification = out
def poll(sites, timeout, ok, error):
"""Checks if the given URLs are online."""
for site in sites:
ok('Polling ' + site)
try:
response = urllib2.urlopen(site, timeout=timeout)
response.read()
except urllib2.URLError as e:
code = str(e.code) if hasattr(e, 'code') else ''
error(site + ' ' + code)
except ssl.SSLError as e:
error(site + ' ' + e.message)
except Exception as e:
error(site + ' ' + e.message)
else:
ok('OK')
def empty(data):
pass
if __name__ == '__main__':
poll(SITES, timeout=TIMEOUT, ok=empty, error=notification)
Add call to show at pynotify#!/usr/bin/env python
import urllib2
import ssl
# Define the sites we want to poll and the timeout.
SITES = (
'https://redmine.codegrove.org',
'http://koodilehto.fi',
'http://vakiopaine.net',
)
TIMEOUT = 5
try:
import gntp.notifier
notification = gntp.notifier.mini
except ImportError:
try:
import pygtk
pygtk.require('2.0')
import pynotify
def gtk_out(data):
n = pynotify.Notification(
'Koodilehto Service Error',
data
)
n.show()
notification = gtk_out
except ImportError:
def out(data):
print data
notification = out
def poll(sites, timeout, ok, error):
"""Checks if the given URLs are online."""
for site in sites:
ok('Polling ' + site)
try:
response = urllib2.urlopen(site, timeout=timeout)
response.read()
except urllib2.URLError as e:
code = str(e.code) if hasattr(e, 'code') else ''
error(site + ' ' + code)
except ssl.SSLError as e:
error(site + ' ' + e.message)
except Exception as e:
error(site + ' ' + e.message)
else:
ok('OK')
def empty(data):
pass
if __name__ == '__main__':
poll(SITES, timeout=TIMEOUT, ok=empty, error=notification)
|
<commit_before>#!/usr/bin/env python
import urllib2
import ssl
# Define the sites we want to poll and the timeout.
SITES = (
'https://redmine.codegrove.org',
'http://koodilehto.fi',
'http://vakiopaine.net',
)
TIMEOUT = 5
try:
import gntp.notifier
notification = gntp.notifier.mini
except ImportError:
try:
import pygtk
pygtk.require('2.0')
import pynotify
def gtk_out(data):
pynotify.Notification(
'Koodilehto Service Error',
data
)
notification = gtk_out
except ImportError:
def out(data):
print data
notification = out
def poll(sites, timeout, ok, error):
"""Checks if the given URLs are online."""
for site in sites:
ok('Polling ' + site)
try:
response = urllib2.urlopen(site, timeout=timeout)
response.read()
except urllib2.URLError as e:
code = str(e.code) if hasattr(e, 'code') else ''
error(site + ' ' + code)
except ssl.SSLError as e:
error(site + ' ' + e.message)
except Exception as e:
error(site + ' ' + e.message)
else:
ok('OK')
def empty(data):
pass
if __name__ == '__main__':
poll(SITES, timeout=TIMEOUT, ok=empty, error=notification)
<commit_msg>Add call to show at pynotify<commit_after>#!/usr/bin/env python
import urllib2
import ssl
# Define the sites we want to poll and the timeout.
SITES = (
'https://redmine.codegrove.org',
'http://koodilehto.fi',
'http://vakiopaine.net',
)
TIMEOUT = 5
try:
import gntp.notifier
notification = gntp.notifier.mini
except ImportError:
try:
import pygtk
pygtk.require('2.0')
import pynotify
def gtk_out(data):
n = pynotify.Notification(
'Koodilehto Service Error',
data
)
n.show()
notification = gtk_out
except ImportError:
def out(data):
print data
notification = out
def poll(sites, timeout, ok, error):
"""Checks if the given URLs are online."""
for site in sites:
ok('Polling ' + site)
try:
response = urllib2.urlopen(site, timeout=timeout)
response.read()
except urllib2.URLError as e:
code = str(e.code) if hasattr(e, 'code') else ''
error(site + ' ' + code)
except ssl.SSLError as e:
error(site + ' ' + e.message)
except Exception as e:
error(site + ' ' + e.message)
else:
ok('OK')
def empty(data):
pass
if __name__ == '__main__':
poll(SITES, timeout=TIMEOUT, ok=empty, error=notification)
|
7f24d458d4953542ad481920642016f482978009
|
pyautoupdate/_file_glob.py
|
pyautoupdate/_file_glob.py
|
import glob
import shutil
import os
if os.name == "nt": # pragma: no branch
from .ntcommonpath import commonpath
else:
from .posixcommonpath import commonpath
# def move_glob(src,dst):
# """Moves files from src to dest.
# src may be any glob to recognize files. dst must be a folder.
# """
# for obj in glob.iglob(src):
# shutil.move(obj,dst)
def copy_glob(src, dst):
    """Copy everything matched by the glob *src* into the folder *dst*.

    Plain files are copied (with metadata) directly into *dst*;
    matched directories are copied recursively, merging into any
    directory that already exists at the destination.
    """
    for entry in glob.iglob(src):
        if not os.path.isdir(entry):
            shutil.copy2(entry, dst)
            continue
        # Rebuild the matched directory's path relative to the glob root
        # so the same layout is reproduced under dst.
        clauses_root = commonpath([src, entry])
        target = os.path.join(dst, os.path.relpath(entry, clauses_root))
        if os.path.isdir(target):
            # Destination directory already exists: descend and merge.
            copy_glob(os.path.join(entry, "*"), target)
        else:
            shutil.copytree(entry, target)
|
import glob
import shutil
import os
if os.name == "nt": # pragma: no branch
from .ntcommonpath import commonpath
else: # pragma: no branch
from .posixcommonpath import commonpath
# def move_glob(src,dst):
# """Moves files from src to dest.
# src may be any glob to recognize files. dst must be a folder.
# """
# for obj in glob.iglob(src):
# shutil.move(obj,dst)
def copy_glob(src, dst):
"""Copies files from src to dest.
src may be any glob to recognize files. dst must be a folder.
"""
for obj in glob.iglob(src):
if os.path.isdir(obj):
start_part = commonpath([src, obj])
end_part = os.path.relpath(obj, start_part)
ctree_dst = os.path.join(dst, end_part)
if not os.path.isdir(ctree_dst):
shutil.copytree(obj, ctree_dst)
else:
copy_glob(os.path.join(obj, "*"), ctree_dst)
else:
shutil.copy2(obj, dst)
|
Fix whitespace in file_glob and attempt coverage modification
|
Fix whitespace in file_glob and attempt coverage modification
|
Python
|
lgpl-2.1
|
rlee287/pyautoupdate,rlee287/pyautoupdate
|
import glob
import shutil
import os
if os.name == "nt": # pragma: no branch
from .ntcommonpath import commonpath
else:
from .posixcommonpath import commonpath
# def move_glob(src,dst):
# """Moves files from src to dest.
# src may be any glob to recognize files. dst must be a folder.
# """
# for obj in glob.iglob(src):
# shutil.move(obj,dst)
def copy_glob(src,dst):
"""Copies files from src to dest.
src may be any glob to recognize files. dst must be a folder.
"""
for obj in glob.iglob(src):
if os.path.isdir(obj):
start_part=commonpath([src,obj])
end_part=os.path.relpath(obj,start_part)
ctree_dst=os.path.join(dst,end_part)
if not os.path.isdir(ctree_dst):
shutil.copytree(obj,ctree_dst)
else:
copy_glob(os.path.join(obj,"*"),ctree_dst)
else:
shutil.copy2(obj,dst)
Fix whitespace in file_glob and attempt coverage modification
|
import glob
import shutil
import os
if os.name == "nt": # pragma: no branch
from .ntcommonpath import commonpath
else: # pragma: no branch
from .posixcommonpath import commonpath
# def move_glob(src,dst):
# """Moves files from src to dest.
# src may be any glob to recognize files. dst must be a folder.
# """
# for obj in glob.iglob(src):
# shutil.move(obj,dst)
def copy_glob(src, dst):
"""Copies files from src to dest.
src may be any glob to recognize files. dst must be a folder.
"""
for obj in glob.iglob(src):
if os.path.isdir(obj):
start_part = commonpath([src, obj])
end_part = os.path.relpath(obj, start_part)
ctree_dst = os.path.join(dst, end_part)
if not os.path.isdir(ctree_dst):
shutil.copytree(obj, ctree_dst)
else:
copy_glob(os.path.join(obj, "*"), ctree_dst)
else:
shutil.copy2(obj, dst)
|
<commit_before>import glob
import shutil
import os
if os.name == "nt": # pragma: no branch
from .ntcommonpath import commonpath
else:
from .posixcommonpath import commonpath
# def move_glob(src,dst):
# """Moves files from src to dest.
# src may be any glob to recognize files. dst must be a folder.
# """
# for obj in glob.iglob(src):
# shutil.move(obj,dst)
def copy_glob(src,dst):
"""Copies files from src to dest.
src may be any glob to recognize files. dst must be a folder.
"""
for obj in glob.iglob(src):
if os.path.isdir(obj):
start_part=commonpath([src,obj])
end_part=os.path.relpath(obj,start_part)
ctree_dst=os.path.join(dst,end_part)
if not os.path.isdir(ctree_dst):
shutil.copytree(obj,ctree_dst)
else:
copy_glob(os.path.join(obj,"*"),ctree_dst)
else:
shutil.copy2(obj,dst)
<commit_msg>Fix whitespace in file_glob and attempt coverage modification<commit_after>
|
import glob
import shutil
import os
if os.name == "nt": # pragma: no branch
from .ntcommonpath import commonpath
else: # pragma: no branch
from .posixcommonpath import commonpath
# def move_glob(src,dst):
# """Moves files from src to dest.
# src may be any glob to recognize files. dst must be a folder.
# """
# for obj in glob.iglob(src):
# shutil.move(obj,dst)
def copy_glob(src, dst):
"""Copies files from src to dest.
src may be any glob to recognize files. dst must be a folder.
"""
for obj in glob.iglob(src):
if os.path.isdir(obj):
start_part = commonpath([src, obj])
end_part = os.path.relpath(obj, start_part)
ctree_dst = os.path.join(dst, end_part)
if not os.path.isdir(ctree_dst):
shutil.copytree(obj, ctree_dst)
else:
copy_glob(os.path.join(obj, "*"), ctree_dst)
else:
shutil.copy2(obj, dst)
|
import glob
import shutil
import os
if os.name == "nt": # pragma: no branch
from .ntcommonpath import commonpath
else:
from .posixcommonpath import commonpath
# def move_glob(src,dst):
# """Moves files from src to dest.
# src may be any glob to recognize files. dst must be a folder.
# """
# for obj in glob.iglob(src):
# shutil.move(obj,dst)
def copy_glob(src,dst):
"""Copies files from src to dest.
src may be any glob to recognize files. dst must be a folder.
"""
for obj in glob.iglob(src):
if os.path.isdir(obj):
start_part=commonpath([src,obj])
end_part=os.path.relpath(obj,start_part)
ctree_dst=os.path.join(dst,end_part)
if not os.path.isdir(ctree_dst):
shutil.copytree(obj,ctree_dst)
else:
copy_glob(os.path.join(obj,"*"),ctree_dst)
else:
shutil.copy2(obj,dst)
Fix whitespace in file_glob and attempt coverage modificationimport glob
import shutil
import os
if os.name == "nt": # pragma: no branch
from .ntcommonpath import commonpath
else: # pragma: no branch
from .posixcommonpath import commonpath
# def move_glob(src,dst):
# """Moves files from src to dest.
# src may be any glob to recognize files. dst must be a folder.
# """
# for obj in glob.iglob(src):
# shutil.move(obj,dst)
def copy_glob(src, dst):
"""Copies files from src to dest.
src may be any glob to recognize files. dst must be a folder.
"""
for obj in glob.iglob(src):
if os.path.isdir(obj):
start_part = commonpath([src, obj])
end_part = os.path.relpath(obj, start_part)
ctree_dst = os.path.join(dst, end_part)
if not os.path.isdir(ctree_dst):
shutil.copytree(obj, ctree_dst)
else:
copy_glob(os.path.join(obj, "*"), ctree_dst)
else:
shutil.copy2(obj, dst)
|
<commit_before>import glob
import shutil
import os
if os.name == "nt": # pragma: no branch
from .ntcommonpath import commonpath
else:
from .posixcommonpath import commonpath
# def move_glob(src,dst):
# """Moves files from src to dest.
# src may be any glob to recognize files. dst must be a folder.
# """
# for obj in glob.iglob(src):
# shutil.move(obj,dst)
def copy_glob(src,dst):
"""Copies files from src to dest.
src may be any glob to recognize files. dst must be a folder.
"""
for obj in glob.iglob(src):
if os.path.isdir(obj):
start_part=commonpath([src,obj])
end_part=os.path.relpath(obj,start_part)
ctree_dst=os.path.join(dst,end_part)
if not os.path.isdir(ctree_dst):
shutil.copytree(obj,ctree_dst)
else:
copy_glob(os.path.join(obj,"*"),ctree_dst)
else:
shutil.copy2(obj,dst)
<commit_msg>Fix whitespace in file_glob and attempt coverage modification<commit_after>import glob
import shutil
import os
if os.name == "nt": # pragma: no branch
from .ntcommonpath import commonpath
else: # pragma: no branch
from .posixcommonpath import commonpath
# def move_glob(src,dst):
# """Moves files from src to dest.
# src may be any glob to recognize files. dst must be a folder.
# """
# for obj in glob.iglob(src):
# shutil.move(obj,dst)
def copy_glob(src, dst):
"""Copies files from src to dest.
src may be any glob to recognize files. dst must be a folder.
"""
for obj in glob.iglob(src):
if os.path.isdir(obj):
start_part = commonpath([src, obj])
end_part = os.path.relpath(obj, start_part)
ctree_dst = os.path.join(dst, end_part)
if not os.path.isdir(ctree_dst):
shutil.copytree(obj, ctree_dst)
else:
copy_glob(os.path.join(obj, "*"), ctree_dst)
else:
shutil.copy2(obj, dst)
|
eb674c9bd91ff1c8baf95ad440d4a3897b2a030d
|
magpie/main.py
|
magpie/main.py
|
# -*- coding: utf-8 -*-
import nltk
from hazm import word_tokenize
from hazm.HamshahriReader import HamshahriReader
import config
def doc_features(doc, dist_words):
    """Build a bag-of-words feature dict for one document.

    doc: mapping with a 'text' entry holding the raw document string.
    dist_words: iterable of candidate feature words.
    Returns a dict mapping 'contains(<word>)' to a bool for each word.
    """
    # Tokenize once and keep a set so each membership test below is O(1).
    words_set = set(word_tokenize(doc['text']))
    features = {}
    for word in dist_words:
        features['contains(%s)' % word] = (word in words_set)
    return features
if __name__ == '__main__':
rd = HamshahriReader(config.corpora_root)
docs = [doc for doc in rd.docs()]
all_words = []
for doc in docs:
all_words.extend(word_tokenize(doc['text']))
dist = nltk.FreqDist(word for word in all_words)
word_features = dist.keys()[:200]
features_set = [(doc_features(doc, word_features), doc['categories_en'][0]) for doc in docs]
train_set, test_set = features_set[:40], features_set[40:80]
classifier = nltk.NaiveBayesClassifier.train(train_set)
print nltk.classify.accuracy(classifier, test_set)
classifier.show_most_informative_features(5)
|
# -*- coding: utf-8 -*-
import nltk
from hazm import Normalizer, Stemmer, word_tokenize
from hazm.HamshahriReader import HamshahriReader
import config
def doc_features(doc, dist_words):
words_set = set(doc['words'])
features = {}
for word in dist_words:
features['contains(%s)' % word] = (word in words_set)
return features
if __name__ == '__main__':
rd = HamshahriReader(config.corpora_root)
#docs = [doc for doc in rd.docs()]
docs = []
normalizer = Normalizer()
stemmer = Stemmer()
for doc in rd.docs():
doc['text'] = normalizer.normalize(doc['text'])
doc['words'] = [stemmer.stem(word) for word in word_tokenize(doc['text'])]
docs.append(doc)
all_words = []
for doc in docs:
all_words.extend(doc['words'])
dist = nltk.FreqDist(word for word in all_words)
word_features = [word for word in set(all_words) if len(word) > 4 and dist[word] > 10]
features_set = [(doc_features(doc, word_features), doc['categories_en'][0]) for doc in docs]
train_set, test_set = features_set[:60], features_set[60:100]
classifier = nltk.NaiveBayesClassifier.train(train_set)
print nltk.classify.accuracy(classifier, test_set)
classifier.show_most_informative_features(10)
|
Add normalization and stemming, use fine grained selection of words.
|
Add normalization and stemming, use fine grained selection of words.
|
Python
|
mit
|
s1na/magpie
|
# -*- coding: utf-8 -*-
import nltk
from hazm import word_tokenize
from hazm.HamshahriReader import HamshahriReader
import config
def doc_features(doc, dist_words):
words_set = set(word_tokenize(doc['text']))
features = {}
for word in dist_words:
features['contains(%s)' % word] = (word in words_set)
return features
if __name__ == '__main__':
rd = HamshahriReader(config.corpora_root)
docs = [doc for doc in rd.docs()]
all_words = []
for doc in docs:
all_words.extend(word_tokenize(doc['text']))
dist = nltk.FreqDist(word for word in all_words)
word_features = dist.keys()[:200]
features_set = [(doc_features(doc, word_features), doc['categories_en'][0]) for doc in docs]
train_set, test_set = features_set[:40], features_set[40:80]
classifier = nltk.NaiveBayesClassifier.train(train_set)
print nltk.classify.accuracy(classifier, test_set)
classifier.show_most_informative_features(5)
Add normalization and stemming, use fine grained selection of words.
|
# -*- coding: utf-8 -*-
import nltk
from hazm import Normalizer, Stemmer, word_tokenize
from hazm.HamshahriReader import HamshahriReader
import config
def doc_features(doc, dist_words):
    """Return a {'contains(<word>)': bool} feature map for *doc*.

    Membership is tested against the document's pre-tokenized word
    list stored under doc['words'].
    """
    present = set(doc['words'])
    return {'contains(%s)' % word: word in present for word in dist_words}
if __name__ == '__main__':
rd = HamshahriReader(config.corpora_root)
#docs = [doc for doc in rd.docs()]
docs = []
normalizer = Normalizer()
stemmer = Stemmer()
for doc in rd.docs():
doc['text'] = normalizer.normalize(doc['text'])
doc['words'] = [stemmer.stem(word) for word in word_tokenize(doc['text'])]
docs.append(doc)
all_words = []
for doc in docs:
all_words.extend(doc['words'])
dist = nltk.FreqDist(word for word in all_words)
word_features = [word for word in set(all_words) if len(word) > 4 and dist[word] > 10]
features_set = [(doc_features(doc, word_features), doc['categories_en'][0]) for doc in docs]
train_set, test_set = features_set[:60], features_set[60:100]
classifier = nltk.NaiveBayesClassifier.train(train_set)
print nltk.classify.accuracy(classifier, test_set)
classifier.show_most_informative_features(10)
|
<commit_before># -*- coding: utf-8 -*-
import nltk
from hazm import word_tokenize
from hazm.HamshahriReader import HamshahriReader
import config
def doc_features(doc, dist_words):
words_set = set(word_tokenize(doc['text']))
features = {}
for word in dist_words:
features['contains(%s)' % word] = (word in words_set)
return features
if __name__ == '__main__':
rd = HamshahriReader(config.corpora_root)
docs = [doc for doc in rd.docs()]
all_words = []
for doc in docs:
all_words.extend(word_tokenize(doc['text']))
dist = nltk.FreqDist(word for word in all_words)
word_features = dist.keys()[:200]
features_set = [(doc_features(doc, word_features), doc['categories_en'][0]) for doc in docs]
train_set, test_set = features_set[:40], features_set[40:80]
classifier = nltk.NaiveBayesClassifier.train(train_set)
print nltk.classify.accuracy(classifier, test_set)
classifier.show_most_informative_features(5)
<commit_msg>Add normalization and stemming, use fine grained selection of words.<commit_after>
|
# -*- coding: utf-8 -*-
import nltk
from hazm import Normalizer, Stemmer, word_tokenize
from hazm.HamshahriReader import HamshahriReader
import config
def doc_features(doc, dist_words):
words_set = set(doc['words'])
features = {}
for word in dist_words:
features['contains(%s)' % word] = (word in words_set)
return features
if __name__ == '__main__':
rd = HamshahriReader(config.corpora_root)
#docs = [doc for doc in rd.docs()]
docs = []
normalizer = Normalizer()
stemmer = Stemmer()
for doc in rd.docs():
doc['text'] = normalizer.normalize(doc['text'])
doc['words'] = [stemmer.stem(word) for word in word_tokenize(doc['text'])]
docs.append(doc)
all_words = []
for doc in docs:
all_words.extend(doc['words'])
dist = nltk.FreqDist(word for word in all_words)
word_features = [word for word in set(all_words) if len(word) > 4 and dist[word] > 10]
features_set = [(doc_features(doc, word_features), doc['categories_en'][0]) for doc in docs]
train_set, test_set = features_set[:60], features_set[60:100]
classifier = nltk.NaiveBayesClassifier.train(train_set)
print nltk.classify.accuracy(classifier, test_set)
classifier.show_most_informative_features(10)
|
# -*- coding: utf-8 -*-
import nltk
from hazm import word_tokenize
from hazm.HamshahriReader import HamshahriReader
import config
def doc_features(doc, dist_words):
words_set = set(word_tokenize(doc['text']))
features = {}
for word in dist_words:
features['contains(%s)' % word] = (word in words_set)
return features
if __name__ == '__main__':
rd = HamshahriReader(config.corpora_root)
docs = [doc for doc in rd.docs()]
all_words = []
for doc in docs:
all_words.extend(word_tokenize(doc['text']))
dist = nltk.FreqDist(word for word in all_words)
word_features = dist.keys()[:200]
features_set = [(doc_features(doc, word_features), doc['categories_en'][0]) for doc in docs]
train_set, test_set = features_set[:40], features_set[40:80]
classifier = nltk.NaiveBayesClassifier.train(train_set)
print nltk.classify.accuracy(classifier, test_set)
classifier.show_most_informative_features(5)
Add normalization and stemming, use fine grained selection of words.# -*- coding: utf-8 -*-
import nltk
from hazm import Normalizer, Stemmer, word_tokenize
from hazm.HamshahriReader import HamshahriReader
import config
def doc_features(doc, dist_words):
words_set = set(doc['words'])
features = {}
for word in dist_words:
features['contains(%s)' % word] = (word in words_set)
return features
if __name__ == '__main__':
rd = HamshahriReader(config.corpora_root)
#docs = [doc for doc in rd.docs()]
docs = []
normalizer = Normalizer()
stemmer = Stemmer()
for doc in rd.docs():
doc['text'] = normalizer.normalize(doc['text'])
doc['words'] = [stemmer.stem(word) for word in word_tokenize(doc['text'])]
docs.append(doc)
all_words = []
for doc in docs:
all_words.extend(doc['words'])
dist = nltk.FreqDist(word for word in all_words)
word_features = [word for word in set(all_words) if len(word) > 4 and dist[word] > 10]
features_set = [(doc_features(doc, word_features), doc['categories_en'][0]) for doc in docs]
train_set, test_set = features_set[:60], features_set[60:100]
classifier = nltk.NaiveBayesClassifier.train(train_set)
print nltk.classify.accuracy(classifier, test_set)
classifier.show_most_informative_features(10)
|
<commit_before># -*- coding: utf-8 -*-
import nltk
from hazm import word_tokenize
from hazm.HamshahriReader import HamshahriReader
import config
def doc_features(doc, dist_words):
words_set = set(word_tokenize(doc['text']))
features = {}
for word in dist_words:
features['contains(%s)' % word] = (word in words_set)
return features
if __name__ == '__main__':
rd = HamshahriReader(config.corpora_root)
docs = [doc for doc in rd.docs()]
all_words = []
for doc in docs:
all_words.extend(word_tokenize(doc['text']))
dist = nltk.FreqDist(word for word in all_words)
word_features = dist.keys()[:200]
features_set = [(doc_features(doc, word_features), doc['categories_en'][0]) for doc in docs]
train_set, test_set = features_set[:40], features_set[40:80]
classifier = nltk.NaiveBayesClassifier.train(train_set)
print nltk.classify.accuracy(classifier, test_set)
classifier.show_most_informative_features(5)
<commit_msg>Add normalization and stemming, use fine grained selection of words.<commit_after># -*- coding: utf-8 -*-
import nltk
from hazm import Normalizer, Stemmer, word_tokenize
from hazm.HamshahriReader import HamshahriReader
import config
def doc_features(doc, dist_words):
words_set = set(doc['words'])
features = {}
for word in dist_words:
features['contains(%s)' % word] = (word in words_set)
return features
if __name__ == '__main__':
rd = HamshahriReader(config.corpora_root)
#docs = [doc for doc in rd.docs()]
docs = []
normalizer = Normalizer()
stemmer = Stemmer()
for doc in rd.docs():
doc['text'] = normalizer.normalize(doc['text'])
doc['words'] = [stemmer.stem(word) for word in word_tokenize(doc['text'])]
docs.append(doc)
all_words = []
for doc in docs:
all_words.extend(doc['words'])
dist = nltk.FreqDist(word for word in all_words)
word_features = [word for word in set(all_words) if len(word) > 4 and dist[word] > 10]
features_set = [(doc_features(doc, word_features), doc['categories_en'][0]) for doc in docs]
train_set, test_set = features_set[:60], features_set[60:100]
classifier = nltk.NaiveBayesClassifier.train(train_set)
print nltk.classify.accuracy(classifier, test_set)
classifier.show_most_informative_features(10)
|
90633f6aa401b40c6d94e623bac4268f752db430
|
flask_dynstatic.py
|
flask_dynstatic.py
|
__author__ = 'mkaplenko'
views = []
def to_static_html(func):
    """Decorator that records *func* in the module-level ``views`` list.

    Registration happens lazily on the first call of the wrapped view,
    not at decoration time; the view's return value is passed through
    unchanged.
    """
    def wrapper(*args, **kwargs):
        # Register each view only once, the first time it is invoked.
        if func not in views:
            views.append(func)
        print views  # debug trace (Python 2 print statement)
        return func(*args, **kwargs)
    return wrapper
|
from functools import wraps
import os
static_root = os.path.join(os.path.dirname(__file__), 'static')
views = []
def to_static_html(path):
    """Decorator factory: register a view to be rendered to *path*.

    The (path, view) pair is appended to the module-level ``views`` list
    at decoration time; construct_html() later writes each view's output
    to its path. The decorated function itself behaves unchanged.
    """
    def decorator(func):
        # Bug fix: ``views`` stores (path, func) tuples, so the previous
        # guard ``func not in views`` was always true and allowed
        # duplicate registrations; compare against the stored tuple.
        if (path, func) not in views:
            views.append((path, func))

        @wraps(func)
        def wrapper(*args, **kwargs):
            return func(*args, **kwargs)

        return wrapper
    return decorator
def construct_html():
    """Render every registered view and write its output to a static file.

    Each entry in ``views`` is a (relative_path, callable) pair; the
    callable is expected to return the HTML string to write. Assumes
    ``static_root`` exists and is writable.
    """
    for view in views:
        # view[0] is the path relative to static_root, view[1] the view callable.
        with open(os.path.join(static_root, view[0]), 'w') as html_file:
            html_file.write(view[1]())
|
Add to static_html decorator code
|
Add to static_html decorator code
|
Python
|
bsd-3-clause
|
mkaplenko/flask-dynstatic
|
__author__ = 'mkaplenko'
views = []
def to_static_html(func):
def wrapper(*args, **kwargs):
if func not in views:
views.append(func)
print views
return func(*args, **kwargs)
return wrapper
Add to static_html decorator code
|
from functools import wraps
import os
static_root = os.path.join(os.path.dirname(__file__), 'static')
views = []
def to_static_html(path):
def decorator(func):
if func not in views:
views.append((path, func))
@wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
return wrapper
return decorator
def construct_html():
for view in views:
with open(os.path.join(static_root, view[0]), 'w') as html_file:
html_file.write(view[1]())
|
<commit_before>__author__ = 'mkaplenko'
views = []
def to_static_html(func):
def wrapper(*args, **kwargs):
if func not in views:
views.append(func)
print views
return func(*args, **kwargs)
return wrapper
<commit_msg>Add to static_html decorator code<commit_after>
|
from functools import wraps
import os
static_root = os.path.join(os.path.dirname(__file__), 'static')
views = []
def to_static_html(path):
def decorator(func):
if func not in views:
views.append((path, func))
@wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
return wrapper
return decorator
def construct_html():
for view in views:
with open(os.path.join(static_root, view[0]), 'w') as html_file:
html_file.write(view[1]())
|
__author__ = 'mkaplenko'
views = []
def to_static_html(func):
def wrapper(*args, **kwargs):
if func not in views:
views.append(func)
print views
return func(*args, **kwargs)
return wrapper
Add to static_html decorator codefrom functools import wraps
import os
static_root = os.path.join(os.path.dirname(__file__), 'static')
views = []
def to_static_html(path):
def decorator(func):
if func not in views:
views.append((path, func))
@wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
return wrapper
return decorator
def construct_html():
for view in views:
with open(os.path.join(static_root, view[0]), 'w') as html_file:
html_file.write(view[1]())
|
<commit_before>__author__ = 'mkaplenko'
views = []
def to_static_html(func):
def wrapper(*args, **kwargs):
if func not in views:
views.append(func)
print views
return func(*args, **kwargs)
return wrapper
<commit_msg>Add to static_html decorator code<commit_after>from functools import wraps
import os
static_root = os.path.join(os.path.dirname(__file__), 'static')
views = []
def to_static_html(path):
def decorator(func):
if func not in views:
views.append((path, func))
@wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
return wrapper
return decorator
def construct_html():
for view in views:
with open(os.path.join(static_root, view[0]), 'w') as html_file:
html_file.write(view[1]())
|
eec8f84aa12d692c8e042ac00eaca39faefb96f6
|
armstrong/core/arm_sections/backends.py
|
armstrong/core/arm_sections/backends.py
|
from django.db.models import Q
from .utils import get_section_relations, get_item_model_class
class ItemFilter(object):
    """Callable that collects every item living in a section's subtree.

    ``find_related_models(section)`` returns a queryset of all item-model
    instances related to *section* or any of its descendant sections.
    """

    # Name of the manager attribute to query on the item model.
    manager_attr = 'objects'

    def get_manager(self, model):
        """Return the desired manager for the item model."""
        return getattr(model, self.manager_attr)

    def get_section_relations(self, section):
        """Look up the relations linking the item model to sections."""
        return get_section_relations(section.__class__)

    def filter_objects_by_section(self, rels, section):
        """Build a queryset containing all objects in the section subtree."""
        descendants = section.get_descendants(include_self=True)
        clauses = [Q(**{'%s__in' % rel.field.name: descendants})
                   for rel in rels]
        # OR the per-relation clauses together into a single filter.
        combined = clauses[0]
        for clause in clauses[1:]:
            combined |= clause
        return self.get_manager(get_item_model_class()).filter(combined)

    def process_items(self, items):
        """
        Perform extra actions on the filtered items.

        Example: Further filtering the items in the section to meet a
        custom need.
        """
        if hasattr(items, 'select_subclasses'):
            items = items.select_subclasses()
        return items

    def __call__(self, section):
        rels = self.get_section_relations(section)
        matched = self.filter_objects_by_section(rels, section)
        return self.process_items(matched)


find_related_models = ItemFilter()
|
from django.db.models import Q
from .utils import get_section_relations, get_item_model_class
class ItemFilter(object):
manager_attr = 'objects'
def get_manager(self, model):
"""Return the desired manager for the item model."""
return getattr(model, self.manager_attr)
def get_section_relations(self, section):
return get_section_relations(section.__class__)
def filter_objects_by_section(self, rels, section):
"""Build a queryset containing all objects in the section subtree."""
subtree = section.get_descendants(include_self=True)
kwargs_list = [{'%s__in' % rel.field.name: subtree} for rel in rels]
q = Q(**kwargs_list[0])
for kwargs in kwargs_list[1:]:
q |= Q(**kwargs)
return self.get_manager(get_item_model_class()).filter(q).distinct()
def process_items(self, items):
"""
Perform extra actions on the filtered items.
Example: Further filtering the items in the section to meet a
custom need.
"""
if hasattr(items, 'select_subclasses'):
items = items.select_subclasses()
return items
def __call__(self, section):
relations = self.get_section_relations(section)
items = self.filter_objects_by_section(relations, section)
return self.process_items(items)
find_related_models = ItemFilter()
|
Use distinct() when getting section items
|
Use distinct() when getting section items
|
Python
|
apache-2.0
|
texastribune/armstrong.core.tt_sections,texastribune/armstrong.core.tt_sections,armstrong/armstrong.core.arm_sections,armstrong/armstrong.core.arm_sections,texastribune/armstrong.core.tt_sections
|
from django.db.models import Q
from .utils import get_section_relations, get_item_model_class
class ItemFilter(object):
manager_attr = 'objects'
def get_manager(self, model):
"""Return the desired manager for the item model."""
return getattr(model, self.manager_attr)
def get_section_relations(self, section):
return get_section_relations(section.__class__)
def filter_objects_by_section(self, rels, section):
"""Build a queryset containing all objects in the section subtree."""
subtree = section.get_descendants(include_self=True)
kwargs_list = [{'%s__in' % rel.field.name: subtree} for rel in rels]
q = Q(**kwargs_list[0])
for kwargs in kwargs_list[1:]:
q |= Q(**kwargs)
return self.get_manager(get_item_model_class()).filter(q)
def process_items(self, items):
"""
Perform extra actions on the filtered items.
Example: Further filtering the items in the section to meet a
custom need.
"""
if hasattr(items, 'select_subclasses'):
items = items.select_subclasses()
return items
def __call__(self, section):
relations = self.get_section_relations(section)
items = self.filter_objects_by_section(relations, section)
return self.process_items(items)
find_related_models = ItemFilter()
Use distinct() when getting section items
|
from django.db.models import Q
from .utils import get_section_relations, get_item_model_class
class ItemFilter(object):
manager_attr = 'objects'
def get_manager(self, model):
"""Return the desired manager for the item model."""
return getattr(model, self.manager_attr)
def get_section_relations(self, section):
return get_section_relations(section.__class__)
def filter_objects_by_section(self, rels, section):
"""Build a queryset containing all objects in the section subtree."""
subtree = section.get_descendants(include_self=True)
kwargs_list = [{'%s__in' % rel.field.name: subtree} for rel in rels]
q = Q(**kwargs_list[0])
for kwargs in kwargs_list[1:]:
q |= Q(**kwargs)
return self.get_manager(get_item_model_class()).filter(q).distinct()
def process_items(self, items):
"""
Perform extra actions on the filtered items.
Example: Further filtering the items in the section to meet a
custom need.
"""
if hasattr(items, 'select_subclasses'):
items = items.select_subclasses()
return items
def __call__(self, section):
relations = self.get_section_relations(section)
items = self.filter_objects_by_section(relations, section)
return self.process_items(items)
find_related_models = ItemFilter()
|
<commit_before>from django.db.models import Q
from .utils import get_section_relations, get_item_model_class
class ItemFilter(object):
manager_attr = 'objects'
def get_manager(self, model):
"""Return the desired manager for the item model."""
return getattr(model, self.manager_attr)
def get_section_relations(self, section):
return get_section_relations(section.__class__)
def filter_objects_by_section(self, rels, section):
"""Build a queryset containing all objects in the section subtree."""
subtree = section.get_descendants(include_self=True)
kwargs_list = [{'%s__in' % rel.field.name: subtree} for rel in rels]
q = Q(**kwargs_list[0])
for kwargs in kwargs_list[1:]:
q |= Q(**kwargs)
return self.get_manager(get_item_model_class()).filter(q)
def process_items(self, items):
"""
Perform extra actions on the filtered items.
Example: Further filtering the items in the section to meet a
custom need.
"""
if hasattr(items, 'select_subclasses'):
items = items.select_subclasses()
return items
def __call__(self, section):
relations = self.get_section_relations(section)
items = self.filter_objects_by_section(relations, section)
return self.process_items(items)
find_related_models = ItemFilter()
<commit_msg>Use distinct() when getting section items<commit_after>
|
from django.db.models import Q
from .utils import get_section_relations, get_item_model_class
class ItemFilter(object):
manager_attr = 'objects'
def get_manager(self, model):
"""Return the desired manager for the item model."""
return getattr(model, self.manager_attr)
def get_section_relations(self, section):
return get_section_relations(section.__class__)
def filter_objects_by_section(self, rels, section):
"""Build a queryset containing all objects in the section subtree."""
subtree = section.get_descendants(include_self=True)
kwargs_list = [{'%s__in' % rel.field.name: subtree} for rel in rels]
q = Q(**kwargs_list[0])
for kwargs in kwargs_list[1:]:
q |= Q(**kwargs)
return self.get_manager(get_item_model_class()).filter(q).distinct()
def process_items(self, items):
"""
Perform extra actions on the filtered items.
Example: Further filtering the items in the section to meet a
custom need.
"""
if hasattr(items, 'select_subclasses'):
items = items.select_subclasses()
return items
def __call__(self, section):
relations = self.get_section_relations(section)
items = self.filter_objects_by_section(relations, section)
return self.process_items(items)
find_related_models = ItemFilter()
|
from django.db.models import Q
from .utils import get_section_relations, get_item_model_class
class ItemFilter(object):
manager_attr = 'objects'
def get_manager(self, model):
"""Return the desired manager for the item model."""
return getattr(model, self.manager_attr)
def get_section_relations(self, section):
return get_section_relations(section.__class__)
def filter_objects_by_section(self, rels, section):
"""Build a queryset containing all objects in the section subtree."""
subtree = section.get_descendants(include_self=True)
kwargs_list = [{'%s__in' % rel.field.name: subtree} for rel in rels]
q = Q(**kwargs_list[0])
for kwargs in kwargs_list[1:]:
q |= Q(**kwargs)
return self.get_manager(get_item_model_class()).filter(q)
def process_items(self, items):
"""
Perform extra actions on the filtered items.
Example: Further filtering the items in the section to meet a
custom need.
"""
if hasattr(items, 'select_subclasses'):
items = items.select_subclasses()
return items
def __call__(self, section):
relations = self.get_section_relations(section)
items = self.filter_objects_by_section(relations, section)
return self.process_items(items)
find_related_models = ItemFilter()
Use distinct() when getting section itemsfrom django.db.models import Q
from .utils import get_section_relations, get_item_model_class
class ItemFilter(object):
manager_attr = 'objects'
def get_manager(self, model):
"""Return the desired manager for the item model."""
return getattr(model, self.manager_attr)
def get_section_relations(self, section):
return get_section_relations(section.__class__)
def filter_objects_by_section(self, rels, section):
"""Build a queryset containing all objects in the section subtree."""
subtree = section.get_descendants(include_self=True)
kwargs_list = [{'%s__in' % rel.field.name: subtree} for rel in rels]
q = Q(**kwargs_list[0])
for kwargs in kwargs_list[1:]:
q |= Q(**kwargs)
return self.get_manager(get_item_model_class()).filter(q).distinct()
def process_items(self, items):
"""
Perform extra actions on the filtered items.
Example: Further filtering the items in the section to meet a
custom need.
"""
if hasattr(items, 'select_subclasses'):
items = items.select_subclasses()
return items
def __call__(self, section):
relations = self.get_section_relations(section)
items = self.filter_objects_by_section(relations, section)
return self.process_items(items)
find_related_models = ItemFilter()
|
<commit_before>from django.db.models import Q
from .utils import get_section_relations, get_item_model_class
class ItemFilter(object):
manager_attr = 'objects'
def get_manager(self, model):
"""Return the desired manager for the item model."""
return getattr(model, self.manager_attr)
def get_section_relations(self, section):
return get_section_relations(section.__class__)
def filter_objects_by_section(self, rels, section):
"""Build a queryset containing all objects in the section subtree."""
subtree = section.get_descendants(include_self=True)
kwargs_list = [{'%s__in' % rel.field.name: subtree} for rel in rels]
q = Q(**kwargs_list[0])
for kwargs in kwargs_list[1:]:
q |= Q(**kwargs)
return self.get_manager(get_item_model_class()).filter(q)
def process_items(self, items):
"""
Perform extra actions on the filtered items.
Example: Further filtering the items in the section to meet a
custom need.
"""
if hasattr(items, 'select_subclasses'):
items = items.select_subclasses()
return items
def __call__(self, section):
relations = self.get_section_relations(section)
items = self.filter_objects_by_section(relations, section)
return self.process_items(items)
find_related_models = ItemFilter()
<commit_msg>Use distinct() when getting section items<commit_after>from django.db.models import Q
from .utils import get_section_relations, get_item_model_class
class ItemFilter(object):
manager_attr = 'objects'
def get_manager(self, model):
"""Return the desired manager for the item model."""
return getattr(model, self.manager_attr)
def get_section_relations(self, section):
return get_section_relations(section.__class__)
def filter_objects_by_section(self, rels, section):
"""Build a queryset containing all objects in the section subtree."""
subtree = section.get_descendants(include_self=True)
kwargs_list = [{'%s__in' % rel.field.name: subtree} for rel in rels]
q = Q(**kwargs_list[0])
for kwargs in kwargs_list[1:]:
q |= Q(**kwargs)
return self.get_manager(get_item_model_class()).filter(q).distinct()
def process_items(self, items):
"""
Perform extra actions on the filtered items.
Example: Further filtering the items in the section to meet a
custom need.
"""
if hasattr(items, 'select_subclasses'):
items = items.select_subclasses()
return items
def __call__(self, section):
relations = self.get_section_relations(section)
items = self.filter_objects_by_section(relations, section)
return self.process_items(items)
find_related_models = ItemFilter()
|
12519845ea1e74276261a5fb4f6b07fe3fb2f82c
|
exploratory_analysis/console.py
|
exploratory_analysis/console.py
|
import os
from utils import Reader
import code
if __name__ == '__main__':
working_directory = os.getcwd()
files = Reader.read_directory(working_directory)
print '{} available data files'.format(len(files))
code.interact(local=dict(globals(), **locals()))
|
import os
from utils import Reader
import code
import sys
if __name__ == '__main__':
# coding=utf-8
reload(sys)
sys.setdefaultencoding('utf-8')
working_directory = os.getcwd()
files = Reader.read_directory(working_directory)
print '{} available data files'.format(len(files))
code.interact(local=dict(globals(), **locals()))
|
Set default encoding to utf-8
|
Set default encoding to utf-8
|
Python
|
apache-2.0
|
chuajiesheng/twitter-sentiment-analysis
|
import os
from utils import Reader
import code
if __name__ == '__main__':
working_directory = os.getcwd()
files = Reader.read_directory(working_directory)
print '{} available data files'.format(len(files))
code.interact(local=dict(globals(), **locals()))
Set default encoding to utf-8
|
import os
from utils import Reader
import code
import sys
if __name__ == '__main__':
# coding=utf-8
reload(sys)
sys.setdefaultencoding('utf-8')
working_directory = os.getcwd()
files = Reader.read_directory(working_directory)
print '{} available data files'.format(len(files))
code.interact(local=dict(globals(), **locals()))
|
<commit_before>import os
from utils import Reader
import code
if __name__ == '__main__':
working_directory = os.getcwd()
files = Reader.read_directory(working_directory)
print '{} available data files'.format(len(files))
code.interact(local=dict(globals(), **locals()))
<commit_msg>Set default encoding to utf-8<commit_after>
|
import os
from utils import Reader
import code
import sys
if __name__ == '__main__':
# coding=utf-8
reload(sys)
sys.setdefaultencoding('utf-8')
working_directory = os.getcwd()
files = Reader.read_directory(working_directory)
print '{} available data files'.format(len(files))
code.interact(local=dict(globals(), **locals()))
|
import os
from utils import Reader
import code
if __name__ == '__main__':
working_directory = os.getcwd()
files = Reader.read_directory(working_directory)
print '{} available data files'.format(len(files))
code.interact(local=dict(globals(), **locals()))
Set default encoding to utf-8import os
from utils import Reader
import code
import sys
if __name__ == '__main__':
# coding=utf-8
reload(sys)
sys.setdefaultencoding('utf-8')
working_directory = os.getcwd()
files = Reader.read_directory(working_directory)
print '{} available data files'.format(len(files))
code.interact(local=dict(globals(), **locals()))
|
<commit_before>import os
from utils import Reader
import code
if __name__ == '__main__':
working_directory = os.getcwd()
files = Reader.read_directory(working_directory)
print '{} available data files'.format(len(files))
code.interact(local=dict(globals(), **locals()))
<commit_msg>Set default encoding to utf-8<commit_after>import os
from utils import Reader
import code
import sys
if __name__ == '__main__':
# coding=utf-8
reload(sys)
sys.setdefaultencoding('utf-8')
working_directory = os.getcwd()
files = Reader.read_directory(working_directory)
print '{} available data files'.format(len(files))
code.interact(local=dict(globals(), **locals()))
|
6273ec6444577d756c91da2a9ab97fba3802d03a
|
src/build_osx.py
|
src/build_osx.py
|
from setuptools import setup
name = "Bitmessage"
version = "0.4.2"
mainscript = ["bitmessagemain.py"]
setup(
name = name,
version = version,
app = mainscript,
setup_requires = ["py2app"],
options = dict(
py2app = dict(
resources = ["images", "translations"],
includes = ['sip', 'PyQt4._qt'],
iconfile = "images/bitmessage.icns"
)
)
)
|
from setuptools import setup
name = "Bitmessage"
version = "0.4.3"
mainscript = ["bitmessagemain.py"]
setup(
name = name,
version = version,
app = mainscript,
setup_requires = ["py2app"],
options = dict(
py2app = dict(
resources = ["images", "translations"],
includes = ['sip', 'PyQt4._qt'],
iconfile = "images/bitmessage.icns"
)
)
)
|
Increment version number to 0.4.3
|
Increment version number to 0.4.3
|
Python
|
mit
|
metamarcdw/PyBitmessage-I2P,lightrabbit/PyBitmessage,Atheros1/PyBitmessage,hb9kns/PyBitmessage,torifier/PyBitmessage,debguy0x/PyBitmessage,lightrabbit/PyBitmessage,krytarowski/PyBitmessage,debguy0x/PyBitmessage,timothyparez/PyBitmessage,debguy0x/PyBitmessage,timothyparez/PyBitmessage,krytarowski/PyBitmessage,gnu3ra/PyBitmessage,bmng-dev/PyBitmessage,timothyparez/PyBitmessage,torifier/PyBitmessage,mailchuck/PyBitmessage,Atheros1/PyBitmessage,rzr/PyBitmessage,Erkan-Yilmaz/PyBitmessage,kyucrane/PyBitmessage,JosephGoulden/PyBitmessageF2F,kyucrane/PyBitmessage,Atheros1/PyBitmessage,xeddmc/PyBitmessage,torifier/PyBitmessage,metamarcdw/PyBitmessage-I2P,bmng-dev/PyBitmessage,lightrabbit/PyBitmessage,JosephGoulden/PyBitmessageF2F,hb9kns/PyBitmessage,xeddmc/PyBitmessage,gnu3ra/PyBitmessage,hb9kns/PyBitmessage,bmng-dev/PyBitmessage,mailchuck/PyBitmessage,timothyparez/PyBitmessage,lightrabbit/PyBitmessage,rzr/PyBitmessage,Erkan-Yilmaz/PyBitmessage,Atheros1/PyBitmessage,torifier/PyBitmessage,mailchuck/PyBitmessage,mailchuck/PyBitmessage,debguy0x/PyBitmessage,hb9kns/PyBitmessage
|
from setuptools import setup
name = "Bitmessage"
version = "0.4.2"
mainscript = ["bitmessagemain.py"]
setup(
name = name,
version = version,
app = mainscript,
setup_requires = ["py2app"],
options = dict(
py2app = dict(
resources = ["images", "translations"],
includes = ['sip', 'PyQt4._qt'],
iconfile = "images/bitmessage.icns"
)
)
)
Increment version number to 0.4.3
|
from setuptools import setup
name = "Bitmessage"
version = "0.4.3"
mainscript = ["bitmessagemain.py"]
setup(
name = name,
version = version,
app = mainscript,
setup_requires = ["py2app"],
options = dict(
py2app = dict(
resources = ["images", "translations"],
includes = ['sip', 'PyQt4._qt'],
iconfile = "images/bitmessage.icns"
)
)
)
|
<commit_before>from setuptools import setup
name = "Bitmessage"
version = "0.4.2"
mainscript = ["bitmessagemain.py"]
setup(
name = name,
version = version,
app = mainscript,
setup_requires = ["py2app"],
options = dict(
py2app = dict(
resources = ["images", "translations"],
includes = ['sip', 'PyQt4._qt'],
iconfile = "images/bitmessage.icns"
)
)
)
<commit_msg>Increment version number to 0.4.3<commit_after>
|
from setuptools import setup
name = "Bitmessage"
version = "0.4.3"
mainscript = ["bitmessagemain.py"]
setup(
name = name,
version = version,
app = mainscript,
setup_requires = ["py2app"],
options = dict(
py2app = dict(
resources = ["images", "translations"],
includes = ['sip', 'PyQt4._qt'],
iconfile = "images/bitmessage.icns"
)
)
)
|
from setuptools import setup
name = "Bitmessage"
version = "0.4.2"
mainscript = ["bitmessagemain.py"]
setup(
name = name,
version = version,
app = mainscript,
setup_requires = ["py2app"],
options = dict(
py2app = dict(
resources = ["images", "translations"],
includes = ['sip', 'PyQt4._qt'],
iconfile = "images/bitmessage.icns"
)
)
)
Increment version number to 0.4.3from setuptools import setup
name = "Bitmessage"
version = "0.4.3"
mainscript = ["bitmessagemain.py"]
setup(
name = name,
version = version,
app = mainscript,
setup_requires = ["py2app"],
options = dict(
py2app = dict(
resources = ["images", "translations"],
includes = ['sip', 'PyQt4._qt'],
iconfile = "images/bitmessage.icns"
)
)
)
|
<commit_before>from setuptools import setup
name = "Bitmessage"
version = "0.4.2"
mainscript = ["bitmessagemain.py"]
setup(
name = name,
version = version,
app = mainscript,
setup_requires = ["py2app"],
options = dict(
py2app = dict(
resources = ["images", "translations"],
includes = ['sip', 'PyQt4._qt'],
iconfile = "images/bitmessage.icns"
)
)
)
<commit_msg>Increment version number to 0.4.3<commit_after>from setuptools import setup
name = "Bitmessage"
version = "0.4.3"
mainscript = ["bitmessagemain.py"]
setup(
name = name,
version = version,
app = mainscript,
setup_requires = ["py2app"],
options = dict(
py2app = dict(
resources = ["images", "translations"],
includes = ['sip', 'PyQt4._qt'],
iconfile = "images/bitmessage.icns"
)
)
)
|
9c87a6889a0dc4ecf0c1034243c344022b2e32bf
|
redis_sessions/settings.py
|
redis_sessions/settings.py
|
from django.conf import settings
SESSION_REDIS_HOST = getattr(settings, 'SESSION_REDIS_HOST', 'localhost')
SESSION_REDIS_PORT = getattr(settings, 'SESSION_REDIS_PORT', 6379)
SESSION_REDIS_DB = getattr(settings, 'SESSION_REDIS_DB', 0)
SESSION_REDIS_PREFIX = getattr(settings, 'SESSION_REDIS_PREFIX', 'session')
SESSION_REDIS_PASSWORD = getattr(settings, 'SESSION_REDIS_PASSWORD', None)
SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH = getattr(
settings, 'SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH', None
)
|
from django.conf import settings
SESSION_REDIS_HOST = getattr(settings, 'SESSION_REDIS_HOST', 'localhost')
SESSION_REDIS_PORT = getattr(settings, 'SESSION_REDIS_PORT', 6379)
SESSION_REDIS_DB = getattr(settings, 'SESSION_REDIS_DB', 0)
SESSION_REDIS_PREFIX = getattr(settings, 'SESSION_REDIS_PREFIX', '')
SESSION_REDIS_PASSWORD = getattr(settings, 'SESSION_REDIS_PASSWORD', None)
SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH = getattr(
settings, 'SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH', None
)
|
Fix redis prefix for existing sessions
|
Fix redis prefix for existing sessions
|
Python
|
bsd-3-clause
|
gamechanger/django-redis-sessions,martinrusev/django-redis-sessions,izquierdo/django-redis-sessions,ProDG/django-redis-sessions-fork,hellysmile/django-redis-sessions-fork,mbodock/django-redis-sessions
|
from django.conf import settings
SESSION_REDIS_HOST = getattr(settings, 'SESSION_REDIS_HOST', 'localhost')
SESSION_REDIS_PORT = getattr(settings, 'SESSION_REDIS_PORT', 6379)
SESSION_REDIS_DB = getattr(settings, 'SESSION_REDIS_DB', 0)
SESSION_REDIS_PREFIX = getattr(settings, 'SESSION_REDIS_PREFIX', 'session')
SESSION_REDIS_PASSWORD = getattr(settings, 'SESSION_REDIS_PASSWORD', None)
SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH = getattr(
settings, 'SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH', None
)
Fix redis prefix for existing sessions
|
from django.conf import settings
SESSION_REDIS_HOST = getattr(settings, 'SESSION_REDIS_HOST', 'localhost')
SESSION_REDIS_PORT = getattr(settings, 'SESSION_REDIS_PORT', 6379)
SESSION_REDIS_DB = getattr(settings, 'SESSION_REDIS_DB', 0)
SESSION_REDIS_PREFIX = getattr(settings, 'SESSION_REDIS_PREFIX', '')
SESSION_REDIS_PASSWORD = getattr(settings, 'SESSION_REDIS_PASSWORD', None)
SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH = getattr(
settings, 'SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH', None
)
|
<commit_before>from django.conf import settings
SESSION_REDIS_HOST = getattr(settings, 'SESSION_REDIS_HOST', 'localhost')
SESSION_REDIS_PORT = getattr(settings, 'SESSION_REDIS_PORT', 6379)
SESSION_REDIS_DB = getattr(settings, 'SESSION_REDIS_DB', 0)
SESSION_REDIS_PREFIX = getattr(settings, 'SESSION_REDIS_PREFIX', 'session')
SESSION_REDIS_PASSWORD = getattr(settings, 'SESSION_REDIS_PASSWORD', None)
SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH = getattr(
settings, 'SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH', None
)
<commit_msg>Fix redis prefix for existing sessions<commit_after>
|
from django.conf import settings
SESSION_REDIS_HOST = getattr(settings, 'SESSION_REDIS_HOST', 'localhost')
SESSION_REDIS_PORT = getattr(settings, 'SESSION_REDIS_PORT', 6379)
SESSION_REDIS_DB = getattr(settings, 'SESSION_REDIS_DB', 0)
SESSION_REDIS_PREFIX = getattr(settings, 'SESSION_REDIS_PREFIX', '')
SESSION_REDIS_PASSWORD = getattr(settings, 'SESSION_REDIS_PASSWORD', None)
SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH = getattr(
settings, 'SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH', None
)
|
from django.conf import settings
SESSION_REDIS_HOST = getattr(settings, 'SESSION_REDIS_HOST', 'localhost')
SESSION_REDIS_PORT = getattr(settings, 'SESSION_REDIS_PORT', 6379)
SESSION_REDIS_DB = getattr(settings, 'SESSION_REDIS_DB', 0)
SESSION_REDIS_PREFIX = getattr(settings, 'SESSION_REDIS_PREFIX', 'session')
SESSION_REDIS_PASSWORD = getattr(settings, 'SESSION_REDIS_PASSWORD', None)
SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH = getattr(
settings, 'SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH', None
)
Fix redis prefix for existing sessionsfrom django.conf import settings
SESSION_REDIS_HOST = getattr(settings, 'SESSION_REDIS_HOST', 'localhost')
SESSION_REDIS_PORT = getattr(settings, 'SESSION_REDIS_PORT', 6379)
SESSION_REDIS_DB = getattr(settings, 'SESSION_REDIS_DB', 0)
SESSION_REDIS_PREFIX = getattr(settings, 'SESSION_REDIS_PREFIX', '')
SESSION_REDIS_PASSWORD = getattr(settings, 'SESSION_REDIS_PASSWORD', None)
SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH = getattr(
settings, 'SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH', None
)
|
<commit_before>from django.conf import settings
SESSION_REDIS_HOST = getattr(settings, 'SESSION_REDIS_HOST', 'localhost')
SESSION_REDIS_PORT = getattr(settings, 'SESSION_REDIS_PORT', 6379)
SESSION_REDIS_DB = getattr(settings, 'SESSION_REDIS_DB', 0)
SESSION_REDIS_PREFIX = getattr(settings, 'SESSION_REDIS_PREFIX', 'session')
SESSION_REDIS_PASSWORD = getattr(settings, 'SESSION_REDIS_PASSWORD', None)
SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH = getattr(
settings, 'SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH', None
)
<commit_msg>Fix redis prefix for existing sessions<commit_after>from django.conf import settings
SESSION_REDIS_HOST = getattr(settings, 'SESSION_REDIS_HOST', 'localhost')
SESSION_REDIS_PORT = getattr(settings, 'SESSION_REDIS_PORT', 6379)
SESSION_REDIS_DB = getattr(settings, 'SESSION_REDIS_DB', 0)
SESSION_REDIS_PREFIX = getattr(settings, 'SESSION_REDIS_PREFIX', '')
SESSION_REDIS_PASSWORD = getattr(settings, 'SESSION_REDIS_PASSWORD', None)
SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH = getattr(
settings, 'SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH', None
)
|
c06ab929e1f7a55ddc0ed978939ea604cad003cb
|
hamper/plugins/roulette.py
|
hamper/plugins/roulette.py
|
import random, datetime
from hamper.interfaces import ChatCommandPlugin, Command
class Roulette(ChatCommandPlugin):
"""Feeling lucky? !roulette to see how lucky"""
name = 'roulette'
priority = 0
class Roulette(Command):
'''Try not to die'''
regex = r'^roulette$'
name = 'roulette'
short_desc = 'feeling lucky?'
long_desc = "See how lucky you are, just don't bleed everywhere"
def command(self, bot, comm, groups):
if comm['pm']:
return False
dice = random.randint(1,6)
if dice == 6:
bot.kick(comm["channel"], comm["user"], "You shot yourself!")
else:
bot.reply(comm, "*click*")
return True
roulette = Roulette()
|
import random
from hamper.interfaces import ChatCommandPlugin, Command
class Roulette(ChatCommandPlugin):
"""Feeling lucky? !roulette to see how lucky"""
name = 'roulette'
priority = 0
class Roulette(Command):
'''Try not to die'''
regex = r'^roulette$'
name = 'roulette'
short_desc = 'feeling lucky?'
long_desc = "See how lucky you are, just don't bleed everywhere"
def command(self, bot, comm, groups):
if comm['pm']:
return False
dice = random.randint(1, 6)
if dice == 6:
bot.kick(comm["channel"], comm["user"], "You shot yourself!")
else:
bot.reply(comm, "*click*")
return True
roulette = Roulette()
|
Revert "This should break the flakes8 check on Travis"
|
Revert "This should break the flakes8 check on Travis"
This reverts commit 91c3d6c30d75ce66228d52c74bf8a4d8e7628670.
|
Python
|
mit
|
hamperbot/hamper,maxking/hamper,iankronquist/hamper
|
import random, datetime
from hamper.interfaces import ChatCommandPlugin, Command
class Roulette(ChatCommandPlugin):
"""Feeling lucky? !roulette to see how lucky"""
name = 'roulette'
priority = 0
class Roulette(Command):
'''Try not to die'''
regex = r'^roulette$'
name = 'roulette'
short_desc = 'feeling lucky?'
long_desc = "See how lucky you are, just don't bleed everywhere"
def command(self, bot, comm, groups):
if comm['pm']:
return False
dice = random.randint(1,6)
if dice == 6:
bot.kick(comm["channel"], comm["user"], "You shot yourself!")
else:
bot.reply(comm, "*click*")
return True
roulette = Roulette()
Revert "This should break the flakes8 check on Travis"
This reverts commit 91c3d6c30d75ce66228d52c74bf8a4d8e7628670.
|
import random
from hamper.interfaces import ChatCommandPlugin, Command
class Roulette(ChatCommandPlugin):
"""Feeling lucky? !roulette to see how lucky"""
name = 'roulette'
priority = 0
class Roulette(Command):
'''Try not to die'''
regex = r'^roulette$'
name = 'roulette'
short_desc = 'feeling lucky?'
long_desc = "See how lucky you are, just don't bleed everywhere"
def command(self, bot, comm, groups):
if comm['pm']:
return False
dice = random.randint(1, 6)
if dice == 6:
bot.kick(comm["channel"], comm["user"], "You shot yourself!")
else:
bot.reply(comm, "*click*")
return True
roulette = Roulette()
|
<commit_before>import random, datetime
from hamper.interfaces import ChatCommandPlugin, Command
class Roulette(ChatCommandPlugin):
"""Feeling lucky? !roulette to see how lucky"""
name = 'roulette'
priority = 0
class Roulette(Command):
'''Try not to die'''
regex = r'^roulette$'
name = 'roulette'
short_desc = 'feeling lucky?'
long_desc = "See how lucky you are, just don't bleed everywhere"
def command(self, bot, comm, groups):
if comm['pm']:
return False
dice = random.randint(1,6)
if dice == 6:
bot.kick(comm["channel"], comm["user"], "You shot yourself!")
else:
bot.reply(comm, "*click*")
return True
roulette = Roulette()
<commit_msg>Revert "This should break the flakes8 check on Travis"
This reverts commit 91c3d6c30d75ce66228d52c74bf8a4d8e7628670.<commit_after>
|
import random
from hamper.interfaces import ChatCommandPlugin, Command
class Roulette(ChatCommandPlugin):
"""Feeling lucky? !roulette to see how lucky"""
name = 'roulette'
priority = 0
class Roulette(Command):
'''Try not to die'''
regex = r'^roulette$'
name = 'roulette'
short_desc = 'feeling lucky?'
long_desc = "See how lucky you are, just don't bleed everywhere"
def command(self, bot, comm, groups):
if comm['pm']:
return False
dice = random.randint(1, 6)
if dice == 6:
bot.kick(comm["channel"], comm["user"], "You shot yourself!")
else:
bot.reply(comm, "*click*")
return True
roulette = Roulette()
|
import random, datetime
from hamper.interfaces import ChatCommandPlugin, Command
class Roulette(ChatCommandPlugin):
"""Feeling lucky? !roulette to see how lucky"""
name = 'roulette'
priority = 0
class Roulette(Command):
'''Try not to die'''
regex = r'^roulette$'
name = 'roulette'
short_desc = 'feeling lucky?'
long_desc = "See how lucky you are, just don't bleed everywhere"
def command(self, bot, comm, groups):
if comm['pm']:
return False
dice = random.randint(1,6)
if dice == 6:
bot.kick(comm["channel"], comm["user"], "You shot yourself!")
else:
bot.reply(comm, "*click*")
return True
roulette = Roulette()
Revert "This should break the flakes8 check on Travis"
This reverts commit 91c3d6c30d75ce66228d52c74bf8a4d8e7628670.import random
from hamper.interfaces import ChatCommandPlugin, Command
class Roulette(ChatCommandPlugin):
"""Feeling lucky? !roulette to see how lucky"""
name = 'roulette'
priority = 0
class Roulette(Command):
'''Try not to die'''
regex = r'^roulette$'
name = 'roulette'
short_desc = 'feeling lucky?'
long_desc = "See how lucky you are, just don't bleed everywhere"
def command(self, bot, comm, groups):
if comm['pm']:
return False
dice = random.randint(1, 6)
if dice == 6:
bot.kick(comm["channel"], comm["user"], "You shot yourself!")
else:
bot.reply(comm, "*click*")
return True
roulette = Roulette()
|
<commit_before>import random, datetime
from hamper.interfaces import ChatCommandPlugin, Command
class Roulette(ChatCommandPlugin):
"""Feeling lucky? !roulette to see how lucky"""
name = 'roulette'
priority = 0
class Roulette(Command):
'''Try not to die'''
regex = r'^roulette$'
name = 'roulette'
short_desc = 'feeling lucky?'
long_desc = "See how lucky you are, just don't bleed everywhere"
def command(self, bot, comm, groups):
if comm['pm']:
return False
dice = random.randint(1,6)
if dice == 6:
bot.kick(comm["channel"], comm["user"], "You shot yourself!")
else:
bot.reply(comm, "*click*")
return True
roulette = Roulette()
<commit_msg>Revert "This should break the flakes8 check on Travis"
This reverts commit 91c3d6c30d75ce66228d52c74bf8a4d8e7628670.<commit_after>import random
from hamper.interfaces import ChatCommandPlugin, Command
class Roulette(ChatCommandPlugin):
"""Feeling lucky? !roulette to see how lucky"""
name = 'roulette'
priority = 0
class Roulette(Command):
'''Try not to die'''
regex = r'^roulette$'
name = 'roulette'
short_desc = 'feeling lucky?'
long_desc = "See how lucky you are, just don't bleed everywhere"
def command(self, bot, comm, groups):
if comm['pm']:
return False
dice = random.randint(1, 6)
if dice == 6:
bot.kick(comm["channel"], comm["user"], "You shot yourself!")
else:
bot.reply(comm, "*click*")
return True
roulette = Roulette()
|
4c1164e3816af43c3a10dc5a68c579ef9ae574f8
|
ooni/output.py
|
ooni/output.py
|
import yaml
class data:
def __init__(self, name=None):
if name:
self.name = name
def output(self, data, name=None):
if name:
self.name = name
stream = open(self.name, 'w')
yaml.dump(data, stream)
stream.close()
def append(self, data, name=None):
if name:
self.name = name
stream = open(self.name, 'a')
yaml.dump(data, stream)
stream.close()
|
import yaml
class data:
def __init__(self, name=None):
if name:
self.name = name
def output(self, data, name=None):
if name:
self.name = name
stream = open(self.name, 'w')
yaml.dump(data, stream)
stream.close()
def append(self, data, name=None):
if name:
self.name = name
stream = open(self.name, 'a')
yaml.dump([data], stream)
stream.close()
|
Fix error in ouputting system
|
Fix error in ouputting system
|
Python
|
bsd-2-clause
|
hackerberry/ooni-probe,juga0/ooni-probe,juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,0xPoly/ooni-probe,kdmurray91/ooni-probe,lordappsec/ooni-probe,juga0/ooni-probe,hackerberry/ooni-probe,lordappsec/ooni-probe,lordappsec/ooni-probe,kdmurray91/ooni-probe,0xPoly/ooni-probe,0xPoly/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,lordappsec/ooni-probe
|
import yaml
class data:
def __init__(self, name=None):
if name:
self.name = name
def output(self, data, name=None):
if name:
self.name = name
stream = open(self.name, 'w')
yaml.dump(data, stream)
stream.close()
def append(self, data, name=None):
if name:
self.name = name
stream = open(self.name, 'a')
yaml.dump(data, stream)
stream.close()
Fix error in ouputting system
|
import yaml
class data:
def __init__(self, name=None):
if name:
self.name = name
def output(self, data, name=None):
if name:
self.name = name
stream = open(self.name, 'w')
yaml.dump(data, stream)
stream.close()
def append(self, data, name=None):
if name:
self.name = name
stream = open(self.name, 'a')
yaml.dump([data], stream)
stream.close()
|
<commit_before>import yaml
class data:
def __init__(self, name=None):
if name:
self.name = name
def output(self, data, name=None):
if name:
self.name = name
stream = open(self.name, 'w')
yaml.dump(data, stream)
stream.close()
def append(self, data, name=None):
if name:
self.name = name
stream = open(self.name, 'a')
yaml.dump(data, stream)
stream.close()
<commit_msg>Fix error in ouputting system<commit_after>
|
import yaml
class data:
def __init__(self, name=None):
if name:
self.name = name
def output(self, data, name=None):
if name:
self.name = name
stream = open(self.name, 'w')
yaml.dump(data, stream)
stream.close()
def append(self, data, name=None):
if name:
self.name = name
stream = open(self.name, 'a')
yaml.dump([data], stream)
stream.close()
|
import yaml
class data:
def __init__(self, name=None):
if name:
self.name = name
def output(self, data, name=None):
if name:
self.name = name
stream = open(self.name, 'w')
yaml.dump(data, stream)
stream.close()
def append(self, data, name=None):
if name:
self.name = name
stream = open(self.name, 'a')
yaml.dump(data, stream)
stream.close()
Fix error in ouputting systemimport yaml
class data:
def __init__(self, name=None):
if name:
self.name = name
def output(self, data, name=None):
if name:
self.name = name
stream = open(self.name, 'w')
yaml.dump(data, stream)
stream.close()
def append(self, data, name=None):
if name:
self.name = name
stream = open(self.name, 'a')
yaml.dump([data], stream)
stream.close()
|
<commit_before>import yaml
class data:
def __init__(self, name=None):
if name:
self.name = name
def output(self, data, name=None):
if name:
self.name = name
stream = open(self.name, 'w')
yaml.dump(data, stream)
stream.close()
def append(self, data, name=None):
if name:
self.name = name
stream = open(self.name, 'a')
yaml.dump(data, stream)
stream.close()
<commit_msg>Fix error in ouputting system<commit_after>import yaml
class data:
def __init__(self, name=None):
if name:
self.name = name
def output(self, data, name=None):
if name:
self.name = name
stream = open(self.name, 'w')
yaml.dump(data, stream)
stream.close()
def append(self, data, name=None):
if name:
self.name = name
stream = open(self.name, 'a')
yaml.dump([data], stream)
stream.close()
|
c2782795dc679897b333138482b99b21b4c60349
|
salt/modules/test.py
|
salt/modules/test.py
|
'''
Module for running arbitrairy tests
'''
import time
def echo(text):
'''
Return a string - used for testing the connection
CLI Example:
salt '*' test.echo 'foo bar baz quo qux'
'''
print 'Echo got called!'
return text
def ping():
'''
Just used to make sure the minion is up and responding
Return True
CLI Example:
salt '*' test.ping
'''
return True
def fib(num):
'''
Return a fibonachi sequence up to the passed number, and the time it took
to compute in seconds. Used for performance tests
CLI Example:
salt '*' test.fib 3
'''
start = time.time()
a, b = 0, 1
ret = [0]
while b < num:
ret.append(b)
a, b = b, a + b
return ret, time.time() - start
def collatz(start):
'''
Execute the collatz conjecture from the passed starting number, returns
the sequence and the time it took to compute. Used for performance tests.
CLI Example:
salt '*' test.collatz 3
'''
start = time.time()
steps = []
while start != 1:
steps.append(start)
if start > 1:
if start % 2 == 0:
start = start / 2
else:
start = start * 3 + 1
return steps, time.time() - start
|
'''
Module for running arbitrairy tests
'''
import time
def echo(text):
'''
Return a string - used for testing the connection
CLI Example:
salt '*' test.echo 'foo bar baz quo qux'
'''
print 'Echo got called!'
return text
def ping():
'''
Just used to make sure the minion is up and responding
Return True
CLI Example:
salt '*' test.ping
'''
return True
def fib(num):
'''
Return a fibonachi sequence up to the passed number, and the time it took
to compute in seconds. Used for performance tests
CLI Example:
salt '*' test.fib 3
'''
start = time.time()
a, b = 0, 1
ret = [0]
while b < num:
ret.append(b)
a, b = b, a + b
return ret, time.time() - start
def collatz(start):
'''
Execute the collatz conjecture from the passed starting number, returns
the sequence and the time it took to compute. Used for performance tests.
CLI Example:
salt '*' test.collatz 3
'''
begin = time.time()
steps = []
while start != 1:
steps.append(start)
if start > 1:
if start % 2 == 0:
start = start / 2
else:
start = start * 3 + 1
return steps, time.time() - begin
|
Fix assignment issue in coallatz
|
Fix assignment issue in coallatz
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
'''
Module for running arbitrairy tests
'''
import time
def echo(text):
'''
Return a string - used for testing the connection
CLI Example:
salt '*' test.echo 'foo bar baz quo qux'
'''
print 'Echo got called!'
return text
def ping():
'''
Just used to make sure the minion is up and responding
Return True
CLI Example:
salt '*' test.ping
'''
return True
def fib(num):
'''
Return a fibonachi sequence up to the passed number, and the time it took
to compute in seconds. Used for performance tests
CLI Example:
salt '*' test.fib 3
'''
start = time.time()
a, b = 0, 1
ret = [0]
while b < num:
ret.append(b)
a, b = b, a + b
return ret, time.time() - start
def collatz(start):
'''
Execute the collatz conjecture from the passed starting number, returns
the sequence and the time it took to compute. Used for performance tests.
CLI Example:
salt '*' test.collatz 3
'''
start = time.time()
steps = []
while start != 1:
steps.append(start)
if start > 1:
if start % 2 == 0:
start = start / 2
else:
start = start * 3 + 1
return steps, time.time() - start
Fix assignment issue in coallatz
|
'''
Module for running arbitrairy tests
'''
import time
def echo(text):
'''
Return a string - used for testing the connection
CLI Example:
salt '*' test.echo 'foo bar baz quo qux'
'''
print 'Echo got called!'
return text
def ping():
'''
Just used to make sure the minion is up and responding
Return True
CLI Example:
salt '*' test.ping
'''
return True
def fib(num):
'''
Return a fibonachi sequence up to the passed number, and the time it took
to compute in seconds. Used for performance tests
CLI Example:
salt '*' test.fib 3
'''
start = time.time()
a, b = 0, 1
ret = [0]
while b < num:
ret.append(b)
a, b = b, a + b
return ret, time.time() - start
def collatz(start):
'''
Execute the collatz conjecture from the passed starting number, returns
the sequence and the time it took to compute. Used for performance tests.
CLI Example:
salt '*' test.collatz 3
'''
begin = time.time()
steps = []
while start != 1:
steps.append(start)
if start > 1:
if start % 2 == 0:
start = start / 2
else:
start = start * 3 + 1
return steps, time.time() - begin
|
<commit_before>'''
Module for running arbitrairy tests
'''
import time
def echo(text):
'''
Return a string - used for testing the connection
CLI Example:
salt '*' test.echo 'foo bar baz quo qux'
'''
print 'Echo got called!'
return text
def ping():
'''
Just used to make sure the minion is up and responding
Return True
CLI Example:
salt '*' test.ping
'''
return True
def fib(num):
'''
Return a fibonachi sequence up to the passed number, and the time it took
to compute in seconds. Used for performance tests
CLI Example:
salt '*' test.fib 3
'''
start = time.time()
a, b = 0, 1
ret = [0]
while b < num:
ret.append(b)
a, b = b, a + b
return ret, time.time() - start
def collatz(start):
'''
Execute the collatz conjecture from the passed starting number, returns
the sequence and the time it took to compute. Used for performance tests.
CLI Example:
salt '*' test.collatz 3
'''
start = time.time()
steps = []
while start != 1:
steps.append(start)
if start > 1:
if start % 2 == 0:
start = start / 2
else:
start = start * 3 + 1
return steps, time.time() - start
<commit_msg>Fix assignment issue in coallatz<commit_after>
|
'''
Module for running arbitrairy tests
'''
import time
def echo(text):
'''
Return a string - used for testing the connection
CLI Example:
salt '*' test.echo 'foo bar baz quo qux'
'''
print 'Echo got called!'
return text
def ping():
'''
Just used to make sure the minion is up and responding
Return True
CLI Example:
salt '*' test.ping
'''
return True
def fib(num):
'''
Return a fibonachi sequence up to the passed number, and the time it took
to compute in seconds. Used for performance tests
CLI Example:
salt '*' test.fib 3
'''
start = time.time()
a, b = 0, 1
ret = [0]
while b < num:
ret.append(b)
a, b = b, a + b
return ret, time.time() - start
def collatz(start):
'''
Execute the collatz conjecture from the passed starting number, returns
the sequence and the time it took to compute. Used for performance tests.
CLI Example:
salt '*' test.collatz 3
'''
begin = time.time()
steps = []
while start != 1:
steps.append(start)
if start > 1:
if start % 2 == 0:
start = start / 2
else:
start = start * 3 + 1
return steps, time.time() - begin
|
'''
Module for running arbitrairy tests
'''
import time
def echo(text):
'''
Return a string - used for testing the connection
CLI Example:
salt '*' test.echo 'foo bar baz quo qux'
'''
print 'Echo got called!'
return text
def ping():
'''
Just used to make sure the minion is up and responding
Return True
CLI Example:
salt '*' test.ping
'''
return True
def fib(num):
'''
Return a fibonachi sequence up to the passed number, and the time it took
to compute in seconds. Used for performance tests
CLI Example:
salt '*' test.fib 3
'''
start = time.time()
a, b = 0, 1
ret = [0]
while b < num:
ret.append(b)
a, b = b, a + b
return ret, time.time() - start
def collatz(start):
'''
Execute the collatz conjecture from the passed starting number, returns
the sequence and the time it took to compute. Used for performance tests.
CLI Example:
salt '*' test.collatz 3
'''
start = time.time()
steps = []
while start != 1:
steps.append(start)
if start > 1:
if start % 2 == 0:
start = start / 2
else:
start = start * 3 + 1
return steps, time.time() - start
Fix assignment issue in coallatz'''
Module for running arbitrairy tests
'''
import time
def echo(text):
'''
Return a string - used for testing the connection
CLI Example:
salt '*' test.echo 'foo bar baz quo qux'
'''
print 'Echo got called!'
return text
def ping():
'''
Just used to make sure the minion is up and responding
Return True
CLI Example:
salt '*' test.ping
'''
return True
def fib(num):
'''
Return a fibonachi sequence up to the passed number, and the time it took
to compute in seconds. Used for performance tests
CLI Example:
salt '*' test.fib 3
'''
start = time.time()
a, b = 0, 1
ret = [0]
while b < num:
ret.append(b)
a, b = b, a + b
return ret, time.time() - start
def collatz(start):
'''
Execute the collatz conjecture from the passed starting number, returns
the sequence and the time it took to compute. Used for performance tests.
CLI Example:
salt '*' test.collatz 3
'''
begin = time.time()
steps = []
while start != 1:
steps.append(start)
if start > 1:
if start % 2 == 0:
start = start / 2
else:
start = start * 3 + 1
return steps, time.time() - begin
|
<commit_before>'''
Module for running arbitrairy tests
'''
import time
def echo(text):
'''
Return a string - used for testing the connection
CLI Example:
salt '*' test.echo 'foo bar baz quo qux'
'''
print 'Echo got called!'
return text
def ping():
'''
Just used to make sure the minion is up and responding
Return True
CLI Example:
salt '*' test.ping
'''
return True
def fib(num):
'''
Return a fibonachi sequence up to the passed number, and the time it took
to compute in seconds. Used for performance tests
CLI Example:
salt '*' test.fib 3
'''
start = time.time()
a, b = 0, 1
ret = [0]
while b < num:
ret.append(b)
a, b = b, a + b
return ret, time.time() - start
def collatz(start):
'''
Execute the collatz conjecture from the passed starting number, returns
the sequence and the time it took to compute. Used for performance tests.
CLI Example:
salt '*' test.collatz 3
'''
start = time.time()
steps = []
while start != 1:
steps.append(start)
if start > 1:
if start % 2 == 0:
start = start / 2
else:
start = start * 3 + 1
return steps, time.time() - start
<commit_msg>Fix assignment issue in coallatz<commit_after>'''
Module for running arbitrairy tests
'''
import time
def echo(text):
'''
Return a string - used for testing the connection
CLI Example:
salt '*' test.echo 'foo bar baz quo qux'
'''
print 'Echo got called!'
return text
def ping():
'''
Just used to make sure the minion is up and responding
Return True
CLI Example:
salt '*' test.ping
'''
return True
def fib(num):
'''
Return a fibonachi sequence up to the passed number, and the time it took
to compute in seconds. Used for performance tests
CLI Example:
salt '*' test.fib 3
'''
start = time.time()
a, b = 0, 1
ret = [0]
while b < num:
ret.append(b)
a, b = b, a + b
return ret, time.time() - start
def collatz(start):
'''
Execute the collatz conjecture from the passed starting number, returns
the sequence and the time it took to compute. Used for performance tests.
CLI Example:
salt '*' test.collatz 3
'''
begin = time.time()
steps = []
while start != 1:
steps.append(start)
if start > 1:
if start % 2 == 0:
start = start / 2
else:
start = start * 3 + 1
return steps, time.time() - begin
|
5796093cecbe9e671dfe3b056f6e907a452694e5
|
autodbperftool/ADT/tpccmysql.py
|
autodbperftool/ADT/tpccmysql.py
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
'''
Created on 2015-06-19
@author: mizhon
'''
#from common import CommonActions
from Logs import logger
log = logger.Log()
class TpccmysqlActions(object):
@classmethod
def ta_get_cmds(cls, cmd_action):
try:
cmds = None
if cmd_action == 'prepare':
pass
elif cmd_action == 'run':
pass
elif cmd_action == 'cleanup':
pass
return cmds
except Exception as e:
log.error(e)
@classmethod
def ta_get_scenario_info(cls):
pass
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
'''
Created on 2015-06-19
@author: mizhon
'''
#from common import CommonActions
from Logs import logger
log = logger.Log()
class TpccmysqlActions(object):
@classmethod
def ta_get_cmds(cls, cmd_action):
try:
cmds = None
if cmd_action == 'prepare':
pass
elif cmd_action == 'run':
pass
elif cmd_action == 'cleanup':
pass
return cmds
except Exception as e:
log.error(e)
@classmethod
def ta_get_scenario_info(cls):
pass
@classmethod
def ta_save_results(cls, result):
pass
|
Add function to save tpcc-mysql results
|
Add function to save tpcc-mysql results
|
Python
|
apache-2.0
|
mizhon/tools
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
'''
Created on 2015-06-19
@author: mizhon
'''
#from common import CommonActions
from Logs import logger
log = logger.Log()
class TpccmysqlActions(object):
@classmethod
def ta_get_cmds(cls, cmd_action):
try:
cmds = None
if cmd_action == 'prepare':
pass
elif cmd_action == 'run':
pass
elif cmd_action == 'cleanup':
pass
return cmds
except Exception as e:
log.error(e)
@classmethod
def ta_get_scenario_info(cls):
passAdd function to save tpcc-mysql results
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
'''
Created on 2015-06-19
@author: mizhon
'''
#from common import CommonActions
from Logs import logger
log = logger.Log()
class TpccmysqlActions(object):
@classmethod
def ta_get_cmds(cls, cmd_action):
try:
cmds = None
if cmd_action == 'prepare':
pass
elif cmd_action == 'run':
pass
elif cmd_action == 'cleanup':
pass
return cmds
except Exception as e:
log.error(e)
@classmethod
def ta_get_scenario_info(cls):
pass
@classmethod
def ta_save_results(cls, result):
pass
|
<commit_before>#!/usr/bin/env python
#-*- coding: utf-8 -*-
'''
Created on 2015-06-19
@author: mizhon
'''
#from common import CommonActions
from Logs import logger
log = logger.Log()
class TpccmysqlActions(object):
@classmethod
def ta_get_cmds(cls, cmd_action):
try:
cmds = None
if cmd_action == 'prepare':
pass
elif cmd_action == 'run':
pass
elif cmd_action == 'cleanup':
pass
return cmds
except Exception as e:
log.error(e)
@classmethod
def ta_get_scenario_info(cls):
pass<commit_msg>Add function to save tpcc-mysql results<commit_after>
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
'''
Created on 2015-06-19
@author: mizhon
'''
#from common import CommonActions
from Logs import logger
log = logger.Log()
class TpccmysqlActions(object):
@classmethod
def ta_get_cmds(cls, cmd_action):
try:
cmds = None
if cmd_action == 'prepare':
pass
elif cmd_action == 'run':
pass
elif cmd_action == 'cleanup':
pass
return cmds
except Exception as e:
log.error(e)
@classmethod
def ta_get_scenario_info(cls):
pass
@classmethod
def ta_save_results(cls, result):
pass
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
'''
Created on 2015-06-19
@author: mizhon
'''
#from common import CommonActions
from Logs import logger
log = logger.Log()
class TpccmysqlActions(object):
@classmethod
def ta_get_cmds(cls, cmd_action):
try:
cmds = None
if cmd_action == 'prepare':
pass
elif cmd_action == 'run':
pass
elif cmd_action == 'cleanup':
pass
return cmds
except Exception as e:
log.error(e)
@classmethod
def ta_get_scenario_info(cls):
passAdd function to save tpcc-mysql results#!/usr/bin/env python
#-*- coding: utf-8 -*-
'''
Created on 2015-06-19
@author: mizhon
'''
#from common import CommonActions
from Logs import logger
log = logger.Log()
class TpccmysqlActions(object):
@classmethod
def ta_get_cmds(cls, cmd_action):
try:
cmds = None
if cmd_action == 'prepare':
pass
elif cmd_action == 'run':
pass
elif cmd_action == 'cleanup':
pass
return cmds
except Exception as e:
log.error(e)
@classmethod
def ta_get_scenario_info(cls):
pass
@classmethod
def ta_save_results(cls, result):
pass
|
<commit_before>#!/usr/bin/env python
#-*- coding: utf-8 -*-
'''
Created on 2015-06-19
@author: mizhon
'''
#from common import CommonActions
from Logs import logger
log = logger.Log()
class TpccmysqlActions(object):
@classmethod
def ta_get_cmds(cls, cmd_action):
try:
cmds = None
if cmd_action == 'prepare':
pass
elif cmd_action == 'run':
pass
elif cmd_action == 'cleanup':
pass
return cmds
except Exception as e:
log.error(e)
@classmethod
def ta_get_scenario_info(cls):
pass<commit_msg>Add function to save tpcc-mysql results<commit_after>#!/usr/bin/env python
#-*- coding: utf-8 -*-
'''
Created on 2015-06-19
@author: mizhon
'''
#from common import CommonActions
from Logs import logger
log = logger.Log()
class TpccmysqlActions(object):
@classmethod
def ta_get_cmds(cls, cmd_action):
try:
cmds = None
if cmd_action == 'prepare':
pass
elif cmd_action == 'run':
pass
elif cmd_action == 'cleanup':
pass
return cmds
except Exception as e:
log.error(e)
@classmethod
def ta_get_scenario_info(cls):
pass
@classmethod
def ta_save_results(cls, result):
pass
|
f7a420fa865ea2fcd871ad20800c2e21112ce2ec
|
examples/map/plot_frameless_image.py
|
examples/map/plot_frameless_image.py
|
"""
===============================
Plotting a Map without any Axes
===============================
This examples shows you how to plot a Map without any annotations at all, i.e.
to save as an image.
"""
##############################################################################
# Start by importing the necessary modules.
import astropy.units as u
import matplotlib.pyplot as plt
import sunpy.map
from sunpy.data.sample import AIA_171_IMAGE
##############################################################################
# Create a `sunpy.map.GenericMap`.
smap = sunpy.map.Map(AIA_171_IMAGE)
##############################################################################
# Plot the Map without a frame.
# Setup a frameless figure and an axes which spans the whole canvas.
figure = plt.figure(frameon=False)
axes = plt.Axes(figure, [0., 0., 1., 1.])
# Disable the axis and add them to the figure.
axes.set_axis_off()
figure.add_axes(axes)
# Plot the map without any annotations
# This might raise a warning about the axes being wrong but we can ignore this
# as we are not plotting any axes.
im = smap.plot(axes=axes, annotate=False)
##############################################################################
# At this point you could save the figure with ``plt.savefig()`` or show it:
plt.show()
|
"""
===============================
Plotting a Map without any Axes
===============================
This examples shows you how to plot a Map without any annotations at all, i.e.
to save as an image.
"""
##############################################################################
# Start by importing the necessary modules.
import astropy.units as u
import matplotlib.pyplot as plt
import sunpy.map
from sunpy.data.sample import AIA_171_IMAGE
##############################################################################
# Create a `sunpy.map.GenericMap`.
smap = sunpy.map.Map(AIA_171_IMAGE)
##############################################################################
# Plot the Map without a frame.
# Setup a frameless figure and an axes which spans the whole canvas.
figure = plt.figure(frameon=False)
axes = plt.Axes(figure, [0., 0., 1., 1.])
# Disable the axis and add them to the figure.
axes.set_axis_off()
figure.add_axes(axes)
# Plot the map without any annotations
# This might raise a warning about the axes being wrong but we can ignore this
# as we are not plotting any axes.
im = smap.plot(axes=axes, annotate=False, clip_interval=(1, 99.99)*u.percent)
##############################################################################
# At this point you could save the figure with ``plt.savefig()`` or show it:
plt.show()
|
Add a clip to the frameless example
|
Add a clip to the frameless example
|
Python
|
bsd-2-clause
|
dpshelio/sunpy,dpshelio/sunpy,dpshelio/sunpy
|
"""
===============================
Plotting a Map without any Axes
===============================
This examples shows you how to plot a Map without any annotations at all, i.e.
to save as an image.
"""
##############################################################################
# Start by importing the necessary modules.
import astropy.units as u
import matplotlib.pyplot as plt
import sunpy.map
from sunpy.data.sample import AIA_171_IMAGE
##############################################################################
# Create a `sunpy.map.GenericMap`.
smap = sunpy.map.Map(AIA_171_IMAGE)
##############################################################################
# Plot the Map without a frame.
# Setup a frameless figure and an axes which spans the whole canvas.
figure = plt.figure(frameon=False)
axes = plt.Axes(figure, [0., 0., 1., 1.])
# Disable the axis and add them to the figure.
axes.set_axis_off()
figure.add_axes(axes)
# Plot the map without any annotations
# This might raise a warning about the axes being wrong but we can ignore this
# as we are not plotting any axes.
im = smap.plot(axes=axes, annotate=False)
##############################################################################
# At this point you could save the figure with ``plt.savefig()`` or show it:
plt.show()
Add a clip to the frameless example
|
"""
===============================
Plotting a Map without any Axes
===============================
This examples shows you how to plot a Map without any annotations at all, i.e.
to save as an image.
"""
##############################################################################
# Start by importing the necessary modules.
import astropy.units as u
import matplotlib.pyplot as plt
import sunpy.map
from sunpy.data.sample import AIA_171_IMAGE
##############################################################################
# Create a `sunpy.map.GenericMap`.
smap = sunpy.map.Map(AIA_171_IMAGE)
##############################################################################
# Plot the Map without a frame.
# Setup a frameless figure and an axes which spans the whole canvas.
figure = plt.figure(frameon=False)
axes = plt.Axes(figure, [0., 0., 1., 1.])
# Disable the axis and add them to the figure.
axes.set_axis_off()
figure.add_axes(axes)
# Plot the map without any annotations
# This might raise a warning about the axes being wrong but we can ignore this
# as we are not plotting any axes.
im = smap.plot(axes=axes, annotate=False, clip_interval=(1, 99.99)*u.percent)
##############################################################################
# At this point you could save the figure with ``plt.savefig()`` or show it:
plt.show()
|
<commit_before>"""
===============================
Plotting a Map without any Axes
===============================
This examples shows you how to plot a Map without any annotations at all, i.e.
to save as an image.
"""
##############################################################################
# Start by importing the necessary modules.
import astropy.units as u
import matplotlib.pyplot as plt
import sunpy.map
from sunpy.data.sample import AIA_171_IMAGE
##############################################################################
# Create a `sunpy.map.GenericMap`.
smap = sunpy.map.Map(AIA_171_IMAGE)
##############################################################################
# Plot the Map without a frame.
# Setup a frameless figure and an axes which spans the whole canvas.
figure = plt.figure(frameon=False)
axes = plt.Axes(figure, [0., 0., 1., 1.])
# Disable the axis and add them to the figure.
axes.set_axis_off()
figure.add_axes(axes)
# Plot the map without any annotations
# This might raise a warning about the axes being wrong but we can ignore this
# as we are not plotting any axes.
im = smap.plot(axes=axes, annotate=False)
##############################################################################
# At this point you could save the figure with ``plt.savefig()`` or show it:
plt.show()
<commit_msg>Add a clip to the frameless example<commit_after>
|
"""
===============================
Plotting a Map without any Axes
===============================
This examples shows you how to plot a Map without any annotations at all, i.e.
to save as an image.
"""
##############################################################################
# Start by importing the necessary modules.
import astropy.units as u
import matplotlib.pyplot as plt
import sunpy.map
from sunpy.data.sample import AIA_171_IMAGE
##############################################################################
# Create a `sunpy.map.GenericMap`.
smap = sunpy.map.Map(AIA_171_IMAGE)
##############################################################################
# Plot the Map without a frame.
# Setup a frameless figure and an axes which spans the whole canvas.
figure = plt.figure(frameon=False)
axes = plt.Axes(figure, [0., 0., 1., 1.])
# Disable the axis and add them to the figure.
axes.set_axis_off()
figure.add_axes(axes)
# Plot the map without any annotations
# This might raise a warning about the axes being wrong but we can ignore this
# as we are not plotting any axes.
im = smap.plot(axes=axes, annotate=False, clip_interval=(1, 99.99)*u.percent)
##############################################################################
# At this point you could save the figure with ``plt.savefig()`` or show it:
plt.show()
|
"""
===============================
Plotting a Map without any Axes
===============================
This examples shows you how to plot a Map without any annotations at all, i.e.
to save as an image.
"""
##############################################################################
# Start by importing the necessary modules.
import astropy.units as u
import matplotlib.pyplot as plt
import sunpy.map
from sunpy.data.sample import AIA_171_IMAGE
##############################################################################
# Create a `sunpy.map.GenericMap`.
smap = sunpy.map.Map(AIA_171_IMAGE)
##############################################################################
# Plot the Map without a frame.
# Setup a frameless figure and an axes which spans the whole canvas.
figure = plt.figure(frameon=False)
axes = plt.Axes(figure, [0., 0., 1., 1.])
# Disable the axis and add them to the figure.
axes.set_axis_off()
figure.add_axes(axes)
# Plot the map without any annotations
# This might raise a warning about the axes being wrong but we can ignore this
# as we are not plotting any axes.
im = smap.plot(axes=axes, annotate=False)
##############################################################################
# At this point you could save the figure with ``plt.savefig()`` or show it:
plt.show()
Add a clip to the frameless example"""
===============================
Plotting a Map without any Axes
===============================
This examples shows you how to plot a Map without any annotations at all, i.e.
to save as an image.
"""
##############################################################################
# Start by importing the necessary modules.
import astropy.units as u
import matplotlib.pyplot as plt
import sunpy.map
from sunpy.data.sample import AIA_171_IMAGE
##############################################################################
# Create a `sunpy.map.GenericMap`.
smap = sunpy.map.Map(AIA_171_IMAGE)
##############################################################################
# Plot the Map without a frame.
# Setup a frameless figure and an axes which spans the whole canvas.
figure = plt.figure(frameon=False)
axes = plt.Axes(figure, [0., 0., 1., 1.])
# Disable the axis and add them to the figure.
axes.set_axis_off()
figure.add_axes(axes)
# Plot the map without any annotations
# This might raise a warning about the axes being wrong but we can ignore this
# as we are not plotting any axes.
im = smap.plot(axes=axes, annotate=False, clip_interval=(1, 99.99)*u.percent)
##############################################################################
# At this point you could save the figure with ``plt.savefig()`` or show it:
plt.show()
|
<commit_before>"""
===============================
Plotting a Map without any Axes
===============================
This examples shows you how to plot a Map without any annotations at all, i.e.
to save as an image.
"""
##############################################################################
# Start by importing the necessary modules.
import astropy.units as u
import matplotlib.pyplot as plt
import sunpy.map
from sunpy.data.sample import AIA_171_IMAGE
##############################################################################
# Create a `sunpy.map.GenericMap`.
smap = sunpy.map.Map(AIA_171_IMAGE)
##############################################################################
# Plot the Map without a frame.
# Setup a frameless figure and an axes which spans the whole canvas.
figure = plt.figure(frameon=False)
axes = plt.Axes(figure, [0., 0., 1., 1.])
# Disable the axis and add them to the figure.
axes.set_axis_off()
figure.add_axes(axes)
# Plot the map without any annotations
# This might raise a warning about the axes being wrong but we can ignore this
# as we are not plotting any axes.
im = smap.plot(axes=axes, annotate=False)
##############################################################################
# At this point you could save the figure with ``plt.savefig()`` or show it:
plt.show()
<commit_msg>Add a clip to the frameless example<commit_after>"""
===============================
Plotting a Map without any Axes
===============================
This examples shows you how to plot a Map without any annotations at all, i.e.
to save as an image.
"""
##############################################################################
# Start by importing the necessary modules.
import astropy.units as u
import matplotlib.pyplot as plt
import sunpy.map
from sunpy.data.sample import AIA_171_IMAGE
##############################################################################
# Create a `sunpy.map.GenericMap`.
smap = sunpy.map.Map(AIA_171_IMAGE)
##############################################################################
# Plot the Map without a frame.
# Setup a frameless figure and an axes which spans the whole canvas.
figure = plt.figure(frameon=False)
axes = plt.Axes(figure, [0., 0., 1., 1.])
# Disable the axis and add them to the figure.
axes.set_axis_off()
figure.add_axes(axes)
# Plot the map without any annotations
# This might raise a warning about the axes being wrong but we can ignore this
# as we are not plotting any axes.
im = smap.plot(axes=axes, annotate=False, clip_interval=(1, 99.99)*u.percent)
##############################################################################
# At this point you could save the figure with ``plt.savefig()`` or show it:
plt.show()
|
b1963f00e5290c11654eefbd24fbce185bbcd8b4
|
packages/Preferences/define.py
|
packages/Preferences/define.py
|
import os
_CURRENTPATH = os.path.dirname(os.path.realpath(__file__))
preferencesIconPath = os.path.join(_CURRENTPATH, 'static', 'gear.svg')
preferencesUIPath = os.path.join(_CURRENTPATH, 'ui', 'preferences.ui')
version = '0.1.0'
|
import os
_CURRENTPATH = os.path.dirname(os.path.realpath(__file__))
config_name = 'mantle_config.ini'
preferencesIconPath = os.path.join(_CURRENTPATH, 'static', 'gear.svg')
preferencesUIPath = os.path.join(_CURRENTPATH, 'ui', 'preferences.ui')
version = '0.1.0'
|
Add config ini file name.
|
Add config ini file name.
|
Python
|
mit
|
takavfx/Mantle
|
import os
_CURRENTPATH = os.path.dirname(os.path.realpath(__file__))
preferencesIconPath = os.path.join(_CURRENTPATH, 'static', 'gear.svg')
preferencesUIPath = os.path.join(_CURRENTPATH, 'ui', 'preferences.ui')
version = '0.1.0'
Add config ini file name.
|
import os
_CURRENTPATH = os.path.dirname(os.path.realpath(__file__))
config_name = 'mantle_config.ini'
preferencesIconPath = os.path.join(_CURRENTPATH, 'static', 'gear.svg')
preferencesUIPath = os.path.join(_CURRENTPATH, 'ui', 'preferences.ui')
version = '0.1.0'
|
<commit_before>import os
_CURRENTPATH = os.path.dirname(os.path.realpath(__file__))
preferencesIconPath = os.path.join(_CURRENTPATH, 'static', 'gear.svg')
preferencesUIPath = os.path.join(_CURRENTPATH, 'ui', 'preferences.ui')
version = '0.1.0'
<commit_msg>Add config ini file name.<commit_after>
|
import os
_CURRENTPATH = os.path.dirname(os.path.realpath(__file__))
config_name = 'mantle_config.ini'
preferencesIconPath = os.path.join(_CURRENTPATH, 'static', 'gear.svg')
preferencesUIPath = os.path.join(_CURRENTPATH, 'ui', 'preferences.ui')
version = '0.1.0'
|
import os
_CURRENTPATH = os.path.dirname(os.path.realpath(__file__))
preferencesIconPath = os.path.join(_CURRENTPATH, 'static', 'gear.svg')
preferencesUIPath = os.path.join(_CURRENTPATH, 'ui', 'preferences.ui')
version = '0.1.0'
Add config ini file name.import os
_CURRENTPATH = os.path.dirname(os.path.realpath(__file__))
config_name = 'mantle_config.ini'
preferencesIconPath = os.path.join(_CURRENTPATH, 'static', 'gear.svg')
preferencesUIPath = os.path.join(_CURRENTPATH, 'ui', 'preferences.ui')
version = '0.1.0'
|
<commit_before>import os
_CURRENTPATH = os.path.dirname(os.path.realpath(__file__))
preferencesIconPath = os.path.join(_CURRENTPATH, 'static', 'gear.svg')
preferencesUIPath = os.path.join(_CURRENTPATH, 'ui', 'preferences.ui')
version = '0.1.0'
<commit_msg>Add config ini file name.<commit_after>import os
_CURRENTPATH = os.path.dirname(os.path.realpath(__file__))
config_name = 'mantle_config.ini'
preferencesIconPath = os.path.join(_CURRENTPATH, 'static', 'gear.svg')
preferencesUIPath = os.path.join(_CURRENTPATH, 'ui', 'preferences.ui')
version = '0.1.0'
|
c1a65072863fb4e21a7cd2259090996496450a42
|
code/processors/google_closure_compiler.py
|
code/processors/google_closure_compiler.py
|
__author__ = "Wim Leers (work@wimleers.com)"
__version__ = "$Rev$"
__date__ = "$Date$"
__license__ = "GPL"
from processor import *
import os
import os.path
class GoogleClosureCompiler(Processor):
"""compresses .js files with Google Closure Compiler"""
valid_extensions = (".js")
def run(self):
# We don't rename the file, so we can use the default output file.
# Run Google Closure Compiler on the file.
compiler_path = os.path.join(self.processors_path, "compiler.jar")
(stdout, stderr) = self.run_command("java -jar %s --js %s --js_output_file %s" % (compiler_path, self.input_file, self.input_file))
# Raise an exception if an error occurred.
if not stderr == "":
raise ProcessorError(stderr)
return self.output_file
|
__author__ = "Wim Leers (work@wimleers.com)"
__version__ = "$Rev$"
__date__ = "$Date$"
__license__ = "GPL"
from processor import *
import os
import os.path
class GoogleClosureCompiler(Processor):
"""compresses .js files with Google Closure Compiler"""
valid_extensions = (".js")
def run(self):
# We don't rename the file, so we can use the default output file.
# Run Google Closure Compiler on the file.
compiler_path = os.path.join(self.processors_path, "compiler.jar")
(stdout, stderr) = self.run_command("java -jar %s --js %s --js_output_file %s" % (compiler_path, self.input_file, self.output_file))
# Raise an exception if an error occurred.
if not stderr == "":
raise ProcessorError(stderr)
return self.output_file
|
Fix a stupid typo in the Google Closure Compiler processor that prevented it from working properly.
|
Fix a stupid typo in the Google Closure Compiler processor that prevented it from working properly.
|
Python
|
unlicense
|
WadiInternet/fileconveyor,edx/fileconveyor,wimleers/fileconveyor
|
__author__ = "Wim Leers (work@wimleers.com)"
__version__ = "$Rev$"
__date__ = "$Date$"
__license__ = "GPL"
from processor import *
import os
import os.path
class GoogleClosureCompiler(Processor):
"""compresses .js files with Google Closure Compiler"""
valid_extensions = (".js")
def run(self):
# We don't rename the file, so we can use the default output file.
# Run Google Closure Compiler on the file.
compiler_path = os.path.join(self.processors_path, "compiler.jar")
(stdout, stderr) = self.run_command("java -jar %s --js %s --js_output_file %s" % (compiler_path, self.input_file, self.input_file))
# Raise an exception if an error occurred.
if not stderr == "":
raise ProcessorError(stderr)
return self.output_file
Fix a stupid typo in the Google Closure Compiler processor that prevented it from working properly.
|
__author__ = "Wim Leers (work@wimleers.com)"
__version__ = "$Rev$"
__date__ = "$Date$"
__license__ = "GPL"
from processor import *
import os
import os.path
class GoogleClosureCompiler(Processor):
"""compresses .js files with Google Closure Compiler"""
valid_extensions = (".js")
def run(self):
# We don't rename the file, so we can use the default output file.
# Run Google Closure Compiler on the file.
compiler_path = os.path.join(self.processors_path, "compiler.jar")
(stdout, stderr) = self.run_command("java -jar %s --js %s --js_output_file %s" % (compiler_path, self.input_file, self.output_file))
# Raise an exception if an error occurred.
if not stderr == "":
raise ProcessorError(stderr)
return self.output_file
|
<commit_before>__author__ = "Wim Leers (work@wimleers.com)"
__version__ = "$Rev$"
__date__ = "$Date$"
__license__ = "GPL"
from processor import *
import os
import os.path
class GoogleClosureCompiler(Processor):
"""compresses .js files with Google Closure Compiler"""
valid_extensions = (".js")
def run(self):
# We don't rename the file, so we can use the default output file.
# Run Google Closure Compiler on the file.
compiler_path = os.path.join(self.processors_path, "compiler.jar")
(stdout, stderr) = self.run_command("java -jar %s --js %s --js_output_file %s" % (compiler_path, self.input_file, self.input_file))
# Raise an exception if an error occurred.
if not stderr == "":
raise ProcessorError(stderr)
return self.output_file
<commit_msg>Fix a stupid typo in the Google Closure Compiler processor that prevented it from working properly.<commit_after>
|
__author__ = "Wim Leers (work@wimleers.com)"
__version__ = "$Rev$"
__date__ = "$Date$"
__license__ = "GPL"
from processor import *
import os
import os.path
class GoogleClosureCompiler(Processor):
"""compresses .js files with Google Closure Compiler"""
valid_extensions = (".js")
def run(self):
# We don't rename the file, so we can use the default output file.
# Run Google Closure Compiler on the file.
compiler_path = os.path.join(self.processors_path, "compiler.jar")
(stdout, stderr) = self.run_command("java -jar %s --js %s --js_output_file %s" % (compiler_path, self.input_file, self.output_file))
# Raise an exception if an error occurred.
if not stderr == "":
raise ProcessorError(stderr)
return self.output_file
|
__author__ = "Wim Leers (work@wimleers.com)"
__version__ = "$Rev$"
__date__ = "$Date$"
__license__ = "GPL"
from processor import *
import os
import os.path
class GoogleClosureCompiler(Processor):
"""compresses .js files with Google Closure Compiler"""
valid_extensions = (".js")
def run(self):
# We don't rename the file, so we can use the default output file.
# Run Google Closure Compiler on the file.
compiler_path = os.path.join(self.processors_path, "compiler.jar")
(stdout, stderr) = self.run_command("java -jar %s --js %s --js_output_file %s" % (compiler_path, self.input_file, self.input_file))
# Raise an exception if an error occurred.
if not stderr == "":
raise ProcessorError(stderr)
return self.output_file
Fix a stupid typo in the Google Closure Compiler processor that prevented it from working properly.__author__ = "Wim Leers (work@wimleers.com)"
__version__ = "$Rev$"
__date__ = "$Date$"
__license__ = "GPL"
from processor import *
import os
import os.path
class GoogleClosureCompiler(Processor):
"""compresses .js files with Google Closure Compiler"""
valid_extensions = (".js")
def run(self):
# We don't rename the file, so we can use the default output file.
# Run Google Closure Compiler on the file.
compiler_path = os.path.join(self.processors_path, "compiler.jar")
(stdout, stderr) = self.run_command("java -jar %s --js %s --js_output_file %s" % (compiler_path, self.input_file, self.output_file))
# Raise an exception if an error occurred.
if not stderr == "":
raise ProcessorError(stderr)
return self.output_file
|
<commit_before>__author__ = "Wim Leers (work@wimleers.com)"
__version__ = "$Rev$"
__date__ = "$Date$"
__license__ = "GPL"
from processor import *
import os
import os.path
class GoogleClosureCompiler(Processor):
"""compresses .js files with Google Closure Compiler"""
valid_extensions = (".js")
def run(self):
# We don't rename the file, so we can use the default output file.
# Run Google Closure Compiler on the file.
compiler_path = os.path.join(self.processors_path, "compiler.jar")
(stdout, stderr) = self.run_command("java -jar %s --js %s --js_output_file %s" % (compiler_path, self.input_file, self.input_file))
# Raise an exception if an error occurred.
if not stderr == "":
raise ProcessorError(stderr)
return self.output_file
<commit_msg>Fix a stupid typo in the Google Closure Compiler processor that prevented it from working properly.<commit_after>__author__ = "Wim Leers (work@wimleers.com)"
__version__ = "$Rev$"
__date__ = "$Date$"
__license__ = "GPL"
from processor import *
import os
import os.path
class GoogleClosureCompiler(Processor):
"""compresses .js files with Google Closure Compiler"""
valid_extensions = (".js")
def run(self):
# We don't rename the file, so we can use the default output file.
# Run Google Closure Compiler on the file.
compiler_path = os.path.join(self.processors_path, "compiler.jar")
(stdout, stderr) = self.run_command("java -jar %s --js %s --js_output_file %s" % (compiler_path, self.input_file, self.output_file))
# Raise an exception if an error occurred.
if not stderr == "":
raise ProcessorError(stderr)
return self.output_file
|
07df1678c620820053663cf0d07d79fff4fe2333
|
heroku.py
|
heroku.py
|
#!/usr/bin/env python
from evesrp import create_app
from evesrp.killmail import CRESTMail, ShipURLMixin
import evesrp.auth.testauth
from flask.ext.heroku import Heroku
from os import environ as env
from binascii import unhexlify
skel_url = 'https://wiki.eveonline.com/en/wiki/{name}'
class EOWikiCREST(CRESTMail, ShipURLMixin(skel_url)): pass
app = create_app()
heroku = Heroku(app)
app.config['SECRET_KEY'] = unhexlify(env['SECRET_KEY'])
app.config['USER_AGENT_EMAIL'] = 'paxswill@paxswill.com'
app.config['AUTH_METHODS'] = ['evesrp.auth.testauth.TestAuth']
app.config['CORE_AUTH_PRIVATE_KEY'] = env.get('CORE_PRIVATE_KEY')
app.config['CORE_AUTH_PUBLIC_KEY'] = env.get('CORE_PUBLIC_KEY')
app.config['CORE_AUTH_IDENTIFIER'] = env.get('CORE_IDENTIFIER')
app.config['KILLMAIL_SOURCES'] = [EOWikiCREST]
if __name__ == '__main__':
print("Creating databases...")
app.extensions['sqlalchemy'].db.create_all(app=app)
|
#!/usr/bin/env python
from evesrp import create_app
from evesrp.killmail import CRESTMail, ShipURLMixin
import evesrp.auth.testauth
from flask.ext.heroku import Heroku
from os import environ as env
from binascii import unhexlify
skel_url = 'https://wiki.eveonline.com/en/wiki/{name}'
class EOWikiCREST(CRESTMail, ShipURLMixin(skel_url)): pass
app = create_app()
heroku = Heroku(app)
app.config['SECRET_KEY'] = unhexlify(env['SECRET_KEY'])
app.config['USER_AGENT_EMAIL'] = 'paxswill@paxswill.com'
app.config['AUTH_METHODS'] = ['evesrp.auth.testauth.TestAuth']
app.config['CORE_AUTH_PRIVATE_KEY'] = env.get('CORE_PRIVATE_KEY')
app.config['CORE_AUTH_PUBLIC_KEY'] = env.get('CORE_PUBLIC_KEY')
app.config['CORE_AUTH_IDENTIFIER'] = env.get('CORE_IDENTIFIER')
app.config['KILLMAIL_SOURCES'] = [EOWikiCREST]
if env.get('DEBUG') is not None:
app.debug = True
if __name__ == '__main__':
print("Creating databases...")
app.extensions['sqlalchemy'].db.create_all(app=app)
|
Add the option to enable debug mode with Heroku
|
Add the option to enable debug mode with Heroku
Don't do this public servers, use it locally with foreman.
|
Python
|
bsd-2-clause
|
paxswill/evesrp,eskwire/evesrp,eskwire/evesrp,eskwire/evesrp,paxswill/evesrp,eskwire/evesrp,paxswill/evesrp
|
#!/usr/bin/env python
from evesrp import create_app
from evesrp.killmail import CRESTMail, ShipURLMixin
import evesrp.auth.testauth
from flask.ext.heroku import Heroku
from os import environ as env
from binascii import unhexlify
skel_url = 'https://wiki.eveonline.com/en/wiki/{name}'
class EOWikiCREST(CRESTMail, ShipURLMixin(skel_url)): pass
app = create_app()
heroku = Heroku(app)
app.config['SECRET_KEY'] = unhexlify(env['SECRET_KEY'])
app.config['USER_AGENT_EMAIL'] = 'paxswill@paxswill.com'
app.config['AUTH_METHODS'] = ['evesrp.auth.testauth.TestAuth']
app.config['CORE_AUTH_PRIVATE_KEY'] = env.get('CORE_PRIVATE_KEY')
app.config['CORE_AUTH_PUBLIC_KEY'] = env.get('CORE_PUBLIC_KEY')
app.config['CORE_AUTH_IDENTIFIER'] = env.get('CORE_IDENTIFIER')
app.config['KILLMAIL_SOURCES'] = [EOWikiCREST]
if __name__ == '__main__':
print("Creating databases...")
app.extensions['sqlalchemy'].db.create_all(app=app)
Add the option to enable debug mode with Heroku
Don't do this public servers, use it locally with foreman.
|
#!/usr/bin/env python
from evesrp import create_app
from evesrp.killmail import CRESTMail, ShipURLMixin
import evesrp.auth.testauth
from flask.ext.heroku import Heroku
from os import environ as env
from binascii import unhexlify
skel_url = 'https://wiki.eveonline.com/en/wiki/{name}'
class EOWikiCREST(CRESTMail, ShipURLMixin(skel_url)): pass
app = create_app()
heroku = Heroku(app)
app.config['SECRET_KEY'] = unhexlify(env['SECRET_KEY'])
app.config['USER_AGENT_EMAIL'] = 'paxswill@paxswill.com'
app.config['AUTH_METHODS'] = ['evesrp.auth.testauth.TestAuth']
app.config['CORE_AUTH_PRIVATE_KEY'] = env.get('CORE_PRIVATE_KEY')
app.config['CORE_AUTH_PUBLIC_KEY'] = env.get('CORE_PUBLIC_KEY')
app.config['CORE_AUTH_IDENTIFIER'] = env.get('CORE_IDENTIFIER')
app.config['KILLMAIL_SOURCES'] = [EOWikiCREST]
if env.get('DEBUG') is not None:
app.debug = True
if __name__ == '__main__':
print("Creating databases...")
app.extensions['sqlalchemy'].db.create_all(app=app)
|
<commit_before>#!/usr/bin/env python
from evesrp import create_app
from evesrp.killmail import CRESTMail, ShipURLMixin
import evesrp.auth.testauth
from flask.ext.heroku import Heroku
from os import environ as env
from binascii import unhexlify
skel_url = 'https://wiki.eveonline.com/en/wiki/{name}'
class EOWikiCREST(CRESTMail, ShipURLMixin(skel_url)): pass
app = create_app()
heroku = Heroku(app)
app.config['SECRET_KEY'] = unhexlify(env['SECRET_KEY'])
app.config['USER_AGENT_EMAIL'] = 'paxswill@paxswill.com'
app.config['AUTH_METHODS'] = ['evesrp.auth.testauth.TestAuth']
app.config['CORE_AUTH_PRIVATE_KEY'] = env.get('CORE_PRIVATE_KEY')
app.config['CORE_AUTH_PUBLIC_KEY'] = env.get('CORE_PUBLIC_KEY')
app.config['CORE_AUTH_IDENTIFIER'] = env.get('CORE_IDENTIFIER')
app.config['KILLMAIL_SOURCES'] = [EOWikiCREST]
if __name__ == '__main__':
print("Creating databases...")
app.extensions['sqlalchemy'].db.create_all(app=app)
<commit_msg>Add the option to enable debug mode with Heroku
Don't do this public servers, use it locally with foreman.<commit_after>
|
#!/usr/bin/env python
from evesrp import create_app
from evesrp.killmail import CRESTMail, ShipURLMixin
import evesrp.auth.testauth
from flask.ext.heroku import Heroku
from os import environ as env
from binascii import unhexlify
skel_url = 'https://wiki.eveonline.com/en/wiki/{name}'
class EOWikiCREST(CRESTMail, ShipURLMixin(skel_url)): pass
app = create_app()
heroku = Heroku(app)
app.config['SECRET_KEY'] = unhexlify(env['SECRET_KEY'])
app.config['USER_AGENT_EMAIL'] = 'paxswill@paxswill.com'
app.config['AUTH_METHODS'] = ['evesrp.auth.testauth.TestAuth']
app.config['CORE_AUTH_PRIVATE_KEY'] = env.get('CORE_PRIVATE_KEY')
app.config['CORE_AUTH_PUBLIC_KEY'] = env.get('CORE_PUBLIC_KEY')
app.config['CORE_AUTH_IDENTIFIER'] = env.get('CORE_IDENTIFIER')
app.config['KILLMAIL_SOURCES'] = [EOWikiCREST]
if env.get('DEBUG') is not None:
app.debug = True
if __name__ == '__main__':
print("Creating databases...")
app.extensions['sqlalchemy'].db.create_all(app=app)
|
#!/usr/bin/env python
from evesrp import create_app
from evesrp.killmail import CRESTMail, ShipURLMixin
import evesrp.auth.testauth
from flask.ext.heroku import Heroku
from os import environ as env
from binascii import unhexlify
skel_url = 'https://wiki.eveonline.com/en/wiki/{name}'
class EOWikiCREST(CRESTMail, ShipURLMixin(skel_url)): pass
app = create_app()
heroku = Heroku(app)
app.config['SECRET_KEY'] = unhexlify(env['SECRET_KEY'])
app.config['USER_AGENT_EMAIL'] = 'paxswill@paxswill.com'
app.config['AUTH_METHODS'] = ['evesrp.auth.testauth.TestAuth']
app.config['CORE_AUTH_PRIVATE_KEY'] = env.get('CORE_PRIVATE_KEY')
app.config['CORE_AUTH_PUBLIC_KEY'] = env.get('CORE_PUBLIC_KEY')
app.config['CORE_AUTH_IDENTIFIER'] = env.get('CORE_IDENTIFIER')
app.config['KILLMAIL_SOURCES'] = [EOWikiCREST]
if __name__ == '__main__':
print("Creating databases...")
app.extensions['sqlalchemy'].db.create_all(app=app)
Add the option to enable debug mode with Heroku
Don't do this public servers, use it locally with foreman.#!/usr/bin/env python
from evesrp import create_app
from evesrp.killmail import CRESTMail, ShipURLMixin
import evesrp.auth.testauth
from flask.ext.heroku import Heroku
from os import environ as env
from binascii import unhexlify
skel_url = 'https://wiki.eveonline.com/en/wiki/{name}'
class EOWikiCREST(CRESTMail, ShipURLMixin(skel_url)): pass
app = create_app()
heroku = Heroku(app)
app.config['SECRET_KEY'] = unhexlify(env['SECRET_KEY'])
app.config['USER_AGENT_EMAIL'] = 'paxswill@paxswill.com'
app.config['AUTH_METHODS'] = ['evesrp.auth.testauth.TestAuth']
app.config['CORE_AUTH_PRIVATE_KEY'] = env.get('CORE_PRIVATE_KEY')
app.config['CORE_AUTH_PUBLIC_KEY'] = env.get('CORE_PUBLIC_KEY')
app.config['CORE_AUTH_IDENTIFIER'] = env.get('CORE_IDENTIFIER')
app.config['KILLMAIL_SOURCES'] = [EOWikiCREST]
if env.get('DEBUG') is not None:
app.debug = True
if __name__ == '__main__':
print("Creating databases...")
app.extensions['sqlalchemy'].db.create_all(app=app)
|
<commit_before>#!/usr/bin/env python
from evesrp import create_app
from evesrp.killmail import CRESTMail, ShipURLMixin
import evesrp.auth.testauth
from flask.ext.heroku import Heroku
from os import environ as env
from binascii import unhexlify
skel_url = 'https://wiki.eveonline.com/en/wiki/{name}'
class EOWikiCREST(CRESTMail, ShipURLMixin(skel_url)): pass
app = create_app()
heroku = Heroku(app)
app.config['SECRET_KEY'] = unhexlify(env['SECRET_KEY'])
app.config['USER_AGENT_EMAIL'] = 'paxswill@paxswill.com'
app.config['AUTH_METHODS'] = ['evesrp.auth.testauth.TestAuth']
app.config['CORE_AUTH_PRIVATE_KEY'] = env.get('CORE_PRIVATE_KEY')
app.config['CORE_AUTH_PUBLIC_KEY'] = env.get('CORE_PUBLIC_KEY')
app.config['CORE_AUTH_IDENTIFIER'] = env.get('CORE_IDENTIFIER')
app.config['KILLMAIL_SOURCES'] = [EOWikiCREST]
if __name__ == '__main__':
print("Creating databases...")
app.extensions['sqlalchemy'].db.create_all(app=app)
<commit_msg>Add the option to enable debug mode with Heroku
Don't do this public servers, use it locally with foreman.<commit_after>#!/usr/bin/env python
from evesrp import create_app
from evesrp.killmail import CRESTMail, ShipURLMixin
import evesrp.auth.testauth
from flask.ext.heroku import Heroku
from os import environ as env
from binascii import unhexlify
skel_url = 'https://wiki.eveonline.com/en/wiki/{name}'
class EOWikiCREST(CRESTMail, ShipURLMixin(skel_url)): pass
app = create_app()
heroku = Heroku(app)
app.config['SECRET_KEY'] = unhexlify(env['SECRET_KEY'])
app.config['USER_AGENT_EMAIL'] = 'paxswill@paxswill.com'
app.config['AUTH_METHODS'] = ['evesrp.auth.testauth.TestAuth']
app.config['CORE_AUTH_PRIVATE_KEY'] = env.get('CORE_PRIVATE_KEY')
app.config['CORE_AUTH_PUBLIC_KEY'] = env.get('CORE_PUBLIC_KEY')
app.config['CORE_AUTH_IDENTIFIER'] = env.get('CORE_IDENTIFIER')
app.config['KILLMAIL_SOURCES'] = [EOWikiCREST]
if env.get('DEBUG') is not None:
app.debug = True
if __name__ == '__main__':
print("Creating databases...")
app.extensions['sqlalchemy'].db.create_all(app=app)
|
8c691948069157e02ff00ca5cab427657e36487c
|
lights/patterns/scrollsmooth.py
|
lights/patterns/scrollsmooth.py
|
"""
Smooth scroll pattern
"""
from .pattern import Pattern
import time
import math
# fast linear sin approx
def fastApprox(val):
return 1.0 - math.fabs( math.fmod(val, 2.0) - 1.0)
def constrain_int(value):
return int(min(255, max(value, 0)))
class ScrollSmooth(Pattern):
def __init__(self):
super(Pattern, self).__init__()
@classmethod
def get_id(self):
return 7
@classmethod
def update(self, strip, state):
# logging.info("Smooth")
for x in range(len(strip)):
c1 = [c * (x / float(state.length + 1) + float(time.time()) * 1000.0 / float(state.delay)) for c in state.color1]
# c2 is out of phase by 1
c2 = [c * (x / float(state.length + 1) + 1 + float(time.time()) * 1000.0 / float(state.delay)) for c in state.color2]
c3 = [constrain_int(a + b) for a, b in zip(c1, c2)]
strip[x] = c3
|
"""
Smooth scroll pattern
"""
from .pattern import Pattern
import time
import math
# fast linear sin approx
def fastApprox(val):
return 1.0 - math.fabs( math.fmod(val, 2.0) - 1.0)
def constrain_int(value):
return int(min(255, max(value, 0)))
class ScrollSmooth(Pattern):
def __init__(self):
super(Pattern, self).__init__()
@classmethod
def get_id(self):
return 7
@classmethod
def update(self, strip, state):
# logging.info("Smooth")
for x in range(len(strip)):
c1 = [c * (x / float(state.length + 1) + float(time.time()) * 1000.0 / float(state.delay)) for c in state.color1]
# c2 is out of phase by 1
c2 = [c * (x / float(state.length + 1) + 1 + float(time.time()) * 1000.0 / float(state.delay)) for c in state.color2]
c3 = tuple(constrain_int(a + b) for a, b in zip(c1, c2))
strip[x] = c3
|
Fix issue with smoothscroll pattern
|
Fix issue with smoothscroll pattern
|
Python
|
mit
|
Chris-Johnston/Internet-Xmas-Tree,Chris-Johnston/Internet-Xmas-Tree,Chris-Johnston/Internet-Xmas-Tree,Chris-Johnston/Internet-Xmas-Tree
|
"""
Smooth scroll pattern
"""
from .pattern import Pattern
import time
import math
# fast linear sin approx
def fastApprox(val):
return 1.0 - math.fabs( math.fmod(val, 2.0) - 1.0)
def constrain_int(value):
return int(min(255, max(value, 0)))
class ScrollSmooth(Pattern):
def __init__(self):
super(Pattern, self).__init__()
@classmethod
def get_id(self):
return 7
@classmethod
def update(self, strip, state):
# logging.info("Smooth")
for x in range(len(strip)):
c1 = [c * (x / float(state.length + 1) + float(time.time()) * 1000.0 / float(state.delay)) for c in state.color1]
# c2 is out of phase by 1
c2 = [c * (x / float(state.length + 1) + 1 + float(time.time()) * 1000.0 / float(state.delay)) for c in state.color2]
c3 = [constrain_int(a + b) for a, b in zip(c1, c2)]
strip[x] = c3Fix issue with smoothscroll pattern
|
"""
Smooth scroll pattern
"""
from .pattern import Pattern
import time
import math
# fast linear sin approx
def fastApprox(val):
return 1.0 - math.fabs( math.fmod(val, 2.0) - 1.0)
def constrain_int(value):
return int(min(255, max(value, 0)))
class ScrollSmooth(Pattern):
def __init__(self):
super(Pattern, self).__init__()
@classmethod
def get_id(self):
return 7
@classmethod
def update(self, strip, state):
# logging.info("Smooth")
for x in range(len(strip)):
c1 = [c * (x / float(state.length + 1) + float(time.time()) * 1000.0 / float(state.delay)) for c in state.color1]
# c2 is out of phase by 1
c2 = [c * (x / float(state.length + 1) + 1 + float(time.time()) * 1000.0 / float(state.delay)) for c in state.color2]
c3 = tuple(constrain_int(a + b) for a, b in zip(c1, c2))
strip[x] = c3
|
<commit_before>"""
Smooth scroll pattern
"""
from .pattern import Pattern
import time
import math
# fast linear sin approx
def fastApprox(val):
return 1.0 - math.fabs( math.fmod(val, 2.0) - 1.0)
def constrain_int(value):
return int(min(255, max(value, 0)))
class ScrollSmooth(Pattern):
def __init__(self):
super(Pattern, self).__init__()
@classmethod
def get_id(self):
return 7
@classmethod
def update(self, strip, state):
# logging.info("Smooth")
for x in range(len(strip)):
c1 = [c * (x / float(state.length + 1) + float(time.time()) * 1000.0 / float(state.delay)) for c in state.color1]
# c2 is out of phase by 1
c2 = [c * (x / float(state.length + 1) + 1 + float(time.time()) * 1000.0 / float(state.delay)) for c in state.color2]
c3 = [constrain_int(a + b) for a, b in zip(c1, c2)]
strip[x] = c3<commit_msg>Fix issue with smoothscroll pattern<commit_after>
|
"""
Smooth scroll pattern
"""
from .pattern import Pattern
import time
import math
# fast linear sin approx
def fastApprox(val):
return 1.0 - math.fabs( math.fmod(val, 2.0) - 1.0)
def constrain_int(value):
return int(min(255, max(value, 0)))
class ScrollSmooth(Pattern):
def __init__(self):
super(Pattern, self).__init__()
@classmethod
def get_id(self):
return 7
@classmethod
def update(self, strip, state):
# logging.info("Smooth")
for x in range(len(strip)):
c1 = [c * (x / float(state.length + 1) + float(time.time()) * 1000.0 / float(state.delay)) for c in state.color1]
# c2 is out of phase by 1
c2 = [c * (x / float(state.length + 1) + 1 + float(time.time()) * 1000.0 / float(state.delay)) for c in state.color2]
c3 = tuple(constrain_int(a + b) for a, b in zip(c1, c2))
strip[x] = c3
|
"""
Smooth scroll pattern
"""
from .pattern import Pattern
import time
import math
# fast linear sin approx
def fastApprox(val):
return 1.0 - math.fabs( math.fmod(val, 2.0) - 1.0)
def constrain_int(value):
return int(min(255, max(value, 0)))
class ScrollSmooth(Pattern):
def __init__(self):
super(Pattern, self).__init__()
@classmethod
def get_id(self):
return 7
@classmethod
def update(self, strip, state):
# logging.info("Smooth")
for x in range(len(strip)):
c1 = [c * (x / float(state.length + 1) + float(time.time()) * 1000.0 / float(state.delay)) for c in state.color1]
# c2 is out of phase by 1
c2 = [c * (x / float(state.length + 1) + 1 + float(time.time()) * 1000.0 / float(state.delay)) for c in state.color2]
c3 = [constrain_int(a + b) for a, b in zip(c1, c2)]
strip[x] = c3Fix issue with smoothscroll pattern"""
Smooth scroll pattern
"""
from .pattern import Pattern
import time
import math
# fast linear sin approx
def fastApprox(val):
return 1.0 - math.fabs( math.fmod(val, 2.0) - 1.0)
def constrain_int(value):
return int(min(255, max(value, 0)))
class ScrollSmooth(Pattern):
def __init__(self):
super(Pattern, self).__init__()
@classmethod
def get_id(self):
return 7
@classmethod
def update(self, strip, state):
# logging.info("Smooth")
for x in range(len(strip)):
c1 = [c * (x / float(state.length + 1) + float(time.time()) * 1000.0 / float(state.delay)) for c in state.color1]
# c2 is out of phase by 1
c2 = [c * (x / float(state.length + 1) + 1 + float(time.time()) * 1000.0 / float(state.delay)) for c in state.color2]
c3 = tuple(constrain_int(a + b) for a, b in zip(c1, c2))
strip[x] = c3
|
<commit_before>"""
Smooth scroll pattern
"""
from .pattern import Pattern
import time
import math
# fast linear sin approx
def fastApprox(val):
return 1.0 - math.fabs( math.fmod(val, 2.0) - 1.0)
def constrain_int(value):
return int(min(255, max(value, 0)))
class ScrollSmooth(Pattern):
def __init__(self):
super(Pattern, self).__init__()
@classmethod
def get_id(self):
return 7
@classmethod
def update(self, strip, state):
# logging.info("Smooth")
for x in range(len(strip)):
c1 = [c * (x / float(state.length + 1) + float(time.time()) * 1000.0 / float(state.delay)) for c in state.color1]
# c2 is out of phase by 1
c2 = [c * (x / float(state.length + 1) + 1 + float(time.time()) * 1000.0 / float(state.delay)) for c in state.color2]
c3 = [constrain_int(a + b) for a, b in zip(c1, c2)]
strip[x] = c3<commit_msg>Fix issue with smoothscroll pattern<commit_after>"""
Smooth scroll pattern
"""
from .pattern import Pattern
import time
import math
# fast linear sin approx
def fastApprox(val):
return 1.0 - math.fabs( math.fmod(val, 2.0) - 1.0)
def constrain_int(value):
return int(min(255, max(value, 0)))
class ScrollSmooth(Pattern):
def __init__(self):
super(Pattern, self).__init__()
@classmethod
def get_id(self):
return 7
@classmethod
def update(self, strip, state):
# logging.info("Smooth")
for x in range(len(strip)):
c1 = [c * (x / float(state.length + 1) + float(time.time()) * 1000.0 / float(state.delay)) for c in state.color1]
# c2 is out of phase by 1
c2 = [c * (x / float(state.length + 1) + 1 + float(time.time()) * 1000.0 / float(state.delay)) for c in state.color2]
c3 = tuple(constrain_int(a + b) for a, b in zip(c1, c2))
strip[x] = c3
|
7ef473cf7aa76168509c99e2d09428146cd599c5
|
server.py
|
server.py
|
from flask import Flask, request
app = Flask(__name__)
@app.route('/retrieve', methods=['POST'])
def get():
public_key = request.form['PUBLIC_KEY']
enc_index = request.form['ENC_INDEX']
return "/retrieve index '{index}' with key '{key}'".format(index=enc_index, key=public_key)
@app.route('/set', methods=['POST'])
def put():
enc_index = request.form['ENC_INDEX']
enc_data = request.form['ENC_DATA']
return "/set '{index}' to '{data}'".format(data=enc_data, index=enc_index)
if __name__ == '__main__':
app.run(debug=True)
|
import argparse
from flask import Flask, request
parser = argparse.ArgumentParser(description="Start a Blindstore server.")
parser.add_argument('-d', '--debug', action='store_true',
help="enable Flask debug mode. DO NOT use in production.")
args = parser.parse_args()
app = Flask(__name__)
@app.route('/retrieve', methods=['POST'])
def get():
public_key = request.form['PUBLIC_KEY']
enc_index = request.form['ENC_INDEX']
return "/retrieve index '{index}' with key '{key}'".format(index=enc_index, key=public_key)
@app.route('/set', methods=['POST'])
def put():
enc_index = request.form['ENC_INDEX']
enc_data = request.form['ENC_DATA']
return "/set '{index}' to '{data}'".format(data=enc_data, index=enc_index)
if __name__ == '__main__':
app.run(debug=args.debug)
|
Add switch (-d or --debug) for debug mode
|
Add switch (-d or --debug) for debug mode
|
Python
|
mit
|
blindstore/blindstore-old-scarab
|
from flask import Flask, request
app = Flask(__name__)
@app.route('/retrieve', methods=['POST'])
def get():
public_key = request.form['PUBLIC_KEY']
enc_index = request.form['ENC_INDEX']
return "/retrieve index '{index}' with key '{key}'".format(index=enc_index, key=public_key)
@app.route('/set', methods=['POST'])
def put():
enc_index = request.form['ENC_INDEX']
enc_data = request.form['ENC_DATA']
return "/set '{index}' to '{data}'".format(data=enc_data, index=enc_index)
if __name__ == '__main__':
app.run(debug=True)
Add switch (-d or --debug) for debug mode
|
import argparse
from flask import Flask, request
parser = argparse.ArgumentParser(description="Start a Blindstore server.")
parser.add_argument('-d', '--debug', action='store_true',
help="enable Flask debug mode. DO NOT use in production.")
args = parser.parse_args()
app = Flask(__name__)
@app.route('/retrieve', methods=['POST'])
def get():
public_key = request.form['PUBLIC_KEY']
enc_index = request.form['ENC_INDEX']
return "/retrieve index '{index}' with key '{key}'".format(index=enc_index, key=public_key)
@app.route('/set', methods=['POST'])
def put():
enc_index = request.form['ENC_INDEX']
enc_data = request.form['ENC_DATA']
return "/set '{index}' to '{data}'".format(data=enc_data, index=enc_index)
if __name__ == '__main__':
app.run(debug=args.debug)
|
<commit_before>from flask import Flask, request
app = Flask(__name__)
@app.route('/retrieve', methods=['POST'])
def get():
public_key = request.form['PUBLIC_KEY']
enc_index = request.form['ENC_INDEX']
return "/retrieve index '{index}' with key '{key}'".format(index=enc_index, key=public_key)
@app.route('/set', methods=['POST'])
def put():
enc_index = request.form['ENC_INDEX']
enc_data = request.form['ENC_DATA']
return "/set '{index}' to '{data}'".format(data=enc_data, index=enc_index)
if __name__ == '__main__':
app.run(debug=True)
<commit_msg>Add switch (-d or --debug) for debug mode<commit_after>
|
import argparse
from flask import Flask, request
parser = argparse.ArgumentParser(description="Start a Blindstore server.")
parser.add_argument('-d', '--debug', action='store_true',
help="enable Flask debug mode. DO NOT use in production.")
args = parser.parse_args()
app = Flask(__name__)
@app.route('/retrieve', methods=['POST'])
def get():
public_key = request.form['PUBLIC_KEY']
enc_index = request.form['ENC_INDEX']
return "/retrieve index '{index}' with key '{key}'".format(index=enc_index, key=public_key)
@app.route('/set', methods=['POST'])
def put():
enc_index = request.form['ENC_INDEX']
enc_data = request.form['ENC_DATA']
return "/set '{index}' to '{data}'".format(data=enc_data, index=enc_index)
if __name__ == '__main__':
app.run(debug=args.debug)
|
from flask import Flask, request
app = Flask(__name__)
@app.route('/retrieve', methods=['POST'])
def get():
public_key = request.form['PUBLIC_KEY']
enc_index = request.form['ENC_INDEX']
return "/retrieve index '{index}' with key '{key}'".format(index=enc_index, key=public_key)
@app.route('/set', methods=['POST'])
def put():
enc_index = request.form['ENC_INDEX']
enc_data = request.form['ENC_DATA']
return "/set '{index}' to '{data}'".format(data=enc_data, index=enc_index)
if __name__ == '__main__':
app.run(debug=True)
Add switch (-d or --debug) for debug modeimport argparse
from flask import Flask, request
parser = argparse.ArgumentParser(description="Start a Blindstore server.")
parser.add_argument('-d', '--debug', action='store_true',
help="enable Flask debug mode. DO NOT use in production.")
args = parser.parse_args()
app = Flask(__name__)
@app.route('/retrieve', methods=['POST'])
def get():
public_key = request.form['PUBLIC_KEY']
enc_index = request.form['ENC_INDEX']
return "/retrieve index '{index}' with key '{key}'".format(index=enc_index, key=public_key)
@app.route('/set', methods=['POST'])
def put():
enc_index = request.form['ENC_INDEX']
enc_data = request.form['ENC_DATA']
return "/set '{index}' to '{data}'".format(data=enc_data, index=enc_index)
if __name__ == '__main__':
app.run(debug=args.debug)
|
<commit_before>from flask import Flask, request
app = Flask(__name__)
@app.route('/retrieve', methods=['POST'])
def get():
public_key = request.form['PUBLIC_KEY']
enc_index = request.form['ENC_INDEX']
return "/retrieve index '{index}' with key '{key}'".format(index=enc_index, key=public_key)
@app.route('/set', methods=['POST'])
def put():
enc_index = request.form['ENC_INDEX']
enc_data = request.form['ENC_DATA']
return "/set '{index}' to '{data}'".format(data=enc_data, index=enc_index)
if __name__ == '__main__':
app.run(debug=True)
<commit_msg>Add switch (-d or --debug) for debug mode<commit_after>import argparse
from flask import Flask, request
parser = argparse.ArgumentParser(description="Start a Blindstore server.")
parser.add_argument('-d', '--debug', action='store_true',
help="enable Flask debug mode. DO NOT use in production.")
args = parser.parse_args()
app = Flask(__name__)
@app.route('/retrieve', methods=['POST'])
def get():
public_key = request.form['PUBLIC_KEY']
enc_index = request.form['ENC_INDEX']
return "/retrieve index '{index}' with key '{key}'".format(index=enc_index, key=public_key)
@app.route('/set', methods=['POST'])
def put():
enc_index = request.form['ENC_INDEX']
enc_data = request.form['ENC_DATA']
return "/set '{index}' to '{data}'".format(data=enc_data, index=enc_index)
if __name__ == '__main__':
app.run(debug=args.debug)
|
15964c974220c88a1b2fbca353d4a11b180e2bd8
|
_launch.py
|
_launch.py
|
from dragonfly import (Grammar, MappingRule, Choice, Text, Key, Function)
from dragonglue.command import send_command
grammar = Grammar("launch")
applications = {
'sublime': 'w-s',
'pycharm': 'w-d',
'chrome': 'w-f',
'logs': 'w-j',
'SQL': 'w-k',
'IPython': 'w-l',
'shell': 'w-semicolon',
'terminal': 'w-a',
# 'spotify': 'spotify /home/dan/bin/spotify',
}
# aliases
applications['charm'] = applications['pycharm']
applications['termie'] = applications['terminal']
def Command(cmd):
def ex(application=''):
# print 'execute', cmd + application
send_command(cmd + application)
return Function(ex)
launch_rule = MappingRule(
name="launch",
mapping={
'Do run': Key('w-x'),
'get <application>': Key('%(application)s'),
# 're-browse': Key('w-F'),
'voice sync': Command('subl --command voice_sync'),
'(touch | refresh) multi-edit': Command('touch /home/drocco/source/voice/natlink/commands/_multiedit.py'),
},
extras=[
Choice('application', applications)
]
)
grammar.add_rule(launch_rule)
grammar.load()
def unload():
global grammar
if grammar: grammar.unload()
grammar = None
|
from dragonfly import (Grammar, MappingRule, Choice, Text, Key, Function)
from dragonglue.command import send_command, Command
grammar = Grammar("launch")
applications = {
'sublime': 'w-s',
'pycharm': 'w-d',
'chrome': 'w-f',
'logs': 'w-j',
'SQL': 'w-k',
'IPython': 'w-l',
'shell': 'w-semicolon',
'terminal': 'w-a',
# 'spotify': 'spotify /home/dan/bin/spotify',
}
# aliases
applications['charm'] = applications['pycharm']
applications['termie'] = applications['terminal']
launch_rule = MappingRule(
name="launch",
mapping={
'Do run': Key('w-x'),
'get <application>': Key('%(application)s'),
# 're-browse': Key('w-F'),
'voice sync': Command('subl --command voice_sync'),
'(touch | refresh) multi-edit': Command('touch /home/drocco/source/voice/natlink/commands/_multiedit.py'),
},
extras=[
Choice('application', applications)
]
)
grammar.add_rule(launch_rule)
grammar.load()
def unload():
global grammar
if grammar: grammar.unload()
grammar = None
|
Refactor Command action to dragonglue.command
|
Refactor Command action to dragonglue.command
|
Python
|
mit
|
drocco007/vox_commands
|
from dragonfly import (Grammar, MappingRule, Choice, Text, Key, Function)
from dragonglue.command import send_command
grammar = Grammar("launch")
applications = {
'sublime': 'w-s',
'pycharm': 'w-d',
'chrome': 'w-f',
'logs': 'w-j',
'SQL': 'w-k',
'IPython': 'w-l',
'shell': 'w-semicolon',
'terminal': 'w-a',
# 'spotify': 'spotify /home/dan/bin/spotify',
}
# aliases
applications['charm'] = applications['pycharm']
applications['termie'] = applications['terminal']
def Command(cmd):
def ex(application=''):
# print 'execute', cmd + application
send_command(cmd + application)
return Function(ex)
launch_rule = MappingRule(
name="launch",
mapping={
'Do run': Key('w-x'),
'get <application>': Key('%(application)s'),
# 're-browse': Key('w-F'),
'voice sync': Command('subl --command voice_sync'),
'(touch | refresh) multi-edit': Command('touch /home/drocco/source/voice/natlink/commands/_multiedit.py'),
},
extras=[
Choice('application', applications)
]
)
grammar.add_rule(launch_rule)
grammar.load()
def unload():
global grammar
if grammar: grammar.unload()
grammar = None
Refactor Command action to dragonglue.command
|
from dragonfly import (Grammar, MappingRule, Choice, Text, Key, Function)
from dragonglue.command import send_command, Command
grammar = Grammar("launch")
applications = {
'sublime': 'w-s',
'pycharm': 'w-d',
'chrome': 'w-f',
'logs': 'w-j',
'SQL': 'w-k',
'IPython': 'w-l',
'shell': 'w-semicolon',
'terminal': 'w-a',
# 'spotify': 'spotify /home/dan/bin/spotify',
}
# aliases
applications['charm'] = applications['pycharm']
applications['termie'] = applications['terminal']
launch_rule = MappingRule(
name="launch",
mapping={
'Do run': Key('w-x'),
'get <application>': Key('%(application)s'),
# 're-browse': Key('w-F'),
'voice sync': Command('subl --command voice_sync'),
'(touch | refresh) multi-edit': Command('touch /home/drocco/source/voice/natlink/commands/_multiedit.py'),
},
extras=[
Choice('application', applications)
]
)
grammar.add_rule(launch_rule)
grammar.load()
def unload():
global grammar
if grammar: grammar.unload()
grammar = None
|
<commit_before>from dragonfly import (Grammar, MappingRule, Choice, Text, Key, Function)
from dragonglue.command import send_command
grammar = Grammar("launch")
applications = {
'sublime': 'w-s',
'pycharm': 'w-d',
'chrome': 'w-f',
'logs': 'w-j',
'SQL': 'w-k',
'IPython': 'w-l',
'shell': 'w-semicolon',
'terminal': 'w-a',
# 'spotify': 'spotify /home/dan/bin/spotify',
}
# aliases
applications['charm'] = applications['pycharm']
applications['termie'] = applications['terminal']
def Command(cmd):
def ex(application=''):
# print 'execute', cmd + application
send_command(cmd + application)
return Function(ex)
launch_rule = MappingRule(
name="launch",
mapping={
'Do run': Key('w-x'),
'get <application>': Key('%(application)s'),
# 're-browse': Key('w-F'),
'voice sync': Command('subl --command voice_sync'),
'(touch | refresh) multi-edit': Command('touch /home/drocco/source/voice/natlink/commands/_multiedit.py'),
},
extras=[
Choice('application', applications)
]
)
grammar.add_rule(launch_rule)
grammar.load()
def unload():
global grammar
if grammar: grammar.unload()
grammar = None
<commit_msg>Refactor Command action to dragonglue.command<commit_after>
|
from dragonfly import (Grammar, MappingRule, Choice, Text, Key, Function)
from dragonglue.command import send_command, Command
grammar = Grammar("launch")
applications = {
'sublime': 'w-s',
'pycharm': 'w-d',
'chrome': 'w-f',
'logs': 'w-j',
'SQL': 'w-k',
'IPython': 'w-l',
'shell': 'w-semicolon',
'terminal': 'w-a',
# 'spotify': 'spotify /home/dan/bin/spotify',
}
# aliases
applications['charm'] = applications['pycharm']
applications['termie'] = applications['terminal']
launch_rule = MappingRule(
name="launch",
mapping={
'Do run': Key('w-x'),
'get <application>': Key('%(application)s'),
# 're-browse': Key('w-F'),
'voice sync': Command('subl --command voice_sync'),
'(touch | refresh) multi-edit': Command('touch /home/drocco/source/voice/natlink/commands/_multiedit.py'),
},
extras=[
Choice('application', applications)
]
)
grammar.add_rule(launch_rule)
grammar.load()
def unload():
global grammar
if grammar: grammar.unload()
grammar = None
|
from dragonfly import (Grammar, MappingRule, Choice, Text, Key, Function)
from dragonglue.command import send_command
grammar = Grammar("launch")
applications = {
'sublime': 'w-s',
'pycharm': 'w-d',
'chrome': 'w-f',
'logs': 'w-j',
'SQL': 'w-k',
'IPython': 'w-l',
'shell': 'w-semicolon',
'terminal': 'w-a',
# 'spotify': 'spotify /home/dan/bin/spotify',
}
# aliases
applications['charm'] = applications['pycharm']
applications['termie'] = applications['terminal']
def Command(cmd):
def ex(application=''):
# print 'execute', cmd + application
send_command(cmd + application)
return Function(ex)
launch_rule = MappingRule(
name="launch",
mapping={
'Do run': Key('w-x'),
'get <application>': Key('%(application)s'),
# 're-browse': Key('w-F'),
'voice sync': Command('subl --command voice_sync'),
'(touch | refresh) multi-edit': Command('touch /home/drocco/source/voice/natlink/commands/_multiedit.py'),
},
extras=[
Choice('application', applications)
]
)
grammar.add_rule(launch_rule)
grammar.load()
def unload():
global grammar
if grammar: grammar.unload()
grammar = None
Refactor Command action to dragonglue.commandfrom dragonfly import (Grammar, MappingRule, Choice, Text, Key, Function)
from dragonglue.command import send_command, Command
grammar = Grammar("launch")
applications = {
'sublime': 'w-s',
'pycharm': 'w-d',
'chrome': 'w-f',
'logs': 'w-j',
'SQL': 'w-k',
'IPython': 'w-l',
'shell': 'w-semicolon',
'terminal': 'w-a',
# 'spotify': 'spotify /home/dan/bin/spotify',
}
# aliases
applications['charm'] = applications['pycharm']
applications['termie'] = applications['terminal']
launch_rule = MappingRule(
name="launch",
mapping={
'Do run': Key('w-x'),
'get <application>': Key('%(application)s'),
# 're-browse': Key('w-F'),
'voice sync': Command('subl --command voice_sync'),
'(touch | refresh) multi-edit': Command('touch /home/drocco/source/voice/natlink/commands/_multiedit.py'),
},
extras=[
Choice('application', applications)
]
)
grammar.add_rule(launch_rule)
grammar.load()
def unload():
global grammar
if grammar: grammar.unload()
grammar = None
|
<commit_before>from dragonfly import (Grammar, MappingRule, Choice, Text, Key, Function)
from dragonglue.command import send_command
grammar = Grammar("launch")
applications = {
'sublime': 'w-s',
'pycharm': 'w-d',
'chrome': 'w-f',
'logs': 'w-j',
'SQL': 'w-k',
'IPython': 'w-l',
'shell': 'w-semicolon',
'terminal': 'w-a',
# 'spotify': 'spotify /home/dan/bin/spotify',
}
# aliases
applications['charm'] = applications['pycharm']
applications['termie'] = applications['terminal']
def Command(cmd):
def ex(application=''):
# print 'execute', cmd + application
send_command(cmd + application)
return Function(ex)
launch_rule = MappingRule(
name="launch",
mapping={
'Do run': Key('w-x'),
'get <application>': Key('%(application)s'),
# 're-browse': Key('w-F'),
'voice sync': Command('subl --command voice_sync'),
'(touch | refresh) multi-edit': Command('touch /home/drocco/source/voice/natlink/commands/_multiedit.py'),
},
extras=[
Choice('application', applications)
]
)
grammar.add_rule(launch_rule)
grammar.load()
def unload():
global grammar
if grammar: grammar.unload()
grammar = None
<commit_msg>Refactor Command action to dragonglue.command<commit_after>from dragonfly import (Grammar, MappingRule, Choice, Text, Key, Function)
from dragonglue.command import send_command, Command
grammar = Grammar("launch")
applications = {
'sublime': 'w-s',
'pycharm': 'w-d',
'chrome': 'w-f',
'logs': 'w-j',
'SQL': 'w-k',
'IPython': 'w-l',
'shell': 'w-semicolon',
'terminal': 'w-a',
# 'spotify': 'spotify /home/dan/bin/spotify',
}
# aliases
applications['charm'] = applications['pycharm']
applications['termie'] = applications['terminal']
launch_rule = MappingRule(
name="launch",
mapping={
'Do run': Key('w-x'),
'get <application>': Key('%(application)s'),
# 're-browse': Key('w-F'),
'voice sync': Command('subl --command voice_sync'),
'(touch | refresh) multi-edit': Command('touch /home/drocco/source/voice/natlink/commands/_multiedit.py'),
},
extras=[
Choice('application', applications)
]
)
grammar.add_rule(launch_rule)
grammar.load()
def unload():
global grammar
if grammar: grammar.unload()
grammar = None
|
18fa2a02b073ec0cf7fb82152389c312844b5fda
|
wsgi.py
|
wsgi.py
|
"""
WSGI script run on Heroku using gunicorn.
Exposes the app and configures it to use Heroku environment vars.
"""
from suddendev import create_app, socketio
app = create_app()
socketio.run(app)
|
"""
WSGI script run on Heroku using gunicorn.
Exposes the app and configures it to use Heroku environment vars.
"""
import os
from suddendev import create_app, socketio
app = create_app()
port = int(os.environ.get("PORT", 5000))
socketio.run(app, host='0.0.0.0', port=port)
|
Read Heroku port envvar and use if given.
|
[NG] Read Heroku port envvar and use if given.
|
Python
|
mit
|
SuddenDevs/SuddenDev,SuddenDevs/SuddenDev,SuddenDevs/SuddenDev,SuddenDevs/SuddenDev
|
"""
WSGI script run on Heroku using gunicorn.
Exposes the app and configures it to use Heroku environment vars.
"""
from suddendev import create_app, socketio
app = create_app()
socketio.run(app)
[NG] Read Heroku port envvar and use if given.
|
"""
WSGI script run on Heroku using gunicorn.
Exposes the app and configures it to use Heroku environment vars.
"""
import os
from suddendev import create_app, socketio
app = create_app()
port = int(os.environ.get("PORT", 5000))
socketio.run(app, host='0.0.0.0', port=port)
|
<commit_before>"""
WSGI script run on Heroku using gunicorn.
Exposes the app and configures it to use Heroku environment vars.
"""
from suddendev import create_app, socketio
app = create_app()
socketio.run(app)
<commit_msg>[NG] Read Heroku port envvar and use if given.<commit_after>
|
"""
WSGI script run on Heroku using gunicorn.
Exposes the app and configures it to use Heroku environment vars.
"""
import os
from suddendev import create_app, socketio
app = create_app()
port = int(os.environ.get("PORT", 5000))
socketio.run(app, host='0.0.0.0', port=port)
|
"""
WSGI script run on Heroku using gunicorn.
Exposes the app and configures it to use Heroku environment vars.
"""
from suddendev import create_app, socketio
app = create_app()
socketio.run(app)
[NG] Read Heroku port envvar and use if given."""
WSGI script run on Heroku using gunicorn.
Exposes the app and configures it to use Heroku environment vars.
"""
import os
from suddendev import create_app, socketio
app = create_app()
port = int(os.environ.get("PORT", 5000))
socketio.run(app, host='0.0.0.0', port=port)
|
<commit_before>"""
WSGI script run on Heroku using gunicorn.
Exposes the app and configures it to use Heroku environment vars.
"""
from suddendev import create_app, socketio
app = create_app()
socketio.run(app)
<commit_msg>[NG] Read Heroku port envvar and use if given.<commit_after>"""
WSGI script run on Heroku using gunicorn.
Exposes the app and configures it to use Heroku environment vars.
"""
import os
from suddendev import create_app, socketio
app = create_app()
port = int(os.environ.get("PORT", 5000))
socketio.run(app, host='0.0.0.0', port=port)
|
8c1fa5b134bf6f64dca258c087dc01f9e19e6ca4
|
tests/__init__.py
|
tests/__init__.py
|
"""
distutilazy.tests
-----------------
Tests for distutilazy
:license: MIT, see LICENSE for more details.
"""
import os
__all__ = ["test_util", "test_clean"]
for file_ in os.listdir(os.path.dirname(__file__)):
if file_.startswith('test_') and file_.endswith('.py'):
__all__.append(file_.rsplit('.py', 1)[0])
|
"""
distutilazy.tests
-----------------
Tests for distutilazy
:license: MIT, see LICENSE for more details.
"""
from os import path
import glob
test_modules = [path.splitext(path.basename(filename))[0] for filename in glob.glob(path.join(path.dirname(__file__), 'test*.py'))]
__all__ = test_modules
|
Improve detecting available test modules in test package
|
Improve detecting available test modules in test package
By using glob and path.splitext instead of manually iterating over files
in the directory and filtering out files based on their names.
|
Python
|
mit
|
farzadghanei/distutilazy
|
"""
distutilazy.tests
-----------------
Tests for distutilazy
:license: MIT, see LICENSE for more details.
"""
import os
__all__ = ["test_util", "test_clean"]
for file_ in os.listdir(os.path.dirname(__file__)):
if file_.startswith('test_') and file_.endswith('.py'):
__all__.append(file_.rsplit('.py', 1)[0])
Improve detecting available test modules in test package
By using glob and path.splitext instead of manually iterating over files
in the directory and filtering out files based on their names.
|
"""
distutilazy.tests
-----------------
Tests for distutilazy
:license: MIT, see LICENSE for more details.
"""
from os import path
import glob
test_modules = [path.splitext(path.basename(filename))[0] for filename in glob.glob(path.join(path.dirname(__file__), 'test*.py'))]
__all__ = test_modules
|
<commit_before>"""
distutilazy.tests
-----------------
Tests for distutilazy
:license: MIT, see LICENSE for more details.
"""
import os
__all__ = ["test_util", "test_clean"]
for file_ in os.listdir(os.path.dirname(__file__)):
if file_.startswith('test_') and file_.endswith('.py'):
__all__.append(file_.rsplit('.py', 1)[0])
<commit_msg>Improve detecting available test modules in test package
By using glob and path.splitext instead of manually iterating over files
in the directory and filtering out files based on their names.<commit_after>
|
"""
distutilazy.tests
-----------------
Tests for distutilazy
:license: MIT, see LICENSE for more details.
"""
from os import path
import glob
test_modules = [path.splitext(path.basename(filename))[0] for filename in glob.glob(path.join(path.dirname(__file__), 'test*.py'))]
__all__ = test_modules
|
"""
distutilazy.tests
-----------------
Tests for distutilazy
:license: MIT, see LICENSE for more details.
"""
import os
__all__ = ["test_util", "test_clean"]
for file_ in os.listdir(os.path.dirname(__file__)):
if file_.startswith('test_') and file_.endswith('.py'):
__all__.append(file_.rsplit('.py', 1)[0])
Improve detecting available test modules in test package
By using glob and path.splitext instead of manually iterating over files
in the directory and filtering out files based on their names."""
distutilazy.tests
-----------------
Tests for distutilazy
:license: MIT, see LICENSE for more details.
"""
from os import path
import glob
test_modules = [path.splitext(path.basename(filename))[0] for filename in glob.glob(path.join(path.dirname(__file__), 'test*.py'))]
__all__ = test_modules
|
<commit_before>"""
distutilazy.tests
-----------------
Tests for distutilazy
:license: MIT, see LICENSE for more details.
"""
import os
__all__ = ["test_util", "test_clean"]
for file_ in os.listdir(os.path.dirname(__file__)):
if file_.startswith('test_') and file_.endswith('.py'):
__all__.append(file_.rsplit('.py', 1)[0])
<commit_msg>Improve detecting available test modules in test package
By using glob and path.splitext instead of manually iterating over files
in the directory and filtering out files based on their names.<commit_after>"""
distutilazy.tests
-----------------
Tests for distutilazy
:license: MIT, see LICENSE for more details.
"""
from os import path
import glob
test_modules = [path.splitext(path.basename(filename))[0] for filename in glob.glob(path.join(path.dirname(__file__), 'test*.py'))]
__all__ = test_modules
|
76e1565200dda04e4091be761c737042f9a15e67
|
synapse/media/v1/__init__.py
|
synapse/media/v1/__init__.py
|
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import PIL.Image
# check for JPEG support.
try:
PIL.Image._getdecoder("rgb", "jpeg", None)
except IOError as e:
if str(e).startswith("decoder jpeg not available"):
raise Exception(
"FATAL: jpeg codec not supported. Install pillow correctly! "
" 'sudo apt-get install libjpeg-dev' then 'pip install -I pillow'"
)
except Exception:
# any other exception is fine
pass
# check for PNG support.
try:
PIL.Image._getdecoder("rgb", "zip", None)
except IOError as e:
if str(e).startswith("decoder zip not available"):
raise Exception(
"FATAL: zip codec not supported. Install pillow correctly! "
" 'sudo apt-get install libjpeg-dev' then 'pip install -I pillow'"
)
except Exception:
# any other exception is fine
pass
|
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import PIL.Image
# check for JPEG support.
try:
PIL.Image._getdecoder("rgb", "jpeg", None)
except IOError as e:
if str(e).startswith("decoder jpeg not available"):
raise Exception(
"FATAL: jpeg codec not supported. Install pillow correctly! "
" 'sudo apt-get install libjpeg-dev' then 'pip uninstall pillow &&"
" pip install pillow --user'"
)
except Exception:
# any other exception is fine
pass
# check for PNG support.
try:
PIL.Image._getdecoder("rgb", "zip", None)
except IOError as e:
if str(e).startswith("decoder zip not available"):
raise Exception(
"FATAL: zip codec not supported. Install pillow correctly! "
" 'sudo apt-get install libjpeg-dev' then 'pip uninstall pillow &&"
" pip install pillow --user'"
)
except Exception:
# any other exception is fine
pass
|
Change error message for missing pillow libs.
|
Change error message for missing pillow libs.
|
Python
|
apache-2.0
|
illicitonion/synapse,howethomas/synapse,iot-factory/synapse,TribeMedia/synapse,matrix-org/synapse,iot-factory/synapse,illicitonion/synapse,matrix-org/synapse,matrix-org/synapse,rzr/synapse,howethomas/synapse,rzr/synapse,howethomas/synapse,howethomas/synapse,illicitonion/synapse,illicitonion/synapse,rzr/synapse,matrix-org/synapse,matrix-org/synapse,rzr/synapse,howethomas/synapse,TribeMedia/synapse,TribeMedia/synapse,illicitonion/synapse,iot-factory/synapse,TribeMedia/synapse,iot-factory/synapse,rzr/synapse,matrix-org/synapse,iot-factory/synapse,TribeMedia/synapse
|
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import PIL.Image
# check for JPEG support.
try:
PIL.Image._getdecoder("rgb", "jpeg", None)
except IOError as e:
if str(e).startswith("decoder jpeg not available"):
raise Exception(
"FATAL: jpeg codec not supported. Install pillow correctly! "
" 'sudo apt-get install libjpeg-dev' then 'pip install -I pillow'"
)
except Exception:
# any other exception is fine
pass
# check for PNG support.
try:
PIL.Image._getdecoder("rgb", "zip", None)
except IOError as e:
if str(e).startswith("decoder zip not available"):
raise Exception(
"FATAL: zip codec not supported. Install pillow correctly! "
" 'sudo apt-get install libjpeg-dev' then 'pip install -I pillow'"
)
except Exception:
# any other exception is fine
pass
Change error message for missing pillow libs.
|
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import PIL.Image
# check for JPEG support.
try:
PIL.Image._getdecoder("rgb", "jpeg", None)
except IOError as e:
if str(e).startswith("decoder jpeg not available"):
raise Exception(
"FATAL: jpeg codec not supported. Install pillow correctly! "
" 'sudo apt-get install libjpeg-dev' then 'pip uninstall pillow &&"
" pip install pillow --user'"
)
except Exception:
# any other exception is fine
pass
# check for PNG support.
try:
PIL.Image._getdecoder("rgb", "zip", None)
except IOError as e:
if str(e).startswith("decoder zip not available"):
raise Exception(
"FATAL: zip codec not supported. Install pillow correctly! "
" 'sudo apt-get install libjpeg-dev' then 'pip uninstall pillow &&"
" pip install pillow --user'"
)
except Exception:
# any other exception is fine
pass
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import PIL.Image
# check for JPEG support.
try:
PIL.Image._getdecoder("rgb", "jpeg", None)
except IOError as e:
if str(e).startswith("decoder jpeg not available"):
raise Exception(
"FATAL: jpeg codec not supported. Install pillow correctly! "
" 'sudo apt-get install libjpeg-dev' then 'pip install -I pillow'"
)
except Exception:
# any other exception is fine
pass
# check for PNG support.
try:
PIL.Image._getdecoder("rgb", "zip", None)
except IOError as e:
if str(e).startswith("decoder zip not available"):
raise Exception(
"FATAL: zip codec not supported. Install pillow correctly! "
" 'sudo apt-get install libjpeg-dev' then 'pip install -I pillow'"
)
except Exception:
# any other exception is fine
pass
<commit_msg>Change error message for missing pillow libs.<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import PIL.Image
# check for JPEG support.
try:
PIL.Image._getdecoder("rgb", "jpeg", None)
except IOError as e:
if str(e).startswith("decoder jpeg not available"):
raise Exception(
"FATAL: jpeg codec not supported. Install pillow correctly! "
" 'sudo apt-get install libjpeg-dev' then 'pip uninstall pillow &&"
" pip install pillow --user'"
)
except Exception:
# any other exception is fine
pass
# check for PNG support.
try:
PIL.Image._getdecoder("rgb", "zip", None)
except IOError as e:
if str(e).startswith("decoder zip not available"):
raise Exception(
"FATAL: zip codec not supported. Install pillow correctly! "
" 'sudo apt-get install libjpeg-dev' then 'pip uninstall pillow &&"
" pip install pillow --user'"
)
except Exception:
# any other exception is fine
pass
|
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import PIL.Image
# check for JPEG support.
try:
PIL.Image._getdecoder("rgb", "jpeg", None)
except IOError as e:
if str(e).startswith("decoder jpeg not available"):
raise Exception(
"FATAL: jpeg codec not supported. Install pillow correctly! "
" 'sudo apt-get install libjpeg-dev' then 'pip install -I pillow'"
)
except Exception:
# any other exception is fine
pass
# check for PNG support.
try:
PIL.Image._getdecoder("rgb", "zip", None)
except IOError as e:
if str(e).startswith("decoder zip not available"):
raise Exception(
"FATAL: zip codec not supported. Install pillow correctly! "
" 'sudo apt-get install libjpeg-dev' then 'pip install -I pillow'"
)
except Exception:
# any other exception is fine
pass
Change error message for missing pillow libs.# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import PIL.Image
# check for JPEG support.
try:
PIL.Image._getdecoder("rgb", "jpeg", None)
except IOError as e:
if str(e).startswith("decoder jpeg not available"):
raise Exception(
"FATAL: jpeg codec not supported. Install pillow correctly! "
" 'sudo apt-get install libjpeg-dev' then 'pip uninstall pillow &&"
" pip install pillow --user'"
)
except Exception:
# any other exception is fine
pass
# check for PNG support.
try:
PIL.Image._getdecoder("rgb", "zip", None)
except IOError as e:
if str(e).startswith("decoder zip not available"):
raise Exception(
"FATAL: zip codec not supported. Install pillow correctly! "
" 'sudo apt-get install libjpeg-dev' then 'pip uninstall pillow &&"
" pip install pillow --user'"
)
except Exception:
# any other exception is fine
pass
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import PIL.Image
# check for JPEG support.
try:
PIL.Image._getdecoder("rgb", "jpeg", None)
except IOError as e:
if str(e).startswith("decoder jpeg not available"):
raise Exception(
"FATAL: jpeg codec not supported. Install pillow correctly! "
" 'sudo apt-get install libjpeg-dev' then 'pip install -I pillow'"
)
except Exception:
# any other exception is fine
pass
# check for PNG support.
try:
PIL.Image._getdecoder("rgb", "zip", None)
except IOError as e:
if str(e).startswith("decoder zip not available"):
raise Exception(
"FATAL: zip codec not supported. Install pillow correctly! "
" 'sudo apt-get install libjpeg-dev' then 'pip install -I pillow'"
)
except Exception:
# any other exception is fine
pass
<commit_msg>Change error message for missing pillow libs.<commit_after># -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import PIL.Image
# check for JPEG support.
try:
PIL.Image._getdecoder("rgb", "jpeg", None)
except IOError as e:
if str(e).startswith("decoder jpeg not available"):
raise Exception(
"FATAL: jpeg codec not supported. Install pillow correctly! "
" 'sudo apt-get install libjpeg-dev' then 'pip uninstall pillow &&"
" pip install pillow --user'"
)
except Exception:
# any other exception is fine
pass
# check for PNG support.
try:
PIL.Image._getdecoder("rgb", "zip", None)
except IOError as e:
if str(e).startswith("decoder zip not available"):
raise Exception(
"FATAL: zip codec not supported. Install pillow correctly! "
" 'sudo apt-get install libjpeg-dev' then 'pip uninstall pillow &&"
" pip install pillow --user'"
)
except Exception:
# any other exception is fine
pass
|
e8bcd56727199de75a0dcefe7590d3866a14f39d
|
django_mailbox/tests/test_mailbox.py
|
django_mailbox/tests/test_mailbox.py
|
from django.test import TestCase
from django_mailbox.models import Mailbox
__all__ = ['TestMailbox']
class TestMailbox(TestCase):
def test_protocol_info(self):
mailbox = Mailbox()
mailbox.uri = 'alpha://test.com'
expected_protocol = 'alpha'
actual_protocol = mailbox._protocol_info.scheme
self.assertEqual(
expected_protocol,
actual_protocol,
)
|
import os
from django.test import TestCase
from django_mailbox.models import Mailbox
__all__ = ['TestMailbox']
class TestMailbox(TestCase):
def test_protocol_info(self):
mailbox = Mailbox()
mailbox.uri = 'alpha://test.com'
expected_protocol = 'alpha'
actual_protocol = mailbox._protocol_info.scheme
self.assertEqual(
expected_protocol,
actual_protocol,
)
def test_last_polling_field_exists(self):
mailbox = Mailbox()
self.assertTrue(hasattr(mailbox, 'last_polling'))
def test_get_new_mail_update_last_polling(self):
mailbox = Mailbox.objects.create(uri="mbox://" + os.path.join(
os.path.dirname(__file__),
'messages',
'generic_message.eml',
))
self.assertEqual(mailbox.last_polling, None)
mailbox.get_new_mail()
self.assertNotEqual(mailbox.last_polling, None)
|
Add tests to update Mailbox.last_polling
|
Add tests to update Mailbox.last_polling
|
Python
|
mit
|
coddingtonbear/django-mailbox,ad-m/django-mailbox
|
from django.test import TestCase
from django_mailbox.models import Mailbox
__all__ = ['TestMailbox']
class TestMailbox(TestCase):
def test_protocol_info(self):
mailbox = Mailbox()
mailbox.uri = 'alpha://test.com'
expected_protocol = 'alpha'
actual_protocol = mailbox._protocol_info.scheme
self.assertEqual(
expected_protocol,
actual_protocol,
)
Add tests to update Mailbox.last_polling
|
import os
from django.test import TestCase
from django_mailbox.models import Mailbox
__all__ = ['TestMailbox']
class TestMailbox(TestCase):
def test_protocol_info(self):
mailbox = Mailbox()
mailbox.uri = 'alpha://test.com'
expected_protocol = 'alpha'
actual_protocol = mailbox._protocol_info.scheme
self.assertEqual(
expected_protocol,
actual_protocol,
)
def test_last_polling_field_exists(self):
mailbox = Mailbox()
self.assertTrue(hasattr(mailbox, 'last_polling'))
def test_get_new_mail_update_last_polling(self):
mailbox = Mailbox.objects.create(uri="mbox://" + os.path.join(
os.path.dirname(__file__),
'messages',
'generic_message.eml',
))
self.assertEqual(mailbox.last_polling, None)
mailbox.get_new_mail()
self.assertNotEqual(mailbox.last_polling, None)
|
<commit_before>from django.test import TestCase
from django_mailbox.models import Mailbox
__all__ = ['TestMailbox']
class TestMailbox(TestCase):
def test_protocol_info(self):
mailbox = Mailbox()
mailbox.uri = 'alpha://test.com'
expected_protocol = 'alpha'
actual_protocol = mailbox._protocol_info.scheme
self.assertEqual(
expected_protocol,
actual_protocol,
)
<commit_msg>Add tests to update Mailbox.last_polling<commit_after>
|
import os
from django.test import TestCase
from django_mailbox.models import Mailbox
__all__ = ['TestMailbox']
class TestMailbox(TestCase):
def test_protocol_info(self):
mailbox = Mailbox()
mailbox.uri = 'alpha://test.com'
expected_protocol = 'alpha'
actual_protocol = mailbox._protocol_info.scheme
self.assertEqual(
expected_protocol,
actual_protocol,
)
def test_last_polling_field_exists(self):
mailbox = Mailbox()
self.assertTrue(hasattr(mailbox, 'last_polling'))
def test_get_new_mail_update_last_polling(self):
mailbox = Mailbox.objects.create(uri="mbox://" + os.path.join(
os.path.dirname(__file__),
'messages',
'generic_message.eml',
))
self.assertEqual(mailbox.last_polling, None)
mailbox.get_new_mail()
self.assertNotEqual(mailbox.last_polling, None)
|
from django.test import TestCase
from django_mailbox.models import Mailbox
__all__ = ['TestMailbox']
class TestMailbox(TestCase):
def test_protocol_info(self):
mailbox = Mailbox()
mailbox.uri = 'alpha://test.com'
expected_protocol = 'alpha'
actual_protocol = mailbox._protocol_info.scheme
self.assertEqual(
expected_protocol,
actual_protocol,
)
Add tests to update Mailbox.last_pollingimport os
from django.test import TestCase
from django_mailbox.models import Mailbox
__all__ = ['TestMailbox']
class TestMailbox(TestCase):
def test_protocol_info(self):
mailbox = Mailbox()
mailbox.uri = 'alpha://test.com'
expected_protocol = 'alpha'
actual_protocol = mailbox._protocol_info.scheme
self.assertEqual(
expected_protocol,
actual_protocol,
)
def test_last_polling_field_exists(self):
mailbox = Mailbox()
self.assertTrue(hasattr(mailbox, 'last_polling'))
def test_get_new_mail_update_last_polling(self):
mailbox = Mailbox.objects.create(uri="mbox://" + os.path.join(
os.path.dirname(__file__),
'messages',
'generic_message.eml',
))
self.assertEqual(mailbox.last_polling, None)
mailbox.get_new_mail()
self.assertNotEqual(mailbox.last_polling, None)
|
<commit_before>from django.test import TestCase
from django_mailbox.models import Mailbox
__all__ = ['TestMailbox']
class TestMailbox(TestCase):
def test_protocol_info(self):
mailbox = Mailbox()
mailbox.uri = 'alpha://test.com'
expected_protocol = 'alpha'
actual_protocol = mailbox._protocol_info.scheme
self.assertEqual(
expected_protocol,
actual_protocol,
)
<commit_msg>Add tests to update Mailbox.last_polling<commit_after>import os
from django.test import TestCase
from django_mailbox.models import Mailbox
__all__ = ['TestMailbox']
class TestMailbox(TestCase):
def test_protocol_info(self):
mailbox = Mailbox()
mailbox.uri = 'alpha://test.com'
expected_protocol = 'alpha'
actual_protocol = mailbox._protocol_info.scheme
self.assertEqual(
expected_protocol,
actual_protocol,
)
def test_last_polling_field_exists(self):
mailbox = Mailbox()
self.assertTrue(hasattr(mailbox, 'last_polling'))
def test_get_new_mail_update_last_polling(self):
mailbox = Mailbox.objects.create(uri="mbox://" + os.path.join(
os.path.dirname(__file__),
'messages',
'generic_message.eml',
))
self.assertEqual(mailbox.last_polling, None)
mailbox.get_new_mail()
self.assertNotEqual(mailbox.last_polling, None)
|
ccf9e48cf874e7970c5b2e587e797a0501483139
|
spec/data/anagram_index_spec.py
|
spec/data/anagram_index_spec.py
|
from data import anagram_index, warehouse
from spec.mamba import *
with description('anagram_index'):
with before.all:
self.subject = anagram_index.AnagramIndex(warehouse.get('/words/unigram'))
with it('instantiates'):
expect(len(self.subject)).to(be_above(0))
with it('accepts pre-sort-jumbled anagrams'):
expect(self.subject).to(have_key('low'))
with it('accepts anti-sort-jumbled anagrams'):
expect(self.subject).to(have_key('wlo'))
with it('returns multiple matches'):
expect(self.subject['snap']).to(equal(['snap', 'naps']))
|
import collections
from data import anagram_index
from spec.data.fixtures import tries
from spec.mamba import *
with description('anagram_index'):
with before.all:
words = collections.OrderedDict(tries.kitchen_sink_data())
self.subject = anagram_index.AnagramIndex(words)
with it('instantiates'):
expect(len(self.subject)).to(be_above(0))
with it('accepts pre-sort-jumbled anagrams'):
expect(self.subject).to(have_key('low'))
with it('accepts anti-sort-jumbled anagrams'):
expect(self.subject).to(have_key('wlo'))
with it('returns multiple matches'):
expect(self.subject['snap']).to(equal(['snap', 'naps']))
|
Update anagram index spec data source.
|
Update anagram index spec data source.
|
Python
|
mit
|
PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge
|
from data import anagram_index, warehouse
from spec.mamba import *
with description('anagram_index'):
with before.all:
self.subject = anagram_index.AnagramIndex(warehouse.get('/words/unigram'))
with it('instantiates'):
expect(len(self.subject)).to(be_above(0))
with it('accepts pre-sort-jumbled anagrams'):
expect(self.subject).to(have_key('low'))
with it('accepts anti-sort-jumbled anagrams'):
expect(self.subject).to(have_key('wlo'))
with it('returns multiple matches'):
expect(self.subject['snap']).to(equal(['snap', 'naps']))
Update anagram index spec data source.
|
import collections
from data import anagram_index
from spec.data.fixtures import tries
from spec.mamba import *
with description('anagram_index'):
with before.all:
words = collections.OrderedDict(tries.kitchen_sink_data())
self.subject = anagram_index.AnagramIndex(words)
with it('instantiates'):
expect(len(self.subject)).to(be_above(0))
with it('accepts pre-sort-jumbled anagrams'):
expect(self.subject).to(have_key('low'))
with it('accepts anti-sort-jumbled anagrams'):
expect(self.subject).to(have_key('wlo'))
with it('returns multiple matches'):
expect(self.subject['snap']).to(equal(['snap', 'naps']))
|
<commit_before>from data import anagram_index, warehouse
from spec.mamba import *
with description('anagram_index'):
with before.all:
self.subject = anagram_index.AnagramIndex(warehouse.get('/words/unigram'))
with it('instantiates'):
expect(len(self.subject)).to(be_above(0))
with it('accepts pre-sort-jumbled anagrams'):
expect(self.subject).to(have_key('low'))
with it('accepts anti-sort-jumbled anagrams'):
expect(self.subject).to(have_key('wlo'))
with it('returns multiple matches'):
expect(self.subject['snap']).to(equal(['snap', 'naps']))
<commit_msg>Update anagram index spec data source.<commit_after>
|
import collections
from data import anagram_index
from spec.data.fixtures import tries
from spec.mamba import *
with description('anagram_index'):
with before.all:
words = collections.OrderedDict(tries.kitchen_sink_data())
self.subject = anagram_index.AnagramIndex(words)
with it('instantiates'):
expect(len(self.subject)).to(be_above(0))
with it('accepts pre-sort-jumbled anagrams'):
expect(self.subject).to(have_key('low'))
with it('accepts anti-sort-jumbled anagrams'):
expect(self.subject).to(have_key('wlo'))
with it('returns multiple matches'):
expect(self.subject['snap']).to(equal(['snap', 'naps']))
|
from data import anagram_index, warehouse
from spec.mamba import *
with description('anagram_index'):
with before.all:
self.subject = anagram_index.AnagramIndex(warehouse.get('/words/unigram'))
with it('instantiates'):
expect(len(self.subject)).to(be_above(0))
with it('accepts pre-sort-jumbled anagrams'):
expect(self.subject).to(have_key('low'))
with it('accepts anti-sort-jumbled anagrams'):
expect(self.subject).to(have_key('wlo'))
with it('returns multiple matches'):
expect(self.subject['snap']).to(equal(['snap', 'naps']))
Update anagram index spec data source.import collections
from data import anagram_index
from spec.data.fixtures import tries
from spec.mamba import *
with description('anagram_index'):
with before.all:
words = collections.OrderedDict(tries.kitchen_sink_data())
self.subject = anagram_index.AnagramIndex(words)
with it('instantiates'):
expect(len(self.subject)).to(be_above(0))
with it('accepts pre-sort-jumbled anagrams'):
expect(self.subject).to(have_key('low'))
with it('accepts anti-sort-jumbled anagrams'):
expect(self.subject).to(have_key('wlo'))
with it('returns multiple matches'):
expect(self.subject['snap']).to(equal(['snap', 'naps']))
|
<commit_before>from data import anagram_index, warehouse
from spec.mamba import *
with description('anagram_index'):
with before.all:
self.subject = anagram_index.AnagramIndex(warehouse.get('/words/unigram'))
with it('instantiates'):
expect(len(self.subject)).to(be_above(0))
with it('accepts pre-sort-jumbled anagrams'):
expect(self.subject).to(have_key('low'))
with it('accepts anti-sort-jumbled anagrams'):
expect(self.subject).to(have_key('wlo'))
with it('returns multiple matches'):
expect(self.subject['snap']).to(equal(['snap', 'naps']))
<commit_msg>Update anagram index spec data source.<commit_after>import collections
from data import anagram_index
from spec.data.fixtures import tries
from spec.mamba import *
with description('anagram_index'):
with before.all:
words = collections.OrderedDict(tries.kitchen_sink_data())
self.subject = anagram_index.AnagramIndex(words)
with it('instantiates'):
expect(len(self.subject)).to(be_above(0))
with it('accepts pre-sort-jumbled anagrams'):
expect(self.subject).to(have_key('low'))
with it('accepts anti-sort-jumbled anagrams'):
expect(self.subject).to(have_key('wlo'))
with it('returns multiple matches'):
expect(self.subject['snap']).to(equal(['snap', 'naps']))
|
5a885124432ccb33d180a8e73c753ceab54ffdf5
|
src/Itemizers.py
|
src/Itemizers.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from Foundation import objc
from Foundation import NSBundle
from AppKit import NSImage
def iconForName(klass, name):
"""Return the NSImage instance representing a `name` item."""
imgpath = NSBundle.bundleForClass_(klass).pathForResource_ofType_(name, 'png')
img = NSImage.alloc().initWithContentsOfFile_(imgpath)
img.autorelease()
return img
class HaskellModuleItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for modules"""
def isDecorator(self):
return True
def image(self):
return iconForName(self.class__(), 'module')
class HaskellTypeItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for datatypes"""
def isDecorator(self):
return True
def image(self):
return iconForName(self.class__(), 'type')
def isTextualizer(self):
return True
def title(self):
return self.text().lstrip()
class HaskellFunctionItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for functions"""
pass
class HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')):
"""Itemizer for code blocks"""
def isTextualizer(self):
return True
def title(self):
return '%s %s' % (u'{…}', self.text().lstrip())
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from Foundation import objc
from Foundation import NSBundle
from AppKit import NSImage
haskellBundleIdentifier = 'org.purl.net.mkhl.haskell'
def iconForName(name):
"""Return the NSImage instance representing a `name` item."""
bundle = NSBundle.bundleWithIdentifier_(haskellBundleIdentifier)
imgpath = bundle.pathForResource_ofType_(name, 'png')
img = NSImage.alloc().initWithContentsOfFile_(imgpath)
img.autorelease()
return img
class HaskellModuleItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for modules"""
def isDecorator(self):
return True
def image(self):
return iconForName('module')
class HaskellTypeItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for datatypes"""
def isDecorator(self):
return True
def image(self):
return iconForName('type')
def isTextualizer(self):
return True
def title(self):
return self.text().lstrip()
class HaskellFunctionItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for functions"""
pass
class HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')):
"""Itemizer for code blocks"""
def isTextualizer(self):
return True
def title(self):
return '%s %s' % (u'{…}', self.text().lstrip())
|
Simplify the icon finder function.
|
Simplify the icon finder function.
We statically know our bundle identifier, so we don’t have too find the bundle by runtime class.
|
Python
|
mit
|
mkhl/haskell.sugar
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from Foundation import objc
from Foundation import NSBundle
from AppKit import NSImage
def iconForName(klass, name):
"""Return the NSImage instance representing a `name` item."""
imgpath = NSBundle.bundleForClass_(klass).pathForResource_ofType_(name, 'png')
img = NSImage.alloc().initWithContentsOfFile_(imgpath)
img.autorelease()
return img
class HaskellModuleItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for modules"""
def isDecorator(self):
return True
def image(self):
return iconForName(self.class__(), 'module')
class HaskellTypeItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for datatypes"""
def isDecorator(self):
return True
def image(self):
return iconForName(self.class__(), 'type')
def isTextualizer(self):
return True
def title(self):
return self.text().lstrip()
class HaskellFunctionItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for functions"""
pass
class HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')):
"""Itemizer for code blocks"""
def isTextualizer(self):
return True
def title(self):
return '%s %s' % (u'{…}', self.text().lstrip())
Simplify the icon finder function.
We statically know our bundle identifier, so we don’t have too find the bundle by runtime class.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from Foundation import objc
from Foundation import NSBundle
from AppKit import NSImage
haskellBundleIdentifier = 'org.purl.net.mkhl.haskell'
def iconForName(name):
"""Return the NSImage instance representing a `name` item."""
bundle = NSBundle.bundleWithIdentifier_(haskellBundleIdentifier)
imgpath = bundle.pathForResource_ofType_(name, 'png')
img = NSImage.alloc().initWithContentsOfFile_(imgpath)
img.autorelease()
return img
class HaskellModuleItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for modules"""
def isDecorator(self):
return True
def image(self):
return iconForName('module')
class HaskellTypeItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for datatypes"""
def isDecorator(self):
return True
def image(self):
return iconForName('type')
def isTextualizer(self):
return True
def title(self):
return self.text().lstrip()
class HaskellFunctionItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for functions"""
pass
class HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')):
"""Itemizer for code blocks"""
def isTextualizer(self):
return True
def title(self):
return '%s %s' % (u'{…}', self.text().lstrip())
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from Foundation import objc
from Foundation import NSBundle
from AppKit import NSImage
def iconForName(klass, name):
"""Return the NSImage instance representing a `name` item."""
imgpath = NSBundle.bundleForClass_(klass).pathForResource_ofType_(name, 'png')
img = NSImage.alloc().initWithContentsOfFile_(imgpath)
img.autorelease()
return img
class HaskellModuleItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for modules"""
def isDecorator(self):
return True
def image(self):
return iconForName(self.class__(), 'module')
class HaskellTypeItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for datatypes"""
def isDecorator(self):
return True
def image(self):
return iconForName(self.class__(), 'type')
def isTextualizer(self):
return True
def title(self):
return self.text().lstrip()
class HaskellFunctionItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for functions"""
pass
class HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')):
"""Itemizer for code blocks"""
def isTextualizer(self):
return True
def title(self):
return '%s %s' % (u'{…}', self.text().lstrip())
<commit_msg>Simplify the icon finder function.
We statically know our bundle identifier, so we don’t have too find the bundle by runtime class.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from Foundation import objc
from Foundation import NSBundle
from AppKit import NSImage
haskellBundleIdentifier = 'org.purl.net.mkhl.haskell'
def iconForName(name):
"""Return the NSImage instance representing a `name` item."""
bundle = NSBundle.bundleWithIdentifier_(haskellBundleIdentifier)
imgpath = bundle.pathForResource_ofType_(name, 'png')
img = NSImage.alloc().initWithContentsOfFile_(imgpath)
img.autorelease()
return img
class HaskellModuleItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for modules"""
def isDecorator(self):
return True
def image(self):
return iconForName('module')
class HaskellTypeItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for datatypes"""
def isDecorator(self):
return True
def image(self):
return iconForName('type')
def isTextualizer(self):
return True
def title(self):
return self.text().lstrip()
class HaskellFunctionItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for functions"""
pass
class HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')):
"""Itemizer for code blocks"""
def isTextualizer(self):
return True
def title(self):
return '%s %s' % (u'{…}', self.text().lstrip())
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from Foundation import objc
from Foundation import NSBundle
from AppKit import NSImage
def iconForName(klass, name):
"""Return the NSImage instance representing a `name` item."""
imgpath = NSBundle.bundleForClass_(klass).pathForResource_ofType_(name, 'png')
img = NSImage.alloc().initWithContentsOfFile_(imgpath)
img.autorelease()
return img
class HaskellModuleItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for modules"""
def isDecorator(self):
return True
def image(self):
return iconForName(self.class__(), 'module')
class HaskellTypeItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for datatypes"""
def isDecorator(self):
return True
def image(self):
return iconForName(self.class__(), 'type')
def isTextualizer(self):
return True
def title(self):
return self.text().lstrip()
class HaskellFunctionItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for functions"""
pass
class HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')):
"""Itemizer for code blocks"""
def isTextualizer(self):
return True
def title(self):
return '%s %s' % (u'{…}', self.text().lstrip())
Simplify the icon finder function.
We statically know our bundle identifier, so we don’t have too find the bundle by runtime class.#!/usr/bin/env python
# -*- coding: utf-8 -*-
from Foundation import objc
from Foundation import NSBundle
from AppKit import NSImage
haskellBundleIdentifier = 'org.purl.net.mkhl.haskell'
def iconForName(name):
"""Return the NSImage instance representing a `name` item."""
bundle = NSBundle.bundleWithIdentifier_(haskellBundleIdentifier)
imgpath = bundle.pathForResource_ofType_(name, 'png')
img = NSImage.alloc().initWithContentsOfFile_(imgpath)
img.autorelease()
return img
class HaskellModuleItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for modules"""
def isDecorator(self):
return True
def image(self):
return iconForName('module')
class HaskellTypeItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for datatypes"""
def isDecorator(self):
return True
def image(self):
return iconForName('type')
def isTextualizer(self):
return True
def title(self):
return self.text().lstrip()
class HaskellFunctionItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for functions"""
pass
class HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')):
"""Itemizer for code blocks"""
def isTextualizer(self):
return True
def title(self):
return '%s %s' % (u'{…}', self.text().lstrip())
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from Foundation import objc
from Foundation import NSBundle
from AppKit import NSImage
def iconForName(klass, name):
"""Return the NSImage instance representing a `name` item."""
imgpath = NSBundle.bundleForClass_(klass).pathForResource_ofType_(name, 'png')
img = NSImage.alloc().initWithContentsOfFile_(imgpath)
img.autorelease()
return img
class HaskellModuleItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for modules"""
def isDecorator(self):
return True
def image(self):
return iconForName(self.class__(), 'module')
class HaskellTypeItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for datatypes"""
def isDecorator(self):
return True
def image(self):
return iconForName(self.class__(), 'type')
def isTextualizer(self):
return True
def title(self):
return self.text().lstrip()
class HaskellFunctionItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for functions"""
pass
class HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')):
"""Itemizer for code blocks"""
def isTextualizer(self):
return True
def title(self):
return '%s %s' % (u'{…}', self.text().lstrip())
<commit_msg>Simplify the icon finder function.
We statically know our bundle identifier, so we don’t have too find the bundle by runtime class.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from Foundation import objc
from Foundation import NSBundle
from AppKit import NSImage
haskellBundleIdentifier = 'org.purl.net.mkhl.haskell'
def iconForName(name):
"""Return the NSImage instance representing a `name` item."""
bundle = NSBundle.bundleWithIdentifier_(haskellBundleIdentifier)
imgpath = bundle.pathForResource_ofType_(name, 'png')
img = NSImage.alloc().initWithContentsOfFile_(imgpath)
img.autorelease()
return img
class HaskellModuleItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for modules"""
def isDecorator(self):
return True
def image(self):
return iconForName('module')
class HaskellTypeItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for datatypes"""
def isDecorator(self):
return True
def image(self):
return iconForName('type')
def isTextualizer(self):
return True
def title(self):
return self.text().lstrip()
class HaskellFunctionItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for functions"""
pass
class HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')):
"""Itemizer for code blocks"""
def isTextualizer(self):
return True
def title(self):
return '%s %s' % (u'{…}', self.text().lstrip())
|
08cb044062abbc6442d687bdae8bd49cfb7b2d9f
|
test/test_obj.py
|
test/test_obj.py
|
import os
import numpy
import pytest
import helpers
import meshio
@pytest.mark.parametrize(
"mesh", [helpers.tri_mesh, helpers.quad_mesh, helpers.tri_quad_mesh]
)
def test_ply(mesh):
def writer(*args, **kwargs):
return meshio._obj.write(*args, **kwargs)
for key in mesh.cells:
mesh.cells[key] = mesh.cells[key].astype(numpy.int32)
helpers.write_read(writer, meshio._obj.read, mesh, 1.0e-12)
return
@pytest.mark.parametrize(
"filename, ref_sum, ref_num_cells", [("elephav.obj", 3.678372172450000e05, 1148)]
)
def test_reference_file(filename, ref_sum, ref_num_cells):
this_dir = os.path.dirname(os.path.abspath(__file__))
filename = os.path.join(this_dir, "meshes", "obj", filename)
mesh = meshio.read(filename)
tol = 1.0e-5
s = numpy.sum(mesh.points)
assert abs(s - ref_sum) < tol * abs(ref_sum)
assert len(mesh.cells["triangle"]) == ref_num_cells
return
|
import os
import numpy
import pytest
import helpers
import meshio
@pytest.mark.parametrize(
"mesh", [helpers.tri_mesh, helpers.quad_mesh, helpers.tri_quad_mesh]
)
def test_obj(mesh):
def writer(*args, **kwargs):
return meshio._obj.write(*args, **kwargs)
for key in mesh.cells:
mesh.cells[key] = mesh.cells[key].astype(numpy.int32)
helpers.write_read(writer, meshio._obj.read, mesh, 1.0e-12)
return
@pytest.mark.parametrize(
"filename, ref_sum, ref_num_cells", [("elephav.obj", 3.678372172450000e05, 1148)]
)
def test_reference_file(filename, ref_sum, ref_num_cells):
this_dir = os.path.dirname(os.path.abspath(__file__))
filename = os.path.join(this_dir, "meshes", "obj", filename)
mesh = meshio.read(filename)
tol = 1.0e-5
s = numpy.sum(mesh.points)
assert abs(s - ref_sum) < tol * abs(ref_sum)
assert len(mesh.cells["triangle"]) == ref_num_cells
return
|
Correct typo in test name
|
Correct typo in test name
|
Python
|
mit
|
nschloe/meshio
|
import os
import numpy
import pytest
import helpers
import meshio
@pytest.mark.parametrize(
"mesh", [helpers.tri_mesh, helpers.quad_mesh, helpers.tri_quad_mesh]
)
def test_ply(mesh):
def writer(*args, **kwargs):
return meshio._obj.write(*args, **kwargs)
for key in mesh.cells:
mesh.cells[key] = mesh.cells[key].astype(numpy.int32)
helpers.write_read(writer, meshio._obj.read, mesh, 1.0e-12)
return
@pytest.mark.parametrize(
"filename, ref_sum, ref_num_cells", [("elephav.obj", 3.678372172450000e05, 1148)]
)
def test_reference_file(filename, ref_sum, ref_num_cells):
this_dir = os.path.dirname(os.path.abspath(__file__))
filename = os.path.join(this_dir, "meshes", "obj", filename)
mesh = meshio.read(filename)
tol = 1.0e-5
s = numpy.sum(mesh.points)
assert abs(s - ref_sum) < tol * abs(ref_sum)
assert len(mesh.cells["triangle"]) == ref_num_cells
return
Correct typo in test name
|
import os
import numpy
import pytest
import helpers
import meshio
@pytest.mark.parametrize(
"mesh", [helpers.tri_mesh, helpers.quad_mesh, helpers.tri_quad_mesh]
)
def test_obj(mesh):
def writer(*args, **kwargs):
return meshio._obj.write(*args, **kwargs)
for key in mesh.cells:
mesh.cells[key] = mesh.cells[key].astype(numpy.int32)
helpers.write_read(writer, meshio._obj.read, mesh, 1.0e-12)
return
@pytest.mark.parametrize(
"filename, ref_sum, ref_num_cells", [("elephav.obj", 3.678372172450000e05, 1148)]
)
def test_reference_file(filename, ref_sum, ref_num_cells):
this_dir = os.path.dirname(os.path.abspath(__file__))
filename = os.path.join(this_dir, "meshes", "obj", filename)
mesh = meshio.read(filename)
tol = 1.0e-5
s = numpy.sum(mesh.points)
assert abs(s - ref_sum) < tol * abs(ref_sum)
assert len(mesh.cells["triangle"]) == ref_num_cells
return
|
<commit_before>import os
import numpy
import pytest
import helpers
import meshio
@pytest.mark.parametrize(
"mesh", [helpers.tri_mesh, helpers.quad_mesh, helpers.tri_quad_mesh]
)
def test_ply(mesh):
def writer(*args, **kwargs):
return meshio._obj.write(*args, **kwargs)
for key in mesh.cells:
mesh.cells[key] = mesh.cells[key].astype(numpy.int32)
helpers.write_read(writer, meshio._obj.read, mesh, 1.0e-12)
return
@pytest.mark.parametrize(
"filename, ref_sum, ref_num_cells", [("elephav.obj", 3.678372172450000e05, 1148)]
)
def test_reference_file(filename, ref_sum, ref_num_cells):
this_dir = os.path.dirname(os.path.abspath(__file__))
filename = os.path.join(this_dir, "meshes", "obj", filename)
mesh = meshio.read(filename)
tol = 1.0e-5
s = numpy.sum(mesh.points)
assert abs(s - ref_sum) < tol * abs(ref_sum)
assert len(mesh.cells["triangle"]) == ref_num_cells
return
<commit_msg>Correct typo in test name<commit_after>
|
import os
import numpy
import pytest
import helpers
import meshio
@pytest.mark.parametrize(
"mesh", [helpers.tri_mesh, helpers.quad_mesh, helpers.tri_quad_mesh]
)
def test_obj(mesh):
def writer(*args, **kwargs):
return meshio._obj.write(*args, **kwargs)
for key in mesh.cells:
mesh.cells[key] = mesh.cells[key].astype(numpy.int32)
helpers.write_read(writer, meshio._obj.read, mesh, 1.0e-12)
return
@pytest.mark.parametrize(
"filename, ref_sum, ref_num_cells", [("elephav.obj", 3.678372172450000e05, 1148)]
)
def test_reference_file(filename, ref_sum, ref_num_cells):
this_dir = os.path.dirname(os.path.abspath(__file__))
filename = os.path.join(this_dir, "meshes", "obj", filename)
mesh = meshio.read(filename)
tol = 1.0e-5
s = numpy.sum(mesh.points)
assert abs(s - ref_sum) < tol * abs(ref_sum)
assert len(mesh.cells["triangle"]) == ref_num_cells
return
|
import os
import numpy
import pytest
import helpers
import meshio
@pytest.mark.parametrize(
"mesh", [helpers.tri_mesh, helpers.quad_mesh, helpers.tri_quad_mesh]
)
def test_ply(mesh):
def writer(*args, **kwargs):
return meshio._obj.write(*args, **kwargs)
for key in mesh.cells:
mesh.cells[key] = mesh.cells[key].astype(numpy.int32)
helpers.write_read(writer, meshio._obj.read, mesh, 1.0e-12)
return
@pytest.mark.parametrize(
"filename, ref_sum, ref_num_cells", [("elephav.obj", 3.678372172450000e05, 1148)]
)
def test_reference_file(filename, ref_sum, ref_num_cells):
this_dir = os.path.dirname(os.path.abspath(__file__))
filename = os.path.join(this_dir, "meshes", "obj", filename)
mesh = meshio.read(filename)
tol = 1.0e-5
s = numpy.sum(mesh.points)
assert abs(s - ref_sum) < tol * abs(ref_sum)
assert len(mesh.cells["triangle"]) == ref_num_cells
return
Correct typo in test nameimport os
import numpy
import pytest
import helpers
import meshio
@pytest.mark.parametrize(
"mesh", [helpers.tri_mesh, helpers.quad_mesh, helpers.tri_quad_mesh]
)
def test_obj(mesh):
def writer(*args, **kwargs):
return meshio._obj.write(*args, **kwargs)
for key in mesh.cells:
mesh.cells[key] = mesh.cells[key].astype(numpy.int32)
helpers.write_read(writer, meshio._obj.read, mesh, 1.0e-12)
return
@pytest.mark.parametrize(
"filename, ref_sum, ref_num_cells", [("elephav.obj", 3.678372172450000e05, 1148)]
)
def test_reference_file(filename, ref_sum, ref_num_cells):
this_dir = os.path.dirname(os.path.abspath(__file__))
filename = os.path.join(this_dir, "meshes", "obj", filename)
mesh = meshio.read(filename)
tol = 1.0e-5
s = numpy.sum(mesh.points)
assert abs(s - ref_sum) < tol * abs(ref_sum)
assert len(mesh.cells["triangle"]) == ref_num_cells
return
|
<commit_before>import os
import numpy
import pytest
import helpers
import meshio
@pytest.mark.parametrize(
"mesh", [helpers.tri_mesh, helpers.quad_mesh, helpers.tri_quad_mesh]
)
def test_ply(mesh):
def writer(*args, **kwargs):
return meshio._obj.write(*args, **kwargs)
for key in mesh.cells:
mesh.cells[key] = mesh.cells[key].astype(numpy.int32)
helpers.write_read(writer, meshio._obj.read, mesh, 1.0e-12)
return
@pytest.mark.parametrize(
"filename, ref_sum, ref_num_cells", [("elephav.obj", 3.678372172450000e05, 1148)]
)
def test_reference_file(filename, ref_sum, ref_num_cells):
this_dir = os.path.dirname(os.path.abspath(__file__))
filename = os.path.join(this_dir, "meshes", "obj", filename)
mesh = meshio.read(filename)
tol = 1.0e-5
s = numpy.sum(mesh.points)
assert abs(s - ref_sum) < tol * abs(ref_sum)
assert len(mesh.cells["triangle"]) == ref_num_cells
return
<commit_msg>Correct typo in test name<commit_after>import os
import numpy
import pytest
import helpers
import meshio
@pytest.mark.parametrize(
"mesh", [helpers.tri_mesh, helpers.quad_mesh, helpers.tri_quad_mesh]
)
def test_obj(mesh):
def writer(*args, **kwargs):
return meshio._obj.write(*args, **kwargs)
for key in mesh.cells:
mesh.cells[key] = mesh.cells[key].astype(numpy.int32)
helpers.write_read(writer, meshio._obj.read, mesh, 1.0e-12)
return
@pytest.mark.parametrize(
"filename, ref_sum, ref_num_cells", [("elephav.obj", 3.678372172450000e05, 1148)]
)
def test_reference_file(filename, ref_sum, ref_num_cells):
this_dir = os.path.dirname(os.path.abspath(__file__))
filename = os.path.join(this_dir, "meshes", "obj", filename)
mesh = meshio.read(filename)
tol = 1.0e-5
s = numpy.sum(mesh.points)
assert abs(s - ref_sum) < tol * abs(ref_sum)
assert len(mesh.cells["triangle"]) == ref_num_cells
return
|
92a3a4522968d170a8d19649bd6848187736f8f4
|
src/DeltaDetector.py
|
src/DeltaDetector.py
|
import numpy as N
import gtk.gdk
class DeltaDetector(object):
def __init__(self, active=False, threshold=20.0):
self._previous_frame = None
self._frame = None
self.active = active
self.threshold = threshold
def send_frame(self, frame):
self._previous_frame = self._frame
self._frame = N.array(frame, dtype=float)
if not self.active:
return
if self._previous_frame is None:
return
if(self._previous_frame.shape != self._frame.shape):
self._previous_frame = None
return
if N.max(N.abs(self._frame - self._previous_frame)) > self.threshold:
gtk.gdk.beep()
# Properties
@property
def active(self):
return self._active
@active.setter
def active(self, value):
self._active = bool(value)
@property
def threshold(self):
return self._threshold
@threshold.setter
def threshold(self, value):
self._threshold = float(value)
@property
def average(self):
if self._frame is None or self._previous_frame is None:
return 0.0
return N.mean(self._frame - self._previous_frame)
|
import numpy as N
import gobject
import gtk.gdk
class DeltaDetector(object):
def __init__(self, active=False, threshold=20.0):
self._previous_frame = None
self._frame = None
self.active = active
self.threshold = threshold
self._timed_out = False
def send_frame(self, frame):
self._previous_frame = self._frame
self._frame = N.array(frame, dtype=float)
if self._timed_out:
return
if not self.active:
return
if self._previous_frame is None:
return
if(self._previous_frame.shape != self._frame.shape):
self._previous_frame = None
return
if N.max(N.abs(self._frame - self._previous_frame)) > self.threshold:
gtk.gdk.beep()
# Don't beep more than once per second
self._timed_out = True
gobject.timeout_add(1000, self._switch_on_timeout)
def _switch_on_timeout(self):
self._timed_out = False
return False
# Properties
@property
def active(self):
return self._active
@active.setter
def active(self, value):
self._active = bool(value)
@property
def threshold(self):
return self._threshold
@threshold.setter
def threshold(self, value):
self._threshold = float(value)
@property
def average(self):
if self._frame is None or self._previous_frame is None:
return 0.0
return N.mean(self._frame - self._previous_frame)
|
Add a timeout to the delta detector
|
Add a timeout to the delta detector
Make it so that the detector doesn't beep more than
once per second. It would be even better if the beeping
occurred in another thread...
|
Python
|
mit
|
ptomato/Beams
|
import numpy as N
import gtk.gdk
class DeltaDetector(object):
def __init__(self, active=False, threshold=20.0):
self._previous_frame = None
self._frame = None
self.active = active
self.threshold = threshold
def send_frame(self, frame):
self._previous_frame = self._frame
self._frame = N.array(frame, dtype=float)
if not self.active:
return
if self._previous_frame is None:
return
if(self._previous_frame.shape != self._frame.shape):
self._previous_frame = None
return
if N.max(N.abs(self._frame - self._previous_frame)) > self.threshold:
gtk.gdk.beep()
# Properties
@property
def active(self):
return self._active
@active.setter
def active(self, value):
self._active = bool(value)
@property
def threshold(self):
return self._threshold
@threshold.setter
def threshold(self, value):
self._threshold = float(value)
@property
def average(self):
if self._frame is None or self._previous_frame is None:
return 0.0
return N.mean(self._frame - self._previous_frame)
Add a timeout to the delta detector
Make it so that the detector doesn't beep more than
once per second. It would be even better if the beeping
occurred in another thread...
|
import numpy as N
import gobject
import gtk.gdk
class DeltaDetector(object):
def __init__(self, active=False, threshold=20.0):
self._previous_frame = None
self._frame = None
self.active = active
self.threshold = threshold
self._timed_out = False
def send_frame(self, frame):
self._previous_frame = self._frame
self._frame = N.array(frame, dtype=float)
if self._timed_out:
return
if not self.active:
return
if self._previous_frame is None:
return
if(self._previous_frame.shape != self._frame.shape):
self._previous_frame = None
return
if N.max(N.abs(self._frame - self._previous_frame)) > self.threshold:
gtk.gdk.beep()
# Don't beep more than once per second
self._timed_out = True
gobject.timeout_add(1000, self._switch_on_timeout)
def _switch_on_timeout(self):
self._timed_out = False
return False
# Properties
@property
def active(self):
return self._active
@active.setter
def active(self, value):
self._active = bool(value)
@property
def threshold(self):
return self._threshold
@threshold.setter
def threshold(self, value):
self._threshold = float(value)
@property
def average(self):
if self._frame is None or self._previous_frame is None:
return 0.0
return N.mean(self._frame - self._previous_frame)
|
<commit_before>import numpy as N
import gtk.gdk
class DeltaDetector(object):
def __init__(self, active=False, threshold=20.0):
self._previous_frame = None
self._frame = None
self.active = active
self.threshold = threshold
def send_frame(self, frame):
self._previous_frame = self._frame
self._frame = N.array(frame, dtype=float)
if not self.active:
return
if self._previous_frame is None:
return
if(self._previous_frame.shape != self._frame.shape):
self._previous_frame = None
return
if N.max(N.abs(self._frame - self._previous_frame)) > self.threshold:
gtk.gdk.beep()
# Properties
@property
def active(self):
return self._active
@active.setter
def active(self, value):
self._active = bool(value)
@property
def threshold(self):
return self._threshold
@threshold.setter
def threshold(self, value):
self._threshold = float(value)
@property
def average(self):
if self._frame is None or self._previous_frame is None:
return 0.0
return N.mean(self._frame - self._previous_frame)
<commit_msg>Add a timeout to the delta detector
Make it so that the detector doesn't beep more than
once per second. It would be even better if the beeping
occurred in another thread...<commit_after>
|
import numpy as N
import gobject
import gtk.gdk
class DeltaDetector(object):
def __init__(self, active=False, threshold=20.0):
self._previous_frame = None
self._frame = None
self.active = active
self.threshold = threshold
self._timed_out = False
def send_frame(self, frame):
self._previous_frame = self._frame
self._frame = N.array(frame, dtype=float)
if self._timed_out:
return
if not self.active:
return
if self._previous_frame is None:
return
if(self._previous_frame.shape != self._frame.shape):
self._previous_frame = None
return
if N.max(N.abs(self._frame - self._previous_frame)) > self.threshold:
gtk.gdk.beep()
# Don't beep more than once per second
self._timed_out = True
gobject.timeout_add(1000, self._switch_on_timeout)
def _switch_on_timeout(self):
self._timed_out = False
return False
# Properties
@property
def active(self):
return self._active
@active.setter
def active(self, value):
self._active = bool(value)
@property
def threshold(self):
return self._threshold
@threshold.setter
def threshold(self, value):
self._threshold = float(value)
@property
def average(self):
if self._frame is None or self._previous_frame is None:
return 0.0
return N.mean(self._frame - self._previous_frame)
|
import numpy as N
import gtk.gdk
class DeltaDetector(object):
def __init__(self, active=False, threshold=20.0):
self._previous_frame = None
self._frame = None
self.active = active
self.threshold = threshold
def send_frame(self, frame):
self._previous_frame = self._frame
self._frame = N.array(frame, dtype=float)
if not self.active:
return
if self._previous_frame is None:
return
if(self._previous_frame.shape != self._frame.shape):
self._previous_frame = None
return
if N.max(N.abs(self._frame - self._previous_frame)) > self.threshold:
gtk.gdk.beep()
# Properties
@property
def active(self):
return self._active
@active.setter
def active(self, value):
self._active = bool(value)
@property
def threshold(self):
return self._threshold
@threshold.setter
def threshold(self, value):
self._threshold = float(value)
@property
def average(self):
if self._frame is None or self._previous_frame is None:
return 0.0
return N.mean(self._frame - self._previous_frame)
Add a timeout to the delta detector
Make it so that the detector doesn't beep more than
once per second. It would be even better if the beeping
occurred in another thread...import numpy as N
import gobject
import gtk.gdk
class DeltaDetector(object):
def __init__(self, active=False, threshold=20.0):
self._previous_frame = None
self._frame = None
self.active = active
self.threshold = threshold
self._timed_out = False
def send_frame(self, frame):
self._previous_frame = self._frame
self._frame = N.array(frame, dtype=float)
if self._timed_out:
return
if not self.active:
return
if self._previous_frame is None:
return
if(self._previous_frame.shape != self._frame.shape):
self._previous_frame = None
return
if N.max(N.abs(self._frame - self._previous_frame)) > self.threshold:
gtk.gdk.beep()
# Don't beep more than once per second
self._timed_out = True
gobject.timeout_add(1000, self._switch_on_timeout)
def _switch_on_timeout(self):
self._timed_out = False
return False
# Properties
@property
def active(self):
return self._active
@active.setter
def active(self, value):
self._active = bool(value)
@property
def threshold(self):
return self._threshold
@threshold.setter
def threshold(self, value):
self._threshold = float(value)
@property
def average(self):
if self._frame is None or self._previous_frame is None:
return 0.0
return N.mean(self._frame - self._previous_frame)
|
<commit_before>import numpy as N
import gtk.gdk
class DeltaDetector(object):
def __init__(self, active=False, threshold=20.0):
self._previous_frame = None
self._frame = None
self.active = active
self.threshold = threshold
def send_frame(self, frame):
self._previous_frame = self._frame
self._frame = N.array(frame, dtype=float)
if not self.active:
return
if self._previous_frame is None:
return
if(self._previous_frame.shape != self._frame.shape):
self._previous_frame = None
return
if N.max(N.abs(self._frame - self._previous_frame)) > self.threshold:
gtk.gdk.beep()
# Properties
@property
def active(self):
return self._active
@active.setter
def active(self, value):
self._active = bool(value)
@property
def threshold(self):
return self._threshold
@threshold.setter
def threshold(self, value):
self._threshold = float(value)
@property
def average(self):
if self._frame is None or self._previous_frame is None:
return 0.0
return N.mean(self._frame - self._previous_frame)
<commit_msg>Add a timeout to the delta detector
Make it so that the detector doesn't beep more than
once per second. It would be even better if the beeping
occurred in another thread...<commit_after>import numpy as N
import gobject
import gtk.gdk
class DeltaDetector(object):
def __init__(self, active=False, threshold=20.0):
self._previous_frame = None
self._frame = None
self.active = active
self.threshold = threshold
self._timed_out = False
def send_frame(self, frame):
self._previous_frame = self._frame
self._frame = N.array(frame, dtype=float)
if self._timed_out:
return
if not self.active:
return
if self._previous_frame is None:
return
if(self._previous_frame.shape != self._frame.shape):
self._previous_frame = None
return
if N.max(N.abs(self._frame - self._previous_frame)) > self.threshold:
gtk.gdk.beep()
# Don't beep more than once per second
self._timed_out = True
gobject.timeout_add(1000, self._switch_on_timeout)
def _switch_on_timeout(self):
self._timed_out = False
return False
# Properties
@property
def active(self):
return self._active
@active.setter
def active(self, value):
self._active = bool(value)
@property
def threshold(self):
return self._threshold
@threshold.setter
def threshold(self, value):
self._threshold = float(value)
@property
def average(self):
if self._frame is None or self._previous_frame is None:
return 0.0
return N.mean(self._frame - self._previous_frame)
|
8eccb87791ca608be4508fee80f2de9454e4403c
|
pytask/urls.py
|
pytask/urls.py
|
from django.conf import settings
from django.conf.urls.defaults import *
from registration.views import register
from pytask.profile.forms import CustomRegistrationForm
from pytask.views import home_page
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^$', home_page),
(r'^admin/', include(admin.site.urls)),
url(r'^accounts/register/$', register,
{'form_class': CustomRegistrationForm,
'backend': 'registration.backends.default.DefaultBackend'},
name='registration_register'),
(r'^accounts/', include('registration.urls')),
(r'^profile/', include('pytask.profile.urls')),
(r'^task/', include('pytask.taskapp.urls')),
)
# Serve static files in DEVELOPMENT = True mode
if settings.DEVELOPMENT:
urlpatterns += patterns('',
(r'^pytask/media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT}),
(r'^pytask/static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.STATIC_ROOT}),
)
|
from django.conf import settings
from django.conf.urls.defaults import *
from django.contrib import admin
from registration.views import register
from pytask.profile.forms import CustomRegistrationForm
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', 'pytask.views.home_page', name='home_page'),
(r'^admin/', include(admin.site.urls)),
url(r'^accounts/register/$', register,
{'form_class': CustomRegistrationForm,
'backend': 'registration.backends.default.DefaultBackend'},
name='registration_register'),
(r'^accounts/', include('registration.urls')),
(r'^profile/', include('pytask.profile.urls')),
(r'^task/', include('pytask.taskapp.urls')),
)
# Serve static files in DEVELOPMENT = True mode
if settings.DEVELOPMENT:
urlpatterns += patterns('',
(r'^pytask/media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT}),
(r'^pytask/static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.STATIC_ROOT}),
)
|
Modify the home page URL mapper to be consistent with other mappers.
|
Modify the home page URL mapper to be consistent with other mappers.
|
Python
|
agpl-3.0
|
madhusudancs/pytask,madhusudancs/pytask,madhusudancs/pytask
|
from django.conf import settings
from django.conf.urls.defaults import *
from registration.views import register
from pytask.profile.forms import CustomRegistrationForm
from pytask.views import home_page
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^$', home_page),
(r'^admin/', include(admin.site.urls)),
url(r'^accounts/register/$', register,
{'form_class': CustomRegistrationForm,
'backend': 'registration.backends.default.DefaultBackend'},
name='registration_register'),
(r'^accounts/', include('registration.urls')),
(r'^profile/', include('pytask.profile.urls')),
(r'^task/', include('pytask.taskapp.urls')),
)
# Serve static files in DEVELOPMENT = True mode
if settings.DEVELOPMENT:
urlpatterns += patterns('',
(r'^pytask/media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT}),
(r'^pytask/static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.STATIC_ROOT}),
)
Modify the home page URL mapper to be consistent with other mappers.
|
from django.conf import settings
from django.conf.urls.defaults import *
from django.contrib import admin
from registration.views import register
from pytask.profile.forms import CustomRegistrationForm
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', 'pytask.views.home_page', name='home_page'),
(r'^admin/', include(admin.site.urls)),
url(r'^accounts/register/$', register,
{'form_class': CustomRegistrationForm,
'backend': 'registration.backends.default.DefaultBackend'},
name='registration_register'),
(r'^accounts/', include('registration.urls')),
(r'^profile/', include('pytask.profile.urls')),
(r'^task/', include('pytask.taskapp.urls')),
)
# Serve static files in DEVELOPMENT = True mode
if settings.DEVELOPMENT:
urlpatterns += patterns('',
(r'^pytask/media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT}),
(r'^pytask/static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.STATIC_ROOT}),
)
|
<commit_before>from django.conf import settings
from django.conf.urls.defaults import *
from registration.views import register
from pytask.profile.forms import CustomRegistrationForm
from pytask.views import home_page
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^$', home_page),
(r'^admin/', include(admin.site.urls)),
url(r'^accounts/register/$', register,
{'form_class': CustomRegistrationForm,
'backend': 'registration.backends.default.DefaultBackend'},
name='registration_register'),
(r'^accounts/', include('registration.urls')),
(r'^profile/', include('pytask.profile.urls')),
(r'^task/', include('pytask.taskapp.urls')),
)
# Serve static files in DEVELOPMENT = True mode
if settings.DEVELOPMENT:
urlpatterns += patterns('',
(r'^pytask/media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT}),
(r'^pytask/static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.STATIC_ROOT}),
)
<commit_msg>Modify the home page URL mapper to be consistent with other mappers.<commit_after>
|
from django.conf import settings
from django.conf.urls.defaults import *
from django.contrib import admin
from registration.views import register
from pytask.profile.forms import CustomRegistrationForm
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', 'pytask.views.home_page', name='home_page'),
(r'^admin/', include(admin.site.urls)),
url(r'^accounts/register/$', register,
{'form_class': CustomRegistrationForm,
'backend': 'registration.backends.default.DefaultBackend'},
name='registration_register'),
(r'^accounts/', include('registration.urls')),
(r'^profile/', include('pytask.profile.urls')),
(r'^task/', include('pytask.taskapp.urls')),
)
# Serve static files in DEVELOPMENT = True mode
if settings.DEVELOPMENT:
urlpatterns += patterns('',
(r'^pytask/media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT}),
(r'^pytask/static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.STATIC_ROOT}),
)
|
from django.conf import settings
from django.conf.urls.defaults import *
from registration.views import register
from pytask.profile.forms import CustomRegistrationForm
from pytask.views import home_page
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^$', home_page),
(r'^admin/', include(admin.site.urls)),
url(r'^accounts/register/$', register,
{'form_class': CustomRegistrationForm,
'backend': 'registration.backends.default.DefaultBackend'},
name='registration_register'),
(r'^accounts/', include('registration.urls')),
(r'^profile/', include('pytask.profile.urls')),
(r'^task/', include('pytask.taskapp.urls')),
)
# Serve static files in DEVELOPMENT = True mode
if settings.DEVELOPMENT:
urlpatterns += patterns('',
(r'^pytask/media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT}),
(r'^pytask/static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.STATIC_ROOT}),
)
Modify the home page URL mapper to be consistent with other mappers.from django.conf import settings
from django.conf.urls.defaults import *
from django.contrib import admin
from registration.views import register
from pytask.profile.forms import CustomRegistrationForm
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', 'pytask.views.home_page', name='home_page'),
(r'^admin/', include(admin.site.urls)),
url(r'^accounts/register/$', register,
{'form_class': CustomRegistrationForm,
'backend': 'registration.backends.default.DefaultBackend'},
name='registration_register'),
(r'^accounts/', include('registration.urls')),
(r'^profile/', include('pytask.profile.urls')),
(r'^task/', include('pytask.taskapp.urls')),
)
# Serve static files in DEVELOPMENT = True mode
if settings.DEVELOPMENT:
urlpatterns += patterns('',
(r'^pytask/media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT}),
(r'^pytask/static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.STATIC_ROOT}),
)
|
<commit_before>from django.conf import settings
from django.conf.urls.defaults import *
from registration.views import register
from pytask.profile.forms import CustomRegistrationForm
from pytask.views import home_page
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^$', home_page),
(r'^admin/', include(admin.site.urls)),
url(r'^accounts/register/$', register,
{'form_class': CustomRegistrationForm,
'backend': 'registration.backends.default.DefaultBackend'},
name='registration_register'),
(r'^accounts/', include('registration.urls')),
(r'^profile/', include('pytask.profile.urls')),
(r'^task/', include('pytask.taskapp.urls')),
)
# Serve static files in DEVELOPMENT = True mode
if settings.DEVELOPMENT:
urlpatterns += patterns('',
(r'^pytask/media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT}),
(r'^pytask/static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.STATIC_ROOT}),
)
<commit_msg>Modify the home page URL mapper to be consistent with other mappers.<commit_after>from django.conf import settings
from django.conf.urls.defaults import *
from django.contrib import admin
from registration.views import register
from pytask.profile.forms import CustomRegistrationForm
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', 'pytask.views.home_page', name='home_page'),
(r'^admin/', include(admin.site.urls)),
url(r'^accounts/register/$', register,
{'form_class': CustomRegistrationForm,
'backend': 'registration.backends.default.DefaultBackend'},
name='registration_register'),
(r'^accounts/', include('registration.urls')),
(r'^profile/', include('pytask.profile.urls')),
(r'^task/', include('pytask.taskapp.urls')),
)
# Serve static files in DEVELOPMENT = True mode
if settings.DEVELOPMENT:
urlpatterns += patterns('',
(r'^pytask/media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT}),
(r'^pytask/static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.STATIC_ROOT}),
)
|
9b35326243c3e6d991bba8dfc948600262ebc557
|
test/_helpers.py
|
test/_helpers.py
|
import datetime
import mock
from functools import wraps
from twisted.internet.defer import Deferred
class NewDate(datetime.date):
@classmethod
def today(cls):
return cls(2012, 12, 10)
class NewDateTime(datetime.datetime):
@classmethod
def now(cls):
return cls(2012, 12, 10, 00, 00, 00, 00, None)
class DeferredHelper(object):
def __init__(self, data=None):
self.data = data
self.deferred = Deferred()
self.addCallback = self.deferred.addCallback
self.addCallbacks = self.deferred.addCallbacks
self.addErrback = self.deferred.addErrback
def callback(self, result=None):
if result is None:
result = self.data
self.deferred.callback(result)
def errback(self, failure=None):
if failure is None:
failure = self.data
self.deferred.errback(failure)
def __call__(self, *args):
self.args = args
return self
def mock_is_admin(f):
@wraps(f)
def wrapper(*args, **kwargs):
with mock.patch("lala.pluginmanager.is_admin") as mocked:
mocked.return_value = True
return f(*args, **kwargs)
return wrapper
|
import datetime
import mock
from functools import wraps
from twisted.internet.defer import Deferred
class NewDate(datetime.date):
@classmethod
def today(cls):
return cls(2012, 12, 10)
class NewDateTime(datetime.datetime):
@classmethod
def now(cls):
return cls(2012, 12, 10, 00, 00, 00, 00, None)
class DeferredHelper(Deferred):
def __init__(self, data=None):
Deferred.__init__(self)
self.data = data
def callback(self, result=None):
if result is None:
result = self.data
return Deferred.callback(self, result)
def errback(self, failure=None):
if failure is None:
failure = self.data
return Deferred.errback(self, failure)
def __call__(self, *args):
self.args = args
return self
def mock_is_admin(f):
@wraps(f)
def wrapper(*args, **kwargs):
with mock.patch("lala.pluginmanager.is_admin") as mocked:
mocked.return_value = True
return f(*args, **kwargs)
return wrapper
|
Make DeferredHelper even more like a Deferred by subclassing
|
Make DeferredHelper even more like a Deferred by subclassing
|
Python
|
mit
|
mineo/lala,mineo/lala
|
import datetime
import mock
from functools import wraps
from twisted.internet.defer import Deferred
class NewDate(datetime.date):
@classmethod
def today(cls):
return cls(2012, 12, 10)
class NewDateTime(datetime.datetime):
@classmethod
def now(cls):
return cls(2012, 12, 10, 00, 00, 00, 00, None)
class DeferredHelper(object):
def __init__(self, data=None):
self.data = data
self.deferred = Deferred()
self.addCallback = self.deferred.addCallback
self.addCallbacks = self.deferred.addCallbacks
self.addErrback = self.deferred.addErrback
def callback(self, result=None):
if result is None:
result = self.data
self.deferred.callback(result)
def errback(self, failure=None):
if failure is None:
failure = self.data
self.deferred.errback(failure)
def __call__(self, *args):
self.args = args
return self
def mock_is_admin(f):
@wraps(f)
def wrapper(*args, **kwargs):
with mock.patch("lala.pluginmanager.is_admin") as mocked:
mocked.return_value = True
return f(*args, **kwargs)
return wrapper
Make DeferredHelper even more like a Deferred by subclassing
|
import datetime
import mock
from functools import wraps
from twisted.internet.defer import Deferred
class NewDate(datetime.date):
@classmethod
def today(cls):
return cls(2012, 12, 10)
class NewDateTime(datetime.datetime):
@classmethod
def now(cls):
return cls(2012, 12, 10, 00, 00, 00, 00, None)
class DeferredHelper(Deferred):
def __init__(self, data=None):
Deferred.__init__(self)
self.data = data
def callback(self, result=None):
if result is None:
result = self.data
return Deferred.callback(self, result)
def errback(self, failure=None):
if failure is None:
failure = self.data
return Deferred.errback(self, failure)
def __call__(self, *args):
self.args = args
return self
def mock_is_admin(f):
@wraps(f)
def wrapper(*args, **kwargs):
with mock.patch("lala.pluginmanager.is_admin") as mocked:
mocked.return_value = True
return f(*args, **kwargs)
return wrapper
|
<commit_before>import datetime
import mock
from functools import wraps
from twisted.internet.defer import Deferred
class NewDate(datetime.date):
@classmethod
def today(cls):
return cls(2012, 12, 10)
class NewDateTime(datetime.datetime):
@classmethod
def now(cls):
return cls(2012, 12, 10, 00, 00, 00, 00, None)
class DeferredHelper(object):
def __init__(self, data=None):
self.data = data
self.deferred = Deferred()
self.addCallback = self.deferred.addCallback
self.addCallbacks = self.deferred.addCallbacks
self.addErrback = self.deferred.addErrback
def callback(self, result=None):
if result is None:
result = self.data
self.deferred.callback(result)
def errback(self, failure=None):
if failure is None:
failure = self.data
self.deferred.errback(failure)
def __call__(self, *args):
self.args = args
return self
def mock_is_admin(f):
@wraps(f)
def wrapper(*args, **kwargs):
with mock.patch("lala.pluginmanager.is_admin") as mocked:
mocked.return_value = True
return f(*args, **kwargs)
return wrapper
<commit_msg>Make DeferredHelper even more like a Deferred by subclassing<commit_after>
|
import datetime
import mock
from functools import wraps
from twisted.internet.defer import Deferred
class NewDate(datetime.date):
@classmethod
def today(cls):
return cls(2012, 12, 10)
class NewDateTime(datetime.datetime):
@classmethod
def now(cls):
return cls(2012, 12, 10, 00, 00, 00, 00, None)
class DeferredHelper(Deferred):
def __init__(self, data=None):
Deferred.__init__(self)
self.data = data
def callback(self, result=None):
if result is None:
result = self.data
return Deferred.callback(self, result)
def errback(self, failure=None):
if failure is None:
failure = self.data
return Deferred.errback(self, failure)
def __call__(self, *args):
self.args = args
return self
def mock_is_admin(f):
@wraps(f)
def wrapper(*args, **kwargs):
with mock.patch("lala.pluginmanager.is_admin") as mocked:
mocked.return_value = True
return f(*args, **kwargs)
return wrapper
|
import datetime
import mock
from functools import wraps
from twisted.internet.defer import Deferred
class NewDate(datetime.date):
@classmethod
def today(cls):
return cls(2012, 12, 10)
class NewDateTime(datetime.datetime):
@classmethod
def now(cls):
return cls(2012, 12, 10, 00, 00, 00, 00, None)
class DeferredHelper(object):
def __init__(self, data=None):
self.data = data
self.deferred = Deferred()
self.addCallback = self.deferred.addCallback
self.addCallbacks = self.deferred.addCallbacks
self.addErrback = self.deferred.addErrback
def callback(self, result=None):
if result is None:
result = self.data
self.deferred.callback(result)
def errback(self, failure=None):
if failure is None:
failure = self.data
self.deferred.errback(failure)
def __call__(self, *args):
self.args = args
return self
def mock_is_admin(f):
@wraps(f)
def wrapper(*args, **kwargs):
with mock.patch("lala.pluginmanager.is_admin") as mocked:
mocked.return_value = True
return f(*args, **kwargs)
return wrapper
Make DeferredHelper even more like a Deferred by subclassingimport datetime
import mock
from functools import wraps
from twisted.internet.defer import Deferred
class NewDate(datetime.date):
@classmethod
def today(cls):
return cls(2012, 12, 10)
class NewDateTime(datetime.datetime):
@classmethod
def now(cls):
return cls(2012, 12, 10, 00, 00, 00, 00, None)
class DeferredHelper(Deferred):
def __init__(self, data=None):
Deferred.__init__(self)
self.data = data
def callback(self, result=None):
if result is None:
result = self.data
return Deferred.callback(self, result)
def errback(self, failure=None):
if failure is None:
failure = self.data
return Deferred.errback(self, failure)
def __call__(self, *args):
self.args = args
return self
def mock_is_admin(f):
@wraps(f)
def wrapper(*args, **kwargs):
with mock.patch("lala.pluginmanager.is_admin") as mocked:
mocked.return_value = True
return f(*args, **kwargs)
return wrapper
|
<commit_before>import datetime
import mock
from functools import wraps
from twisted.internet.defer import Deferred
class NewDate(datetime.date):
@classmethod
def today(cls):
return cls(2012, 12, 10)
class NewDateTime(datetime.datetime):
@classmethod
def now(cls):
return cls(2012, 12, 10, 00, 00, 00, 00, None)
class DeferredHelper(object):
def __init__(self, data=None):
self.data = data
self.deferred = Deferred()
self.addCallback = self.deferred.addCallback
self.addCallbacks = self.deferred.addCallbacks
self.addErrback = self.deferred.addErrback
def callback(self, result=None):
if result is None:
result = self.data
self.deferred.callback(result)
def errback(self, failure=None):
if failure is None:
failure = self.data
self.deferred.errback(failure)
def __call__(self, *args):
self.args = args
return self
def mock_is_admin(f):
@wraps(f)
def wrapper(*args, **kwargs):
with mock.patch("lala.pluginmanager.is_admin") as mocked:
mocked.return_value = True
return f(*args, **kwargs)
return wrapper
<commit_msg>Make DeferredHelper even more like a Deferred by subclassing<commit_after>import datetime
import mock
from functools import wraps
from twisted.internet.defer import Deferred
class NewDate(datetime.date):
@classmethod
def today(cls):
return cls(2012, 12, 10)
class NewDateTime(datetime.datetime):
@classmethod
def now(cls):
return cls(2012, 12, 10, 00, 00, 00, 00, None)
class DeferredHelper(Deferred):
def __init__(self, data=None):
Deferred.__init__(self)
self.data = data
def callback(self, result=None):
if result is None:
result = self.data
return Deferred.callback(self, result)
def errback(self, failure=None):
if failure is None:
failure = self.data
return Deferred.errback(self, failure)
def __call__(self, *args):
self.args = args
return self
def mock_is_admin(f):
@wraps(f)
def wrapper(*args, **kwargs):
with mock.patch("lala.pluginmanager.is_admin") as mocked:
mocked.return_value = True
return f(*args, **kwargs)
return wrapper
|
b15bf76c9a3d3a55423923038e374695a7b302a8
|
microcosm_pubsub/chain/__init__.py
|
microcosm_pubsub/chain/__init__.py
|
from microcosm_pubsub.chain.chain import Chain # noqa: F401
from microcosm_pubsub.chain.decorators import binds, extracts # noqa: F401
from microcosm_pubsub.chain.statements import extract, when, switch, try_chain # noqa: F401
|
from microcosm_pubsub.chain.chain import Chain # noqa: F401
from microcosm_pubsub.chain.decorators import binds, extracts # noqa: F401
from microcosm_pubsub.chain.statements import extract, when, switch, try_chain, for_each # noqa: F401
|
Add for_each to chain exports
|
Add for_each to chain exports
|
Python
|
apache-2.0
|
globality-corp/microcosm-pubsub,globality-corp/microcosm-pubsub
|
from microcosm_pubsub.chain.chain import Chain # noqa: F401
from microcosm_pubsub.chain.decorators import binds, extracts # noqa: F401
from microcosm_pubsub.chain.statements import extract, when, switch, try_chain # noqa: F401
Add for_each to chain exports
|
from microcosm_pubsub.chain.chain import Chain # noqa: F401
from microcosm_pubsub.chain.decorators import binds, extracts # noqa: F401
from microcosm_pubsub.chain.statements import extract, when, switch, try_chain, for_each # noqa: F401
|
<commit_before>from microcosm_pubsub.chain.chain import Chain # noqa: F401
from microcosm_pubsub.chain.decorators import binds, extracts # noqa: F401
from microcosm_pubsub.chain.statements import extract, when, switch, try_chain # noqa: F401
<commit_msg>Add for_each to chain exports<commit_after>
|
from microcosm_pubsub.chain.chain import Chain # noqa: F401
from microcosm_pubsub.chain.decorators import binds, extracts # noqa: F401
from microcosm_pubsub.chain.statements import extract, when, switch, try_chain, for_each # noqa: F401
|
from microcosm_pubsub.chain.chain import Chain # noqa: F401
from microcosm_pubsub.chain.decorators import binds, extracts # noqa: F401
from microcosm_pubsub.chain.statements import extract, when, switch, try_chain # noqa: F401
Add for_each to chain exportsfrom microcosm_pubsub.chain.chain import Chain # noqa: F401
from microcosm_pubsub.chain.decorators import binds, extracts # noqa: F401
from microcosm_pubsub.chain.statements import extract, when, switch, try_chain, for_each # noqa: F401
|
<commit_before>from microcosm_pubsub.chain.chain import Chain # noqa: F401
from microcosm_pubsub.chain.decorators import binds, extracts # noqa: F401
from microcosm_pubsub.chain.statements import extract, when, switch, try_chain # noqa: F401
<commit_msg>Add for_each to chain exports<commit_after>from microcosm_pubsub.chain.chain import Chain # noqa: F401
from microcosm_pubsub.chain.decorators import binds, extracts # noqa: F401
from microcosm_pubsub.chain.statements import extract, when, switch, try_chain, for_each # noqa: F401
|
daafe2152e13d32e7e03533151feeeac9464dddf
|
mycli/packages/expanded.py
|
mycli/packages/expanded.py
|
from .tabulate import _text_type
def pad(field, total, char=u" "):
return field + (char * (total - len(field)))
def get_separator(num, header_len, data_len):
total_len = header_len + data_len + 1
sep = u"-[ RECORD {0} ]".format(num)
if len(sep) < header_len:
sep = pad(sep, header_len - 1, u"-") + u"+"
if len(sep) < total_len:
sep = pad(sep, total_len, u"-")
return sep + u"\n"
def expanded_table(rows, headers):
header_len = max([len(x) for x in headers])
max_row_len = 0
results = []
padded_headers = [pad(x, header_len) + u" |" for x in headers]
header_len += 2
for row in rows:
row_len = max([len(_text_type(x)) for x in row])
row_result = []
if row_len > max_row_len:
max_row_len = row_len
for header, value in zip(padded_headers, row):
row_result.append(u"%s %s" % (header, value))
results.append('\n'.join(row_result))
output = []
for i, result in enumerate(results):
output.append(get_separator(i, header_len, max_row_len))
output.append(result)
output.append('\n')
return ''.join(output)
|
from .tabulate import _text_type
def pad(field, total, char=u" "):
return field + (char * (total - len(field)))
def get_separator(num, header_len, data_len):
sep = u"***************************[ %d. row ]***************************\n" % (num + 1)
return sep
def expanded_table(rows, headers):
header_len = max([len(x) for x in headers])
max_row_len = 0
results = []
padded_headers = [pad(x, header_len) + u" |" for x in headers]
header_len += 2
for row in rows:
row_len = max([len(_text_type(x)) for x in row])
row_result = []
if row_len > max_row_len:
max_row_len = row_len
for header, value in zip(padded_headers, row):
row_result.append(u"%s %s" % (header, value))
results.append('\n'.join(row_result))
output = []
for i, result in enumerate(results):
output.append(get_separator(i, header_len, max_row_len))
output.append(result)
output.append('\n')
return ''.join(output)
|
Fix formatting issue for \G.
|
Fix formatting issue for \G.
Closes #49
|
Python
|
bsd-3-clause
|
ksmaheshkumar/mycli,douglasvegas/mycli,webwlsong/mycli,thanatoskira/mycli,tkuipers/mycli,qbdsoft/mycli,douglasvegas/mycli,chenpingzhao/mycli,D-e-e-m-o/mycli,oguzy/mycli,tkuipers/mycli,j-bennet/mycli,suzukaze/mycli,D-e-e-m-o/mycli,thanatoskira/mycli,suzukaze/mycli,mdsrosa/mycli,brewneaux/mycli,danieljwest/mycli,shaunstanislaus/mycli,chenpingzhao/mycli,oguzy/mycli,mattn/mycli,MnO2/rediscli,mdsrosa/mycli,ZuoGuocai/mycli,evook/mycli,evook/mycli,mattn/mycli,ksmaheshkumar/mycli,fw1121/mycli,steverobbins/mycli,MnO2/rediscli,adamchainz/mycli,jinstrive/mycli,nkhuyu/mycli,shoma/mycli,qbdsoft/mycli,martijnengler/mycli,shoma/mycli,webwlsong/mycli,fw1121/mycli,j-bennet/mycli,brewneaux/mycli,ZuoGuocai/mycli,martijnengler/mycli,danieljwest/mycli,nkhuyu/mycli,jinstrive/mycli,shaunstanislaus/mycli,adamchainz/mycli
|
from .tabulate import _text_type
def pad(field, total, char=u" "):
return field + (char * (total - len(field)))
def get_separator(num, header_len, data_len):
total_len = header_len + data_len + 1
sep = u"-[ RECORD {0} ]".format(num)
if len(sep) < header_len:
sep = pad(sep, header_len - 1, u"-") + u"+"
if len(sep) < total_len:
sep = pad(sep, total_len, u"-")
return sep + u"\n"
def expanded_table(rows, headers):
header_len = max([len(x) for x in headers])
max_row_len = 0
results = []
padded_headers = [pad(x, header_len) + u" |" for x in headers]
header_len += 2
for row in rows:
row_len = max([len(_text_type(x)) for x in row])
row_result = []
if row_len > max_row_len:
max_row_len = row_len
for header, value in zip(padded_headers, row):
row_result.append(u"%s %s" % (header, value))
results.append('\n'.join(row_result))
output = []
for i, result in enumerate(results):
output.append(get_separator(i, header_len, max_row_len))
output.append(result)
output.append('\n')
return ''.join(output)
Fix formatting issue for \G.
Closes #49
|
from .tabulate import _text_type
def pad(field, total, char=u" "):
return field + (char * (total - len(field)))
def get_separator(num, header_len, data_len):
sep = u"***************************[ %d. row ]***************************\n" % (num + 1)
return sep
def expanded_table(rows, headers):
header_len = max([len(x) for x in headers])
max_row_len = 0
results = []
padded_headers = [pad(x, header_len) + u" |" for x in headers]
header_len += 2
for row in rows:
row_len = max([len(_text_type(x)) for x in row])
row_result = []
if row_len > max_row_len:
max_row_len = row_len
for header, value in zip(padded_headers, row):
row_result.append(u"%s %s" % (header, value))
results.append('\n'.join(row_result))
output = []
for i, result in enumerate(results):
output.append(get_separator(i, header_len, max_row_len))
output.append(result)
output.append('\n')
return ''.join(output)
|
<commit_before>from .tabulate import _text_type
def pad(field, total, char=u" "):
return field + (char * (total - len(field)))
def get_separator(num, header_len, data_len):
total_len = header_len + data_len + 1
sep = u"-[ RECORD {0} ]".format(num)
if len(sep) < header_len:
sep = pad(sep, header_len - 1, u"-") + u"+"
if len(sep) < total_len:
sep = pad(sep, total_len, u"-")
return sep + u"\n"
def expanded_table(rows, headers):
header_len = max([len(x) for x in headers])
max_row_len = 0
results = []
padded_headers = [pad(x, header_len) + u" |" for x in headers]
header_len += 2
for row in rows:
row_len = max([len(_text_type(x)) for x in row])
row_result = []
if row_len > max_row_len:
max_row_len = row_len
for header, value in zip(padded_headers, row):
row_result.append(u"%s %s" % (header, value))
results.append('\n'.join(row_result))
output = []
for i, result in enumerate(results):
output.append(get_separator(i, header_len, max_row_len))
output.append(result)
output.append('\n')
return ''.join(output)
<commit_msg>Fix formatting issue for \G.
Closes #49<commit_after>
|
from .tabulate import _text_type
def pad(field, total, char=u" "):
return field + (char * (total - len(field)))
def get_separator(num, header_len, data_len):
sep = u"***************************[ %d. row ]***************************\n" % (num + 1)
return sep
def expanded_table(rows, headers):
header_len = max([len(x) for x in headers])
max_row_len = 0
results = []
padded_headers = [pad(x, header_len) + u" |" for x in headers]
header_len += 2
for row in rows:
row_len = max([len(_text_type(x)) for x in row])
row_result = []
if row_len > max_row_len:
max_row_len = row_len
for header, value in zip(padded_headers, row):
row_result.append(u"%s %s" % (header, value))
results.append('\n'.join(row_result))
output = []
for i, result in enumerate(results):
output.append(get_separator(i, header_len, max_row_len))
output.append(result)
output.append('\n')
return ''.join(output)
|
from .tabulate import _text_type
def pad(field, total, char=u" "):
return field + (char * (total - len(field)))
def get_separator(num, header_len, data_len):
total_len = header_len + data_len + 1
sep = u"-[ RECORD {0} ]".format(num)
if len(sep) < header_len:
sep = pad(sep, header_len - 1, u"-") + u"+"
if len(sep) < total_len:
sep = pad(sep, total_len, u"-")
return sep + u"\n"
def expanded_table(rows, headers):
header_len = max([len(x) for x in headers])
max_row_len = 0
results = []
padded_headers = [pad(x, header_len) + u" |" for x in headers]
header_len += 2
for row in rows:
row_len = max([len(_text_type(x)) for x in row])
row_result = []
if row_len > max_row_len:
max_row_len = row_len
for header, value in zip(padded_headers, row):
row_result.append(u"%s %s" % (header, value))
results.append('\n'.join(row_result))
output = []
for i, result in enumerate(results):
output.append(get_separator(i, header_len, max_row_len))
output.append(result)
output.append('\n')
return ''.join(output)
Fix formatting issue for \G.
Closes #49from .tabulate import _text_type
def pad(field, total, char=u" "):
return field + (char * (total - len(field)))
def get_separator(num, header_len, data_len):
sep = u"***************************[ %d. row ]***************************\n" % (num + 1)
return sep
def expanded_table(rows, headers):
header_len = max([len(x) for x in headers])
max_row_len = 0
results = []
padded_headers = [pad(x, header_len) + u" |" for x in headers]
header_len += 2
for row in rows:
row_len = max([len(_text_type(x)) for x in row])
row_result = []
if row_len > max_row_len:
max_row_len = row_len
for header, value in zip(padded_headers, row):
row_result.append(u"%s %s" % (header, value))
results.append('\n'.join(row_result))
output = []
for i, result in enumerate(results):
output.append(get_separator(i, header_len, max_row_len))
output.append(result)
output.append('\n')
return ''.join(output)
|
<commit_before>from .tabulate import _text_type
def pad(field, total, char=u" "):
return field + (char * (total - len(field)))
def get_separator(num, header_len, data_len):
total_len = header_len + data_len + 1
sep = u"-[ RECORD {0} ]".format(num)
if len(sep) < header_len:
sep = pad(sep, header_len - 1, u"-") + u"+"
if len(sep) < total_len:
sep = pad(sep, total_len, u"-")
return sep + u"\n"
def expanded_table(rows, headers):
header_len = max([len(x) for x in headers])
max_row_len = 0
results = []
padded_headers = [pad(x, header_len) + u" |" for x in headers]
header_len += 2
for row in rows:
row_len = max([len(_text_type(x)) for x in row])
row_result = []
if row_len > max_row_len:
max_row_len = row_len
for header, value in zip(padded_headers, row):
row_result.append(u"%s %s" % (header, value))
results.append('\n'.join(row_result))
output = []
for i, result in enumerate(results):
output.append(get_separator(i, header_len, max_row_len))
output.append(result)
output.append('\n')
return ''.join(output)
<commit_msg>Fix formatting issue for \G.
Closes #49<commit_after>from .tabulate import _text_type
def pad(field, total, char=u" "):
return field + (char * (total - len(field)))
def get_separator(num, header_len, data_len):
sep = u"***************************[ %d. row ]***************************\n" % (num + 1)
return sep
def expanded_table(rows, headers):
header_len = max([len(x) for x in headers])
max_row_len = 0
results = []
padded_headers = [pad(x, header_len) + u" |" for x in headers]
header_len += 2
for row in rows:
row_len = max([len(_text_type(x)) for x in row])
row_result = []
if row_len > max_row_len:
max_row_len = row_len
for header, value in zip(padded_headers, row):
row_result.append(u"%s %s" % (header, value))
results.append('\n'.join(row_result))
output = []
for i, result in enumerate(results):
output.append(get_separator(i, header_len, max_row_len))
output.append(result)
output.append('\n')
return ''.join(output)
|
40ae95e87e439645d35376942f8c48ce9e62b2ad
|
test/test_pluginmount.py
|
test/test_pluginmount.py
|
from JsonStats.FetchStats.Plugins import *
from . import TestCase
import JsonStats.FetchStats.Plugins
from JsonStats.FetchStats import Fetcher
class TestPluginMount(TestCase):
def setUp(self):
# Do stuff that has to happen on every test in this instance
self.fetcher = Fetcher
def test_get_plugins(self):
"""
Verify that after loading plugins we can see them attached to
the Mount.
"""
discovered = len(self.fetcher.get_plugins())
expected = len(JsonStats.FetchStats.Plugins.__all__)
self.assertEqual(discovered, expected)
|
from . import TestCase
import JsonStats.FetchStats.Plugins
from JsonStats.FetchStats import Fetcher
class TestPluginMount(TestCase):
def setUp(self):
# Do stuff that has to happen on every test in this instance
self.fetcher = Fetcher
class _example_plugin(Fetcher):
def __init__(self):
self.context = 'testplugin'
self._load_data()
def _load_data(self):
self._loaded(True)
def dump(self):
return {}
def dump_json(self):
return self.json.dumps(self.dump())
self.example_plugin = _example_plugin
def test_get_plugins(self):
"""
Verify that after loading plugins we can see them attached to
the Mount.
"""
example_plugin = self.example_plugin()
discovered = len(self.fetcher.get_plugins())
assert discovered == 1
|
Fix the plugin mount text. And make it way more intelligent.
|
Fix the plugin mount text. And make it way more intelligent.
|
Python
|
mit
|
RHInception/jsonstats,pombredanne/jsonstats,pombredanne/jsonstats,RHInception/jsonstats
|
from JsonStats.FetchStats.Plugins import *
from . import TestCase
import JsonStats.FetchStats.Plugins
from JsonStats.FetchStats import Fetcher
class TestPluginMount(TestCase):
def setUp(self):
# Do stuff that has to happen on every test in this instance
self.fetcher = Fetcher
def test_get_plugins(self):
"""
Verify that after loading plugins we can see them attached to
the Mount.
"""
discovered = len(self.fetcher.get_plugins())
expected = len(JsonStats.FetchStats.Plugins.__all__)
self.assertEqual(discovered, expected)
Fix the plugin mount text. And make it way more intelligent.
|
from . import TestCase
import JsonStats.FetchStats.Plugins
from JsonStats.FetchStats import Fetcher
class TestPluginMount(TestCase):
def setUp(self):
# Do stuff that has to happen on every test in this instance
self.fetcher = Fetcher
class _example_plugin(Fetcher):
def __init__(self):
self.context = 'testplugin'
self._load_data()
def _load_data(self):
self._loaded(True)
def dump(self):
return {}
def dump_json(self):
return self.json.dumps(self.dump())
self.example_plugin = _example_plugin
def test_get_plugins(self):
"""
Verify that after loading plugins we can see them attached to
the Mount.
"""
example_plugin = self.example_plugin()
discovered = len(self.fetcher.get_plugins())
assert discovered == 1
|
<commit_before>from JsonStats.FetchStats.Plugins import *
from . import TestCase
import JsonStats.FetchStats.Plugins
from JsonStats.FetchStats import Fetcher
class TestPluginMount(TestCase):
def setUp(self):
# Do stuff that has to happen on every test in this instance
self.fetcher = Fetcher
def test_get_plugins(self):
"""
Verify that after loading plugins we can see them attached to
the Mount.
"""
discovered = len(self.fetcher.get_plugins())
expected = len(JsonStats.FetchStats.Plugins.__all__)
self.assertEqual(discovered, expected)
<commit_msg>Fix the plugin mount text. And make it way more intelligent.<commit_after>
|
from . import TestCase
import JsonStats.FetchStats.Plugins
from JsonStats.FetchStats import Fetcher
class TestPluginMount(TestCase):
def setUp(self):
# Do stuff that has to happen on every test in this instance
self.fetcher = Fetcher
class _example_plugin(Fetcher):
def __init__(self):
self.context = 'testplugin'
self._load_data()
def _load_data(self):
self._loaded(True)
def dump(self):
return {}
def dump_json(self):
return self.json.dumps(self.dump())
self.example_plugin = _example_plugin
def test_get_plugins(self):
"""
Verify that after loading plugins we can see them attached to
the Mount.
"""
example_plugin = self.example_plugin()
discovered = len(self.fetcher.get_plugins())
assert discovered == 1
|
from JsonStats.FetchStats.Plugins import *
from . import TestCase
import JsonStats.FetchStats.Plugins
from JsonStats.FetchStats import Fetcher
class TestPluginMount(TestCase):
def setUp(self):
# Do stuff that has to happen on every test in this instance
self.fetcher = Fetcher
def test_get_plugins(self):
"""
Verify that after loading plugins we can see them attached to
the Mount.
"""
discovered = len(self.fetcher.get_plugins())
expected = len(JsonStats.FetchStats.Plugins.__all__)
self.assertEqual(discovered, expected)
Fix the plugin mount text. And make it way more intelligent.from . import TestCase
import JsonStats.FetchStats.Plugins
from JsonStats.FetchStats import Fetcher
class TestPluginMount(TestCase):
def setUp(self):
# Do stuff that has to happen on every test in this instance
self.fetcher = Fetcher
class _example_plugin(Fetcher):
def __init__(self):
self.context = 'testplugin'
self._load_data()
def _load_data(self):
self._loaded(True)
def dump(self):
return {}
def dump_json(self):
return self.json.dumps(self.dump())
self.example_plugin = _example_plugin
def test_get_plugins(self):
"""
Verify that after loading plugins we can see them attached to
the Mount.
"""
example_plugin = self.example_plugin()
discovered = len(self.fetcher.get_plugins())
assert discovered == 1
|
<commit_before>from JsonStats.FetchStats.Plugins import *
from . import TestCase
import JsonStats.FetchStats.Plugins
from JsonStats.FetchStats import Fetcher
class TestPluginMount(TestCase):
def setUp(self):
# Do stuff that has to happen on every test in this instance
self.fetcher = Fetcher
def test_get_plugins(self):
"""
Verify that after loading plugins we can see them attached to
the Mount.
"""
discovered = len(self.fetcher.get_plugins())
expected = len(JsonStats.FetchStats.Plugins.__all__)
self.assertEqual(discovered, expected)
<commit_msg>Fix the plugin mount text. And make it way more intelligent.<commit_after>from . import TestCase
import JsonStats.FetchStats.Plugins
from JsonStats.FetchStats import Fetcher
class TestPluginMount(TestCase):
def setUp(self):
# Do stuff that has to happen on every test in this instance
self.fetcher = Fetcher
class _example_plugin(Fetcher):
def __init__(self):
self.context = 'testplugin'
self._load_data()
def _load_data(self):
self._loaded(True)
def dump(self):
return {}
def dump_json(self):
return self.json.dumps(self.dump())
self.example_plugin = _example_plugin
def test_get_plugins(self):
"""
Verify that after loading plugins we can see them attached to
the Mount.
"""
example_plugin = self.example_plugin()
discovered = len(self.fetcher.get_plugins())
assert discovered == 1
|
0a0b87d584bd731c1db65e32a7e438b0f9aea1a9
|
testing/test_direct_wrapper.py
|
testing/test_direct_wrapper.py
|
import os
from cffitsio._cfitsio import ffi, lib
def test_create_file(tmpdir):
filename = str(tmpdir.join('test.fits'))
f = ffi.new('fitsfile **')
status = ffi.new('int *')
lib.fits_create_file(f, filename, status)
assert status[0] == 0
assert os.path.isfile(filename)
|
import os
from cffitsio._cfitsio import ffi, lib
def test_create_file(tmpdir):
filename = str(tmpdir.join('test.fits'))
f = ffi.new('fitsfile **')
status = ffi.new('int *')
lib.fits_create_file(f, filename, status)
assert status[0] == 0
assert os.path.isfile(filename)
def test_open_file(test_file):
f = ffi.new('fitsfile **')
status = ffi.new('int *')
lib.fits_open_file(f, test_file, 0, status)
assert status[0] == 0
|
Add test for open file
|
Add test for open file
|
Python
|
mit
|
mindriot101/fitsio-cffi
|
import os
from cffitsio._cfitsio import ffi, lib
def test_create_file(tmpdir):
filename = str(tmpdir.join('test.fits'))
f = ffi.new('fitsfile **')
status = ffi.new('int *')
lib.fits_create_file(f, filename, status)
assert status[0] == 0
assert os.path.isfile(filename)
Add test for open file
|
import os
from cffitsio._cfitsio import ffi, lib
def test_create_file(tmpdir):
filename = str(tmpdir.join('test.fits'))
f = ffi.new('fitsfile **')
status = ffi.new('int *')
lib.fits_create_file(f, filename, status)
assert status[0] == 0
assert os.path.isfile(filename)
def test_open_file(test_file):
f = ffi.new('fitsfile **')
status = ffi.new('int *')
lib.fits_open_file(f, test_file, 0, status)
assert status[0] == 0
|
<commit_before>import os
from cffitsio._cfitsio import ffi, lib
def test_create_file(tmpdir):
filename = str(tmpdir.join('test.fits'))
f = ffi.new('fitsfile **')
status = ffi.new('int *')
lib.fits_create_file(f, filename, status)
assert status[0] == 0
assert os.path.isfile(filename)
<commit_msg>Add test for open file<commit_after>
|
import os
from cffitsio._cfitsio import ffi, lib
def test_create_file(tmpdir):
filename = str(tmpdir.join('test.fits'))
f = ffi.new('fitsfile **')
status = ffi.new('int *')
lib.fits_create_file(f, filename, status)
assert status[0] == 0
assert os.path.isfile(filename)
def test_open_file(test_file):
f = ffi.new('fitsfile **')
status = ffi.new('int *')
lib.fits_open_file(f, test_file, 0, status)
assert status[0] == 0
|
import os
from cffitsio._cfitsio import ffi, lib
def test_create_file(tmpdir):
filename = str(tmpdir.join('test.fits'))
f = ffi.new('fitsfile **')
status = ffi.new('int *')
lib.fits_create_file(f, filename, status)
assert status[0] == 0
assert os.path.isfile(filename)
Add test for open fileimport os
from cffitsio._cfitsio import ffi, lib
def test_create_file(tmpdir):
filename = str(tmpdir.join('test.fits'))
f = ffi.new('fitsfile **')
status = ffi.new('int *')
lib.fits_create_file(f, filename, status)
assert status[0] == 0
assert os.path.isfile(filename)
def test_open_file(test_file):
f = ffi.new('fitsfile **')
status = ffi.new('int *')
lib.fits_open_file(f, test_file, 0, status)
assert status[0] == 0
|
<commit_before>import os
from cffitsio._cfitsio import ffi, lib
def test_create_file(tmpdir):
filename = str(tmpdir.join('test.fits'))
f = ffi.new('fitsfile **')
status = ffi.new('int *')
lib.fits_create_file(f, filename, status)
assert status[0] == 0
assert os.path.isfile(filename)
<commit_msg>Add test for open file<commit_after>import os
from cffitsio._cfitsio import ffi, lib
def test_create_file(tmpdir):
filename = str(tmpdir.join('test.fits'))
f = ffi.new('fitsfile **')
status = ffi.new('int *')
lib.fits_create_file(f, filename, status)
assert status[0] == 0
assert os.path.isfile(filename)
def test_open_file(test_file):
f = ffi.new('fitsfile **')
status = ffi.new('int *')
lib.fits_open_file(f, test_file, 0, status)
assert status[0] == 0
|
efa90202a0586f15575af11ef299b122de413b30
|
duralex/AddGitHubIssueVisitor.py
|
duralex/AddGitHubIssueVisitor.py
|
# -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + ' (#' + str(self.current_issue) + ')'
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
|
# -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + '\nGitHub: #' + str(self.current_issue)
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
|
Add the GitHub issue number as a new line in the commitMessage field.
|
Add the GitHub issue number as a new line in the commitMessage field.
|
Python
|
mit
|
Legilibre/duralex
|
# -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + ' (#' + str(self.current_issue) + ')'
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
Add the GitHub issue number as a new line in the commitMessage field.
|
# -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + '\nGitHub: #' + str(self.current_issue)
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
|
<commit_before># -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + ' (#' + str(self.current_issue) + ')'
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
<commit_msg>Add the GitHub issue number as a new line in the commitMessage field.<commit_after>
|
# -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + '\nGitHub: #' + str(self.current_issue)
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
|
# -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + ' (#' + str(self.current_issue) + ')'
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
Add the GitHub issue number as a new line in the commitMessage field.# -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + '\nGitHub: #' + str(self.current_issue)
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
|
<commit_before># -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + ' (#' + str(self.current_issue) + ')'
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
<commit_msg>Add the GitHub issue number as a new line in the commitMessage field.<commit_after># -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + '\nGitHub: #' + str(self.current_issue)
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
|
0d1e5990d55bea9530beaa49aaf5091a6434a48e
|
newswall/providers/base.py
|
newswall/providers/base.py
|
from newswall.models import Story
class ProviderBase(object):
def __init__(self, source, config):
self.source = source
self.config = config
def update(self):
raise NotImplementedError
def create_story(self, object_url, **kwargs):
defaults = {'source': self.source}
defaults.update(kwargs)
return Story.objects.get_or_create(object_url=object_url,
defaults=defaults)
|
from datetime import date, timedelta
from newswall.models import Story
class ProviderBase(object):
def __init__(self, source, config):
self.source = source
self.config = config
def update(self):
raise NotImplementedError
def create_story(self, object_url, **kwargs):
defaults = {'source': self.source}
defaults.update(kwargs)
if defaults.get('title'):
if Story.objects.filter(
title=defaults.get('title'),
timestamp__gte=date.today() - timedelta(days=3),
).exists():
defaults['is_active'] = False
return Story.objects.get_or_create(object_url=object_url,
defaults=defaults)
|
Set stories to inactive if a story with the same title has been published recently
|
Set stories to inactive if a story with the same title has been published recently
|
Python
|
bsd-3-clause
|
HerraLampila/django-newswall,michaelkuty/django-newswall,matthiask/django-newswall,matthiask/django-newswall,HerraLampila/django-newswall,registerguard/django-newswall,registerguard/django-newswall,michaelkuty/django-newswall
|
from newswall.models import Story
class ProviderBase(object):
def __init__(self, source, config):
self.source = source
self.config = config
def update(self):
raise NotImplementedError
def create_story(self, object_url, **kwargs):
defaults = {'source': self.source}
defaults.update(kwargs)
return Story.objects.get_or_create(object_url=object_url,
defaults=defaults)
Set stories to inactive if a story with the same title has been published recently
|
from datetime import date, timedelta
from newswall.models import Story
class ProviderBase(object):
def __init__(self, source, config):
self.source = source
self.config = config
def update(self):
raise NotImplementedError
def create_story(self, object_url, **kwargs):
defaults = {'source': self.source}
defaults.update(kwargs)
if defaults.get('title'):
if Story.objects.filter(
title=defaults.get('title'),
timestamp__gte=date.today() - timedelta(days=3),
).exists():
defaults['is_active'] = False
return Story.objects.get_or_create(object_url=object_url,
defaults=defaults)
|
<commit_before>from newswall.models import Story
class ProviderBase(object):
def __init__(self, source, config):
self.source = source
self.config = config
def update(self):
raise NotImplementedError
def create_story(self, object_url, **kwargs):
defaults = {'source': self.source}
defaults.update(kwargs)
return Story.objects.get_or_create(object_url=object_url,
defaults=defaults)
<commit_msg>Set stories to inactive if a story with the same title has been published recently<commit_after>
|
from datetime import date, timedelta
from newswall.models import Story
class ProviderBase(object):
def __init__(self, source, config):
self.source = source
self.config = config
def update(self):
raise NotImplementedError
def create_story(self, object_url, **kwargs):
defaults = {'source': self.source}
defaults.update(kwargs)
if defaults.get('title'):
if Story.objects.filter(
title=defaults.get('title'),
timestamp__gte=date.today() - timedelta(days=3),
).exists():
defaults['is_active'] = False
return Story.objects.get_or_create(object_url=object_url,
defaults=defaults)
|
from newswall.models import Story
class ProviderBase(object):
def __init__(self, source, config):
self.source = source
self.config = config
def update(self):
raise NotImplementedError
def create_story(self, object_url, **kwargs):
defaults = {'source': self.source}
defaults.update(kwargs)
return Story.objects.get_or_create(object_url=object_url,
defaults=defaults)
Set stories to inactive if a story with the same title has been published recentlyfrom datetime import date, timedelta
from newswall.models import Story
class ProviderBase(object):
def __init__(self, source, config):
self.source = source
self.config = config
def update(self):
raise NotImplementedError
def create_story(self, object_url, **kwargs):
defaults = {'source': self.source}
defaults.update(kwargs)
if defaults.get('title'):
if Story.objects.filter(
title=defaults.get('title'),
timestamp__gte=date.today() - timedelta(days=3),
).exists():
defaults['is_active'] = False
return Story.objects.get_or_create(object_url=object_url,
defaults=defaults)
|
<commit_before>from newswall.models import Story
class ProviderBase(object):
def __init__(self, source, config):
self.source = source
self.config = config
def update(self):
raise NotImplementedError
def create_story(self, object_url, **kwargs):
defaults = {'source': self.source}
defaults.update(kwargs)
return Story.objects.get_or_create(object_url=object_url,
defaults=defaults)
<commit_msg>Set stories to inactive if a story with the same title has been published recently<commit_after>from datetime import date, timedelta
from newswall.models import Story
class ProviderBase(object):
def __init__(self, source, config):
self.source = source
self.config = config
def update(self):
raise NotImplementedError
def create_story(self, object_url, **kwargs):
defaults = {'source': self.source}
defaults.update(kwargs)
if defaults.get('title'):
if Story.objects.filter(
title=defaults.get('title'),
timestamp__gte=date.today() - timedelta(days=3),
).exists():
defaults['is_active'] = False
return Story.objects.get_or_create(object_url=object_url,
defaults=defaults)
|
d26fe68901948899221cd4ad0ee3ee2e42d69dbd
|
tests/newsletters/factories.py
|
tests/newsletters/factories.py
|
import factory
from adhocracy4.follows import models as follow_models
from adhocracy4.test import factories as a4_factories
from meinberlin.apps.newsletters import models
from tests import factories
# FIXME: copied from core
class FollowFactory(factory.django.DjangoModelFactory):
class Meta:
model = follow_models.Follow
creator = factory.SubFactory(factories.UserFactory)
project = factory.SubFactory(a4_factories.ProjectFactory)
class NewsletterFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.Newsletter
sender = factory.Faker('email')
sender_name = factory.Faker('name')
subject = factory.Faker('text', max_nb_chars=120)
body = factory.Faker('text')
receivers = models.PROJECT
creator = factory.SubFactory(factories.UserFactory)
project = factory.SubFactory(a4_factories.ProjectFactory)
organisation = factory.SubFactory(factories.OrganisationFactory)
|
import factory
from adhocracy4.follows import models as follow_models
from adhocracy4.test import factories as a4_factories
from meinberlin.apps.newsletters import models
from tests import factories
# FIXME: copied from core
class FollowFactory(factory.django.DjangoModelFactory):
class Meta:
model = follow_models.Follow
creator = factory.SubFactory(factories.UserFactory)
project = factory.SubFactory(a4_factories.ProjectFactory)
class NewsletterFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.Newsletter
sender = factory.Faker('email')
sender_name = factory.Faker('name')
subject = factory.Faker('sentence')
body = factory.Faker('text')
receivers = models.PROJECT
creator = factory.SubFactory(factories.UserFactory)
project = factory.SubFactory(a4_factories.ProjectFactory)
organisation = factory.SubFactory(factories.OrganisationFactory)
|
Use sentence faker for email subjects
|
Use sentence faker for email subjects
|
Python
|
agpl-3.0
|
liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin
|
import factory
from adhocracy4.follows import models as follow_models
from adhocracy4.test import factories as a4_factories
from meinberlin.apps.newsletters import models
from tests import factories
# FIXME: copied from core
class FollowFactory(factory.django.DjangoModelFactory):
class Meta:
model = follow_models.Follow
creator = factory.SubFactory(factories.UserFactory)
project = factory.SubFactory(a4_factories.ProjectFactory)
class NewsletterFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.Newsletter
sender = factory.Faker('email')
sender_name = factory.Faker('name')
subject = factory.Faker('text', max_nb_chars=120)
body = factory.Faker('text')
receivers = models.PROJECT
creator = factory.SubFactory(factories.UserFactory)
project = factory.SubFactory(a4_factories.ProjectFactory)
organisation = factory.SubFactory(factories.OrganisationFactory)
Use sentence faker for email subjects
|
import factory
from adhocracy4.follows import models as follow_models
from adhocracy4.test import factories as a4_factories
from meinberlin.apps.newsletters import models
from tests import factories
# FIXME: copied from core
class FollowFactory(factory.django.DjangoModelFactory):
class Meta:
model = follow_models.Follow
creator = factory.SubFactory(factories.UserFactory)
project = factory.SubFactory(a4_factories.ProjectFactory)
class NewsletterFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.Newsletter
sender = factory.Faker('email')
sender_name = factory.Faker('name')
subject = factory.Faker('sentence')
body = factory.Faker('text')
receivers = models.PROJECT
creator = factory.SubFactory(factories.UserFactory)
project = factory.SubFactory(a4_factories.ProjectFactory)
organisation = factory.SubFactory(factories.OrganisationFactory)
|
<commit_before>import factory
from adhocracy4.follows import models as follow_models
from adhocracy4.test import factories as a4_factories
from meinberlin.apps.newsletters import models
from tests import factories
# FIXME: copied from core
class FollowFactory(factory.django.DjangoModelFactory):
class Meta:
model = follow_models.Follow
creator = factory.SubFactory(factories.UserFactory)
project = factory.SubFactory(a4_factories.ProjectFactory)
class NewsletterFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.Newsletter
sender = factory.Faker('email')
sender_name = factory.Faker('name')
subject = factory.Faker('text', max_nb_chars=120)
body = factory.Faker('text')
receivers = models.PROJECT
creator = factory.SubFactory(factories.UserFactory)
project = factory.SubFactory(a4_factories.ProjectFactory)
organisation = factory.SubFactory(factories.OrganisationFactory)
<commit_msg>Use sentence faker for email subjects<commit_after>
|
import factory
from adhocracy4.follows import models as follow_models
from adhocracy4.test import factories as a4_factories
from meinberlin.apps.newsletters import models
from tests import factories
# FIXME: copied from core
class FollowFactory(factory.django.DjangoModelFactory):
class Meta:
model = follow_models.Follow
creator = factory.SubFactory(factories.UserFactory)
project = factory.SubFactory(a4_factories.ProjectFactory)
class NewsletterFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.Newsletter
sender = factory.Faker('email')
sender_name = factory.Faker('name')
subject = factory.Faker('sentence')
body = factory.Faker('text')
receivers = models.PROJECT
creator = factory.SubFactory(factories.UserFactory)
project = factory.SubFactory(a4_factories.ProjectFactory)
organisation = factory.SubFactory(factories.OrganisationFactory)
|
import factory
from adhocracy4.follows import models as follow_models
from adhocracy4.test import factories as a4_factories
from meinberlin.apps.newsletters import models
from tests import factories
# FIXME: copied from core
class FollowFactory(factory.django.DjangoModelFactory):
class Meta:
model = follow_models.Follow
creator = factory.SubFactory(factories.UserFactory)
project = factory.SubFactory(a4_factories.ProjectFactory)
class NewsletterFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.Newsletter
sender = factory.Faker('email')
sender_name = factory.Faker('name')
subject = factory.Faker('text', max_nb_chars=120)
body = factory.Faker('text')
receivers = models.PROJECT
creator = factory.SubFactory(factories.UserFactory)
project = factory.SubFactory(a4_factories.ProjectFactory)
organisation = factory.SubFactory(factories.OrganisationFactory)
Use sentence faker for email subjectsimport factory
from adhocracy4.follows import models as follow_models
from adhocracy4.test import factories as a4_factories
from meinberlin.apps.newsletters import models
from tests import factories
# FIXME: copied from core
class FollowFactory(factory.django.DjangoModelFactory):
class Meta:
model = follow_models.Follow
creator = factory.SubFactory(factories.UserFactory)
project = factory.SubFactory(a4_factories.ProjectFactory)
class NewsletterFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.Newsletter
sender = factory.Faker('email')
sender_name = factory.Faker('name')
subject = factory.Faker('sentence')
body = factory.Faker('text')
receivers = models.PROJECT
creator = factory.SubFactory(factories.UserFactory)
project = factory.SubFactory(a4_factories.ProjectFactory)
organisation = factory.SubFactory(factories.OrganisationFactory)
|
<commit_before>import factory
from adhocracy4.follows import models as follow_models
from adhocracy4.test import factories as a4_factories
from meinberlin.apps.newsletters import models
from tests import factories
# FIXME: copied from core
class FollowFactory(factory.django.DjangoModelFactory):
class Meta:
model = follow_models.Follow
creator = factory.SubFactory(factories.UserFactory)
project = factory.SubFactory(a4_factories.ProjectFactory)
class NewsletterFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.Newsletter
sender = factory.Faker('email')
sender_name = factory.Faker('name')
subject = factory.Faker('text', max_nb_chars=120)
body = factory.Faker('text')
receivers = models.PROJECT
creator = factory.SubFactory(factories.UserFactory)
project = factory.SubFactory(a4_factories.ProjectFactory)
organisation = factory.SubFactory(factories.OrganisationFactory)
<commit_msg>Use sentence faker for email subjects<commit_after>import factory
from adhocracy4.follows import models as follow_models
from adhocracy4.test import factories as a4_factories
from meinberlin.apps.newsletters import models
from tests import factories
# FIXME: copied from core
class FollowFactory(factory.django.DjangoModelFactory):
class Meta:
model = follow_models.Follow
creator = factory.SubFactory(factories.UserFactory)
project = factory.SubFactory(a4_factories.ProjectFactory)
class NewsletterFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.Newsletter
sender = factory.Faker('email')
sender_name = factory.Faker('name')
subject = factory.Faker('sentence')
body = factory.Faker('text')
receivers = models.PROJECT
creator = factory.SubFactory(factories.UserFactory)
project = factory.SubFactory(a4_factories.ProjectFactory)
organisation = factory.SubFactory(factories.OrganisationFactory)
|
8840cedd74c6c1959358366a88a85e7567b84439
|
tests/test_vector2_negation.py
|
tests/test_vector2_negation.py
|
from hypothesis import given
from ppb_vector import Vector2
from utils import vectors
@given(vector=vectors())
def test_negation_scalar(vector: Vector2):
assert - vector == (-1) * vector
|
from hypothesis import given
from ppb_vector import Vector2
from utils import vectors
@given(vector=vectors())
def test_negation_scalar(vector: Vector2):
assert - vector == (-1) * vector
@given(vector=vectors())
def test_negation_involutive(vector: Vector2):
assert vector == - (- vector)
|
Test that negation is involutive
|
tests/negation: Test that negation is involutive
|
Python
|
artistic-2.0
|
ppb/ppb-vector,ppb/ppb-vector
|
from hypothesis import given
from ppb_vector import Vector2
from utils import vectors
@given(vector=vectors())
def test_negation_scalar(vector: Vector2):
assert - vector == (-1) * vector
tests/negation: Test that negation is involutive
|
from hypothesis import given
from ppb_vector import Vector2
from utils import vectors
@given(vector=vectors())
def test_negation_scalar(vector: Vector2):
assert - vector == (-1) * vector
@given(vector=vectors())
def test_negation_involutive(vector: Vector2):
assert vector == - (- vector)
|
<commit_before>from hypothesis import given
from ppb_vector import Vector2
from utils import vectors
@given(vector=vectors())
def test_negation_scalar(vector: Vector2):
assert - vector == (-1) * vector
<commit_msg>tests/negation: Test that negation is involutive<commit_after>
|
from hypothesis import given
from ppb_vector import Vector2
from utils import vectors
@given(vector=vectors())
def test_negation_scalar(vector: Vector2):
assert - vector == (-1) * vector
@given(vector=vectors())
def test_negation_involutive(vector: Vector2):
assert vector == - (- vector)
|
from hypothesis import given
from ppb_vector import Vector2
from utils import vectors
@given(vector=vectors())
def test_negation_scalar(vector: Vector2):
assert - vector == (-1) * vector
tests/negation: Test that negation is involutivefrom hypothesis import given
from ppb_vector import Vector2
from utils import vectors
@given(vector=vectors())
def test_negation_scalar(vector: Vector2):
assert - vector == (-1) * vector
@given(vector=vectors())
def test_negation_involutive(vector: Vector2):
assert vector == - (- vector)
|
<commit_before>from hypothesis import given
from ppb_vector import Vector2
from utils import vectors
@given(vector=vectors())
def test_negation_scalar(vector: Vector2):
assert - vector == (-1) * vector
<commit_msg>tests/negation: Test that negation is involutive<commit_after>from hypothesis import given
from ppb_vector import Vector2
from utils import vectors
@given(vector=vectors())
def test_negation_scalar(vector: Vector2):
assert - vector == (-1) * vector
@given(vector=vectors())
def test_negation_involutive(vector: Vector2):
assert vector == - (- vector)
|
5f07fd7b5d916ca1442a5b599bcec49026295209
|
ibmcnx/doc/DataSources.py
|
ibmcnx/doc/DataSources.py
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
for db in dbs:
db = db.split('(')
n = 0
for i in db:
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(i).replace('"',''))
n += 1
dblist.sort()
for db in dblist:
print db
# for db in dbs:
# t1 = ibmcnx.functions.getDSId( db )
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' )
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
for db in dbs:
db = db.split('(')
n = 0
for i in db:
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(i).replace('"',''))
n += 1
dblist.sort()
for db in dblist:
t1 = ibmcnx.functions.getDSId( db )
print t1
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' )
|
Create documentation of DataSource Settings
|
8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8
|
Python
|
apache-2.0
|
stoeps13/ibmcnx2,stoeps13/ibmcnx2
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
for db in dbs:
db = db.split('(')
n = 0
for i in db:
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(i).replace('"',''))
n += 1
dblist.sort()
for db in dblist:
print db
# for db in dbs:
# t1 = ibmcnx.functions.getDSId( db )
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' )8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
for db in dbs:
db = db.split('(')
n = 0
for i in db:
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(i).replace('"',''))
n += 1
dblist.sort()
for db in dblist:
t1 = ibmcnx.functions.getDSId( db )
print t1
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' )
|
<commit_before>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
for db in dbs:
db = db.split('(')
n = 0
for i in db:
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(i).replace('"',''))
n += 1
dblist.sort()
for db in dblist:
print db
# for db in dbs:
# t1 = ibmcnx.functions.getDSId( db )
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' )<commit_msg>8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8<commit_after>
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
for db in dbs:
db = db.split('(')
n = 0
for i in db:
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(i).replace('"',''))
n += 1
dblist.sort()
for db in dblist:
t1 = ibmcnx.functions.getDSId( db )
print t1
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' )
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
for db in dbs:
db = db.split('(')
n = 0
for i in db:
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(i).replace('"',''))
n += 1
dblist.sort()
for db in dblist:
print db
# for db in dbs:
# t1 = ibmcnx.functions.getDSId( db )
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' )8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
for db in dbs:
db = db.split('(')
n = 0
for i in db:
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(i).replace('"',''))
n += 1
dblist.sort()
for db in dblist:
t1 = ibmcnx.functions.getDSId( db )
print t1
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' )
|
<commit_before>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
for db in dbs:
db = db.split('(')
n = 0
for i in db:
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(i).replace('"',''))
n += 1
dblist.sort()
for db in dblist:
print db
# for db in dbs:
# t1 = ibmcnx.functions.getDSId( db )
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' )<commit_msg>8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8<commit_after>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
for db in dbs:
db = db.split('(')
n = 0
for i in db:
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(i).replace('"',''))
n += 1
dblist.sort()
for db in dblist:
t1 = ibmcnx.functions.getDSId( db )
print t1
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' )
|
8987187ddb1b60e667217d6c068ec4e235fd2c5e
|
core/helpers/constants.py
|
core/helpers/constants.py
|
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
''' Constant mapping appropriate quantipy types to pandas dtypes
'''
DTYPE_MAP = {
"float": ["float64", "float32", "float16"],
"int": ["int64", "int32", "int16", "int8", "int0", "float64", "float32", "float16"],
"string": ["object"],
"date": ["datetime64"],
"time": ["timedelta64"],
"bool": ["bool"],
"single": ["int64", "int32", "int16", "int8", "int0", "float64", "float32", "float16"],
"dichotomous set": [],
"categorical set": [],
"delimited set": ["object"],
"grid": []
}
MAPPED_PATTERN = "^[^@].+[@].+[^@]$"
|
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
''' Constant mapping appropriate quantipy types to pandas dtypes
'''
DTYPE_MAP = {
"float": ["float64", "float32", "float16"],
"int": ["int64", "int32", "int16", "int8", "int0", "float64", "float32", "float16"],
"string": ["object"],
"date": ["datetime64"],
"time": ["timedelta64"],
"bool": ["bool"],
"single": ["int64", "int32", "int16", "int8", "int0", "float64", "float32", "float16"],
"dichotomous set": [],
"categorical set": [],
"delimited set": ["object"],
"grid": []
}
MAPPED_PATTERN = "^[^@].*[@].*[^@]$"
|
Edit to the regular expression pattern used to identify meta-mapped references.
|
Edit to the regular expression pattern used to identify meta-mapped references.
|
Python
|
mit
|
Quantipy/quantipy,Quantipy/quantipy,Quantipy/quantipy,Quantipy/quantipy
|
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
''' Constant mapping appropriate quantipy types to pandas dtypes
'''
DTYPE_MAP = {
"float": ["float64", "float32", "float16"],
"int": ["int64", "int32", "int16", "int8", "int0", "float64", "float32", "float16"],
"string": ["object"],
"date": ["datetime64"],
"time": ["timedelta64"],
"bool": ["bool"],
"single": ["int64", "int32", "int16", "int8", "int0", "float64", "float32", "float16"],
"dichotomous set": [],
"categorical set": [],
"delimited set": ["object"],
"grid": []
}
MAPPED_PATTERN = "^[^@].+[@].+[^@]$"Edit to the regular expression pattern used to identify meta-mapped references.
|
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
''' Constant mapping appropriate quantipy types to pandas dtypes
'''
DTYPE_MAP = {
"float": ["float64", "float32", "float16"],
"int": ["int64", "int32", "int16", "int8", "int0", "float64", "float32", "float16"],
"string": ["object"],
"date": ["datetime64"],
"time": ["timedelta64"],
"bool": ["bool"],
"single": ["int64", "int32", "int16", "int8", "int0", "float64", "float32", "float16"],
"dichotomous set": [],
"categorical set": [],
"delimited set": ["object"],
"grid": []
}
MAPPED_PATTERN = "^[^@].*[@].*[^@]$"
|
<commit_before>#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
''' Constant mapping appropriate quantipy types to pandas dtypes
'''
DTYPE_MAP = {
"float": ["float64", "float32", "float16"],
"int": ["int64", "int32", "int16", "int8", "int0", "float64", "float32", "float16"],
"string": ["object"],
"date": ["datetime64"],
"time": ["timedelta64"],
"bool": ["bool"],
"single": ["int64", "int32", "int16", "int8", "int0", "float64", "float32", "float16"],
"dichotomous set": [],
"categorical set": [],
"delimited set": ["object"],
"grid": []
}
MAPPED_PATTERN = "^[^@].+[@].+[^@]$"<commit_msg>Edit to the regular expression pattern used to identify meta-mapped references.<commit_after>
|
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
''' Constant mapping appropriate quantipy types to pandas dtypes
'''
DTYPE_MAP = {
"float": ["float64", "float32", "float16"],
"int": ["int64", "int32", "int16", "int8", "int0", "float64", "float32", "float16"],
"string": ["object"],
"date": ["datetime64"],
"time": ["timedelta64"],
"bool": ["bool"],
"single": ["int64", "int32", "int16", "int8", "int0", "float64", "float32", "float16"],
"dichotomous set": [],
"categorical set": [],
"delimited set": ["object"],
"grid": []
}
MAPPED_PATTERN = "^[^@].*[@].*[^@]$"
|
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
''' Constant mapping appropriate quantipy types to pandas dtypes
'''
DTYPE_MAP = {
"float": ["float64", "float32", "float16"],
"int": ["int64", "int32", "int16", "int8", "int0", "float64", "float32", "float16"],
"string": ["object"],
"date": ["datetime64"],
"time": ["timedelta64"],
"bool": ["bool"],
"single": ["int64", "int32", "int16", "int8", "int0", "float64", "float32", "float16"],
"dichotomous set": [],
"categorical set": [],
"delimited set": ["object"],
"grid": []
}
MAPPED_PATTERN = "^[^@].+[@].+[^@]$"Edit to the regular expression pattern used to identify meta-mapped references.#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
''' Constant mapping appropriate quantipy types to pandas dtypes
'''
DTYPE_MAP = {
"float": ["float64", "float32", "float16"],
"int": ["int64", "int32", "int16", "int8", "int0", "float64", "float32", "float16"],
"string": ["object"],
"date": ["datetime64"],
"time": ["timedelta64"],
"bool": ["bool"],
"single": ["int64", "int32", "int16", "int8", "int0", "float64", "float32", "float16"],
"dichotomous set": [],
"categorical set": [],
"delimited set": ["object"],
"grid": []
}
MAPPED_PATTERN = "^[^@].*[@].*[^@]$"
|
<commit_before>#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
''' Constant mapping appropriate quantipy types to pandas dtypes
'''
DTYPE_MAP = {
"float": ["float64", "float32", "float16"],
"int": ["int64", "int32", "int16", "int8", "int0", "float64", "float32", "float16"],
"string": ["object"],
"date": ["datetime64"],
"time": ["timedelta64"],
"bool": ["bool"],
"single": ["int64", "int32", "int16", "int8", "int0", "float64", "float32", "float16"],
"dichotomous set": [],
"categorical set": [],
"delimited set": ["object"],
"grid": []
}
MAPPED_PATTERN = "^[^@].+[@].+[^@]$"<commit_msg>Edit to the regular expression pattern used to identify meta-mapped references.<commit_after>#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
''' Constant mapping appropriate quantipy types to pandas dtypes
'''
DTYPE_MAP = {
"float": ["float64", "float32", "float16"],
"int": ["int64", "int32", "int16", "int8", "int0", "float64", "float32", "float16"],
"string": ["object"],
"date": ["datetime64"],
"time": ["timedelta64"],
"bool": ["bool"],
"single": ["int64", "int32", "int16", "int8", "int0", "float64", "float32", "float16"],
"dichotomous set": [],
"categorical set": [],
"delimited set": ["object"],
"grid": []
}
MAPPED_PATTERN = "^[^@].*[@].*[^@]$"
|
f3eb4ffe0017b850fcd9a66bcfa0bc00f724064e
|
gapipy/resources/booking/document.py
|
gapipy/resources/booking/document.py
|
from __future__ import unicode_literals
from ..base import Resource
class Document(Resource):
_resource_name = 'documents'
_is_listable = False
_as_is_fields = ['id', 'href', 'mime_type', 'content', 'type']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('booking', 'Booking'),
]
class Invoice(Resource):
_resource_name = 'invoices'
_is_listable = False
_as_is_fields = ['id', 'href', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('document', Document),
('booking', 'Booking'),
]
|
from __future__ import unicode_literals
from ..base import Resource
class Document(Resource):
_resource_name = 'documents'
_is_listable = False
_as_is_fields = ['id', 'href', 'mime_type', 'content', 'type', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('booking', 'Booking'),
]
class Invoice(Resource):
_resource_name = 'invoices'
_is_listable = False
_as_is_fields = ['id', 'href', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('document', Document),
('booking', 'Booking'),
]
|
Add audience field to Document resource
|
Add audience field to Document resource
- 'audience' field is displayed on list of invoices (bookings/<booking_id>/invoices) and now, also on list of documents (bookings/<booking_id>/documents) to match what is being returned in the API
|
Python
|
mit
|
gadventures/gapipy
|
from __future__ import unicode_literals
from ..base import Resource
class Document(Resource):
_resource_name = 'documents'
_is_listable = False
_as_is_fields = ['id', 'href', 'mime_type', 'content', 'type']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('booking', 'Booking'),
]
class Invoice(Resource):
_resource_name = 'invoices'
_is_listable = False
_as_is_fields = ['id', 'href', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('document', Document),
('booking', 'Booking'),
]
Add audience field to Document resource
- 'audience' field is displayed on list of invoices (bookings/<booking_id>/invoices) and now, also on list of documents (bookings/<booking_id>/documents) to match what is being returned in the API
|
from __future__ import unicode_literals
from ..base import Resource
class Document(Resource):
_resource_name = 'documents'
_is_listable = False
_as_is_fields = ['id', 'href', 'mime_type', 'content', 'type', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('booking', 'Booking'),
]
class Invoice(Resource):
_resource_name = 'invoices'
_is_listable = False
_as_is_fields = ['id', 'href', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('document', Document),
('booking', 'Booking'),
]
|
<commit_before>from __future__ import unicode_literals
from ..base import Resource
class Document(Resource):
_resource_name = 'documents'
_is_listable = False
_as_is_fields = ['id', 'href', 'mime_type', 'content', 'type']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('booking', 'Booking'),
]
class Invoice(Resource):
_resource_name = 'invoices'
_is_listable = False
_as_is_fields = ['id', 'href', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('document', Document),
('booking', 'Booking'),
]
<commit_msg>Add audience field to Document resource
- 'audience' field is displayed on list of invoices (bookings/<booking_id>/invoices) and now, also on list of documents (bookings/<booking_id>/documents) to match what is being returned in the API <commit_after>
|
from __future__ import unicode_literals
from ..base import Resource
class Document(Resource):
_resource_name = 'documents'
_is_listable = False
_as_is_fields = ['id', 'href', 'mime_type', 'content', 'type', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('booking', 'Booking'),
]
class Invoice(Resource):
_resource_name = 'invoices'
_is_listable = False
_as_is_fields = ['id', 'href', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('document', Document),
('booking', 'Booking'),
]
|
from __future__ import unicode_literals
from ..base import Resource
class Document(Resource):
_resource_name = 'documents'
_is_listable = False
_as_is_fields = ['id', 'href', 'mime_type', 'content', 'type']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('booking', 'Booking'),
]
class Invoice(Resource):
_resource_name = 'invoices'
_is_listable = False
_as_is_fields = ['id', 'href', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('document', Document),
('booking', 'Booking'),
]
Add audience field to Document resource
- 'audience' field is displayed on list of invoices (bookings/<booking_id>/invoices) and now, also on list of documents (bookings/<booking_id>/documents) to match what is being returned in the API from __future__ import unicode_literals
from ..base import Resource
class Document(Resource):
_resource_name = 'documents'
_is_listable = False
_as_is_fields = ['id', 'href', 'mime_type', 'content', 'type', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('booking', 'Booking'),
]
class Invoice(Resource):
_resource_name = 'invoices'
_is_listable = False
_as_is_fields = ['id', 'href', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('document', Document),
('booking', 'Booking'),
]
|
<commit_before>from __future__ import unicode_literals
from ..base import Resource
class Document(Resource):
_resource_name = 'documents'
_is_listable = False
_as_is_fields = ['id', 'href', 'mime_type', 'content', 'type']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('booking', 'Booking'),
]
class Invoice(Resource):
_resource_name = 'invoices'
_is_listable = False
_as_is_fields = ['id', 'href', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('document', Document),
('booking', 'Booking'),
]
<commit_msg>Add audience field to Document resource
- 'audience' field is displayed on list of invoices (bookings/<booking_id>/invoices) and now, also on list of documents (bookings/<booking_id>/documents) to match what is being returned in the API <commit_after>from __future__ import unicode_literals
from ..base import Resource
class Document(Resource):
_resource_name = 'documents'
_is_listable = False
_as_is_fields = ['id', 'href', 'mime_type', 'content', 'type', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('booking', 'Booking'),
]
class Invoice(Resource):
_resource_name = 'invoices'
_is_listable = False
_as_is_fields = ['id', 'href', 'audience']
_date_time_fields_utc = ['date_created']
_resource_fields = [
('document', Document),
('booking', 'Booking'),
]
|
da693543eb2c2daa3228dc583bb527647cc1602c
|
vc_zoom/setup.py
|
vc_zoom/setup.py
|
# This file is part of the Indico plugins.
# Copyright (C) 2020 CERN and ENEA
#
# The Indico plugins are free software; you can redistribute
# them and/or modify them under the terms of the MIT License;
# see the LICENSE file for more details.
from __future__ import unicode_literals
from setuptools import setup
# XXX: keeping some entries in here to make bulk updates easier while
# other plugins still have this metadata in setup.py; everything else
# is in setup.cfg now
setup(
name='indico-plugin-vc-zoom',
version='2.3b1',
install_requires=[
'indico>=2.3.2.dev0',
'PyJWT<2'
],
)
|
# This file is part of the Indico plugins.
# Copyright (C) 2020 CERN and ENEA
#
# The Indico plugins are free software; you can redistribute
# them and/or modify them under the terms of the MIT License;
# see the LICENSE file for more details.
from __future__ import unicode_literals
from setuptools import setup
# XXX: keeping some entries in here to make bulk updates easier while
# other plugins still have this metadata in setup.py; everything else
# is in setup.cfg now
setup(
name='indico-plugin-vc-zoom',
version='2.3b1',
install_requires=[
'indico>=2.3.2.dev0',
'PyJWT>=1.7.1,<2'
],
)
|
Use more specific PyJWT version pin
|
VC/Zoom: Use more specific PyJWT version pin
|
Python
|
mit
|
ThiefMaster/indico-plugins,ThiefMaster/indico-plugins,indico/indico-plugins,indico/indico-plugins,ThiefMaster/indico-plugins,ThiefMaster/indico-plugins,indico/indico-plugins,indico/indico-plugins
|
# This file is part of the Indico plugins.
# Copyright (C) 2020 CERN and ENEA
#
# The Indico plugins are free software; you can redistribute
# them and/or modify them under the terms of the MIT License;
# see the LICENSE file for more details.
from __future__ import unicode_literals
from setuptools import setup
# XXX: keeping some entries in here to make bulk updates easier while
# other plugins still have this metadata in setup.py; everything else
# is in setup.cfg now
setup(
name='indico-plugin-vc-zoom',
version='2.3b1',
install_requires=[
'indico>=2.3.2.dev0',
'PyJWT<2'
],
)
VC/Zoom: Use more specific PyJWT version pin
|
# This file is part of the Indico plugins.
# Copyright (C) 2020 CERN and ENEA
#
# The Indico plugins are free software; you can redistribute
# them and/or modify them under the terms of the MIT License;
# see the LICENSE file for more details.
from __future__ import unicode_literals
from setuptools import setup
# XXX: keeping some entries in here to make bulk updates easier while
# other plugins still have this metadata in setup.py; everything else
# is in setup.cfg now
setup(
name='indico-plugin-vc-zoom',
version='2.3b1',
install_requires=[
'indico>=2.3.2.dev0',
'PyJWT>=1.7.1,<2'
],
)
|
<commit_before># This file is part of the Indico plugins.
# Copyright (C) 2020 CERN and ENEA
#
# The Indico plugins are free software; you can redistribute
# them and/or modify them under the terms of the MIT License;
# see the LICENSE file for more details.
from __future__ import unicode_literals
from setuptools import setup
# XXX: keeping some entries in here to make bulk updates easier while
# other plugins still have this metadata in setup.py; everything else
# is in setup.cfg now
setup(
name='indico-plugin-vc-zoom',
version='2.3b1',
install_requires=[
'indico>=2.3.2.dev0',
'PyJWT<2'
],
)
<commit_msg>VC/Zoom: Use more specific PyJWT version pin<commit_after>
|
# This file is part of the Indico plugins.
# Copyright (C) 2020 CERN and ENEA
#
# The Indico plugins are free software; you can redistribute
# them and/or modify them under the terms of the MIT License;
# see the LICENSE file for more details.
from __future__ import unicode_literals
from setuptools import setup
# XXX: keeping some entries in here to make bulk updates easier while
# other plugins still have this metadata in setup.py; everything else
# is in setup.cfg now
setup(
name='indico-plugin-vc-zoom',
version='2.3b1',
install_requires=[
'indico>=2.3.2.dev0',
'PyJWT>=1.7.1,<2'
],
)
|
# This file is part of the Indico plugins.
# Copyright (C) 2020 CERN and ENEA
#
# The Indico plugins are free software; you can redistribute
# them and/or modify them under the terms of the MIT License;
# see the LICENSE file for more details.
from __future__ import unicode_literals
from setuptools import setup
# XXX: keeping some entries in here to make bulk updates easier while
# other plugins still have this metadata in setup.py; everything else
# is in setup.cfg now
setup(
name='indico-plugin-vc-zoom',
version='2.3b1',
install_requires=[
'indico>=2.3.2.dev0',
'PyJWT<2'
],
)
VC/Zoom: Use more specific PyJWT version pin# This file is part of the Indico plugins.
# Copyright (C) 2020 CERN and ENEA
#
# The Indico plugins are free software; you can redistribute
# them and/or modify them under the terms of the MIT License;
# see the LICENSE file for more details.
from __future__ import unicode_literals
from setuptools import setup
# XXX: keeping some entries in here to make bulk updates easier while
# other plugins still have this metadata in setup.py; everything else
# is in setup.cfg now
setup(
name='indico-plugin-vc-zoom',
version='2.3b1',
install_requires=[
'indico>=2.3.2.dev0',
'PyJWT>=1.7.1,<2'
],
)
|
<commit_before># This file is part of the Indico plugins.
# Copyright (C) 2020 CERN and ENEA
#
# The Indico plugins are free software; you can redistribute
# them and/or modify them under the terms of the MIT License;
# see the LICENSE file for more details.
from __future__ import unicode_literals
from setuptools import setup
# XXX: keeping some entries in here to make bulk updates easier while
# other plugins still have this metadata in setup.py; everything else
# is in setup.cfg now
setup(
name='indico-plugin-vc-zoom',
version='2.3b1',
install_requires=[
'indico>=2.3.2.dev0',
'PyJWT<2'
],
)
<commit_msg>VC/Zoom: Use more specific PyJWT version pin<commit_after># This file is part of the Indico plugins.
# Copyright (C) 2020 CERN and ENEA
#
# The Indico plugins are free software; you can redistribute
# them and/or modify them under the terms of the MIT License;
# see the LICENSE file for more details.
from __future__ import unicode_literals
from setuptools import setup
# XXX: keeping some entries in here to make bulk updates easier while
# other plugins still have this metadata in setup.py; everything else
# is in setup.cfg now
setup(
name='indico-plugin-vc-zoom',
version='2.3b1',
install_requires=[
'indico>=2.3.2.dev0',
'PyJWT>=1.7.1,<2'
],
)
|
f674890edde22bf70324d4a5da12c2ca01874ae8
|
tgrsite/utils.py
|
tgrsite/utils.py
|
class PermsError:
val = False
@classmethod
def suppress(cls):
cls.val = True
def __bool__(self):
return self.val
|
class PermsError:
val = True
@classmethod
def suppress(cls):
cls.val = False
def __bool__(self):
return self.val
|
Fix PermsError.suppress() doing the exact opposite...
|
Fix PermsError.suppress() doing the exact opposite...
|
Python
|
isc
|
ashbc/tgrsite,ashbc/tgrsite,ashbc/tgrsite
|
class PermsError:
val = False
@classmethod
def suppress(cls):
cls.val = True
def __bool__(self):
return self.val
Fix PermsError.suppress() doing the exact opposite...
|
class PermsError:
val = True
@classmethod
def suppress(cls):
cls.val = False
def __bool__(self):
return self.val
|
<commit_before>class PermsError:
val = False
@classmethod
def suppress(cls):
cls.val = True
def __bool__(self):
return self.val
<commit_msg>Fix PermsError.suppress() doing the exact opposite...<commit_after>
|
class PermsError:
val = True
@classmethod
def suppress(cls):
cls.val = False
def __bool__(self):
return self.val
|
class PermsError:
val = False
@classmethod
def suppress(cls):
cls.val = True
def __bool__(self):
return self.val
Fix PermsError.suppress() doing the exact opposite...class PermsError:
val = True
@classmethod
def suppress(cls):
cls.val = False
def __bool__(self):
return self.val
|
<commit_before>class PermsError:
val = False
@classmethod
def suppress(cls):
cls.val = True
def __bool__(self):
return self.val
<commit_msg>Fix PermsError.suppress() doing the exact opposite...<commit_after>class PermsError:
val = True
@classmethod
def suppress(cls):
cls.val = False
def __bool__(self):
return self.val
|
84e964eba11e344f6f0ec612b5743e693a8825bd
|
thoonk/config.py
|
thoonk/config.py
|
import json
import threading
import uuid
from thoonk.consts import *
class ConfigCache(object):
def __init__(self, pubsub):
self._feeds = {}
self.pubsub = pubsub
self.lock = threading.Lock()
self.instance = uuid.uuid4().hex
def __getitem__(self, feed):
with self.lock:
if feed in self._feeds:
return self._feeds[feed]
else:
if not self.pubsub.feed_exists(feed):
raise FeedDoesNotExist
config = json.loads(self.pubsub.redis.get(FEEDCONFIG % feed))
self._feeds[feed] = self.pubsub.feedtypes[config.get(u'type', u'feed')](self.pubsub, feed, config)
return self._feeds[feed]
def invalidate(self, feed, instance, delete=False):
if instance != self.instance:
with self.lock:
if feed in self._feeds:
if delete:
del self._feeds[feed]
else:
del self._feeds[feed].config
|
import json
import threading
import uuid
class ConfigCache(object):
"""
The ConfigCache class stores an in-memory version of each
feed's configuration. As there may be multiple systems using
Thoonk with the same Redis server, and each with its own
ConfigCache instance, each ConfigCache has a self.instance
field to uniquely identify itself.
Attributes:
thoonk -- The main Thoonk object.
instance -- A hex string for uniquely identifying this
ConfigCache instance.
Methods:
invalidate -- Force a feed's config to be retrieved from
Redis instead of in-memory.
"""
def __init__(self, thoonk):
"""
Create a new configuration cache.
Arguments:
thoonk -- The main Thoonk object.
"""
self._feeds = {}
self.thoonk = thoonk
self.lock = threading.Lock()
self.instance = uuid.uuid4().hex
def __getitem__(self, feed):
"""
Return a feed object for a given feed name.
Arguments:
feed -- The name of the requested feed.
"""
with self.lock:
if feed in self._feeds:
return self._feeds[feed]
else:
if not self.thoonk.feed_exists(feed):
raise FeedDoesNotExist
config = self.thoonk.redis.get('feed.config:%s' % feed)
config = json.loads(config)
feed_type = config.get(u'type', u'feed')
feed_class = self.thoonk.feedtypes[feed_type]
self._feeds[feed] = feed_class(self.thoonk, feed, config)
return self._feeds[feed]
def invalidate(self, feed, instance, delete=False):
"""
Delete a configuration so that it will be retrieved from Redis
instead of from the cache.
Arguments:
feed -- The name of the feed to invalidate.
instance -- A UUID identifying the cache which made the
invalidation request.
delete -- Indicates if the entire feed object should be
invalidated, or just its configuration.
"""
if instance != self.instance:
with self.lock:
if feed in self._feeds:
if delete:
del self._feeds[feed]
else:
del self._feeds[feed].config
|
Add docs to the ConfigCache.
|
Add docs to the ConfigCache.
|
Python
|
mit
|
andyet/thoonk.py,fritzy/thoonk.py
|
import json
import threading
import uuid
from thoonk.consts import *
class ConfigCache(object):
def __init__(self, pubsub):
self._feeds = {}
self.pubsub = pubsub
self.lock = threading.Lock()
self.instance = uuid.uuid4().hex
def __getitem__(self, feed):
with self.lock:
if feed in self._feeds:
return self._feeds[feed]
else:
if not self.pubsub.feed_exists(feed):
raise FeedDoesNotExist
config = json.loads(self.pubsub.redis.get(FEEDCONFIG % feed))
self._feeds[feed] = self.pubsub.feedtypes[config.get(u'type', u'feed')](self.pubsub, feed, config)
return self._feeds[feed]
def invalidate(self, feed, instance, delete=False):
if instance != self.instance:
with self.lock:
if feed in self._feeds:
if delete:
del self._feeds[feed]
else:
del self._feeds[feed].config
Add docs to the ConfigCache.
|
import json
import threading
import uuid
class ConfigCache(object):
"""
The ConfigCache class stores an in-memory version of each
feed's configuration. As there may be multiple systems using
Thoonk with the same Redis server, and each with its own
ConfigCache instance, each ConfigCache has a self.instance
field to uniquely identify itself.
Attributes:
thoonk -- The main Thoonk object.
instance -- A hex string for uniquely identifying this
ConfigCache instance.
Methods:
invalidate -- Force a feed's config to be retrieved from
Redis instead of in-memory.
"""
def __init__(self, thoonk):
"""
Create a new configuration cache.
Arguments:
thoonk -- The main Thoonk object.
"""
self._feeds = {}
self.thoonk = thoonk
self.lock = threading.Lock()
self.instance = uuid.uuid4().hex
def __getitem__(self, feed):
"""
Return a feed object for a given feed name.
Arguments:
feed -- The name of the requested feed.
"""
with self.lock:
if feed in self._feeds:
return self._feeds[feed]
else:
if not self.thoonk.feed_exists(feed):
raise FeedDoesNotExist
config = self.thoonk.redis.get('feed.config:%s' % feed)
config = json.loads(config)
feed_type = config.get(u'type', u'feed')
feed_class = self.thoonk.feedtypes[feed_type]
self._feeds[feed] = feed_class(self.thoonk, feed, config)
return self._feeds[feed]
def invalidate(self, feed, instance, delete=False):
"""
Delete a configuration so that it will be retrieved from Redis
instead of from the cache.
Arguments:
feed -- The name of the feed to invalidate.
instance -- A UUID identifying the cache which made the
invalidation request.
delete -- Indicates if the entire feed object should be
invalidated, or just its configuration.
"""
if instance != self.instance:
with self.lock:
if feed in self._feeds:
if delete:
del self._feeds[feed]
else:
del self._feeds[feed].config
|
<commit_before>import json
import threading
import uuid
from thoonk.consts import *
class ConfigCache(object):
def __init__(self, pubsub):
self._feeds = {}
self.pubsub = pubsub
self.lock = threading.Lock()
self.instance = uuid.uuid4().hex
def __getitem__(self, feed):
with self.lock:
if feed in self._feeds:
return self._feeds[feed]
else:
if not self.pubsub.feed_exists(feed):
raise FeedDoesNotExist
config = json.loads(self.pubsub.redis.get(FEEDCONFIG % feed))
self._feeds[feed] = self.pubsub.feedtypes[config.get(u'type', u'feed')](self.pubsub, feed, config)
return self._feeds[feed]
def invalidate(self, feed, instance, delete=False):
if instance != self.instance:
with self.lock:
if feed in self._feeds:
if delete:
del self._feeds[feed]
else:
del self._feeds[feed].config
<commit_msg>Add docs to the ConfigCache.<commit_after>
|
import json
import threading
import uuid
class ConfigCache(object):
"""
The ConfigCache class stores an in-memory version of each
feed's configuration. As there may be multiple systems using
Thoonk with the same Redis server, and each with its own
ConfigCache instance, each ConfigCache has a self.instance
field to uniquely identify itself.
Attributes:
thoonk -- The main Thoonk object.
instance -- A hex string for uniquely identifying this
ConfigCache instance.
Methods:
invalidate -- Force a feed's config to be retrieved from
Redis instead of in-memory.
"""
def __init__(self, thoonk):
"""
Create a new configuration cache.
Arguments:
thoonk -- The main Thoonk object.
"""
self._feeds = {}
self.thoonk = thoonk
self.lock = threading.Lock()
self.instance = uuid.uuid4().hex
def __getitem__(self, feed):
"""
Return a feed object for a given feed name.
Arguments:
feed -- The name of the requested feed.
"""
with self.lock:
if feed in self._feeds:
return self._feeds[feed]
else:
if not self.thoonk.feed_exists(feed):
raise FeedDoesNotExist
config = self.thoonk.redis.get('feed.config:%s' % feed)
config = json.loads(config)
feed_type = config.get(u'type', u'feed')
feed_class = self.thoonk.feedtypes[feed_type]
self._feeds[feed] = feed_class(self.thoonk, feed, config)
return self._feeds[feed]
def invalidate(self, feed, instance, delete=False):
"""
Delete a configuration so that it will be retrieved from Redis
instead of from the cache.
Arguments:
feed -- The name of the feed to invalidate.
instance -- A UUID identifying the cache which made the
invalidation request.
delete -- Indicates if the entire feed object should be
invalidated, or just its configuration.
"""
if instance != self.instance:
with self.lock:
if feed in self._feeds:
if delete:
del self._feeds[feed]
else:
del self._feeds[feed].config
|
import json
import threading
import uuid
from thoonk.consts import *
class ConfigCache(object):
def __init__(self, pubsub):
self._feeds = {}
self.pubsub = pubsub
self.lock = threading.Lock()
self.instance = uuid.uuid4().hex
def __getitem__(self, feed):
with self.lock:
if feed in self._feeds:
return self._feeds[feed]
else:
if not self.pubsub.feed_exists(feed):
raise FeedDoesNotExist
config = json.loads(self.pubsub.redis.get(FEEDCONFIG % feed))
self._feeds[feed] = self.pubsub.feedtypes[config.get(u'type', u'feed')](self.pubsub, feed, config)
return self._feeds[feed]
def invalidate(self, feed, instance, delete=False):
if instance != self.instance:
with self.lock:
if feed in self._feeds:
if delete:
del self._feeds[feed]
else:
del self._feeds[feed].config
Add docs to the ConfigCache.import json
import threading
import uuid
class ConfigCache(object):
"""
The ConfigCache class stores an in-memory version of each
feed's configuration. As there may be multiple systems using
Thoonk with the same Redis server, and each with its own
ConfigCache instance, each ConfigCache has a self.instance
field to uniquely identify itself.
Attributes:
thoonk -- The main Thoonk object.
instance -- A hex string for uniquely identifying this
ConfigCache instance.
Methods:
invalidate -- Force a feed's config to be retrieved from
Redis instead of in-memory.
"""
def __init__(self, thoonk):
"""
Create a new configuration cache.
Arguments:
thoonk -- The main Thoonk object.
"""
self._feeds = {}
self.thoonk = thoonk
self.lock = threading.Lock()
self.instance = uuid.uuid4().hex
def __getitem__(self, feed):
"""
Return a feed object for a given feed name.
Arguments:
feed -- The name of the requested feed.
"""
with self.lock:
if feed in self._feeds:
return self._feeds[feed]
else:
if not self.thoonk.feed_exists(feed):
raise FeedDoesNotExist
config = self.thoonk.redis.get('feed.config:%s' % feed)
config = json.loads(config)
feed_type = config.get(u'type', u'feed')
feed_class = self.thoonk.feedtypes[feed_type]
self._feeds[feed] = feed_class(self.thoonk, feed, config)
return self._feeds[feed]
def invalidate(self, feed, instance, delete=False):
"""
Delete a configuration so that it will be retrieved from Redis
instead of from the cache.
Arguments:
feed -- The name of the feed to invalidate.
instance -- A UUID identifying the cache which made the
invalidation request.
delete -- Indicates if the entire feed object should be
invalidated, or just its configuration.
"""
if instance != self.instance:
with self.lock:
if feed in self._feeds:
if delete:
del self._feeds[feed]
else:
del self._feeds[feed].config
|
<commit_before>import json
import threading
import uuid
from thoonk.consts import *
class ConfigCache(object):
def __init__(self, pubsub):
self._feeds = {}
self.pubsub = pubsub
self.lock = threading.Lock()
self.instance = uuid.uuid4().hex
def __getitem__(self, feed):
with self.lock:
if feed in self._feeds:
return self._feeds[feed]
else:
if not self.pubsub.feed_exists(feed):
raise FeedDoesNotExist
config = json.loads(self.pubsub.redis.get(FEEDCONFIG % feed))
self._feeds[feed] = self.pubsub.feedtypes[config.get(u'type', u'feed')](self.pubsub, feed, config)
return self._feeds[feed]
def invalidate(self, feed, instance, delete=False):
if instance != self.instance:
with self.lock:
if feed in self._feeds:
if delete:
del self._feeds[feed]
else:
del self._feeds[feed].config
<commit_msg>Add docs to the ConfigCache.<commit_after>import json
import threading
import uuid
class ConfigCache(object):
"""
The ConfigCache class stores an in-memory version of each
feed's configuration. As there may be multiple systems using
Thoonk with the same Redis server, and each with its own
ConfigCache instance, each ConfigCache has a self.instance
field to uniquely identify itself.
Attributes:
thoonk -- The main Thoonk object.
instance -- A hex string for uniquely identifying this
ConfigCache instance.
Methods:
invalidate -- Force a feed's config to be retrieved from
Redis instead of in-memory.
"""
def __init__(self, thoonk):
"""
Create a new configuration cache.
Arguments:
thoonk -- The main Thoonk object.
"""
self._feeds = {}
self.thoonk = thoonk
self.lock = threading.Lock()
self.instance = uuid.uuid4().hex
def __getitem__(self, feed):
"""
Return a feed object for a given feed name.
Arguments:
feed -- The name of the requested feed.
"""
with self.lock:
if feed in self._feeds:
return self._feeds[feed]
else:
if not self.thoonk.feed_exists(feed):
raise FeedDoesNotExist
config = self.thoonk.redis.get('feed.config:%s' % feed)
config = json.loads(config)
feed_type = config.get(u'type', u'feed')
feed_class = self.thoonk.feedtypes[feed_type]
self._feeds[feed] = feed_class(self.thoonk, feed, config)
return self._feeds[feed]
def invalidate(self, feed, instance, delete=False):
"""
Delete a configuration so that it will be retrieved from Redis
instead of from the cache.
Arguments:
feed -- The name of the feed to invalidate.
instance -- A UUID identifying the cache which made the
invalidation request.
delete -- Indicates if the entire feed object should be
invalidated, or just its configuration.
"""
if instance != self.instance:
with self.lock:
if feed in self._feeds:
if delete:
del self._feeds[feed]
else:
del self._feeds[feed].config
|
2e4b3f3dc8e0f949700c810912e32a2dffa2def3
|
ttag/__init__.py
|
ttag/__init__.py
|
from ttag.args import Arg, BasicArg, BooleanArg, ConstantArg, DateArg, \
DateTimeArg, IntegerArg, IsInstanceArg, KeywordsArg, ModelInstanceArg, \
StringArg, TimeArg
from ttag.core import Tag
from ttag.exceptions import TagArgumentMissing, TagValidationError
VERSION = (1, 0, 'alpha')
def get_version(number_only=False):
version = [str(VERSION[0])]
number = True
for bit in VERSION[1:]:
if not isinstance(bit, int):
if number_only:
break
number = False
version.append(number and '.' or '-')
version.append(str(bit))
return ''.join(version)
|
try:
from ttag.args import Arg, BasicArg, BooleanArg, ConstantArg, DateArg, \
DateTimeArg, IntegerArg, IsInstanceArg, KeywordsArg, \
ModelInstanceArg, StringArg, TimeArg
from ttag.core import Tag
from ttag.exceptions import TagArgumentMissing, TagValidationError
except ImportError:
# This allows setup.py to skip import errors which may occur if ttag is
# being installed at the same time as Django.
pass
VERSION = (1, 0, 'alpha', 2)
def get_version(number_only=False):
version = [str(VERSION[0])]
number = True
for bit in VERSION[1:]:
if not isinstance(bit, int):
if number_only:
break
number = False
version.append(number and '.' or '-')
version.append(str(bit))
return ''.join(version)
|
Work around an error if ttag is installed at the same time as Django
|
Work around an error if ttag is installed at the same time as Django
|
Python
|
bsd-3-clause
|
caktus/django-ttag,caktus/django-ttag,matuu/django-ttag,matuu/django-ttag,lincolnloop/django-ttag,lincolnloop/django-ttag
|
from ttag.args import Arg, BasicArg, BooleanArg, ConstantArg, DateArg, \
DateTimeArg, IntegerArg, IsInstanceArg, KeywordsArg, ModelInstanceArg, \
StringArg, TimeArg
from ttag.core import Tag
from ttag.exceptions import TagArgumentMissing, TagValidationError
VERSION = (1, 0, 'alpha')
def get_version(number_only=False):
version = [str(VERSION[0])]
number = True
for bit in VERSION[1:]:
if not isinstance(bit, int):
if number_only:
break
number = False
version.append(number and '.' or '-')
version.append(str(bit))
return ''.join(version)
Work around an error if ttag is installed at the same time as Django
|
try:
from ttag.args import Arg, BasicArg, BooleanArg, ConstantArg, DateArg, \
DateTimeArg, IntegerArg, IsInstanceArg, KeywordsArg, \
ModelInstanceArg, StringArg, TimeArg
from ttag.core import Tag
from ttag.exceptions import TagArgumentMissing, TagValidationError
except ImportError:
# This allows setup.py to skip import errors which may occur if ttag is
# being installed at the same time as Django.
pass
VERSION = (1, 0, 'alpha', 2)
def get_version(number_only=False):
version = [str(VERSION[0])]
number = True
for bit in VERSION[1:]:
if not isinstance(bit, int):
if number_only:
break
number = False
version.append(number and '.' or '-')
version.append(str(bit))
return ''.join(version)
|
<commit_before>from ttag.args import Arg, BasicArg, BooleanArg, ConstantArg, DateArg, \
DateTimeArg, IntegerArg, IsInstanceArg, KeywordsArg, ModelInstanceArg, \
StringArg, TimeArg
from ttag.core import Tag
from ttag.exceptions import TagArgumentMissing, TagValidationError
VERSION = (1, 0, 'alpha')
def get_version(number_only=False):
version = [str(VERSION[0])]
number = True
for bit in VERSION[1:]:
if not isinstance(bit, int):
if number_only:
break
number = False
version.append(number and '.' or '-')
version.append(str(bit))
return ''.join(version)
<commit_msg>Work around an error if ttag is installed at the same time as Django<commit_after>
|
try:
from ttag.args import Arg, BasicArg, BooleanArg, ConstantArg, DateArg, \
DateTimeArg, IntegerArg, IsInstanceArg, KeywordsArg, \
ModelInstanceArg, StringArg, TimeArg
from ttag.core import Tag
from ttag.exceptions import TagArgumentMissing, TagValidationError
except ImportError:
# This allows setup.py to skip import errors which may occur if ttag is
# being installed at the same time as Django.
pass
VERSION = (1, 0, 'alpha', 2)
def get_version(number_only=False):
version = [str(VERSION[0])]
number = True
for bit in VERSION[1:]:
if not isinstance(bit, int):
if number_only:
break
number = False
version.append(number and '.' or '-')
version.append(str(bit))
return ''.join(version)
|
from ttag.args import Arg, BasicArg, BooleanArg, ConstantArg, DateArg, \
DateTimeArg, IntegerArg, IsInstanceArg, KeywordsArg, ModelInstanceArg, \
StringArg, TimeArg
from ttag.core import Tag
from ttag.exceptions import TagArgumentMissing, TagValidationError
VERSION = (1, 0, 'alpha')
def get_version(number_only=False):
version = [str(VERSION[0])]
number = True
for bit in VERSION[1:]:
if not isinstance(bit, int):
if number_only:
break
number = False
version.append(number and '.' or '-')
version.append(str(bit))
return ''.join(version)
Work around an error if ttag is installed at the same time as Djangotry:
from ttag.args import Arg, BasicArg, BooleanArg, ConstantArg, DateArg, \
DateTimeArg, IntegerArg, IsInstanceArg, KeywordsArg, \
ModelInstanceArg, StringArg, TimeArg
from ttag.core import Tag
from ttag.exceptions import TagArgumentMissing, TagValidationError
except ImportError:
# This allows setup.py to skip import errors which may occur if ttag is
# being installed at the same time as Django.
pass
VERSION = (1, 0, 'alpha', 2)
def get_version(number_only=False):
version = [str(VERSION[0])]
number = True
for bit in VERSION[1:]:
if not isinstance(bit, int):
if number_only:
break
number = False
version.append(number and '.' or '-')
version.append(str(bit))
return ''.join(version)
|
<commit_before>from ttag.args import Arg, BasicArg, BooleanArg, ConstantArg, DateArg, \
DateTimeArg, IntegerArg, IsInstanceArg, KeywordsArg, ModelInstanceArg, \
StringArg, TimeArg
from ttag.core import Tag
from ttag.exceptions import TagArgumentMissing, TagValidationError
VERSION = (1, 0, 'alpha')
def get_version(number_only=False):
version = [str(VERSION[0])]
number = True
for bit in VERSION[1:]:
if not isinstance(bit, int):
if number_only:
break
number = False
version.append(number and '.' or '-')
version.append(str(bit))
return ''.join(version)
<commit_msg>Work around an error if ttag is installed at the same time as Django<commit_after>try:
from ttag.args import Arg, BasicArg, BooleanArg, ConstantArg, DateArg, \
DateTimeArg, IntegerArg, IsInstanceArg, KeywordsArg, \
ModelInstanceArg, StringArg, TimeArg
from ttag.core import Tag
from ttag.exceptions import TagArgumentMissing, TagValidationError
except ImportError:
# This allows setup.py to skip import errors which may occur if ttag is
# being installed at the same time as Django.
pass
VERSION = (1, 0, 'alpha', 2)
def get_version(number_only=False):
version = [str(VERSION[0])]
number = True
for bit in VERSION[1:]:
if not isinstance(bit, int):
if number_only:
break
number = False
version.append(number and '.' or '-')
version.append(str(bit))
return ''.join(version)
|
0ee942eaffc2a60b87c21eeec75f01eb1a50b8e0
|
tests/demo_project/manage.py
|
tests/demo_project/manage.py
|
#!/usr/bin/env python
import os
import sys
from pathlib import Path
if __name__ == "__main__":
# We add ourselves into the python path, so we can find
# the package later.
demo_root =os.path.dirname(os.path.abspath(__file__))
crud_install = os.path.dirname(os.path.dirname(demo_root))
sys.path.insert(0, crud_install)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "demo.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
import os
import sys
from pathlib import Path
if __name__ == "__main__":
# We add ourselves into the python path, so we can find
# the package later.
demo_root =os.path.dirname(os.path.abspath(__file__))
crud_install = os.path.dirname(os.path.dirname(demo_root))
sys.path.insert(0, crud_install)
sys.path.insert(0, demo_root)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "demo.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
|
Make sure the demo project is in the pythonpath
|
Make sure the demo project is in the pythonpath
|
Python
|
bsd-3-clause
|
oscarmlage/django-cruds-adminlte,oscarmlage/django-cruds-adminlte,oscarmlage/django-cruds-adminlte
|
#!/usr/bin/env python
import os
import sys
from pathlib import Path
if __name__ == "__main__":
# We add ourselves into the python path, so we can find
# the package later.
demo_root =os.path.dirname(os.path.abspath(__file__))
crud_install = os.path.dirname(os.path.dirname(demo_root))
sys.path.insert(0, crud_install)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "demo.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
Make sure the demo project is in the pythonpath
|
#!/usr/bin/env python
import os
import sys
from pathlib import Path
if __name__ == "__main__":
# We add ourselves into the python path, so we can find
# the package later.
demo_root =os.path.dirname(os.path.abspath(__file__))
crud_install = os.path.dirname(os.path.dirname(demo_root))
sys.path.insert(0, crud_install)
sys.path.insert(0, demo_root)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "demo.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
|
<commit_before>#!/usr/bin/env python
import os
import sys
from pathlib import Path
if __name__ == "__main__":
# We add ourselves into the python path, so we can find
# the package later.
demo_root =os.path.dirname(os.path.abspath(__file__))
crud_install = os.path.dirname(os.path.dirname(demo_root))
sys.path.insert(0, crud_install)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "demo.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
<commit_msg>Make sure the demo project is in the pythonpath<commit_after>
|
#!/usr/bin/env python
import os
import sys
from pathlib import Path
if __name__ == "__main__":
# We add ourselves into the python path, so we can find
# the package later.
demo_root =os.path.dirname(os.path.abspath(__file__))
crud_install = os.path.dirname(os.path.dirname(demo_root))
sys.path.insert(0, crud_install)
sys.path.insert(0, demo_root)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "demo.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
import os
import sys
from pathlib import Path
if __name__ == "__main__":
# We add ourselves into the python path, so we can find
# the package later.
demo_root =os.path.dirname(os.path.abspath(__file__))
crud_install = os.path.dirname(os.path.dirname(demo_root))
sys.path.insert(0, crud_install)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "demo.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
Make sure the demo project is in the pythonpath#!/usr/bin/env python
import os
import sys
from pathlib import Path
if __name__ == "__main__":
# We add ourselves into the python path, so we can find
# the package later.
demo_root =os.path.dirname(os.path.abspath(__file__))
crud_install = os.path.dirname(os.path.dirname(demo_root))
sys.path.insert(0, crud_install)
sys.path.insert(0, demo_root)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "demo.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
|
<commit_before>#!/usr/bin/env python
import os
import sys
from pathlib import Path
if __name__ == "__main__":
# We add ourselves into the python path, so we can find
# the package later.
demo_root =os.path.dirname(os.path.abspath(__file__))
crud_install = os.path.dirname(os.path.dirname(demo_root))
sys.path.insert(0, crud_install)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "demo.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
<commit_msg>Make sure the demo project is in the pythonpath<commit_after>#!/usr/bin/env python
import os
import sys
from pathlib import Path
if __name__ == "__main__":
# We add ourselves into the python path, so we can find
# the package later.
demo_root =os.path.dirname(os.path.abspath(__file__))
crud_install = os.path.dirname(os.path.dirname(demo_root))
sys.path.insert(0, crud_install)
sys.path.insert(0, demo_root)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "demo.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
|
7d1dc8851f1571b2f39a886298bc7b8ff270a6b7
|
tests/run/generators_py35.py
|
tests/run/generators_py35.py
|
# mode: run
# tag: generators, pure3.5
from __future__ import generator_stop
# "generator_stop" was only added in Py3.5.
def with_outer_raising(*args):
"""
>>> x = with_outer_raising(1, 2, 3)
>>> try:
... list(x())
... except RuntimeError:
... print("OK!")
... else:
... print("NOT RAISED!")
OK!
"""
def generator():
for i in args:
yield i
raise StopIteration
return generator
def anno_gen(x: 'int') -> 'float':
"""
>>> gen = anno_gen(2)
>>> next(gen)
2.0
>>> ret, arg = sorted(anno_gen.__annotations__.items())
>>> print(ret[0]); print(ret[1])
return
float
>>> print(arg[0]); print(arg[1])
x
int
"""
yield float(x)
|
# mode: run
# tag: generators, pure3.5
from __future__ import generator_stop
# "generator_stop" was only added in Py3.5.
def with_outer_raising(*args):
"""
>>> x = with_outer_raising(1, 2, 3)
>>> try:
... list(x())
... except RuntimeError:
... print("OK!")
... else:
... print("NOT RAISED!")
OK!
"""
def generator():
for i in args:
yield i
raise StopIteration
return generator
def anno_gen(x: 'int') -> 'float':
"""
>>> gen = anno_gen(2)
>>> next(gen)
2.0
>>> ret, arg = sorted(anno_gen.__annotations__.items())
>>> print(ret[0]); print(str(ret[1]).strip("'")) # strip makes it pass with/without PEP563
return
float
>>> print(arg[0]); print(str(arg[1]).strip("'"))
x
int
"""
yield float(x)
|
Make annotation tests work with non-evaluated annotations (GH-4050)
|
Make annotation tests work with non-evaluated annotations (GH-4050)
Backported from 3dc2b9dfc23638fbef2558d619709b5235d5df08
Partial fix for https://github.com/cython/cython/issues/3919
|
Python
|
apache-2.0
|
scoder/cython,scoder/cython,cython/cython,da-woods/cython,scoder/cython,scoder/cython,cython/cython,da-woods/cython,da-woods/cython,da-woods/cython,cython/cython,cython/cython
|
# mode: run
# tag: generators, pure3.5
from __future__ import generator_stop
# "generator_stop" was only added in Py3.5.
def with_outer_raising(*args):
"""
>>> x = with_outer_raising(1, 2, 3)
>>> try:
... list(x())
... except RuntimeError:
... print("OK!")
... else:
... print("NOT RAISED!")
OK!
"""
def generator():
for i in args:
yield i
raise StopIteration
return generator
def anno_gen(x: 'int') -> 'float':
"""
>>> gen = anno_gen(2)
>>> next(gen)
2.0
>>> ret, arg = sorted(anno_gen.__annotations__.items())
>>> print(ret[0]); print(ret[1])
return
float
>>> print(arg[0]); print(arg[1])
x
int
"""
yield float(x)
Make annotation tests work with non-evaluated annotations (GH-4050)
Backported from 3dc2b9dfc23638fbef2558d619709b5235d5df08
Partial fix for https://github.com/cython/cython/issues/3919
|
# mode: run
# tag: generators, pure3.5
from __future__ import generator_stop
# "generator_stop" was only added in Py3.5.
def with_outer_raising(*args):
"""
>>> x = with_outer_raising(1, 2, 3)
>>> try:
... list(x())
... except RuntimeError:
... print("OK!")
... else:
... print("NOT RAISED!")
OK!
"""
def generator():
for i in args:
yield i
raise StopIteration
return generator
def anno_gen(x: 'int') -> 'float':
"""
>>> gen = anno_gen(2)
>>> next(gen)
2.0
>>> ret, arg = sorted(anno_gen.__annotations__.items())
>>> print(ret[0]); print(str(ret[1]).strip("'")) # strip makes it pass with/without PEP563
return
float
>>> print(arg[0]); print(str(arg[1]).strip("'"))
x
int
"""
yield float(x)
|
<commit_before># mode: run
# tag: generators, pure3.5
from __future__ import generator_stop
# "generator_stop" was only added in Py3.5.
def with_outer_raising(*args):
"""
>>> x = with_outer_raising(1, 2, 3)
>>> try:
... list(x())
... except RuntimeError:
... print("OK!")
... else:
... print("NOT RAISED!")
OK!
"""
def generator():
for i in args:
yield i
raise StopIteration
return generator
def anno_gen(x: 'int') -> 'float':
"""
>>> gen = anno_gen(2)
>>> next(gen)
2.0
>>> ret, arg = sorted(anno_gen.__annotations__.items())
>>> print(ret[0]); print(ret[1])
return
float
>>> print(arg[0]); print(arg[1])
x
int
"""
yield float(x)
<commit_msg>Make annotation tests work with non-evaluated annotations (GH-4050)
Backported from 3dc2b9dfc23638fbef2558d619709b5235d5df08
Partial fix for https://github.com/cython/cython/issues/3919
<commit_after>
|
# mode: run
# tag: generators, pure3.5
from __future__ import generator_stop
# "generator_stop" was only added in Py3.5.
def with_outer_raising(*args):
"""
>>> x = with_outer_raising(1, 2, 3)
>>> try:
... list(x())
... except RuntimeError:
... print("OK!")
... else:
... print("NOT RAISED!")
OK!
"""
def generator():
for i in args:
yield i
raise StopIteration
return generator
def anno_gen(x: 'int') -> 'float':
"""
>>> gen = anno_gen(2)
>>> next(gen)
2.0
>>> ret, arg = sorted(anno_gen.__annotations__.items())
>>> print(ret[0]); print(str(ret[1]).strip("'")) # strip makes it pass with/without PEP563
return
float
>>> print(arg[0]); print(str(arg[1]).strip("'"))
x
int
"""
yield float(x)
|
# mode: run
# tag: generators, pure3.5
from __future__ import generator_stop
# "generator_stop" was only added in Py3.5.
def with_outer_raising(*args):
"""
>>> x = with_outer_raising(1, 2, 3)
>>> try:
... list(x())
... except RuntimeError:
... print("OK!")
... else:
... print("NOT RAISED!")
OK!
"""
def generator():
for i in args:
yield i
raise StopIteration
return generator
def anno_gen(x: 'int') -> 'float':
"""
>>> gen = anno_gen(2)
>>> next(gen)
2.0
>>> ret, arg = sorted(anno_gen.__annotations__.items())
>>> print(ret[0]); print(ret[1])
return
float
>>> print(arg[0]); print(arg[1])
x
int
"""
yield float(x)
Make annotation tests work with non-evaluated annotations (GH-4050)
Backported from 3dc2b9dfc23638fbef2558d619709b5235d5df08
Partial fix for https://github.com/cython/cython/issues/3919
# mode: run
# tag: generators, pure3.5
from __future__ import generator_stop
# "generator_stop" was only added in Py3.5.
def with_outer_raising(*args):
"""
>>> x = with_outer_raising(1, 2, 3)
>>> try:
... list(x())
... except RuntimeError:
... print("OK!")
... else:
... print("NOT RAISED!")
OK!
"""
def generator():
for i in args:
yield i
raise StopIteration
return generator
def anno_gen(x: 'int') -> 'float':
"""
>>> gen = anno_gen(2)
>>> next(gen)
2.0
>>> ret, arg = sorted(anno_gen.__annotations__.items())
>>> print(ret[0]); print(str(ret[1]).strip("'")) # strip makes it pass with/without PEP563
return
float
>>> print(arg[0]); print(str(arg[1]).strip("'"))
x
int
"""
yield float(x)
|
<commit_before># mode: run
# tag: generators, pure3.5
from __future__ import generator_stop
# "generator_stop" was only added in Py3.5.
def with_outer_raising(*args):
"""
>>> x = with_outer_raising(1, 2, 3)
>>> try:
... list(x())
... except RuntimeError:
... print("OK!")
... else:
... print("NOT RAISED!")
OK!
"""
def generator():
for i in args:
yield i
raise StopIteration
return generator
def anno_gen(x: 'int') -> 'float':
"""
>>> gen = anno_gen(2)
>>> next(gen)
2.0
>>> ret, arg = sorted(anno_gen.__annotations__.items())
>>> print(ret[0]); print(ret[1])
return
float
>>> print(arg[0]); print(arg[1])
x
int
"""
yield float(x)
<commit_msg>Make annotation tests work with non-evaluated annotations (GH-4050)
Backported from 3dc2b9dfc23638fbef2558d619709b5235d5df08
Partial fix for https://github.com/cython/cython/issues/3919
<commit_after># mode: run
# tag: generators, pure3.5
from __future__ import generator_stop
# "generator_stop" was only added in Py3.5.
def with_outer_raising(*args):
"""
>>> x = with_outer_raising(1, 2, 3)
>>> try:
... list(x())
... except RuntimeError:
... print("OK!")
... else:
... print("NOT RAISED!")
OK!
"""
def generator():
for i in args:
yield i
raise StopIteration
return generator
def anno_gen(x: 'int') -> 'float':
"""
>>> gen = anno_gen(2)
>>> next(gen)
2.0
>>> ret, arg = sorted(anno_gen.__annotations__.items())
>>> print(ret[0]); print(str(ret[1]).strip("'")) # strip makes it pass with/without PEP563
return
float
>>> print(arg[0]); print(str(arg[1]).strip("'"))
x
int
"""
yield float(x)
|
5a5e820fa50377904e6fdd592fed5e883698c3f0
|
tests/testapp/models/city.py
|
tests/testapp/models/city.py
|
from django.db import models
from binder.models import BinderModel
class City(BinderModel):
country = models.ForeignKey('Country', null=False, blank=False, related_name='cities', on_delete=models.CASCADE)
name = models.TextField(unique=True, max_length=100)
class CityState(BinderModel):
"""
City states are like cities, but they can also decide that they do not belong to a country
"""
country = models.ForeignKey('Country', null=True, blank=True, related_name='city_states', on_delete=models.SET_NULL)
name = models.TextField(unique=True, max_length=100)
class PermanentCity(BinderModel):
"""
Some cities are indestrucable. Even if we delete them, they are not really deleted, and can be rerissen from their ashes
"""
country = models.ForeignKey('Country', null=False, blank=False, related_name='permanent_cities', on_delete=models.CASCADE)
name = models.TextField(unique=True, max_length=100)
deleted = models.BooleanField()
|
from django.db import models
from binder.models import BinderModel
class City(BinderModel):
country = models.ForeignKey('Country', null=False, blank=False, related_name='cities', on_delete=models.CASCADE)
name = models.CharField(unique=True, max_length=100)
class CityState(BinderModel):
"""
City states are like cities, but they can also decide that they do not belong to a country
"""
country = models.ForeignKey('Country', null=True, blank=True, related_name='city_states', on_delete=models.SET_NULL)
name = models.CharField(unique=True, max_length=100)
class PermanentCity(BinderModel):
"""
Some cities are indestrucable. Even if we delete them, they are not really deleted, and can be rerissen from their ashes
"""
country = models.ForeignKey('Country', null=False, blank=False, related_name='permanent_cities', on_delete=models.CASCADE)
name = models.CharField(unique=True, max_length=100)
deleted = models.BooleanField()
|
Change text field to charfields, since those are indexable in mysql
|
Change text field to charfields, since those are indexable in mysql
|
Python
|
mit
|
CodeYellowBV/django-binder
|
from django.db import models
from binder.models import BinderModel
class City(BinderModel):
country = models.ForeignKey('Country', null=False, blank=False, related_name='cities', on_delete=models.CASCADE)
name = models.TextField(unique=True, max_length=100)
class CityState(BinderModel):
"""
City states are like cities, but they can also decide that they do not belong to a country
"""
country = models.ForeignKey('Country', null=True, blank=True, related_name='city_states', on_delete=models.SET_NULL)
name = models.TextField(unique=True, max_length=100)
class PermanentCity(BinderModel):
"""
Some cities are indestrucable. Even if we delete them, they are not really deleted, and can be rerissen from their ashes
"""
country = models.ForeignKey('Country', null=False, blank=False, related_name='permanent_cities', on_delete=models.CASCADE)
name = models.TextField(unique=True, max_length=100)
deleted = models.BooleanField()Change text field to charfields, since those are indexable in mysql
|
from django.db import models
from binder.models import BinderModel
class City(BinderModel):
country = models.ForeignKey('Country', null=False, blank=False, related_name='cities', on_delete=models.CASCADE)
name = models.CharField(unique=True, max_length=100)
class CityState(BinderModel):
"""
City states are like cities, but they can also decide that they do not belong to a country
"""
country = models.ForeignKey('Country', null=True, blank=True, related_name='city_states', on_delete=models.SET_NULL)
name = models.CharField(unique=True, max_length=100)
class PermanentCity(BinderModel):
"""
Some cities are indestrucable. Even if we delete them, they are not really deleted, and can be rerissen from their ashes
"""
country = models.ForeignKey('Country', null=False, blank=False, related_name='permanent_cities', on_delete=models.CASCADE)
name = models.CharField(unique=True, max_length=100)
deleted = models.BooleanField()
|
<commit_before>from django.db import models
from binder.models import BinderModel
class City(BinderModel):
country = models.ForeignKey('Country', null=False, blank=False, related_name='cities', on_delete=models.CASCADE)
name = models.TextField(unique=True, max_length=100)
class CityState(BinderModel):
"""
City states are like cities, but they can also decide that they do not belong to a country
"""
country = models.ForeignKey('Country', null=True, blank=True, related_name='city_states', on_delete=models.SET_NULL)
name = models.TextField(unique=True, max_length=100)
class PermanentCity(BinderModel):
"""
Some cities are indestrucable. Even if we delete them, they are not really deleted, and can be rerissen from their ashes
"""
country = models.ForeignKey('Country', null=False, blank=False, related_name='permanent_cities', on_delete=models.CASCADE)
name = models.TextField(unique=True, max_length=100)
deleted = models.BooleanField()<commit_msg>Change text field to charfields, since those are indexable in mysql<commit_after>
|
from django.db import models
from binder.models import BinderModel
class City(BinderModel):
country = models.ForeignKey('Country', null=False, blank=False, related_name='cities', on_delete=models.CASCADE)
name = models.CharField(unique=True, max_length=100)
class CityState(BinderModel):
"""
City states are like cities, but they can also decide that they do not belong to a country
"""
country = models.ForeignKey('Country', null=True, blank=True, related_name='city_states', on_delete=models.SET_NULL)
name = models.CharField(unique=True, max_length=100)
class PermanentCity(BinderModel):
"""
Some cities are indestrucable. Even if we delete them, they are not really deleted, and can be rerissen from their ashes
"""
country = models.ForeignKey('Country', null=False, blank=False, related_name='permanent_cities', on_delete=models.CASCADE)
name = models.CharField(unique=True, max_length=100)
deleted = models.BooleanField()
|
from django.db import models
from binder.models import BinderModel
class City(BinderModel):
country = models.ForeignKey('Country', null=False, blank=False, related_name='cities', on_delete=models.CASCADE)
name = models.TextField(unique=True, max_length=100)
class CityState(BinderModel):
"""
City states are like cities, but they can also decide that they do not belong to a country
"""
country = models.ForeignKey('Country', null=True, blank=True, related_name='city_states', on_delete=models.SET_NULL)
name = models.TextField(unique=True, max_length=100)
class PermanentCity(BinderModel):
"""
Some cities are indestrucable. Even if we delete them, they are not really deleted, and can be rerissen from their ashes
"""
country = models.ForeignKey('Country', null=False, blank=False, related_name='permanent_cities', on_delete=models.CASCADE)
name = models.TextField(unique=True, max_length=100)
deleted = models.BooleanField()Change text field to charfields, since those are indexable in mysqlfrom django.db import models
from binder.models import BinderModel
class City(BinderModel):
country = models.ForeignKey('Country', null=False, blank=False, related_name='cities', on_delete=models.CASCADE)
name = models.CharField(unique=True, max_length=100)
class CityState(BinderModel):
"""
City states are like cities, but they can also decide that they do not belong to a country
"""
country = models.ForeignKey('Country', null=True, blank=True, related_name='city_states', on_delete=models.SET_NULL)
name = models.CharField(unique=True, max_length=100)
class PermanentCity(BinderModel):
"""
Some cities are indestrucable. Even if we delete them, they are not really deleted, and can be rerissen from their ashes
"""
country = models.ForeignKey('Country', null=False, blank=False, related_name='permanent_cities', on_delete=models.CASCADE)
name = models.CharField(unique=True, max_length=100)
deleted = models.BooleanField()
|
<commit_before>from django.db import models
from binder.models import BinderModel
class City(BinderModel):
country = models.ForeignKey('Country', null=False, blank=False, related_name='cities', on_delete=models.CASCADE)
name = models.TextField(unique=True, max_length=100)
class CityState(BinderModel):
"""
City states are like cities, but they can also decide that they do not belong to a country
"""
country = models.ForeignKey('Country', null=True, blank=True, related_name='city_states', on_delete=models.SET_NULL)
name = models.TextField(unique=True, max_length=100)
class PermanentCity(BinderModel):
"""
Some cities are indestrucable. Even if we delete them, they are not really deleted, and can be rerissen from their ashes
"""
country = models.ForeignKey('Country', null=False, blank=False, related_name='permanent_cities', on_delete=models.CASCADE)
name = models.TextField(unique=True, max_length=100)
deleted = models.BooleanField()<commit_msg>Change text field to charfields, since those are indexable in mysql<commit_after>from django.db import models
from binder.models import BinderModel
class City(BinderModel):
country = models.ForeignKey('Country', null=False, blank=False, related_name='cities', on_delete=models.CASCADE)
name = models.CharField(unique=True, max_length=100)
class CityState(BinderModel):
"""
City states are like cities, but they can also decide that they do not belong to a country
"""
country = models.ForeignKey('Country', null=True, blank=True, related_name='city_states', on_delete=models.SET_NULL)
name = models.CharField(unique=True, max_length=100)
class PermanentCity(BinderModel):
"""
Some cities are indestrucable. Even if we delete them, they are not really deleted, and can be rerissen from their ashes
"""
country = models.ForeignKey('Country', null=False, blank=False, related_name='permanent_cities', on_delete=models.CASCADE)
name = models.CharField(unique=True, max_length=100)
deleted = models.BooleanField()
|
48132de52d573f7f650ab693c1ad0b6007ebfaef
|
cybox/test/common/vocab_test.py
|
cybox/test/common/vocab_test.py
|
import unittest
from cybox.common.vocabs import VocabString
import cybox.test
from cybox.utils import normalize_to_xml
class TestVocabString(unittest.TestCase):
def test_plain(self):
a = VocabString("test_value")
self.assertTrue(a.is_plain())
def test_round_trip(self):
attr_dict = {
'value': "test_value",
'vocab_name': "test_a",
'vocab_reference': "test_b",
'condition': "test_d",
'apply_condition': "test_0",
'bit_mask': "test_1",
'pattern_type': "test_e",
'regex_syntax': "test_f",
'has_changed': "test_j",
'trend': "test_k",
}
attr_obj = VocabString.object_from_dict(attr_dict)
attr_dict2 = VocabString.dict_from_object(attr_obj)
cybox.test.assert_equal_ignore(attr_dict, attr_dict2, ['xsi:type'])
if __name__ == "__main__":
unittest.main()
|
import unittest
from cybox.common.vocabs import VocabString
import cybox.test
from cybox.utils import normalize_to_xml
class TestVocabString(unittest.TestCase):
def test_plain(self):
a = VocabString("test_value")
self.assertTrue(a.is_plain())
def test_round_trip(self):
vocab_dict = {
'value': "test_value",
'vocab_name': "test_a",
'vocab_reference': "test_b",
'condition': "test_d",
# Leave out apply_condition since value is not a list.
'bit_mask': "test_1",
'pattern_type': "test_e",
'regex_syntax': "test_f",
'has_changed': "test_j",
'trend': "test_k",
}
vocab_dict2 = cybox.test.round_trip_dict(VocabString, vocab_dict)
cybox.test.assert_equal_ignore(vocab_dict, vocab_dict2, ['xsi:type'])
def test_round_trip_list(self):
vocab_dict = {
'value': ['Value1', 'Value2', 'Value3'],
'condition': "Equals",
'apply_condition': "ALL",
}
vocab_dict2 = cybox.test.round_trip_dict(VocabString, vocab_dict)
cybox.test.assert_equal_ignore(vocab_dict, vocab_dict2, ['xsi:type'])
if __name__ == "__main__":
unittest.main()
|
Clean up controlled vocab tests
|
Clean up controlled vocab tests
|
Python
|
bsd-3-clause
|
CybOXProject/python-cybox
|
import unittest
from cybox.common.vocabs import VocabString
import cybox.test
from cybox.utils import normalize_to_xml
class TestVocabString(unittest.TestCase):
def test_plain(self):
a = VocabString("test_value")
self.assertTrue(a.is_plain())
def test_round_trip(self):
attr_dict = {
'value': "test_value",
'vocab_name': "test_a",
'vocab_reference': "test_b",
'condition': "test_d",
'apply_condition': "test_0",
'bit_mask': "test_1",
'pattern_type': "test_e",
'regex_syntax': "test_f",
'has_changed': "test_j",
'trend': "test_k",
}
attr_obj = VocabString.object_from_dict(attr_dict)
attr_dict2 = VocabString.dict_from_object(attr_obj)
cybox.test.assert_equal_ignore(attr_dict, attr_dict2, ['xsi:type'])
if __name__ == "__main__":
unittest.main()
Clean up controlled vocab tests
|
import unittest
from cybox.common.vocabs import VocabString
import cybox.test
from cybox.utils import normalize_to_xml
class TestVocabString(unittest.TestCase):
def test_plain(self):
a = VocabString("test_value")
self.assertTrue(a.is_plain())
def test_round_trip(self):
vocab_dict = {
'value': "test_value",
'vocab_name': "test_a",
'vocab_reference': "test_b",
'condition': "test_d",
# Leave out apply_condition since value is not a list.
'bit_mask': "test_1",
'pattern_type': "test_e",
'regex_syntax': "test_f",
'has_changed': "test_j",
'trend': "test_k",
}
vocab_dict2 = cybox.test.round_trip_dict(VocabString, vocab_dict)
cybox.test.assert_equal_ignore(vocab_dict, vocab_dict2, ['xsi:type'])
def test_round_trip_list(self):
vocab_dict = {
'value': ['Value1', 'Value2', 'Value3'],
'condition': "Equals",
'apply_condition': "ALL",
}
vocab_dict2 = cybox.test.round_trip_dict(VocabString, vocab_dict)
cybox.test.assert_equal_ignore(vocab_dict, vocab_dict2, ['xsi:type'])
if __name__ == "__main__":
unittest.main()
|
<commit_before>import unittest
from cybox.common.vocabs import VocabString
import cybox.test
from cybox.utils import normalize_to_xml
class TestVocabString(unittest.TestCase):
def test_plain(self):
a = VocabString("test_value")
self.assertTrue(a.is_plain())
def test_round_trip(self):
attr_dict = {
'value': "test_value",
'vocab_name': "test_a",
'vocab_reference': "test_b",
'condition': "test_d",
'apply_condition': "test_0",
'bit_mask': "test_1",
'pattern_type': "test_e",
'regex_syntax': "test_f",
'has_changed': "test_j",
'trend': "test_k",
}
attr_obj = VocabString.object_from_dict(attr_dict)
attr_dict2 = VocabString.dict_from_object(attr_obj)
cybox.test.assert_equal_ignore(attr_dict, attr_dict2, ['xsi:type'])
if __name__ == "__main__":
unittest.main()
<commit_msg>Clean up controlled vocab tests<commit_after>
|
import unittest
from cybox.common.vocabs import VocabString
import cybox.test
from cybox.utils import normalize_to_xml
class TestVocabString(unittest.TestCase):
def test_plain(self):
a = VocabString("test_value")
self.assertTrue(a.is_plain())
def test_round_trip(self):
vocab_dict = {
'value': "test_value",
'vocab_name': "test_a",
'vocab_reference': "test_b",
'condition': "test_d",
# Leave out apply_condition since value is not a list.
'bit_mask': "test_1",
'pattern_type': "test_e",
'regex_syntax': "test_f",
'has_changed': "test_j",
'trend': "test_k",
}
vocab_dict2 = cybox.test.round_trip_dict(VocabString, vocab_dict)
cybox.test.assert_equal_ignore(vocab_dict, vocab_dict2, ['xsi:type'])
def test_round_trip_list(self):
vocab_dict = {
'value': ['Value1', 'Value2', 'Value3'],
'condition': "Equals",
'apply_condition': "ALL",
}
vocab_dict2 = cybox.test.round_trip_dict(VocabString, vocab_dict)
cybox.test.assert_equal_ignore(vocab_dict, vocab_dict2, ['xsi:type'])
if __name__ == "__main__":
unittest.main()
|
import unittest
from cybox.common.vocabs import VocabString
import cybox.test
from cybox.utils import normalize_to_xml
class TestVocabString(unittest.TestCase):
def test_plain(self):
a = VocabString("test_value")
self.assertTrue(a.is_plain())
def test_round_trip(self):
attr_dict = {
'value': "test_value",
'vocab_name': "test_a",
'vocab_reference': "test_b",
'condition': "test_d",
'apply_condition': "test_0",
'bit_mask': "test_1",
'pattern_type': "test_e",
'regex_syntax': "test_f",
'has_changed': "test_j",
'trend': "test_k",
}
attr_obj = VocabString.object_from_dict(attr_dict)
attr_dict2 = VocabString.dict_from_object(attr_obj)
cybox.test.assert_equal_ignore(attr_dict, attr_dict2, ['xsi:type'])
if __name__ == "__main__":
unittest.main()
Clean up controlled vocab testsimport unittest
from cybox.common.vocabs import VocabString
import cybox.test
from cybox.utils import normalize_to_xml
class TestVocabString(unittest.TestCase):
def test_plain(self):
a = VocabString("test_value")
self.assertTrue(a.is_plain())
def test_round_trip(self):
vocab_dict = {
'value': "test_value",
'vocab_name': "test_a",
'vocab_reference': "test_b",
'condition': "test_d",
# Leave out apply_condition since value is not a list.
'bit_mask': "test_1",
'pattern_type': "test_e",
'regex_syntax': "test_f",
'has_changed': "test_j",
'trend': "test_k",
}
vocab_dict2 = cybox.test.round_trip_dict(VocabString, vocab_dict)
cybox.test.assert_equal_ignore(vocab_dict, vocab_dict2, ['xsi:type'])
def test_round_trip_list(self):
vocab_dict = {
'value': ['Value1', 'Value2', 'Value3'],
'condition': "Equals",
'apply_condition': "ALL",
}
vocab_dict2 = cybox.test.round_trip_dict(VocabString, vocab_dict)
cybox.test.assert_equal_ignore(vocab_dict, vocab_dict2, ['xsi:type'])
if __name__ == "__main__":
unittest.main()
|
<commit_before>import unittest
from cybox.common.vocabs import VocabString
import cybox.test
from cybox.utils import normalize_to_xml
class TestVocabString(unittest.TestCase):
def test_plain(self):
a = VocabString("test_value")
self.assertTrue(a.is_plain())
def test_round_trip(self):
attr_dict = {
'value': "test_value",
'vocab_name': "test_a",
'vocab_reference': "test_b",
'condition': "test_d",
'apply_condition': "test_0",
'bit_mask': "test_1",
'pattern_type': "test_e",
'regex_syntax': "test_f",
'has_changed': "test_j",
'trend': "test_k",
}
attr_obj = VocabString.object_from_dict(attr_dict)
attr_dict2 = VocabString.dict_from_object(attr_obj)
cybox.test.assert_equal_ignore(attr_dict, attr_dict2, ['xsi:type'])
if __name__ == "__main__":
unittest.main()
<commit_msg>Clean up controlled vocab tests<commit_after>import unittest
from cybox.common.vocabs import VocabString
import cybox.test
from cybox.utils import normalize_to_xml
class TestVocabString(unittest.TestCase):
def test_plain(self):
a = VocabString("test_value")
self.assertTrue(a.is_plain())
def test_round_trip(self):
vocab_dict = {
'value': "test_value",
'vocab_name': "test_a",
'vocab_reference': "test_b",
'condition': "test_d",
# Leave out apply_condition since value is not a list.
'bit_mask': "test_1",
'pattern_type': "test_e",
'regex_syntax': "test_f",
'has_changed': "test_j",
'trend': "test_k",
}
vocab_dict2 = cybox.test.round_trip_dict(VocabString, vocab_dict)
cybox.test.assert_equal_ignore(vocab_dict, vocab_dict2, ['xsi:type'])
def test_round_trip_list(self):
vocab_dict = {
'value': ['Value1', 'Value2', 'Value3'],
'condition': "Equals",
'apply_condition': "ALL",
}
vocab_dict2 = cybox.test.round_trip_dict(VocabString, vocab_dict)
cybox.test.assert_equal_ignore(vocab_dict, vocab_dict2, ['xsi:type'])
if __name__ == "__main__":
unittest.main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.