commit stringlengths 40 40 | old_file stringlengths 4 150 | new_file stringlengths 4 150 | old_contents stringlengths 0 3.26k | new_contents stringlengths 1 4.43k | subject stringlengths 15 501 | message stringlengths 15 4.06k | lang stringclasses 4 values | license stringclasses 13 values | repos stringlengths 5 91.5k | diff stringlengths 0 4.35k |
|---|---|---|---|---|---|---|---|---|---|---|
f498eba42dd3d35e9ff0b5240f44fe8df25332ed | law/contrib/cms/tasks.py | law/contrib/cms/tasks.py | # -*- coding: utf-8 -*-
"""
CMS-related tasks.
https://home.cern/about/experiments/cms
"""
__all__ = ["BundleCMSSW"]
import os
import luigi
from law import Task, LocalFileTarget, NO_STR
from law.decorator import log
from law.util import rel_path, interruptable_popen
class BundleCMSSW(Task):
task_namespace = "law.cms"
path = luigi.Parameter(description="the path to the CMSSW checkout to bundle")
exclude = luigi.Parameter(default=NO_STR, description="regular expression for excluding files "
"or directories, relative to the CMSSW checkout path")
def __init__(self, *args, **kwargs):
super(BundleCMSSW, self).__init__(*args, **kwargs)
self.path = os.path.expandvars(os.path.expanduser(os.path.abspath(self.path)))
def output(self):
return LocalFileTarget("{}.tgz".format(os.path.basename(self.path)))
@log
def run(self):
with self.output().localize("w") as tmp:
self.bundle(tmp.path)
def bundle(self, dst_path):
cmd = [rel_path(__file__, "bundle_cmssw.sh"), self.path, dst_path]
if self.exclude != NO_STR:
cmd += [self.exclude]
code = interruptable_popen(cmd)[0]
if code != 0:
raise Exception("cmssw bundling failed")
| # -*- coding: utf-8 -*-
"""
CMS-related tasks.
https://home.cern/about/experiments/cms
"""
__all__ = ["BundleCMSSW"]
import os
import luigi
from law import Task, LocalFileTarget, NO_STR
from law.decorator import log
from law.util import rel_path, interruptable_popen
class BundleCMSSW(Task):
task_namespace = "law.cms"
cmssw_path = luigi.Parameter(description="the path to the CMSSW checkout to bundle")
exclude = luigi.Parameter(default=NO_STR, description="regular expression for excluding files "
"or directories, relative to the CMSSW checkout path")
def __init__(self, *args, **kwargs):
super(BundleCMSSW, self).__init__(*args, **kwargs)
self.cmssw_path = os.path.expandvars(os.path.expanduser(os.path.abspath(self.cmssw_path)))
def output(self):
return LocalFileTarget("{}.tgz".format(os.path.basename(self.cmssw_path)))
@log
def run(self):
with self.output().localize("w") as tmp:
self.bundle(tmp.path)
def bundle(self, dst_path):
cmd = [rel_path(__file__, "bundle_cmssw.sh"), self.cmssw_path, dst_path]
if self.exclude != NO_STR:
cmd += [self.exclude]
code = interruptable_popen(cmd)[0]
if code != 0:
raise Exception("cmssw bundling failed")
| Rename path parameter in contrib.cms.BundleCMSSW. | Rename path parameter in contrib.cms.BundleCMSSW.
| Python | bsd-3-clause | riga/law,riga/law | ---
+++
@@ -23,17 +23,17 @@
task_namespace = "law.cms"
- path = luigi.Parameter(description="the path to the CMSSW checkout to bundle")
+ cmssw_path = luigi.Parameter(description="the path to the CMSSW checkout to bundle")
exclude = luigi.Parameter(default=NO_STR, description="regular expression for excluding files "
"or directories, relative to the CMSSW checkout path")
def __init__(self, *args, **kwargs):
super(BundleCMSSW, self).__init__(*args, **kwargs)
- self.path = os.path.expandvars(os.path.expanduser(os.path.abspath(self.path)))
+ self.cmssw_path = os.path.expandvars(os.path.expanduser(os.path.abspath(self.cmssw_path)))
def output(self):
- return LocalFileTarget("{}.tgz".format(os.path.basename(self.path)))
+ return LocalFileTarget("{}.tgz".format(os.path.basename(self.cmssw_path)))
@log
def run(self):
@@ -41,7 +41,7 @@
self.bundle(tmp.path)
def bundle(self, dst_path):
- cmd = [rel_path(__file__, "bundle_cmssw.sh"), self.path, dst_path]
+ cmd = [rel_path(__file__, "bundle_cmssw.sh"), self.cmssw_path, dst_path]
if self.exclude != NO_STR:
cmd += [self.exclude]
|
65d6a7ef78b013b261a9a1ad0cfe4756d5cfd675 | redditanalyser/__init__.py | redditanalyser/__init__.py | # -*- coding: utf-8 -*-
import logging
import praw
from settings import Config
from .database import create_db_session
logging.basicConfig(level="INFO")
logger = logging.getLogger(__name__)
# Project configuration settings
cfg = Config()
if not cfg.USERNAME:
logger.error("Username in settings must be set. Exiting...")
sys.exit()
# setup DB session
session = create_db_session(cfg.SQLALCHEMY_DATABASE_URI)
# setup PRAW handler
handler = None
if cfg.MULTIPROCESS:
handler = praw.handlers.MultiprocessHandler()
# setup and open connection to Reddit
user_agent = "Reddit analytics scraper by /u/{}".format(cfg.USERNAME)
reddit = praw.Reddit(user_agent=user_agent, handler=handler)
reddit.config.decode_html_entities = True
# Attributes of interest for comment objects
# note: including `author` slows comment requests considerably
COMMENT_ATTRS = [
'id',
'created_utc',
# 'author',
'body',
'score',
'ups',
'downs',
'subreddit',
'subreddit_id',
'controversiality',
'is_root',
'parent_id',
'gilded',
'permalink',
]
| # -*- coding: utf-8 -*-
import logging
import praw
from settings import Config
from .database import create_db_session
logging.basicConfig(level="WARNING")
logger = logging.getLogger(__name__)
# Project configuration settings
cfg = Config()
if not cfg.USERNAME:
logger.error("Username in settings must be set. Exiting...")
sys.exit()
# setup DB session
session = create_db_session(cfg.SQLALCHEMY_DATABASE_URI)
# setup PRAW handler
handler = None
if cfg.MULTIPROCESS:
handler = praw.handlers.MultiprocessHandler()
# setup and open connection to Reddit
user_agent = "Reddit analytics scraper by /u/{}".format(cfg.USERNAME)
reddit = praw.Reddit(user_agent=user_agent, handler=handler)
reddit.config.decode_html_entities = True
# Attributes of interest for comment objects
# note: including `author` slows comment requests considerably
COMMENT_ATTRS = [
'id',
'created_utc',
# 'author',
'body',
'score',
'ups',
'downs',
'subreddit',
'subreddit_id',
'controversiality',
'is_root',
'parent_id',
'gilded',
'permalink',
]
| Revert log level setup to WARN | Revert log level setup to WARN
| Python | mit | PsyBorgs/redditanalyser,PsyBorgs/redditanalyser | ---
+++
@@ -7,7 +7,7 @@
from .database import create_db_session
-logging.basicConfig(level="INFO")
+logging.basicConfig(level="WARNING")
logger = logging.getLogger(__name__)
# Project configuration settings |
ff02f0e8a7b62d5afdc88730129a5e0811fb5a82 | monitor/monitor_test.py | monitor/monitor_test.py | #!/usr/bin/python
import unittest
import monitor
class TestMonitor(unittest.TestCase):
def test_attr(self):
mon = monitor.Monitor()
assert mon.pvprefix == "MON-CONTROL:"
assert mon.monitorname == "PI1"
def test_testimage(self):
mon = monitor.Monitor()
image = mon.testimage()
assert len(image) != 0
# print image
def test_readPV(self):
mon = monitor.Monitor()
pv_result = mon.readPV()
assert pv_result == "CAM1"
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/python
import unittest
from mock import patch, MagicMock, PropertyMock
from monitor import Monitor
class MonitorUpdateTest(unittest.TestCase):
def setUp(self):
with patch('monitor.pv.PV'):
mock_plotter = MagicMock()
self.monitor = Monitor("MYNAME", mock_plotter)
def test_does_nothing_if_camera_is_None(self):
self.monitor.camera = None
try:
self.monitor._update_image()
self.assertTrue(True)
except Exception as ex:
self.fail("Unexpected expection thrown" + str(ex))
def test_gets_image_data_from_camera_when_not_None(self):
mock_camera = MagicMock()
self.monitor.camera = mock_camera
self.monitor._update_image()
mock_camera.get_image_data.assert_called_once_with()
def test_get_size_data_from_camera_when_not_None(self):
mock_camera = MagicMock()
mock_xsize = PropertyMock(return_value=100)
mock_ysize = PropertyMock(return_value=200)
type(mock_camera).xsize = mock_xsize
type(mock_camera).ysize = mock_ysize
self.monitor.camera = mock_camera
self.monitor._update_image()
mock_xsize.assert_called_once_with()
mock_ysize.assert_called_once_with()
def test_calls_plotter_with_image_and_size_data(self):
data = 111
xsize = 100
ysize = 200
mock_camera = MagicMock(xsize=xsize, ysize=ysize)
mock_camera.get_image_data = MagicMock(return_value=data)
self.monitor.camera = mock_camera
self.monitor._update_image()
self.monitor.plotter.show.assert_called_once_with(data, xsize, ysize)
if __name__ == '__main__':
unittest.main()
| Rewrite monitortests. Add tests for update_image | Rewrite monitortests. Add tests for update_image
| Python | apache-2.0 | nickbattam/picamon,nickbattam/picamon,nickbattam/picamon,nickbattam/picamon | ---
+++
@@ -1,26 +1,63 @@
#!/usr/bin/python
import unittest
-import monitor
+from mock import patch, MagicMock, PropertyMock
-class TestMonitor(unittest.TestCase):
+from monitor import Monitor
- def test_attr(self):
- mon = monitor.Monitor()
- assert mon.pvprefix == "MON-CONTROL:"
- assert mon.monitorname == "PI1"
- def test_testimage(self):
- mon = monitor.Monitor()
- image = mon.testimage()
- assert len(image) != 0
-# print image
+class MonitorUpdateTest(unittest.TestCase):
- def test_readPV(self):
- mon = monitor.Monitor()
- pv_result = mon.readPV()
- assert pv_result == "CAM1"
+ def setUp(self):
+ with patch('monitor.pv.PV'):
+ mock_plotter = MagicMock()
+ self.monitor = Monitor("MYNAME", mock_plotter)
+
+ def test_does_nothing_if_camera_is_None(self):
+ self.monitor.camera = None
+
+ try:
+ self.monitor._update_image()
+ self.assertTrue(True)
+ except Exception as ex:
+ self.fail("Unexpected expection thrown" + str(ex))
+
+
+ def test_gets_image_data_from_camera_when_not_None(self):
+ mock_camera = MagicMock()
+ self.monitor.camera = mock_camera
+
+ self.monitor._update_image()
+
+ mock_camera.get_image_data.assert_called_once_with()
+
+ def test_get_size_data_from_camera_when_not_None(self):
+ mock_camera = MagicMock()
+
+ mock_xsize = PropertyMock(return_value=100)
+ mock_ysize = PropertyMock(return_value=200)
+ type(mock_camera).xsize = mock_xsize
+ type(mock_camera).ysize = mock_ysize
+
+ self.monitor.camera = mock_camera
+
+ self.monitor._update_image()
+
+ mock_xsize.assert_called_once_with()
+ mock_ysize.assert_called_once_with()
+
+ def test_calls_plotter_with_image_and_size_data(self):
+ data = 111
+ xsize = 100
+ ysize = 200
+ mock_camera = MagicMock(xsize=xsize, ysize=ysize)
+ mock_camera.get_image_data = MagicMock(return_value=data)
+ self.monitor.camera = mock_camera
+
+ self.monitor._update_image()
+
+ self.monitor.plotter.show.assert_called_once_with(data, xsize, ysize)
if __name__ == '__main__':
- unittest.main()
+ unittest.main() |
14f5842a6ae598253f30a711983985ce60c17e04 | localore/search/views.py | localore/search/views.py | from django.shortcuts import render
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch.models import Query
def search(request):
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
return render(request, 'search/search.html', {
'search_query': search_query,
'search_results': search_results,
})
| from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import JsonResponse
from django.shortcuts import render
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch.models import Query
def search(request):
json = request.GET.get('json', False)
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
response = {
'search_query': search_query,
'search_results': search_results,
}
if json:
response['search_results'] = [
dict(
(attr, getattr(result.specific, attr))
for attr in ['title', 'url']
if hasattr(result.specific, attr)
) for result in response['search_results']
]
return JsonResponse(response)
else:
return render(request, 'search/search.html', response)
| Add support for returning search results as JSON. | Add support for returning search results as JSON.
| Python | mpl-2.0 | ghostwords/localore,ghostwords/localore,ghostwords/localore | ---
+++
@@ -1,11 +1,13 @@
+from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
+from django.http import JsonResponse
from django.shortcuts import render
-from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch.models import Query
def search(request):
+ json = request.GET.get('json', False)
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
@@ -28,7 +30,20 @@
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
- return render(request, 'search/search.html', {
+ response = {
'search_query': search_query,
'search_results': search_results,
- })
+ }
+
+ if json:
+ response['search_results'] = [
+ dict(
+ (attr, getattr(result.specific, attr))
+ for attr in ['title', 'url']
+ if hasattr(result.specific, attr)
+ ) for result in response['search_results']
+ ]
+
+ return JsonResponse(response)
+ else:
+ return render(request, 'search/search.html', response) |
6741f6746fe24c91b791ed35048e750d9c52d5ce | scripts/utils.py | scripts/utils.py | # -*- coding: utf-8 -*-
import os
import logging
import datetime
from website import settings
def format_now():
return datetime.datetime.now().isoformat()
def add_file_logger(logger, script_name, suffix=None):
_, name = os.path.split(script_name)
if suffix is not None:
name = '{0}-{1}'.format(name, suffix)
file_handler = logging.FileHandler(
os.path.join(
settings.LOG_PATH,
'.'.join([name, format_now(), 'log'])
)
)
logger.addHandler(file_handler)
| # -*- coding: utf-8 -*-
import os
import logging
import datetime
from website import settings
def format_now():
return datetime.datetime.now().isoformat()
def add_file_logger(logger, script_name, suffix=None):
_, name = os.path.split(script_name)
name = name.rstrip('c')
if suffix is not None:
name = '{0}-{1}'.format(name, suffix)
file_handler = logging.FileHandler(
os.path.join(
settings.LOG_PATH,
'.'.join([name, format_now(), 'log'])
)
)
logger.addHandler(file_handler)
| Make script log file names more consistent | Make script log file names more consistent
...by stripping c's of filenames, so that we get
script_name.py-timestamp rather than script_name.pyc-timestamp
| Python | apache-2.0 | caneruguz/osf.io,kch8qx/osf.io,cwisecarver/osf.io,rdhyee/osf.io,crcresearch/osf.io,abought/osf.io,TomBaxter/osf.io,kwierman/osf.io,mluke93/osf.io,alexschiller/osf.io,laurenrevere/osf.io,DanielSBrown/osf.io,mluo613/osf.io,alexschiller/osf.io,crcresearch/osf.io,Nesiehr/osf.io,cslzchen/osf.io,leb2dg/osf.io,caseyrollins/osf.io,monikagrabowska/osf.io,jnayak1/osf.io,aaxelb/osf.io,abought/osf.io,kwierman/osf.io,amyshi188/osf.io,samchrisinger/osf.io,acshi/osf.io,hmoco/osf.io,doublebits/osf.io,DanielSBrown/osf.io,icereval/osf.io,doublebits/osf.io,doublebits/osf.io,chennan47/osf.io,abought/osf.io,brianjgeiger/osf.io,wearpants/osf.io,samchrisinger/osf.io,kch8qx/osf.io,TomHeatwole/osf.io,mluo613/osf.io,kch8qx/osf.io,mluo613/osf.io,rdhyee/osf.io,TomHeatwole/osf.io,brianjgeiger/osf.io,SSJohns/osf.io,cwisecarver/osf.io,RomanZWang/osf.io,chrisseto/osf.io,caseyrollins/osf.io,doublebits/osf.io,caneruguz/osf.io,TomBaxter/osf.io,brianjgeiger/osf.io,jnayak1/osf.io,TomHeatwole/osf.io,binoculars/osf.io,TomBaxter/osf.io,mattclark/osf.io,zachjanicki/osf.io,Nesiehr/osf.io,sloria/osf.io,CenterForOpenScience/osf.io,acshi/osf.io,cwisecarver/osf.io,erinspace/osf.io,adlius/osf.io,leb2dg/osf.io,icereval/osf.io,monikagrabowska/osf.io,monikagrabowska/osf.io,emetsger/osf.io,zachjanicki/osf.io,kwierman/osf.io,zachjanicki/osf.io,mfraezz/osf.io,zamattiac/osf.io,alexschiller/osf.io,wearpants/osf.io,icereval/osf.io,acshi/osf.io,zamattiac/osf.io,crcresearch/osf.io,leb2dg/osf.io,caneruguz/osf.io,mluke93/osf.io,chrisseto/osf.io,SSJohns/osf.io,amyshi188/osf.io,CenterForOpenScience/osf.io,acshi/osf.io,rdhyee/osf.io,mfraezz/osf.io,erinspace/osf.io,hmoco/osf.io,emetsger/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,felliott/osf.io,felliott/osf.io,monikagrabowska/osf.io,mluke93/osf.io,HalcyonChimera/osf.io,samchrisinger/osf.io,zachjanicki/osf.io,adlius/osf.io,aaxelb/osf.io,HalcyonChimera/osf.io,jnayak1/osf.io,wearpants/osf.io,felliott/osf.io,TomHeatwole/osf.io,jnayak1/osf.io,emetsger/osf.io,R
omanZWang/osf.io,sloria/osf.io,mfraezz/osf.io,acshi/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,kch8qx/osf.io,mluo613/osf.io,amyshi188/osf.io,binoculars/osf.io,abought/osf.io,binoculars/osf.io,mluke93/osf.io,Nesiehr/osf.io,baylee-d/osf.io,RomanZWang/osf.io,DanielSBrown/osf.io,alexschiller/osf.io,RomanZWang/osf.io,Johnetordoff/osf.io,kwierman/osf.io,cwisecarver/osf.io,caneruguz/osf.io,amyshi188/osf.io,chennan47/osf.io,hmoco/osf.io,chennan47/osf.io,cslzchen/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,SSJohns/osf.io,caseyrollins/osf.io,adlius/osf.io,laurenrevere/osf.io,leb2dg/osf.io,mattclark/osf.io,Nesiehr/osf.io,felliott/osf.io,chrisseto/osf.io,mattclark/osf.io,pattisdr/osf.io,mluo613/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,sloria/osf.io,DanielSBrown/osf.io,rdhyee/osf.io,laurenrevere/osf.io,wearpants/osf.io,baylee-d/osf.io,HalcyonChimera/osf.io,saradbowman/osf.io,cslzchen/osf.io,SSJohns/osf.io,monikagrabowska/osf.io,mfraezz/osf.io,pattisdr/osf.io,baylee-d/osf.io,emetsger/osf.io,hmoco/osf.io,Johnetordoff/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,saradbowman/osf.io,RomanZWang/osf.io,chrisseto/osf.io,CenterForOpenScience/osf.io,kch8qx/osf.io,samchrisinger/osf.io,alexschiller/osf.io,erinspace/osf.io,zamattiac/osf.io,zamattiac/osf.io,doublebits/osf.io | ---
+++
@@ -13,6 +13,7 @@
def add_file_logger(logger, script_name, suffix=None):
_, name = os.path.split(script_name)
+ name = name.rstrip('c')
if suffix is not None:
name = '{0}-{1}'.format(name, suffix)
file_handler = logging.FileHandler( |
462acba741c929ae79d5c2e6f6fc98307f556ec9 | shopping_list.py | shopping_list.py | shopping_list = []
def show_help():
print("What should we pick up at the store?")
print("Enter DONE to stop. Enter HELP for this help. Enter SHOW to see your current list.")
def add_to_list(item):
shopping_list.append(item)
print("Added! List has {} items.".format(len(shopping_list)))
| shopping_list = []
def show_help():
print("What should we pick up at the store?")
print("Enter DONE to stop. Enter HELP for this help. Enter SHOW to see your current list.")
def add_to_list(item):
shopping_list.append(item)
print("Added! List has {} items.".format(len(shopping_list)))
def show_list():
print("Here's your list:")
for item in shopping_list:
print(item)
| Implement method to show contents of the list. | Implement method to show contents of the list.
| Python | mit | adityatrivedi/shopping-list | ---
+++
@@ -8,4 +8,8 @@
shopping_list.append(item)
print("Added! List has {} items.".format(len(shopping_list)))
+def show_list():
+ print("Here's your list:")
+ for item in shopping_list:
+ print(item)
|
2f77a49d4a8393b1de14aac29b41204e8006876e | project/members/rest.py | project/members/rest.py | from rest_framework import viewsets, serializers
from .models import MemberType, Member, MembershipApplicationTag, MembershipApplication
class MemberTypeSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = MemberType
class MemberTypeViewSet(viewsets.ModelViewSet):
serializer_class = MemberTypeSerializer
queryset = MemberType.objects.all()
class MembershipApplicationTagSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = MembershipApplicationTag
class MembershipApplicationTagViewSet(viewsets.ModelViewSet):
serializer_class = MembershipApplicationTagSerializer
queryset = MembershipApplicationTag.objects.all()
class MemberSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Member
class MemberViewSet(viewsets.ModelViewSet):
serializer_class = MemberSerializer
queryset = Member.objects.all()
class MembershipApplicationSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = MembershipApplication
class MembershipApplicationSerializerViewSet(viewsets.ModelViewSet):
serializer_class = MembershipApplicationSerializer
queryset = MembershipApplication.objects.all()
| from rest_framework import viewsets, serializers
from .models import MemberType, Member, MembershipApplicationTag, MembershipApplication
class MemberTypeSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = MemberType
class MemberTypeViewSet(viewsets.ModelViewSet):
serializer_class = MemberTypeSerializer
queryset = MemberType.objects.all()
class MembershipApplicationTagSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = MembershipApplicationTag
class MembershipApplicationTagViewSet(viewsets.ModelViewSet):
serializer_class = MembershipApplicationTagSerializer
queryset = MembershipApplicationTag.objects.all()
class MemberSerializer(serializers.HyperlinkedModelSerializer):
credit = serializers.CharField(read_only=True)
class Meta:
model = Member
fields = '__all__'
class MemberViewSet(viewsets.ModelViewSet):
serializer_class = MemberSerializer
queryset = Member.objects.all()
class MembershipApplicationSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = MembershipApplication
class MembershipApplicationSerializerViewSet(viewsets.ModelViewSet):
serializer_class = MembershipApplicationSerializer
queryset = MembershipApplication.objects.all()
| Add credit property to member REST API response | Add credit property to member REST API response
| Python | mit | jautero/asylum,HelsinkiHacklab/asylum,rambo/asylum,ojousima/asylum,rambo/asylum,hacklab-fi/asylum,rambo/asylum,HelsinkiHacklab/asylum,hacklab-fi/asylum,ojousima/asylum,HelsinkiHacklab/asylum,hacklab-fi/asylum,jautero/asylum,HelsinkiHacklab/asylum,jautero/asylum,hacklab-fi/asylum,ojousima/asylum,ojousima/asylum,rambo/asylum,jautero/asylum | ---
+++
@@ -18,8 +18,10 @@
queryset = MembershipApplicationTag.objects.all()
class MemberSerializer(serializers.HyperlinkedModelSerializer):
+ credit = serializers.CharField(read_only=True)
class Meta:
model = Member
+ fields = '__all__'
class MemberViewSet(viewsets.ModelViewSet):
serializer_class = MemberSerializer |
3f5bd92a11ed69592e21888838088f8baa0d6575 | makefiles_plain_python.py | makefiles_plain_python.py | #!/usr/bin/python3
def make_a_file(i):
f = "out_python/{:05d}".format(i)
try:
with open(f, "w") as fp:
fp.write("hello")
# print("wrote", f)
except:
print("failed on", f)
def make_a_lot_of_files(how_many):
for i in range(how_many):
make_a_file(i)
return None
print(make_a_lot_of_files(100000))
| #!/usr/bin/python3
import asyncio
@asyncio.coroutine
def make_a_file(i):
f = "out_python/{:05d}".format(i)
try:
with open(f, "w") as fp:
fp.write("hello")
# print("wrote", f)
except:
print("failed on", f)
def make_a_lot_of_files(how_many):
loop = asyncio.get_event_loop()
tasks = [asyncio.ensure_future(make_a_file(i)) for i in range(how_many)]
loop.run_until_complete(asyncio.wait(tasks))
if __name__ == "__main__":
from sys import argv
how_many = int(argv[1]) if len(argv) == 2 else 100000
make_a_lot_of_files(how_many)
| Use asyncio in plain python example. | Use asyncio in plain python example.
| Python | mit | reedwade/python-golang-linking,reedwade/python-golang-linking | ---
+++
@@ -1,5 +1,8 @@
#!/usr/bin/python3
+import asyncio
+
+@asyncio.coroutine
def make_a_file(i):
f = "out_python/{:05d}".format(i)
try:
@@ -11,8 +14,12 @@
def make_a_lot_of_files(how_many):
- for i in range(how_many):
- make_a_file(i)
- return None
+ loop = asyncio.get_event_loop()
+ tasks = [asyncio.ensure_future(make_a_file(i)) for i in range(how_many)]
+ loop.run_until_complete(asyncio.wait(tasks))
-print(make_a_lot_of_files(100000))
+
+if __name__ == "__main__":
+ from sys import argv
+ how_many = int(argv[1]) if len(argv) == 2 else 100000
+ make_a_lot_of_files(how_many) |
ef6b10e2a01c42fa12d067eae86429dab756f534 | raiden/constants.py | raiden/constants.py | # -*- coding: utf-8 -*-
UINT64_MAX = 2 ** 64 - 1
UINT64_MIN = 0
INT64_MAX = 2 ** 63 - 1
INT64_MIN = -(2 ** 63)
UINT256_MAX = 2 ** 256 - 1
# Deployed to Ropsten revival on 2017-08-18 from commit 01554102f0a52fc5aec3f41dc46d53017108b400
ROPSTEN_REGISTRY_ADDRESS = '7205a22f083a12d1b22ee89d7e892d23b1f1438a'
ROPSTEN_DISCOVERY_ADDRESS = '1ed4eab14a09ba2f334d9ed579a5ee4ae57aec45'
DISCOVERY_REGISTRATION_GAS = 500000
MINUTE_SEC = 60
MINUTE_MS = 60 * 1000
NETTINGCHANNEL_SETTLE_TIMEOUT_MIN = 6
# TODO: add this as an attribute of the transport class
UDP_MAX_MESSAGE_SIZE = 1200
| # -*- coding: utf-8 -*-
UINT64_MAX = 2 ** 64 - 1
UINT64_MIN = 0
INT64_MAX = 2 ** 63 - 1
INT64_MIN = -(2 ** 63)
UINT256_MAX = 2 ** 256 - 1
# Deployed to Ropsten revival on 2017-09-03 from commit f4f8dcbe791b7be8bc15475f79ad9cbbfe15435b
ROPSTEN_REGISTRY_ADDRESS = 'ce30a13daa47c0f35631e5ed750e39c12172f325'
ROPSTEN_DISCOVERY_ADDRESS = 'aecb64f87c7fa12d983e541eabb0064fc9d87c4f'
DISCOVERY_REGISTRATION_GAS = 500000
MINUTE_SEC = 60
MINUTE_MS = 60 * 1000
NETTINGCHANNEL_SETTLE_TIMEOUT_MIN = 6
# TODO: add this as an attribute of the transport class
UDP_MAX_MESSAGE_SIZE = 1200
| Deploy smart contracts v0.0.6 to Ropsten | Deploy smart contracts v0.0.6 to Ropsten | Python | mit | hackaugusto/raiden,tomashaber/raiden,tomashaber/raiden,tomashaber/raiden,hackaugusto/raiden,tomashaber/raiden,tomashaber/raiden | ---
+++
@@ -8,9 +8,9 @@
UINT256_MAX = 2 ** 256 - 1
-# Deployed to Ropsten revival on 2017-08-18 from commit 01554102f0a52fc5aec3f41dc46d53017108b400
-ROPSTEN_REGISTRY_ADDRESS = '7205a22f083a12d1b22ee89d7e892d23b1f1438a'
-ROPSTEN_DISCOVERY_ADDRESS = '1ed4eab14a09ba2f334d9ed579a5ee4ae57aec45'
+# Deployed to Ropsten revival on 2017-09-03 from commit f4f8dcbe791b7be8bc15475f79ad9cbbfe15435b
+ROPSTEN_REGISTRY_ADDRESS = 'ce30a13daa47c0f35631e5ed750e39c12172f325'
+ROPSTEN_DISCOVERY_ADDRESS = 'aecb64f87c7fa12d983e541eabb0064fc9d87c4f'
DISCOVERY_REGISTRATION_GAS = 500000
|
f4a2068703ea0e32c4e62c3451d2c6bce0922b1c | movies/test/test_views.py | movies/test/test_views.py | from django.test import TestCase
from rest_framework.test import APIClient
from movies.test import factories
class MovieViewSetTestCase(TestCase):
def setUp(self):
self.api = APIClient()
self.movies = factories.MoviesFactory.create()
def test_entry(self):
response = self.api.get('/movies/').json()
self.assertListEqual(response,
[{'rated': 'For None', 'released': '1991-04-20', 'title': 'The ultimate hippie movies',
'runtime': '1337', 'imdb_id': 'tt1111111', 'orig_language': 'en'},])
| from django.test import TestCase
from rest_framework.test import APIClient
from movies.test import factories
class MovieViewSetTestCase(TestCase):
def setUp(self):
self.api = APIClient()
self.movies = factories.MoviesFactory.create()
def test_entry(self):
response = self.api.get('/movies/').json()
self.assertListEqual(response,
[{'rated': 'For Non', 'released': '1991-04-20', 'title': 'The ultimate hippie movies',
'runtime': '1337', 'imdb_id': 'tt1111111', 'orig_language': 'en'},])
| Test will fail, will jenkins run? | Test will fail, will jenkins run?
| Python | mit | kinoreel/kinoreel-backend,kinoreel/kinoreel-backend | ---
+++
@@ -14,5 +14,5 @@
response = self.api.get('/movies/').json()
self.assertListEqual(response,
- [{'rated': 'For None', 'released': '1991-04-20', 'title': 'The ultimate hippie movies',
+ [{'rated': 'For Non', 'released': '1991-04-20', 'title': 'The ultimate hippie movies',
'runtime': '1337', 'imdb_id': 'tt1111111', 'orig_language': 'en'},]) |
fc9970e97575af85384fedc58cdc1ff9800628cc | neo/test/rawiotest/test_alphaomegarawio.py | neo/test/rawiotest/test_alphaomegarawio.py | """
Tests of neo.rawio.examplerawio
Note for dev:
if you write a new RawIO class your need to put some file
to be tested at g-node portal, Ask neuralensemble list for that.
The file need to be small.
Then you have to copy/paste/renamed the TestExampleRawIO
class and a full test will be done to test if the new coded IO
is compliant with the RawIO API.
If you have problems, do not hesitate to ask help github (prefered)
of neuralensemble list.
Note that same mechanism is used a neo.io API so files are tested
several time with neo.rawio (numpy buffer) and neo.io (neo object tree).
See neo.test.iotest.*
Author: Samuel Garcia
"""
import logging
import unittest
from neo.rawio.alphaomegarawio import AlphaOmegaRawIO
from neo.test.rawiotest.common_rawio_test import BaseTestRawIO
logging.getLogger().setLevel(logging.INFO)
class TestAlphaOmegaRawIO(BaseTestRawIO, unittest.TestCase):
rawioclass = AlphaOmegaRawIO
entities_to_download = [
"alphaomega",
]
entities_to_test = [
"alphaomega/",
]
if __name__ == "__main__":
unittest.main()
| """
Tests of neo.rawio.examplerawio
Note for dev:
if you write a new RawIO class your need to put some file
to be tested at g-node portal, Ask neuralensemble list for that.
The file need to be small.
Then you have to copy/paste/renamed the TestExampleRawIO
class and a full test will be done to test if the new coded IO
is compliant with the RawIO API.
If you have problems, do not hesitate to ask help github (prefered)
of neuralensemble list.
Note that same mechanism is used a neo.io API so files are tested
several time with neo.rawio (numpy buffer) and neo.io (neo object tree).
See neo.test.iotest.*
Author: Samuel Garcia
"""
import logging
import unittest
from neo.rawio.alphaomegarawio import AlphaOmegaRawIO
from neo.test.rawiotest.common_rawio_test import BaseTestRawIO
logging.getLogger().setLevel(logging.INFO)
class TestAlphaOmegaRawIO(BaseTestRawIO, unittest.TestCase):
rawioclass = AlphaOmegaRawIO
entities_to_download = [
"alphaomega",
]
entities_to_test = [
"alphaomega/mpx_map_version4",
]
if __name__ == "__main__":
unittest.main()
| Use correct test data path | Use correct test data path
| Python | bsd-3-clause | samuelgarcia/python-neo,apdavison/python-neo,JuliaSprenger/python-neo,INM-6/python-neo,NeuralEnsemble/python-neo | ---
+++
@@ -41,7 +41,7 @@
]
entities_to_test = [
- "alphaomega/",
+ "alphaomega/mpx_map_version4",
]
|
a9330038b2dc56e34ccc5ecdfb297adfc50ec005 | nlpipe/modules/corefnl.py | nlpipe/modules/corefnl.py | """
Wrapper around antske/coref_draft.
Input should be NAF files parsed by alpino (e.g. alpinonerc)
"""
from KafNafParserPy import KafNafParser
from multisieve_coreference import process_coreference
from io import BytesIO
import logging
from nlpipe.module import Module
log = logging.getLogger(__name__)
class CorefNL(Module):
name = "corefnl"
def process(self, text):
inb = BytesIO(text.encode("utf-8"))
naf = KafNafParser(inb)
naf = process_coreference(naf)
b = BytesIO()
naf.dump(b)
return b.getvalue().decode("utf-8")
CorefNL.register()
| """
Wrapper around antske/coref_draft.
Input should be NAF files parsed by alpino (e.g. alpinonerc)
"""
import logging
from nlpipe.module import Module
import subprocess
import os
log = logging.getLogger(__name__)
class CorefNL(Module):
name = "corefnl"
def check_status(self):
if 'COREF_HOME' not in os.environ:
raise Exception("COREF_HOME not set!")
coref_home = os.environ['COREF_HOME']
if not os.path.exists(coref_home):
raise Exception("Coref not found at COREF_HOME={coref_home}".format(**locals()))
def process(self, text):
coref_home = os.environ['COREF_HOME']
command = [os.path.join(coref_home, "env/bin/python"),
"-m", "multisieve_coreference.resolve_coreference"]
p = subprocess.Popen(command, shell=False, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = [x.decode("utf-8") for x in p.communicate(text.encode("utf-8"))]
if err:
raise Exception(err)
if not out:
raise Exception("No output from coreference and no error message")
return out
CorefNL.register()
| Change coref to external call | Change coref to external call
| Python | mit | vanatteveldt/nlpipe,vanatteveldt/nlpipe,vanatteveldt/nlpipe | ---
+++
@@ -3,11 +3,10 @@
Input should be NAF files parsed by alpino (e.g. alpinonerc)
"""
-from KafNafParserPy import KafNafParser
-from multisieve_coreference import process_coreference
-from io import BytesIO
import logging
from nlpipe.module import Module
+import subprocess
+import os
log = logging.getLogger(__name__)
@@ -15,12 +14,27 @@
class CorefNL(Module):
name = "corefnl"
+ def check_status(self):
+ if 'COREF_HOME' not in os.environ:
+ raise Exception("COREF_HOME not set!")
+ coref_home = os.environ['COREF_HOME']
+ if not os.path.exists(coref_home):
+ raise Exception("Coref not found at COREF_HOME={coref_home}".format(**locals()))
+
def process(self, text):
- inb = BytesIO(text.encode("utf-8"))
- naf = KafNafParser(inb)
- naf = process_coreference(naf)
- b = BytesIO()
- naf.dump(b)
- return b.getvalue().decode("utf-8")
+ coref_home = os.environ['COREF_HOME']
+ command = [os.path.join(coref_home, "env/bin/python"),
+ "-m", "multisieve_coreference.resolve_coreference"]
+ p = subprocess.Popen(command, shell=False, stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+
+ out, err = [x.decode("utf-8") for x in p.communicate(text.encode("utf-8"))]
+ if err:
+ raise Exception(err)
+ if not out:
+ raise Exception("No output from coreference and no error message")
+
+ return out
+
CorefNL.register() |
d0a907706b8f21e63818375a669c6a8fbe398a5f | project_closing/__openerp__.py | project_closing/__openerp__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 Camtpcaomp
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Project closing",
"version": "1.1",
"author": "Camptocamp",
"website": "http://www.camptocamp.com",
"category": "project Management",
"depends": ["project"],
"description": """
Automatic account analytic closing when related project is closed.
and If a projet is open, the related analytic account will be re-open.
""",
"data": [],
'installable': True,
}
| # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 Camtpcaomp
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Project closing",
"version": "1.1",
"author": "Camptocamp,Odoo Community Association (OCA)",
"website": "http://www.camptocamp.com",
"category": "project Management",
"depends": ["project"],
"description": """
Automatic account analytic closing when related project is closed.
and If a projet is open, the related analytic account will be re-open.
""",
"data": [],
'installable': True,
}
| Add OCA as author of OCA addons | Add OCA as author of OCA addons
In order to get visibility on https://www.odoo.com/apps the OCA board has
decided to add the OCA as author of all the addons maintained as part of the
association.
| Python | agpl-3.0 | Endika/project,acsone/project-service,yelizariev/project-service,dreispt/project,OCA/project-service,Antiun/project,dreispt/project-service,ddico/project,eezee-it/project-service,incaser/project,sergiocorato/project-service,raycarnes/project,Antiun/project-service,NeovaHealth/project-service,xpansa/project-service,acsone/project,akretion/project-service | ---
+++
@@ -21,7 +21,7 @@
{
"name": "Project closing",
"version": "1.1",
- "author": "Camptocamp",
+ "author": "Camptocamp,Odoo Community Association (OCA)",
"website": "http://www.camptocamp.com",
"category": "project Management",
"depends": ["project"], |
495ea3347eec9d1a902779de8dd07ba91aa48f60 | server/validator.py | server/validator.py | from girder.api import access
from girder.api.rest import Resource
from girder.api.describe import Description
class Validator(Resource):
def __init__(self, celeryApp):
self.resourceName = 'romanesco_validator'
self.route('GET', (), self.find)
self.celeryApp = celeryApp
@access.public
def find(self, params):
return self.celeryApp.send_task('romanesco.validators', [
params.get('type', None),
params.get('format', None)]).get()
find.description = (
Description('List or search for validators.')
.param('type', 'Find validators with this type.', required=False)
.param('format', 'Find validators with this format.', required=False)
)
| from girder.api import access
from girder.api.rest import Resource
from girder.api.describe import Description
class Validator(Resource):
def __init__(self, celeryApp):
super(Validator, self).__init__()
self.resourceName = 'romanesco_validator'
self.route('GET', (), self.find)
self.celeryApp = celeryApp
@access.public
def find(self, params):
return self.celeryApp.send_task('romanesco.validators', [
params.get('type', None),
params.get('format', None)]).get()
find.description = (
Description('List or search for validators.')
.param('type', 'Find validators with this type.', required=False)
.param('format', 'Find validators with this format.', required=False)
)
| Call Resource constructor from Validator, fixing a Girder warning | Call Resource constructor from Validator, fixing a Girder warning
| Python | apache-2.0 | Kitware/romanesco,Kitware/romanesco,Kitware/romanesco,Kitware/romanesco | ---
+++
@@ -5,6 +5,7 @@
class Validator(Resource):
def __init__(self, celeryApp):
+ super(Validator, self).__init__()
self.resourceName = 'romanesco_validator'
self.route('GET', (), self.find)
self.celeryApp = celeryApp |
5ffdb8b395576ffaff4dd948361b6baeffa1072c | pypeerassets/networks.py | pypeerassets/networks.py | from collections import namedtuple
Network = namedtuple('Network', [
'network_name',
'network_shortname',
'pubkeyhash',
'wif_prefix',
'scripthash',
'magicbytes'
])
networks = (
# Peercoin mainnet
Network("Peercoin", "ppc", b'37', b'b7', b'75', b'e6e8e9e5'),
# Peercoin testnet
Network("Peercoin-testnet", "tppc", b'6f', b'ef', b'c4', b'cbf2c0ef')
)
def query(query):
'''find matching parameter among the networks'''
for network in networks:
for field in network:
if field == query:
return network
| from collections import namedtuple
Network = namedtuple('Network', [
'network_name',
'network_shortname',
'pubkeyhash',
'wif_prefix',
'scripthash',
'magicbytes'
])
networks = (
# Peercoin mainnet
Network("Peercoin", "ppc", b'37', b'b7', b'75', b'e6e8e9e5'),
# Peercoin testnet
Network("Peercoin-testnet", "tppc", b'6f', b'ef', b'c4', b'cbf2c0ef')
# Bitcoin mainnet
Network("Bitcoin", "btc", b'00', b'80', b'05', b'd9b4bef9'),
# Bitcoin testnet
Network("Bitcoin-testnet", "tbtc", b'6f', b'ef', b'c4', b'dab5bffa')
)
def query(query):
'''find matching parameter among the networks'''
for network in networks:
for field in network:
if field == query:
return network
| Add specifications for Bitcoin mainnet and testnet | Add specifications for Bitcoin mainnet and testnet | Python | bsd-3-clause | PeerAssets/pypeerassets,backpacker69/pypeerassets | ---
+++
@@ -14,6 +14,10 @@
Network("Peercoin", "ppc", b'37', b'b7', b'75', b'e6e8e9e5'),
# Peercoin testnet
Network("Peercoin-testnet", "tppc", b'6f', b'ef', b'c4', b'cbf2c0ef')
+ # Bitcoin mainnet
+ Network("Bitcoin", "btc", b'00', b'80', b'05', b'd9b4bef9'),
+ # Bitcoin testnet
+ Network("Bitcoin-testnet", "tbtc", b'6f', b'ef', b'c4', b'dab5bffa')
)
def query(query): |
b1e2b05a3e77b869cf9d0de820134237ce5de5ba | sideloader/tasks.py | sideloader/tasks.py | from celery import task
import os
import sys
import subprocess
@task()
def build(build, giturl, branch):
# Use subprocess to execute a build, update the db with results
local = os.path.dirname(sys.argv[0])
buildpack = os.path.join(local, 'bin/build_package')
deployfile = build.project.deploy_file
print "Executing build %s %s" % (giturl, branch)
args = [buildpack, '--branch', branch]
if build.project.deploy_file:
args.extend(['--deploy-file', build.project.deploy_file])
if build.project.release_stream:
args.extend(['--push', build.project.release_stream.push_command])
args.append(giturl)
builder = subprocess.Popen(args,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=local)
builder.wait()
build.log = builder.stdout.read()
if builder.returncode != 0:
build.state = 2
else:
build.state = 1
build.save()
| from celery import task
import os
import sys
import subprocess
@task()
def build(build, giturl, branch):
# Use subprocess to execute a build, update the db with results
local = os.path.dirname(sys.argv[0])
buildpack = os.path.join(local, 'bin/build_package')
deployfile = build.project.deploy_file
print "Executing build %s %s" % (giturl, branch)
args = [buildpack, '--branch', branch]
if build.project.deploy_file:
args.extend(['--deploy-file', build.project.deploy_file])
if build.project.release_stream:
args.extend(['--push', build.project.release_stream.push_command])
args.append(giturl)
builder = subprocess.Popen(args,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=local)
stdoutdata, stderrdata = builder.communicate()
build.log = stdoutdata
if builder.returncode != 0:
build.state = 2
else:
build.state = 1
build.save()
| Use communicate so process out does not deadlock | Use communicate so process out does not deadlock
| Python | mit | praekelt/sideloader,praekelt/sideloader,praekelt/sideloader,praekelt/sideloader | ---
+++
@@ -22,15 +22,14 @@
args.append(giturl)
- builder = subprocess.Popen(args,
+ builder = subprocess.Popen(args,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=local)
- builder.wait()
-
- build.log = builder.stdout.read()
+ stdoutdata, stderrdata = builder.communicate()
+ build.log = stdoutdata
if builder.returncode != 0:
build.state = 2
else:
build.state = 1
-
+
build.save() |
73b684af1941ea116d8b4b37ef463cd3c9da0493 | django_modelviews/templatetags/modelview_tags.py | django_modelviews/templatetags/modelview_tags.py | from django import template
from django.template.loader import render_to_string
from django.utils.text import capfirst
from django_modelview import generic
register = template.Library()
@register.simple_tag
def ordering_link(request, field, title):
title = capfirst(title)
if not hasattr(request, '_ordering_link_cache'):
data = request.GET and request.GET.copy() or {}
# Remove pagination and ordering vars
for k in ('o', 'ot', 'page'):
if k in data:
del data[k]
request._ordering_link_cache = (
request.GET.get('o', ''),
request.GET.get('ot') == 'desc' and 'desc' or 'asc',
generic.querystring(data),
)
c = request._ordering_link_cache
tmpl = u'<a href="?%s%s" class="%s">%s</a>'
if c[0] == field:
dir = c[1] == 'asc' and 'desc' or 'asc'
return tmpl % (
c[2],
u'&o=%s&ot=%s' % (
field,
dir,
),
c[1],
title,
)
return tmpl % (
c[2],
u'&o=%s' % field,
'',
title,
)
| from django import template
from django.template.loader import render_to_string
from django.utils.text import capfirst
from django_modelviews import generic
register = template.Library()
@register.simple_tag
def ordering_link(request, field, title):
title = capfirst(title)
if not hasattr(request, '_ordering_link_cache'):
data = request.GET and request.GET.copy() or {}
# Remove pagination and ordering vars
for k in ('o', 'ot', 'page'):
if k in data:
del data[k]
request._ordering_link_cache = (
request.GET.get('o', ''),
request.GET.get('ot') == 'desc' and 'desc' or 'asc',
generic.querystring(data),
)
c = request._ordering_link_cache
tmpl = u'<a href="?%s%s" class="%s">%s</a>'
if c[0] == field:
dir = c[1] == 'asc' and 'desc' or 'asc'
return tmpl % (
c[2],
u'&o=%s&ot=%s' % (
field,
dir,
),
c[1],
title,
)
return tmpl % (
c[2],
u'&o=%s' % field,
'',
title,
)
| Fix typo in project name | Fix typo in project name
| Python | bsd-3-clause | matthiask/towel,matthiask/towel,matthiask/towel,matthiask/towel | ---
+++
@@ -2,7 +2,7 @@
from django.template.loader import render_to_string
from django.utils.text import capfirst
-from django_modelview import generic
+from django_modelviews import generic
register = template.Library()
|
297660a27dc5b23beb0f616965c60389bce3c2d8 | h2o-py/tests/testdir_algos/rf/pyunit_bigcatRF.py | h2o-py/tests/testdir_algos/rf/pyunit_bigcatRF.py | import sys
sys.path.insert(1, "../../../")
import h2o
def bigcatRF(ip,port):
# Connect to h2o
h2o.init(ip,port)
# Training set has 100 categories from cat001 to cat100
# Categories cat001, cat003, ... are perfect predictors of y = 1
# Categories cat002, cat004, ... are perfect predictors of y = 0
#Log.info("Importing bigcat_5000x2.csv data...\n")
bigcat = h2o.import_frame(path=h2o.locate("smalldata/gbm_test/bigcat_5000x2.csv"))
bigcat["y"] = bigcat["y"].asfactor()
#Log.info("Summary of bigcat_5000x2.csv from H2O:\n")
#bigcat.summary()
# Train H2O DRF Model:
#Log.info("H2O DRF (Naive Split) with parameters:\nclassification = TRUE, ntree = 1, depth = 1, nbins = 100\n")
model = h2o.random_forest(x=bigcat[["X"]], y=bigcat["y"], ntrees=1, max_depth=1, nbins=100)
model.show()
if __name__ == "__main__":
h2o.run_test(sys.argv, bigcatRF)
| import sys
sys.path.insert(1, "../../../")
import h2o
def bigcatRF(ip,port):
# Connect to h2o
h2o.init(ip,port)
# Training set has 100 categories from cat001 to cat100
# Categories cat001, cat003, ... are perfect predictors of y = 1
# Categories cat002, cat004, ... are perfect predictors of y = 0
#Log.info("Importing bigcat_5000x2.csv data...\n")
bigcat = h2o.import_frame(path=h2o.locate("smalldata/gbm_test/bigcat_5000x2.csv"))
bigcat["y"] = bigcat["y"].asfactor()
#Log.info("Summary of bigcat_5000x2.csv from H2O:\n")
#bigcat.summary()
# Train H2O DRF Model:
#Log.info("H2O DRF (Naive Split) with parameters:\nclassification = TRUE, ntree = 1, depth = 1, nbins = 100, nbins_cats=10\n")
model = h2o.random_forest(x=bigcat[["X"]], y=bigcat["y"], ntrees=1, max_depth=1, nbins=100, nbins_cats=10)
model.show()
if __name__ == "__main__":
h2o.run_test(sys.argv, bigcatRF)
| Add usage of nbins_cats to RF pyunit. | Add usage of nbins_cats to RF pyunit.
| Python | apache-2.0 | nilbody/h2o-3,YzPaul3/h2o-3,datachand/h2o-3,michalkurka/h2o-3,brightchen/h2o-3,madmax983/h2o-3,weaver-viii/h2o-3,junwucs/h2o-3,michalkurka/h2o-3,tarasane/h2o-3,michalkurka/h2o-3,datachand/h2o-3,madmax983/h2o-3,spennihana/h2o-3,h2oai/h2o-3,junwucs/h2o-3,datachand/h2o-3,junwucs/h2o-3,YzPaul3/h2o-3,bospetersen/h2o-3,jangorecki/h2o-3,nilbody/h2o-3,printedheart/h2o-3,h2oai/h2o-dev,printedheart/h2o-3,bospetersen/h2o-3,datachand/h2o-3,tarasane/h2o-3,michalkurka/h2o-3,spennihana/h2o-3,jangorecki/h2o-3,h2oai/h2o-dev,weaver-viii/h2o-3,mrgloom/h2o-3,mrgloom/h2o-3,mrgloom/h2o-3,printedheart/h2o-3,h2oai/h2o-3,PawarPawan/h2o-v3,h2oai/h2o-dev,ChristosChristofidis/h2o-3,madmax983/h2o-3,jangorecki/h2o-3,weaver-viii/h2o-3,pchmieli/h2o-3,tarasane/h2o-3,pchmieli/h2o-3,datachand/h2o-3,mrgloom/h2o-3,madmax983/h2o-3,weaver-viii/h2o-3,mathemage/h2o-3,h2oai/h2o-dev,datachand/h2o-3,junwucs/h2o-3,mathemage/h2o-3,mrgloom/h2o-3,h2oai/h2o-3,PawarPawan/h2o-v3,spennihana/h2o-3,jangorecki/h2o-3,spennihana/h2o-3,PawarPawan/h2o-v3,h2oai/h2o-3,weaver-viii/h2o-3,pchmieli/h2o-3,junwucs/h2o-3,brightchen/h2o-3,junwucs/h2o-3,printedheart/h2o-3,printedheart/h2o-3,kyoren/https-github.com-h2oai-h2o-3,nilbody/h2o-3,PawarPawan/h2o-v3,spennihana/h2o-3,YzPaul3/h2o-3,brightchen/h2o-3,ChristosChristofidis/h2o-3,tarasane/h2o-3,ChristosChristofidis/h2o-3,mrgloom/h2o-3,tarasane/h2o-3,h2oai/h2o-3,PawarPawan/h2o-v3,kyoren/https-github.com-h2oai-h2o-3,YzPaul3/h2o-3,michalkurka/h2o-3,kyoren/https-github.com-h2oai-h2o-3,weaver-viii/h2o-3,nilbody/h2o-3,ChristosChristofidis/h2o-3,spennihana/h2o-3,PawarPawan/h2o-v3,nilbody/h2o-3,nilbody/h2o-3,pchmieli/h2o-3,ChristosChristofidis/h2o-3,kyoren/https-github.com-h2oai-h2o-3,nilbody/h2o-3,brightchen/h2o-3,bospetersen/h2o-3,YzPaul3/h2o-3,YzPaul3/h2o-3,mathemage/h2o-3,michalkurka/h2o-3,madmax983/h2o-3,kyoren/https-github.com-h2oai-h2o-3,brightchen/h2o-3,YzPaul3/h2o-3,ChristosChristofidis/h2o-3,pchmieli/h2o-3,mathemage/h2o-3,bospetersen/h2o-3,brightchen/h2o-3,h
2oai/h2o-3,weaver-viii/h2o-3,kyoren/https-github.com-h2oai-h2o-3,kyoren/https-github.com-h2oai-h2o-3,h2oai/h2o-3,h2oai/h2o-dev,h2oai/h2o-dev,pchmieli/h2o-3,tarasane/h2o-3,mathemage/h2o-3,mathemage/h2o-3,spennihana/h2o-3,brightchen/h2o-3,mathemage/h2o-3,mrgloom/h2o-3,datachand/h2o-3,jangorecki/h2o-3,tarasane/h2o-3,madmax983/h2o-3,pchmieli/h2o-3,bospetersen/h2o-3,printedheart/h2o-3,bospetersen/h2o-3,ChristosChristofidis/h2o-3,printedheart/h2o-3,jangorecki/h2o-3,h2oai/h2o-dev,PawarPawan/h2o-v3,h2oai/h2o-3,michalkurka/h2o-3,madmax983/h2o-3,bospetersen/h2o-3,junwucs/h2o-3,jangorecki/h2o-3 | ---
+++
@@ -18,8 +18,8 @@
#bigcat.summary()
# Train H2O DRF Model:
- #Log.info("H2O DRF (Naive Split) with parameters:\nclassification = TRUE, ntree = 1, depth = 1, nbins = 100\n")
- model = h2o.random_forest(x=bigcat[["X"]], y=bigcat["y"], ntrees=1, max_depth=1, nbins=100)
+ #Log.info("H2O DRF (Naive Split) with parameters:\nclassification = TRUE, ntree = 1, depth = 1, nbins = 100, nbins_cats=10\n")
+ model = h2o.random_forest(x=bigcat[["X"]], y=bigcat["y"], ntrees=1, max_depth=1, nbins=100, nbins_cats=10)
model.show()
if __name__ == "__main__": |
89e49a69e700f49fa70391b02c839e3a0a4a1c7f | server/accounts/views.py | server/accounts/views.py | from django.shortcuts import render
# Create your views here.
| from django.shortcuts import render
from django.contrib.auth import login, logout
from django.contrib.auth.models import User
from rest_framework import viewsets
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from rest_framework.views import APIView
# Create your views here.
from . import serializers, permissions, authenticators, models
class UserView(viewsets.ModelViewSet):
serializer_class = serializers.UserSerializer
model = User
def get_permissions(self):
# allow non-authenticated user to create
return (AllowAny() if self.request.method == 'POST'
else permissions.IsStaffOrTargetUser()),
class AuthView(APIView):
authentication_classes = (authenticators.QuietBasicAuthentication,)
def post(self, request, *args, **kwargs):
login(request, request.user)
return Response(serializers.UserSerializer(request.user).data)
def delete(self, request, *args, **kwargs):
logout(request)
return Response()
| Update the models for Auth and User. | Update the models for Auth and User.
| Python | agpl-3.0 | TomDataworks/angular-inventory,TomDataworks/angular-inventory | ---
+++
@@ -1,3 +1,31 @@
from django.shortcuts import render
+from django.contrib.auth import login, logout
+from django.contrib.auth.models import User
+from rest_framework import viewsets
+from rest_framework.permissions import AllowAny
+from rest_framework.response import Response
+from rest_framework.views import APIView
# Create your views here.
+
+from . import serializers, permissions, authenticators, models
+
+class UserView(viewsets.ModelViewSet):
+ serializer_class = serializers.UserSerializer
+ model = User
+
+ def get_permissions(self):
+ # allow non-authenticated user to create
+ return (AllowAny() if self.request.method == 'POST'
+ else permissions.IsStaffOrTargetUser()),
+
+class AuthView(APIView):
+ authentication_classes = (authenticators.QuietBasicAuthentication,)
+
+ def post(self, request, *args, **kwargs):
+ login(request, request.user)
+ return Response(serializers.UserSerializer(request.user).data)
+
+ def delete(self, request, *args, **kwargs):
+ logout(request)
+ return Response() |
0cb0fee339883adeb93f787b5cc19e5293463c06 | skimage/_shared/utils.py | skimage/_shared/utils.py | import warnings
import functools
__all__ = ['deprecated']
class deprecated(object):
'''Decorator to mark deprecated functions with warning.
Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.
Parameters
----------
alt_func : str
If given, tell user what function to use instead.
behavior : {'warn', 'raise'}
Behavior during call to deprecated function: 'warn' = warn user that
function is deprecated; 'raise' = raise error.
'''
def __init__(self, alt_func=None, behavior='warn'):
self.alt_func = alt_func
self.behavior = behavior
def __call__(self, func):
msg = 'Call to deprecated function `%s`.' % func.__name__
if self.alt_func is not None:
msg += ' Use `%s` instead.' % self.alt_func
@functools.wraps(func)
def wrapped(*args, **kwargs):
if self.behavior == 'warn':
warnings.warn_explicit(msg,
category=DeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1)
elif self.behavior == 'raise':
raise DeprecationWarning(msg)
return func(*args, **kwargs)
# modify doc string to display deprecation warning
doc = 'Deprecated function.'
if self.alt_func is not None:
doc += ' Use `%s` instead.' % self.alt_func
if wrapped.__doc__ is None:
wrapped.__doc__ = doc
else:
wrapped.__doc__ = doc + '\n\n' + wrapped.__doc__
return wrapped
| import warnings
import functools
__all__ = ['deprecated']
class deprecated(object):
'''Decorator to mark deprecated functions with warning.
Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.
Parameters
----------
alt_func : str
If given, tell user what function to use instead.
behavior : {'warn', 'raise'}
Behavior during call to deprecated function: 'warn' = warn user that
function is deprecated; 'raise' = raise error.
'''
def __init__(self, alt_func=None, behavior='warn'):
self.alt_func = alt_func
self.behavior = behavior
def __call__(self, func):
alt_msg = ''
if self.alt_func is not None:
alt_msg = ' Use `%s` instead.' % self.alt_func
msg = 'Call to deprecated function `%s`.' % func.__name__
msg += alt_msg
@functools.wraps(func)
def wrapped(*args, **kwargs):
if self.behavior == 'warn':
warnings.warn_explicit(msg,
category=DeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1)
elif self.behavior == 'raise':
raise DeprecationWarning(msg)
return func(*args, **kwargs)
# modify doc string to display deprecation warning
doc = 'Deprecated function.' + alt_msg
if wrapped.__doc__ is None:
wrapped.__doc__ = doc
else:
wrapped.__doc__ = doc + '\n\n' + wrapped.__doc__
return wrapped
| Remove duplicate code for alternative function | Remove duplicate code for alternative function
| Python | bsd-3-clause | michaelpacer/scikit-image,Midafi/scikit-image,jwiggins/scikit-image,Midafi/scikit-image,michaelaye/scikit-image,Hiyorimi/scikit-image,paalge/scikit-image,SamHames/scikit-image,GaZ3ll3/scikit-image,juliusbierk/scikit-image,WarrenWeckesser/scikits-image,ajaybhat/scikit-image,oew1v07/scikit-image,rjeli/scikit-image,Britefury/scikit-image,SamHames/scikit-image,emon10005/scikit-image,chintak/scikit-image,bennlich/scikit-image,robintw/scikit-image,SamHames/scikit-image,Britefury/scikit-image,ClinicalGraphics/scikit-image,almarklein/scikit-image,youprofit/scikit-image,vighneshbirodkar/scikit-image,dpshelio/scikit-image,vighneshbirodkar/scikit-image,ajaybhat/scikit-image,GaZ3ll3/scikit-image,rjeli/scikit-image,rjeli/scikit-image,chintak/scikit-image,keflavich/scikit-image,chriscrosscutler/scikit-image,ofgulban/scikit-image,emon10005/scikit-image,almarklein/scikit-image,bsipocz/scikit-image,blink1073/scikit-image,michaelaye/scikit-image,blink1073/scikit-image,warmspringwinds/scikit-image,ofgulban/scikit-image,bsipocz/scikit-image,chriscrosscutler/scikit-image,juliusbierk/scikit-image,warmspringwinds/scikit-image,paalge/scikit-image,SamHames/scikit-image,newville/scikit-image,youprofit/scikit-image,michaelpacer/scikit-image,chintak/scikit-image,bennlich/scikit-image,oew1v07/scikit-image,Hiyorimi/scikit-image,ofgulban/scikit-image,newville/scikit-image,vighneshbirodkar/scikit-image,pratapvardhan/scikit-image,keflavich/scikit-image,jwiggins/scikit-image,robintw/scikit-image,pratapvardhan/scikit-image,almarklein/scikit-image,paalge/scikit-image,almarklein/scikit-image,ClinicalGraphics/scikit-image,WarrenWeckesser/scikits-image,dpshelio/scikit-image,chintak/scikit-image | ---
+++
@@ -25,9 +25,12 @@
def __call__(self, func):
+ alt_msg = ''
+ if self.alt_func is not None:
+ alt_msg = ' Use `%s` instead.' % self.alt_func
+
msg = 'Call to deprecated function `%s`.' % func.__name__
- if self.alt_func is not None:
- msg += ' Use `%s` instead.' % self.alt_func
+ msg += alt_msg
@functools.wraps(func)
def wrapped(*args, **kwargs):
@@ -41,10 +44,7 @@
return func(*args, **kwargs)
# modify doc string to display deprecation warning
- doc = 'Deprecated function.'
- if self.alt_func is not None:
- doc += ' Use `%s` instead.' % self.alt_func
-
+ doc = 'Deprecated function.' + alt_msg
if wrapped.__doc__ is None:
wrapped.__doc__ = doc
else: |
0b28fe44514969470db926c6f38615a8a5478bf6 | smoke_signal/__init__.py | smoke_signal/__init__.py | from flask import Flask, g
from .main.views import main
from .nojs.views import nojs
from sqlalchemy import create_engine
from smoke_signal.database.models import Base
from sqlalchemy.orm import sessionmaker
app = Flask(__name__, instance_relative_config=True)
app.config.from_object("config")
app.config.from_pyfile("config.py")
app.register_blueprint(main)
app.register_blueprint(nojs)
@app.before_request
def init_db():
engine = create_engine(app.config["DATABASE_PATH"])
if not engine.dialect.has_table(engine.connect(), "feed"):
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)
g.db = Session()
@app.teardown_appcontext
def shutdown_session(exception=None):
db = getattr(g, 'db', None)
if db is not None:
g.db.close()
| from flask import Flask, g
from .main.views import main
from sqlalchemy import create_engine
from smoke_signal.database.models import Base
from sqlalchemy.orm import sessionmaker
app = Flask(__name__, instance_relative_config=True)
app.config.from_object("config")
app.config.from_pyfile("config.py")
app.register_blueprint(main)
@app.before_request
def init_db():
engine = create_engine(app.config["DATABASE_PATH"])
if not engine.dialect.has_table(engine.connect(), "feed"):
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)
g.db = Session()
@app.teardown_appcontext
def shutdown_session(exception=None):
db = getattr(g, 'db', None)
if db is not None:
g.db.close()
| Remove the no-JS version from the app | Remove the no-JS version from the app
I haven't looked into it for a long while.
| Python | mit | flacerdk/smoke-signal,flacerdk/smoke-signal,flacerdk/smoke-signal | ---
+++
@@ -1,6 +1,5 @@
from flask import Flask, g
from .main.views import main
-from .nojs.views import nojs
from sqlalchemy import create_engine
from smoke_signal.database.models import Base
from sqlalchemy.orm import sessionmaker
@@ -9,7 +8,6 @@
app.config.from_object("config")
app.config.from_pyfile("config.py")
app.register_blueprint(main)
-app.register_blueprint(nojs)
@app.before_request |
cad7bdca07f5ba979f14b11e6fd4109b1f7043c9 | terminate_instances.py | terminate_instances.py | s program terminate instances if proper tag is not used
"""
import time
import boto3
start_time = time.time()
ec2 = boto3.resource('ec2')
ec2_client = boto3.client('ec2')
tag_deparment = ['Finance', 'Marketing', 'HumanResources', 'Research'] # Your departments
shutdown_instance = False
for instance in ec2.instances.all():
instance_state = instance.state['Name']
if instance_state == ('running' or 'pending'):
for tags in instance.tags:
for department in tag_deparment:
if tags['Value'] == department:
shutdown_instance = False
break
else:
shutdown_instance = True
print('The following instance will be shutdown', instance.id, 'Shutdown = ', shutdown_instance)
if shutdown_instance is True:
ec2_client.stop_instances(
InstanceIds = [instance.id],
Force = True
)
| """
this program terminate instances if proper tag is not used
"""
import time
import boto3
start_time = time.time()
ec2 = boto3.resource('ec2')
ec2_client = boto3.client('ec2')
tag_deparment = ['Finance', 'Marketing', 'HumanResources', 'Research'] # Your departments
shutdown_instance = False
for instance in ec2.instances.all():
instance_state = instance.state['Name']
if instance_state == ('running' or 'pending'):
for tags in instance.tags:
for department in tag_deparment:
if tags['Value'] == department:
shutdown_instance = False
break
else:
shutdown_instance = True
print('The following instance will be shutdown', instance.id, 'Shutdown = ', shutdown_instance)
if shutdown_instance is True:
ec2_client.stop_instances(
InstanceIds = [instance.id],
Force = True
)
| Terminate instances is not proper tag | Terminate instances is not proper tag
| Python | mit | gabrielrojasnyc/AWS | ---
+++
@@ -1,4 +1,5 @@
-s program terminate instances if proper tag is not used
+"""
+this program terminate instances if proper tag is not used
"""
|
2dbc4e2aec98aba8a0e307f951b412464db8b078 | della/user_manager/urls.py | della/user_manager/urls.py | from django.conf.urls import url
from django.contrib.auth import views as auth_views
from .views import (SignupView, UserProfileUpdateView, DrawNamesView,
UserProfileDetailView, ActivateView)
urlpatterns = [
url(r'^login/$', auth_views.login, name='login',
kwargs={'template_name': 'user_manager/login.html'}),
url(r'^logout/$', auth_views.logout, name='logout',
kwargs={'next_page': '/'}),
url(r'^create/$', SignupView.as_view()),
url(r'^update/$', UserProfileUpdateView.as_view(), name='update'),
url(r'^activate/(?P<username>[0-9A-Za-z_]+)-(?P<code>[0-9A-Za-z_:-]+)/$',
ActivateView.as_view()),
url(r'^draw-names/$', DrawNamesView.as_view()),
url(r'^@(?P<username>[a-zA-Z0-9_]+)/$', UserProfileDetailView.as_view(),
name='user-detail'),
]
| from django.conf.urls import url
from django.contrib.auth import views as auth_views
from .views import (SignupView, UserProfileUpdateView, DrawNamesView,
UserProfileDetailView, ActivateView)
urlpatterns = [
url(r'^login/$', auth_views.login, name='login',
kwargs={'template_name': 'user_manager/login.html'}),
url(r'^logout/$', auth_views.logout, name='logout',
kwargs={'next_page': '/'}),
url(r'^create/$', SignupView.as_view()),
url(r'^update/$', UserProfileUpdateView.as_view(), name='update'),
url(r'^activate/(?P<username>[0-9A-Za-z_]+)-(?P<code>[0-9A-Za-z_:-]+)/$',
ActivateView.as_view(), name='activate-user'),
url(r'^draw-names/$', DrawNamesView.as_view()),
url(r'^@(?P<username>[a-zA-Z0-9_]+)/$', UserProfileDetailView.as_view(),
name='user-detail'),
]
| Add name for url config of `ActivateView` | Add name for url config of `ActivateView`
| Python | mit | avinassh/della,avinassh/della,avinassh/della | ---
+++
@@ -12,7 +12,7 @@
url(r'^create/$', SignupView.as_view()),
url(r'^update/$', UserProfileUpdateView.as_view(), name='update'),
url(r'^activate/(?P<username>[0-9A-Za-z_]+)-(?P<code>[0-9A-Za-z_:-]+)/$',
- ActivateView.as_view()),
+ ActivateView.as_view(), name='activate-user'),
url(r'^draw-names/$', DrawNamesView.as_view()),
url(r'^@(?P<username>[a-zA-Z0-9_]+)/$', UserProfileDetailView.as_view(),
name='user-detail'), |
9b8c1f35d057bbf6e336434bd028cb0b2673afb8 | installer/installer_config/admin.py | installer/installer_config/admin.py | from django.contrib import admin
from installer_config.models import Package, TerminalPrompt, EnvironmentProfile
class PackageAdmin(admin.ModelAdmin):
model = Package
list_display = ('display_name', 'version', 'website')
class TerminalPromptAdmin(admin.ModelAdmin):
model = TerminalPrompt
list_display = ('display_name', 'install_name', 'description')
admin.site.register(Package, PackageAdmin)
admin.site.register(TerminalPrompt, TerminalPromptAdmin)
# admin.site.register(EnvironmentProfile, EnvironmentProfileAdmin)
| from django.contrib import admin
from installer_config.models import Package, TerminalPrompt, EnvironmentProfile
class PackageAdmin(admin.ModelAdmin):
model = Package
list_display = ('display_name', 'version', 'website')
class TerminalPromptAdmin(admin.ModelAdmin):
model = TerminalPrompt
list_display = ('display_name', 'install_name', 'description')
class EnvironmentProfileAdmin(admin.ModelAdmin):
model = EnvironmentProfile
admin.site.register(Package, PackageAdmin)
admin.site.register(TerminalPrompt, TerminalPromptAdmin)
admin.site.register(EnvironmentProfile, EnvironmentProfileAdmin)
| Add Environment Profile to Admin view | Add Environment Profile to Admin view
| Python | mit | ezPy-co/ezpy,ezPy-co/ezpy,alibulota/Package_Installer,alibulota/Package_Installer | ---
+++
@@ -12,6 +12,10 @@
list_display = ('display_name', 'install_name', 'description')
+class EnvironmentProfileAdmin(admin.ModelAdmin):
+ model = EnvironmentProfile
+
+
admin.site.register(Package, PackageAdmin)
admin.site.register(TerminalPrompt, TerminalPromptAdmin)
-# admin.site.register(EnvironmentProfile, EnvironmentProfileAdmin)
+admin.site.register(EnvironmentProfile, EnvironmentProfileAdmin) |
d379badd16528d2f7cd3826fcef5bd87be30cccf | nightreads/user_manager/user_service.py | nightreads/user_manager/user_service.py | from django.contrib.auth.models import User
from nightreads.posts.models import Tag
from .models import (Subscription, SubscriptionActivation,
UnsubscriptionActivation)
def update_user_tags(user, tags):
tags_objs = Tag.objects.filter(name__in=tags)
if tags_objs:
user.subscription.tags.clear()
user.subscription.tags.add(*tags_objs)
user.save()
return True
return False
def get_user(email):
user, created = User.objects.get_or_create(username=email)
if created:
Subscription.objects.create(user=user)
return user
def generate_subscribe_key(user):
subscribe_key = User.objects.make_random_password(length=80)
SubscriptionActivation.objects.update_or_create(
user=user, defaults={'subscribe_key': subscribe_key})
return subscribe_key
def generate_unsubscribe_key(user):
unsubscribe_key = User.objects.make_random_password(length=80)
UnsubscriptionActivation.objects.update_or_create(
user=user, defaults={'unsubscribe_key': unsubscribe_key})
return unsubscribe_key
| from django.contrib.auth.models import User
from nightreads.posts.models import Tag
from .models import (Subscription, SubscriptionActivation,
UnsubscriptionActivation)
def update_user_tags(user, tags):
tags_objs = Tag.objects.filter(name__in=tags)
if tags_objs:
user.subscription.tags.clear()
user.subscription.tags.add(*tags_objs)
user.save()
return True
return False
def get_or_create_user(email):
user, created = User.objects.get_or_create(username=email)
if created:
Subscription.objects.create(user=user)
return user
def get_user(email):
return User.objects.filter(username=email).first()
def generate_subscribe_key(user):
subscribe_key = User.objects.make_random_password(length=80)
SubscriptionActivation.objects.update_or_create(
user=user, defaults={'subscribe_key': subscribe_key})
return subscribe_key
def generate_unsubscribe_key(user):
unsubscribe_key = User.objects.make_random_password(length=80)
UnsubscriptionActivation.objects.update_or_create(
user=user, defaults={'unsubscribe_key': unsubscribe_key})
return unsubscribe_key
def update_subscription(user, status):
user.subscription.is_subscribed = status
user.save()
| Rename `get_user` to `get_or_create_user` and add a new `get_user` | Rename `get_user` to `get_or_create_user` and add a new `get_user`
| Python | mit | avinassh/nightreads,avinassh/nightreads | ---
+++
@@ -15,11 +15,15 @@
return False
-def get_user(email):
+def get_or_create_user(email):
user, created = User.objects.get_or_create(username=email)
if created:
Subscription.objects.create(user=user)
return user
+
+
+def get_user(email):
+ return User.objects.filter(username=email).first()
def generate_subscribe_key(user):
@@ -34,3 +38,8 @@
UnsubscriptionActivation.objects.update_or_create(
user=user, defaults={'unsubscribe_key': unsubscribe_key})
return unsubscribe_key
+
+
+def update_subscription(user, status):
+ user.subscription.is_subscribed = status
+ user.save() |
8f0956313b140d7a0d51510cd9b4a5eec7d54570 | plugins/holland.lib.lvm/tests/test_util.py | plugins/holland.lib.lvm/tests/test_util.py | import os
import signal
from nose.tools import *
from holland.lib.lvm.util import *
def test_format_bytes():
assert_equals(format_bytes(1024), '1.00KB')
assert_equals(format_bytes(0), '0.00Bytes')
def test_getmount():
assert_equals(getmount('/'), '/')
assert_equals(getmount('/foobarbaz'), '/')
def test_getdevice():
# XXX: bad hack
dev = open('/etc/mtab', 'r').readline().split()[0].strip()
assert_equals(getdevice('/'), dev)
assert_equals(getdevice('/foobarbaz'), None)
def test_relpath():
assert_raises(ValueError, relpath, '')
assert_equals(relpath('/foo/bar/baz', '/foo/bar'), 'baz')
assert_equals(relpath('/foo/bar/', '/foo/bar/'), os.curdir)
def test_signalmanager():
sigmgr = SignalManager()
sigmgr.trap(signal.SIGINT)
os.kill(os.getpid(), signal.SIGINT)
ok_(sigmgr.pending)
assert_equals(sigmgr.pending[0], signal.SIGINT)
sigmgr.restore()
assert_raises(KeyboardInterrupt, os.kill, os.getpid(), signal.SIGINT)
| import os
import signal
from nose.tools import *
from holland.lib.lvm.util import *
def test_format_bytes():
assert_equals(format_bytes(1024), '1.00KB')
assert_equals(format_bytes(0), '0.00Bytes')
def test_getmount():
assert_equals(getmount('/'), '/')
assert_equals(getmount('/foobarbaz'), '/')
def test_getdevice():
# XXX: bad hack
dev = open('/etc/mtab', 'r').readline().split()[0].strip()
assert_equals(getdevice('/'), dev)
assert_equals(getdevice('/foobarbaz'), None)
def test_relpath():
assert_raises(ValueError, relpath, '')
assert_equals(relpath('/foo/bar/baz', '/foo/bar'), 'baz')
assert_equals(relpath('/foo/bar/', '/foo/bar/'), os.curdir)
def test_signalmanager():
sigmgr = SignalManager()
sigmgr.trap(signal.SIGINT)
os.kill(os.getpid(), signal.SIGINT)
ok_(sigmgr.pending)
assert_equals(sigmgr.pending[0], signal.SIGINT)
sigmgr.restore()
assert_raises(KeyboardInterrupt, os.kill, os.getpid(), signal.SIGINT)
def test_parsebytes():
# bytes without units should be interpretted as MB
bytes = parse_bytes('1024')
assert_equals(bytes, 1024**3)
# this should not be bytes
ok_(bytes > 1024)
bytes = parse_bytes('1024G')
assert_equals(bytes, 1024**4)
| Add test case to holland.lib.lvm for parsing snapshot-size without units | Add test case to holland.lib.lvm for parsing snapshot-size without units
| Python | bsd-3-clause | m00dawg/holland,m00dawg/holland | ---
+++
@@ -30,3 +30,13 @@
assert_equals(sigmgr.pending[0], signal.SIGINT)
sigmgr.restore()
assert_raises(KeyboardInterrupt, os.kill, os.getpid(), signal.SIGINT)
+
+def test_parsebytes():
+ # bytes without units should be interpretted as MB
+ bytes = parse_bytes('1024')
+ assert_equals(bytes, 1024**3)
+ # this should not be bytes
+ ok_(bytes > 1024)
+
+ bytes = parse_bytes('1024G')
+ assert_equals(bytes, 1024**4) |
fcfa0b96226ba8b8d2bbd62365c2cab3f6e42d99 | salt/runners/state.py | salt/runners/state.py | '''
Execute overstate functions
'''
# Import Salt libs
import salt.overstate
import salt.output
def over(env='base', os_fn=None):
'''
Execute an overstate sequence to orchestrate the executing of states
over a group of systems
'''
overstate = salt.overstate.OverState(__opts__, env, os_fn)
over_run = overstate.stages()
salt.output.display_output(over_run, 'pprint', opts=__opts__)
return over_run
def show_stages(env='base', os_fn=None):
'''
Display the stage data to be executed
'''
overstate = salt.overstate.OverState(__opts__, env, os_fn)
salt.output.display_output(overstate.over, 'pprint', opts=__opts__)
return overstate.over
| '''
Execute overstate functions
'''
# Import Salt libs
import salt.overstate
import salt.output
def over(env='base', os_fn=None):
'''
Execute an overstate sequence to orchestrate the executing of states
over a group of systems
'''
overstate = salt.overstate.OverState(__opts__, env, os_fn)
overstate.stages()
salt.output.display_output(overstate.over_run, 'pprint', opts=__opts__)
return overstate.over_run
def show_stages(env='base', os_fn=None):
'''
Display the stage data to be executed
'''
overstate = salt.overstate.OverState(__opts__, env, os_fn)
salt.output.display_output(overstate.over, 'pprint', opts=__opts__)
return overstate.over
| Print and return the correct data | Print and return the correct data
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | ---
+++
@@ -12,9 +12,9 @@
over a group of systems
'''
overstate = salt.overstate.OverState(__opts__, env, os_fn)
- over_run = overstate.stages()
- salt.output.display_output(over_run, 'pprint', opts=__opts__)
- return over_run
+ overstate.stages()
+ salt.output.display_output(overstate.over_run, 'pprint', opts=__opts__)
+ return overstate.over_run
def show_stages(env='base', os_fn=None):
''' |
f01e6fe6efb16a31a99b35cecaf000a0cb54bd4e | nodeconductor/core/authentication.py | nodeconductor/core/authentication.py | from __future__ import unicode_literals
import nodeconductor.logging.middleware
import rest_framework.authentication
def user_capturing_auth(auth):
class CapturingAuthentication(auth):
def authenticate(self, request):
result = super(CapturingAuthentication, self).authenticate(request)
if result is not None:
user, _ = result
nodeconductor.logging.middleware.set_current_user(user)
return result
return CapturingAuthentication
SessionAuthentication = user_capturing_auth(rest_framework.authentication.SessionAuthentication)
TokenAuthentication = user_capturing_auth(rest_framework.authentication.TokenAuthentication)
| from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
import rest_framework.authentication
from rest_framework import exceptions
import nodeconductor.logging.middleware
TOKEN_KEY = 'x-auth-token'
class TokenAuthentication(rest_framework.authentication.TokenAuthentication):
"""
Custom token-based authentication.
Use TOKEN_KEY from request query parameters if authentication token was not found in header.
"""
def get_authorization_value(self, request):
auth = rest_framework.authentication.get_authorization_header(request)
if not auth:
auth = request.query_params.get(TOKEN_KEY, '')
return auth
def authenticate(self, request):
auth = self.get_authorization_value(request).split()
if not auth or auth[0].lower() != b'token':
return None
if len(auth) == 1:
msg = _('Invalid token. No credentials provided.')
raise exceptions.AuthenticationFailed(msg)
elif len(auth) > 2:
msg = _('Invalid token. Token string should not contain spaces.')
raise exceptions.AuthenticationFailed(msg)
return self.authenticate_credentials(auth[1])
def user_capturing_auth(auth):
class CapturingAuthentication(auth):
def authenticate(self, request):
result = super(CapturingAuthentication, self).authenticate(request)
if result is not None:
user, _ = result
nodeconductor.logging.middleware.set_current_user(user)
return result
return CapturingAuthentication
SessionAuthentication = user_capturing_auth(rest_framework.authentication.SessionAuthentication)
TokenAuthentication = user_capturing_auth(TokenAuthentication)
| Use get parameter in token auth (nc-544) | Use get parameter in token auth (nc-544)
| Python | mit | opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor | ---
+++
@@ -1,8 +1,42 @@
from __future__ import unicode_literals
+
+from django.utils.translation import ugettext_lazy as _
+import rest_framework.authentication
+from rest_framework import exceptions
import nodeconductor.logging.middleware
-import rest_framework.authentication
+
+TOKEN_KEY = 'x-auth-token'
+
+
+class TokenAuthentication(rest_framework.authentication.TokenAuthentication):
+ """
+ Custom token-based authentication.
+
+ Use TOKEN_KEY from request query parameters if authentication token was not found in header.
+ """
+
+ def get_authorization_value(self, request):
+ auth = rest_framework.authentication.get_authorization_header(request)
+ if not auth:
+ auth = request.query_params.get(TOKEN_KEY, '')
+ return auth
+
+ def authenticate(self, request):
+ auth = self.get_authorization_value(request).split()
+
+ if not auth or auth[0].lower() != b'token':
+ return None
+
+ if len(auth) == 1:
+ msg = _('Invalid token. No credentials provided.')
+ raise exceptions.AuthenticationFailed(msg)
+ elif len(auth) > 2:
+ msg = _('Invalid token. Token string should not contain spaces.')
+ raise exceptions.AuthenticationFailed(msg)
+
+ return self.authenticate_credentials(auth[1])
def user_capturing_auth(auth):
@@ -16,6 +50,5 @@
return CapturingAuthentication
-
SessionAuthentication = user_capturing_auth(rest_framework.authentication.SessionAuthentication)
-TokenAuthentication = user_capturing_auth(rest_framework.authentication.TokenAuthentication)
+TokenAuthentication = user_capturing_auth(TokenAuthentication) |
7bee444eeb17ec956478e999db47338fdf201411 | querylist/tests/querylist_list_tests.py | querylist/tests/querylist_list_tests.py | import unittest2
from querylist import QueryList
class QueryListActsAsList(unittest2.TestCase):
"""QueryList should behave as lists behave"""
def setUp(self):
self.src_list = [{'foo': 1}, {'foo': 2}, {'foo': 3}]
self.query_list = QueryList(self.src_list)
def test_QueryList_items_are_equal_to_its_source_lists_items(self):
self.assertEqual(self.src_list, self.query_list)
def test_QueryList_length_is_equal_to_its_source_lists_length(self):
self.assertEqual(len(self.src_list), len(self.query_list))
def test_QueryLists_can_append_like_lists(self):
dbl_list = self.src_list + self.src_list
dbl_query_list = self.query_list + self.query_list
self.assertEqual(dbl_query_list, dbl_list) | import unittest2
from querylist import QueryList
class QueryListActsAsList(unittest2.TestCase):
"""QueryLists should act just like lists"""
def setUp(self):
self.src_list = [{'foo': 1}, {'foo': 2}, {'foo': 3}]
self.query_list = QueryList(self.src_list)
def test_QueryList_items_are_equal_to_its_source_lists_items(self):
self.assertEqual(self.src_list, self.query_list)
def test_QueryList_length_is_equal_to_its_source_lists_length(self):
self.assertEqual(len(self.src_list), len(self.query_list))
def test_QueryLists_can_append_like_lists(self):
dbl_list = self.src_list + self.src_list
dbl_query_list = self.query_list + self.query_list
self.assertEqual(dbl_query_list, dbl_list)
def test_QueryList_slicing_works_like_list_slicing(self):
self.assertEqual(self.query_list[:2], self.src_list[:2])
def test_QueryList_indexing_works_like_list_indexing(self):
self.assertEqual(self.query_list[1], self.src_list[1])
| Add some more acts as list tests. | Add some more acts as list tests.
| Python | mit | thomasw/querylist,zoidbergwill/querylist | ---
+++
@@ -3,7 +3,7 @@
from querylist import QueryList
class QueryListActsAsList(unittest2.TestCase):
- """QueryList should behave as lists behave"""
+ """QueryLists should act just like lists"""
def setUp(self):
self.src_list = [{'foo': 1}, {'foo': 2}, {'foo': 3}]
self.query_list = QueryList(self.src_list)
@@ -19,3 +19,9 @@
dbl_query_list = self.query_list + self.query_list
self.assertEqual(dbl_query_list, dbl_list)
+
+ def test_QueryList_slicing_works_like_list_slicing(self):
+ self.assertEqual(self.query_list[:2], self.src_list[:2])
+
+ def test_QueryList_indexing_works_like_list_indexing(self):
+ self.assertEqual(self.query_list[1], self.src_list[1]) |
d0fbb133cf2203de5f79f7fcdb647a3dff56f5a9 | copy_relative_path.py | copy_relative_path.py | import sublime, sublime_plugin
from os.path import relpath
class CopyRelativePathCommand(sublime_plugin.TextCommand):
def run(self, edit):
filename = self.view.file_name()
if len(filename) > 0:
# Copy shortest relpath for file compared to open folders
sublime.set_clipboard(
min(
(
relpath(filename, folder)
for folder in sublime.active_window().folders()
),
key=len,
)
)
sublime.status_message("Copied relative path")
def is_enabled(self):
return self.view.file_name() and len(self.view.file_name()) > 0
| import sublime, sublime_plugin
from os.path import relpath
class CopyRelativePathCommand(sublime_plugin.TextCommand):
def run(self, edit):
filename = self.view.file_name()
if len(filename) > 0:
# Copy shortest relpath for file compared to open folders
sublime.set_clipboard(
min(
(
relpath(filename, folder)
for folder in sublime.active_window().folders()
),
key=len,
)
)
sublime.status_message("Copied relative path")
def is_enabled(self):
return bool(self.view.file_name() and len(self.view.file_name()) > 0)
| Add bool cast to appease ST3 | Add bool cast to appease ST3
| Python | mit | bpicolo/CopyRelativePath | ---
+++
@@ -18,4 +18,4 @@
sublime.status_message("Copied relative path")
def is_enabled(self):
- return self.view.file_name() and len(self.view.file_name()) > 0
+ return bool(self.view.file_name() and len(self.view.file_name()) > 0) |
8b519628839bc2360d2f0f48231e2cf7b9edc6b3 | scripts/analytics/run_keen_summaries.py | scripts/analytics/run_keen_summaries.py | from framework.celery_tasks import app as celery_app
from scripts.analytics.user_summary import UserSummary
from scripts.analytics.node_summary import NodeSummary
from scripts.analytics.base import DateAnalyticsHarness
class SummaryHarness(DateAnalyticsHarness):
@property
def analytics_classes(self):
return [NodeSummary, UserSummary]
@celery_app.task(name='scripts.run_keen_summaries')
def run_main(date):
SummaryHarness().main(date)
if __name__ == '__main__':
SummaryHarness().main()
| from framework.celery_tasks import app as celery_app
from scripts.analytics.user_summary import UserSummary
from scripts.analytics.node_summary import NodeSummary
from scripts.analytics.institution_summary import InstitutionSummary
from scripts.analytics.base import DateAnalyticsHarness
class SummaryHarness(DateAnalyticsHarness):
@property
def analytics_classes(self):
return [NodeSummary, UserSummary, InstitutionSummary]
@celery_app.task(name='scripts.run_keen_summaries')
def run_main(date):
SummaryHarness().main(date)
if __name__ == '__main__':
SummaryHarness().main()
| Add new InstitutionSummary to run_keen_summarries harness | Add new InstitutionSummary to run_keen_summarries harness
| Python | apache-2.0 | leb2dg/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,laurenrevere/osf.io,brianjgeiger/osf.io,caneruguz/osf.io,saradbowman/osf.io,chrisseto/osf.io,cwisecarver/osf.io,mluo613/osf.io,cwisecarver/osf.io,alexschiller/osf.io,acshi/osf.io,laurenrevere/osf.io,Nesiehr/osf.io,sloria/osf.io,mluo613/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,TomBaxter/osf.io,Johnetordoff/osf.io,hmoco/osf.io,aaxelb/osf.io,caseyrollins/osf.io,leb2dg/osf.io,mluo613/osf.io,rdhyee/osf.io,mfraezz/osf.io,erinspace/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,baylee-d/osf.io,felliott/osf.io,cslzchen/osf.io,baylee-d/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,adlius/osf.io,chrisseto/osf.io,chennan47/osf.io,Nesiehr/osf.io,TomBaxter/osf.io,baylee-d/osf.io,erinspace/osf.io,pattisdr/osf.io,alexschiller/osf.io,erinspace/osf.io,monikagrabowska/osf.io,acshi/osf.io,crcresearch/osf.io,chennan47/osf.io,caneruguz/osf.io,aaxelb/osf.io,cslzchen/osf.io,hmoco/osf.io,binoculars/osf.io,HalcyonChimera/osf.io,pattisdr/osf.io,binoculars/osf.io,adlius/osf.io,adlius/osf.io,crcresearch/osf.io,Nesiehr/osf.io,adlius/osf.io,alexschiller/osf.io,sloria/osf.io,acshi/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,mattclark/osf.io,felliott/osf.io,icereval/osf.io,mfraezz/osf.io,caneruguz/osf.io,rdhyee/osf.io,alexschiller/osf.io,hmoco/osf.io,monikagrabowska/osf.io,mattclark/osf.io,mattclark/osf.io,HalcyonChimera/osf.io,TomBaxter/osf.io,leb2dg/osf.io,acshi/osf.io,crcresearch/osf.io,Johnetordoff/osf.io,icereval/osf.io,binoculars/osf.io,saradbowman/osf.io,acshi/osf.io,caseyrollins/osf.io,sloria/osf.io,monikagrabowska/osf.io,rdhyee/osf.io,mluo613/osf.io,cslzchen/osf.io,monikagrabowska/osf.io,aaxelb/osf.io,leb2dg/osf.io,felliott/osf.io,mluo613/osf.io,cslzchen/osf.io,chennan47/osf.io,caneruguz/osf.io,monikagrabowska/osf.io,alexschiller/osf.io,icereval/osf.io,laurenrevere/osf.io,mfraezz/osf.io,chrisseto/osf.io,HalcyonChimera/osf.io,rdhyee/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io
,brianjgeiger/osf.io,Johnetordoff/osf.io,caseyrollins/osf.io,chrisseto/osf.io,hmoco/osf.io,Nesiehr/osf.io,CenterForOpenScience/osf.io | ---
+++
@@ -1,6 +1,7 @@
from framework.celery_tasks import app as celery_app
from scripts.analytics.user_summary import UserSummary
from scripts.analytics.node_summary import NodeSummary
+from scripts.analytics.institution_summary import InstitutionSummary
from scripts.analytics.base import DateAnalyticsHarness
@@ -8,7 +9,7 @@
@property
def analytics_classes(self):
- return [NodeSummary, UserSummary]
+ return [NodeSummary, UserSummary, InstitutionSummary]
@celery_app.task(name='scripts.run_keen_summaries') |
51bae69bdbbc6cf19858ff8ed91efb09f4a0b845 | spam_lists/exceptions.py | spam_lists/exceptions.py | # -*- coding: utf-8 -*-
class SpamListsError(Exception):
'''There was an error during testing a url or host'''
class UnknownCodeError(SpamListsError, KeyError):
'''The classification code from the service was not recognized'''
class UnathorizedAPIKeyError(SpamListsError, ValueError):
'''The API key used to query the service was not authorized'''
class InvalidHostError(SpamListsError, ValueError):
'''The value is not a valid host'''
class InvalidHostnameError(SpamListsError, ValueError):
'''The value is not a valid hostname'''
class InvalidURLError(SpamListsError, ValueError):
'''The value is not a valid url'''
| # -*- coding: utf-8 -*-
class SpamListsError(Exception):
'''There was an error during testing a url or host'''
class UnknownCodeError(SpamListsError, KeyError):
'''The classification code from the service was not recognized'''
class UnathorizedAPIKeyError(SpamListsError, ValueError):
'''The API key used to query the service was not authorized'''
class InvalidHostError(SpamListsError, ValueError):
'''The value is not a valid host'''
class InvalidHostnameError(InvalidHostError):
'''The value is not a valid hostname'''
class InvalidURLError(SpamListsError, ValueError):
'''The value is not a valid url'''
| Make InvalidHostnameError a subclass of InvalidHostError | Make InvalidHostnameError a subclass of InvalidHostError
| Python | mit | piotr-rusin/spam-lists | ---
+++
@@ -12,7 +12,7 @@
class InvalidHostError(SpamListsError, ValueError):
'''The value is not a valid host'''
-class InvalidHostnameError(SpamListsError, ValueError):
+class InvalidHostnameError(InvalidHostError):
'''The value is not a valid hostname'''
class InvalidURLError(SpamListsError, ValueError): |
94e7fb9821d904dba19fee1ca1d129259f33204e | skimage/draw/__init__.py | skimage/draw/__init__.py | from ._draw import line, polygon, ellipse, ellipse_perimeter, \
circle, circle_perimeter, set_color, bresenham
| from ._draw import line, polygon, ellipse, ellipse_perimeter, \
circle, circle_perimeter, set_color, bresenham
__all__ = ['line',
'polygon',
'ellipse',
'ellipse_perimeter',
'circle',
'circle_perimeter',
'set_color',
'bresenham']
| Add __all__ to draw package | Add __all__ to draw package
| Python | bsd-3-clause | keflavich/scikit-image,michaelpacer/scikit-image,WarrenWeckesser/scikits-image,chintak/scikit-image,robintw/scikit-image,michaelaye/scikit-image,emon10005/scikit-image,chriscrosscutler/scikit-image,GaZ3ll3/scikit-image,michaelaye/scikit-image,emon10005/scikit-image,ajaybhat/scikit-image,bennlich/scikit-image,vighneshbirodkar/scikit-image,rjeli/scikit-image,Hiyorimi/scikit-image,SamHames/scikit-image,newville/scikit-image,juliusbierk/scikit-image,youprofit/scikit-image,keflavich/scikit-image,Midafi/scikit-image,ClinicalGraphics/scikit-image,pratapvardhan/scikit-image,newville/scikit-image,ofgulban/scikit-image,blink1073/scikit-image,WarrenWeckesser/scikits-image,oew1v07/scikit-image,chriscrosscutler/scikit-image,vighneshbirodkar/scikit-image,chintak/scikit-image,warmspringwinds/scikit-image,Britefury/scikit-image,vighneshbirodkar/scikit-image,almarklein/scikit-image,michaelpacer/scikit-image,rjeli/scikit-image,jwiggins/scikit-image,robintw/scikit-image,blink1073/scikit-image,chintak/scikit-image,ClinicalGraphics/scikit-image,warmspringwinds/scikit-image,ofgulban/scikit-image,chintak/scikit-image,ajaybhat/scikit-image,oew1v07/scikit-image,paalge/scikit-image,bennlich/scikit-image,dpshelio/scikit-image,SamHames/scikit-image,Midafi/scikit-image,paalge/scikit-image,rjeli/scikit-image,paalge/scikit-image,almarklein/scikit-image,SamHames/scikit-image,jwiggins/scikit-image,SamHames/scikit-image,youprofit/scikit-image,GaZ3ll3/scikit-image,bsipocz/scikit-image,almarklein/scikit-image,ofgulban/scikit-image,dpshelio/scikit-image,Hiyorimi/scikit-image,almarklein/scikit-image,juliusbierk/scikit-image,bsipocz/scikit-image,Britefury/scikit-image,pratapvardhan/scikit-image | ---
+++
@@ -1,2 +1,11 @@
from ._draw import line, polygon, ellipse, ellipse_perimeter, \
circle, circle_perimeter, set_color, bresenham
+
+__all__ = ['line',
+ 'polygon',
+ 'ellipse',
+ 'ellipse_perimeter',
+ 'circle',
+ 'circle_perimeter',
+ 'set_color',
+ 'bresenham'] |
6dde06470c9cd868319b1b4615d3065b61a6bc2c | sqlcop/cli.py | sqlcop/cli.py | import sys
import sqlparse
from .checks import has_cross_join
def parse_file(filename):
import json
with open(filename, 'r') as fh:
return json.load(fh)
CHECKS = (
(has_cross_join, 'query contains cross join'),
)
def check_query(el):
"""
Run each of the defined checks on a query.
"""
stmt = sqlparse.parse(el)
for check in CHECKS:
if check[0](stmt[0]):
return False, check[1]
return True, ''
def main():
argv = sys.argv
try:
queries = parse_file(argv[1])
except IndexError:
raise Exception('Filename required')
failed = False
for query, tests in queries.iteritems():
passed, message = check_query(query)
if not passed:
failed = True
print_message(message, tests, query)
sys.exit(255 if failed else 0)
def print_message(message, tests, query):
print "FAILED - %s" % (message)
print "-----------------------------------------------------------------"
print "Test Methods:"
print "%s" % "\n".join(tests)
print
print "Query:"
print "%s" % query
| import sys
import sqlparse
from .checks import has_cross_join
def parse_file(filename):
return open(filename, 'r').readlines()
CHECKS = (
(has_cross_join, 'query contains cross join'),
)
def check_query(el):
"""
Run each of the defined checks on a query.
"""
stmt = sqlparse.parse(el)
for check in CHECKS:
if check[0](stmt[0]):
return False, check[1]
return True, ''
def main():
argv = sys.argv
try:
queries = parse_file(argv[1])
except IndexError:
raise Exception('Filename required')
failed = False
for query in queries:
passed, message = check_query(query)
if not passed:
failed = True
print_message(message, query)
sys.exit(255 if failed else 0)
def print_message(message, query):
print "FAILED - %s" % (message)
print "-----------------------------------------------------------------"
print
print "Query:"
print "%s" % query
| Work with plain SQL files | Work with plain SQL files
| Python | bsd-3-clause | freshbooks/sqlcop | ---
+++
@@ -4,9 +4,7 @@
def parse_file(filename):
- import json
- with open(filename, 'r') as fh:
- return json.load(fh)
+ return open(filename, 'r').readlines()
CHECKS = (
@@ -32,19 +30,17 @@
except IndexError:
raise Exception('Filename required')
failed = False
- for query, tests in queries.iteritems():
+ for query in queries:
passed, message = check_query(query)
if not passed:
failed = True
- print_message(message, tests, query)
+ print_message(message, query)
sys.exit(255 if failed else 0)
-def print_message(message, tests, query):
+def print_message(message, query):
print "FAILED - %s" % (message)
print "-----------------------------------------------------------------"
- print "Test Methods:"
- print "%s" % "\n".join(tests)
print
print "Query:"
print "%s" % query |
7dd2f4a7d21d42fbccbd34d356ab341b99aea54e | squash/dashboard/urls.py | squash/dashboard/urls.py | from django.conf.urls import include, url
from django.contrib import admin
from rest_framework.routers import DefaultRouter
from . import views
admin.site.site_header = 'SQUASH Admin'
api_router = DefaultRouter()
api_router.register(r'jobs', views.JobViewSet)
api_router.register(r'metrics', views.MetricViewSet)
api_router.register(r'measurements', views.MeasurementViewSet,
base_name='measurements')
api_router.register(r'datasets', views.DatasetViewSet,
base_name='datasets')
api_router.register(r'AMx', views.AMxViewSet, base_name='AMx')
api_router.register(r'PAx', views.PAxViewSet, base_name='PAx')
urlpatterns = [
url(r'^dashboard/api/?', include(api_router.urls)),
url(r'^dashboard/admin/?', include(admin.site.urls)),
url(r'^dashboard/(?P<bokeh_app>[\w./-]+)/?$',
views.embed_bokeh, name='bokeh_app'),
url(r'$', views.home, name='home')
]
| from django.conf.urls import include, url
from django.contrib import admin
from rest_framework.routers import DefaultRouter
from . import views
admin.site.site_header = 'SQUASH Admin'
api_router = DefaultRouter()
api_router.register(r'jobs', views.JobViewSet)
api_router.register(r'metrics', views.MetricViewSet)
api_router.register(r'measurements', views.MeasurementViewSet,
base_name='measurements')
api_router.register(r'datasets', views.DatasetViewSet,
base_name='datasets')
api_router.register(r'AMx', views.AMxViewSet, base_name='AMx')
api_router.register(r'PAx', views.PAxViewSet, base_name='PAx')
urlpatterns = [
url(r'^dashboard/api/', include(api_router.urls)),
url(r'^dashboard/admin/', include(admin.site.urls)),
url(r'^dashboard/(?P<bokeh_app>[\w./-]+)/?$',
views.embed_bokeh, name='bokeh_app'),
url(r'$', views.home, name='home')
]
| Fix URLs returned by the API, make sure we have the traling slash | Fix URLs returned by the API, make sure we have the traling slash
| Python | mit | lsst-sqre/qa-dashboard,lsst-sqre/qa-dashboard,lsst-sqre/qa-dashboard | ---
+++
@@ -16,8 +16,8 @@
api_router.register(r'PAx', views.PAxViewSet, base_name='PAx')
urlpatterns = [
- url(r'^dashboard/api/?', include(api_router.urls)),
- url(r'^dashboard/admin/?', include(admin.site.urls)),
+ url(r'^dashboard/api/', include(api_router.urls)),
+ url(r'^dashboard/admin/', include(admin.site.urls)),
url(r'^dashboard/(?P<bokeh_app>[\w./-]+)/?$',
views.embed_bokeh, name='bokeh_app'),
url(r'$', views.home, name='home') |
e8cee8b5a85762442e72079edd621363013455d9 | src/config.py | src/config.py |
sites_to_check = [
{
"name": "Lissu Monitor",
"url": "http://lissu.tampere.fi/monitor.php?stop=0014",
"acceptable_statuses": [200],
"mandatory_strings": [
"table2"
]
},
{
"name": "Siri API",
"url": "https://siri.ij2010.tampere.fi/ws",
"acceptable_statuses": [401],
"mandatory_strings": [
"Full authentication is required to access this resource",
"Apache Tomcat"
]
}
# TODO Next time when api is down, check below url:
# http://lissu.tampere.fi/ajax_servers/busLocations.php
]
|
sites_to_check = [
{
"name": "Lissu Monitor",
"url": "http://lissu.tampere.fi/monitor.php?stop=0014",
"acceptable_statuses": [200],
"mandatory_strings": [
"table2"
]
},
{
"name": "Siri API",
"url": "https://siri.ij2010.tampere.fi/ws",
"acceptable_statuses": [401],
"mandatory_strings": [
"Full authentication is required to access this resource",
"Apache Tomcat"
]
}
]
| Remove todo on lissu php ajax call. It returns and empty array in case of problems | Remove todo on lissu php ajax call. It returns and empty array in case of problems
| Python | mit | Vilsepi/nysseituu,Vilsepi/nysseituu | ---
+++
@@ -17,6 +17,4 @@
"Apache Tomcat"
]
}
- # TODO Next time when api is down, check below url:
- # http://lissu.tampere.fi/ajax_servers/busLocations.php
] |
e6d8789a1847ebe1525bb87c80b90d45db7cd29e | source/setup.py | source/setup.py | from distutils.core import setup
from Cython.Build import cythonize
ext_options = {"compiler_directives": {"profile": True}, "annotate": True}
setup(
name='Weighted-Levenshtein',
version='',
packages=[''],
url='',
license='',
author='Team bluebird',
author_email='',
description='', requires=['numpy','weighted_levenshtein','pandas', 'fuzzy', 'ngram'],
ext_modules = cythonize("*.pyx", **ext_options)
)
| from setuptools import setup
from Cython.Build import cythonize
ext_options = {"compiler_directives": {"profile": True}, "annotate": True}
setup(
name='Weighted-Levenshtein',
version='',
packages=[''],
url='',
license='',
author='Team bluebird',
author_email='',
description='', install_requires=['numpy','weighted_levenshtein','pandas', 'fuzzy', 'ngram'],
ext_modules = cythonize("*.pyx", **ext_options)
)
| Refactor rename to cost matrx | Refactor rename to cost matrx
| Python | mit | elangovana/NLP-BackTransliteration-PersianNames | ---
+++
@@ -1,4 +1,4 @@
-from distutils.core import setup
+from setuptools import setup
from Cython.Build import cythonize
ext_options = {"compiler_directives": {"profile": True}, "annotate": True}
@@ -11,7 +11,7 @@
license='',
author='Team bluebird',
author_email='',
- description='', requires=['numpy','weighted_levenshtein','pandas', 'fuzzy', 'ngram'],
+ description='', install_requires=['numpy','weighted_levenshtein','pandas', 'fuzzy', 'ngram'],
ext_modules = cythonize("*.pyx", **ext_options)
) |
3a17b72b82d2b23a676b92bd3292e04b77796ba7 | dock/__init__.py | dock/__init__.py | """
Copyright (c) 2015 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
constants
"""
import logging
def set_logging(name="dock", level=logging.DEBUG):
# create logger
logger = logging.getLogger(name)
logger.handlers = []
logger.setLevel(level)
# create console handler and set level to debug
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# add formatter to ch
ch.setFormatter(formatter)
# add ch to logger
logger.addHandler(ch)
set_logging(level=logging.WARNING) # override this however you want | """
Copyright (c) 2015 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
constants
"""
import logging
def set_logging(name="dock", level=logging.DEBUG, handler=None):
# create logger
logger = logging.getLogger(name)
logger.handlers = []
logger.setLevel(level)
if not handler:
# create console handler and set level to debug
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
# create formatter
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# add formatter to ch
handler.setFormatter(formatter)
# add ch to logger
logger.addHandler(handler)
set_logging(level=logging.WARNING) # override this however you want
| Allow redirecting dock's logs to a given handler only | Allow redirecting dock's logs to a given handler only
| Python | bsd-3-clause | david-martin/atomic-reactor,fr34k8/atomic-reactor,jpopelka/atomic-reactor,shaded-enmity/atomic-reactor,DBuildService/atomic-reactor,DBuildService/atomic-reactor,vrutkovs/atomic-reactor,fatherlinux/atomic-reactor,fr34k8/atomic-reactor,mmilata/atomic-reactor,jpopelka/atomic-reactor,maxamillion/atomic-reactor,mmilata/atomic-reactor,vrutkovs/atomic-reactor,projectatomic/atomic-reactor,fatherlinux/atomic-reactor,projectatomic/atomic-reactor,jarodwilson/atomic-reactor,jarodwilson/atomic-reactor,maxamillion/atomic-reactor,TomasTomecek/atomic-reactor,TomasTomecek/atomic-reactor,david-martin/atomic-reactor,shaded-enmity/atomic-reactor | ---
+++
@@ -11,24 +11,25 @@
import logging
-def set_logging(name="dock", level=logging.DEBUG):
+def set_logging(name="dock", level=logging.DEBUG, handler=None):
# create logger
logger = logging.getLogger(name)
logger.handlers = []
logger.setLevel(level)
- # create console handler and set level to debug
- ch = logging.StreamHandler()
- ch.setLevel(logging.DEBUG)
+ if not handler:
+ # create console handler and set level to debug
+ handler = logging.StreamHandler()
+ handler.setLevel(logging.DEBUG)
- # create formatter
- formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+ # create formatter
+ formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
- # add formatter to ch
- ch.setFormatter(formatter)
+ # add formatter to ch
+ handler.setFormatter(formatter)
# add ch to logger
- logger.addHandler(ch)
+ logger.addHandler(handler)
set_logging(level=logging.WARNING) # override this however you want |
99f21681c81ab67f0be5d6265d09af983599cc08 | esis/cli.py | esis/cli.py | # -*- coding: utf-8 -*-
"""Elastic Search Index & Search."""
import argparse
import os
def main():
"""Entry point for the esis.py script."""
args = parse_arguments()
print args
def valid_directory(path):
"""Directory validation."""
if not os.path.isdir(path):
raise argparse.ArgumentTypeError(
'{!r} is not a valid directory'.format(path))
if not os.access(path, os.R_OK | os.X_OK):
raise argparse.ArgumentTypeError(
'not enough permissions to explore {!r}'.format(path))
return path
def parse_arguments():
"""Parse command line arguments.
:returns: Parsed arguments
:rtype: argparse.Namespace
"""
parser = argparse.ArgumentParser(description=__doc__)
subparsers = parser.add_subparsers(help='Subcommands')
index = subparsers.add_parser('index', help='Index SQLite database files')
index.add_argument('directory', type=valid_directory, help='Base directory')
search = subparsers.add_parser('search', help='Search indexed data')
search.add_argument('query', help='Search query')
args = parser.parse_args()
return args
if __name__ == '__main__':
main()
| # -*- coding: utf-8 -*-
"""Elastic Search Index & Search."""
import argparse
import logging
import os
logger = logging.getLogger(__name__)
def main():
"""Entry point for the esis.py script."""
args = parse_arguments()
args.func(args)
def index(args):
"""Index database information into elasticsearch."""
logger.debug('Indexing %r...', args.directory)
def search(args):
"""Send query to elasticsearch."""
logger.debug('Searching %r...', args.query)
def valid_directory(path):
"""Directory validation."""
if not os.path.isdir(path):
raise argparse.ArgumentTypeError(
'{!r} is not a valid directory'.format(path))
if not os.access(path, os.R_OK | os.X_OK):
raise argparse.ArgumentTypeError(
'not enough permissions to explore {!r}'.format(path))
return path
def parse_arguments():
"""Parse command line arguments.
:returns: Parsed arguments
:rtype: argparse.Namespace
"""
parser = argparse.ArgumentParser(description=__doc__)
subparsers = parser.add_subparsers(help='Subcommands')
index_parser = subparsers.add_parser('index', help='Index SQLite database files')
index_parser.add_argument('directory', type=valid_directory, help='Base directory')
index_parser.set_defaults(func=index)
search_parser = subparsers.add_parser('search', help='Search indexed data')
search_parser.add_argument('query', help='Search query')
search_parser.set_defaults(func=search)
args = parser.parse_args()
return args
if __name__ == '__main__':
main()
| Add handlers for the index/search subcommands | Add handlers for the index/search subcommands
| Python | mit | jcollado/esis | ---
+++
@@ -2,13 +2,23 @@
"""Elastic Search Index & Search."""
import argparse
+import logging
import os
+logger = logging.getLogger(__name__)
def main():
"""Entry point for the esis.py script."""
args = parse_arguments()
- print args
+ args.func(args)
+
+def index(args):
+ """Index database information into elasticsearch."""
+ logger.debug('Indexing %r...', args.directory)
+
+def search(args):
+ """Send query to elasticsearch."""
+ logger.debug('Searching %r...', args.query)
def valid_directory(path):
"""Directory validation."""
@@ -31,10 +41,12 @@
"""
parser = argparse.ArgumentParser(description=__doc__)
subparsers = parser.add_subparsers(help='Subcommands')
- index = subparsers.add_parser('index', help='Index SQLite database files')
- index.add_argument('directory', type=valid_directory, help='Base directory')
- search = subparsers.add_parser('search', help='Search indexed data')
- search.add_argument('query', help='Search query')
+ index_parser = subparsers.add_parser('index', help='Index SQLite database files')
+ index_parser.add_argument('directory', type=valid_directory, help='Base directory')
+ index_parser.set_defaults(func=index)
+ search_parser = subparsers.add_parser('search', help='Search indexed data')
+ search_parser.add_argument('query', help='Search query')
+ search_parser.set_defaults(func=search)
args = parser.parse_args()
return args
|
752b4486c64fe0313f531b06bc0cb003804cc211 | examples/rietveld/rietveld_helper/urls.py | examples/rietveld/rietveld_helper/urls.py | from django.conf.urls.defaults import *
from django.contrib import admin
from codereview.urls import urlpatterns
admin.autodiscover()
urlpatterns += patterns('',
(r'^static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': 'static/'}),
(r'^accounts/login/$', 'django.contrib.auth.views.login'),
(r'^accounts/logout/$', 'django.contrib.auth.views.logout_then_login'),
# ('^admin/', include(admin.site.urls)),
('^_ah/admin', 'rietveld_helper.views.admin_redirect'),
)
| from django.conf.urls.defaults import *
from django.contrib import admin
#from codereview.urls import urlpatterns
admin.autodiscover()
urlpatterns = patterns('',
(r'^static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': 'static/'}),
(r'^accounts/login/$', 'django.contrib.auth.views.login'),
(r'^accounts/logout/$', 'django.contrib.auth.views.logout_then_login'),
# ('^admin/', include(admin.site.urls)),
('^_ah/admin', 'rietveld_helper.views.admin_redirect'),
('', include('codereview.urls')),
)
| Use include() instead of appending urlpatterns. | Use include() instead of appending urlpatterns.
| Python | apache-2.0 | andialbrecht/django-gae2django,bubenkoff/bubenkoff-gae2django,andialbrecht/django-gae2django,bubenkoff/bubenkoff-gae2django | ---
+++
@@ -1,15 +1,16 @@
from django.conf.urls.defaults import *
from django.contrib import admin
-from codereview.urls import urlpatterns
+#from codereview.urls import urlpatterns
admin.autodiscover()
-urlpatterns += patterns('',
+urlpatterns = patterns('',
(r'^static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': 'static/'}),
(r'^accounts/login/$', 'django.contrib.auth.views.login'),
(r'^accounts/logout/$', 'django.contrib.auth.views.logout_then_login'),
# ('^admin/', include(admin.site.urls)),
('^_ah/admin', 'rietveld_helper.views.admin_redirect'),
+ ('', include('codereview.urls')),
) |
9737eced8e2d667e3413a7d65946658d94f5868c | yg/emanate/__init__.py | yg/emanate/__init__.py | # -*- coding: utf-8 -*-
from .events import Event
__author__ = 'YouGov, plc'
__email__ = 'dev@yougov.com'
__version__ = '0.3.0'
__all__ = ['Event']
| # -*- coding: utf-8 -*-
from .events import Event
__author__ = 'YouGov, plc'
__email__ = 'dev@yougov.com'
__all__ = ['Event']
try:
import pkg_resources
dist = pkg_resources.get_distribution('yg.emanate')
__version__ = dist.version
except Exception:
__version__ = 'unknown'
| Load the version from the package metadata rather than trying to maintain it in a third place. | Load the version from the package metadata rather than trying to maintain it in a third place.
| Python | mit | yougov/emanate | ---
+++
@@ -3,6 +3,12 @@
__author__ = 'YouGov, plc'
__email__ = 'dev@yougov.com'
-__version__ = '0.3.0'
__all__ = ['Event']
+
+try:
+ import pkg_resources
+ dist = pkg_resources.get_distribution('yg.emanate')
+ __version__ = dist.version
+except Exception:
+ __version__ = 'unknown' |
0d1b8597a75f7e24ce3e74f99aad359e27a32be5 | fixcity/bmabr/tests/test_templatetags.py | fixcity/bmabr/tests/test_templatetags.py | import unittest
class TestRecaptchaTags(unittest.TestCase):
def test_recaptcha_html(self):
from fixcity.bmabr.templatetags import recaptcha_tags
from django.conf import settings
html = recaptcha_tags.recaptcha_html()
self.failUnless(settings.RECAPTCHA_PUBLIC_KEY in html)
self.failUnless(html.startswith('<script'))
class TestGoogleTags(unittest.TestCase):
def test_google_analytics(self):
from fixcity.bmabr.templatetags import google_analytics
from django.conf import settings
html = google_analytics.google_analytics()
self.failUnless(settings.GOOGLE_ANALYTICS_KEY in html)
self.failUnless(html.startswith('<script'))
| import unittest
import mock
import django.conf
class TestRecaptchaTags(unittest.TestCase):
def test_recaptcha_html(self):
from fixcity.bmabr.templatetags import recaptcha_tags
from django.conf import settings
html = recaptcha_tags.recaptcha_html()
self.failUnless(settings.RECAPTCHA_PUBLIC_KEY in html)
self.failUnless(html.startswith('<script'))
class TestGoogleTags(unittest.TestCase):
@mock.patch_object(django.conf, 'settings')
def test_google_analytics(self, mock_settings):
from fixcity.bmabr.templatetags import google_analytics
mock_settings.GOOGLE_ANALYTICS_KEY = 'xyzpdq'
html = google_analytics.google_analytics()
self.failUnless('xyzpdq' in html)
self.failUnless(html.startswith('<script'))
# For some reason this doesn't work if I put it in a separate
# test case... the google_analytics() function keeps a
# reference to the OLD mock_settings instance with the
# 'xyzpdq' value!
mock_settings.GOOGLE_ANALYTICS_KEY = ''
html = google_analytics.google_analytics()
self.assertEqual(html, '')
| Fix tests to work when GOOGLE_ANALYTICS_KEY is not set. | Fix tests to work when GOOGLE_ANALYTICS_KEY is not set.
| Python | agpl-3.0 | openplans/fixcity,openplans/fixcity | ---
+++
@@ -1,4 +1,8 @@
import unittest
+import mock
+
+import django.conf
+
class TestRecaptchaTags(unittest.TestCase):
@@ -12,9 +16,18 @@
class TestGoogleTags(unittest.TestCase):
- def test_google_analytics(self):
+ @mock.patch_object(django.conf, 'settings')
+ def test_google_analytics(self, mock_settings):
from fixcity.bmabr.templatetags import google_analytics
- from django.conf import settings
+ mock_settings.GOOGLE_ANALYTICS_KEY = 'xyzpdq'
html = google_analytics.google_analytics()
- self.failUnless(settings.GOOGLE_ANALYTICS_KEY in html)
+ self.failUnless('xyzpdq' in html)
self.failUnless(html.startswith('<script'))
+
+ # For some reason this doesn't work if I put it in a separate
+ # test case... the google_analytics() function keeps a
+ # reference to the OLD mock_settings instance with the
+ # 'xyzpdq' value!
+ mock_settings.GOOGLE_ANALYTICS_KEY = ''
+ html = google_analytics.google_analytics()
+ self.assertEqual(html, '') |
c5938719a70debb521d7592bc0919dfc3f71bd22 | gaphor/diagram/classes/generalization.py | gaphor/diagram/classes/generalization.py | """
Generalization --
"""
from gi.repository import GObject
from gaphor import UML
from gaphor.diagram.diagramline import DiagramLine
class GeneralizationItem(DiagramLine):
__uml__ = UML.Generalization
__relationship__ = "general", None, "specific", "generalization"
def __init__(self, id=None, model=None):
DiagramLine.__init__(self, id, model)
def draw_head(self, context):
cr = context.cairo
cr.move_to(0, 0)
cr.line_to(15, -10)
cr.line_to(15, 10)
cr.close_path()
cr.stroke()
cr.move_to(15, 0)
| """
Generalization --
"""
from gi.repository import GObject
from gaphor import UML
from gaphor.UML.modelfactory import stereotypes_str
from gaphor.diagram.presentation import LinePresentation
from gaphor.diagram.shapes import Box, Text
from gaphor.diagram.support import represents
@represents(UML.Generalization)
class GeneralizationItem(LinePresentation):
def __init__(self, id=None, model=None):
super().__init__(id, model)
self.shape_middle = Box(
Text(
text=lambda: stereotypes_str(self.subject),
style={"min-width": 0, "min-height": 0},
)
)
self.watch("subject.appliedStereotype.classifier.name")
def draw_head(self, context):
cr = context.cairo
cr.move_to(0, 0)
cr.line_to(15, -10)
cr.line_to(15, 10)
cr.close_path()
cr.stroke()
cr.move_to(15, 0)
| Use new line style for Generalization item | Use new line style for Generalization item
| Python | lgpl-2.1 | amolenaar/gaphor,amolenaar/gaphor | ---
+++
@@ -5,16 +5,24 @@
from gi.repository import GObject
from gaphor import UML
-from gaphor.diagram.diagramline import DiagramLine
+from gaphor.UML.modelfactory import stereotypes_str
+from gaphor.diagram.presentation import LinePresentation
+from gaphor.diagram.shapes import Box, Text
+from gaphor.diagram.support import represents
-class GeneralizationItem(DiagramLine):
+@represents(UML.Generalization)
+class GeneralizationItem(LinePresentation):
+ def __init__(self, id=None, model=None):
+ super().__init__(id, model)
- __uml__ = UML.Generalization
- __relationship__ = "general", None, "specific", "generalization"
-
- def __init__(self, id=None, model=None):
- DiagramLine.__init__(self, id, model)
+ self.shape_middle = Box(
+ Text(
+ text=lambda: stereotypes_str(self.subject),
+ style={"min-width": 0, "min-height": 0},
+ )
+ )
+ self.watch("subject.appliedStereotype.classifier.name")
def draw_head(self, context):
cr = context.cairo |
f9a349f902cb527314c22b384ddcf18de105cfc4 | utilities.py | utilities.py | import re
__author__ = 'tigge'
from wand.image import Image
def fix_image(filename, max_width):
with Image(filename=filename) as img:
img.auto_orient()
if img.width > max_width:
ratio = img.height / img.width
img.resize(width=max_width, height=round(max_width * ratio))
img.type = 'optimize'
img.compression_quality = 80
img.save(filename=filename)
def fix_google_drive_download_url(url):
url = re.sub(r"https://drive\.google\.com/file/d/(.*?)/view\?usp=.*",
r"https://docs.google.com/uc?authuser=0&id=\1&export=download", url)
return url
def fix_dropbox_download_url(url):
url = url[:-5] + "?dl=1" if url.startswith("https://www.dropbox.com") and url.endswith("?dl=0") else url
return url
| import re
__author__ = 'tigge'
from wand.image import Image
def fix_image(filename, max_width):
with Image(filename=filename) as img:
img.auto_orient()
if img.width > max_width:
ratio = img.height / img.width
img.resize(width=max_width, height=round(max_width * ratio))
img.type = 'optimize'
img.compression_quality = 80
img.save(filename=filename)
def fix_google_drive_download_url(url):
url = re.sub(r"https://drive\.google\.com/(?:a/.*){0,1}file/d/(.*?)/view\?usp=.*",
r"https://drive.google.com/uc?authuser=0&id=\1&export=download", url)
return url
def fix_dropbox_download_url(url):
url = url[:-5] + "?dl=1" if url.startswith("https://www.dropbox.com") and url.endswith("?dl=0") else url
return url
| Support Google Apps drive urls | Support Google Apps drive urls
| Python | mit | Tigge/trello-to-web,Tigge/trello-to-web | ---
+++
@@ -17,8 +17,8 @@
def fix_google_drive_download_url(url):
- url = re.sub(r"https://drive\.google\.com/file/d/(.*?)/view\?usp=.*",
- r"https://docs.google.com/uc?authuser=0&id=\1&export=download", url)
+ url = re.sub(r"https://drive\.google\.com/(?:a/.*){0,1}file/d/(.*?)/view\?usp=.*",
+ r"https://drive.google.com/uc?authuser=0&id=\1&export=download", url)
return url
|
593020cf6305701e712d04b5bb3e68612dcf7bb4 | fireplace/cards/wog/warrior.py | fireplace/cards/wog/warrior.py | from ..utils import *
##
# Minions
##
# Spells
class OG_276:
"Blood Warriors"
play = Give(CONTROLLER, Copy(FRIENDLY_MINIONS + DAMAGED))
class OG_314:
"Blood To Ichor"
play = Hit(TARGET,1), Dead(TARGET) | Summon(CONTROLLER, "OG_314b")
| from ..utils import *
##
# Minions
class OG_149:
"Ravaging Ghoul"
play = Hit(ALL_MINIONS - SELF, 1)
class OG_218:
"Bloodhoof Brave"
enrage = Refresh(SELF, buff="OG_218e")
OG_218e = buff(atk=3)
class OG_312:
"N'Zoth's First Mate"
play = Summon(CONTROLLER, "OG_058")
##
# Spells
class OG_276:
"Blood Warriors"
play = Give(CONTROLLER, Copy(FRIENDLY_MINIONS + DAMAGED))
class OG_314:
"Blood To Ichor"
play = Hit(TARGET,1), Dead(TARGET) | Summon(CONTROLLER, "OG_314b")
| Implement Ravaging Ghoul, Bloodhoof Brave, N'Zoth's First Mate | Implement Ravaging Ghoul, Bloodhoof Brave, N'Zoth's First Mate
| Python | agpl-3.0 | beheh/fireplace,NightKev/fireplace,jleclanche/fireplace | ---
+++
@@ -3,6 +3,22 @@
##
# Minions
+
+class OG_149:
+ "Ravaging Ghoul"
+ play = Hit(ALL_MINIONS - SELF, 1)
+
+
+class OG_218:
+ "Bloodhoof Brave"
+ enrage = Refresh(SELF, buff="OG_218e")
+
+OG_218e = buff(atk=3)
+
+
+class OG_312:
+ "N'Zoth's First Mate"
+ play = Summon(CONTROLLER, "OG_058")
## |
a85d3f87860d63d1b950ec754531439dab1bd7cb | dwitter/dweet/urls.py | dwitter/dweet/urls.py | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^id/(?P<dweet_id>[0-9]*)$', views.fullscreen_dweet,
name='fullscreen_dweet'),
url(r'^blank$', views.blank_dweet, name='blank_dweet'),
]
handler404 = views.handler404
| from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^id/(?P<dweet_id>[0-9]+)$', views.fullscreen_dweet,
name='fullscreen_dweet'),
url(r'^blank$', views.blank_dweet, name='blank_dweet'),
]
handler404 = views.handler404
| Fix url regex that allowed illegal url | Fix url regex that allowed illegal url
Old url pattern allowed /id/ (without a number), which resulted in a
500 (and an email). And lately google spiders seems to do this daily.
| Python | apache-2.0 | lionleaf/dwitter,lionleaf/dwitter,lionleaf/dwitter | ---
+++
@@ -2,7 +2,7 @@
from . import views
urlpatterns = [
- url(r'^id/(?P<dweet_id>[0-9]*)$', views.fullscreen_dweet,
+ url(r'^id/(?P<dweet_id>[0-9]+)$', views.fullscreen_dweet,
name='fullscreen_dweet'),
url(r'^blank$', views.blank_dweet, name='blank_dweet'),
] |
0549d03dfaba874b5ff56bcc3ab1b21c2ac4fec3 | src/armet/connectors/bottle/__init__.py | src/armet/connectors/bottle/__init__.py | # -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals, division
def is_available(*capacities):
"""
Detects if the environment is available for use in
the (optionally) specified capacities.
"""
try:
# Attempted import
import bottle # flake8: noqa
# TODO: Add additional checks to assert that flask is actually
# in use and available.
# Detected connector.
return True
except ImportError:
# Failed to import.
return False
| Add detection support for bottle. | Add detection support for bottle.
| Python | mit | armet/python-armet | ---
+++
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+from __future__ import print_function, unicode_literals, division
+
+
+def is_available(*capacities):
+ """
+ Detects if the environment is available for use in
+ the (optionally) specified capacities.
+ """
+ try:
+ # Attempted import
+ import bottle # flake8: noqa
+
+ # TODO: Add additional checks to assert that flask is actually
+ # in use and available.
+
+ # Detected connector.
+ return True
+
+ except ImportError:
+ # Failed to import.
+ return False | |
f64cb7c25d541421359dd8b5e1709444046d3fa0 | gittools.py | gittools.py | import os
import sha
def gitsha(path):
h = sha.sha()
data = file(path).read()
h.update("blob %d\0" % len(data))
h.update(data)
return h.hexdigest()
def git_info():
commithash = file('.git/HEAD').read().strip()
if commithash.startswith("ref: "):
commithash = file(commithash[5:]).read().strip()
if os.getuid() == os.stat(".git/index").st_uid:
os.system('git-update-index --refresh >/dev/null')
else:
print "NOT updating git cache, local changes might not be detected"
changed = bool(os.popen('git-diff-index -r HEAD').read())
return commithash, changed
def snapshot_info():
info = file('commithash').read().split('\n')
commithash = info[0]
changed = False
for line in [a for a in info[2:] if a]:
[mode, tag, sha, path] = line.split(None, 4)
if tag != 'blob':
continue
if gitsha(path) != sha:
changed = True
break
return commithash, changed
def get_info():
try:
return git_info()
except:
try:
return snapshot_info()
except:
return 'Unknown', False
| import os
import sha
def gitsha(path):
h = sha.sha()
data = file(path).read()
h.update("blob %d\0" % len(data))
h.update(data)
return h.hexdigest()
def git_info():
commithash = os.popen('git-rev-parse --verify HEAD').read().strip()
if os.getuid() == os.stat(".git/index").st_uid:
os.system('git-update-index --refresh >/dev/null')
else:
print "NOT updating git cache, local changes might not be detected"
changed = bool(os.popen('git-diff-index -r HEAD').read())
return commithash, changed
def snapshot_info():
info = file('commithash').read().split('\n')
commithash = info[0]
changed = False
for line in [a for a in info[2:] if a]:
[mode, tag, sha, path] = line.split(None, 4)
if tag != 'blob':
continue
if gitsha(path) != sha:
changed = True
break
return commithash, changed
def get_info():
try:
return git_info()
except:
try:
return snapshot_info()
except:
return 'Unknown', False
| Make resolving of git commit id use git-rev-parse. | OTHER: Make resolving of git commit id use git-rev-parse.
| Python | lgpl-2.1 | oneman/xmms2-oneman,theefer/xmms2,chrippa/xmms2,krad-radio/xmms2-krad,oneman/xmms2-oneman,six600110/xmms2,mantaraya36/xmms2-mantaraya36,oneman/xmms2-oneman,dreamerc/xmms2,oneman/xmms2-oneman-old,krad-radio/xmms2-krad,theefer/xmms2,oneman/xmms2-oneman,krad-radio/xmms2-krad,theefer/xmms2,six600110/xmms2,six600110/xmms2,xmms2/xmms2-stable,oneman/xmms2-oneman-old,xmms2/xmms2-stable,theefer/xmms2,oneman/xmms2-oneman-old,xmms2/xmms2-stable,chrippa/xmms2,oneman/xmms2-oneman,mantaraya36/xmms2-mantaraya36,mantaraya36/xmms2-mantaraya36,krad-radio/xmms2-krad,mantaraya36/xmms2-mantaraya36,theeternalsw0rd/xmms2,dreamerc/xmms2,theeternalsw0rd/xmms2,chrippa/xmms2,dreamerc/xmms2,oneman/xmms2-oneman-old,dreamerc/xmms2,chrippa/xmms2,mantaraya36/xmms2-mantaraya36,xmms2/xmms2-stable,mantaraya36/xmms2-mantaraya36,theefer/xmms2,oneman/xmms2-oneman-old,theeternalsw0rd/xmms2,six600110/xmms2,dreamerc/xmms2,oneman/xmms2-oneman,krad-radio/xmms2-krad,theefer/xmms2,theefer/xmms2,six600110/xmms2,chrippa/xmms2,theeternalsw0rd/xmms2,theeternalsw0rd/xmms2,theeternalsw0rd/xmms2,six600110/xmms2,xmms2/xmms2-stable,oneman/xmms2-oneman,xmms2/xmms2-stable,krad-radio/xmms2-krad,mantaraya36/xmms2-mantaraya36,chrippa/xmms2 | ---
+++
@@ -9,9 +9,7 @@
return h.hexdigest()
def git_info():
- commithash = file('.git/HEAD').read().strip()
- if commithash.startswith("ref: "):
- commithash = file(commithash[5:]).read().strip()
+ commithash = os.popen('git-rev-parse --verify HEAD').read().strip()
if os.getuid() == os.stat(".git/index").st_uid:
os.system('git-update-index --refresh >/dev/null')
else: |
15cd72fd68bad8dbb2eab293c4abf8700a782219 | test/trainer_test.py | test/trainer_test.py | import theanets
import util
class TestTrainer(util.MNIST):
def setUp(self):
super(TestTrainer, self).setUp()
self.exp = theanets.Experiment(
theanets.Autoencoder,
layers=(self.DIGIT_SIZE, 10, self.DIGIT_SIZE))
def assert_progress(self, algo, **kwargs):
trainer = self.exp.itertrain(self.images, optimize=algo, **kwargs)
costs0 = next(trainer)
costs1 = next(trainer)
costs2 = next(trainer)
assert costs2['loss'] < costs0['loss']
def test_sgd(self):
self.assert_progress('sgd', learning_rate=1e-4)
def test_nag(self):
self.assert_progress('nag', learning_rate=1e-4)
def test_rprop(self):
self.assert_progress('rprop', learning_rate=1e-4)
def test_rmsprop(self):
self.assert_progress('rmsprop', learning_rate=1e-4)
def test_adadelta(self):
self.assert_progress('adadelta', learning_rate=1e-4)
def test_cg(self):
self.assert_progress('cg')
def test_layerwise(self):
self.exp = theanets.Experiment(
theanets.Autoencoder,
layers=(self.DIGIT_SIZE, 10, 10, self.DIGIT_SIZE))
self.assert_progress('layerwise')
| import theanets
import util
class TestTrainer(util.MNIST):
def setUp(self):
super(TestTrainer, self).setUp()
self.exp = theanets.Experiment(
theanets.Autoencoder,
layers=(self.DIGIT_SIZE, 10, self.DIGIT_SIZE))
def assert_progress(self, algo, **kwargs):
trainer = self.exp.itertrain(self.images, optimize=algo, **kwargs)
t0, v0 = next(trainer)
t1, v1 = next(trainer)
t2, v2 = next(trainer)
assert t2['loss'] < t0['loss']
def test_sgd(self):
self.assert_progress('sgd', learning_rate=1e-4)
def test_nag(self):
self.assert_progress('nag', learning_rate=1e-4)
def test_rprop(self):
self.assert_progress('rprop', learning_rate=1e-4)
def test_rmsprop(self):
self.assert_progress('rmsprop', learning_rate=1e-4)
def test_adadelta(self):
self.assert_progress('adadelta', learning_rate=1e-4)
def test_cg(self):
self.assert_progress('cg')
def test_layerwise(self):
self.exp = theanets.Experiment(
theanets.Autoencoder,
layers=(self.DIGIT_SIZE, 10, 10, self.DIGIT_SIZE))
self.assert_progress('layerwise')
| Update test to work with new monitors. | Update test to work with new monitors.
| Python | mit | devdoer/theanets,lmjohns3/theanets,chrinide/theanets | ---
+++
@@ -12,10 +12,10 @@
def assert_progress(self, algo, **kwargs):
trainer = self.exp.itertrain(self.images, optimize=algo, **kwargs)
- costs0 = next(trainer)
- costs1 = next(trainer)
- costs2 = next(trainer)
- assert costs2['loss'] < costs0['loss']
+ t0, v0 = next(trainer)
+ t1, v1 = next(trainer)
+ t2, v2 = next(trainer)
+ assert t2['loss'] < t0['loss']
def test_sgd(self):
self.assert_progress('sgd', learning_rate=1e-4) |
1d2eef3bf6a1a5c9b5a1f34c224d3a9651e77d73 | gocd/response.py | gocd/response.py | import json
class Response(object):
def __init__(self, status_code, body, headers=None):
self.status_code = status_code
self._body = body
self._body_parsed = None
self.content_type = headers['content-type'].split(';')[0]
self.headers = headers
@property
def is_ok(self):
return self.status_code == 200
@property
def payload(self):
if self.content_type.startswith('application/json'):
if not self._body_parsed:
self._body_parsed = json.loads(self._body)
return self._body_parsed
else:
return self._body
@classmethod
def from_request(cls, response):
return Response(
response.code,
response.read(),
response.headers,
)
@classmethod
def from_http_error(cls, http_error):
return Response(
http_error.code,
http_error.read(),
http_error.headers,
)
| import json
class Response(object):
def __init__(self, status_code, body, headers=None, ok_status=None):
self.status_code = status_code
self._body = body
self._body_parsed = None
self.content_type = headers['content-type'].split(';')[0]
self.headers = headers
self.ok_status = ok_status or 200
@property
def is_ok(self):
return self.status_code == self.ok_status
@property
def payload(self):
if self.content_type.startswith('application/json'):
if not self._body_parsed:
self._body_parsed = json.loads(self._body)
return self._body_parsed
else:
return self._body
@classmethod
def from_request(cls, response, ok_status=None):
return Response(
response.code,
response.read(),
response.headers,
ok_status=ok_status
)
@classmethod
def from_http_error(cls, http_error):
return Response(
http_error.code,
http_error.read(),
http_error.headers,
)
| Add configurable ok status code for Response | Add configurable ok status code for Response
When scheduling a pipeline the successful code is 202, so tihis needs
to be configurable.
| Python | mit | henriquegemignani/py-gocd,gaqzi/py-gocd | ---
+++
@@ -2,16 +2,17 @@
class Response(object):
- def __init__(self, status_code, body, headers=None):
+ def __init__(self, status_code, body, headers=None, ok_status=None):
self.status_code = status_code
self._body = body
self._body_parsed = None
self.content_type = headers['content-type'].split(';')[0]
self.headers = headers
+ self.ok_status = ok_status or 200
@property
def is_ok(self):
- return self.status_code == 200
+ return self.status_code == self.ok_status
@property
def payload(self):
@@ -24,11 +25,12 @@
return self._body
@classmethod
- def from_request(cls, response):
+ def from_request(cls, response, ok_status=None):
return Response(
response.code,
response.read(),
response.headers,
+ ok_status=ok_status
)
@classmethod |
89fcfbd71a4fc9e8a5aaea65c106a092cbd27ac0 | src/plone.example/plone/example/todo.py | src/plone.example/plone/example/todo.py | # -*- encoding: utf-8 -*-
from aiohttp.web import Response
from zope.interface import Attribute
from zope.interface import Interface
class ITodo(Interface):
title = Attribute("""Title""")
done = Attribute("""Done""")
class View(object):
def __init__(self, context, request):
self.context = context
self.request = request
async def __call__(self):
return Response(text='Hello World!')
| # -*- encoding: utf-8 -*-
from aiohttp.web import Response
from plone.supermodel import model
from zope import schema
class ITodo(model.Schema):
title = schema.TextLine(title=u"Title",
required=False)
done = schema.Bool(title=u"Done",
required=False)
class View(object):
def __init__(self, context, request):
self.context = context
self.request = request
async def __call__(self):
return Response(text='Hello World!')
| Use supermodel/zope.schema for Todo field definition | Use supermodel/zope.schema for Todo field definition
| Python | bsd-2-clause | plone/plone.server,plone/plone.server | ---
+++
@@ -1,12 +1,14 @@
# -*- encoding: utf-8 -*-
from aiohttp.web import Response
-from zope.interface import Attribute
-from zope.interface import Interface
+from plone.supermodel import model
+from zope import schema
-class ITodo(Interface):
- title = Attribute("""Title""")
- done = Attribute("""Done""")
+class ITodo(model.Schema):
+ title = schema.TextLine(title=u"Title",
+ required=False)
+ done = schema.Bool(title=u"Done",
+ required=False)
class View(object): |
548423bb940a38c599ac3b82bf8a1207b5112646 | viewer_examples/plugins/lineprofile_rgb.py | viewer_examples/plugins/lineprofile_rgb.py | from skimage import data
from skimage.viewer import ImageViewer
from skimage.viewer.plugins.lineprofile import LineProfile
image = data.chelsea()
viewer = ImageViewer(image)
viewer += LineProfile()
line, profiles = viewer.show()[0]
| from skimage import data
from skimage.viewer import ImageViewer
from skimage.viewer.plugins.lineprofile import LineProfile
image = data.chelsea()
viewer = ImageViewer(image)
viewer += LineProfile()
line, rgb_profiles = viewer.show()[0]
| Update variable name for RGB line profile | Update variable name for RGB line profile
| Python | bsd-3-clause | paalge/scikit-image,pratapvardhan/scikit-image,jwiggins/scikit-image,dpshelio/scikit-image,bsipocz/scikit-image,paalge/scikit-image,rjeli/scikit-image,Britefury/scikit-image,GaZ3ll3/scikit-image,blink1073/scikit-image,ClinicalGraphics/scikit-image,juliusbierk/scikit-image,robintw/scikit-image,Hiyorimi/scikit-image,WarrenWeckesser/scikits-image,paalge/scikit-image,blink1073/scikit-image,michaelaye/scikit-image,chriscrosscutler/scikit-image,pratapvardhan/scikit-image,ofgulban/scikit-image,keflavich/scikit-image,bsipocz/scikit-image,dpshelio/scikit-image,WarrenWeckesser/scikits-image,Midafi/scikit-image,warmspringwinds/scikit-image,michaelaye/scikit-image,michaelpacer/scikit-image,ClinicalGraphics/scikit-image,oew1v07/scikit-image,bennlich/scikit-image,SamHames/scikit-image,robintw/scikit-image,ofgulban/scikit-image,Midafi/scikit-image,SamHames/scikit-image,emon10005/scikit-image,SamHames/scikit-image,chriscrosscutler/scikit-image,ajaybhat/scikit-image,Britefury/scikit-image,juliusbierk/scikit-image,bennlich/scikit-image,ajaybhat/scikit-image,rjeli/scikit-image,warmspringwinds/scikit-image,GaZ3ll3/scikit-image,youprofit/scikit-image,youprofit/scikit-image,emon10005/scikit-image,newville/scikit-image,oew1v07/scikit-image,michaelpacer/scikit-image,keflavich/scikit-image,ofgulban/scikit-image,vighneshbirodkar/scikit-image,SamHames/scikit-image,newville/scikit-image,vighneshbirodkar/scikit-image,jwiggins/scikit-image,rjeli/scikit-image,vighneshbirodkar/scikit-image,Hiyorimi/scikit-image | ---
+++
@@ -6,4 +6,4 @@
image = data.chelsea()
viewer = ImageViewer(image)
viewer += LineProfile()
-line, profiles = viewer.show()[0]
+line, rgb_profiles = viewer.show()[0] |
97bdc9679e9162a4ce101824dc37ac0f6e57e3d7 | fanstatic/checksum.py | fanstatic/checksum.py | import os
import hashlib
from datetime import datetime
VCS_NAMES = ['.svn', '.git', '.bzr', '.hg']
IGNORED_EXTENSIONS = ['.swp', '.tmp', '.pyc', '.pyo']
def list_directory(path, include_directories=True):
    """Yield paths below *path*, pruning VCS data and scratch files.

    Directories named in VCS_NAMES are removed from the walk in place, so
    os.walk never descends into them.  Files whose extension appears in
    IGNORED_EXTENSIONS are skipped.  Directory paths themselves are only
    yielded when *include_directories* is true.
    """
    for root, subdirs, filenames in os.walk(path):
        # Prune in place: os.walk honours mutations of its dirs list.
        subdirs[:] = [d for d in subdirs if d not in VCS_NAMES]
        if include_directories:
            yield root
        for filename in filenames:
            ext = os.path.splitext(filename)[1]
            if ext not in IGNORED_EXTENSIONS:
                yield os.path.join(root, filename)
def mtime(path):
    """Return the newest modification time below *path* as an ISO string.

    The timestamp is truncated to 22 characters (millisecond precision).
    """
    latest = 0
    for entry in list_directory(path):
        stamp = os.path.getmtime(entry)
        if stamp > latest:
            latest = stamp
    return datetime.fromtimestamp(latest).isoformat()[:22]
def md5(path):
    """Return a hex MD5 digest over the file names and contents below *path*.

    Paths are visited in sorted order so the digest is deterministic and
    does not depend on the order the filesystem (or os.walk) happens to
    return entries in.
    """
    chcksm = hashlib.md5()
    # Sort the listing: without this, two identical trees can hash
    # differently depending on directory enumeration order.
    for path in sorted(list_directory(path, include_directories=False)):
        # Feed the path itself into the digest so renames change the hash.
        chcksm.update(path)
        try:
            f = open(path, 'rb')
            while True:
                # 256kb chunks to bound memory use.
                # XXX how to optimize chunk size?
                chunk = f.read(0x40000)
                if not chunk:
                    break
                chcksm.update(chunk)
        finally:
            f.close()
    return chcksm.hexdigest()
| import os
import hashlib
from datetime import datetime
VCS_NAMES = ['.svn', '.git', '.bzr', '.hg']
IGNORED_EXTENSIONS = ['.swp', '.tmp', '.pyc', '.pyo']
def list_directory(path, include_directories=True):
    """Yield every path below *path*, skipping VCS data and scratch files.

    Directories listed in VCS_NAMES are pruned from the walk in place, so
    os.walk never descends into them.  Files whose extension is in
    IGNORED_EXTENSIONS are skipped.  Directory paths themselves are only
    yielded when *include_directories* is true.
    """
    # Skip over any VCS directories.
    for root, dirs, files in os.walk(path):
        for dir in VCS_NAMES:
            try:
                # Mutating `dirs` in place prunes the walk below this root.
                dirs.remove(dir)
            except ValueError:
                # This VCS directory is simply not present here.
                pass
        # We are also interested in the directories.
        if include_directories:
            yield os.path.join(root)
        for file in files:
            _, ext = os.path.splitext(file)
            if ext in IGNORED_EXTENSIONS:
                continue
            yield os.path.join(root, file)
def mtime(path):
    """Return the latest modification time below *path* as an ISO string.

    The result is truncated to 22 characters, i.e. millisecond precision.
    """
    latest = 0
    # NOTE: the loop variable deliberately rebinds (shadows) the `path`
    # parameter; each iteration it names one entry of the listing.
    for path in list_directory(path):
        mtime = os.path.getmtime(path)
        latest = max(mtime, latest)
    return datetime.fromtimestamp(latest).isoformat()[:22]
def md5(path):
    """Return a hex MD5 digest over the file names and contents below *path*.

    Entries are visited in sorted order so the digest is stable regardless
    of the order the filesystem returns them in.
    """
    chcksm = hashlib.md5()
    for path in sorted(list(list_directory(path, include_directories=False))):
        # Feed the path itself into the digest so renames change the hash.
        # NOTE(review): on Python 3 this would need path.encode() — confirm
        # which interpreter versions are supported.
        chcksm.update(path)
        try:
            f = open(path, 'rb')
            while True:
                # 256kb chunks.
                # XXX how to optimize chunk size?
                chunk = f.read(0x40000)
                if not chunk:
                    break
                chcksm.update(chunk)
        finally:
            f.close()
    return chcksm.hexdigest()
| Fix given path order for Google App Engine | Fix given path order for Google App Engine
| Python | bsd-3-clause | MiCHiLU/fanstatic-tools,MiCHiLU/fanstatic-gae,MiCHiLU/fanstatic-gae | ---
+++
@@ -33,7 +33,7 @@
def md5(path):
chcksm = hashlib.md5()
- for path in list_directory(path, include_directories=False):
+ for path in sorted(list(list_directory(path, include_directories=False))):
chcksm.update(path)
try:
f = open(path, 'rb') |
d8557db156e277078e10584b2ee32320ac808772 | sentry/client/handlers.py | sentry/client/handlers.py | import logging
import sys
class SentryHandler(logging.Handler):
    """logging.Handler that forwards log records to the Sentry client.

    Python 2 only (uses `print >>` and `except Exception, e` syntax).
    """
    def emit(self, record):
        # Imported lazily to avoid circular imports at module load time.
        from sentry.client.models import get_client
        from sentry.client.middleware import SentryLogMiddleware
        # Fetch the request from a threadlocal variable, if available
        request = getattr(SentryLogMiddleware.thread, 'request', None)
        # Avoid typical config issues by overriding loggers behavior
        if record.name == 'sentry.errors':
            print >> sys.stderr, "Recursive log message sent to SentryHandler"
            print >> sys.stderr, record.message
            return
        try:
            get_client().create_from_record(record, request=request)
        except Exception, e:
            # Never let logging itself raise: report the failure on stderr
            # and swallow the exception.
            print >> sys.stderr, "SentryHandler failure to create log record"
            print >> sys.stderr, record.message
try:
import logbook
except ImportError:
pass
else:
class SentryLogbookHandler(logbook.Handler):
def emit(self, record):
from sentry.client.models import get_client
# Avoid typical config issues by overriding loggers behavior
if record.name == 'sentry.errors':
print >> sys.stderr, "Recursive log message sent to SentryHandler"
print >> sys.stderr, record.message
return
kwargs = dict(
message=record.message,
level=record.level,
logger=record.channel,
data=record.extra,
)
client = get_client()
if record.exc_info:
return client.create_from_exception(record.exc_info, **kwargs)
return client.create_from_text(**kwargs)
| import logging
import sys
class SentryHandler(logging.Handler):
def emit(self, record):
from sentry.client.models import get_client
from sentry.client.middleware import SentryLogMiddleware
# Fetch the request from a threadlocal variable, if available
request = getattr(SentryLogMiddleware.thread, 'request', None)
# Avoid typical config issues by overriding loggers behavior
if record.name == 'sentry.errors':
print >> sys.stderr, "Recursive log message sent to SentryHandler"
print >> sys.stderr, record.message
return
get_client().create_from_record(record, request=request)
try:
import logbook
except ImportError:
pass
else:
class SentryLogbookHandler(logbook.Handler):
def emit(self, record):
from sentry.client.models import get_client
# Avoid typical config issues by overriding loggers behavior
if record.name == 'sentry.errors':
print >> sys.stderr, "Recursive log message sent to SentryHandler"
print >> sys.stderr, record.message
return
kwargs = dict(
message=record.message,
level=record.level,
logger=record.channel,
data=record.extra,
)
client = get_client()
if record.exc_info:
return client.create_from_exception(record.exc_info, **kwargs)
return client.create_from_text(**kwargs)
| Revert "fixed the issue of a client throwing exception - log to stderr and return gracefully" | Revert "fixed the issue of a client throwing exception - log to stderr and return gracefully"
This reverts commit 4cecd6558ec9457d0c3e933023a8a1f77714eee5.
| Python | bsd-3-clause | icereval/raven-python,patrys/opbeat_python,smarkets/raven-python,beeftornado/sentry,gg7/sentry,NickPresta/sentry,JamesMura/sentry,hzy/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,jokey2k/sentry,dirtycoder/opbeat_python,icereval/raven-python,jmagnusson/raven-python,johansteffner/raven-python,mvaled/sentry,JackDanger/sentry,fuziontech/sentry,zenefits/sentry,jokey2k/sentry,JamesMura/sentry,ronaldevers/raven-python,daevaorn/sentry,hzy/raven-python,SilentCircle/sentry,looker/sentry,jean/sentry,korealerts1/sentry,mitsuhiko/sentry,daevaorn/sentry,drcapulet/sentry,JTCunning/sentry,chayapan/django-sentry,nicholasserra/sentry,looker/sentry,WoLpH/django-sentry,nicholasserra/sentry,TedaLIEz/sentry,korealerts1/sentry,lopter/raven-python-old,ifduyue/sentry,ifduyue/sentry,akalipetis/raven-python,Natim/sentry,mvaled/sentry,primepix/django-sentry,nikolas/raven-python,dbravender/raven-python,nicholasserra/sentry,fotinakis/sentry,alex/sentry,JackDanger/sentry,arthurlogilab/raven-python,tarkatronic/opbeat_python,camilonova/sentry,BuildingLink/sentry,ifduyue/sentry,JTCunning/sentry,looker/sentry,akheron/raven-python,mvaled/sentry,imankulov/sentry,wujuguang/sentry,ewdurbin/sentry,alexm92/sentry,dbravender/raven-python,getsentry/raven-python,someonehan/raven-python,ronaldevers/raven-python,imankulov/sentry,recht/raven-python,1tush/sentry,alex/sentry,camilonova/sentry,1tush/sentry,ticosax/opbeat_python,beniwohli/apm-agent-python,zenefits/sentry,rdio/sentry,drcapulet/sentry,akheron/raven-python,ewdurbin/sentry,daikeren/opbeat_python,patrys/opbeat_python,SilentCircle/sentry,jmagnusson/raven-python,jean/sentry,nikolas/raven-python,mvaled/sentry,wong2/sentry,alex/raven,ronaldevers/raven-python,percipient/raven-python,songyi199111/sentry,Photonomie/raven-python,wujuguang/sentry,mitsuhiko/raven,mitsuhiko/sentry,fotinakis/sentry,dcramer/sentry-old,kevinastone/sentry,dirtycoder/opbeat_python,Goldmund-Wyldebeast-Wunderliebe/raven-python,felixbuenemann/sentry,b
oneyao/sentry,johansteffner/raven-python,BuildingLink/sentry,icereval/raven-python,BayanGroup/sentry,nikolas/raven-python,dbravender/raven-python,beniwohli/apm-agent-python,inspirehep/raven-python,ifduyue/sentry,WoLpH/django-sentry,BuildingLink/sentry,imankulov/sentry,ngonzalvez/sentry,vperron/sentry,akalipetis/raven-python,someonehan/raven-python,vperron/sentry,Kryz/sentry,mvaled/sentry,BayanGroup/sentry,pauloschilling/sentry,ewdurbin/raven-python,beeftornado/sentry,fotinakis/sentry,rdio/sentry,pauloschilling/sentry,danriti/raven-python,kevinastone/sentry,jean/sentry,Natim/sentry,ticosax/opbeat_python,dcramer/sentry-old,Kronuz/django-sentry,daikeren/opbeat_python,felixbuenemann/sentry,llonchj/sentry,pauloschilling/sentry,arthurlogilab/raven-python,gg7/sentry,patrys/opbeat_python,lepture/raven-python,mitsuhiko/raven,hongliang5623/sentry,arthurlogilab/raven-python,percipient/raven-python,alexm92/sentry,lepture/raven-python,ngonzalvez/sentry,looker/sentry,llonchj/sentry,NickPresta/sentry,tbarbugli/sentry_fork,camilonova/sentry,tarkatronic/opbeat_python,recht/raven-python,argonemyth/sentry,primepix/django-sentry,getsentry/raven-python,JTCunning/sentry,hongliang5623/sentry,danriti/raven-python,kevinlondon/sentry,fuziontech/sentry,akheron/raven-python,smarkets/raven-python,ewdurbin/raven-python,kevinlondon/sentry,alexm92/sentry,ifduyue/sentry,percipient/raven-python,jean/sentry,patrys/opbeat_python,gencer/sentry,SilentCircle/sentry,jbarbuto/raven-python,songyi199111/sentry,dcramer/sentry-old,wong2/sentry,beni55/sentry,rdio/sentry,BuildingLink/sentry,chayapan/django-sentry,daevaorn/sentry,akalipetis/raven-python,ngonzalvez/sentry,Goldmund-Wyldebeast-Wunderliebe/raven-python,zenefits/sentry,argonemyth/sentry,daevaorn/sentry,jbarbuto/raven-python,fotinakis/sentry,boneyao/sentry,mvaled/sentry,jmp0xf/raven-python,jbarbuto/raven-python,tbarbugli/sentry_fork,ewdurbin/sentry,boneyao/sentry,Natim/sentry,dirtycoder/opbeat_python,arthurlogilab/raven-python,primepix/django-sentry,jb
arbuto/raven-python,drcapulet/sentry,gencer/sentry,Photonomie/raven-python,someonehan/raven-python,beniwohli/apm-agent-python,JamesMura/sentry,NickPresta/sentry,beni55/sentry,Kronuz/django-sentry,JamesMura/sentry,NickPresta/sentry,gencer/sentry,ticosax/opbeat_python,openlabs/raven,Kronuz/django-sentry,hongliang5623/sentry,smarkets/raven-python,gg7/sentry,jmagnusson/raven-python,hzy/raven-python,tarkatronic/opbeat_python,kevinlondon/sentry,JamesMura/sentry,WoLpH/django-sentry,beniwohli/apm-agent-python,collective/mr.poe,danriti/raven-python,korealerts1/sentry,BuildingLink/sentry,Photonomie/raven-python,jean/sentry,gencer/sentry,icereval/raven-python,alex/sentry,JackDanger/sentry,beni55/sentry,1tush/sentry,Kryz/sentry,johansteffner/raven-python,SilentCircle/sentry,felixbuenemann/sentry,smarkets/raven-python,recht/raven-python,kevinastone/sentry,gencer/sentry,inspirehep/raven-python,inspirehep/raven-python,BayanGroup/sentry,TedaLIEz/sentry,lepture/raven-python,wong2/sentry,beeftornado/sentry,inspirehep/raven-python,getsentry/raven-python,llonchj/sentry,daikeren/opbeat_python,looker/sentry,nikolas/raven-python,jmp0xf/raven-python,ewdurbin/raven-python,TedaLIEz/sentry,jokey2k/sentry,rdio/sentry,jmp0xf/raven-python,Kryz/sentry,tbarbugli/sentry_fork,zenefits/sentry,chayapan/django-sentry,songyi199111/sentry,argonemyth/sentry,wujuguang/sentry,fuziontech/sentry,zenefits/sentry,Goldmund-Wyldebeast-Wunderliebe/raven-python,vperron/sentry | ---
+++
@@ -15,11 +15,7 @@
print >> sys.stderr, record.message
return
- try:
- get_client().create_from_record(record, request=request)
- except Exception, e:
- print >> sys.stderr, "SentryHandler failure to create log record"
- print >> sys.stderr, record.message
+ get_client().create_from_record(record, request=request)
try:
import logbook |
199aee78cb494169eb8b73fbb65de5ae40a5d436 | other/wrapping-cpp/swig/c++/test_mylib.py | other/wrapping-cpp/swig/c++/test_mylib.py | import os
import mylib
os.system('make all')
def test_squared():
assert 16. == mylib.squared(4)
def test_myfunction():
assert 16. == mylib.myfunction(mylib.squared, 4)
os.system('make clean')
| import os
import pytest
@pytest.fixture
def setup(request):
def teardown():
print("Running make clean")
os.system('make clean')
print("Completed finaliser")
request.addfinalizer(teardown)
os.system('make clean')
os.system('make all')
def test_squared(setup):
import mylib
assert 16. == mylib.squared(4)
def test_myfunction(setup):
import mylib
assert 16. == mylib.myfunction(mylib.squared, 4)
| Use pytest fixture to deal with compilation | Use pytest fixture to deal with compilation
| Python | bsd-2-clause | fangohr/oommf-python,ryanpepper/oommf-python,ryanpepper/oommf-python,fangohr/oommf-python,ryanpepper/oommf-python,ryanpepper/oommf-python,fangohr/oommf-python | ---
+++
@@ -1,14 +1,24 @@
import os
-import mylib
-os.system('make all')
+
+import pytest
+
+@pytest.fixture
+def setup(request):
+ def teardown():
+ print("Running make clean")
+ os.system('make clean')
+ print("Completed finaliser")
+ request.addfinalizer(teardown)
+ os.system('make clean')
+ os.system('make all')
-def test_squared():
+def test_squared(setup):
+ import mylib
assert 16. == mylib.squared(4)
-def test_myfunction():
+def test_myfunction(setup):
+ import mylib
assert 16. == mylib.myfunction(mylib.squared, 4)
-
-os.system('make clean') |
08a09d1b258eb775f641c5edf3aba4ac1c522652 | build/get_landmines.py | build/get_landmines.py | #!/usr/bin/env python
# Copyright 2014 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This file emits the list of reasons why a particular build needs to be clobbered
(or a list of 'landmines').
"""
import sys
def main():
  """
  ALL LANDMINES ARE EMITTED FROM HERE.
  """
  # Each printed line is one clobber reason; appending a new line here
  # forces a clean rebuild for everyone who syncs past this change.
  print 'Need to clobber after ICU52 roll.'
  print 'Landmines test.'
  print 'Activating MSVS 2013.'
  print 'Revert activation of MSVS 2013.'
  print 'Activating MSVS 2013 again.'
  print 'Clobber after ICU roll.'
  return 0
if __name__ == '__main__':
sys.exit(main())
| #!/usr/bin/env python
# Copyright 2014 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This file emits the list of reasons why a particular build needs to be clobbered
(or a list of 'landmines').
"""
import sys
def main():
"""
ALL LANDMINES ARE EMITTED FROM HERE.
"""
print 'Need to clobber after ICU52 roll.'
print 'Landmines test.'
print 'Activating MSVS 2013.'
print 'Revert activation of MSVS 2013.'
print 'Activating MSVS 2013 again.'
print 'Clobber after ICU roll.'
print 'Clobber after change to src/messages.h.'
return 0
if __name__ == '__main__':
sys.exit(main())
| Add landmine after change to messages.h | Add landmine after change to messages.h
BUG=N
LOG=N
TBR=machenbach@chromium.org
Review URL: https://codereview.chromium.org/1174523004
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#28875}
| Python | mit | UniversalFuture/moosh,UniversalFuture/moosh,UniversalFuture/moosh,UniversalFuture/moosh | ---
+++
@@ -21,6 +21,7 @@
print 'Revert activation of MSVS 2013.'
print 'Activating MSVS 2013 again.'
print 'Clobber after ICU roll.'
+ print 'Clobber after change to src/messages.h.'
return 0
|
b1906e66dc9f7ce7d164d0df2622e8c2213e1692 | tests/query_test/test_chars.py | tests/query_test/test_chars.py | #!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
import logging
import pytest
from copy import copy
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestStringQueries(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
def setup_method(self, method):
self.__cleanup_char_tables()
self.__create_char_tables()
def teardown_method(self, method):
self.__cleanup_char_tables()
def __cleanup_char_tables(self):
self.client.execute('drop table if exists functional.test_char_tmp');
self.client.execute('drop table if exists functional.test_varchar_tmp');
def __create_char_tables(self):
self.client.execute(
'create table if not exists functional.test_varchar_tmp (vc varchar(5))')
self.client.execute(
'create table if not exists functional.test_char_tmp (c char(5))')
@classmethod
def add_test_dimensions(cls):
super(TestStringQueries, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[True]))
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format in ['text'])
def test_varchar(self, vector):
self.run_test_case('QueryTest/chars', vector)
| #!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
import logging
import pytest
from copy import copy
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestStringQueries(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
def setup_method(self, method):
self.__cleanup_char_tables()
self.__create_char_tables()
def teardown_method(self, method):
self.__cleanup_char_tables()
def __cleanup_char_tables(self):
self.client.execute('drop table if exists functional.test_char_tmp');
self.client.execute('drop table if exists functional.test_varchar_tmp');
def __create_char_tables(self):
self.client.execute(
'create table if not exists functional.test_varchar_tmp (vc varchar(5))')
self.client.execute(
'create table if not exists functional.test_char_tmp (c char(5))')
@classmethod
def add_test_dimensions(cls):
super(TestStringQueries, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[True]))
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format in ['text'] and
v.get_value('table_format').compression_codec in ['none'])
def test_varchar(self, vector):
self.run_test_case('QueryTest/chars', vector)
| Fix char test to only run on test/none. | Fix char test to only run on test/none.
Change-Id: I8f5ac5a6e7399ce2fdbe78d07ae24deaa1d7532d
Reviewed-on: http://gerrit.sjc.cloudera.com:8080/4326
Tested-by: jenkins
Reviewed-by: Alex Behm <fe1626037acfc2dc542d2aa723a6d14f2464a20c@cloudera.com>
| Python | apache-2.0 | andybab/Impala,gistic/PublicSpatialImpala,ImpalaToGo/ImpalaToGo,andybab/Impala,andybab/Impala,andybab/Impala,gistic/PublicSpatialImpala,placrosse/ImpalaToGo,andybab/Impala,gistic/PublicSpatialImpala,AtScaleInc/Impala,placrosse/ImpalaToGo,andybab/Impala,AtScaleInc/Impala,placrosse/ImpalaToGo,ImpalaToGo/ImpalaToGo,ImpalaToGo/ImpalaToGo,gistic/PublicSpatialImpala,placrosse/ImpalaToGo,ImpalaToGo/ImpalaToGo,AtScaleInc/Impala,gistic/PublicSpatialImpala,placrosse/ImpalaToGo,ImpalaToGo/ImpalaToGo,gistic/PublicSpatialImpala,AtScaleInc/Impala,ImpalaToGo/ImpalaToGo,AtScaleInc/Impala,AtScaleInc/Impala,placrosse/ImpalaToGo | ---
+++
@@ -36,7 +36,8 @@
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[True]))
cls.TestMatrix.add_constraint(lambda v:\
- v.get_value('table_format').file_format in ['text'])
+ v.get_value('table_format').file_format in ['text'] and
+ v.get_value('table_format').compression_codec in ['none'])
def test_varchar(self, vector):
self.run_test_case('QueryTest/chars', vector) |
d2cbcad65914ccd26b57dcec12c048c3524ecdc4 | src/cclib/__init__.py | src/cclib/__init__.py | # -*- coding: utf-8 -*-
#
# Copyright (c) 2016, the cclib development team
#
# This file is part of cclib (http://cclib.github.io) and is distributed under
# the terms of the BSD 3-Clause License.
"""A library for parsing and interpreting results from computational chemistry packages.
The goals of cclib are centered around the reuse of data obtained from various
computational chemistry programs and typically contained in output files. Specifically,
cclib extracts (parses) data from the output files generated by multiple programs
and provides a consistent interface to access them.
Currently supported programs:
ADF, Firefly, GAMESS(US), GAMESS-UK, Gaussian,
Jaguar, Molpro, MOPAC, NWChem, ORCA, Psi, Q-Chem
Another aim is to facilitate the implementation of algorithms that are not specific
to any particular computational chemistry package and to maximise interoperability
with other open source computational chemistry and cheminformatic software libraries.
To this end, cclib provides a number of bridges to help transfer data to other libraries
as well as example methods that take parsed data as input.
"""
__version__ = "1.5"
from . import parser
from . import progress
from . import method
from . import bridge
from . import io
# The test module can be imported if it was installed with cclib.
try:
from . import test
except ImportError:
pass
| # -*- coding: utf-8 -*-
#
# Copyright (c) 2016, the cclib development team
#
# This file is part of cclib (http://cclib.github.io) and is distributed under
# the terms of the BSD 3-Clause License.
"""A library for parsing and interpreting results from computational chemistry packages.
The goals of cclib are centered around the reuse of data obtained from various
computational chemistry programs and typically contained in output files. Specifically,
cclib extracts (parses) data from the output files generated by multiple programs
and provides a consistent interface to access them.
Currently supported programs:
ADF, Firefly, GAMESS(US), GAMESS-UK, Gaussian,
Jaguar, Molpro, MOPAC, NWChem, ORCA, Psi, Q-Chem
Another aim is to facilitate the implementation of algorithms that are not specific
to any particular computational chemistry package and to maximise interoperability
with other open source computational chemistry and cheminformatic software libraries.
To this end, cclib provides a number of bridges to help transfer data to other libraries
as well as example methods that take parsed data as input.
"""
__version__ = "1.5"
from . import parser
from . import progress
from . import method
from . import bridge
from . import io
# The test module can be imported if it was installed with cclib.
try:
from . import test
except ImportError:
pass
# The objects below constitute our public API. These names will not change
# over time. Names in the sub-modules will typically also be backwards
# compatible, but may sometimes change when code is moved around.
ccopen = io.ccopen
| Add alias cclib.ccopen for easy access | Add alias cclib.ccopen for easy access
| Python | bsd-3-clause | langner/cclib,gaursagar/cclib,ATenderholt/cclib,cclib/cclib,berquist/cclib,berquist/cclib,langner/cclib,berquist/cclib,ATenderholt/cclib,gaursagar/cclib,langner/cclib,cclib/cclib,cclib/cclib | ---
+++
@@ -36,3 +36,8 @@
from . import test
except ImportError:
pass
+
+# The objects below constitute our public API. These names will not change
+# over time. Names in the sub-modules will typically also be backwards
+# compatible, but may sometimes change when code is moved around.
+ccopen = io.ccopen |
fa08cec02a0cddfbb2fca4962b01044c323f3737 | test/functional/test_framework/__init__.py | test/functional/test_framework/__init__.py | # Copyright (c) 2018 The Bitcoin Unlimited developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.ionlib.ionlib import init, bin2hex, signTxInput, randombytes, pubkey, spendscript, addrbin, txid, SIGHASH_ALL, SIGHASH_NONE, SIGHASH_SINGLE, SIGHASH_ANYONECANPAY, ScriptMachine, ScriptFlags, ScriptError, Error, ION
| # Copyright (c) 2018 The Bitcoin Unlimited developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.ionlib import init, bin2hex, signTxInput, randombytes, pubkey, spendscript, addrbin, txid, SIGHASH_ALL, SIGHASH_NONE, SIGHASH_SINGLE, SIGHASH_ANYONECANPAY, ScriptMachine, ScriptFlags, ScriptError, Error, ION
| Remove extra iolib fron init.py | Remove extra iolib fron init.py
| Python | mit | ionomy/ion,ionomy/ion,ionomy/ion,ionomy/ion,ionomy/ion,ionomy/ion | ---
+++
@@ -2,4 +2,4 @@
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-from test_framework.ionlib.ionlib import init, bin2hex, signTxInput, randombytes, pubkey, spendscript, addrbin, txid, SIGHASH_ALL, SIGHASH_NONE, SIGHASH_SINGLE, SIGHASH_ANYONECANPAY, ScriptMachine, ScriptFlags, ScriptError, Error, ION
+from test_framework.ionlib import init, bin2hex, signTxInput, randombytes, pubkey, spendscript, addrbin, txid, SIGHASH_ALL, SIGHASH_NONE, SIGHASH_SINGLE, SIGHASH_ANYONECANPAY, ScriptMachine, ScriptFlags, ScriptError, Error, ION |
5fbf5bac84a7ededde99e109d206681af99c112c | sort/bubble_sort_optimized/python/bubble_sort_optimized.py | sort/bubble_sort_optimized/python/bubble_sort_optimized.py | class Solution:
def bubbleSortOptimized(self, nums: List[int]) -> None:
if len(nums) == 1:
return nums
else:
swapped = False
while not swapped:
swapped = True
for i in range(0, len(nums)-1):
if nums[i] > nums[i+1]:
swapped = False
nums[i], nums[i+1] = nums[i+1], nums[i] | class Solution:
def bubbleSortOptimized(self, nums):
if len(nums) == 1:
return nums
else:
swapped = False
while not swapped:
swapped = True
for i in range(0, len(nums)-1):
if nums[i] > nums[i+1]:
swapped = False
nums[i], nums[i+1] = nums[i+1], nums[i] | Add dijkstras algorithm and depth first search in python | Add dijkstras algorithm and depth first search in python
| Python | cc0-1.0 | ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms | ---
+++
@@ -1,5 +1,5 @@
class Solution:
- def bubbleSortOptimized(self, nums: List[int]) -> None:
+ def bubbleSortOptimized(self, nums):
if len(nums) == 1:
return nums
else: |
807f0e14acf7685c2655c6843423104ffc9bb015 | tests/integration/test_repos_deployment.py | tests/integration/test_repos_deployment.py | import github3
from .helper import IntegrationHelper
def find(func, iterable):
    """Return the first item of *iterable* for which func(item) is true.

    filter() returns a list on Python 2, which next() cannot consume
    directly, so wrap it in iter() to work on both Python 2 and 3.
    Raises StopIteration when nothing matches.
    """
    return next(iter(filter(func, iterable)))
class TestDeployment(IntegrationHelper):
    """Integration tests for github3's repository Deployment objects.

    All HTTP interactions are replayed from pre-recorded betamax cassettes,
    so the tests never hit the live GitHub API.
    """
    def test_create_status(self):
        """
        Test that using a Deployment instance, a user can create a status.
        """
        # Creating a status requires an authenticated session.
        self.basic_login()
        cassette_name = self.cassette_name('create_status')
        with self.recorder.use_cassette(cassette_name):
            repository = self.gh.repository('sigmavirus24', 'github3.py')
            assert repository is not None
            # Deployment id 801 is a known fixture present in the cassette.
            deployment = find(lambda d: d.id == 801,
                              repository.iter_deployments())
            assert deployment is not None
            status = deployment.create_status('success')
        assert isinstance(status, github3.repos.deployment.DeploymentStatus)
    def test_iter_statuses(self):
        """
        Test that using a Deployment instance, a user can retrieve statuses.
        """
        cassette_name = self.cassette_name('statuses')
        with self.recorder.use_cassette(cassette_name):
            repository = self.gh.repository('sigmavirus24', 'github3.py')
            assert repository is not None
            deployment = find(lambda d: d.id == 801,
                              repository.iter_deployments())
            assert deployment is not None
            # Cap at 5 statuses; each must be a DeploymentStatus instance.
            statuses = list(deployment.iter_statuses(5))
            for status in statuses:
                assert isinstance(status,
                                  github3.repos.deployment.DeploymentStatus)
| import github3
from .helper import IntegrationHelper
def find(func, iterable):
    """Return the first element of *iterable* satisfying *func*.

    Raises StopIteration when no element matches.
    """
    return next(item for item in iterable if func(item))
class TestDeployment(IntegrationHelper):
def test_create_status(self):
"""
Test that using a Deployment instance, a user can create a status.
"""
self.basic_login()
cassette_name = self.cassette_name('create_status')
with self.recorder.use_cassette(cassette_name):
repository = self.gh.repository('sigmavirus24', 'github3.py')
assert repository is not None
deployment = find(lambda d: d.id == 801,
repository.iter_deployments())
assert deployment is not None
status = deployment.create_status('success')
assert isinstance(status, github3.repos.deployment.DeploymentStatus)
def test_iter_statuses(self):
"""
Test that using a Deployment instance, a user can retrieve statuses.
"""
cassette_name = self.cassette_name('statuses')
with self.recorder.use_cassette(cassette_name):
repository = self.gh.repository('sigmavirus24', 'github3.py')
assert repository is not None
deployment = find(lambda d: d.id == 801,
repository.iter_deployments())
assert deployment is not None
statuses = list(deployment.iter_statuses(5))
for status in statuses:
assert isinstance(status,
github3.repos.deployment.DeploymentStatus)
| Fix test helper once and for all | Fix test helper once and for all
| Python | bsd-3-clause | icio/github3.py,jim-minter/github3.py,itsmemattchung/github3.py,agamdua/github3.py,wbrefvem/github3.py,christophelec/github3.py,balloob/github3.py,h4ck3rm1k3/github3.py,krxsky/github3.py,degustaf/github3.py,ueg1990/github3.py,sigmavirus24/github3.py | ---
+++
@@ -4,7 +4,7 @@
def find(func, iterable):
- return next(filter(func, iterable))
+ return next(iter(filter(func, iterable)))
class TestDeployment(IntegrationHelper): |
be202ab0f991dad50a4eaf200f4a719c99b8bab2 | tests/middleware/test_https_redirect.py | tests/middleware/test_https_redirect.py | from starlette.applications import Starlette
from starlette.middleware.httpsredirect import HTTPSRedirectMiddleware
from starlette.responses import PlainTextResponse
from starlette.testclient import TestClient
def test_https_redirect_middleware():
app = Starlette()
app.add_middleware(HTTPSRedirectMiddleware)
@app.route("/")
def homepage(request):
return PlainTextResponse("OK", status_code=200)
client = TestClient(app, base_url="https://testserver")
response = client.get("/")
assert response.status_code == 200
client = TestClient(app)
response = client.get("/", allow_redirects=False)
assert response.status_code == 308
assert response.headers["location"] == "https://testserver/"
client = TestClient(app, base_url="http://testserver:80")
response = client.get("/", allow_redirects=False)
assert response.status_code == 308
assert response.headers["location"] == "https://testserver/"
client = TestClient(app, base_url="http://testserver:443")
response = client.get("/", allow_redirects=False)
assert response.status_code == 308
assert response.headers["location"] == "https://testserver/"
client = TestClient(app, base_url="http://testserver:123")
response = client.get("/", allow_redirects=False)
assert response.status_code == 308
assert response.headers["location"] == "https://testserver:123/"
| from starlette.applications import Starlette
from starlette.middleware.httpsredirect import HTTPSRedirectMiddleware
from starlette.responses import PlainTextResponse
from starlette.testclient import TestClient
def test_https_redirect_middleware():
app = Starlette()
app.add_middleware(HTTPSRedirectMiddleware)
@app.route("/")
def homepage(request):
return PlainTextResponse("OK", status_code=200)
client = TestClient(app, base_url="https://testserver")
response = client.get("/")
assert response.status_code == 200
client = TestClient(app)
response = client.get("/", allow_redirects=False)
assert response.status_code == 307
assert response.headers["location"] == "https://testserver/"
client = TestClient(app, base_url="http://testserver:80")
response = client.get("/", allow_redirects=False)
assert response.status_code == 307
assert response.headers["location"] == "https://testserver/"
client = TestClient(app, base_url="http://testserver:443")
response = client.get("/", allow_redirects=False)
assert response.status_code == 307
assert response.headers["location"] == "https://testserver/"
client = TestClient(app, base_url="http://testserver:123")
response = client.get("/", allow_redirects=False)
assert response.status_code == 307
assert response.headers["location"] == "https://testserver:123/"
| Fix test case for https redirects | Fix test case for https redirects
| Python | bsd-3-clause | encode/starlette,encode/starlette | ---
+++
@@ -19,20 +19,20 @@
client = TestClient(app)
response = client.get("/", allow_redirects=False)
- assert response.status_code == 308
+ assert response.status_code == 307
assert response.headers["location"] == "https://testserver/"
client = TestClient(app, base_url="http://testserver:80")
response = client.get("/", allow_redirects=False)
- assert response.status_code == 308
+ assert response.status_code == 307
assert response.headers["location"] == "https://testserver/"
client = TestClient(app, base_url="http://testserver:443")
response = client.get("/", allow_redirects=False)
- assert response.status_code == 308
+ assert response.status_code == 307
assert response.headers["location"] == "https://testserver/"
client = TestClient(app, base_url="http://testserver:123")
response = client.get("/", allow_redirects=False)
- assert response.status_code == 308
+ assert response.status_code == 307
assert response.headers["location"] == "https://testserver:123/" |
f0daa1674ef6fd59173fe7d1904f8598f7418f8b | tests/mock_vws/test_inactive_project.py | tests/mock_vws/test_inactive_project.py | """
Tests for inactive projects.
"""
| """
Tests for inactive projects.
"""
import pytest
from tests.utils import VuforiaServerCredentials
@pytest.mark.usefixtures('verify_mock_vuforia')
class TestInactiveProject:
"""
Tests for inactive projects.
"""
def test_inactive_project(
self,
verify_mock_vuforia_inactive: VuforiaServerCredentials,
) -> None:
"""
X
"""
| Add stub for inactive project test | Add stub for inactive project test
| Python | mit | adamtheturtle/vws-python,adamtheturtle/vws-python | ---
+++
@@ -1,3 +1,22 @@
"""
Tests for inactive projects.
"""
+
+import pytest
+
+from tests.utils import VuforiaServerCredentials
+
+
+@pytest.mark.usefixtures('verify_mock_vuforia')
+class TestInactiveProject:
+ """
+ Tests for inactive projects.
+ """
+
+ def test_inactive_project(
+ self,
+ verify_mock_vuforia_inactive: VuforiaServerCredentials,
+ ) -> None:
+ """
+ X
+ """ |
bcc962313107ec4714b2e53d793f172c4a77576f | src/shipit_code_coverage/shipit_code_coverage/coveralls.py | src/shipit_code_coverage/shipit_code_coverage/coveralls.py | import requests
def upload(data):
r = requests.post('https://coveralls.io/api/v1/jobs', files={
'json_file': data
})
try:
print(r.json())
except ValueError:
raise Exception('Failure to submit data. Response [%s]: %s' % (r.status_code, r.text)) # NOQA
| import gzip
import requests
def upload(data):
r = requests.post('https://coveralls.io/api/v1/jobs', files={
'json_file': ('json_file', gzip.compress(data), 'gzip/json')
})
try:
print(r.json())
except ValueError:
raise Exception('Failure to submit data. Response [%s]: %s' % (r.status_code, r.text)) # NOQA
| Compress data sent to Coveralls using gzip | shipit_code_coverage: Compress data sent to Coveralls using gzip
| Python | mpl-2.0 | lundjordan/services,garbas/mozilla-releng-services,mozilla-releng/services,La0/mozilla-relengapi,La0/mozilla-relengapi,lundjordan/services,garbas/mozilla-releng-services,lundjordan/services,srfraser/services,mozilla-releng/services,srfraser/services,garbas/mozilla-releng-services,mozilla-releng/services,lundjordan/services,mozilla-releng/services,srfraser/services,garbas/mozilla-releng-services,La0/mozilla-relengapi,La0/mozilla-relengapi,srfraser/services | ---
+++
@@ -1,9 +1,10 @@
+import gzip
import requests
def upload(data):
r = requests.post('https://coveralls.io/api/v1/jobs', files={
- 'json_file': data
+ 'json_file': ('json_file', gzip.compress(data), 'gzip/json')
})
try: |
0e7be2adf1101ae842dddb3db3217957a8e5957f | iati/core/rulesets.py | iati/core/rulesets.py | """A module containg a core representation of IATI Rulesets."""
class Ruleset(object):
"""Representation of a Ruleset as defined within the IATI SSOT."""
pass
class Rule(object):
"""Representation of a Rule contained within a Ruleset.
Acts as a base class for specific types of Rule that actually do something.
"""
pass
class NoMoreThanOne(Rule):
"""Representation of a Rule that checks that there is no more than one Element matching a given XPath."""
pass
| """A module containg a core representation of IATI Rulesets.
Todo:
Implement Rulesets (and Rules). Likely worth completing the Codelist implementation first since the two will be similar.
"""
class Ruleset(object):
"""Representation of a Ruleset as defined within the IATI SSOT."""
pass
class Rule(object):
"""Representation of a Rule contained within a Ruleset.
Acts as a base class for specific types of Rule that actually do something.
"""
pass
class NoMoreThanOne(Rule):
"""Representation of a Rule that checks that there is no more than one Element matching a given XPath."""
pass
| Add a ruleset module TODO | Add a ruleset module TODO
| Python | mit | IATI/iati.core,IATI/iati.core | ---
+++
@@ -1,4 +1,8 @@
-"""A module containg a core representation of IATI Rulesets."""
+"""A module containg a core representation of IATI Rulesets.
+
+Todo:
+ Implement Rulesets (and Rules). Likely worth completing the Codelist implementation first since the two will be similar.
+"""
class Ruleset(object): |
bf46ceea3c77f87768be7773fe2b26112a151606 | src/waypoints_reader/scripts/yaml_reader.py | src/waypoints_reader/scripts/yaml_reader.py | #!/usr/bin/env python
# coding UTF-8
import yaml
import rospy
from goal_sender_msgs.srv import ApplyGoals
from goal_sender_msgs.msg import GoalSequence
from goal_sender_msgs.msg import Waypoint
def read_yaml(path):
f = open(path, 'r')
waypoints = yaml.load(f)
f.close()
return waypoints
def get_waypoints():
sequence = GoalSequence()
for waypoint_data in read_yaml(rospy.get_param('~path', 'waypoints.yaml')):
waypoint = Waypoint(name = waypoint_data.get('name', ""),
x = waypoint_data['x'], # required
y = waypoint_data['y'], # required
radius = waypoint_data['radius'], # required
importance = waypoint_data.get('importance', 0),
drag = waypoint_data.get('drag', 0))
sequence.waypoints.append(waypoint)
return sequence
if __name__ == '__main__':
rospy.init_node('yaml_reader', anonymous=True)
goal_sequence = get_waypoints()
rospy.wait_for_service('apply_goals')
try:
apply_goals = rospy.ServiceProxy('apply_goals', ApplyGoals)
resp = apply_goals(goal_sequence)
print resp.message
except rospy.ServiceException, e:
print e
| #!/usr/bin/env python
# coding UTF-8
import yaml
import rospy
from goal_sender_msgs.srv import ApplyGoals
from goal_sender_msgs.msg import GoalSequence
from goal_sender_msgs.msg import Waypoint
def read_yaml(path):
f = open(path, 'r')
waypoints = yaml.load(f)
f.close()
return waypoints
def get_waypoints():
sequence = GoalSequence()
for waypoint_data in read_yaml(rospy.get_param('~path', 'waypoints.yaml')):
waypoint = Waypoint(name = waypoint_data.get('name', ""),
x = waypoint_data['x'], # required
y = waypoint_data['y'], # required
radius = waypoint_data['radius'], # required
importance = waypoint_data.get('importance', 0),
drag = waypoint_data.get('drag', 0))
sequence.waypoints.append(waypoint)
return sequence
if __name__ == '__main__':
rospy.init_node('yaml_reader', anonymous=True)
goal_sequence = get_waypoints()
rospy.wait_for_service('apply_goals')
try:
apply_goals = rospy.ServiceProxy('apply_goals', ApplyGoals)
resp = apply_goals(goal_sequence)
print resp.message
except rospy.ServiceException, e:
print e
except rospy.ROSInterruptException:
pass
| Add errer handling for python code | Add errer handling for python code
| Python | bsd-3-clause | CIR-KIT/fifth_robot_pkg,CIR-KIT/fifth_robot_pkg,CIR-KIT/fifth_robot_pkg | ---
+++
@@ -36,3 +36,5 @@
print resp.message
except rospy.ServiceException, e:
print e
+ except rospy.ROSInterruptException:
+ pass |
a977f0540410e5994405f3f6e579e866f1e3c119 | steel/bits/__init__.py | steel/bits/__init__.py | from steel.common import Remainder
from steel.common.fields import *
from steel.byte.base import *
from steel.fields import *
| from steel.common import Remainder
from steel.common.fields import *
from steel.base import *
from steel.fields import *
| Update the bits package to use new imports | Update the bits package to use new imports
| Python | bsd-3-clause | gulopine/steel | ---
+++
@@ -1,5 +1,5 @@
from steel.common import Remainder
from steel.common.fields import *
-from steel.byte.base import *
+from steel.base import *
from steel.fields import * |
76ca05a20fcdf36dc81d3ec98048f89f98325d68 | imhotep_jsl/plugin.py | imhotep_jsl/plugin.py | from imhotep.tools import Tool
from collections import defaultdict
import re
class JSL(Tool):
regex = re.compile(
r'^(?P<type>[WE]) '
r'(?P<filename>.*?) L(?P<line_number>\d+): (?P<message>.*)$')
def invoke(self, dirname, filenames=set()):
retval = defaultdict(lambda: defaultdict(list))
cmd = 'find %s -name "*.js" | xargs jsl' % dirname
output = self.executor(cmd)
for line in output.split('\n'):
match = self.regex.search(line)
if match is None:
continue
message = '%s: %s' % (match.group('type'), match.group('message'))
filename = match.group('filename')[len(dirname) + 1:]
retval[filename][match.group('line_number')].append(message)
return retval
| from imhotep.tools import Tool
from collections import defaultdict
import re
class JSL(Tool):
regex = re.compile(
r'^(?P<type>[WE]) '
r'(?P<filename>.*?) L(?P<line_number>\d+): (?P<message>.*)$')
def invoke(self, dirname, filenames=set()):
retval = defaultdict(lambda: defaultdict(list))
if len(filenames) == 0:
cmd = 'find %s -name "*.js" | xargs jsl' % dirname
else:
js_files = []
for filename in filenames:
if '.js' in filename:
js_files.append("%s/%s" % (dirname, filename))
cmd = 'jsl %s' % ' '.join(js_files)
output = self.executor(cmd)
for line in output.split('\n'):
match = self.regex.search(line)
if match is None:
continue
message = '%s: %s' % (match.group('type'), match.group('message'))
filename = match.group('filename')[len(dirname) + 1:]
retval[filename][match.group('line_number')].append(message)
return retval
| Add support for imhotep to pass a list of filenames. | Add support for imhotep to pass a list of filenames.
| Python | mit | hayes/imhotep_jsl | ---
+++
@@ -10,8 +10,14 @@
def invoke(self, dirname, filenames=set()):
retval = defaultdict(lambda: defaultdict(list))
-
- cmd = 'find %s -name "*.js" | xargs jsl' % dirname
+ if len(filenames) == 0:
+ cmd = 'find %s -name "*.js" | xargs jsl' % dirname
+ else:
+ js_files = []
+ for filename in filenames:
+ if '.js' in filename:
+ js_files.append("%s/%s" % (dirname, filename))
+ cmd = 'jsl %s' % ' '.join(js_files)
output = self.executor(cmd)
for line in output.split('\n'):
match = self.regex.search(line) |
2c357a54e30eecb1d7b717be3ed774dcfecc2814 | src/stratis_cli/_actions/_stratis.py | src/stratis_cli/_actions/_stratis.py | # Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Miscellaneous actions about stratis.
"""
from .._stratisd_constants import RedundancyCodes
from ._connection import get_object
from ._constants import TOP_OBJECT
from ._data import Manager
class StratisActions():
"""
Stratis actions.
"""
@staticmethod
def list_stratisd_redundancy(namespace):
"""
List the stratisd redundancy designations.
"""
# pylint: disable=unused-argument
for code in RedundancyCodes:
print("%s: %d" % (code.name, code.value))
@staticmethod
def list_stratisd_version(namespace):
"""
List the stratisd version.
"""
# pylint: disable=unused-argument
print("%s" % Manager.Properties.Version.Get(get_object(TOP_OBJECT)))
| # Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Miscellaneous actions about stratis.
"""
from .._stratisd_constants import RedundancyCodes
from ._connection import get_object
from ._constants import TOP_OBJECT
from ._data import Manager
class StratisActions():
"""
Stratis actions.
"""
@staticmethod
def list_stratisd_redundancy(_namespace):
"""
List the stratisd redundancy designations.
"""
for code in RedundancyCodes:
print("%s: %d" % (code.name, code.value))
@staticmethod
def list_stratisd_version(_namespace):
"""
List the stratisd version.
"""
print("%s" % Manager.Properties.Version.Get(get_object(TOP_OBJECT)))
| Use '_' prefix instead of disabling pylint unused-argument lint | Use '_' prefix instead of disabling pylint unused-argument lint
It is more precise to mark the unused parameters this way.
Signed-off-by: mulhern <7b51bcf507bcd7afb72bf8663752c0ddbeb517f6@redhat.com>
| Python | apache-2.0 | stratis-storage/stratis-cli,stratis-storage/stratis-cli | ---
+++
@@ -28,18 +28,16 @@
"""
@staticmethod
- def list_stratisd_redundancy(namespace):
+ def list_stratisd_redundancy(_namespace):
"""
List the stratisd redundancy designations.
"""
- # pylint: disable=unused-argument
for code in RedundancyCodes:
print("%s: %d" % (code.name, code.value))
@staticmethod
- def list_stratisd_version(namespace):
+ def list_stratisd_version(_namespace):
"""
List the stratisd version.
"""
- # pylint: disable=unused-argument
print("%s" % Manager.Properties.Version.Get(get_object(TOP_OBJECT))) |
77ff64c858aa21c9651ccf58c95b739db45cd97b | Instanssi/kompomaatti/templatetags/kompomaatti_base_tags.py | Instanssi/kompomaatti/templatetags/kompomaatti_base_tags.py | # -*- coding: utf-8 -*-
from django import template
from Instanssi.kompomaatti.models import Compo, Competition
register = template.Library()
@register.inclusion_tag('kompomaatti/compo_nav_items.html')
def render_base_compos_nav(event_id):
return {
'event_id': event_id,
'compos': Compo.objects.filter(active=True, event_id=event_id)
}
@register.inclusion_tag('kompomaatti/competition_nav_items.html')
def render_base_competitions_nav(event_id):
return {
'event_id': event_id,
'competitions': Competition.objects.filter(active=True, event_id=event_id)
}
| # -*- coding: utf-8 -*-
from django import template
from Instanssi.kompomaatti.models import Compo, Competition
register = template.Library()
@register.inclusion_tag('kompomaatti/tags/compo_nav_items.html')
def render_base_compos_nav(event_id):
return {
'event_id': event_id,
'compos': Compo.objects.filter(active=True, event_id=event_id)
}
@register.inclusion_tag('kompomaatti/tags/competition_nav_items.html')
def render_base_competitions_nav(event_id):
return {
'event_id': event_id,
'competitions': Competition.objects.filter(active=True, event_id=event_id)
}
| Update tags to use new template subdirectory. | kompomaatti: Update tags to use new template subdirectory.
| Python | mit | Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org | ---
+++
@@ -5,14 +5,14 @@
register = template.Library()
-@register.inclusion_tag('kompomaatti/compo_nav_items.html')
+@register.inclusion_tag('kompomaatti/tags/compo_nav_items.html')
def render_base_compos_nav(event_id):
return {
'event_id': event_id,
'compos': Compo.objects.filter(active=True, event_id=event_id)
}
-@register.inclusion_tag('kompomaatti/competition_nav_items.html')
+@register.inclusion_tag('kompomaatti/tags/competition_nav_items.html')
def render_base_competitions_nav(event_id):
return {
'event_id': event_id, |
c2fd8515666476cc0b6760b72b6cd71ef030e6f4 | thinc/neural/tests/unit/Affine/test_init.py | thinc/neural/tests/unit/Affine/test_init.py | # encoding: utf8
from __future__ import unicode_literals
import pytest
from flexmock import flexmock
from hypothesis import given, strategies
import abc
from .... import vec2vec
from ....ops import NumpyOps
@pytest.fixture
def model_with_no_args():
model = vec2vec.Affine(ops=NumpyOps())
return model
def test_Affine_default_name(model_with_no_args):
assert model_with_no_args.name == 'affine'
def test_Affine_defaults_to_cpu(model_with_no_args):
assert isinstance(model_with_no_args.ops, NumpyOps)
def test_Affine_defaults_to_no_layers(model_with_no_args):
assert model_with_no_args.layers == []
def test_Affine_defaults_to_param_descriptions(model_with_no_args):
W_desc, b_desc = model_with_no_args.describe_params
xavier_init = model_with_no_args.ops.xavier_uniform_init
assert W_desc == ('W-affine', (None, None), xavier_init)
assert b_desc == ('b-affine', (None,), None)
def test_Model_defaults_to_no_output_shape(model_with_no_args):
assert model_with_no_args.output_shape == None
def test_Model_defaults_to_no_input_shape(model_with_no_args):
assert model_with_no_args.input_shape == None
def test_Model_defaults_to_0_size(model_with_no_args):
assert model_with_no_args.size == None
| # encoding: utf8
from __future__ import unicode_literals
import pytest
from mock import Mock, patch
from hypothesis import given, strategies
import abc
from ...._classes.affine import Affine
from ....ops import NumpyOps
@pytest.fixture
def model():
orig_desc = dict(Affine.descriptions)
orig_on_init = list(Affine.on_init_hooks)
Affine.descriptions = {
name: Mock(desc) for (name, desc) in Affine.descriptions.items()
}
Affine.on_init_hooks = [Mock(hook) for hook in Affine.on_init_hooks]
model = Affine()
for attr in model.descriptions:
setattr(model, attr, None)
Affine.descriptions = dict(orig_desc)
Affine.on_init_hooks = orig_on_init
return model
def test_Affine_default_name(model):
assert model.name == 'affine'
def test_Affine_calls_default_descriptions(model):
assert len(model.descriptions) == 5
for name, desc in model.descriptions.items():
desc.assert_called()
assert 'nB' in model.descriptions
assert 'nI' in model.descriptions
assert 'nO' in model.descriptions
assert 'W' in model.descriptions
assert 'b' in model.descriptions
def test_Affine_calls_init_hooks(model):
for hook in model.on_init_hooks:
hook.assert_called()
| Test Affine init calls hooks correctly, and sets descriptors | Test Affine init calls hooks correctly, and sets descriptors
| Python | mit | explosion/thinc,explosion/thinc,spacy-io/thinc,spacy-io/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc | ---
+++
@@ -1,46 +1,45 @@
# encoding: utf8
from __future__ import unicode_literals
import pytest
-from flexmock import flexmock
+from mock import Mock, patch
from hypothesis import given, strategies
import abc
-from .... import vec2vec
+from ...._classes.affine import Affine
from ....ops import NumpyOps
@pytest.fixture
-def model_with_no_args():
- model = vec2vec.Affine(ops=NumpyOps())
+def model():
+ orig_desc = dict(Affine.descriptions)
+ orig_on_init = list(Affine.on_init_hooks)
+ Affine.descriptions = {
+ name: Mock(desc) for (name, desc) in Affine.descriptions.items()
+ }
+ Affine.on_init_hooks = [Mock(hook) for hook in Affine.on_init_hooks]
+ model = Affine()
+ for attr in model.descriptions:
+ setattr(model, attr, None)
+ Affine.descriptions = dict(orig_desc)
+ Affine.on_init_hooks = orig_on_init
return model
-def test_Affine_default_name(model_with_no_args):
- assert model_with_no_args.name == 'affine'
+def test_Affine_default_name(model):
+ assert model.name == 'affine'
-def test_Affine_defaults_to_cpu(model_with_no_args):
- assert isinstance(model_with_no_args.ops, NumpyOps)
+def test_Affine_calls_default_descriptions(model):
+ assert len(model.descriptions) == 5
+ for name, desc in model.descriptions.items():
+ desc.assert_called()
+ assert 'nB' in model.descriptions
+ assert 'nI' in model.descriptions
+ assert 'nO' in model.descriptions
+ assert 'W' in model.descriptions
+ assert 'b' in model.descriptions
-def test_Affine_defaults_to_no_layers(model_with_no_args):
- assert model_with_no_args.layers == []
-
-
-def test_Affine_defaults_to_param_descriptions(model_with_no_args):
- W_desc, b_desc = model_with_no_args.describe_params
- xavier_init = model_with_no_args.ops.xavier_uniform_init
- assert W_desc == ('W-affine', (None, None), xavier_init)
- assert b_desc == ('b-affine', (None,), None)
-
-
-def test_Model_defaults_to_no_output_shape(model_with_no_args):
- assert model_with_no_args.output_shape == None
-
-
-def test_Model_defaults_to_no_input_shape(model_with_no_args):
- assert model_with_no_args.input_shape == None
-
-
-def test_Model_defaults_to_0_size(model_with_no_args):
- assert model_with_no_args.size == None
+def test_Affine_calls_init_hooks(model):
+ for hook in model.on_init_hooks:
+ hook.assert_called() |
04eae79deb30e63f68534784f1eaf0412cfb1aa9 | reporting_scripts/forum_data.py | reporting_scripts/forum_data.py | from collections import defaultdict
import json
import sys
from base_edx import EdXConnection
from generate_csv_report import CSV
db_name = sys.argv[1]
# Change name of collection as required
connection = EdXConnection(db_name, 'forum' )
collection = connection.get_access_to_collection()
forum_data = collection['forum'].find()
csv_data = []
for document in forum_data:
csv_data.append([document['_id']['oid'], document['author_username'], document['_type'], document.get('title', ''), document['body'], document['created_at']['date']])
headers = ['ID', 'Author Username', 'Type', 'Title', 'Body', 'Created At Date']
output = CSV(csv_data, headers, output_file=db_name+'_forum_data.csv')
output.generate_csv()
| from collections import defaultdict
import json
import sys
from base_edx import EdXConnection
from generate_csv_report import CSV
db_name = sys.argv[1]
# Change name of collection as required
connection = EdXConnection(db_name, 'forum' )
collection = connection.get_access_to_collection()
forum_data = collection['forum'].find()
csv_data = []
for document in forum_data:
csv_data.append([document['_id'], document['author_username'], document['_type'], document.get('title', ''), document['body'], document['created_at']])
headers = ['ID', 'Author Username', 'Type', 'Title', 'Body', 'Created At Date']
output = CSV(csv_data, headers, output_file=db_name+'_forum_data.csv')
output.generate_csv()
| Update script with latest fields | Update script with latest fields
| Python | mit | andyzsf/edx_data_research,andyzsf/edx_data_research,McGillX/edx_data_research,McGillX/edx_data_research,McGillX/edx_data_research | ---
+++
@@ -14,7 +14,7 @@
forum_data = collection['forum'].find()
csv_data = []
for document in forum_data:
- csv_data.append([document['_id']['oid'], document['author_username'], document['_type'], document.get('title', ''), document['body'], document['created_at']['date']])
+ csv_data.append([document['_id'], document['author_username'], document['_type'], document.get('title', ''), document['body'], document['created_at']])
headers = ['ID', 'Author Username', 'Type', 'Title', 'Body', 'Created At Date']
output = CSV(csv_data, headers, output_file=db_name+'_forum_data.csv') |
af45e43c46a22f3168ab946bf914a45eae9ade19 | avatar/urls.py | avatar/urls.py | try:
from django.conf.urls import patterns, url
except ImportError:
# Django < 1.4
from django.conf.urls.defaults import patterns, url
from avatar import views
urlpatterns = patterns('',
url(r'^add/$', views.add, name='avatar_add'),
url(r'^change/$', views.change, name='avatar_change'),
url(r'^delete/$', views.delete, name='avatar_delete'),
url(r'^render_primary/(?P<user>[\w\d\@\.\-_]{3,30})/(?P<size>[\d]+)/$',
views.render_primary,
name='avatar_render_primary'),
url(r'^list/(?P<username>[\+\w\@\.]+)/$',
views.avatar_gallery,
name='avatar_gallery'),
url(r'^list/(?P<username>[\+\w\@\.]+)/(?P<id>[\d]+)/$',
views.avatar,
name='avatar'),
)
| try:
from django.conf.urls import patterns, url
except ImportError:
# Django < 1.4
from django.conf.urls.defaults import url
from avatar import views
urlpatterns = [
url(r'^add/$', views.add, name='avatar_add'),
url(r'^change/$', views.change, name='avatar_change'),
url(r'^delete/$', views.delete, name='avatar_delete'),
url(r'^render_primary/(?P<user>[\w\d\@\.\-_]{3,30})/(?P<size>[\d]+)/$',
views.render_primary,
name='avatar_render_primary'),
url(r'^list/(?P<username>[\+\w\@\.]+)/$',
views.avatar_gallery,
name='avatar_gallery'),
url(r'^list/(?P<username>[\+\w\@\.]+)/(?P<id>[\d]+)/$',
views.avatar,
name='avatar'),
]
| Remove replace urlpatterns with simple array, make compatible with Django 1.9 | Remove replace urlpatterns with simple array, make compatible with Django 1.9
| Python | bsd-3-clause | ad-m/django-avatar,ad-m/django-avatar,grantmcconnaughey/django-avatar,grantmcconnaughey/django-avatar,jezdez/django-avatar,jezdez/django-avatar,MachineandMagic/django-avatar,MachineandMagic/django-avatar | ---
+++
@@ -2,11 +2,11 @@
from django.conf.urls import patterns, url
except ImportError:
# Django < 1.4
- from django.conf.urls.defaults import patterns, url
+ from django.conf.urls.defaults import url
from avatar import views
-urlpatterns = patterns('',
+urlpatterns = [
url(r'^add/$', views.add, name='avatar_add'),
url(r'^change/$', views.change, name='avatar_change'),
url(r'^delete/$', views.delete, name='avatar_delete'),
@@ -19,4 +19,4 @@
url(r'^list/(?P<username>[\+\w\@\.]+)/(?P<id>[\d]+)/$',
views.avatar,
name='avatar'),
-)
+] |
3e4eacae396f2101b677859ea01f8b225fa957d2 | branches/extracting_oxford/molly/demos/molly_oxford/urls.py | branches/extracting_oxford/molly/demos/molly_oxford/urls.py | from django.conf.urls.defaults import *
from django.conf import settings
from django.contrib import admin
from molly.conf import applications
admin.autodiscover()
urlpatterns = patterns('',
(r'adm/(.*)', admin.site.root),
# These are how we expect all applications to be eventually.
(r'^contact/', applications.contact.urls),
(r'^service-status/', applications.service_status.urls),
(r'^weather/', applications.weather.urls),
(r'^library/', applications.library.urls),
(r'^weblearn/', applications.weblearn.urls),
(r'^podcasts/', applications.podcasts.urls),
(r'^webcams/', applications.webcams.urls),
(r'^results/', applications.results.urls),
(r'^auth/', applications.auth.urls),
(r'^search/', applications.search.urls),
(r'^geolocation/', applications.geolocation.urls),
# These ones still need work
(r'^maps/', include('molly.maps.urls', 'maps', 'maps')),
(r'^osm/', include('molly.osm.urls', 'osm', 'osm')),
(r'', include('molly.core.urls', 'core', 'core')),
)
if settings.DEBUG:
urlpatterns += patterns('',
(r'^site-media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.SITE_MEDIA_PATH})
)
| from django.conf.urls.defaults import *
from django.conf import settings
from django.contrib import admin
from molly.conf import applications
admin.autodiscover()
urlpatterns = patterns('',
(r'adm/(.*)', admin.site.root),
# These are how we expect all applications to be eventually.
(r'^contact/', applications.contact.urls),
(r'^service-status/', applications.service_status.urls),
(r'^weather/', applications.weather.urls),
(r'^library/', applications.library.urls),
(r'^weblearn/', applications.weblearn.urls),
(r'^podcasts/', applications.podcasts.urls),
(r'^webcams/', applications.webcams.urls),
(r'^results/', applications.results.urls),
(r'^auth/', applications.auth.urls),
(r'^search/', applications.search.urls),
(r'^geolocation/', applications.geolocation.urls),
(r'^places/', applications.places.urls),
# These ones still need work
(r'^osm/', include('molly.osm.urls', 'osm', 'osm')),
(r'', include('molly.core.urls', 'core', 'core')),
)
if settings.DEBUG:
urlpatterns += patterns('',
(r'^site-media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.SITE_MEDIA_PATH})
)
| Update Mobile Oxford views to reflect use of app framework in molly.maps. | Update Mobile Oxford views to reflect use of app framework in
molly.maps.
| Python | apache-2.0 | mollyproject/mollyproject,mollyproject/mollyproject,mollyproject/mollyproject | ---
+++
@@ -21,9 +21,9 @@
(r'^auth/', applications.auth.urls),
(r'^search/', applications.search.urls),
(r'^geolocation/', applications.geolocation.urls),
+ (r'^places/', applications.places.urls),
# These ones still need work
- (r'^maps/', include('molly.maps.urls', 'maps', 'maps')),
(r'^osm/', include('molly.osm.urls', 'osm', 'osm')),
(r'', include('molly.core.urls', 'core', 'core')), |
d0f18bed554c58873776eefba5b2be1d60926f95 | elevator_cli/io.py | elevator_cli/io.py | # -*- coding: utf-8 -*-
# Copyright (c) 2012 theo crevon
#
# See the file LICENSE for copying permission.
from clint.textui import puts, colored
from elevator.utils.patterns import destructurate
from .helpers import FAILURE_STATUS
def prompt(*args, **kwargs):
current_db = kwargs.pop('current_db', 'default')
if current_db:
pattern = '@ Elevator.{db} => '.format(db=current_db)
else:
pattern = '! Offline => '
input_str = raw_input(pattern)
return input_str
def parse_input(input_str, *args, **kwargs):
input_str = input_str.strip().split()
command, args = destructurate(input_str)
return command.upper(), args
def output_result(status, result, *args, **kwargs):
if result:
if status == FAILURE_STATUS:
puts(colored.red(str(result)))
else:
puts(str(result))
| # -*- coding: utf-8 -*-
# Copyright (c) 2012 theo crevon
#
# See the file LICENSE for copying permission.
import shlex
from clint.textui import puts, colored
from elevator.utils.patterns import destructurate
from .helpers import FAILURE_STATUS
def prompt(*args, **kwargs):
current_db = kwargs.pop('current_db', 'default')
if current_db:
pattern = '@ Elevator.{db} => '.format(db=current_db)
else:
pattern = '! Offline => '
input_str = raw_input(pattern)
return input_str
def parse_input(input_str, *args, **kwargs):
input_str = shlex.split(input_str.strip())
command, args = destructurate(input_str)
return command.upper(), args
def output_result(status, result, *args, **kwargs):
if result:
if status == FAILURE_STATUS:
puts(colored.red(str(result)))
else:
puts(str(result))
| Update : protect cli quoted arguments while parsing | Update : protect cli quoted arguments while parsing
| Python | mit | oleiade/Elevator | ---
+++
@@ -3,6 +3,8 @@
# Copyright (c) 2012 theo crevon
#
# See the file LICENSE for copying permission.
+
+import shlex
from clint.textui import puts, colored
@@ -24,7 +26,7 @@
def parse_input(input_str, *args, **kwargs):
- input_str = input_str.strip().split()
+ input_str = shlex.split(input_str.strip())
command, args = destructurate(input_str)
return command.upper(), args
|
22499afe4e434377e449226c5b073251165f4151 | src/geoserver/style.py | src/geoserver/style.py | from geoserver.support import ResourceInfo, atom_link
class Style(ResourceInfo):
def __init__(self,catalog, node):
self.catalog = catalog
self.name = node.find("name").text
self.href = atom_link(node)
self.update()
def update(self):
ResourceInfo.update(self)
self.name = self.metadata.find("name").text
self.filename = self.metadata.find("filename").text
def __repr__(self):
return "Style[%s]" % self.name
| from geoserver.support import ResourceInfo, atom_link
class Style(ResourceInfo):
def __init__(self,catalog, node):
self.catalog = catalog
self.name = node.find("name").text
self.href = atom_link(node)
self.update()
def update(self):
ResourceInfo.update(self)
self.name = self.metadata.find("name").text
self.filename = self.metadata.find("filename").text
# Get the raw sld
sld_url = self.href.replace(".xml", ".sld")
sld_xml = self.catalog.get_xml(sld_url)
# Obtain the user style node where title and name are located
user_style = sld_xml.find("{http://www.opengis.net/sld}NamedLayer/{http://www.opengis.net/sld}UserStyle")
# Extract name and title nodes from user_style
name_node = user_style.find("{http://www.opengis.net/sld}Name")
title_node = user_style.find("{http://www.opengis.net/sld}Title")
# Store the text value of sld name and title if present
self.sld_name = name_node.text if hasattr(name_node, 'text') else None
self.sld_title = title_node.text if hasattr(title_node, 'text') else None
def __repr__(self):
return "Style[%s]" % self.name
| Add basic support for inspecting the body of SLDs | Add basic support for inspecting the body of SLDs
| Python | mit | cristianzamar/gsconfig,Geode/gsconfig,scottp-dpaw/gsconfig,boundlessgeo/gsconfig,afabiani/gsconfig,garnertb/gsconfig.py | ---
+++
@@ -11,6 +11,17 @@
ResourceInfo.update(self)
self.name = self.metadata.find("name").text
self.filename = self.metadata.find("filename").text
+ # Get the raw sld
+ sld_url = self.href.replace(".xml", ".sld")
+ sld_xml = self.catalog.get_xml(sld_url)
+ # Obtain the user style node where title and name are located
+ user_style = sld_xml.find("{http://www.opengis.net/sld}NamedLayer/{http://www.opengis.net/sld}UserStyle")
+ # Extract name and title nodes from user_style
+ name_node = user_style.find("{http://www.opengis.net/sld}Name")
+ title_node = user_style.find("{http://www.opengis.net/sld}Title")
+ # Store the text value of sld name and title if present
+ self.sld_name = name_node.text if hasattr(name_node, 'text') else None
+ self.sld_title = title_node.text if hasattr(title_node, 'text') else None
def __repr__(self):
return "Style[%s]" % self.name |
f48066555e6a4f778887b0600e13b304d36c9529 | minutes/forms.py | minutes/forms.py | from django.forms import ModelForm, Textarea, ChoiceField
from django.utils.functional import lazy
from .models import Meeting, Folder
MD_INPUT = {
'class': 'markdown-input'
}
def sorted_folders():
return sorted([(x.pk, str(x)) for x in Folder.objects.all()], key=lambda x: x[1])
class MeetingForm(ModelForm):
folder = ChoiceField(choices=sorted_folders)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
class Meta:
model = Meeting
fields = ['name', 'folder', 'title', 'body', 'date']
widgets = {
'body': Textarea(attrs=MD_INPUT),
}
def clean_folder(self):
return Folder.objects.get(pk=self.cleaned_data['folder'])
| from django.forms import ModelForm, Textarea, ChoiceField
from django.urls import reverse_lazy
from .models import Meeting, Folder
MD_INPUT = {
'class': 'markdown-input',
'data-endpoint': reverse_lazy('utilities:preview_safe')
}
def sorted_folders():
return sorted([(x.pk, str(x)) for x in Folder.objects.all()], key=lambda x: x[1])
class MeetingForm(ModelForm):
folder = ChoiceField(choices=sorted_folders)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
class Meta:
model = Meeting
fields = ['name', 'folder', 'title', 'body', 'date']
widgets = {
'body': Textarea(attrs=MD_INPUT),
}
def clean_folder(self):
return Folder.objects.get(pk=self.cleaned_data['folder'])
| Allow admin preview in minutes as well | Allow admin preview in minutes as well
| Python | isc | ashbc/tgrsite,ashbc/tgrsite,ashbc/tgrsite | ---
+++
@@ -1,10 +1,11 @@
from django.forms import ModelForm, Textarea, ChoiceField
-from django.utils.functional import lazy
+from django.urls import reverse_lazy
from .models import Meeting, Folder
MD_INPUT = {
- 'class': 'markdown-input'
+ 'class': 'markdown-input',
+ 'data-endpoint': reverse_lazy('utilities:preview_safe')
}
|
1b325d5d910039a3702db8a1b00127870625b209 | test/conftest.py | test/conftest.py | from coverage import coverage
cov = coverage(source=('doubles',))
cov.start()
from doubles.pytest import pytest_runtest_call # noqa
def pytest_sessionfinish(session, exitstatus):
cov.stop()
def pytest_terminal_summary(terminalreporter):
print "\nCoverage report:\n"
cov.report(show_missing=True, ignore_errors=True, file=terminalreporter._tw)
cov.html_report()
| from coverage import coverage
cov = coverage(source=('doubles',))
cov.start()
from doubles.pytest import pytest_runtest_call # noqa
def pytest_sessionfinish(session, exitstatus):
cov.stop()
cov.save()
def pytest_terminal_summary(terminalreporter):
print "\nCoverage report:\n"
cov.report(show_missing=True, ignore_errors=True, file=terminalreporter._tw)
cov.html_report()
| Save coverage data file to disk for Coveralls. | Save coverage data file to disk for Coveralls.
| Python | mit | uber/doubles | ---
+++
@@ -8,6 +8,7 @@
def pytest_sessionfinish(session, exitstatus):
cov.stop()
+ cov.save()
def pytest_terminal_summary(terminalreporter): |
99952c977eee74ecc95a6af4b2867738850bc435 | topoflow_utils/hook.py | topoflow_utils/hook.py | def get_dtype(parameter_value):
"""Get the TopoFlow data type of a parameter.
Parameters
----------
parameter_value : object
An object, a scalar.
"""
try:
float(parameter_value)
except ValueError:
return 'string'
else:
return 'float'
def assign_parameters(env, file_list):
"""Assign values for input parameters in a TopoFlow component.
A subset of TopoFlow input parameters can take a scalar value, or,
through an uploaded file, a time series, a grid, or a grid
sequence. This function assigns such parameters a scalar value, or
the name of a file, based on the user's selection in WMT.
Parameters
----------
env : dict
A dict of component parameter values from WMT.
file_list : list
A list of file names used by the component.
"""
terminator = '_ptype'
for key in env.copy().iterkeys():
if key.endswith(terminator):
key_root, sep, end = key.partition(terminator)
if env[key] == 'Scalar':
env[key_root] = env[key_root + '_scalar']
else:
env[key_root] = env[key_root + '_file']
file_list.append(key_root)
env[key_root + '_dtype'] = get_dtype(env[key_root])
| """Routines used by WMT hooks for TopoFlow components."""
choices_map = {
'Yes': 1,
'No': 0
}
units_map = {
'meters': 'm^2',
'kilometers': 'km^2'
}
def get_dtype(parameter_value):
"""Get the TopoFlow data type of a parameter.
Parameters
----------
parameter_value : object
An object, a scalar.
"""
try:
float(parameter_value)
except ValueError:
return 'string'
else:
return 'float'
def assign_parameters(env, file_list):
"""Assign values for input parameters in a TopoFlow component.
A subset of TopoFlow input parameters can take a scalar value, or,
through an uploaded file, a time series, a grid, or a grid
sequence. This function assigns such parameters a scalar value, or
the name of a file, based on the user's selection in WMT.
Parameters
----------
env : dict
A dict of component parameter values from WMT.
file_list : list
A list of file names used by the component.
"""
terminator = '_ptype'
for key in env.copy().iterkeys():
if key.endswith(terminator):
key_root, sep, end = key.partition(terminator)
if env[key] == 'Scalar':
env[key_root] = env[key_root + '_scalar']
else:
env[key_root] = env[key_root + '_file']
file_list.append(key_root)
env[key_root + '_dtype'] = get_dtype(env[key_root])
| Add choices_map and units_map global variables | Add choices_map and units_map global variables
| Python | mit | csdms/topoflow-utils | ---
+++
@@ -1,3 +1,15 @@
+"""Routines used by WMT hooks for TopoFlow components."""
+
+choices_map = {
+ 'Yes': 1,
+ 'No': 0
+}
+units_map = {
+ 'meters': 'm^2',
+ 'kilometers': 'km^2'
+}
+
+
def get_dtype(parameter_value):
"""Get the TopoFlow data type of a parameter.
|
95c2f9b9b0a64611a482d25993b0fa289f19ece8 | test_settings.py | test_settings.py | DATABASES = {'default':{
'NAME':':memory:',
'ENGINE':'django.db.backends.sqlite3'
}}
# install the bare minimum for
# testing django-brake
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'brake',
)
# This is where our ratelimiting information is stored.
# Unfortunately, the DummyCache doesn't work for our purposes.
CACHE_BACKEND = 'memcached://127.0.0.1:11211/'
# point to ourselves as the root urlconf, define no patterns (see below)
ROOT_URLCONF = 'test_settings'
# set this to turn off an annoying "you're doing it wrong" message
SECRET_KEY = 'HAHAHA ratelimits!'
# turn this file into a pseudo-urls.py.
from django.conf.urls.defaults import *
urlpatterns = patterns('',)
| DATABASES = {'default':{
'NAME':':memory:',
'ENGINE':'django.db.backends.sqlite3'
}}
# install the bare minimum for
# testing django-brake
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'brake',
)
# This is where our ratelimiting information is stored.
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'
}
}
# Might be good to also test against real memcached.
#CACHE_BACKEND = 'memcached://127.0.0.1:11211/'
# point to ourselves as the root urlconf, define no patterns (see below)
ROOT_URLCONF = 'test_settings'
# set this to turn off an annoying "you're doing it wrong" message
SECRET_KEY = 'HAHAHA ratelimits!'
# turn this file into a pseudo-urls.py.
from django.conf.urls.defaults import *
urlpatterns = patterns('',)
| Use LocMemCache instead of real memcache | Use LocMemCache instead of real memcache
| Python | bsd-3-clause | SilentCircle/django-brake,skorokithakis/django-brake,skorokithakis/django-brake,SilentCircle/django-brake | ---
+++
@@ -13,8 +13,13 @@
# This is where our ratelimiting information is stored.
-# Unfortunately, the DummyCache doesn't work for our purposes.
-CACHE_BACKEND = 'memcached://127.0.0.1:11211/'
+CACHES = {
+ 'default': {
+ 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'
+ }
+}
+# Might be good to also test against real memcached.
+#CACHE_BACKEND = 'memcached://127.0.0.1:11211/'
# point to ourselves as the root urlconf, define no patterns (see below)
ROOT_URLCONF = 'test_settings' |
3f7fc98591d11b0a67372699371843f9fd019e36 | numpy/version.py | numpy/version.py | version='0.9.7'
import os
svn_version_file = os.path.join(os.path.dirname(__file__),
'core','__svn_version__.py')
if os.path.isfile(svn_version_file):
import imp
svn = imp.load_module('numpy.core.__svn_version__',
open(svn_version_file),
svn_version_file,
('.py','U',1))
version += '.'+svn.version
| version='0.9.9'
import os
svn_version_file = os.path.join(os.path.dirname(__file__),
'core','__svn_version__.py')
if os.path.isfile(svn_version_file):
import imp
svn = imp.load_module('numpy.core.__svn_version__',
open(svn_version_file),
svn_version_file,
('.py','U',1))
version += '.'+svn.version
| Update head revision to 0.9.9 | Update head revision to 0.9.9
| Python | bsd-3-clause | githubmlai/numpy,dwf/numpy,pizzathief/numpy,kiwifb/numpy,dwf/numpy,immerrr/numpy,AustereCuriosity/numpy,pbrod/numpy,matthew-brett/numpy,jankoslavic/numpy,has2k1/numpy,numpy/numpy,mortada/numpy,ssanderson/numpy,ESSS/numpy,dch312/numpy,grlee77/numpy,simongibbons/numpy,rherault-insa/numpy,hainm/numpy,skymanaditya1/numpy,ViralLeadership/numpy,dwillmer/numpy,BMJHayward/numpy,WillieMaddox/numpy,charris/numpy,ogrisel/numpy,endolith/numpy,Srisai85/numpy,SunghanKim/numpy,madphysicist/numpy,argriffing/numpy,jschueller/numpy,stuarteberg/numpy,astrofrog/numpy,brandon-rhodes/numpy,cjermain/numpy,simongibbons/numpy,pbrod/numpy,dwf/numpy,ewmoore/numpy,seberg/numpy,WarrenWeckesser/numpy,jschueller/numpy,kirillzhuravlev/numpy,rhythmsosad/numpy,solarjoe/numpy,bertrand-l/numpy,ajdawson/numpy,joferkington/numpy,grlee77/numpy,GaZ3ll3/numpy,endolith/numpy,cowlicks/numpy,ssanderson/numpy,brandon-rhodes/numpy,mingwpy/numpy,NextThought/pypy-numpy,embray/numpy,mhvk/numpy,skymanaditya1/numpy,dwillmer/numpy,mattip/numpy,SunghanKim/numpy,b-carter/numpy,ChristopherHogan/numpy,BMJHayward/numpy,rudimeier/numpy,MichaelAquilina/numpy,mwiebe/numpy,nguyentu1602/numpy,pelson/numpy,pbrod/numpy,chiffa/numpy,ChristopherHogan/numpy,Dapid/numpy,jorisvandenbossche/numpy,jonathanunderwood/numpy,jakirkham/numpy,MichaelAquilina/numpy,larsmans/numpy,immerrr/numpy,jankoslavic/numpy,rmcgibbo/numpy,shoyer/numpy,kirillzhuravlev/numpy,sinhrks/numpy,jonathanunderwood/numpy,sonnyhu/numpy,shoyer/numpy,empeeu/numpy,moreati/numpy,andsor/numpy,endolith/numpy,numpy/numpy,mindw/numpy,sinhrks/numpy,numpy/numpy-refactor,CMartelLML/numpy,mhvk/numpy,jorisvandenbossche/numpy,Anwesh43/numpy,rhythmsosad/numpy,maniteja123/numpy,ViralLeadership/numpy,abalkin/numpy,gmcastil/numpy,kirillzhuravlev/numpy,numpy/numpy-refactor,bmorris3/numpy,andsor/numpy,pelson/numpy,mingwpy/numpy,WarrenWeckesser/numpy,seberg/numpy,sigma-random/numpy,pbrod/numpy,naritta/numpy,dch312/numpy,astrofrog/numpy,gfyoung/numpy,mattip/numpy
,gfyoung/numpy,shoyer/numpy,yiakwy/numpy,ajdawson/numpy,MSeifert04/numpy,mingwpy/numpy,ContinuumIO/numpy,tynn/numpy,ddasilva/numpy,matthew-brett/numpy,MichaelAquilina/numpy,stefanv/numpy,matthew-brett/numpy,dato-code/numpy,BabeNovelty/numpy,rmcgibbo/numpy,nbeaver/numpy,rmcgibbo/numpy,CMartelLML/numpy,has2k1/numpy,BMJHayward/numpy,SiccarPoint/numpy,ChanderG/numpy,leifdenby/numpy,simongibbons/numpy,musically-ut/numpy,MSeifert04/numpy,gmcastil/numpy,charris/numpy,sonnyhu/numpy,tdsmith/numpy,naritta/numpy,mortada/numpy,numpy/numpy,numpy/numpy-refactor,rmcgibbo/numpy,astrofrog/numpy,jankoslavic/numpy,moreati/numpy,groutr/numpy,rgommers/numpy,ekalosak/numpy,sonnyhu/numpy,embray/numpy,dato-code/numpy,pdebuyl/numpy,bertrand-l/numpy,ESSS/numpy,cowlicks/numpy,drasmuss/numpy,behzadnouri/numpy,skwbc/numpy,rgommers/numpy,ChanderG/numpy,dwf/numpy,Anwesh43/numpy,seberg/numpy,chatcannon/numpy,jorisvandenbossche/numpy,KaelChen/numpy,dwillmer/numpy,bringingheavendown/numpy,pizzathief/numpy,nbeaver/numpy,dwf/numpy,jakirkham/numpy,ChristopherHogan/numpy,tdsmith/numpy,nbeaver/numpy,njase/numpy,kiwifb/numpy,ahaldane/numpy,empeeu/numpy,bmorris3/numpy,tdsmith/numpy,MaPePeR/numpy,bmorris3/numpy,brandon-rhodes/numpy,dimasad/numpy,felipebetancur/numpy,WillieMaddox/numpy,SunghanKim/numpy,simongibbons/numpy,yiakwy/numpy,rajathkumarmp/numpy,Dapid/numpy,groutr/numpy,hainm/numpy,pizzathief/numpy,githubmlai/numpy,stuarteberg/numpy,charris/numpy,SunghanKim/numpy,mortada/numpy,mindw/numpy,Eric89GXL/numpy,abalkin/numpy,endolith/numpy,rudimeier/numpy,anntzer/numpy,sinhrks/numpy,drasmuss/numpy,mhvk/numpy,stefanv/numpy,WarrenWeckesser/numpy,skymanaditya1/numpy,pdebuyl/numpy,matthew-brett/numpy,anntzer/numpy,dch312/numpy,Yusa95/numpy,naritta/numpy,jorisvandenbossche/numpy,sigma-random/numpy,ChanderG/numpy,BabeNovelty/numpy,KaelChen/numpy,b-carter/numpy,skymanaditya1/numpy,grlee77/numpy,pdebuyl/numpy,mattip/numpy,felipebetancur/numpy,andsor/numpy,yiakwy/numpy,WarrenWeckesser/numpy,sigma-random/numpy,numpy/
numpy,tynn/numpy,Yusa95/numpy,leifdenby/numpy,argriffing/numpy,felipebetancur/numpy,CMartelLML/numpy,dimasad/numpy,jankoslavic/numpy,stefanv/numpy,chatcannon/numpy,Srisai85/numpy,MSeifert04/numpy,immerrr/numpy,charris/numpy,kirillzhuravlev/numpy,bmorris3/numpy,dimasad/numpy,rajathkumarmp/numpy,mathdd/numpy,trankmichael/numpy,jorisvandenbossche/numpy,Linkid/numpy,cowlicks/numpy,has2k1/numpy,ContinuumIO/numpy,dato-code/numpy,musically-ut/numpy,joferkington/numpy,stefanv/numpy,ewmoore/numpy,larsmans/numpy,rhythmsosad/numpy,maniteja123/numpy,Eric89GXL/numpy,mingwpy/numpy,rgommers/numpy,numpy/numpy-refactor,bringingheavendown/numpy,chiffa/numpy,mwiebe/numpy,utke1/numpy,rherault-insa/numpy,musically-ut/numpy,mathdd/numpy,NextThought/pypy-numpy,jakirkham/numpy,mhvk/numpy,ssanderson/numpy,MaPePeR/numpy,mindw/numpy,maniteja123/numpy,tacaswell/numpy,Linkid/numpy,jakirkham/numpy,trankmichael/numpy,ahaldane/numpy,njase/numpy,Yusa95/numpy,cjermain/numpy,ewmoore/numpy,BabeNovelty/numpy,tacaswell/numpy,mwiebe/numpy,GaZ3ll3/numpy,rgommers/numpy,anntzer/numpy,abalkin/numpy,WarrenWeckesser/numpy,empeeu/numpy,shoyer/numpy,trankmichael/numpy,has2k1/numpy,andsor/numpy,ddasilva/numpy,nguyentu1602/numpy,BabeNovelty/numpy,ekalosak/numpy,matthew-brett/numpy,ViralLeadership/numpy,yiakwy/numpy,ChanderG/numpy,dimasad/numpy,pbrod/numpy,bringingheavendown/numpy,mortada/numpy,madphysicist/numpy,astrofrog/numpy,embray/numpy,mathdd/numpy,madphysicist/numpy,b-carter/numpy,sinhrks/numpy,jschueller/numpy,MSeifert04/numpy,cjermain/numpy,seberg/numpy,embray/numpy,githubmlai/numpy,pelson/numpy,larsmans/numpy,pyparallel/numpy,GrimDerp/numpy,grlee77/numpy,stuarteberg/numpy,ewmoore/numpy,ahaldane/numpy,immerrr/numpy,madphysicist/numpy,felipebetancur/numpy,behzadnouri/numpy,anntzer/numpy,Eric89GXL/numpy,simongibbons/numpy,ogrisel/numpy,leifdenby/numpy,pizzathief/numpy,ewmoore/numpy,dwillmer/numpy,behzadnouri/numpy,pizzathief/numpy,GrimDerp/numpy,joferkington/numpy,nguyentu1602/numpy,AustereCuriosity/numpy,td
smith/numpy,MichaelAquilina/numpy,pelson/numpy,empeeu/numpy,numpy/numpy-refactor,ahaldane/numpy,ddasilva/numpy,tynn/numpy,gfyoung/numpy,ahaldane/numpy,gmcastil/numpy,ajdawson/numpy,musically-ut/numpy,brandon-rhodes/numpy,sigma-random/numpy,joferkington/numpy,ogrisel/numpy,BMJHayward/numpy,pyparallel/numpy,MSeifert04/numpy,SiccarPoint/numpy,AustereCuriosity/numpy,larsmans/numpy,njase/numpy,solarjoe/numpy,Anwesh43/numpy,Dapid/numpy,CMartelLML/numpy,rudimeier/numpy,utke1/numpy,NextThought/pypy-numpy,solarjoe/numpy,ogrisel/numpy,ChristopherHogan/numpy,KaelChen/numpy,dch312/numpy,groutr/numpy,Anwesh43/numpy,mathdd/numpy,MaPePeR/numpy,stefanv/numpy,githubmlai/numpy,Srisai85/numpy,GaZ3ll3/numpy,Linkid/numpy,rhythmsosad/numpy,argriffing/numpy,cjermain/numpy,sonnyhu/numpy,mattip/numpy,cowlicks/numpy,grlee77/numpy,rajathkumarmp/numpy,astrofrog/numpy,ekalosak/numpy,Srisai85/numpy,skwbc/numpy,NextThought/pypy-numpy,Yusa95/numpy,jakirkham/numpy,KaelChen/numpy,ContinuumIO/numpy,mhvk/numpy,madphysicist/numpy,jonathanunderwood/numpy,shoyer/numpy,tacaswell/numpy,bertrand-l/numpy,ESSS/numpy,Linkid/numpy,pdebuyl/numpy,chatcannon/numpy,rajathkumarmp/numpy,utke1/numpy,MaPePeR/numpy,ekalosak/numpy,hainm/numpy,dato-code/numpy,rudimeier/numpy,SiccarPoint/numpy,mindw/numpy,moreati/numpy,nguyentu1602/numpy,embray/numpy,rherault-insa/numpy,Eric89GXL/numpy,SiccarPoint/numpy,jschueller/numpy,GrimDerp/numpy,stuarteberg/numpy,chiffa/numpy,pelson/numpy,hainm/numpy,skwbc/numpy,WillieMaddox/numpy,GaZ3ll3/numpy,ogrisel/numpy,drasmuss/numpy,trankmichael/numpy,kiwifb/numpy,GrimDerp/numpy,naritta/numpy,ajdawson/numpy,pyparallel/numpy | ---
+++
@@ -1,4 +1,4 @@
-version='0.9.7'
+version='0.9.9'
import os
svn_version_file = os.path.join(os.path.dirname(__file__), |
3341ba14250f086ab11aa55d8d72de14fe95aceb | tests/test_db.py | tests/test_db.py | # -*- coding: utf-8 -*-
import os
import unittest
from flask import Flask
from coaster.utils import buid
from coaster.sqlalchemy import BaseMixin
from nodular.db import db
class User(BaseMixin, db.Model):
__tablename__ = 'user'
userid = db.Column(db.Unicode(22), nullable=False, default=buid, unique=True)
username = db.Column(db.Unicode(250), nullable=True)
app = Flask(__name__, instance_relative_config=True)
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get(
'SQLALCHEMY_DATABASE_URI', 'postgresql://postgres@localhost/myapp_test')
app.config['SQLALCHEMY_ECHO'] = False
db.init_app(app)
db.app = app
class TestDatabaseFixture(unittest.TestCase):
def setUp(self):
self.app = app
db.create_all()
self.user1 = User(username=u'user1')
db.session.add(self.user1)
app.testing = True
def tearDown(self):
db.session.rollback()
db.drop_all()
db.session.remove()
| # -*- coding: utf-8 -*-
import os
import unittest
from flask import Flask
from coaster.utils import buid
from coaster.sqlalchemy import BaseMixin
from nodular.db import db
class User(BaseMixin, db.Model):
__tablename__ = 'user'
userid = db.Column(db.Unicode(22), nullable=False, default=buid, unique=True)
username = db.Column(db.Unicode(250), nullable=True)
app = Flask(__name__, instance_relative_config=True)
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get(
'SQLALCHEMY_DATABASE_URI', 'postgresql://postgres@localhost/myapp_test')
app.config['SQLALCHEMY_ECHO'] = False
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db.init_app(app)
db.app = app
class TestDatabaseFixture(unittest.TestCase):
def setUp(self):
self.app = app
db.create_all()
self.user1 = User(username=u'user1')
db.session.add(self.user1)
app.testing = True
def tearDown(self):
db.session.rollback()
db.drop_all()
db.session.remove()
| Disable warning for unused feature | Disable warning for unused feature
| Python | bsd-2-clause | hasgeek/nodular,hasgeek/nodular | ---
+++
@@ -18,6 +18,7 @@
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get(
'SQLALCHEMY_DATABASE_URI', 'postgresql://postgres@localhost/myapp_test')
app.config['SQLALCHEMY_ECHO'] = False
+app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db.init_app(app)
db.app = app
|
63f7ba49a3577a35de59490185ae9c06c20e1cc8 | ticketing/app.py | ticketing/app.py | from django.conf.urls.defaults import patterns, url
from oscar.core.application import Application
from ticketing import views
class TicketingApplication(Application):
name = 'ticketing'
ticket_list_view = views.TicketListView
ticket_create_view = views.TicketCreateView
ticket_update_view = views.TicketUpdateView
def get_urls(self):
urlpatterns = super(TicketingApplication, self).get_urls()
urlpatterns += patterns('',
url(
r'accounts/support/$',
self.ticket_list_view.as_view(),
name='customer-ticket-list'
),
url(
r'accounts/support/ticket/create/$',
self.ticket_create_view.as_view(),
name='customer-ticket-create'
),
url(
r'accounts/support/ticket/(?P<pk>\d+)/update/$',
self.ticket_update_view.as_view(),
name='customer-ticket-update'
),
)
return self.post_process_urls(urlpatterns)
application = TicketingApplication()
| from django.conf.urls.defaults import patterns, url
from django.contrib.auth.decorators import login_required
from oscar.core.application import Application
from ticketing import views
class TicketingApplication(Application):
name = 'ticketing'
ticket_list_view = views.TicketListView
ticket_create_view = views.TicketCreateView
ticket_update_view = views.TicketUpdateView
def get_urls(self):
urlpatterns = super(TicketingApplication, self).get_urls()
urlpatterns += patterns('',
url(
r'accounts/support/$',
self.ticket_list_view.as_view(),
name='customer-ticket-list'
),
url(
r'accounts/support/ticket/create/$',
self.ticket_create_view.as_view(),
name='customer-ticket-create'
),
url(
r'accounts/support/ticket/(?P<pk>\d+)/update/$',
self.ticket_update_view.as_view(),
name='customer-ticket-update'
),
)
return self.post_process_urls(urlpatterns)
def get_url_decorator(self, url_name):
return login_required
application = TicketingApplication()
| Fix login permissions for ticketing | Fix login permissions for ticketing
| Python | bsd-3-clause | snowball-one/django-oscar-support,snowball-one/django-oscar-support,snowball-one/django-oscar-support | ---
+++
@@ -1,4 +1,5 @@
from django.conf.urls.defaults import patterns, url
+from django.contrib.auth.decorators import login_required
from oscar.core.application import Application
@@ -34,5 +35,8 @@
)
return self.post_process_urls(urlpatterns)
+ def get_url_decorator(self, url_name):
+ return login_required
+
application = TicketingApplication() |
e6a1e9670e857119c7e6c9250849ee4edd026bad | tests/settings_base.py | tests/settings_base.py | import os
ROOT_URLCONF = 'tests.urls'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'tests.app',
]
STATIC_URL = '/static/'
SECRET_KEY = 'foobar'
SITE_ID = 1234 # Needed for 1.3 compatibility
# Used to construct unique test database names to allow detox to run multiple
# versions at the same time
uid = os.getenv('UID', '')
if uid:
db_suffix = '_%s' % uid
else:
db_suffix = ''
| import os
ROOT_URLCONF = 'tests.urls'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'tests.app',
]
STATIC_URL = '/static/'
SECRET_KEY = 'foobar'
SITE_ID = 1234 # Needed for 1.3 compatibility
# Used to construct unique test database names to allow detox to run multiple
# versions at the same time
uid = os.getenv('UID', '')
if uid:
db_suffix = '_%s' % uid
else:
db_suffix = ''
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
| Make sure AuthenticationMiddleware is defined in settings during tests. | Make sure AuthenticationMiddleware is defined in settings
during tests.
| Python | bsd-3-clause | ktosiek/pytest-django,RonnyPfannschmidt/pytest_django,pombredanne/pytest_django,hoh/pytest-django,davidszotten/pytest-django,ojake/pytest-django,bforchhammer/pytest-django,reincubate/pytest-django,pelme/pytest-django,thedrow/pytest-django,aptivate/pytest-django,tomviner/pytest-django,felixonmars/pytest-django | ---
+++
@@ -22,3 +22,12 @@
db_suffix = '_%s' % uid
else:
db_suffix = ''
+
+MIDDLEWARE_CLASSES = (
+ 'django.contrib.sessions.middleware.SessionMiddleware',
+ 'django.middleware.common.CommonMiddleware',
+ 'django.middleware.csrf.CsrfViewMiddleware',
+ 'django.contrib.auth.middleware.AuthenticationMiddleware',
+ 'django.contrib.messages.middleware.MessageMiddleware',
+ 'django.middleware.clickjacking.XFrameOptionsMiddleware',
+) |
0f51a231b3b11e89a82a429be0adb6422fd01d8b | tests/test_Sanitize.py | tests/test_Sanitize.py | import unittest
from s2f.Sanitize import (
sanitize_prefix,
sanitize_for_latex
)
class TestSanitize(unittest.TestCase):
""" Test case for the Sanitize module """
def testSanitizePrefix(self):
""" The function sanitize_prefix should only allow for lower case ASCII
letters, digits and the hyphen. Upper case letters are supposed to be
converted to lower case, everything else is supposed to be omitted.
"""
self.assertEqual(sanitize_prefix('_. Testü@# - .5$ç§÷≠0π00'),
'test-5000')
def testSanitizeForLatex(self):
""" LaTeX special characters are supposed to be escaped. """
self.assertEqual(sanitize_for_latex('I am 100% certain!'),
r'I am 100\% certain!')
self.assertEqual(sanitize_for_latex('Toto & Harry'),
r'Toto \& Harry')
self.assertEqual(sanitize_for_latex('~50%'), r'\~50\%')
self.assertEqual(sanitize_for_latex('%_&~'), r'\%\_\&\~')
| #coding=utf-8
import unittest
from s2f.Sanitize import (
sanitize_prefix,
sanitize_for_latex
)
class TestSanitize(unittest.TestCase):
""" Test case for the Sanitize module """
def testSanitizePrefix(self):
""" The function sanitize_prefix should only allow for lower case ASCII
letters, digits and the hyphen. Upper case letters are supposed to be
converted to lower case, everything else is supposed to be omitted.
"""
self.assertEqual(sanitize_prefix('_. Testü@# - .5$ç§÷≠0π00'),
'test-5000')
def testSanitizeForLatex(self):
""" LaTeX special characters are supposed to be escaped. """
self.assertEqual(sanitize_for_latex('I am 100% certain!'),
r'I am 100\% certain!')
self.assertEqual(sanitize_for_latex('Toto & Harry'),
r'Toto \& Harry')
self.assertEqual(sanitize_for_latex('~50%'), r'\~50\%')
self.assertEqual(sanitize_for_latex('%_&~'), r'\%\_\&\~')
| Add UTF8 compatibility switch for py2. | Add UTF8 compatibility switch for py2.
| Python | mit | kdungs/lhcb-hltflow | ---
+++
@@ -1,3 +1,5 @@
+#coding=utf-8
+
import unittest
from s2f.Sanitize import ( |
64750014a91669b6067459a14743a6c5e6257856 | umap/__init__.py | umap/__init__.py | from .umap_ import UMAP
# Workaround: https://github.com/numba/numba/issues/3341
import numba
import pkg_resources
__version__ = pkg_resources.get_distribution("umap-learn").version
| from .umap_ import UMAP
# Workaround: https://github.com/numba/numba/issues/3341
import numba
import pkg_resources
try:
__version__ = pkg_resources.get_distribution("umap-learn").version
except pkg_resources.DistributionNotFound:
__version__ = '0.4-dev'
| Patch init import to allow for local dev of UMAP code | Patch init import to allow for local dev of UMAP code
| Python | bsd-3-clause | lmcinnes/umap,lmcinnes/umap | ---
+++
@@ -5,4 +5,7 @@
import pkg_resources
-__version__ = pkg_resources.get_distribution("umap-learn").version
+try:
+ __version__ = pkg_resources.get_distribution("umap-learn").version
+except pkg_resources.DistributionNotFound:
+ __version__ = '0.4-dev' |
c4c9115e7f67a6cc80b283cb0c2cc0e378e08ab8 | tests/test_rainflow.py | tests/test_rainflow.py | import unittest, rainflow
class TestRainflowCounting(unittest.TestCase):
series = [0, -2, 1, -3, 5, -1, 3, -4, 4, -2, 0]
cycles = [(3, 0.5), (4, 1.5), (6, 0.5), (8, 1.0), (9, 0.5)]
def test_rainflow_counting(self):
self.assertItemsEqual(rainflow.count_cycles(self.series), self.cycles)
| import unittest, rainflow, random, itertools
class TestRainflowCounting(unittest.TestCase):
# Load series and corresponding cycle counts from ASTM E1049-85
series = [0, -2, 1, -3, 5, -1, 3, -4, 4, -2, 0]
cycles = [(3, 0.5), (4, 1.5), (6, 0.5), (8, 1.0), (9, 0.5)]
def test_rainflow_counting(self):
self.assertItemsEqual(rainflow.count_cycles(self.series), self.cycles)
def test_rainflow_ndigits(self):
series = [x + 0.01 * random.random() for x in self.series]
self.assertNotEqual(rainflow.count_cycles(series), self.cycles)
self.assertEqual(rainflow.count_cycles(series, ndigits=1), self.cycles)
def test_series_with_zero_derivatives(self):
series = itertools.chain(*([x, x] for x in self.series))
self.assertEqual(rainflow.count_cycles(series), self.cycles)
| Add additional tests for count_cycles | Add additional tests for count_cycles
| Python | mit | iamlikeme/rainflow | ---
+++
@@ -1,9 +1,19 @@
-import unittest, rainflow
+import unittest, rainflow, random, itertools
class TestRainflowCounting(unittest.TestCase):
+ # Load series and corresponding cycle counts from ASTM E1049-85
series = [0, -2, 1, -3, 5, -1, 3, -4, 4, -2, 0]
cycles = [(3, 0.5), (4, 1.5), (6, 0.5), (8, 1.0), (9, 0.5)]
def test_rainflow_counting(self):
self.assertItemsEqual(rainflow.count_cycles(self.series), self.cycles)
+
+ def test_rainflow_ndigits(self):
+ series = [x + 0.01 * random.random() for x in self.series]
+ self.assertNotEqual(rainflow.count_cycles(series), self.cycles)
+ self.assertEqual(rainflow.count_cycles(series, ndigits=1), self.cycles)
+
+ def test_series_with_zero_derivatives(self):
+ series = itertools.chain(*([x, x] for x in self.series))
+ self.assertEqual(rainflow.count_cycles(series), self.cycles) |
757c2552c360be67ee5841c12c0da005afe631b0 | tests/test_settings.py | tests/test_settings.py | DEBUG = True
TEMPLATE_DEBUG = DEBUG
SECRET_KEY = 'fake_secret'
ROOT_URLCONF = 'tests.test_urls'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
MIDDLEWARE_CLASSES = []
INSTALLED_APPS = (
'djproxy',
)
STATIC_ROOT = ''
STATIC_URL = '/'
APPEND_SLASH = False
DATA_UPLOAD_MAX_MEMORY_SIZE = None
| DEBUG = True
TEMPLATE_DEBUG = DEBUG
SECRET_KEY = 'fake_secret'
ROOT_URLCONF = 'tests.test_urls'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
MIDDLEWARE_CLASSES = []
INSTALLED_APPS = (
'djproxy',
)
STATIC_ROOT = ''
STATIC_URL = '/'
APPEND_SLASH = False
| Remove test setting that’s no longer necessary | Remove test setting that’s no longer necessary
| Python | mit | thomasw/djproxy | ---
+++
@@ -22,4 +22,3 @@
STATIC_URL = '/'
APPEND_SLASH = False
-DATA_UPLOAD_MAX_MEMORY_SIZE = None |
8bebd6f9c785333f670167e477f96944d1f45534 | matrix/matrix_test.py | matrix/matrix_test.py | import unittest
from matrix import Matrix
class MatrixTest(unittest.TestCase):
def test_extract_a_row(self):
matrix = Matrix("1 2\n10 20")
self.assertEqual([1, 2], matrix.rows[0])
def test_extract_same_row_again(self):
matrix = Matrix("9 7\n8 6")
self.assertEqual([9, 7], matrix.rows[0])
def test_extract_other_row(self):
matrix = Matrix("9 8 7\n19 18 17")
self.assertEqual([19, 18, 17], matrix.rows[1])
def test_extract_other_row_again(self):
matrix = Matrix("1 4 9\n16 25 36")
self.assertEqual([16, 25, 36], matrix.rows[1])
def test_extract_a_column(self):
matrix = Matrix("1 2 3\n4 5 6\n7 8 9\n 8 7 6")
self.assertEqual([1, 4, 7, 8], matrix.columns[0])
def test_extract_another_column(self):
matrix = Matrix("89 1903 3\n18 3 1\n9 4 800")
self.assertEqual([1903, 3, 4], matrix.columns[1])
if __name__ == '__main__':
unittest.main()
| import unittest
from matrix import Matrix
class MatrixTest(unittest.TestCase):
def test_extract_a_row(self):
matrix = Matrix("1 2\n10 20")
self.assertEqual([1, 2], matrix.rows[0])
def test_extract_same_row_again(self):
matrix = Matrix("9 7\n8 6")
self.assertEqual([9, 7], matrix.rows[0])
def test_extract_other_row(self):
matrix = Matrix("9 8 7\n19 18 17")
self.assertEqual([19, 18, 17], matrix.rows[1])
def test_extract_other_row_again(self):
matrix = Matrix("1 4 9\n16 25 36")
self.assertEqual([16, 25, 36], matrix.rows[1])
def test_extract_a_column(self):
matrix = Matrix("1 2 3\n4 5 6\n7 8 9\n8 7 6")
self.assertEqual([1, 4, 7, 8], matrix.columns[0])
def test_extract_another_column(self):
matrix = Matrix("89 1903 3\n18 3 1\n9 4 800")
self.assertEqual([1903, 3, 4], matrix.columns[1])
if __name__ == '__main__':
unittest.main()
| Fix whitespace issue in matrix assertion | Fix whitespace issue in matrix assertion
| Python | mit | mweb/python,jmluy/xpython,ZacharyRSmith/xpython,outkaj/xpython,pheanex/xpython,Peque/xpython,behrtam/xpython,exercism/python,exercism/xpython,N-Parsons/exercism-python,behrtam/xpython,smalley/python,mweb/python,pombredanne/xpython,de2Zotjes/xpython,orozcoadrian/xpython,wobh/xpython,exercism/xpython,oalbe/xpython,rootulp/xpython,exercism/python,smalley/python,oalbe/xpython,jmluy/xpython,orozcoadrian/xpython,N-Parsons/exercism-python,ZacharyRSmith/xpython,pombredanne/xpython,pheanex/xpython,rootulp/xpython,Peque/xpython,de2Zotjes/xpython,wobh/xpython,outkaj/xpython | ---
+++
@@ -21,7 +21,7 @@
self.assertEqual([16, 25, 36], matrix.rows[1])
def test_extract_a_column(self):
- matrix = Matrix("1 2 3\n4 5 6\n7 8 9\n 8 7 6")
+ matrix = Matrix("1 2 3\n4 5 6\n7 8 9\n8 7 6")
self.assertEqual([1, 4, 7, 8], matrix.columns[0])
def test_extract_another_column(self): |
4f3646e07d592d5da214977732298b680b8fdee7 | zou/app/blueprints/crud/task_status.py | zou/app/blueprints/crud/task_status.py | from zou.app.models.task_status import TaskStatus
from zou.app.services import tasks_service
from .base import BaseModelResource, BaseModelsResource
class TaskStatusesResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, TaskStatus)
def check_read_permissions(self):
return True
def post_creation(self, instance):
tasks_service.clear_task_status_cache(str(instance.id))
return instance.serialize()
class TaskStatusResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, TaskStatus)
def check_read_permissions(self, instance):
return True
def post_update(self, instance_dict):
tasks_service.clear_task_status_cache(instance_dict["id"])
return instance_dict
def post_delete(self, instance_dict):
tasks_service.clear_task_status_cache(instance_dict["id"])
return instance_dict
| from zou.app.models.task_status import TaskStatus
from zou.app.services import tasks_service
from .base import BaseModelResource, BaseModelsResource
class TaskStatusesResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, TaskStatus)
def check_read_permissions(self):
return True
def post_creation(self, instance):
tasks_service.clear_task_status_cache(str(instance.id))
return instance.serialize()
class TaskStatusResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, TaskStatus)
def check_read_permissions(self, instance):
return True
def pre_update(self, instance_dict, data):
if data.get("is_default", False):
status = TaskStatus.get_by(is_default=True)
status.update({"is_default": None})
return instance_dict
def post_update(self, instance_dict):
tasks_service.clear_task_status_cache(instance_dict["id"])
return instance_dict
def post_delete(self, instance_dict):
tasks_service.clear_task_status_cache(instance_dict["id"])
return instance_dict
| Allow to modify the is_default flag | [tasks] Allow to modify the is_default flag
| Python | agpl-3.0 | cgwire/zou | ---
+++
@@ -22,6 +22,12 @@
def check_read_permissions(self, instance):
return True
+ def pre_update(self, instance_dict, data):
+ if data.get("is_default", False):
+ status = TaskStatus.get_by(is_default=True)
+ status.update({"is_default": None})
+ return instance_dict
+
def post_update(self, instance_dict):
tasks_service.clear_task_status_cache(instance_dict["id"])
return instance_dict |
3dc92d34f80762899959b2721bfcfcac78bcb069 | modloader/__init__.py | modloader/__init__.py | import renpy
import os
import sys
import modinfo
import importlib
print('AWSW Mod Loader Init')
def get_mod_path():
"""Get the mod path
Returns:
The full path to the mods folder
"""
#TODO: Use a path combining function
return renpy.config.gamedir + "/mods/"
# By setting the import path to the mod folder, we can do something like `import mod`
# NOTE: To import files in the modloader/ folder, do `from modloader import ...`
# If we add the modloader to the path, the modlist would get reloaded again
sys.path.append(get_mod_path())
for mod in os.listdir(get_mod_path()):
# Some mods require resources to be recognized by renpy. If a folder exists, force renpy to load it
resource_dir = get_mod_path() + mod + "/resource"
if os.path.isdir(resource_dir):
renpy.config.searchpath.append(resource_dir)
# Try importing the mod. If all goes well, the mod is imported through the Mod class
print("Begin mod load: {}".format(mod))
try:
importlib.import_module(mod)
except Exception, e:
print("Exception while loading: {}".format(mod))
print(e)
raise e # Raise it again even though the stacktrace provides nothing useful
# After all mods are loaded, call their respective mod_complete functions
for mod_name, mod in modinfo.get_mods().iteritems():
print("Completing mod {}".format(mod_name))
mod.mod_complete()
# force renpy to reindex all game files
renpy.loader.old_config_archives = None
| import renpy
import os
import sys
import modinfo
import importlib
print('AWSW Mod Loader Init')
def get_mod_path():
"""Get the mod path
Returns:
The full path to the mods folder
"""
#TODO: Use a path combining function
return renpy.config.gamedir + "/mods/"
# By setting the import path to the mod folder, we can do something like `import mod`
# NOTE: To import files in the modloader/ folder, do `from modloader import ...`
# If we add the modloader to the path, the modlist would get reloaded again
sys.path.append(get_mod_path())
for mod in os.listdir(get_mod_path()):
# Some mods require resources to be recognized by renpy. If a folder exists, force renpy to load it
resource_dir = get_mod_path() + mod + "/resource"
if os.path.isdir(resource_dir):
renpy.config.searchpath.append(resource_dir)
# Try importing the mod. If all goes well, the mod is imported through the Mod class
print("Begin mod load: {}".format(mod))
#try:
importlib.import_module(mod)
# After all mods are loaded, call their respective mod_complete functions
for mod_name, mod in modinfo.get_mods().iteritems():
print("Completing mod {}".format(mod_name))
mod.mod_complete()
# force renpy to reindex all game files
renpy.loader.old_config_archives = None
| Stop catching import errors to improve stack trace readability | Stop catching import errors to improve stack trace readability
| Python | mit | AWSW-Modding/AWSW-Modtools | ---
+++
@@ -28,12 +28,8 @@
# Try importing the mod. If all goes well, the mod is imported through the Mod class
print("Begin mod load: {}".format(mod))
- try:
- importlib.import_module(mod)
- except Exception, e:
- print("Exception while loading: {}".format(mod))
- print(e)
- raise e # Raise it again even though the stacktrace provides nothing useful
+ #try:
+ importlib.import_module(mod)
# After all mods are loaded, call their respective mod_complete functions
for mod_name, mod in modinfo.get_mods().iteritems(): |
01f2e41608e83fb4308c44c30ac9bb4fc6d49c86 | server/kcaa/manipulators/automission.py | server/kcaa/manipulators/automission.py | #!/usr/bin/env python
import logging
import time
import base
from kcaa import screens
logger = logging.getLogger('kcaa.manipulators.automission')
class CheckMissionResult(base.Manipulator):
def run(self):
logger.info('Checking mission result')
yield self.screen.check_mission_result()
class AutoCheckMissionResult(base.AutoManipulator):
@classmethod
def can_trigger(cls, owner):
if not screens.in_category(owner.screen_id, screens.PORT):
return
mission_list = owner.objects.get('MissionList')
if not mission_list:
return
now = int(1000 * time.time())
count = 0
for mission in mission_list.missions:
# Make sure the ETA has passed 10000 milliseconds ago.
if mission.eta and mission.eta + 10000 < now:
count += 1
if count != 0:
return {'count': count}
def run(self, count):
for _ in xrange(count):
yield self.do_manipulator(CheckMissionResult)
| #!/usr/bin/env python
import logging
import time
import base
from kcaa import screens
logger = logging.getLogger('kcaa.manipulators.automission')
class CheckMissionResult(base.Manipulator):
def run(self):
logger.info('Checking mission result')
yield self.screen.check_mission_result()
class AutoCheckMissionResult(base.AutoManipulator):
@classmethod
def can_trigger(cls, owner):
if not screens.in_category(owner.screen_id, screens.PORT):
return
mission_list = owner.objects.get('MissionList')
if not mission_list:
return
now = int(1000 * time.time())
count = 0
for mission in mission_list.missions:
if mission.eta and mission.eta < now:
count += 1
if count != 0:
return {'count': count}
def run(self, count):
for _ in xrange(count):
yield self.do_manipulator(CheckMissionResult)
| Stop confirming ETA is 10 seconds ago, as it's possible that AutoFleetCharge interrupt within that duration. | Stop confirming ETA is 10 seconds ago, as it's possible that AutoFleetCharge
interrupt within that duration.
| Python | apache-2.0 | kcaa/kcaa,kcaa/kcaa,kcaa/kcaa,kcaa/kcaa | ---
+++
@@ -29,8 +29,7 @@
now = int(1000 * time.time())
count = 0
for mission in mission_list.missions:
- # Make sure the ETA has passed 10000 milliseconds ago.
- if mission.eta and mission.eta + 10000 < now:
+ if mission.eta and mission.eta < now:
count += 1
if count != 0:
return {'count': count} |
a0114088c9b9ec1bf52ef2c9cfb46743754fe4e1 | vagrant/roles/db/molecule/default/tests/test_default.py | vagrant/roles/db/molecule/default/tests/test_default.py | import os
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
# check if MongoDB is listen concrete host and port
def test_mongo_host_port(host):
socket = host.socket("tcp://127.0.0.1:27017")
assert socket.is_listening
# check if MongoDB is enabled and running
def test_mongo_running_and_enabled(host):
mongo = host.service("mongod")
assert mongo.is_running
assert mongo.is_enabled
# check if configuration file contains the required line
def test_config_file(File):
config_file = File('/etc/mongod.conf')
assert config_file.contains('bindIp: 0.0.0.0')
assert config_file.is_file
| import os
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
# check if MongoDB is listen concrete host and port
def test_mongo_host_port(host):
socket = host.socket("tcp://127.0.0.1:27017")
assert socket.is_listening
# check if MongoDB is enabled and running
def test_mongo_running_and_enabled(host):
mongo = host.service("mongod")
assert mongo.is_running
assert mongo.is_enabled
# check if configuration file contains the required line
def test_config_file(File):
config_file = File('/etc/mongod.conf')
assert config_file.is_file
assert config_file.contains('bindIp: 0.0.0.0')
assert config_file.contains('port: 27017')
| Edit test method 'test_config_file' to check configuration network interface | Edit test method 'test_config_file' to check configuration network interface
| Python | mit | DmitriySh/infra,DmitriySh/infra,DmitriySh/infra | ---
+++
@@ -19,6 +19,6 @@
# check if configuration file contains the required line
def test_config_file(File):
config_file = File('/etc/mongod.conf')
+ assert config_file.is_file
assert config_file.contains('bindIp: 0.0.0.0')
- assert config_file.is_file
-
+ assert config_file.contains('port: 27017') |
bc1e8baeb2698462dae5556e033c37a505cfcb2c | altair/examples/bar_chart_with_labels.py | altair/examples/bar_chart_with_labels.py | """
Simple Bar Chart with Labels
============================
This example shows a basic horizontal bar chart with labels created with Altair.
"""
# category: bar charts
import altair as alt
import pandas as pd
source = pd.DataFrame({
'a': ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I'],
'b': [28, 55, 43, 91, 81, 53, 19, 87, 52]
})
bars = alt.Chart(source).mark_bar().encode(
y='a',
x='b'
)
text = bars.mark_text(
align='left',
baseline='middle',
dx=3
).encode(
text='b'
)
bars + text
| """
Bar Chart with Labels
=====================
This example shows a basic horizontal bar chart with labels created with Altair.
"""
# category: bar charts
import altair as alt
from vega_datasets import data
source = data.wheat.url
bars = alt.Chart(source).mark_bar().encode(
x='wheat:Q',
y="year:O"
)
text = bars.mark_text(
align='left',
baseline='middle',
dx=3 # Nudges text to right so it doesn't appear on top of the bar
).encode(
text='wheat:Q'
)
(bars + text).properties(height=900)
| Standardize bar chat with labels example to use source consistent with other examples | DOC: Standardize bar chat with labels example to use source consistent with other examples | Python | bsd-3-clause | altair-viz/altair,jakevdp/altair | ---
+++
@@ -1,28 +1,25 @@
"""
-Simple Bar Chart with Labels
-============================
+Bar Chart with Labels
+=====================
This example shows a basic horizontal bar chart with labels created with Altair.
"""
# category: bar charts
import altair as alt
-import pandas as pd
+from vega_datasets import data
-source = pd.DataFrame({
- 'a': ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I'],
- 'b': [28, 55, 43, 91, 81, 53, 19, 87, 52]
-})
+source = data.wheat.url
bars = alt.Chart(source).mark_bar().encode(
- y='a',
- x='b'
+ x='wheat:Q',
+ y="year:O"
)
text = bars.mark_text(
align='left',
baseline='middle',
- dx=3
+ dx=3 # Nudges text to right so it doesn't appear on top of the bar
).encode(
- text='b'
+ text='wheat:Q'
)
-bars + text
+(bars + text).properties(height=900) |
0fbd183a95c65eb80bb813368eeb045e9c43b630 | ray/util.py | ray/util.py | import json
import itertools as it
all_sizes = [0.7, 1.0, 1.6, 3.5, 5.0]
all_types = ['Color', 'Texture', 'Edge', 'Orientation']
full_feature_set = list(it.product(all_types, all_sizes))
default_feature_set = list(it.product(all_types[:-1], all_sizes[1:-1]))
def write_segmentation_pipeline_json(jsonfn, ilfn, ilbfn, outdir='.'):
d = {}
d['images'] = [{'name': ilbfn}]
d['session'] = ilfn
d['output_dir'] = outdir
d['features'] = default_feature_set
with open(jsonfn, 'w') as f:
json.dump(d, f)
| import json
import itertools as it
all_sizes = [0.7, 1.0, 1.6, 3.5, 5.0]
all_types = ['Color', 'Texture', 'Edge', 'Orientation']
full_feature_set = list(it.product(all_types, all_sizes))
default_feature_set = list(it.product(all_types[:-1], all_sizes[1:-1]))
def write_segmentation_pipeline_json(jsonfn, ilfn, ilbfns, outdir='.'):
if isinstance(ilbfns, str) or isinstance(ilbfns, unicode):
ilbfns = [ilbfns]
d = {}
d['images'] = [{'name': ilbfn} for ilbfn in ilbfns]
d['session'] = ilfn
d['output_dir'] = outdir
d['features'] = default_feature_set
with open(jsonfn, 'w') as f:
json.dump(d, f)
| Allow multiple batch files for seg pipeline json | Allow multiple batch files for seg pipeline json
| Python | bsd-3-clause | janelia-flyem/gala,jni/gala,jni/ray | ---
+++
@@ -7,9 +7,11 @@
full_feature_set = list(it.product(all_types, all_sizes))
default_feature_set = list(it.product(all_types[:-1], all_sizes[1:-1]))
-def write_segmentation_pipeline_json(jsonfn, ilfn, ilbfn, outdir='.'):
+def write_segmentation_pipeline_json(jsonfn, ilfn, ilbfns, outdir='.'):
+ if isinstance(ilbfns, str) or isinstance(ilbfns, unicode):
+ ilbfns = [ilbfns]
d = {}
- d['images'] = [{'name': ilbfn}]
+ d['images'] = [{'name': ilbfn} for ilbfn in ilbfns]
d['session'] = ilfn
d['output_dir'] = outdir
d['features'] = default_feature_set |
7b9b144ce8e7fca38500f5f0c4e2f5ec3b5d9e0f | tests/px_rambar_test.py | tests/px_rambar_test.py | import os
import sys
from px import px_rambar
from px import px_terminal
def test_render_bar_happy_path():
names_and_numbers = [(u"apa", 1000), (u"bepa", 300), (u"cepa", 50)] + [
(u"long tail", 1)
] * 300
assert px_rambar.render_bar(10, names_and_numbers) == (
px_terminal.red(u" apa ")
+ px_terminal.yellow(u" b")
+ px_terminal.blue(u" ")
+ px_terminal.inverse_video(u" ")
)
| # coding=utf-8
import os
import sys
from px import px_rambar
from px import px_terminal
def test_render_bar_happy_path():
names_and_numbers = [(u"apa", 1000), (u"bepa", 300), (u"cepa", 50)] + [
(u"long tail", 1)
] * 300
assert px_rambar.render_bar(10, names_and_numbers) == (
px_terminal.red(u" apa ")
+ px_terminal.yellow(u" b")
+ px_terminal.blue(u" ")
+ px_terminal.inverse_video(u" ")
)
def test_render_bar_happy_path_unicode():
names_and_numbers = [(u"åpa", 1000), (u"bäpa", 300), (u"cäpa", 50)] + [
(u"lång svans", 1)
] * 300
assert px_rambar.render_bar(10, names_and_numbers) == (
px_terminal.red(u" åpa ")
+ px_terminal.yellow(u" b")
+ px_terminal.blue(u" ")
+ px_terminal.inverse_video(u" ")
)
| Verify rambar can do unicode | Verify rambar can do unicode
| Python | mit | walles/px,walles/px | ---
+++
@@ -1,3 +1,5 @@
+# coding=utf-8
+
import os
import sys
@@ -15,3 +17,15 @@
+ px_terminal.blue(u" ")
+ px_terminal.inverse_video(u" ")
)
+
+
+def test_render_bar_happy_path_unicode():
+ names_and_numbers = [(u"åpa", 1000), (u"bäpa", 300), (u"cäpa", 50)] + [
+ (u"lång svans", 1)
+ ] * 300
+ assert px_rambar.render_bar(10, names_and_numbers) == (
+ px_terminal.red(u" åpa ")
+ + px_terminal.yellow(u" b")
+ + px_terminal.blue(u" ")
+ + px_terminal.inverse_video(u" ")
+ ) |
43edc7a519cb2e7c49a112f816c5192908ac7e6b | tests/test_validator.py | tests/test_validator.py | import pytest
from web_test_base import *
class TestIATIValidator(WebTestBase):
requests_to_load = {
'IATI Validator': {
'url': 'http://validator.iatistandard.org/'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
assert "http://iatistandard.org/" in result
| import pytest
from web_test_base import *
class TestIATIValidator(WebTestBase):
requests_to_load = {
'IATI Validator': {
'url': 'http://validator.iatistandard.org/'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
assert "http://iatistandard.org/" in result
def test_contains_form(self, loaded_request):
"""
Test that the validator contains a form on each of three tabs.
"""
assert len(utility.locate_xpath_result(loaded_request, '//*[@id="status"]/div/form')) == 1
assert len(utility.locate_xpath_result(loaded_request, '//*[@id="fileTab"]/div/form')) == 1
assert len(utility.locate_xpath_result(loaded_request, '//*[@id="extra"]/div/form')) == 1
| Check forms on validator page | Check forms on validator page
Add a test to check that each of the three forms exist on the
validator page.
This test does not check whether the three forms work correctly.
| Python | mit | IATI/IATI-Website-Tests | ---
+++
@@ -15,3 +15,11 @@
result = utility.get_links_from_page(loaded_request)
assert "http://iatistandard.org/" in result
+
+ def test_contains_form(self, loaded_request):
+ """
+ Test that the validator contains a form on each of three tabs.
+ """
+ assert len(utility.locate_xpath_result(loaded_request, '//*[@id="status"]/div/form')) == 1
+ assert len(utility.locate_xpath_result(loaded_request, '//*[@id="fileTab"]/div/form')) == 1
+ assert len(utility.locate_xpath_result(loaded_request, '//*[@id="extra"]/div/form')) == 1 |
390f7ff95755feadc25236feb1eb92655e113b38 | fancypages/__init__.py | fancypages/__init__.py | from __future__ import absolute_import
import os
__version__ = VERSION = "0.3.0"
def get_fancypages_paths(path, use_with_oscar=False):
""" Get absolute paths for *path* relative to the project root """
paths = []
if use_with_oscar:
from fancypages.contrib import oscar_fancypages
base_dir = os.path.dirname(os.path.abspath(oscar_fancypages.__file__))
paths.append(os.path.join(base_dir, path))
return paths + [
os.path.join(os.path.dirname(os.path.abspath(__file__)), path)]
def get_required_apps():
return [
'django_extensions',
# used for image thumbnailing
'sorl.thumbnail',
# framework used for the internal API
'rest_framework',
# provides a convenience layer around model inheritance
# that makes lookup of nested models easier. This is used
# for the content block hierarchy.
'model_utils',
# migration handling
'south',
]
def get_fancypages_apps(use_with_oscar=False):
apps = ['fancypages.assets', 'fancypages']
if use_with_oscar:
apps += ['fancypages.contrib.oscar_fancypages']
return apps
| from __future__ import absolute_import
import os
__version__ = VERSION = "0.3.0"
def get_fancypages_paths(path, use_with_oscar=False):
""" Get absolute paths for *path* relative to the project root """
paths = []
if use_with_oscar:
from fancypages.contrib import oscar_fancypages
base_dir = os.path.dirname(os.path.abspath(oscar_fancypages.__file__))
paths.append(os.path.join(base_dir, path))
return paths + [
os.path.join(os.path.dirname(os.path.abspath(__file__)), path)]
def get_required_apps():
apps = [
'django_extensions',
# used for image thumbnailing
'sorl.thumbnail',
# framework used for the internal API
'rest_framework',
# provides a convenience layer around model inheritance
# that makes lookup of nested models easier. This is used
# for the content block hierarchy.
'model_utils',
]
import django
if django.VERSION[1] < 7:
apps.append('south')
return apps
def get_fancypages_apps(use_with_oscar=False):
apps = ['fancypages.assets', 'fancypages']
if use_with_oscar:
apps += ['fancypages.contrib.oscar_fancypages']
return apps
| Remove south from app helper method for Django 1.7+ | Remove south from app helper method for Django 1.7+
| Python | bsd-3-clause | tangentlabs/django-fancypages,tangentlabs/django-fancypages,tangentlabs/django-fancypages | ---
+++
@@ -17,7 +17,7 @@
def get_required_apps():
- return [
+ apps = [
'django_extensions',
# used for image thumbnailing
'sorl.thumbnail',
@@ -27,9 +27,13 @@
# that makes lookup of nested models easier. This is used
# for the content block hierarchy.
'model_utils',
- # migration handling
- 'south',
]
+
+ import django
+ if django.VERSION[1] < 7:
+ apps.append('south')
+
+ return apps
def get_fancypages_apps(use_with_oscar=False): |
402f71cc65f714cf880c9c9569e83f5bcd47ec72 | paintstore/widgets.py | paintstore/widgets.py | from django import forms
from django.conf import settings
from django.utils.safestring import mark_safe
class ColorPickerWidget(forms.TextInput):
class Media:
css = {
"all": ("%s/%s" % (settings.STATIC_URL, "paintstore/css/colorpicker.css"),)
}
js = (
("%s/%s" % (settings.STATIC_URL, "paintstore/jquery_1.7.2.js")),
("%s/%s" % (settings.STATIC_URL, "paintstore/colorpicker.js"))
)
input_type = 'colorpicker'
def render(self, name, value, attrs=None):
script = u"""<script type='text/javascript'>
$(document).ready(function(){
$('#%s').ColorPicker({
onSubmit: function(hsb, hex, rgb, el, parent) {
$(el).val('#' + hex);
$(el).ColorPickerHide();
},
onBeforeShow: function () {
$(this).ColorPickerSetColor(this.value);
}
}).bind('keyup', function(){
$(this).ColorPickerSetColor(this.value.replace('#', ''));
});
});
</script>
""" % ("id_%s" % name,)
super_render = super(ColorPickerWidget, self).render(name, value, attrs)
return mark_safe(u"%s%s" % (super_render, script))
| from django import forms
from django.conf import settings
from django.utils.safestring import mark_safe
class ColorPickerWidget(forms.TextInput):
class Media:
css = {
"all": ("%s/%s" % (settings.STATIC_URL, "paintstore/css/colorpicker.css"),)
}
js = (
("%s/%s" % (settings.STATIC_URL, "paintstore/jquery_1.7.2.js")),
("%s/%s" % (settings.STATIC_URL, "paintstore/colorpicker.js"))
)
input_type = 'colorpicker'
def render(self, name, value, attrs=None):
script = u"""<script type='text/javascript'>
$(document).ready(function(){{
$('#{0}').ColorPicker({{
onSubmit: function(hsb, hex, rgb, el, parent) {{
$(el).val('#' + hex);
$(el).ColorPickerHide();
$('#{0}').css('background-color', '#' + hex);
}},
onBeforeShow: function () {{
$(this).ColorPickerSetColor(this.value);
}}
}}).bind('keyup', function(){{
$(this).ColorPickerSetColor(this.value.replace('#', ''));
}});
$('#{0}').css('background-color', $('#{0}').val());
}});
</script>
""".format(u'id_'+name)
super_render = super(ColorPickerWidget, self).render(name, value, attrs)
return mark_safe(u"%s%s" % (super_render, script))
| Change the background to reflect the color chosen | Change the background to reflect the color chosen | Python | mit | jamescw/django-paintstore,jamescw/django-paintstore | ---
+++
@@ -18,21 +18,23 @@
def render(self, name, value, attrs=None):
script = u"""<script type='text/javascript'>
- $(document).ready(function(){
- $('#%s').ColorPicker({
- onSubmit: function(hsb, hex, rgb, el, parent) {
+ $(document).ready(function(){{
+ $('#{0}').ColorPicker({{
+ onSubmit: function(hsb, hex, rgb, el, parent) {{
$(el).val('#' + hex);
$(el).ColorPickerHide();
- },
- onBeforeShow: function () {
+ $('#{0}').css('background-color', '#' + hex);
+ }},
+ onBeforeShow: function () {{
$(this).ColorPickerSetColor(this.value);
- }
- }).bind('keyup', function(){
+ }}
+ }}).bind('keyup', function(){{
$(this).ColorPickerSetColor(this.value.replace('#', ''));
- });
- });
+ }});
+ $('#{0}').css('background-color', $('#{0}').val());
+ }});
</script>
- """ % ("id_%s" % name,)
+ """.format(u'id_'+name)
super_render = super(ColorPickerWidget, self).render(name, value, attrs)
return mark_safe(u"%s%s" % (super_render, script)) |
bad2fea8a3a8e7a7d1da9ee83ec48657824eaa07 | tests/test_filesize.py | tests/test_filesize.py | from rich import filesize
def test_traditional():
assert filesize.decimal(0) == "0 bytes"
assert filesize.decimal(1) == "1 byte"
assert filesize.decimal(2) == "2 bytes"
assert filesize.decimal(1000) == "1.0 kB"
assert filesize.decimal(1.5 * 1000 * 1000) == "1.5 MB"
def test_pick_unit_and_suffix():
units = ["bytes", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]
assert filesize.pick_unit_and_suffix(50, units, 1024) == (1, "bytes")
assert filesize.pick_unit_and_suffix(2048, units, 1024) == (1024, "KB")
| from rich import filesize
def test_traditional():
assert filesize.decimal(0) == "0 bytes"
assert filesize.decimal(1) == "1 byte"
assert filesize.decimal(2) == "2 bytes"
assert filesize.decimal(1000) == "1.0 kB"
assert filesize.decimal(1.5 * 1000 * 1000) == "1.5 MB"
assert filesize.decimal(0, precision=2) == "0 bytes"
assert filesize.decimal(1111, precision=0) == "1 kB"
assert filesize.decimal(1111, precision=1) == "1.1 kB"
assert filesize.decimal(1111, precision=2) == "1.11 kB"
assert filesize.decimal(1111, separator="") == "1.1kB"
def test_pick_unit_and_suffix():
units = ["bytes", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]
assert filesize.pick_unit_and_suffix(50, units, 1024) == (1, "bytes")
assert filesize.pick_unit_and_suffix(2048, units, 1024) == (1024, "KB")
| Add some tests for new decimal() params | Add some tests for new decimal() params
| Python | mit | willmcgugan/rich | ---
+++
@@ -7,6 +7,11 @@
assert filesize.decimal(2) == "2 bytes"
assert filesize.decimal(1000) == "1.0 kB"
assert filesize.decimal(1.5 * 1000 * 1000) == "1.5 MB"
+ assert filesize.decimal(0, precision=2) == "0 bytes"
+ assert filesize.decimal(1111, precision=0) == "1 kB"
+ assert filesize.decimal(1111, precision=1) == "1.1 kB"
+ assert filesize.decimal(1111, precision=2) == "1.11 kB"
+ assert filesize.decimal(1111, separator="") == "1.1kB"
def test_pick_unit_and_suffix(): |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.