commit stringlengths 40 40 | old_file stringlengths 4 150 | new_file stringlengths 4 150 | old_contents stringlengths 0 3.26k | new_contents stringlengths 1 4.43k | subject stringlengths 15 501 | message stringlengths 15 4.06k | lang stringclasses 4 values | license stringclasses 13 values | repos stringlengths 5 91.5k | diff stringlengths 0 4.35k |
|---|---|---|---|---|---|---|---|---|---|---|
08ce22e8c467f7fb7da056e098ac88b64c3096dc | step_stool/content.py | step_stool/content.py | __author__ = 'Chris Krycho'
__copyright__ = '2013 Chris Krycho'
from logging import error
from os import path, walk
from sys import exit
try:
from markdown import Markdown
from mixins import DictAsMember
except ImportError as import_error:
error(import_error)
exit()
def convert_source(config):
'''
Convert all Markdown pages to HTML and metadata pairs. Pairs are keyed to
file names slugs (without the original file extension).
'''
md = Markdown(extensions=config.markdown_extensions, output_format='html5')
converted = {}
for root, dirs, file_names in walk(config.site.content.source):
for file_name in file_names:
file_path = path.join(root, file_name)
md_text = open(file_path, 'r').read()
content = md.convert(md_text)
plain_slug, extension = path.splitext(file_name)
converted[plain_slug] = {'content': content, 'meta': md.Meta}
return DictAsMember(converted)
| __author__ = 'Chris Krycho'
__copyright__ = '2013 Chris Krycho'
from logging import error
from os import path, walk
from sys import exit
try:
from markdown import Markdown
from mixins import DictAsMember
except ImportError as import_error:
error(import_error)
exit()
def convert_source(config):
'''
Convert all Markdown pages to HTML and metadata pairs. Pairs are keyed to
file names slugs (without the original file extension).
'''
md = Markdown(extensions=config.markdown_extensions, output_format='html5')
converted = {}
for root, dirs, file_names in walk(config.site.content.source):
for file_name in file_names:
file_path = path.join(root, file_name)
plain_slug, extension = path.splitext(file_name)
with open(file_path) as file:
md_text = file.read()
content = md.convert(md_text)
converted[plain_slug] = {'content': content, 'meta': md.Meta}
return DictAsMember(converted)
| Fix file open/read/close - there was no close() call in previous version! Use context handler instead. | Fix file open/read/close - there was no close() call in previous version! Use context handler instead.
| Python | mit | chriskrycho/step-stool,chriskrycho/step-stool | ---
+++
@@ -25,10 +25,11 @@
for root, dirs, file_names in walk(config.site.content.source):
for file_name in file_names:
file_path = path.join(root, file_name)
- md_text = open(file_path, 'r').read()
- content = md.convert(md_text)
+ plain_slug, extension = path.splitext(file_name)
- plain_slug, extension = path.splitext(file_name)
- converted[plain_slug] = {'content': content, 'meta': md.Meta}
+ with open(file_path) as file:
+ md_text = file.read()
+ content = md.convert(md_text)
+ converted[plain_slug] = {'content': content, 'meta': md.Meta}
return DictAsMember(converted) |
4e3eeba94423399411a763487411b097c4c7972e | rasterfairy/__init__.py | rasterfairy/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Top-level module for RasterFairy"""
from rasterfairy import *
from coonswarp import *
from rfoptimizer import *
from images2gif import *
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Top-level module for RasterFairy"""
from .rasterfairy import *
from .coonswarp import *
from .rfoptimizer import *
# from images2gif import *
| Optimize internal imports from python2 to python3. | Optimize internal imports from python2 to python3.
Also, ignore images2gif import.
| Python | bsd-3-clause | Quasimondo/RasterFairy | ---
+++
@@ -2,8 +2,8 @@
# -*- coding: utf-8 -*-
"""Top-level module for RasterFairy"""
-from rasterfairy import *
-from coonswarp import *
-from rfoptimizer import *
-from images2gif import *
+from .rasterfairy import *
+from .coonswarp import *
+from .rfoptimizer import *
+# from images2gif import *
|
6930782947f604630142b106cb079e627fcff499 | readthedocs/v3/views.py | readthedocs/v3/views.py | import django_filters.rest_framework
from rest_framework.authentication import SessionAuthentication, TokenAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.renderers import JSONRenderer
from rest_framework.throttling import UserRateThrottle, AnonRateThrottle
from rest_flex_fields import FlexFieldsModelViewSet
from readthedocs.projects.models import Project
from readthedocs.restapi.permissions import IsOwner
from .serializers import ProjectSerializer
class APIv3Settings:
authentication_classes = (SessionAuthentication, TokenAuthentication)
permission_classes = (IsAuthenticated, IsOwner)
renderer_classes = (JSONRenderer,)
throttle_classes = (UserRateThrottle, AnonRateThrottle)
filter_backends = (django_filters.rest_framework.DjangoFilterBackend,)
class ProjectsViewSet(APIv3Settings, FlexFieldsModelViewSet):
model = Project
lookup_field = 'slug'
lookup_url_kwarg = 'project_slug'
serializer_class = ProjectSerializer
filterset_fields = (
'slug',
'privacy_level',
)
permit_list_expands = [
'users',
'active_versions',
'active_versions.last_build',
'active_versions.last_build.config',
]
def get_queryset(self):
user = self.request.user
return user.projects.all()
| import django_filters.rest_framework as filters
from rest_framework.authentication import SessionAuthentication, TokenAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.renderers import JSONRenderer
from rest_framework.throttling import UserRateThrottle, AnonRateThrottle
from rest_flex_fields import FlexFieldsModelViewSet
from readthedocs.projects.models import Project
from readthedocs.restapi.permissions import IsOwner
from .filters import ProjectFilter
from .serializers import ProjectSerializer
class APIv3Settings:
authentication_classes = (SessionAuthentication, TokenAuthentication)
permission_classes = (IsAuthenticated, IsOwner)
renderer_classes = (JSONRenderer,)
throttle_classes = (UserRateThrottle, AnonRateThrottle)
filter_backends = (filters.DjangoFilterBackend,)
class ProjectsViewSet(APIv3Settings, FlexFieldsModelViewSet):
model = Project
lookup_field = 'slug'
lookup_url_kwarg = 'project_slug'
serializer_class = ProjectSerializer
filterset_class = ProjectFilter
permit_list_expands = [
'users',
'active_versions',
'active_versions.last_build',
'active_versions.last_build.config',
]
def get_queryset(self):
user = self.request.user
return user.projects.all()
| Use a class filter to allow expansion | Use a class filter to allow expansion
| Python | mit | rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org | ---
+++
@@ -1,4 +1,4 @@
-import django_filters.rest_framework
+import django_filters.rest_framework as filters
from rest_framework.authentication import SessionAuthentication, TokenAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.renderers import JSONRenderer
@@ -8,6 +8,7 @@
from readthedocs.projects.models import Project
from readthedocs.restapi.permissions import IsOwner
+from .filters import ProjectFilter
from .serializers import ProjectSerializer
@@ -17,7 +18,7 @@
permission_classes = (IsAuthenticated, IsOwner)
renderer_classes = (JSONRenderer,)
throttle_classes = (UserRateThrottle, AnonRateThrottle)
- filter_backends = (django_filters.rest_framework.DjangoFilterBackend,)
+ filter_backends = (filters.DjangoFilterBackend,)
class ProjectsViewSet(APIv3Settings, FlexFieldsModelViewSet):
@@ -26,10 +27,7 @@
lookup_field = 'slug'
lookup_url_kwarg = 'project_slug'
serializer_class = ProjectSerializer
- filterset_fields = (
- 'slug',
- 'privacy_level',
- )
+ filterset_class = ProjectFilter
permit_list_expands = [
'users',
'active_versions', |
11860d9181d7a8e1a3924bc42234903ba96e304d | ForgeGit/forgegit/tests/test_git_app.py | ForgeGit/forgegit/tests/test_git_app.py | import unittest
from pylons import c, g
from ming.orm import ThreadLocalORMSession
from pyforge.tests import helpers
from pyforge.lib import helpers as h
class TestGitApp(unittest.TestCase):
def setUp(self):
helpers.setup_basic_test()
helpers.setup_global_objects()
h.set_context('test', 'src_git')
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
def test_templates(self):
assert c.app.templates.endswith('forgegit/templates')
def test_admin_menu(self):
assert c.app.admin_menu() == []
def test_uninstall(self):
c.app.uninstall(c.project)
assert g.mock_amq.pop('audit')
g.mock_amq.setup_handlers()
c.app.uninstall(c.project)
g.mock_amq.handle_all()
| import unittest
from pylons import c, g
from ming.orm import ThreadLocalORMSession
from pyforge.tests import helpers
from pyforge.lib import helpers as h
class TestGitApp(unittest.TestCase):
def setUp(self):
helpers.setup_basic_test()
helpers.setup_global_objects()
h.set_context('test', 'src_git')
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
def test_templates(self):
assert c.app.templates.endswith('forgegit/templates')
def test_admin_menu(self):
assert len(c.app.admin_menu()) == 1
def test_uninstall(self):
c.app.uninstall(c.project)
assert g.mock_amq.pop('audit')
g.mock_amq.setup_handlers()
c.app.uninstall(c.project)
g.mock_amq.handle_all()
| Update test to reflect changing git codebase | Update test to reflect changing git codebase
| Python | apache-2.0 | apache/incubator-allura,lym/allura-git,Bitergia/allura,lym/allura-git,leotrubach/sourceforge-allura,Bitergia/allura,leotrubach/sourceforge-allura,lym/allura-git,heiths/allura,Bitergia/allura,leotrubach/sourceforge-allura,apache/incubator-allura,heiths/allura,lym/allura-git,apache/incubator-allura,heiths/allura,lym/allura-git,apache/allura,Bitergia/allura,heiths/allura,apache/allura,apache/allura,heiths/allura,apache/incubator-allura,Bitergia/allura,apache/allura,apache/allura,leotrubach/sourceforge-allura | ---
+++
@@ -20,7 +20,7 @@
assert c.app.templates.endswith('forgegit/templates')
def test_admin_menu(self):
- assert c.app.admin_menu() == []
+ assert len(c.app.admin_menu()) == 1
def test_uninstall(self):
c.app.uninstall(c.project) |
a494260a8f9cf0e3ecf0c428bb70d4066623f1dd | wqflask/utility/elasticsearch_tools.py | wqflask/utility/elasticsearch_tools.py | es = None
try:
from elasticsearch import Elasticsearch, TransportError
from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT
es = Elasticsearch([{
"host": ELASTICSEARCH_HOST
, "port": ELASTICSEARCH_PORT
}]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None
except:
es = None
def get_user_by_unique_column(column_name, column_value):
user_details = None
try:
response = es.search(
index = "users"
, doc_type = "local"
, body = {
"query": { "match": { column_name: column_value } }
})
if len(response["hits"]["hits"]) > 0:
user_details = response["hits"]["hits"][0]["_source"]
except TransportError as te:
pass
return user_details
def save_user(user, user_id, index="users", doc_type="local"):
from time import sleep
es = Elasticsearch([{
"host": ELASTICSEARCH_HOST
, "port": ELASTICSEARCH_PORT
}])
es.create(index, doc_type, body=user, id=user_id)
sleep(1) # Delay 1 second to allow indexing
| es = None
try:
from elasticsearch import Elasticsearch, TransportError
from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT
es = Elasticsearch([{
"host": ELASTICSEARCH_HOST
, "port": ELASTICSEARCH_PORT
}]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None
except:
es = None
def get_user_by_unique_column(column_name, column_value):
return get_item_by_unique_column(column_name, column_value, index="users", doc_type="local")
def save_user(user, user_id):
es_save_data("users", "local", user, user_id)
def get_item_by_unique_column(column_name, column_value, index, doc_type):
item_details = None
try:
response = es.search(
index = index
, doc_type = doc_type
, body = {
"query": { "match": { column_name: column_value } }
})
if len(response["hits"]["hits"]) > 0:
item_details = response["hits"]["hits"][0]["_source"]
except TransportError as te:
pass
return item_details
def es_save_data(index, doc_type, data_item, data_id,):
from time import sleep
es.create(index, doc_type, body=data_item, id=data_id)
sleep(1) # Delay 1 second to allow indexing
| Refactor common items to more generic methods. | Refactor common items to more generic methods.
* Refactor code that can be used in more than one place to a more
generic method/function that's called by other methods
| Python | agpl-3.0 | pjotrp/genenetwork2,DannyArends/genenetwork2,zsloan/genenetwork2,pjotrp/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,DannyArends/genenetwork2,zsloan/genenetwork2,pjotrp/genenetwork2,DannyArends/genenetwork2,DannyArends/genenetwork2,DannyArends/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,genenetwork/genenetwork2,zsloan/genenetwork2,DannyArends/genenetwork2 | ---
+++
@@ -11,25 +11,27 @@
es = None
def get_user_by_unique_column(column_name, column_value):
- user_details = None
+ return get_item_by_unique_column(column_name, column_value, index="users", doc_type="local")
+
+def save_user(user, user_id):
+ es_save_data("users", "local", user, user_id)
+
+def get_item_by_unique_column(column_name, column_value, index, doc_type):
+ item_details = None
try:
response = es.search(
- index = "users"
- , doc_type = "local"
+ index = index
+ , doc_type = doc_type
, body = {
"query": { "match": { column_name: column_value } }
})
if len(response["hits"]["hits"]) > 0:
- user_details = response["hits"]["hits"][0]["_source"]
+ item_details = response["hits"]["hits"][0]["_source"]
except TransportError as te:
pass
- return user_details
+ return item_details
-def save_user(user, user_id, index="users", doc_type="local"):
+def es_save_data(index, doc_type, data_item, data_id,):
from time import sleep
- es = Elasticsearch([{
- "host": ELASTICSEARCH_HOST
- , "port": ELASTICSEARCH_PORT
- }])
- es.create(index, doc_type, body=user, id=user_id)
+ es.create(index, doc_type, body=data_item, id=data_id)
sleep(1) # Delay 1 second to allow indexing |
c5b8ea3c7f3bf111e36515f92ab3aeb70026771e | openstack-dashboard/dashboard/tests.py | openstack-dashboard/dashboard/tests.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
from django import test
from django.core import mail
from mailer import engine
from mailer import send_mail
class DjangoMailerPresenceTest(test.TestCase):
def test_mailsent(self):
send_mail('subject', 'message_body', 'from@test.com', ['to@test.com'])
engine.send_all()
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, 'subject')
| # vim: tabstop=4 shiftwidth=4 softtabstop=4
''' Test for django mailer.
This test is pretty much worthless, and should be removed once real testing of
views that send emails is implemented
'''
from django import test
from django.core import mail
from mailer import engine
from mailer import send_mail
class DjangoMailerPresenceTest(test.TestCase):
def test_mailsent(self):
send_mail('subject', 'message_body', 'from@test.com', ['to@test.com'])
engine.send_all()
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, 'subject')
| Add comment ot openstack test | Add comment ot openstack test
| Python | apache-2.0 | usc-isi/horizon-old,coreycb/horizon,Daniex/horizon,gochist/horizon,saydulk/horizon,CiscoSystems/avos,NCI-Cloud/horizon,pnavarro/openstack-dashboard,Solinea/horizon,promptworks/horizon,Mirantis/mos-horizon,tuskar/tuskar-ui,yjxtogo/horizon,mandeepdhami/horizon,cloud-smokers/openstack-dashboard,Metaswitch/horizon,asomya/test,nvoron23/avos,spring-week-topos/horizon-week,xinwu/horizon,1ukash/horizon,icloudrnd/automation_tools,ntt-pf-lab/horizon,citrix-openstack-build/horizon,xinwu/horizon,tqtran7/horizon,opencloudconsortium/tukey-portal,promptworks/horizon,savi-dev/horizon,yjxtogo/horizon,mrunge/horizon,openstack/horizon,99cloud/keystone_register,developerworks/horizon,bac/horizon,zouyapeng/horizon,zouyapeng/horizon,bac/horizon,yjxtogo/horizon,JioCloud/horizon,agileblaze/OpenStackTwoFactorAuthentication,yeming233/horizon,watonyweng/horizon,zen/openstack-dashboard,Tesora/tesora-horizon,kaiweifan/horizon,damien-dg/horizon,yjxtogo/horizon,kfox1111/horizon,citrix-openstack/horizon,kaiweifan/horizon,xme1226/horizon,bigswitch/horizon,izadorozhna/dashboard_integration_tests,tqtran7/horizon,CiscoSystems/horizon,wangxiangyu/horizon,mrunge/horizon,tuskar/tuskar-ui,NCI-Cloud/horizon,flochaz/horizon,endorphinl/horizon-fork,vladryk/horizon,RudoCris/horizon,orbitfp7/horizon,citrix-openstack-build/horizon,openstack-ja/horizon,CiscoSystems/openstack-dashboard,coreycb/horizon,NeCTAR-RC/horizon,icloudrnd/automation_tools,idjaw/horizon,luhanhan/horizon,NeCTAR-RC/horizon,mdavid/horizon,yeming233/horizon,takeshineshiro/horizon,doug-fish/horizon,noironetworks/horizon,newrocknj/horizon,liyitest/rr,vladryk/horizon,liyitest/rr,ohnoimdead/horizon,VaneCloud/horizon,tsufiev/horizon,endorphinl/horizon,Dark-Hacker/horizon,FNST-OpenStack/horizon,gerrive/horizon,citrix-openstack/horizon,xme1226/horizon,agileblaze/OpenStackTwoFactorAuthentication,sandvine/horizon,CiscoSystems/dashboard-quantum-beta,xme1226/horizon,Frostman/eho-horizon,anthonydillon/horizon,99cloud/keystone_regist
er,davidcusatis/horizon,VaneCloud/horizon,vbannai/disk-qos-horizon,CiscoSystems/avos,wolverineav/horizon,redhat-openstack/horizon,django-leonardo/horizon,froyobin/horizon,ging/horizon,bigswitch/horizon,idjaw/horizon,netscaler/horizon,99cloud/keystone_register,zen/openstack-dashboard,mrunge/horizon_lib,anotherjesse/openstack-dashboard,netscaler/horizon,rdo-management/tuskar-ui,vbannai/disk-qos-horizon,ging/horizon,gochist/horizon,VaneCloud/horizon,ntt-pf-lab/horizon,endorphinl/horizon,tellesnobrega/horizon,flochaz/horizon,henaras/horizon,opencloudconsortium/tukey-portal,citrix-openstack/horizon,ging/horizon,developerworks/horizon,Solinea/horizon,Daniex/horizon,BiznetGIO/horizon,tuskar/tuskar-ui,bac/horizon,dan1/horizon-x509,endorphinl/horizon,j4/horizon,karthik-suresh/horizon,anotherjesse/openstack-dashboard,redhat-cip/horizon,ntt-pf-lab/horizon,davidcusatis/horizon,henaras/horizon,liyitest/rr,icloudrnd/automation_tools,CiscoSystems/horizon,Mirantis/mos-horizon,eayunstack/horizon,philoniare/horizon,dan1/horizon-x509,orbitfp7/horizon,ChameleonCloud/horizon,Hodorable/0602,gochist/horizon,coreycb/horizon,VaneCloud/horizon,ChameleonCloud/horizon,xinwu/horizon,Dark-Hacker/horizon,RudoCris/horizon,mdavid/horizon,saydulk/horizon,aaronorosen/horizon-congress,tqtran7/horizon,dan1/horizon-x509,watonyweng/horizon,rickerc/horizon_audit,Solinea/horizon,orbitfp7/horizon,django-leonardo/horizon,izadorozhna/dashboard_integration_tests,Mirantis/mos-horizon,blueboxgroup/horizon,maestro-hybrid-cloud/horizon,blueboxgroup/horizon,NeCTAR-RC/horizon,rdo-management/tuskar-ui,citrix-openstack-build/horizon,JioCloud/horizon,tsufiev/horizon,Daniex/horizon,yeming233/horizon,damien-dg/horizon,redhat-cip/horizon,mrunge/openstack_horizon,nvoron23/avos,philoniare/horizon,Tesora/tesora-horizon,froyobin/horizon,flochaz/horizon,tsufiev/horizon,ttrifonov/horizon,agileblaze/OpenStackTwoFactorAuthentication,NCI-Cloud/horizon,newrocknj/horizon,tellesnobrega/horizon,rickerc/horizon_audit,Tesora/tesora-hori
zon,yanheven/console,karthik-suresh/horizon,tellesnobrega/horizon,luhanhan/horizon,newrocknj/horizon,1ukash/horizon,davidcusatis/horizon,Tesora/tesora-horizon,watonyweng/horizon,pranavtendolkr/horizon,promptworks/horizon,anotherjesse/openstack-dashboard,redhat-cip/horizon,newrocknj/horizon,froyobin/horizon,JioCloud/horizon,ChameleonCloud/horizon,luhanhan/horizon,idjaw/horizon,dan1/horizon-x509,rickerc/horizon_audit,endorphinl/horizon-fork,coreycb/horizon,mrunge/horizon_lib,usc-isi/horizon-old,mrunge/horizon_lib,CiscoSystems/avos,zouyapeng/horizon,pnavarro/openstack-dashboard,ging/horizon,eayunstack/horizon,yeming233/horizon,henaras/horizon,pranavtendolkr/horizon,idjaw/horizon,mrunge/openstack_horizon,tanglei528/horizon,django-leonardo/horizon,maestro-hybrid-cloud/horizon,dan1/horizon-proto,r-icarus/openstack_microserver,Hodorable/0602,anthonydillon/horizon,bac/horizon,liyitest/rr,tanglei528/horizon,karthik-suresh/horizon,takeshineshiro/horizon,CiscoSystems/openstack-dashboard,LabAdvComp/tukey_portal,redhat-cip/horizon,Metaswitch/horizon,tanglei528/horizon,CiscoSystems/horizon,dan1/horizon-proto,zouyapeng/horizon,luhanhan/horizon,kfox1111/horizon,aaronorosen/horizon-congress,RudoCris/horizon,takeshineshiro/horizon,kfox1111/horizon,sandvine/horizon,redhat-openstack/horizon,BiznetGIO/horizon,occ-data/tukey-portal,developerworks/horizon,mrunge/horizon,noironetworks/horizon,davidcusatis/horizon,NCI-Cloud/horizon,Metaswitch/horizon,griddynamics/osc-robot-openstack-dashboard,aaronorosen/horizon-congress,BiznetGIO/horizon,ntt-pf-lab/openstack-dashboard,tsufiev/horizon,spring-week-topos/horizon-week,vbannai/disk-qos-horizon,eayunstack/horizon,ohnoimdead/horizon,redhat-openstack/horizon,Hodorable/0602,ttrifonov/horizon,rdo-management/tuskar-ui,sandvine/horizon,promptworks/horizon,doug-fish/horizon,j4/horizon,django-leonardo/horizon,endorphinl/horizon,griddynamics/osc-robot-openstack-dashboard,netscaler/horizon,blueboxgroup/horizon,gerrive/horizon,griddynamics/osc-robot-openst
ack-dashboard,CiscoSystems/openstack-dashboard,j4/horizon,takeshineshiro/horizon,doug-fish/horizon,Metaswitch/horizon,mdavid/horizon,bigswitch/horizon,LabAdvComp/tukey_portal,agileblaze/OpenStackTwoFactorAuthentication,wolverineav/horizon,CiscoSystems/dashboard-quantum-beta,tqtran7/horizon,ikargis/horizon_fod,openstack-ja/horizon,usc-isi/horizon-old,rdo-management/tuskar-ui,FNST-OpenStack/horizon,r-icarus/openstack_microserver,asomya/test,mandeepdhami/horizon,maestro-hybrid-cloud/horizon,FNST-OpenStack/horizon,watonyweng/horizon,NeCTAR-RC/horizon,philoniare/horizon,CiscoSystems/avos,openstack/horizon,dan1/horizon-proto,savi-dev/horizon,orbitfp7/horizon,ikargis/horizon_fod,spring-week-topos/horizon-week,vladryk/horizon,LabAdvComp/tukey_portal,nvoron23/avos,mandeepdhami/horizon,wolverineav/horizon,ChameleonCloud/horizon,doug-fish/horizon,wolverineav/horizon,savi-dev/horizon,gerrive/horizon,endorphinl/horizon-fork,ikargis/horizon_fod,ntt-pf-lab/openstack-dashboard,mrunge/openstack_horizon,kaiweifan/horizon,BiznetGIO/horizon,asomya/test,vladryk/horizon,yanheven/console,Mirantis/mos-horizon,henaras/horizon,bigswitch/horizon,pnavarro/openstack-dashboard,damien-dg/horizon,icloudrnd/automation_tools,cloud-smokers/openstack-dashboard,openstack/horizon,blueboxgroup/horizon,CiscoSystems/horizon,damien-dg/horizon,redhat-openstack/horizon,ohnoimdead/horizon,dan1/horizon-proto,Dark-Hacker/horizon,philoniare/horizon,maestro-hybrid-cloud/horizon,CiscoSystems/dashboard-quantum-beta,r-icarus/openstack_microserver,1ukash/horizon,saydulk/horizon,Solinea/horizon,zen/openstack-dashboard,nvoron23/avos,anthonydillon/horizon,Hodorable/0602,saydulk/horizon,RudoCris/horizon,wangxiangyu/horizon,endorphinl/horizon-fork,mdavid/horizon,noironetworks/horizon,ntt-pf-lab/openstack-dashboard,ttrifonov/horizon,cloud-smokers/openstack-dashboard,xinwu/horizon,FNST-OpenStack/horizon,sandvine/horizon,wangxiangyu/horizon,gerrive/horizon,karthik-suresh/horizon,mandeepdhami/horizon,openstack-ja/horizon,antho
nydillon/horizon,opencloudconsortium/tukey-portal,flochaz/horizon,occ-data/tukey-portal,openstack/horizon,j4/horizon,Frostman/eho-horizon,occ-data/tukey-portal,kfox1111/horizon,pranavtendolkr/horizon,wangxiangyu/horizon,Daniex/horizon,pranavtendolkr/horizon,yanheven/console,noironetworks/horizon,LabAdvComp/tukey_portal,Frostman/eho-horizon,tellesnobrega/horizon,Dark-Hacker/horizon | ---
+++
@@ -1,4 +1,9 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
+''' Test for django mailer.
+
+This test is pretty much worthless, and should be removed once real testing of
+views that send emails is implemented
+'''
from django import test
from django.core import mail |
f5cab8249d5e162285e5fc94ded8bf7ced292986 | test/test_ev3_key.py | test/test_ev3_key.py | from ev3.ev3dev import Key
import unittest
from util import get_input
import time
class TestTone(unittest.TestCase):
def test_tone(self):
d = Key()
get_input('Test keyboard. Hold Up key')
print(d.up)
get_input('Test keyboard. Release Up key')
print(d.up)
get_input('Test keyboard. Hold Down key')
print(d.down)
get_input('Test keyboard. Release Down key')
print(d.down)
get_input('Test keyboard. Hold Left key')
print(d.left)
get_input('Test keyboard. Release Left key')
print(d.left)
get_input('Test keyboard. Hold Right key')
print(d.right)
get_input('Test keyboard. Release Right key')
print(d.right)
get_input('Test keyboard. Hold Backspace key')
print(d.backspace)
get_input('Test keyboard. Release Backspace key')
print(d.backspace)
get_input('Test keyboard. Hold Enter key')
print(d.enter)
get_input('Test keyboard. Release Enter key')
print(d.enter)
if __name__ == '__main__':
unittest.main()
| from ev3.ev3dev import Key
import unittest
from util import get_input
class TestKey(unittest.TestCase):
def test_key(self):
d = Key()
get_input('Test keyboard. Hold Up key')
print(d.up)
get_input('Test keyboard. Release Up key')
print(d.up)
get_input('Test keyboard. Hold Down key')
print(d.down)
get_input('Test keyboard. Release Down key')
print(d.down)
get_input('Test keyboard. Hold Left key')
print(d.left)
get_input('Test keyboard. Release Left key')
print(d.left)
get_input('Test keyboard. Hold Right key')
print(d.right)
get_input('Test keyboard. Release Right key')
print(d.right)
get_input('Test keyboard. Hold Backspace key')
print(d.backspace)
get_input('Test keyboard. Release Backspace key')
print(d.backspace)
get_input('Test keyboard. Hold Enter key')
print(d.enter)
get_input('Test keyboard. Release Enter key')
print(d.enter)
if __name__ == '__main__':
unittest.main()
| Change dname of key test | Change dname of key test
| Python | apache-2.0 | topikachu/python-ev3,MaxNoe/python-ev3,evz/python-ev3,topikachu/python-ev3,MaxNoe/python-ev3,evz/python-ev3 | ---
+++
@@ -1,12 +1,11 @@
from ev3.ev3dev import Key
import unittest
from util import get_input
-import time
-class TestTone(unittest.TestCase):
+class TestKey(unittest.TestCase):
- def test_tone(self):
+ def test_key(self):
d = Key()
get_input('Test keyboard. Hold Up key')
print(d.up) |
a11cc4bae9fa48144b8a755eb3cb17fd707f2a7c | lib/ansible/release.py | lib/ansible/release.py | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
__version__ = '2.7.0.a1'
__author__ = 'Ansible, Inc.'
__codename__ = 'In the Light'
| # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
__version__ = '2.7.0.a1.post0'
__author__ = 'Ansible, Inc.'
__codename__ = 'In the Light'
| Add post modifier to version | Add post modifier to version
| Python | mit | thaim/ansible,thaim/ansible | ---
+++
@@ -19,6 +19,6 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-__version__ = '2.7.0.a1'
+__version__ = '2.7.0.a1.post0'
__author__ = 'Ansible, Inc.'
__codename__ = 'In the Light' |
517a76bc9aec3dbe8c21c96be424da838b5fcc02 | apistar/wsgi.py | apistar/wsgi.py | from typing import Iterable, List, Tuple
from werkzeug.http import HTTP_STATUS_CODES
from apistar import http
__all__ = ['WSGIEnviron', 'WSGIResponse']
STATUS_CODES = {
code: "%d %s" % (code, msg)
for code, msg in HTTP_STATUS_CODES.items()
}
WSGIEnviron = http.WSGIEnviron
class WSGIResponse(object):
__slots__ = ('status', 'headers', 'iterator')
def __init__(self,
status: str,
headers: List[Tuple[str, str]],
iterator: Iterable[bytes]) -> None:
self.status = status
self.headers = headers
self.iterator = iterator
@classmethod
def build(cls, response: http.Response):
try:
status_text = STATUS_CODES[response.status]
except KeyError:
status_text = str(response.status)
return WSGIResponse(
status=status_text,
headers=list(response.headers.items()),
iterator=[response.content]
)
| from typing import Iterable, List, Tuple
from werkzeug.http import HTTP_STATUS_CODES
from apistar import http
__all__ = ['WSGIEnviron', 'WSGIResponse']
STATUS_CODES = {
code: "%d %s" % (code, msg)
for code, msg in HTTP_STATUS_CODES.items()
}
ACCESS_CONTROL_ALLOW_ORIGIN = 'Access-Control-Allow-Origin'
WSGIEnviron = http.WSGIEnviron
class WSGIResponse(object):
__slots__ = ('status', 'headers', 'iterator')
def __init__(self,
status: str,
headers: List[Tuple[str, str]],
iterator: Iterable[bytes]) -> None:
self.status = status
self.headers = headers
self.iterator = iterator
@classmethod
def build(cls, response: http.Response):
try:
status_text = STATUS_CODES[response.status]
except KeyError:
status_text = str(response.status)
headers = list(response.headers.items())
if ACCESS_CONTROL_ALLOW_ORIGIN not in response.headers:
headers += [(ACCESS_CONTROL_ALLOW_ORIGIN, '*')]
return WSGIResponse(
status=status_text,
headers=headers,
iterator=[response.content]
)
| Set "Access-Control-Allow-Origin: *" by default | Set "Access-Control-Allow-Origin: *" by default
| Python | bsd-3-clause | encode/apistar,rsalmaso/apistar,encode/apistar,tomchristie/apistar,tomchristie/apistar,tomchristie/apistar,encode/apistar,rsalmaso/apistar,tomchristie/apistar,encode/apistar,rsalmaso/apistar,rsalmaso/apistar | ---
+++
@@ -12,6 +12,7 @@
for code, msg in HTTP_STATUS_CODES.items()
}
+ACCESS_CONTROL_ALLOW_ORIGIN = 'Access-Control-Allow-Origin'
WSGIEnviron = http.WSGIEnviron
@@ -34,8 +35,13 @@
except KeyError:
status_text = str(response.status)
+ headers = list(response.headers.items())
+
+ if ACCESS_CONTROL_ALLOW_ORIGIN not in response.headers:
+ headers += [(ACCESS_CONTROL_ALLOW_ORIGIN, '*')]
+
return WSGIResponse(
status=status_text,
- headers=list(response.headers.items()),
+ headers=headers,
iterator=[response.content]
) |
c0ab344235fdd7df8e32c499124596d20f9d9e52 | src/tempel/forms.py | src/tempel/forms.py | from django import forms
from tempel import utils
class EntryForm(forms.Form):
language = forms.ChoiceField(choices=utils.get_languages(),
initial="python")
content = forms.CharField(widget=forms.Textarea)
private = forms.BooleanField(required=False)
| from django import forms
from tempel import utils
class EntryForm(forms.Form):
language = forms.ChoiceField(choices=utils.get_languages(),
initial="python")
content = forms.CharField(widget=forms.Textarea)
private = forms.BooleanField(required=False)
class EditForm(forms.Form):
language = forms.ChoiceField(choices=utils.get_languages(),
initial="python")
content = forms.CharField(widget=forms.Textarea)
| Add EditForm that does not have 'private' field. | Add EditForm that does not have 'private' field.
| Python | agpl-3.0 | fajran/tempel | ---
+++
@@ -8,3 +8,8 @@
content = forms.CharField(widget=forms.Textarea)
private = forms.BooleanField(required=False)
+class EditForm(forms.Form):
+ language = forms.ChoiceField(choices=utils.get_languages(),
+ initial="python")
+ content = forms.CharField(widget=forms.Textarea)
+ |
73fa0f555ec140254ecdc09ab17ba1a065861e0c | metakernel/__init__.py | metakernel/__init__.py | from ._metakernel import MetaKernel, IPythonKernel, register_ipython_magics
from . import pexpect
from .replwrap import REPLWrapper, u
from .process_metakernel import ProcessMetaKernel
from .magic import Magic, option
from .parser import Parser
__all__ = ['Magic', 'MetaKernel', 'option']
__version__ = '0.10.6'
del magic, _metakernel, parser, process_metakernel
| from ._metakernel import MetaKernel, IPythonKernel, register_ipython_magics
from . import pexpect
from .replwrap import REPLWrapper, u
from .process_metakernel import ProcessMetaKernel
from .magic import Magic, option
from .parser import Parser
__all__ = ['Magic', 'MetaKernel', 'option']
__version__ = '0.11.0'
del magic, _metakernel, parser, process_metakernel
| Bump version to 0.11.0 and upload to pypi | Bump version to 0.11.0 and upload to pypi
| Python | bsd-3-clause | Calysto/metakernel | ---
+++
@@ -7,6 +7,6 @@
__all__ = ['Magic', 'MetaKernel', 'option']
-__version__ = '0.10.6'
+__version__ = '0.11.0'
del magic, _metakernel, parser, process_metakernel |
c96da14b7bc05d6de7f1ddb9b634ef04ae2e2213 | tests/test_trivia.py | tests/test_trivia.py |
import unittest
from units.trivia import check_answer
class TestCheckAnswer(unittest.TestCase):
def test_correct_answer(self):
self.assertTrue(check_answer("correct", "correct"))
def test_incorrect_answer(self):
self.assertFalse(check_answer("correct", "incorrect"))
if __name__ == "__main__":
unittest.main()
|
import unittest
from units.trivia import check_answer
class TestCheckAnswer(unittest.TestCase):
def test_correct_answer(self):
self.assertTrue(check_answer("correct", "correct"))
def test_incorrect_answer(self):
self.assertFalse(check_answer("correct", "incorrect"))
def test_parentheses_with_article_prefix(self):
self.assertTrue(
check_answer(
"the ISS (the International Space Station)",
"International Space Station"
)
)
self.assertTrue(
check_answer("Holland (The Netherlands)", "Netherlands")
)
if __name__ == "__main__":
unittest.main()
| Test trivia answers in parentheses with article prefixes | [Tests] Test trivia answers in parentheses with article prefixes
| Python | mit | Harmon758/Harmonbot,Harmon758/Harmonbot | ---
+++
@@ -12,6 +12,17 @@
def test_incorrect_answer(self):
self.assertFalse(check_answer("correct", "incorrect"))
+ def test_parentheses_with_article_prefix(self):
+ self.assertTrue(
+ check_answer(
+ "the ISS (the International Space Station)",
+ "International Space Station"
+ )
+ )
+ self.assertTrue(
+ check_answer("Holland (The Netherlands)", "Netherlands")
+ )
+
if __name__ == "__main__":
unittest.main() |
e3cc22064ebb709788c770a8940d0b0f742a8741 | mpfmonitor/_version.py | mpfmonitor/_version.py | # mpf-monitor
__version__ = '0.54.0-dev.0'
__short_version__ = '0.54'
__bcp_version__ = '1.1'
__config_version__ = '5'
__mpf_version_required__ = '0.54.0-dev.0'
version = "MPF Monitor v{} (config_version={}, BCP v{}, Requires MPF v{})".format(
__version__, __config_version__, __bcp_version__, __mpf_version_required__)
| # mpf-monitor
__version__ = '0.54.0-dev.1'
__short_version__ = '0.54'
__bcp_version__ = '1.1'
__config_version__ = '5'
__mpf_version_required__ = '0.54.0-dev.0'
version = "MPF Monitor v{} (config_version={}, BCP v{}, Requires MPF v{})".format(
__version__, __config_version__, __bcp_version__, __mpf_version_required__)
| Increment dev version, expect push to PyPi | Increment dev version, expect push to PyPi
| Python | mit | missionpinball/mpf-monitor | ---
+++
@@ -1,5 +1,5 @@
# mpf-monitor
-__version__ = '0.54.0-dev.0'
+__version__ = '0.54.0-dev.1'
__short_version__ = '0.54'
__bcp_version__ = '1.1'
__config_version__ = '5' |
e371842b0efb9a7d160f7909415190fd583b6c68 | tool_requirements.py | tool_requirements.py | # Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
# Version requirements for various tools. Checked by tooling (e.g. fusesoc),
# and inserted into the documentation.
#
# Entries are keyed by tool name. The value is either a string giving the
# minimum version number or is a dictionary. If a dictionary, the following
# keys are recognised:
#
# min_version: Required string. Minimum version number.
#
# as_needed: Optional bool. Defaults to False. If set, this tool is not
# automatically required. If it is asked for, the rest of the
# entry gives the required version.
#
__TOOL_REQUIREMENTS__ = {
'edalize': '0.2.0',
'ninja': '1.8.2',
'verilator': '4.104',
'hugo_extended': {
'min_version': '0.71.0',
'as_needed': True
},
'verible': {
'min_version': 'v0.0-808-g1e17daa',
'as_needed': True
},
'vcs': {
'min_version': '2020.03-SP2',
'as_needed': True
}
}
| # Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
# Version requirements for various tools. Checked by tooling (e.g. fusesoc),
# and inserted into the documentation.
#
# Entries are keyed by tool name. The value is either a string giving the
# minimum version number or is a dictionary. If a dictionary, the following
# keys are recognised:
#
# min_version: Required string. Minimum version number.
#
# as_needed: Optional bool. Defaults to False. If set, this tool is not
# automatically required. If it is asked for, the rest of the
# entry gives the required version.
#
__TOOL_REQUIREMENTS__ = {
'edalize': '0.2.0',
'ninja': '1.8.2',
'verilator': '4.104',
'hugo_extended': {
'min_version': '0.82.0',
'as_needed': True
},
'verible': {
'min_version': 'v0.0-808-g1e17daa',
'as_needed': True
},
'vcs': {
'min_version': '2020.03-SP2',
'as_needed': True
}
}
| Move to hugo version 0.82.0 | [docs/hugo] Move to hugo version 0.82.0
When adding more pinmux signals and pads, we run into a
funny error where HUGO can't read the generated pinmux register
documentation anymore since the file is too big. This file
limitation has just recently (3 months ago) been removed.
See https://github.com/gohugoio/hugo/pull/8172 for reference.
This commit moves to HUGO 0.82.0 which contains this fix.
Signed-off-by: Michael Schaffner <a005a2d3342a687ea2d05360c953107acbe83e08@opentitan.org>
| Python | apache-2.0 | lowRISC/opentitan,lowRISC/opentitan,lowRISC/opentitan,lowRISC/opentitan,lowRISC/opentitan,lowRISC/opentitan | ---
+++
@@ -21,7 +21,7 @@
'verilator': '4.104',
'hugo_extended': {
- 'min_version': '0.71.0',
+ 'min_version': '0.82.0',
'as_needed': True
},
'verible': { |
ebb3b727b8d7592b7e9755b3f7665314e668a19d | node/string_literal.py | node/string_literal.py | #!/usr/bin/env python
from nodes import Node
class StringLiteral(Node):
args = 0
results = 1
char = '"'
def __init__(self, string):
self.string = string
@Node.test_func([], [""], "")
@Node.test_func([], ["World"], "World\"")
@Node.test_func([], ["Hello"], "Hello")
def func(self):
"""String literal. Ends with " """
return self.string
def __repr__(self):
return "%s: %r"%(self.__class__.__name__, self.string)
@classmethod
def accepts(cls, code, accept = False):
if accept: code = '"'+code
if code == "": return None, None
if code[0] != StringLiteral.char:return None, None
code = code[1:]
string, sep, code = code.partition(StringLiteral.char)
#if sep == "":
# code = "+"
return code, cls(string) | #!/usr/bin/env python
from nodes import Node
class StringLiteral(Node):
args = 0
results = 1
char = '"'
def __init__(self, string):
self.string = string
@Node.test_func([], [""], "")
@Node.test_func([], ["World"], "World\"")
@Node.test_func([], ["Hello"], "Hello")
def func(self):
"""String literal. Ends with " """
return self.string
def __repr__(self):
return "%s: %r"%(self.__class__.__name__, self.string)
@classmethod
def accepts(cls, code, accept = False):
if accept: code = '"'+code
if code == "":
return None, None
if code[0] != StringLiteral.char:
return None, None
code = code[1:]
rtn = ""
end = False
while code and not end:
rtn += code[0]
code = code[1:]
if rtn.endswith('"'):
if rtn.endswith(r'\"'):
continue
end = True
code = code[:-1]
if rtn.endswith('"') and not rtn.endswith(r'\"'):
rtn = rtn[:-1]
rtn = rtn.replace(r'\"', '"')
return code, cls(rtn)
| Allow `"` to appear in string literals | Allow `"` to appear in string literals
| Python | mit | muddyfish/PYKE,muddyfish/PYKE | ---
+++
@@ -23,10 +23,22 @@
@classmethod
def accepts(cls, code, accept = False):
if accept: code = '"'+code
- if code == "": return None, None
- if code[0] != StringLiteral.char:return None, None
+ if code == "":
+ return None, None
+ if code[0] != StringLiteral.char:
+ return None, None
code = code[1:]
- string, sep, code = code.partition(StringLiteral.char)
- #if sep == "":
- # code = "+"
- return code, cls(string)
+ rtn = ""
+ end = False
+ while code and not end:
+ rtn += code[0]
+ code = code[1:]
+ if rtn.endswith('"'):
+ if rtn.endswith(r'\"'):
+ continue
+ end = True
+ code = code[:-1]
+ if rtn.endswith('"') and not rtn.endswith(r'\"'):
+ rtn = rtn[:-1]
+ rtn = rtn.replace(r'\"', '"')
+ return code, cls(rtn) |
5c2a691ff928c336c35a6ddef38c222b4bb3d2a4 | testproject/manage.py | testproject/manage.py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproject.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| #!/usr/bin/env python
import os
import sys
try:
import pymysql
pymysql.install_as_MySQLdb()
except ImportError:
pass
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproject.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| Add support for testing with pymysql | Add support for testing with pymysql
| Python | bsd-3-clause | uranusjr/django-mosql | ---
+++
@@ -1,6 +1,12 @@
#!/usr/bin/env python
import os
import sys
+try:
+ import pymysql
+ pymysql.install_as_MySQLdb()
+except ImportError:
+ pass
+
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproject.settings") |
1271f3b978d2ab46824ca7b33472bba5b725f9ac | tests/test_profile.py | tests/test_profile.py | import fiona
def test_profile():
with fiona.open('tests/data/coutwildrnp.shp') as src:
assert src.meta['crs_wkt'] == 'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295],AUTHORITY["EPSG","4326"]]'
def test_profile_creation_wkt(tmpdir):
outfilename = str(tmpdir.join('test.shp'))
with fiona.open('tests/data/coutwildrnp.shp') as src:
profile = src.meta
profile['crs'] = 'bogus'
with fiona.open(outfilename, 'w', **profile) as dst:
assert dst.crs == {'init': 'epsg:4326'}
assert dst.crs_wkt == 'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295],AUTHORITY["EPSG","4326"]]'
| import os
import tempfile
import fiona
def test_profile():
with fiona.open('tests/data/coutwildrnp.shp') as src:
assert src.meta['crs_wkt'] == 'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295],AUTHORITY["EPSG","4326"]]'
def test_profile_creation_wkt():
tmpdir = tempfile.mkdtemp()
outfilename = os.path.join(tmpdir, 'test.shp')
with fiona.open('tests/data/coutwildrnp.shp') as src:
profile = src.meta
profile['crs'] = 'bogus'
with fiona.open(outfilename, 'w', **profile) as dst:
assert dst.crs == {'init': 'epsg:4326'}
assert dst.crs_wkt == 'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295],AUTHORITY["EPSG","4326"]]'
| Rework tmpdir for nose (no pytest) | Rework tmpdir for nose (no pytest)
| Python | bsd-3-clause | perrygeo/Fiona,perrygeo/Fiona,Toblerity/Fiona,rbuffat/Fiona,Toblerity/Fiona,rbuffat/Fiona | ---
+++
@@ -1,3 +1,6 @@
+import os
+import tempfile
+
import fiona
@@ -6,8 +9,9 @@
assert src.meta['crs_wkt'] == 'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295],AUTHORITY["EPSG","4326"]]'
-def test_profile_creation_wkt(tmpdir):
- outfilename = str(tmpdir.join('test.shp'))
+def test_profile_creation_wkt():
+ tmpdir = tempfile.mkdtemp()
+ outfilename = os.path.join(tmpdir, 'test.shp')
with fiona.open('tests/data/coutwildrnp.shp') as src:
profile = src.meta
profile['crs'] = 'bogus' |
f0e67ca657915e77b1f28bab9fa29f84bfbb8f06 | tests/unit/test_DB.py | tests/unit/test_DB.py | # standard modules
import StringIO
from unittest import TestCase
# custom modules
from iago.DatabaseProvider import DB
class TestDB(TestCase):
def test_read_empty(self):
s = StringIO.StringIO('{}')
d = DB()
try:
d.read(s)
except KeyError:
self.fail('DB cannot handle empty JSON files.')
| # standard modules
import StringIO
from unittest import TestCase
# custom modules
from iago.DatabaseProvider import DB
class TestDB(TestCase):
def test_read_empty(self):
s = StringIO.StringIO('{}')
d = DB()
try:
d.read(s, format='json')
except KeyError:
self.fail('DB cannot handle empty JSON files.')
| Fix test to specify file format | Fix test to specify file format
| Python | mit | ferchault/iago | ---
+++
@@ -11,6 +11,6 @@
s = StringIO.StringIO('{}')
d = DB()
try:
- d.read(s)
+ d.read(s, format='json')
except KeyError:
self.fail('DB cannot handle empty JSON files.') |
0e195e93e0a2f80bc85f8425254e8a1d3c324654 | bockus/books/search_indexes.py | bockus/books/search_indexes.py | from haystack import indexes
from books.models import Book, Series
class BookIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
library = indexes.IntegerField(model_attr="library_id")
def get_model(self):
return Book
def index_queryset(self, using=None):
return self.get_model().objects.all()
class SeriesIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
def get_model(self):
return Series
def index_queryset(self, using=None):
return self.get_model().objects.all() | from haystack import indexes
from books.models import Book, Series
class BookIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
library = indexes.IntegerField(model_attr="library_id")
def get_model(self):
return Book
def index_queryset(self, using=None):
return self.get_model().objects.all()
class SeriesIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
library = indexes.IntegerField(model_attr="library_id")
def get_model(self):
return Series
def index_queryset(self, using=None):
return self.get_model().objects.all() | Add library property to series search index | Add library property to series search index
| Python | mit | phildini/bockus,phildini/bockus,phildini/bockus | ---
+++
@@ -17,6 +17,7 @@
class SeriesIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
+ library = indexes.IntegerField(model_attr="library_id")
def get_model(self):
return Series |
2094f2ef5a47703a881643b8ca25a632fe54e892 | under_overfitting.py | under_overfitting.py | import numpy as np
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.cross_validation import cross_val_score
def main():
np.random.seed(0)
n_samples = 30
degrees = range(1, 16)
true_fn = lambda X: np.cos(1.5 * np.pi * X)
X = np.sort(np.random.rand(n_samples))
y = true_fn(X) + np.random.randn(n_samples) * 0.1
for d in degrees:
poly_features = PolynomialFeatures(degree=d, include_bias=False)
model = LinearRegression()
pipeline = Pipeline([("polynomial_features", poly_features),
("linear_regression", model)])
pipeline.fit(X[:, np.newaxis], y)
scores = cross_val_score(pipeline, X[:, np.newaxis], y,
scoring="mean_squared_error", cv=10)
print("Degree {}\nMSE = {:.2e}(+/- {:.2e})".format(
degrees[i], -scores.mean(), scores.std()))
if __name__ == '__main__':
main()
| import numpy as np
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.cross_validation import cross_val_score
def main():
np.random.seed(0)
n_samples = 30
degrees = range(1, 16)
true_fn = lambda X: np.cos(1.5 * np.pi * X)
X = np.sort(np.random.rand(n_samples))
y = true_fn(X) + np.random.randn(n_samples) * 0.1
for d in degrees:
poly_features = PolynomialFeatures(degree=d, include_bias=False)
model = LinearRegression()
pipeline = Pipeline([('polynomial_features', poly_features),
('linear_regression', model)])
pipeline.fit(X[:, np.newaxis], y)
scores = cross_val_score(pipeline, X[:, np.newaxis], y,
scoring='mean_squared_error', cv=10)
print('Degree {:>2}: mse = {}, std = {}'.format(
d, -scores.mean(), scores.std()))
if __name__ == '__main__':
main()
| Complete walk of polynomial degrees to find most balance between under and overfitting | Complete walk of polynomial degrees to find most balance between under and overfitting
| Python | mit | noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit | ---
+++
@@ -18,15 +18,15 @@
for d in degrees:
poly_features = PolynomialFeatures(degree=d, include_bias=False)
model = LinearRegression()
- pipeline = Pipeline([("polynomial_features", poly_features),
- ("linear_regression", model)])
+ pipeline = Pipeline([('polynomial_features', poly_features),
+ ('linear_regression', model)])
pipeline.fit(X[:, np.newaxis], y)
scores = cross_val_score(pipeline, X[:, np.newaxis], y,
- scoring="mean_squared_error", cv=10)
+ scoring='mean_squared_error', cv=10)
- print("Degree {}\nMSE = {:.2e}(+/- {:.2e})".format(
- degrees[i], -scores.mean(), scores.std()))
+ print('Degree {:>2}: mse = {}, std = {}'.format(
+ d, -scores.mean(), scores.std()))
if __name__ == '__main__': |
f915b101b635e644eb9018a8abb9e9c86e6c2a73 | test/test_config.py | test/test_config.py | import os
import stat
from nose.tools import *
from lctools import config
class TestConfig(object):
test_filename = "bebebe"
def setup(self):
fd = open(self.test_filename, 'w')
fd.write("[default]\n")
fd.write("foo = bar\n")
fd.close()
os.chmod(self.test_filename, stat.S_IRUSR)
def test_basic_functionality(self):
config.LC_CONFIG = self.test_filename
conf = config.get_config("default")
assert_true("default" in conf.sections())
assert_equal(conf.get("foo"), "bar")
@raises(RuntimeError)
def test_get_config_permission_checks(self):
os.chmod(self.test_filename, stat.S_IRWXG | stat.S_IRWXO)
config.LC_CONFIG = self.test_filename
config.get_config("default")
def teardown(self):
os.unlink(self.test_filename)
| import os
import stat
from nose.tools import *
from lctools import config
class TestConfig(object):
test_filename = "bebebe"
def setup(self):
fd = open(self.test_filename, 'w')
fd.write("[default]\n")
fd.write("foo = bar\n")
fd.close()
os.chmod(self.test_filename, stat.S_IRUSR)
def test_basic_functionality(self):
config.LC_CONFIG = self.test_filename
conf = config.get_config("default")
assert_true("default" in conf.sections())
assert_equal(conf.get("foo"), "bar")
@raises(RuntimeError)
def test_get_config_permission_checks(self):
os.chmod(self.test_filename, stat.S_IRWXG | stat.S_IRWXO)
config.LC_CONFIG = self.test_filename
config.get_config("default")
def test_defaults(self):
config.LC_CONFIG = self.test_filename
conf = config.get_config("default")
print conf.get("verify_ssl_certs")
def teardown(self):
os.unlink(self.test_filename)
| Add a test for default config settings. | Add a test for default config settings.
| Python | apache-2.0 | novel/lc-tools,novel/lc-tools | ---
+++
@@ -27,5 +27,10 @@
config.LC_CONFIG = self.test_filename
config.get_config("default")
+ def test_defaults(self):
+ config.LC_CONFIG = self.test_filename
+ conf = config.get_config("default")
+ print conf.get("verify_ssl_certs")
+
def teardown(self):
os.unlink(self.test_filename) |
f6b4b16c26ee97d48ba524027a96d17fba63dc80 | project/models.py | project/models.py | import datetime
from project import db, bcrypt
class User(db.Model):
__tablename__ = "users"
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String, unique=True, nullable=False)
password = db.Column(db.String, nullable=False)
registered_on = db.Column(db.DateTime, nullable=False)
admin = db.Column(db.Boolean, nullable=False, default=False)
def __init__(self, email, password, paid=False, admin=False):
self.email = email
self.password = bcrypt.generate_password_hash(password)
self.registered_on = datetime.datetime.now()
self.admin = admin
def is_authenticated(self):
return True
def is_active(self):
return True
def is_anonymous(self):
return False
def get_id(self):
return self.id
def __repr__(self):
return '<email {}'.format(self.email)
| import datetime
from project import db, bcrypt
class User(db.Model):
__tablename__ = "users"
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String, unique=True, nullable=False)
password = db.Column(db.String, nullable=False)
registered_on = db.Column(db.DateTime, nullable=False)
admin = db.Column(db.Boolean, nullable=False, default=False)
confirmed = db.Column(db.Boolean, nullable=False, default=False)
confirmed_on = db.Column(db.DateTime, nullable=True)
def __init__(self, email, password, paid=False, admin=False):
self.email = email
self.password = bcrypt.generate_password_hash(password)
self.registered_on = datetime.datetime.now()
self.admin = admin
self.confirmed = confirmed
self.confirmed_on = confirmed_on
def is_authenticated(self):
return True
def is_active(self):
return True
def is_anonymous(self):
return False
def get_id(self):
return self.id
def __repr__(self):
return '<email {}'.format(self.email)
| Update user model with confirmed and confirmed_at | Update user model with confirmed and confirmed_at
| Python | mit | dylanshine/streamschool,dylanshine/streamschool | ---
+++
@@ -12,12 +12,16 @@
password = db.Column(db.String, nullable=False)
registered_on = db.Column(db.DateTime, nullable=False)
admin = db.Column(db.Boolean, nullable=False, default=False)
+ confirmed = db.Column(db.Boolean, nullable=False, default=False)
+ confirmed_on = db.Column(db.DateTime, nullable=True)
def __init__(self, email, password, paid=False, admin=False):
self.email = email
self.password = bcrypt.generate_password_hash(password)
self.registered_on = datetime.datetime.now()
self.admin = admin
+ self.confirmed = confirmed
+ self.confirmed_on = confirmed_on
def is_authenticated(self):
return True |
09333f89a7ce9dfda59401bc59d92a3def9ca80c | mycroft/formatters/formatter_plugin.py | mycroft/formatters/formatter_plugin.py | from enum import Enum, unique
from inspect import signature, isclass
from mycroft.plugin.base_plugin import BasePlugin
from mycroft.util import log
@unique
class Format(Enum):
speech = 1
text = 2
class FormatterPlugin(BasePlugin):
and_ = 'and'
def __init__(self, rt):
super().__init__(rt)
self.formatters = {
str: str,
int: str,
float: lambda x: '{:.2f}'.format(x),
list: self.format_list
}
def format_list(self, obj, fmt):
if len(obj) == 0:
return ''
if len(obj) == 1:
return self.format(obj[0], fmt)
return '{}, {} {}'.format(
', '.join(self.format(obj[:-1], fmt)), self.and_,
self.format(obj[-1], fmt)
)
def add(self, cls, formatter):
self.formatters[cls] = formatter
def format(self, obj, fmt=Format.speech):
handler = self.formatters.get(type(obj))
if not handler:
log.warning('No formatter for', type(obj))
return str(obj)
if isclass(handler) or len(signature(handler).parameters) == 1:
return handler(obj)
else:
return handler(obj, fmt)
| from enum import Enum, unique
from inspect import signature, isclass
from mycroft.plugin.base_plugin import BasePlugin
from mycroft.util import log
@unique
class Format(Enum):
speech = 1
text = 2
class FormatterPlugin(BasePlugin):
and_ = 'and'
def __init__(self, rt):
super().__init__(rt)
self.formatters = {
str: str,
int: str,
float: lambda x: '{:.2f}'.format(x),
list: self.format_list
}
def format_list(self, obj, fmt):
if len(obj) == 0:
return ''
if len(obj) == 1:
return self.format(obj[0], fmt)
return '{}, {} {}'.format(
', '.join(self.format(i, fmt) for i in obj[:-1]), self.and_,
self.format(obj[-1], fmt)
)
def add(self, cls, formatter):
self.formatters[cls] = formatter
def format(self, obj, fmt=Format.speech):
handler = self.formatters.get(type(obj))
if not handler:
log.warning('No formatter for', type(obj))
return str(obj)
if isclass(handler) or len(signature(handler).parameters) == 1:
return handler(obj)
else:
return handler(obj, fmt)
| Fix bug with list formatting | Fix bug with list formatting
Before it would completely fail to format any list of strings
| Python | apache-2.0 | MatthewScholefield/mycroft-simple,MatthewScholefield/mycroft-simple | ---
+++
@@ -29,7 +29,7 @@
if len(obj) == 1:
return self.format(obj[0], fmt)
return '{}, {} {}'.format(
- ', '.join(self.format(obj[:-1], fmt)), self.and_,
+ ', '.join(self.format(i, fmt) for i in obj[:-1]), self.and_,
self.format(obj[-1], fmt)
)
|
3ea7a61be81c0f2094d8b3b0d3a81dec267ac663 | GitSvnServer/client.py | GitSvnServer/client.py |
import parse
import generate as gen
from repos import find_repos
from errors import *
def parse_client_greeting(msg_str):
msg = parse.msg(msg_str)
proto_ver = int(msg[0])
client_caps = msg[1]
url = parse.string(msg[2])
print "ver: %d" % proto_ver
print "caps: %s" % client_caps
print "url: %s" % url
return proto_ver, client_caps, url
def connect(link):
link.send_msg(gen.success(2, 2,
gen.list('ANONYMOUS'),
gen.list('edit-pipeline', 'svndiff1')))
client_resp = link.read_msg()
ver, caps, url = parse_client_greeting(client_resp)
if ver != 2:
raise BadProtoVersion()
repos = find_repos(url)
if repos is None:
link.send_msg(gen.failure(gen.list(210005,
gen.string("No repository found in '%s'" %
url),
gen.string('message.py'), 0)))
return url, caps, repos
|
import parse
import generate as gen
from repos import find_repos
from errors import *
server_capabilities = [
'edit-pipeline', # This is required.
'svndiff1', # We support svndiff1
'absent-entries', # We support absent-dir and absent-dir editor commands
#'commit-revprops', # We don't currently have _any_ revprop support
#'mergeinfo', # Nope, not yet
#'depth', # Nope, not yet
]
def parse_client_greeting(msg_str):
msg = parse.msg(msg_str)
proto_ver = int(msg[0])
client_caps = msg[1]
url = parse.string(msg[2])
print "ver: %d" % proto_ver
print "caps: %s" % client_caps
print "url: %s" % url
return proto_ver, client_caps, url
def connect(link):
# Send the announce message - we only support protocol version 2.
link.send_msg(gen.success(2, 2, gen.list(), gen.list(*server_capabilities)))
client_resp = link.read_msg()
ver, caps, url = parse_client_greeting(client_resp)
if ver != 2:
raise BadProtoVersion()
repos = find_repos(url)
if repos is None:
link.send_msg(gen.failure(gen.list(210005,
gen.string("No repository found in '%s'" %
url),
gen.string('message.py'), 0)))
return url, caps, repos
| Sort out the server announce message | Sort out the server announce message
Tidy up the server announce message a bit. In particular, we might as
well announce the absent-entries capability - we support the commands
even if they currently aren't implemented.
| Python | bsd-3-clause | slonopotamus/git_svn_server | ---
+++
@@ -3,6 +3,15 @@
import generate as gen
from repos import find_repos
from errors import *
+
+server_capabilities = [
+ 'edit-pipeline', # This is required.
+ 'svndiff1', # We support svndiff1
+ 'absent-entries', # We support absent-dir and absent-dir editor commands
+ #'commit-revprops', # We don't currently have _any_ revprop support
+ #'mergeinfo', # Nope, not yet
+ #'depth', # Nope, not yet
+]
def parse_client_greeting(msg_str):
msg = parse.msg(msg_str)
@@ -18,9 +27,8 @@
return proto_ver, client_caps, url
def connect(link):
- link.send_msg(gen.success(2, 2,
- gen.list('ANONYMOUS'),
- gen.list('edit-pipeline', 'svndiff1')))
+ # Send the announce message - we only support protocol version 2.
+ link.send_msg(gen.success(2, 2, gen.list(), gen.list(*server_capabilities)))
client_resp = link.read_msg()
|
cbae1dafb07fda5afcd0f2573c81b6eeb08e6e20 | dependencies.py | dependencies.py | import os, pkgutil, site
if pkgutil.find_loader("gi"):
try:
import gi
print('Found gi:', os.path.abspath(gi.__file__))
gi.require_version('Gst', '1.0')
# from gi.repository import GLib, Gst
except ValueError:
print('Couldn\'t find Gst')
print('Please run \'sudo apt-get install gir1.2-gstreamer-1.0\'')
print('Environment seems to be ok.')
else:
print('No gi installed', '\n',
'Please run \'sudo apt-get install python3-gi\'',
'\n',
'A virtual environment might need extra actions like symlinking, ',
'\n',
'you might need to do a symlink looking similar to this:',
'\n',
'ln -s /usr/lib/python3/dist-packages/gi ',
'/srv/homeassistant/lib/python3.4/site-packages',
'\n',
'run this script inside and outside of the virtual environment to find the paths needed')
print(site.getsitepackages()) | import os
import pkgutil
import site
if pkgutil.find_loader("gi"):
try:
import gi
print('Found gi:', os.path.abspath(gi.__file__))
gi.require_version('Gst', '1.0')
# from gi.repository import GLib, Gst
except ValueError:
print('Couldn\'t find Gst')
print('Please run \'sudo apt-get install gir1.2-gstreamer-1.0\'')
return False
print('Environment seems to be ok.')
else:
print('No gi installed', '\n',
'Please run \'sudo apt-get install python3-gi\'',
'\n',
'A virtual environment might need extra actions like symlinking, ',
'\n',
'you might need to do a symlink looking similar to this:',
'\n',
'ln -s /usr/lib/python3/dist-packages/gi ',
'/srv/homeassistant/lib/python3.4/site-packages',
'\n',
'run this script inside and outside of the virtual environment to find the paths needed')
print(site.getsitepackages()) | Exit program if exception is raised | Exit program if exception is raised
| Python | mit | Kane610/axis | ---
+++
@@ -1,4 +1,6 @@
-import os, pkgutil, site
+import os
+import pkgutil
+import site
if pkgutil.find_loader("gi"):
try:
@@ -9,6 +11,7 @@
except ValueError:
print('Couldn\'t find Gst')
print('Please run \'sudo apt-get install gir1.2-gstreamer-1.0\'')
+ return False
print('Environment seems to be ok.')
else:
print('No gi installed', '\n', |
20547b8cb6d530be7a41e1a49cb526dbbaab8330 | override_settings/tests.py | override_settings/tests.py | from django.conf import settings
from django.test import TestCase
from override_settings import (
override_settings, SETTING_DELETED, with_apps, without_apps)
@override_settings(FOO="abc")
class TestOverrideSettingsDecoratedClass(TestCase):
"""
Provide a decorated class.
"""
def test_override_settings_class_decorator(self):
"""
Settings overwritten at the class level are available in each method.
"""
self.assertEqual(settings.FOO, "abc")
@override_settings(FOO="abc123")
def test_override_settings_method_decorator(self):
"""
Method level overrides overwrite class level overrides.
"""
self.assertEqual(settings.FOO, "abc123")
@override_settings(FOO="321")
def test_override_settings_context_manager(self):
"""
Context managers overwrite method and class level decorations.
"""
with override_settings(FOO="xyz"):
self.assertEqual(settings.FOO, "xyz")
class TestOverrideSettingsUndecoratedClass(TestCase):
"""
Provide an undecorated class.
"""
@override_settings(FOO="123")
def test_override_settings_on_a_method(self):
"""
Override settings can be used for an individual method.
"""
self.assertEqual(settings.FOO, "123")
def test_override_settings_as_context_manager(self):
"""
Can use override_settings as a context manager.
"""
with override_settings(FOO="321"):
self.assertEqual(settings.FOO, "321")
class TestAppModifiers(TestCase):
@with_apps('django.contrib.sites')
def test_with_apps(self):
"""
The `with_apps` decorator adds apps to INSTALLED_APPS.
"""
self.assertTrue('django.contrib.sites' in settings.INSTALLED_APPS)
@without_apps("django.contrib.sites")
def test_without_apps(self):
"""
The `without_apps` decorator removes apps from INSTALLED_APPS.
"""
self.assertFalse('django.contrib.sites' in settings.INSTALLED_APPS)
class TestSettingDeleted(TestCase):
def test_dummy_option_exists(self):
"""
Deleted options should return after the context manager is finished.
"""
self.assertEqual(settings.DUMMY_OPTION, 42)
with override_settings(DUMMY_OPTION=SETTING_DELETED):
self.assertRaises(AttributeError, lambda: settings.DUMMY_OPTION)
self.assertEqual(settings.DUMMY_OPTION, 42)
@override_settings(DUMMY_OPTION=SETTING_DELETED)
def test_delete_dummy_option(self):
"""
Can delete settings at the method level.
"""
self.assertRaises(AttributeError, lambda: settings.DUMMY_OPTION)
def test_dummy_option_exists_after_method_level_delete(self):
"""
Make sure the option returns after deleting it at the method level.
"""
self.assertEqual(settings.DUMMY_OPTION, 42)
| Write a full test suite for override_settings | Write a full test suite for override_settings
| Python | bsd-3-clause | edavis/django-override-settings | ---
+++
@@ -0,0 +1,88 @@
+from django.conf import settings
+from django.test import TestCase
+from override_settings import (
+ override_settings, SETTING_DELETED, with_apps, without_apps)
+
+@override_settings(FOO="abc")
+class TestOverrideSettingsDecoratedClass(TestCase):
+ """
+ Provide a decorated class.
+ """
+ def test_override_settings_class_decorator(self):
+ """
+ Settings overwritten at the class level are available in each method.
+ """
+ self.assertEqual(settings.FOO, "abc")
+
+ @override_settings(FOO="abc123")
+ def test_override_settings_method_decorator(self):
+ """
+ Method level overrides overwrite class level overrides.
+ """
+ self.assertEqual(settings.FOO, "abc123")
+
+ @override_settings(FOO="321")
+ def test_override_settings_context_manager(self):
+ """
+ Context managers overwrite method and class level decorations.
+ """
+ with override_settings(FOO="xyz"):
+ self.assertEqual(settings.FOO, "xyz")
+
+class TestOverrideSettingsUndecoratedClass(TestCase):
+ """
+ Provide an undecorated class.
+ """
+ @override_settings(FOO="123")
+ def test_override_settings_on_a_method(self):
+ """
+ Override settings can be used for an individual method.
+ """
+ self.assertEqual(settings.FOO, "123")
+
+ def test_override_settings_as_context_manager(self):
+ """
+ Can use override_settings as a context manager.
+ """
+ with override_settings(FOO="321"):
+ self.assertEqual(settings.FOO, "321")
+
+class TestAppModifiers(TestCase):
+ @with_apps('django.contrib.sites')
+ def test_with_apps(self):
+ """
+ The `with_apps` decorator adds apps to INSTALLED_APPS.
+ """
+ self.assertTrue('django.contrib.sites' in settings.INSTALLED_APPS)
+
+ @without_apps("django.contrib.sites")
+ def test_without_apps(self):
+ """
+ The `without_apps` decorator removes apps from INSTALLED_APPS.
+ """
+ self.assertFalse('django.contrib.sites' in settings.INSTALLED_APPS)
+
+class TestSettingDeleted(TestCase):
+ def test_dummy_option_exists(self):
+ """
+ Deleted options should return after the context manager is finished.
+ """
+ self.assertEqual(settings.DUMMY_OPTION, 42)
+
+ with override_settings(DUMMY_OPTION=SETTING_DELETED):
+ self.assertRaises(AttributeError, lambda: settings.DUMMY_OPTION)
+
+ self.assertEqual(settings.DUMMY_OPTION, 42)
+
+ @override_settings(DUMMY_OPTION=SETTING_DELETED)
+ def test_delete_dummy_option(self):
+ """
+ Can delete settings at the method level.
+ """
+ self.assertRaises(AttributeError, lambda: settings.DUMMY_OPTION)
+
+ def test_dummy_option_exists_after_method_level_delete(self):
+ """
+ Make sure the option returns after deleting it at the method level.
+ """
+ self.assertEqual(settings.DUMMY_OPTION, 42) | |
463ff7bc6571a60c79795992cca9ae40e03dd681 | gmn/src/d1_gmn/app/context_processors.py | gmn/src/d1_gmn/app/context_processors.py | # -*- coding: utf-8 -*-
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2016 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Django template context processors
Before rendering a template, Django calls context processors as set up in
settings_default.TEMPLATE_CONTEXT_PROCESSORS. The context processors are
functions that are expected to return a dict which will be merged into the
environment available to the template.
"""
import django.conf
def global_settings(request):
"""Expose some values from settings.py to templates
"""
return {
'BASE_URL': django.conf.settings.BASE_URL,
}
| # -*- coding: utf-8 -*-
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2016 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Django template context processors
Before rendering a template, Django calls context processors as set up in
settings_default.TEMPLATE_CONTEXT_PROCESSORS. The context processors are
functions that are expected to return a dict which will be merged into the
environment available to the template.
"""
import django.conf
def global_settings(request):
"""Expose some values from settings.py to templates
"""
return {
'BASE_URL': django.conf.settings.BASE_URL,
}
| Add dynamic links to GMN home page from 404 and 500 HTML templates | Add dynamic links to GMN home page from 404 and 500 HTML templates
| Python | apache-2.0 | DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python | ---
+++
@@ -34,4 +34,3 @@
return {
'BASE_URL': django.conf.settings.BASE_URL,
}
- |
428c62ed4b10ba5714e2a0b718cd3f52e0376bc1 | feedhq/feeds/tasks.py | feedhq/feeds/tasks.py | from django.conf import settings
from django.db import connection
from ..tasks import raven
@raven
def update_feed(feed_url, use_etags=True):
from .models import UniqueFeed
UniqueFeed.objects.update_feed(feed_url, use_etags)
close_connection()
@raven
def read_later(entry_pk):
from .models import Entry # circular imports
Entry.objects.get(pk=entry_pk).read_later()
close_connection()
@raven
def update_unique_feed(feed_url):
from .models import UniqueFeed, Feed
feed, created = UniqueFeed.objects.get_or_create(
url=feed_url,
defaults={'subscribers': 1},
)
if not created:
feed.subscribers = Feed.objects.filter(url=feed_url).count()
feed.save()
def close_connection():
"""Close the connection only if not in eager mode"""
if hasattr(settings, 'RQ'):
if not settings.RQ.get('eager', True):
connection.close()
| from django.conf import settings
from django.db import connection
from ..tasks import raven
@raven
def update_feed(feed_url, use_etags=True):
from .models import UniqueFeed
UniqueFeed.objects.update_feed(feed_url, use_etags)
close_connection()
@raven
def read_later(entry_pk):
from .models import Entry # circular imports
Entry.objects.get(pk=entry_pk).read_later()
close_connection()
@raven
def update_unique_feed(feed_url):
from .models import UniqueFeed, Feed, Favicon
feed, created = UniqueFeed.objects.get_or_create(
url=feed_url,
defaults={'subscribers': 1},
)
if not created:
feed.subscribers = Feed.objects.filter(url=feed_url).count()
feed.save()
Favicon.objects.update_favicon(feed.link)
def close_connection():
"""Close the connection only if not in eager mode"""
if hasattr(settings, 'RQ'):
if not settings.RQ.get('eager', True):
connection.close()
| Update favicon on UniqueFeed update | Update favicon on UniqueFeed update
| Python | bsd-3-clause | rmoorman/feedhq,feedhq/feedhq,vincentbernat/feedhq,rmoorman/feedhq,rmoorman/feedhq,vincentbernat/feedhq,feedhq/feedhq,feedhq/feedhq,rmoorman/feedhq,feedhq/feedhq,feedhq/feedhq,vincentbernat/feedhq,vincentbernat/feedhq,rmoorman/feedhq,vincentbernat/feedhq | ---
+++
@@ -20,7 +20,7 @@
@raven
def update_unique_feed(feed_url):
- from .models import UniqueFeed, Feed
+ from .models import UniqueFeed, Feed, Favicon
feed, created = UniqueFeed.objects.get_or_create(
url=feed_url,
defaults={'subscribers': 1},
@@ -28,6 +28,7 @@
if not created:
feed.subscribers = Feed.objects.filter(url=feed_url).count()
feed.save()
+ Favicon.objects.update_favicon(feed.link)
def close_connection(): |
9dc90727df23e655e5c921ca84cb98b7d5ae5eb2 | example_game.py | example_game.py | from pygametemplate import Game
class ExampleGame(Game):
def logic(self):
pass
def draw(self):
pass
def quit(self):
pass
| from pygametemplate import Game
class ExampleGame(Game):
def logic(self):
pass
def draw(self):
pass
| Remove now unnecessary quit() method from ExampleGame | Remove now unnecessary quit() method from ExampleGame
| Python | mit | AndyDeany/pygame-template | ---
+++
@@ -8,6 +8,3 @@
def draw(self):
pass
-
- def quit(self):
- pass |
ee884a9cbaaaf7693e8d980d26cca480b9d1291e | app/models/__init__.py | app/models/__init__.py | """
Initialisation file for models directory.
The application SQLite database model is setup in SQLObject.
The db model structure is:
* Place
- contains records of all Places
* Supername -> Continent -> Country -> Town
- These tables are linked to each other in a hiearchy such that a
Supername has Continents, which have Countries, which have Towns.
- These all inherit from Place.
- Every record in one of these tables has a record in Place table
with the same ID.
* Trend
- contains a trend record for a specific time and space. Each record
has a foreign key to map it to a Place record, derived from the
Trend's WOEID value in the API.
This approach makes it easy to always map a Trend record to the same
table (Place) instead of many, while still allowing easy seperation of
Place types in the Place-related tables.
e.g. show all Places
e.g. show all from Countries table and count of its Towns we have mapped
to it.
e.g. show Towns which are in Asia
"""
# Create __all__ list using values set in other application files.
from places import __all__ as p
from trends import __all__ as t
from cronJobs import __all__ as c
__all__ = p + t + c
# Make objects available on models module.
from places import *
from trends import *
from cronJobs import *
| """
Initialisation file for models directory.
"""
# Create an _`_all__` list here, using values set in other application files.
from .places import __all__ as placesModel
from .trends import __all__ as trendsModel
from .tweets import __all__ as tweetsModel
from .cronJobs import __all__ as cronJobsModel
__all__ = placesModel + trendsModel + tweetsModel + cronJobsModel
# Make table objects available on models module.
from .places import *
from .trends import *
from .tweets import *
from .cronJobs import *
| Add tweets model to models init file, for db setup to see it. | Add tweets model to models init file, for db setup to see it.
| Python | mit | MichaelCurrin/twitterverse,MichaelCurrin/twitterverse | ---
+++
@@ -1,37 +1,15 @@
"""
Initialisation file for models directory.
+"""
+# Create an _`_all__` list here, using values set in other application files.
+from .places import __all__ as placesModel
+from .trends import __all__ as trendsModel
+from .tweets import __all__ as tweetsModel
+from .cronJobs import __all__ as cronJobsModel
+__all__ = placesModel + trendsModel + tweetsModel + cronJobsModel
-The application SQLite database model is setup in SQLObject.
-
-The db model structure is:
- * Place
- - contains records of all Places
- * Supername -> Continent -> Country -> Town
- - These tables are linked to each other in a hiearchy such that a
- Supername has Continents, which have Countries, which have Towns.
- - These all inherit from Place.
- - Every record in one of these tables has a record in Place table
- with the same ID.
- * Trend
- - contains a trend record for a specific time and space. Each record
- has a foreign key to map it to a Place record, derived from the
- Trend's WOEID value in the API.
-
-This approach makes it easy to always map a Trend record to the same
-table (Place) instead of many, while still allowing easy seperation of
-Place types in the Place-related tables.
- e.g. show all Places
- e.g. show all from Countries table and count of its Towns we have mapped
- to it.
- e.g. show Towns which are in Asia
-"""
-# Create __all__ list using values set in other application files.
-from places import __all__ as p
-from trends import __all__ as t
-from cronJobs import __all__ as c
-__all__ = p + t + c
-
-# Make objects available on models module.
-from places import *
-from trends import *
-from cronJobs import *
+# Make table objects available on models module.
+from .places import *
+from .trends import *
+from .tweets import *
+from .cronJobs import * |
2352f18400d5b4b36052e04804bd04e32b000cc4 | streak-podium/render.py | streak-podium/render.py | import pygal
def horizontal_bar(sorted_streaks, sort_attrib):
"""
Render a horizontal bar chart of streaks.
Values have already been sorted by sort_attrib.
"""
users = [user for user, _ in sorted_streaks][::-1]
streaks = [getattr(streak, sort_attrib) for _, streak in sorted_streaks][::-1]
chart = pygal.HorizontalStackedBar(show_legend=False,
print_values=True, print_zeroes=False,
print_labels=True)
chart.title = 'Top contributors by {} streak'.format(sort_attrib)
chart.x_labels = users
values = []
for value, user in zip(streaks, users):
if value > 0:
values.append({'value': value, 'label': user})
else:
values.append(0)
chart.add('Streaks', values)
chart.render_to_file('top.svg')
| import pygal
def horizontal_bar(sorted_streaks, sort_attrib):
"""
Render a horizontal bar chart of streaks.
Values have already been sorted by sort_attrib.
"""
users = [user for user, _ in sorted_streaks][::-1]
streaks = [getattr(streak, sort_attrib) for _, streak in sorted_streaks][::-1]
chart = pygal.HorizontalStackedBar(show_y_labels=False,
show_x_labels=False,
show_legend=False,
print_values=True,
print_zeroes=False,
print_labels=True)
chart.title = 'Top contributors by {} streak'.format(sort_attrib)
chart.x_labels = users
values = []
for value, user in zip(streaks, users):
if value > 0:
values.append({
'value': value,
'label': user,
'xlink': 'https://github.com/{}'.format(user)
})
else:
values.append(0) # Let zeroes be boring
chart.add('Streaks', values)
chart.render_to_file('top.svg')
| Add links to profiles and clean up chart options | Add links to profiles and clean up chart options
| Python | mit | jollyra/hubot-commit-streak,supermitch/streak-podium,supermitch/streak-podium,jollyra/hubot-commit-streak,jollyra/hubot-streak-podium,jollyra/hubot-streak-podium | ---
+++
@@ -10,8 +10,11 @@
users = [user for user, _ in sorted_streaks][::-1]
streaks = [getattr(streak, sort_attrib) for _, streak in sorted_streaks][::-1]
- chart = pygal.HorizontalStackedBar(show_legend=False,
- print_values=True, print_zeroes=False,
+ chart = pygal.HorizontalStackedBar(show_y_labels=False,
+ show_x_labels=False,
+ show_legend=False,
+ print_values=True,
+ print_zeroes=False,
print_labels=True)
chart.title = 'Top contributors by {} streak'.format(sort_attrib)
chart.x_labels = users
@@ -19,9 +22,13 @@
values = []
for value, user in zip(streaks, users):
if value > 0:
- values.append({'value': value, 'label': user})
+ values.append({
+ 'value': value,
+ 'label': user,
+ 'xlink': 'https://github.com/{}'.format(user)
+ })
else:
- values.append(0)
+ values.append(0) # Let zeroes be boring
chart.add('Streaks', values)
chart.render_to_file('top.svg') |
e02dabde2ea898847ec61cc966e29a52e27f71cd | example_storage.py | example_storage.py | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pprint import pprint
from libcloud.storage.types import Provider
from libcloud.storage.providers import get_driver
CloudFiles = get_driver(Provider.CLOUDFILES)
driver = CloudFiles('access key id', 'secret key')
containers = driver.list_containers()
container_objects = driver.list_container_objects(containers[0])
pprint(containers)
pprint(container_objects)
| # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pprint import pprint
from libcloud.storage.types import Provider
from libcloud.storage.providers import get_driver
CloudFiles = get_driver(Provider.CLOUDFILES_UK)
driver = CloudFiles('access key id', 'secret key')
containers = driver.list_containers()
container_objects = driver.list_container_objects(containers[0])
pprint(containers)
pprint(container_objects)
| Fix a broken constant name, should be CLOUDFILES_UK. | Fix a broken constant name, should be CLOUDFILES_UK.
git-svn-id: 353d90d4d8d13dcb4e0402680a9155a727f61a5a@1101075 13f79535-47bb-0310-9956-ffa450edef68
| Python | apache-2.0 | techhat/libcloud,ninefold/libcloud,smaffulli/libcloud,wrigri/libcloud,lochiiconnectivity/libcloud,lochiiconnectivity/libcloud,Kami/libcloud,StackPointCloud/libcloud,schaubl/libcloud,mistio/libcloud,briancurtin/libcloud,briancurtin/libcloud,Cloud-Elasticity-Services/as-libcloud,SecurityCompass/libcloud,wrigri/libcloud,sfriesel/libcloud,mgogoulos/libcloud,erjohnso/libcloud,smaffulli/libcloud,iPlantCollaborativeOpenSource/libcloud,jimbobhickville/libcloud,marcinzaremba/libcloud,aleGpereira/libcloud,thesquelched/libcloud,munkiat/libcloud,sahildua2305/libcloud,sahildua2305/libcloud,atsaki/libcloud,ZuluPro/libcloud,t-tran/libcloud,Keisuke69/libcloud,cloudControl/libcloud,pantheon-systems/libcloud,curoverse/libcloud,samuelchong/libcloud,mtekel/libcloud,carletes/libcloud,mtekel/libcloud,ninefold/libcloud,pantheon-systems/libcloud,supertom/libcloud,ZuluPro/libcloud,wido/libcloud,sfriesel/libcloud,ZuluPro/libcloud,vongazman/libcloud,jerryblakley/libcloud,mbrukman/libcloud,schaubl/libcloud,illfelder/libcloud,ByteInternet/libcloud,Scalr/libcloud,jimbobhickville/libcloud,niteoweb/libcloud,techhat/libcloud,carletes/libcloud,ByteInternet/libcloud,Keisuke69/libcloud,wuyuewen/libcloud,Kami/libcloud,niteoweb/libcloud,sahildua2305/libcloud,kater169/libcloud,mgogoulos/libcloud,StackPointCloud/libcloud,Kami/libcloud,jerryblakley/libcloud,andrewsomething/libcloud,erjohnso/libcloud,watermelo/libcloud,aleGpereira/libcloud,Scalr/libcloud,briancurtin/libcloud,Jc2k/libcloud,pantheon-systems/libcloud,apache/libcloud,MrBasset/libcloud,apache/libcloud,JamesGuthrie/libcloud,illfelder/libcloud,niteoweb/libcloud,Cloud-Elasticity-Services/as-libcloud,NexusIS/libcloud,sgammon/libcloud,mathspace/libcloud,iPlantCollaborativeOpenSource/libcloud,supertom/libcloud,kater169/libcloud,JamesGuthrie/libcloud,t-tran/libcloud,wido/libcloud,samuelchong/libcloud,mtekel/libcloud,apache/libcloud,MrBasset/libcloud,Itxaka/libcloud,ClusterHQ/libcloud,iPlantCollaborativeOpenSource/libcloud,kater1
69/libcloud,MrBasset/libcloud,ByteInternet/libcloud,atsaki/libcloud,samuelchong/libcloud,t-tran/libcloud,Itxaka/libcloud,lochiiconnectivity/libcloud,pquentin/libcloud,vongazman/libcloud,NexusIS/libcloud,ClusterHQ/libcloud,Verizon/libcloud,wuyuewen/libcloud,cloudControl/libcloud,aviweit/libcloud,Itxaka/libcloud,mistio/libcloud,carletes/libcloud,Scalr/libcloud,aleGpereira/libcloud,sergiorua/libcloud,watermelo/libcloud,cryptickp/libcloud,watermelo/libcloud,Verizon/libcloud,mathspace/libcloud,Jc2k/libcloud,mathspace/libcloud,SecurityCompass/libcloud,DimensionDataCBUSydney/libcloud,mbrukman/libcloud,JamesGuthrie/libcloud,cryptickp/libcloud,Verizon/libcloud,sgammon/libcloud,curoverse/libcloud,mbrukman/libcloud,wuyuewen/libcloud,DimensionDataCBUSydney/libcloud,dcorbacho/libcloud,pquentin/libcloud,vongazman/libcloud,DimensionDataCBUSydney/libcloud,sergiorua/libcloud,mgogoulos/libcloud,Cloud-Elasticity-Services/as-libcloud,mistio/libcloud,curoverse/libcloud,marcinzaremba/libcloud,munkiat/libcloud,wrigri/libcloud,wido/libcloud,supertom/libcloud,illfelder/libcloud,cloudControl/libcloud,dcorbacho/libcloud,munkiat/libcloud,andrewsomething/libcloud,schaubl/libcloud,andrewsomething/libcloud,jerryblakley/libcloud,thesquelched/libcloud,aviweit/libcloud,cryptickp/libcloud,SecurityCompass/libcloud,erjohnso/libcloud,marcinzaremba/libcloud,aviweit/libcloud,NexusIS/libcloud,jimbobhickville/libcloud,sfriesel/libcloud,smaffulli/libcloud,dcorbacho/libcloud,techhat/libcloud,StackPointCloud/libcloud,atsaki/libcloud,thesquelched/libcloud,sergiorua/libcloud,pquentin/libcloud | ---
+++
@@ -18,7 +18,7 @@
from libcloud.storage.types import Provider
from libcloud.storage.providers import get_driver
-CloudFiles = get_driver(Provider.CLOUDFILES)
+CloudFiles = get_driver(Provider.CLOUDFILES_UK)
driver = CloudFiles('access key id', 'secret key')
|
08331a081713f880d5eca4fb7b18f4c61e360132 | tests/skipif_markers.py | tests/skipif_markers.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
skipif_markers
--------------
Contains pytest skipif markers to be used in the suite.
"""
import pytest
import os
try:
os.environ[u'TRAVIS']
except KeyError:
travis = False
else:
travis = True
try:
os.environ[u'DISABLE_NETWORK_TESTS']
except KeyError:
no_network = False
else:
no_network = True
# For some reason pytest incorrectly uses the first reason text regardless of
# which condition matches. Using a unified message for now
# travis_reason = 'Works locally with tox but fails on Travis.'
# no_network_reason = 'Needs a network connection to GitHub.'
reason = (
'Fails on Travis or else there is no network connection to '
'GitHub/Bitbucket.'
)
skipif_travis = pytest.mark.skipif(travis, reason=reason)
skipif_no_network = pytest.mark.skipif(no_network, reason=reason)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
skipif_markers
--------------
Contains pytest skipif markers to be used in the suite.
"""
import pytest
import os
try:
os.environ[u'TRAVIS']
except KeyError:
travis = False
else:
travis = True
try:
os.environ[u'DISABLE_NETWORK_TESTS']
except KeyError:
no_network = False
else:
no_network = True
skipif_travis = pytest.mark.skipif(
travis, reason='Works locally with tox but fails on Travis.'
)
skipif_no_network = pytest.mark.skipif(
no_network, reason='Needs a network connection to GitHub/Bitbucket.'
)
| Revert skipif markers to use correct reasons (bug fixed in pytest) | Revert skipif markers to use correct reasons (bug fixed in pytest)
| Python | bsd-3-clause | hackebrot/cookiecutter,michaeljoseph/cookiecutter,willingc/cookiecutter,stevepiercy/cookiecutter,pjbull/cookiecutter,stevepiercy/cookiecutter,audreyr/cookiecutter,terryjbates/cookiecutter,luzfcb/cookiecutter,dajose/cookiecutter,dajose/cookiecutter,michaeljoseph/cookiecutter,Springerle/cookiecutter,terryjbates/cookiecutter,willingc/cookiecutter,pjbull/cookiecutter,luzfcb/cookiecutter,audreyr/cookiecutter,Springerle/cookiecutter,hackebrot/cookiecutter | ---
+++
@@ -26,14 +26,10 @@
else:
no_network = True
-# For some reason pytest incorrectly uses the first reason text regardless of
-# which condition matches. Using a unified message for now
-# travis_reason = 'Works locally with tox but fails on Travis.'
-# no_network_reason = 'Needs a network connection to GitHub.'
-reason = (
- 'Fails on Travis or else there is no network connection to '
- 'GitHub/Bitbucket.'
+skipif_travis = pytest.mark.skipif(
+ travis, reason='Works locally with tox but fails on Travis.'
)
-skipif_travis = pytest.mark.skipif(travis, reason=reason)
-skipif_no_network = pytest.mark.skipif(no_network, reason=reason)
+skipif_no_network = pytest.mark.skipif(
+ no_network, reason='Needs a network connection to GitHub/Bitbucket.'
+) |
022bbf819b3c4a14ade4100102d251eceb84c637 | tests/test_bijection.py | tests/test_bijection.py | """Test bijection class."""
import pytest
from collections_extended.bijection import bijection
def test_bijection():
"""General tests for bijection."""
b = bijection()
assert len(b) == 0
b['a'] = 1
assert len(b) == 1
assert b['a'] == 1
assert b.inverse[1] == 'a'
assert 'a' in b
assert 1 not in b
assert 1 in b.inverse
with pytest.raises(KeyError):
del b['f']
assert b == bijection(a=1)
assert b.inverse.inverse is b
assert b == b.copy()
del b['a']
assert b == bijection()
assert bijection(a=1, b=2, c=3) == bijection({'a': 1, 'b': 2, 'c': 3})
b['a'] = 1
b.inverse[1] = 'b'
assert 'b' in b
assert b['b'] == 1
assert 'a' not in b
| """Test bijection class."""
import pytest
from collections_extended.bijection import bijection
def test_bijection():
"""General tests for bijection."""
b = bijection()
assert len(b) == 0
b['a'] = 1
assert len(b) == 1
assert b['a'] == 1
assert b.inverse[1] == 'a'
assert 'a' in b
assert 1 not in b
assert 1 in b.inverse
with pytest.raises(KeyError):
del b['f']
assert b == bijection(a=1)
assert b.inverse.inverse is b
assert b == b.copy()
del b['a']
assert b == bijection()
assert bijection(a=1, b=2, c=3) == bijection({'a': 1, 'b': 2, 'c': 3})
b['a'] = 1
b.inverse[1] = 'b'
assert 'b' in b
assert b['b'] == 1
assert 'a' not in b
def test_init_from_pairs():
assert bijection({'a': 1, 'b': 2}) == bijection((('a', 1), ('b', 2)))
| Add test for bijection init from list of pairs | Add test for bijection init from list of pairs
| Python | apache-2.0 | mlenzen/collections-extended | ---
+++
@@ -28,3 +28,7 @@
assert 'b' in b
assert b['b'] == 1
assert 'a' not in b
+
+
+def test_init_from_pairs():
+ assert bijection({'a': 1, 'b': 2}) == bijection((('a', 1), ('b', 2))) |
7f9a31a03e68e1d9dc6f420c6aa157e657da4157 | apps/core/templatetags/files.py | apps/core/templatetags/files.py | from pathlib import Path
from django import template
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.filter
@stringfilter
def filename(path):
"""Removes traceback lines from a string (if any). It has no effect when
no 'Traceback' pattern has been found.
Returns: raws before the 'Traceback' pattern
"""
return Path(path).name
| from pathlib import Path
from django import template
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.filter
@stringfilter
def filename(path):
"""Removes parent path from a relative or absolute filename
Returns: the filename
"""
return Path(path).name
| Fix filename template tag docstring | Fix filename template tag docstring
| Python | bsd-3-clause | Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel | ---
+++
@@ -9,9 +9,8 @@
@register.filter
@stringfilter
def filename(path):
- """Removes traceback lines from a string (if any). It has no effect when
- no 'Traceback' pattern has been found.
+ """Removes parent path from a relative or absolute filename
- Returns: raws before the 'Traceback' pattern
+ Returns: the filename
"""
return Path(path).name |
746df42ff459c52690a5cf8c786a6d91edee7151 | heroku_settings.py | heroku_settings.py | import os
DEBUG = True
ASSETS_DEBUG = True
# GRANO_HOST = 'http://localhost:5000'
# GRANO_APIKEY = '7a65f180d7b898822'
# GRANO_PROJECT = 'kompromatron_C'
GRANO_HOST = os.environ.get('GRANO_HOST', 'http://beta.grano.cc/')
GRANO_APIKEY = os.environ.get('GRANO_APIKEY')
GRANO_PROJECT = os.environ.get('GRANO_PROJECT', 'kompromatron')
| import os
DEBUG = os.environ.get('DEBUG', 'False') == 'True'
ASSETS_DEBUG = os.environ.get('ASSET_DEBUG', 'False') == 'True'
# GRANO_HOST = 'http://localhost:5000'
# GRANO_APIKEY = '7a65f180d7b898822'
# GRANO_PROJECT = 'kompromatron_C'
GRANO_HOST = os.environ.get('GRANO_HOST', 'http://beta.grano.cc/')
GRANO_APIKEY = os.environ.get('GRANO_APIKEY')
GRANO_PROJECT = os.environ.get('GRANO_PROJECT', 'kompromatron')
| Make heroku debug settings environ dependent | Make heroku debug settings environ dependent | Python | mit | pudo/kompromatron,pudo/kompromatron | ---
+++
@@ -1,7 +1,7 @@
import os
-DEBUG = True
-ASSETS_DEBUG = True
+DEBUG = os.environ.get('DEBUG', 'False') == 'True'
+ASSETS_DEBUG = os.environ.get('ASSET_DEBUG', 'False') == 'True'
# GRANO_HOST = 'http://localhost:5000'
# GRANO_APIKEY = '7a65f180d7b898822' |
8a5d111b5c77ae9f7478dd7e73eca292c441d3fa | website_event_excerpt_img/__openerp__.py | website_event_excerpt_img/__openerp__.py | # -*- coding: utf-8 -*-
# © 2016 Antiun Ingeniería S.L. - Jairo Llopis
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Excerpt + Image in Events",
"summary": "New layout for event summary, including an excerpt and image",
"version": "8.0.1.0.0",
"category": "Website",
"website": "http://www.antiun.com",
"author": "Antiun Ingeniería S.L., Odoo Community Association (OCA)",
"license": "AGPL-3",
"application": False,
"installable": True,
"depends": [
"website_event",
"html_imgs",
"html_text",
],
"data": [
"views/assets.xml",
"views/event.xml",
"views/event_event_view.xml",
],
"images": [
"images/frontend.png",
"images/backend.png",
"images/customize.png",
],
}
| # -*- coding: utf-8 -*-
# © 2016 Antiun Ingeniería S.L. - Jairo Llopis
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Excerpt + Image in Events",
"summary": "New layout for event summary, including an excerpt and image",
"version": "8.0.1.0.0",
"category": "Website",
"website": "http://www.antiun.com",
"author": "Antiun Ingeniería S.L., Odoo Community Association (OCA)",
"license": "AGPL-3",
"application": False,
"installable": True,
"depends": [
"website_event",
"html_image_url_extractor",
"html_text",
],
"data": [
"views/assets.xml",
"views/event.xml",
"views/event_event_view.xml",
],
"images": [
"images/frontend.png",
"images/backend.png",
"images/customize.png",
],
}
| Update module name as changed in last module version. | Update module name as changed in last module version.
| Python | agpl-3.0 | open-synergy/event,open-synergy/event | ---
+++
@@ -13,7 +13,7 @@
"installable": True,
"depends": [
"website_event",
- "html_imgs",
+ "html_image_url_extractor",
"html_text",
],
"data": [ |
52c359c1348b9c21f7c47917d024d7c161652b43 | webapp/thing_test.py | webapp/thing_test.py | #!/usr/bin/env python
from thing import PiThing
# Instantiate a PiThing
pi_thing = PiThing()
# Get the current switch state
switch = pi_thing.read_switch()
print('Switch: {0}'.format(switch))
| #!/usr/bin/env python
from thing import PiThing
# Instantiate a PiThing
pi_thing = PiThing()
# Get the current switch state
switch = pi_thing.read_switch()
print('Switch: {0}'.format(switch))
# Blink the LED forever.
print('Blinking LED (Ctrl-C to stop)...')
while True:
pi_thing.set_led(True)
time.sleep(0.5)
pi_thing.set_led(False)
time.sleep(0.5)
| Add blink LED. TODO: Test on raspberry pi hardware. | Add blink LED. TODO: Test on raspberry pi hardware.
| Python | mit | beepscore/pi_thing,beepscore/pi_thing,beepscore/pi_thing | ---
+++
@@ -9,3 +9,11 @@
# Get the current switch state
switch = pi_thing.read_switch()
print('Switch: {0}'.format(switch))
+
+# Blink the LED forever.
+print('Blinking LED (Ctrl-C to stop)...')
+while True:
+ pi_thing.set_led(True)
+ time.sleep(0.5)
+ pi_thing.set_led(False)
+ time.sleep(0.5) |
ba6c2ba95f4d0ab8a6c153a617aa5d1c789318a5 | numpy/distutils/command/install.py | numpy/distutils/command/install.py |
from distutils.command.install import *
from distutils.command.install import install as old_install
class install(old_install):
def finalize_options (self):
old_install.finalize_options(self)
self.install_lib = self.install_libbase
|
import os
from distutils.command.install import *
from distutils.command.install import install as old_install
from distutils.file_util import write_file
class install(old_install):
def finalize_options (self):
old_install.finalize_options(self)
self.install_lib = self.install_libbase
def run(self):
r = old_install.run(self)
if self.record:
# bdist_rpm fails when INSTALLED_FILES contains
# paths with spaces. Such paths must be enclosed
# with double-quotes.
f = open(self.record,'r')
lines = []
need_rewrite = False
for l in f.readlines():
l = l.rstrip()
if ' ' in l:
need_rewrite = True
l = '"%s"' % (l)
lines.append(l)
f.close()
if need_rewrite:
self.execute(write_file,
(self.record, lines),
"re-writing list of installed files to '%s'" %
self.record)
return r
| Fix bdist_rpm for path names containing spaces. | Fix bdist_rpm for path names containing spaces.
| Python | bsd-3-clause | empeeu/numpy,sonnyhu/numpy,dimasad/numpy,MaPePeR/numpy,tynn/numpy,dch312/numpy,stefanv/numpy,felipebetancur/numpy,ssanderson/numpy,MichaelAquilina/numpy,musically-ut/numpy,behzadnouri/numpy,ContinuumIO/numpy,madphysicist/numpy,mwiebe/numpy,stuarteberg/numpy,mwiebe/numpy,ChristopherHogan/numpy,rgommers/numpy,madphysicist/numpy,argriffing/numpy,seberg/numpy,drasmuss/numpy,pbrod/numpy,njase/numpy,hainm/numpy,stuarteberg/numpy,yiakwy/numpy,gmcastil/numpy,jankoslavic/numpy,skwbc/numpy,ChristopherHogan/numpy,dwf/numpy,nguyentu1602/numpy,bringingheavendown/numpy,gfyoung/numpy,BMJHayward/numpy,cowlicks/numpy,ChristopherHogan/numpy,pbrod/numpy,ChanderG/numpy,kiwifb/numpy,sonnyhu/numpy,ContinuumIO/numpy,numpy/numpy,ahaldane/numpy,shoyer/numpy,mhvk/numpy,maniteja123/numpy,dwf/numpy,mhvk/numpy,bertrand-l/numpy,ekalosak/numpy,mhvk/numpy,abalkin/numpy,Linkid/numpy,MichaelAquilina/numpy,embray/numpy,MSeifert04/numpy,grlee77/numpy,hainm/numpy,tdsmith/numpy,NextThought/pypy-numpy,WarrenWeckesser/numpy,astrofrog/numpy,ddasilva/numpy,sigma-random/numpy,maniteja123/numpy,larsmans/numpy,Srisai85/numpy,dch312/numpy,MaPePeR/numpy,Linkid/numpy,rudimeier/numpy,WarrenWeckesser/numpy,rhythmsosad/numpy,AustereCuriosity/numpy,pelson/numpy,leifdenby/numpy,Yusa95/numpy,grlee77/numpy,skymanaditya1/numpy,sinhrks/numpy,gmcastil/numpy,ChristopherHogan/numpy,Linkid/numpy,Yusa95/numpy,jschueller/numpy,grlee77/numpy,naritta/numpy,groutr/numpy,GrimDerp/numpy,dato-code/numpy,MSeifert04/numpy,jankoslavic/numpy,endolith/numpy,utke1/numpy,kiwifb/numpy,njase/numpy,mortada/numpy,bmorris3/numpy,dato-code/numpy,shoyer/numpy,brandon-rhodes/numpy,jorisvandenbossche/numpy,KaelChen/numpy,pizzathief/numpy,jschueller/numpy,tdsmith/numpy,sinhrks/numpy,rgommers/numpy,GaZ3ll3/numpy,immerrr/numpy,GrimDerp/numpy,pdebuyl/numpy,mingwpy/numpy,sigma-random/numpy,immerrr/numpy,charris/numpy,NextThought/pypy-numpy,joferkington/numpy,argriffing/numpy,seberg/numpy,mattip/numpy,sigma-random/numpy,Continuum
IO/numpy,gfyoung/numpy,yiakwy/numpy,b-carter/numpy,pdebuyl/numpy,jakirkham/numpy,mingwpy/numpy,ewmoore/numpy,dch312/numpy,skymanaditya1/numpy,dwillmer/numpy,ViralLeadership/numpy,stuarteberg/numpy,sinhrks/numpy,behzadnouri/numpy,simongibbons/numpy,naritta/numpy,ahaldane/numpy,rudimeier/numpy,abalkin/numpy,rgommers/numpy,MSeifert04/numpy,rajathkumarmp/numpy,pizzathief/numpy,BabeNovelty/numpy,numpy/numpy-refactor,tdsmith/numpy,bmorris3/numpy,mingwpy/numpy,larsmans/numpy,jonathanunderwood/numpy,Eric89GXL/numpy,grlee77/numpy,anntzer/numpy,charris/numpy,endolith/numpy,mattip/numpy,rherault-insa/numpy,simongibbons/numpy,stefanv/numpy,jonathanunderwood/numpy,GaZ3ll3/numpy,stefanv/numpy,ajdawson/numpy,pbrod/numpy,gfyoung/numpy,skymanaditya1/numpy,trankmichael/numpy,githubmlai/numpy,sigma-random/numpy,SiccarPoint/numpy,ajdawson/numpy,embray/numpy,rudimeier/numpy,rmcgibbo/numpy,dwf/numpy,andsor/numpy,BMJHayward/numpy,ewmoore/numpy,felipebetancur/numpy,numpy/numpy-refactor,NextThought/pypy-numpy,numpy/numpy-refactor,shoyer/numpy,mhvk/numpy,ahaldane/numpy,NextThought/pypy-numpy,ogrisel/numpy,ddasilva/numpy,rajathkumarmp/numpy,trankmichael/numpy,rherault-insa/numpy,sonnyhu/numpy,ewmoore/numpy,chiffa/numpy,numpy/numpy-refactor,skymanaditya1/numpy,ChanderG/numpy,yiakwy/numpy,endolith/numpy,pbrod/numpy,naritta/numpy,nbeaver/numpy,shoyer/numpy,musically-ut/numpy,has2k1/numpy,joferkington/numpy,githubmlai/numpy,dwf/numpy,bertrand-l/numpy,felipebetancur/numpy,WillieMaddox/numpy,rajathkumarmp/numpy,brandon-rhodes/numpy,trankmichael/numpy,hainm/numpy,pdebuyl/numpy,ChanderG/numpy,shoyer/numpy,pbrod/numpy,anntzer/numpy,pizzathief/numpy,MichaelAquilina/numpy,Yusa95/numpy,ewmoore/numpy,CMartelLML/numpy,WarrenWeckesser/numpy,WarrenWeckesser/numpy,anntzer/numpy,BabeNovelty/numpy,musically-ut/numpy,jorisvandenbossche/numpy,jorisvandenbossche/numpy,chatcannon/numpy,mathdd/numpy,bmorris3/numpy,mindw/numpy,matthew-brett/numpy,astrofrog/numpy,ewmoore/numpy,stuarteberg/numpy,dwillmer/numpy,b-carter
/numpy,dato-code/numpy,embray/numpy,anntzer/numpy,solarjoe/numpy,andsor/numpy,SunghanKim/numpy,simongibbons/numpy,simongibbons/numpy,astrofrog/numpy,joferkington/numpy,pelson/numpy,tacaswell/numpy,ViralLeadership/numpy,pyparallel/numpy,ssanderson/numpy,ChanderG/numpy,CMartelLML/numpy,Dapid/numpy,rgommers/numpy,Srisai85/numpy,pdebuyl/numpy,GaZ3ll3/numpy,seberg/numpy,AustereCuriosity/numpy,astrofrog/numpy,solarjoe/numpy,mindw/numpy,MSeifert04/numpy,matthew-brett/numpy,BMJHayward/numpy,ESSS/numpy,pelson/numpy,njase/numpy,bringingheavendown/numpy,drasmuss/numpy,has2k1/numpy,dato-code/numpy,mortada/numpy,tynn/numpy,mhvk/numpy,cjermain/numpy,tdsmith/numpy,githubmlai/numpy,pyparallel/numpy,pizzathief/numpy,ViralLeadership/numpy,embray/numpy,SunghanKim/numpy,madphysicist/numpy,dimasad/numpy,cowlicks/numpy,pelson/numpy,rhythmsosad/numpy,empeeu/numpy,Yusa95/numpy,charris/numpy,numpy/numpy-refactor,nguyentu1602/numpy,jorisvandenbossche/numpy,numpy/numpy,githubmlai/numpy,tynn/numpy,jschueller/numpy,jakirkham/numpy,jakirkham/numpy,rmcgibbo/numpy,CMartelLML/numpy,maniteja123/numpy,mathdd/numpy,joferkington/numpy,tacaswell/numpy,KaelChen/numpy,chatcannon/numpy,bmorris3/numpy,brandon-rhodes/numpy,jankoslavic/numpy,BabeNovelty/numpy,mathdd/numpy,sonnyhu/numpy,simongibbons/numpy,mingwpy/numpy,jonathanunderwood/numpy,kiwifb/numpy,immerrr/numpy,rhythmsosad/numpy,Anwesh43/numpy,Anwesh43/numpy,mortada/numpy,empeeu/numpy,mindw/numpy,ahaldane/numpy,dch312/numpy,matthew-brett/numpy,tacaswell/numpy,nbeaver/numpy,astrofrog/numpy,rmcgibbo/numpy,utke1/numpy,hainm/numpy,argriffing/numpy,andsor/numpy,stefanv/numpy,skwbc/numpy,brandon-rhodes/numpy,chiffa/numpy,jschueller/numpy,ekalosak/numpy,Srisai85/numpy,GrimDerp/numpy,MaPePeR/numpy,WillieMaddox/numpy,madphysicist/numpy,kirillzhuravlev/numpy,jakirkham/numpy,immerrr/numpy,mortada/numpy,moreati/numpy,ekalosak/numpy,embray/numpy,matthew-brett/numpy,abalkin/numpy,SiccarPoint/numpy,SunghanKim/numpy,mattip/numpy,sinhrks/numpy,cjermain/numpy,naritta/nu
mpy,mattip/numpy,grlee77/numpy,skwbc/numpy,ESSS/numpy,SiccarPoint/numpy,ssanderson/numpy,has2k1/numpy,WarrenWeckesser/numpy,kirillzhuravlev/numpy,dimasad/numpy,nguyentu1602/numpy,rudimeier/numpy,has2k1/numpy,KaelChen/numpy,ogrisel/numpy,drasmuss/numpy,Linkid/numpy,ddasilva/numpy,GaZ3ll3/numpy,ajdawson/numpy,solarjoe/numpy,leifdenby/numpy,rherault-insa/numpy,ESSS/numpy,SiccarPoint/numpy,rmcgibbo/numpy,ajdawson/numpy,Anwesh43/numpy,nbeaver/numpy,gmcastil/numpy,dwillmer/numpy,WillieMaddox/numpy,cjermain/numpy,KaelChen/numpy,madphysicist/numpy,kirillzhuravlev/numpy,AustereCuriosity/numpy,leifdenby/numpy,ahaldane/numpy,behzadnouri/numpy,stefanv/numpy,moreati/numpy,matthew-brett/numpy,utke1/numpy,MichaelAquilina/numpy,bringingheavendown/numpy,cowlicks/numpy,mwiebe/numpy,CMartelLML/numpy,Dapid/numpy,dwf/numpy,GrimDerp/numpy,ogrisel/numpy,Eric89GXL/numpy,jakirkham/numpy,MaPePeR/numpy,b-carter/numpy,chatcannon/numpy,cjermain/numpy,pyparallel/numpy,Eric89GXL/numpy,trankmichael/numpy,musically-ut/numpy,jorisvandenbossche/numpy,bertrand-l/numpy,Anwesh43/numpy,BabeNovelty/numpy,larsmans/numpy,dimasad/numpy,pizzathief/numpy,groutr/numpy,jankoslavic/numpy,BMJHayward/numpy,MSeifert04/numpy,cowlicks/numpy,charris/numpy,dwillmer/numpy,Srisai85/numpy,SunghanKim/numpy,Eric89GXL/numpy,numpy/numpy,rhythmsosad/numpy,rajathkumarmp/numpy,ogrisel/numpy,mindw/numpy,nguyentu1602/numpy,endolith/numpy,groutr/numpy,ekalosak/numpy,larsmans/numpy,numpy/numpy,ogrisel/numpy,felipebetancur/numpy,kirillzhuravlev/numpy,andsor/numpy,pelson/numpy,Dapid/numpy,yiakwy/numpy,mathdd/numpy,seberg/numpy,moreati/numpy,empeeu/numpy,chiffa/numpy | ---
+++
@@ -1,9 +1,34 @@
+import os
from distutils.command.install import *
from distutils.command.install import install as old_install
+from distutils.file_util import write_file
class install(old_install):
def finalize_options (self):
old_install.finalize_options(self)
self.install_lib = self.install_libbase
+
+ def run(self):
+ r = old_install.run(self)
+ if self.record:
+ # bdist_rpm fails when INSTALLED_FILES contains
+ # paths with spaces. Such paths must be enclosed
+ # with double-quotes.
+ f = open(self.record,'r')
+ lines = []
+ need_rewrite = False
+ for l in f.readlines():
+ l = l.rstrip()
+ if ' ' in l:
+ need_rewrite = True
+ l = '"%s"' % (l)
+ lines.append(l)
+ f.close()
+ if need_rewrite:
+ self.execute(write_file,
+ (self.record, lines),
+ "re-writing list of installed files to '%s'" %
+ self.record)
+ return r |
64dbe1d931edd38b4d731db18408e337d39e42c3 | cab/admin.py | cab/admin.py | from django.contrib import admin
from cab.models import Language, Snippet, SnippetFlag
class LanguageAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ['name']}
class SnippetAdmin(admin.ModelAdmin):
list_display = ('id', 'title', 'author', 'rating_score', 'pub_date')
list_filter = ('language',)
date_hierarchy = 'pub_date'
search_fields = ('author__username', 'title', 'description', 'code',)
class SnippetFlagAdmin(admin.ModelAdmin):
list_display = ('snippet', 'flag')
list_filter = ('flag',)
actions = ['remove_and_ban']
def remove_and_ban(self, request, queryset):
for obj in queryset:
obj.remove_and_ban()
self.message_user(request, 'Snippets removed successfully')
remove_and_ban.short_description = 'Remove snippet and ban user'
admin.site.register(Language, LanguageAdmin)
admin.site.register(Snippet, SnippetAdmin)
admin.site.register(SnippetFlag, SnippetFlagAdmin)
| from django.contrib import admin
from cab.models import Language, Snippet, SnippetFlag
class LanguageAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ['name']}
class SnippetAdmin(admin.ModelAdmin):
list_display = ('id', 'title', 'author', 'rating_score', 'pub_date')
list_filter = ('language',)
date_hierarchy = 'pub_date'
search_fields = ('author__username', 'title', 'description', 'code',)
raw_id_fields = ('author',)
class SnippetFlagAdmin(admin.ModelAdmin):
list_display = ('snippet', 'flag')
list_filter = ('flag',)
actions = ['remove_and_ban']
raw_id_fields = ('snippet', 'user',)
def remove_and_ban(self, request, queryset):
for obj in queryset:
obj.remove_and_ban()
self.message_user(request, 'Snippets removed successfully')
remove_and_ban.short_description = 'Remove snippet and ban user'
admin.site.register(Language, LanguageAdmin)
admin.site.register(Snippet, SnippetAdmin)
admin.site.register(SnippetFlag, SnippetFlagAdmin)
| Use raw_id_fields for users and snippets. | Use raw_id_fields for users and snippets.
| Python | bsd-3-clause | django/djangosnippets.org,django/djangosnippets.org,django/djangosnippets.org,django/djangosnippets.org,django-de/djangosnippets.org,django/djangosnippets.org,django-de/djangosnippets.org,django-de/djangosnippets.org,django-de/djangosnippets.org | ---
+++
@@ -12,12 +12,14 @@
list_filter = ('language',)
date_hierarchy = 'pub_date'
search_fields = ('author__username', 'title', 'description', 'code',)
+ raw_id_fields = ('author',)
class SnippetFlagAdmin(admin.ModelAdmin):
list_display = ('snippet', 'flag')
list_filter = ('flag',)
actions = ['remove_and_ban']
+ raw_id_fields = ('snippet', 'user',)
def remove_and_ban(self, request, queryset):
for obj in queryset: |
cb2cafc809481748ec64aa8ef9bfa3cc29660a6d | install_deps.py | install_deps.py | #!/usr/bin/env python
"""
Install the packages you have listed in the requirements file you input as
first argument.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import fileinput
import subprocess
from pip.req import parse_requirements
def get_requirements(*args):
"""Parse all requirements files given and return a list of the
dependencies"""
install_deps = []
try:
for fpath in args:
install_deps.extend([str(d.req) for d in parse_requirements(fpath)])
except:
print('Error reading {} file looking for dependencies.'.format(fpath))
return install_deps
if __name__ == '__main__':
for line in fileinput.input():
req_filepaths = sys.argv[1:]
deps = get_requirements(*req_filepaths)
try:
for dep_name in deps:
if dep_name == 'None':
continue
cmd = "pip install {0}".format(dep_name)
print('#', cmd)
subprocess.check_call(cmd, shell=True)
except:
print('Error installing {}'.format(dep_name))
| #!/usr/bin/env python
"""
Install the packages you have listed in the requirements file you input as
first argument.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import fileinput
import subprocess
from pip.req import parse_requirements
def get_requirements(*args):
"""Parse all requirements files given and return a list of the
dependencies"""
install_deps = []
try:
for fpath in args:
install_deps.extend([str(d.req) for d in parse_requirements(fpath)])
except:
print('Error reading {} file looking for dependencies.'.format(fpath))
return [dep for dep in install_deps if dep != 'None']
if __name__ == '__main__':
for line in fileinput.input():
req_filepaths = sys.argv[1:]
deps = get_requirements(*req_filepaths)
try:
for dep_name in deps:
cmd = "pip install {0}".format(dep_name)
print('#', cmd)
subprocess.check_call(cmd, shell=True)
except:
print('Error installing {}'.format(dep_name))
| Correct for None appearing in requirements list | Correct for None appearing in requirements list
| Python | bsd-3-clause | Neurita/darwin | ---
+++
@@ -24,7 +24,7 @@
except:
print('Error reading {} file looking for dependencies.'.format(fpath))
- return install_deps
+ return [dep for dep in install_deps if dep != 'None']
if __name__ == '__main__':
@@ -36,9 +36,6 @@
try:
for dep_name in deps:
- if dep_name == 'None':
- continue
-
cmd = "pip install {0}".format(dep_name)
print('#', cmd)
subprocess.check_call(cmd, shell=True) |
fa92a5ff237abc0c3de169bac7784e48caa152dd | clean_lxd.py | clean_lxd.py | #!/usr/bin/env python
from __future__ import print_function
from argparse import ArgumentParser
from datetime import (
datetime,
timedelta,
)
import json
import os
import subprocess
import sys
def list_old_juju_containers(hours):
env = dict(os.environ)
containers = json.loads(subprocess.check_output([
'lxc', 'list', '--format', 'json'], env=env))
now = datetime.now()
for container in containers:
name = container['name']
if not name.startswith('juju-'):
continue
# This produces local time. lxc does not respect TZ=UTC.
created_at = datetime.strptime(
container['created_at'][:-6], '%Y-%m-%dT%H:%M:%S')
age = now - created_at
if age <= timedelta(hours=hours):
continue
yield name, age
def main():
parser = ArgumentParser('Delete old juju containers')
parser.add_argument('--dry-run', action='store_true',
help='Do not actually delete.')
parser.add_argument('--hours', type=int, default=1,
help='Number of hours a juju container may exist.')
args = parser.parse_args()
for container, age in list_old_juju_containers(args.hours):
print('deleting {} ({} old)'.format(container, age))
if args.dry_run:
continue
subprocess.check_call(('lxc', 'delete', '--verbose', '--force',
container))
if __name__ == '__main__':
sys.exit(main())
| #!/usr/bin/env python
from __future__ import print_function
from argparse import ArgumentParser
from datetime import (
datetime,
timedelta,
)
import json
import os
import subprocess
import sys
from dateutil import (
parser as date_parser,
tz,
)
def list_old_juju_containers(hours):
env = dict(os.environ)
containers = json.loads(subprocess.check_output([
'lxc', 'list', '--format', 'json'], env=env))
now = datetime.now(tz.gettz('UTC'))
for container in containers:
name = container['name']
if not name.startswith('juju-'):
continue
created_at = date_parser.parse(container['created_at'])
age = now - created_at
if age <= timedelta(hours=hours):
continue
yield name, age
def main():
parser = ArgumentParser('Delete old juju containers')
parser.add_argument('--dry-run', action='store_true',
help='Do not actually delete.')
parser.add_argument('--hours', type=int, default=1,
help='Number of hours a juju container may exist.')
args = parser.parse_args()
for container, age in list_old_juju_containers(args.hours):
print('deleting {} ({} old)'.format(container, age))
if args.dry_run:
continue
subprocess.check_call(('lxc', 'delete', '--verbose', '--force',
container))
if __name__ == '__main__':
sys.exit(main())
| Use dateutil to calculate age of container. | Use dateutil to calculate age of container. | Python | agpl-3.0 | mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju | ---
+++
@@ -11,19 +11,22 @@
import subprocess
import sys
+from dateutil import (
+ parser as date_parser,
+ tz,
+ )
+
def list_old_juju_containers(hours):
env = dict(os.environ)
containers = json.loads(subprocess.check_output([
'lxc', 'list', '--format', 'json'], env=env))
- now = datetime.now()
+ now = datetime.now(tz.gettz('UTC'))
for container in containers:
name = container['name']
if not name.startswith('juju-'):
continue
- # This produces local time. lxc does not respect TZ=UTC.
- created_at = datetime.strptime(
- container['created_at'][:-6], '%Y-%m-%dT%H:%M:%S')
+ created_at = date_parser.parse(container['created_at'])
age = now - created_at
if age <= timedelta(hours=hours):
continue |
d360d4e5af09c5c194db783c4344aef10367b7f3 | kolla/cmd/build.py | kolla/cmd/build.py | #!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kolla.image import build
def main():
statuses = build.run_build()
if statuses:
bad_results, good_results, unmatched_results = statuses
if bad_results:
return 1
return 0
if __name__ == '__main__':
main()
| #!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
PROJECT_ROOT = os.path.abspath(os.path.join(
os.path.dirname(os.path.realpath(__file__)), '../..'))
if PROJECT_ROOT not in sys.path:
sys.path.insert(0, PROJECT_ROOT)
from kolla.image import build
def main():
statuses = build.run_build()
if statuses:
bad_results, good_results, unmatched_results = statuses
if bad_results:
return 1
return 0
if __name__ == '__main__':
main()
| Change the search path to look locally | Change the search path to look locally
In order to use tools/build.py, we need to search
locally for imports.
Closes-bug: #1592030
Change-Id: Idfa651c1268f93366de9f4e3fa80c33be42c71c3
| Python | apache-2.0 | mandre/kolla,intel-onp/kolla,mandre/kolla,GalenMa/kolla,openstack/kolla,stackforge/kolla,coolsvap/kolla,openstack/kolla,stackforge/kolla,dardelean/kolla-ansible,dardelean/kolla-ansible,dardelean/kolla-ansible,mrangana/kolla,rahulunair/kolla,nihilifer/kolla,intel-onp/kolla,rahulunair/kolla,nihilifer/kolla,mrangana/kolla,coolsvap/kolla,mandre/kolla,GalenMa/kolla,coolsvap/kolla,stackforge/kolla | ---
+++
@@ -11,6 +11,14 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+
+import os
+import sys
+
+PROJECT_ROOT = os.path.abspath(os.path.join(
+ os.path.dirname(os.path.realpath(__file__)), '../..'))
+if PROJECT_ROOT not in sys.path:
+ sys.path.insert(0, PROJECT_ROOT)
from kolla.image import build
|
903f958fbdfc0f7a2f0e1d863907488f9a88cad3 | prophyle/prophyle_validate_tree.py | prophyle/prophyle_validate_tree.py | #! /usr/bin/env python3
"""Test whether given Newick/NHX trees are valid for ProPhyle.
Author: Karel Brinda <kbrinda@hsph.harvard.edu>
Licence: MIT
Example:
$ prophyle_validate_tree.py ~/prophyle/bacteria.nw ~/prophyle/viruses.nw
"""
import os
import sys
import argparse
sys.path.append(os.path.dirname(__file__))
import prophylelib as pro
def main():
parser = argparse.ArgumentParser(description='Verify a Newick/NHX tree')
parser.add_argument('tree',
metavar='<tree.nw>',
type=str,
nargs='+',
help='phylogenetic tree (in Newick/NHX)',
)
args = parser.parse_args()
tree_fns = args.tree
ok = True
for tree_fn in tree_fns:
print("Validating '{}'".format(tree_fn))
tree = pro.load_nhx_tree(tree_fn, validate=False)
r = pro.validate_prophyle_nhx_tree(tree, verbose=True, throw_exceptions=False, output=sys.stdout)
if r:
print(" ...OK")
else:
ok = False
print()
sys.exit(0 if ok else 1)
if __name__ == "__main__":
main()
| #! /usr/bin/env python3
"""Test whether given Newick/NHX trees are valid for ProPhyle.
Author: Karel Brinda <kbrinda@hsph.harvard.edu>
Licence: MIT
Example:
$ prophyle_validate_tree.py ~/prophyle/bacteria.nw ~/prophyle/viruses.nw
"""
import os
import sys
import argparse
sys.path.append(os.path.dirname(__file__))
import prophylelib as pro
def main():
parser = argparse.ArgumentParser(description='Verify a Newick/NHX tree')
parser.add_argument('tree',
metavar='<tree.nw>',
type=str,
nargs='+',
help='phylogenetic tree (in Newick/NHX)',
)
args = parser.parse_args()
tree_fns = args.tree
ok = True
for tree_fn in tree_fns:
print("Validating '{}'".format(tree_fn))
tree = pro.load_nhx_tree(tree_fn, validate=False)
r = pro.validate_prophyle_nhx_tree(tree, verbose=True, throw_exceptions=False, output_fo=sys.stdout)
if r:
print(" ...OK")
else:
ok = False
print()
sys.exit(0 if ok else 1)
if __name__ == "__main__":
main()
| Fix script for validating trees | Fix script for validating trees
Former-commit-id: e6409f5a586d34a3bcb03b2d55afc9b220aebe04 | Python | mit | karel-brinda/prophyle,karel-brinda/prophyle,karel-brinda/prophyle,karel-brinda/prophyle | ---
+++
@@ -37,7 +37,7 @@
for tree_fn in tree_fns:
print("Validating '{}'".format(tree_fn))
tree = pro.load_nhx_tree(tree_fn, validate=False)
- r = pro.validate_prophyle_nhx_tree(tree, verbose=True, throw_exceptions=False, output=sys.stdout)
+ r = pro.validate_prophyle_nhx_tree(tree, verbose=True, throw_exceptions=False, output_fo=sys.stdout)
if r:
print(" ...OK")
else: |
61fe996f79e34ac3f5be15213bfa2c16eccfa3ee | ptt_preproc_target.py | ptt_preproc_target.py | #!/usr/bin/env python
import json
from os import scandir
from os.path import (
join as path_join,
basename as to_basename,
splitext,
exists
)
import ptt_core
l = ptt_core.l
_TARGETS_DIR_PATH = 'targets'
def generate_target_from(json_path):
l.info('Generate target from {} ...'.format(json_path))
basename = to_basename(json_path)
root, ext = splitext(basename)
txt_path = path_join(_TARGETS_DIR_PATH, '{}.txt'.format(root))
if exists(txt_path):
l.info('Existed and skip {}'.format(txt_path))
return
with open(json_path) as f:
d = json.load(f)
push_score_sum = sum(push_d['score'] for push_d in d['push_ds'])
with ptt_core.mkdir_n_open(txt_path, 'w') as f:
f.write(str(push_score_sum))
l.info('Wrote into {}'.format(txt_path))
def generate_all(preprocessed_dir_path):
for dir_entry in scandir(preprocessed_dir_path):
generate_target_from(dir_entry.path)
if __name__ == '__main__':
generate_all('preprocessed')
| #!/usr/bin/env python
import json
from pathlib import Path
from os import scandir
from os.path import (
join as path_join,
basename as to_basename,
splitext,
exists
)
import ptt_core
l = ptt_core.l
_TARGETS_DIR_PATH = Path('targets')
if not _TARGETS_DIR_PATH.exists():
_TARGETS_DIR_PATH.mkdir()
def generate_target_from(json_path):
l.info('Generate target from {} ...'.format(json_path))
txt_path = _TARGETS_DIR_PATH / '{}.txt'.format(json_path.stem)
if txt_path.exists():
l.info('Existed and skip {}'.format(txt_path))
return
with json_path.open() as f:
d = json.load(f)
push_score_sum = sum(push_d['score'] for push_d in d['push_ds'])
with txt_path.open('w') as f:
f.write(str(push_score_sum))
l.info('Wrote into {}'.format(txt_path))
def generate_all(preprocessed_dir_path_str):
for path in Path(preprocessed_dir_path_str).iterdir():
generate_target_from(path)
if __name__ == '__main__':
generate_all('preprocessed')
| Use pathlib in the target | Use pathlib in the target
| Python | mit | moskytw/mining-news | ---
+++
@@ -2,6 +2,7 @@
import json
+from pathlib import Path
from os import scandir
from os.path import (
join as path_join,
@@ -14,34 +15,35 @@
l = ptt_core.l
-_TARGETS_DIR_PATH = 'targets'
+_TARGETS_DIR_PATH = Path('targets')
+
+if not _TARGETS_DIR_PATH.exists():
+ _TARGETS_DIR_PATH.mkdir()
def generate_target_from(json_path):
l.info('Generate target from {} ...'.format(json_path))
- basename = to_basename(json_path)
- root, ext = splitext(basename)
- txt_path = path_join(_TARGETS_DIR_PATH, '{}.txt'.format(root))
+ txt_path = _TARGETS_DIR_PATH / '{}.txt'.format(json_path.stem)
- if exists(txt_path):
+ if txt_path.exists():
l.info('Existed and skip {}'.format(txt_path))
return
- with open(json_path) as f:
+ with json_path.open() as f:
d = json.load(f)
push_score_sum = sum(push_d['score'] for push_d in d['push_ds'])
- with ptt_core.mkdir_n_open(txt_path, 'w') as f:
+ with txt_path.open('w') as f:
f.write(str(push_score_sum))
l.info('Wrote into {}'.format(txt_path))
-def generate_all(preprocessed_dir_path):
+def generate_all(preprocessed_dir_path_str):
- for dir_entry in scandir(preprocessed_dir_path):
- generate_target_from(dir_entry.path)
+ for path in Path(preprocessed_dir_path_str).iterdir():
+ generate_target_from(path)
if __name__ == '__main__': |
5959bb60ca9e42d41386b2a1c672f7a1e666df0d | pybb/read_tracking.py | pybb/read_tracking.py | def update_read_tracking(topic, user):
tracking = user.readtracking
#if last_read > last_read - don't check topics
if tracking.last_read and tracking.last_read > (topic.last_post.updated or
topic.last_post.created):
return
if isinstance(tracking.topics, dict):
#clear topics if len > 5Kb and set last_read to current time
if len(tracking.topics) > 5120:
tracking.topics = None
tracking.last_read = datetime.now()
tracking.save()
#update topics if exist new post or does't exist in dict
if topic.last_post.pk > tracking.topics.get(str(topic.pk), 0):
tracking.topics.setdefault(str(topic.pk), topic.last_post.pk)
tracking.save()
else:
#initialize topic tracking dict
tracking.topics = {topic.pk: topic.last_post.pk}
tracking.save()
| def update_read_tracking(topic, user):
tracking = user.readtracking
#if last_read > last_read - don't check topics
if tracking.last_read and tracking.last_read > (topic.last_post.updated or
topic.last_post.created):
return
if isinstance(tracking.topics, dict):
#clear topics if len > 5Kb and set last_read to current time
if len(tracking.topics) > 5120:
tracking.topics = None
tracking.last_read = datetime.now()
tracking.save()
#update topics if new post exists or cache entry is empty
if topic.last_post.pk > tracking.topics.get(str(topic.pk), 0):
tracking.topics[str(topic.pk)] = topic.last_post.pk
tracking.save()
else:
#initialize topic tracking dict
tracking.topics = {topic.pk: topic.last_post.pk}
tracking.save()
| Fix bug in read tracking system | Fix bug in read tracking system
| Python | bsd-3-clause | gpetukhov/pybb,gpetukhov/pybb,gpetukhov/pybb | ---
+++
@@ -12,9 +12,9 @@
tracking.topics = None
tracking.last_read = datetime.now()
tracking.save()
- #update topics if exist new post or does't exist in dict
+ #update topics if new post exists or cache entry is empty
if topic.last_post.pk > tracking.topics.get(str(topic.pk), 0):
- tracking.topics.setdefault(str(topic.pk), topic.last_post.pk)
+ tracking.topics[str(topic.pk)] = topic.last_post.pk
tracking.save()
else:
#initialize topic tracking dict |
22ac4b9f8dd7d74a84585131fb982f3594a91603 | hr_family/models/hr_children.py | hr_family/models/hr_children.py | # -*- coding:utf-8 -*-
#
#
# Copyright (C) 2011,2013 Michael Telahun Makonnen <mmakonnen@gmail.com>.
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
from openerp import models, fields
GENDER_SELECTION = [('m', 'M'),
('f', 'F')]
class HrChildren(models.Model):
_name = 'hr.employee.children'
_description = 'HR Employee Children'
name = fields.Char("Name", required=True)
date_of_birth = fields.Date("Date of Birth", oldname='dob')
employee_id = fields.Many2one('hr.employee', "Employee")
gender = fields.Selection(selection=GENDER_SELECTION, string='Gender')
| # -*- coding:utf-8 -*-
#
#
# Copyright (C) 2011,2013 Michael Telahun Makonnen <mmakonnen@gmail.com>.
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
from openerp import models, fields
# (value, label) pairs for the child's gender selection field below; the
# word-style codes match the gender selection used on hr.employee.
GENDER_SELECTION = [('male', 'Male'),
                    ('female', 'Female')]
class HrChildren(models.Model):
    """An employee's child, linked back to its hr.employee record."""
    _name = 'hr.employee.children'
    _description = 'HR Employee Children'
    name = fields.Char("Name", required=True)  # child's name
    date_of_birth = fields.Date("Date of Birth", oldname='dob')  # 'dob' is the pre-rename column
    employee_id = fields.Many2one('hr.employee', "Employee")  # parent employee
    gender = fields.Selection(selection=GENDER_SELECTION, string='Gender')
 | Use the same selection as employee | [IMP][hr_family] Use the same selection as employee
| Python | agpl-3.0 | xpansa/hr,Vauxoo/hr,Eficent/hr,thinkopensolutions/hr,microcom/hr,hbrunn/hr,acsone/hr,hbrunn/hr,Antiun/hr,feketemihai/hr,thinkopensolutions/hr,Antiun/hr,xpansa/hr,Endika/hr,feketemihai/hr,Endika/hr,open-synergy/hr,VitalPet/hr,microcom/hr,Vauxoo/hr,VitalPet/hr,open-synergy/hr,Eficent/hr,acsone/hr | ---
+++
@@ -21,8 +21,8 @@
from openerp import models, fields
-GENDER_SELECTION = [('m', 'M'),
- ('f', 'F')]
+GENDER_SELECTION = [('male', 'Male'),
+ ('female', 'Female')]
class HrChildren(models.Model): |
ead2f795480ae7e671c93550e55cf9e106b2f306 | hubblestack_nova/pkgng_audit.py | hubblestack_nova/pkgng_audit.py | # -*- encoding: utf-8 -*-
'''
Hubble Nova plugin for FreeBSD pkgng audit
:maintainer: HubbleStack
:maturity: 20160421
:platform: FreeBSD
:requires: SaltStack
'''
from __future__ import absolute_import
import logging
log = logging.getLogger(__name__)
__tags__ = None
def __virtual__():
    """Load this audit module only on FreeBSD hosts, registering its tags."""
    global __tags__
    if 'FreeBSD' in __grains__['os']:
        __tags__ = ['freebsd-pkg-audit']
        return True
    return False, 'This audit module only runs on FreeBSD'
def audit(tags, verbose=False):
    '''
    Run ``pkg.audit`` and bucket its raw output under Success or Failure.

    The run is counted as a success only when the output reports
    "0 problem(s)".
    '''
    salt_ret = __salt__['pkg.audit']()
    outcome = 'Success' if '0 problem(s)' in salt_ret else 'Failure'
    results = {'Success': [], 'Failure': []}
    results[outcome].append(salt_ret)
    return results
| # -*- encoding: utf-8 -*-
'''
Hubble Nova plugin for FreeBSD pkgng audit
:maintainer: HubbleStack
:maturity: 20160421
:platform: FreeBSD
:requires: SaltStack
'''
from __future__ import absolute_import
import logging
log = logging.getLogger(__name__)
def __virtual__():
    """Load this audit module on FreeBSD hosts only."""
    if 'FreeBSD' in __grains__['os']:
        return True
    return False, 'This audit module only runs on FreeBSD'
def audit(data_list, tags, verbose=False):
    '''
    Run ``pkg.audit`` when any profile in ``data_list`` enables it.

    A profile enables this module by containing a 'freebsd-pkg' entry;
    otherwise the empty result dict is returned without doing any work.
    '''
    results = {'Success': [], 'Failure': []}
    enabled = any('freebsd-pkg' in data for data in data_list)
    if not enabled:
        # No matching yaml data found, so there is nothing to audit.
        return results
    salt_ret = __salt__['pkg.audit']()
    bucket = 'Success' if '0 problem(s)' in salt_ret else 'Failure'
    results[bucket].append(salt_ret)
    return results
 | Update freebsd-pkg-audit to rely on yaml data and take data from hubble.py | Update freebsd-pkg-audit to rely on yaml data and take data from hubble.py
| Python | apache-2.0 | HubbleStack/Nova,avb76/Nova,SaltyCharles/Nova,cedwards/Nova | ---
+++
@@ -12,22 +12,28 @@
log = logging.getLogger(__name__)
-__tags__ = None
-
def __virtual__():
if 'FreeBSD' not in __grains__['os']:
return False, 'This audit module only runs on FreeBSD'
- global __tags__
- __tags__ = ['freebsd-pkg-audit']
return True
-def audit(tags, verbose=False):
+def audit(data_list, tags, verbose=False):
'''
Run the pkg.audit command
'''
ret = {'Success': [], 'Failure': []}
+
+ __tags__ = []
+ for data in data_list:
+ if 'freebsd-pkg' in data:
+ __tags__ = ['freebsd-pkg-audit']
+ break
+
+ if not __tags__:
+ # No yaml data found, don't do any work
+ return ret
salt_ret = __salt__['pkg.audit']()
if '0 problem(s)' not in salt_ret: |
5d448435477ce94273051b8351275d8c18838b8b | icekit/utils/fluent_contents.py | icekit/utils/fluent_contents.py | from django.contrib.contenttypes.models import ContentType
# USEFUL FUNCTIONS FOR FLUENT CONTENTS #############################################################
# Fluent Contents Helper Functions #################################################################
def create_content_instance(content_plugin_class, page, placeholder_name='main', **kwargs):
    """
    Create a content instance of ``content_plugin_class`` attached to ``page``.

    :param content_plugin_class: The class of the content plugin.
    :param page: The fluent_page instance to attach the content instance to.
    :param placeholder_name: The placeholder slot name defined in the
        template. [DEFAULT: main]
    :param kwargs: Additional keyword arguments passed to the content
        instance creation.
    :return: The content instance created.
    """
    # Reuse the first existing placeholder for the slot, or create a new one.
    existing = page.get_placeholder_by_slot(placeholder_name)
    if existing.exists():
        target_placeholder = existing[0]
    else:
        target_placeholder = page.create_placeholder(placeholder_name)
    # The plugin links back to its page through a generic (content type, id) pair.
    page_type = ContentType.objects.get_for_model(type(page))
    return content_plugin_class.objects.create(
        parent_type=page_type,
        parent_id=page.id,
        placeholder=target_placeholder,
        **kwargs
    )
# END Fluent Contents Helper Functions #############################################################
| from django.contrib.contenttypes.models import ContentType
# USEFUL FUNCTIONS FOR FLUENT CONTENTS #############################################################
# Fluent Contents Helper Functions #################################################################
def create_content_instance(content_plugin_class, test_page, placeholder_name='main', **kwargs):
    """
    Creates a content instance from a content plugin class.

    NOTE(review): the page parameter is named ``test_page`` although the
    helper is generic -- presumably renamed to avoid a clash; confirm.

    :param content_plugin_class: The class of the content plugin.
    :param test_page: The fluent_page instance to create the content
    instance on.
    :param placeholder_name: The placeholder name defined in the
    template. [DEFAULT: main]
    :param kwargs: Additional keyword arguments to be used in the
    content instance creation.
    :return: The content instance created.
    """
    # Get the placeholders that are currently available for the slot.
    placeholders = test_page.get_placeholder_by_slot(placeholder_name)
    # If a placeholder exists for the placeholder_name use the first one provided otherwise create
    # a new placeholder instance.
    if placeholders.exists():
        placeholder = placeholders[0]
    else:
        placeholder = test_page.create_placeholder(placeholder_name)
    # Obtain the content type for the page instance class.
    ct = ContentType.objects.get_for_model(type(test_page))
    # Create the actual plugin instance.
    content_instance = content_plugin_class.objects.create(
        parent_type=ct,
        parent_id=test_page.id,
        placeholder=placeholder,
        **kwargs
    )
    return content_instance
# END Fluent Contents Helper Functions #############################################################
| Change argument name to stop probable name clash. | Change argument name to stop probable name clash.
| Python | mit | ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit | ---
+++
@@ -4,12 +4,12 @@
# USEFUL FUNCTIONS FOR FLUENT CONTENTS #############################################################
# Fluent Contents Helper Functions #################################################################
-def create_content_instance(content_plugin_class, page, placeholder_name='main', **kwargs):
+def create_content_instance(content_plugin_class, test_page, placeholder_name='main', **kwargs):
"""
Creates a content instance from a content plugin class.
:param content_plugin_class: The class of the content plugin.
- :param page: The fluent_page instance to create the content
+ :param test_page: The fluent_page instance to create the content
instance one.
:param placeholder_name: The placeholder name defined in the
template. [DEFAULT: main]
@@ -18,22 +18,22 @@
:return: The content instance created.
"""
# Get the placeholders that are currently available for the slot.
- placeholders = page.get_placeholder_by_slot(placeholder_name)
+ placeholders = test_page.get_placeholder_by_slot(placeholder_name)
# If a placeholder exists for the placeholder_name use the first one provided otherwise create
# a new placeholder instance.
if placeholders.exists():
placeholder = placeholders[0]
else:
- placeholder = page.create_placeholder(placeholder_name)
+ placeholder = test_page.create_placeholder(placeholder_name)
# Obtain the content type for the page instance class.
- ct = ContentType.objects.get_for_model(type(page))
+ ct = ContentType.objects.get_for_model(type(test_page))
# Create the actual plugin instance.
content_instance = content_plugin_class.objects.create(
parent_type=ct,
- parent_id=page.id,
+ parent_id=test_page.id,
placeholder=placeholder,
**kwargs
) |
2c652df7f7ec93ecad0eb23094f12c6acd86256c | python/hello.py | python/hello.py | #!/usr/bin/env python2
# NOTE(review): deliberately obfuscated Python 2 expression (it relies on the
# Py2-only ``func_code`` attribute, so it will not run on Python 3).
# Decoded: the eight trailing lambdas have co_argcount 1..8, which binds
# _=1 .. ________=8; the module/attribute names are assembled from type-name
# slices ('bool'[1] + 'list'[2] -> 'os', 'wr' + 'ite' -> 'write'); the large
# shift expression is an integer that the recursive chr(n % 256) lambda
# decodes into a byte string, which is then written to fd 1 via os.write.
(lambda _, __, ___, ____, _____, ______, _______, ________:
    getattr(
        __import__(True.__class__.__name__[_] + [].__class__.__name__[__]),
        ().__class__.__eq__.__class__.__name__[:__] +
        ().__iter__().__class__.__name__[_____:________]
    )(
        _, (lambda _, __, ___: _(_, __, ___))(
            lambda _, __, ___:
                chr(___ % __) + _(_, __, ___ // __) if ___ else
                (lambda: _).func_code.co_lnotab,
            _ << ________,
            (((_____ << ____) + _) << ((___ << _____) - ___)) + (((((___ << __)
            - _) << ___) + _) << ((_____ << ____) + (_ << _))) + (((_______ <<
            __) - _) << (((((_ << ___) + _)) << ___) + (_ << _))) + (((_______
            << ___) + _) << ((_ << ______) + _)) + (((_______ << ____) - _) <<
            ((_______ << ___))) + (((_ << ____) - _) << ((((___ << __) + _) <<
            __) - _)) - (_______ << ((((___ << __) - _) << __) + _)) + (_______
            << (((((_ << ___) + _)) << __))) - ((((((_ << ___) + _)) << __) +
            _) << ((((___ << __) + _) << _))) + (((_______ << __) - _) <<
            (((((_ << ___) + _)) << _))) + (((___ << ___) + _) << ((_____ <<
            _))) + (_____ << ______) + (_ << ___)
        )
    )
)(
    *(lambda _, __, ___: _(_, __, ___))(
        (lambda _, __, ___:
            [__(___[(lambda: _).func_code.co_nlocals])] +
            _(_, __, ___[(lambda _: _).func_code.co_nlocals:]) if ___ else []
        ),
        lambda _: _.func_code.co_argcount,
        (
            lambda _: _,
            lambda _, __: _,
            lambda _, __, ___: _,
            lambda _, __, ___, ____: _,
            lambda _, __, ___, ____, _____: _,
            lambda _, __, ___, ____, _____, ______: _,
            lambda _, __, ___, ____, _____, ______, _______: _,
            lambda _, __, ___, ____, _____, ______, _______, ________: _
        )
    )
)
| #!/usr/bin/env python2
print 'Hello, World!'
| Fix that damn obfuscated python | Fix that damn obfuscated python
| Python | mit | natemara/super-important-project,natemara/super-important-project | ---
+++
@@ -1,43 +1,2 @@
#!/usr/bin/env python2
-(lambda _, __, ___, ____, _____, ______, _______, ________:
- getattr(
- __import__(True.__class__.__name__[_] + [].__class__.__name__[__]),
- ().__class__.__eq__.__class__.__name__[:__] +
- ().__iter__().__class__.__name__[_____:________]
- )(
- _, (lambda _, __, ___: _(_, __, ___))(
- lambda _, __, ___:
- chr(___ % __) + _(_, __, ___ // __) if ___ else
- (lambda: _).func_code.co_lnotab,
- _ << ________,
- (((_____ << ____) + _) << ((___ << _____) - ___)) + (((((___ << __)
- - _) << ___) + _) << ((_____ << ____) + (_ << _))) + (((_______ <<
- __) - _) << (((((_ << ___) + _)) << ___) + (_ << _))) + (((_______
- << ___) + _) << ((_ << ______) + _)) + (((_______ << ____) - _) <<
- ((_______ << ___))) + (((_ << ____) - _) << ((((___ << __) + _) <<
- __) - _)) - (_______ << ((((___ << __) - _) << __) + _)) + (_______
- << (((((_ << ___) + _)) << __))) - ((((((_ << ___) + _)) << __) +
- _) << ((((___ << __) + _) << _))) + (((_______ << __) - _) <<
- (((((_ << ___) + _)) << _))) + (((___ << ___) + _) << ((_____ <<
- _))) + (_____ << ______) + (_ << ___)
- )
- )
-)(
- *(lambda _, __, ___: _(_, __, ___))(
- (lambda _, __, ___:
- [__(___[(lambda: _).func_code.co_nlocals])] +
- _(_, __, ___[(lambda _: _).func_code.co_nlocals:]) if ___ else []
- ),
- lambda _: _.func_code.co_argcount,
- (
- lambda _: _,
- lambda _, __: _,
- lambda _, __, ___: _,
- lambda _, __, ___, ____: _,
- lambda _, __, ___, ____, _____: _,
- lambda _, __, ___, ____, _____, ______: _,
- lambda _, __, ___, ____, _____, ______, _______: _,
- lambda _, __, ___, ____, _____, ______, _______, ________: _
- )
- )
-)
+print 'Hello, World!' |
8e3b686b413af1340ba1641b65d237791704e117 | protocols/views.py | protocols/views.py | from django.shortcuts import render
from django.conf.urls import *
from django.contrib.auth.decorators import user_passes_test
from .forms import ProtocolForm, TopicFormSet
def can_add_protocols(user):
    """True-ish when ``user`` is logged in and may add protocols.

    NOTE(review): ``is_authenticated`` is called as a method here (older
    Django style).
    """
    return user.is_authenticated() and user.has_perm('protocols.add_protocol')
@user_passes_test(can_add_protocols)
def add(request):
    """Render the add-protocol form; on a valid POST, save protocol + topics.

    NOTE(review): the view re-renders the form after a successful save
    instead of redirecting, and passes ``locals()`` straight to the template.
    """
    # Bind the forms to POST data only when this is a non-empty POST.
    data = request.POST if request.POST else None
    protocol_form = ProtocolForm(data)
    topic_form = TopicFormSet(data)
    if protocol_form.is_valid() and topic_form.is_valid():
        protocol_form.save()
        topic_form.save()
    return render(request, 'protocols/add.html', locals())
| from django.shortcuts import render
from django.conf.urls import *
from django.contrib.auth.decorators import user_passes_test
from .forms import ProtocolForm, TopicFormSet
def can_add_protocols(user):
    """True-ish when ``user`` is logged in and may add protocols.

    NOTE(review): ``is_authenticated`` is called as a method here (older
    Django style).
    """
    return user.is_authenticated() and user.has_perm('protocols.add_protocol')
@user_passes_test(can_add_protocols)
def add(request):
    """Render the add-protocol form; on a valid POST, save protocol + topics.

    NOTE(review): the view re-renders the form after a successful save
    instead of redirecting, and passes ``locals()`` straight to the template.
    """
    # Bind the forms to POST data only when this is a non-empty POST.
    data = request.POST if request.POST else None
    protocol_form = ProtocolForm(data)
    topic_form = TopicFormSet(data)
    if protocol_form.is_valid() and topic_form.is_valid():
        protocol_form.save()
        topic_form.save()
    return render(request, 'protocols/add.html', locals())
def list_all_protocols(request):
    """Render every stored protocol.

    NOTE(review): ``Protocol`` is not among this file's visible imports (only
    the forms are imported), so this raises NameError as written -- it needs
    e.g. ``from .models import Protocol``.  The template context is
    ``locals()``, as in the other views.
    """
    protocols = Protocol.objects.all()
    return render(request, 'protocols/list.html', locals()) | Add method listing all the protocols | Add method listing all the protocols
| Python | mit | Hackfmi/Diaphanum,Hackfmi/Diaphanum | ---
+++
@@ -21,3 +21,7 @@
topic_form.save()
return render(request, 'protocols/add.html', locals())
+
+def list_all_protocols(request):
+ protocols = Protocol.objects.all()
+ return render(request, 'protocols/list.html', locals()) |
93c4039bff64b86e203f8ae3c3c576343cc146c0 | stock_request_picking_type/models/stock_request_order.py | stock_request_picking_type/models/stock_request_order.py | # Copyright 2019 Open Source Integrators
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import api, fields, models
class StockRequestOrder(models.Model):
    """Add a configurable operation (picking) type to stock request orders."""
    _inherit = 'stock.request.order'
    @api.model
    def _get_default_picking_type(self):
        """Default to the first 'stock_request_order' picking type whose
        warehouse belongs to the contextual company (or to no company)."""
        return self.env['stock.picking.type'].search([
            ('code', '=', 'stock_request_order'),
            ('warehouse_id.company_id', 'in',
             [self.env.context.get('company_id', self.env.user.company_id.id),
              False])],
            limit=1).id
    # Operation type applied to transfers generated from this order.
    picking_type_id = fields.Many2one(
        'stock.picking.type', 'Operation Type',
        default=_get_default_picking_type, required=True)
    @api.onchange('warehouse_id')
    def onchange_warehouse_picking_id(self):
        """Realign the picking type when the warehouse changes.

        NOTE(review): ``picking_type_id`` here holds a recordset, not an id,
        and the onchange writes to ``self._origin`` (the stored record)
        instead of assigning to ``self`` -- confirm this is intentional.
        """
        if self.warehouse_id:
            picking_type_id = self.env['stock.picking.type'].\
                search([('code', '=', 'stock_request_order'),
                       ('warehouse_id', '=', self.warehouse_id.id)], limit=1)
            if picking_type_id:
                self._origin.write({'picking_type_id': picking_type_id.id})
| # Copyright 2019 Open Source Integrators
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import api, fields, models
class StockRequestOrder(models.Model):
    """Add a configurable operation (picking) type to stock request orders."""
    _inherit = 'stock.request.order'
    @api.model
    def _get_default_picking_type(self):
        """Default to the first 'stock_request_order' picking type whose
        warehouse belongs to the contextual company (or to no company)."""
        return self.env['stock.picking.type'].search([
            ('code', '=', 'stock_request_order'),
            ('warehouse_id.company_id', 'in',
             [self.env.context.get('company_id', self.env.user.company_id.id),
              False])],
            limit=1).id
    # Operation type applied to transfers generated from this order.
    picking_type_id = fields.Many2one(
        'stock.picking.type', 'Operation Type',
        default=_get_default_picking_type, required=True)
    @api.onchange('warehouse_id')
    def onchange_warehouse_picking_id(self):
        """Realign the picking type when the warehouse changes.

        NOTE(review): ``picking_type_id`` here holds a recordset, not an id,
        and the onchange writes to ``self._origin`` (the stored record)
        instead of assigning to ``self`` -- confirm this is intentional.
        """
        if self.warehouse_id:
            picking_type_id = self.env['stock.picking.type'].\
                search([('code', '=', 'stock_request_order'),
                       ('warehouse_id', '=', self.warehouse_id.id)], limit=1)
            if picking_type_id:
                self._origin.write({'picking_type_id': picking_type_id.id})
    @api.model
    def create(self, vals):
        """On creation, force the picking type that matches the chosen
        warehouse (if such a 'stock_request_order' type exists)."""
        if vals.get('warehouse_id', False):
            picking_type_id = self.env['stock.picking.type'].\
                search([('code', '=', 'stock_request_order'),
                       ('warehouse_id', '=', vals['warehouse_id'])], limit=1)
            if picking_type_id:
                vals.update({'picking_type_id': picking_type_id.id})
        return super().create(vals)
| Set Picking Type in Create | [IMP] Set Picking Type in Create
[IMP] Flake8
| Python | agpl-3.0 | OCA/stock-logistics-warehouse,OCA/stock-logistics-warehouse,OCA/stock-logistics-warehouse,OCA/stock-logistics-warehouse | ---
+++
@@ -28,3 +28,13 @@
('warehouse_id', '=', self.warehouse_id.id)], limit=1)
if picking_type_id:
self._origin.write({'picking_type_id': picking_type_id.id})
+
+ @api.model
+ def create(self, vals):
+ if vals.get('warehouse_id', False):
+ picking_type_id = self.env['stock.picking.type'].\
+ search([('code', '=', 'stock_request_order'),
+ ('warehouse_id', '=', vals['warehouse_id'])], limit=1)
+ if picking_type_id:
+ vals.update({'picking_type_id': picking_type_id.id})
+ return super().create(vals) |
dc54aad6813f5ef1828f4706d87eab9f91af1c5a | pronto/serializers/obo.py | pronto/serializers/obo.py | import io
from typing import BinaryIO, ClassVar
from ..term import Term, TermData
from ..relationship import Relationship, RelationshipData
from ._fastobo import FastoboSerializer
from .base import BaseSerializer
class OboSerializer(FastoboSerializer, BaseSerializer):
    """Serializer that renders an ontology in the OBO flat-file format."""
    format = "obo"
    def dump(self, file):
        """Write the ontology to *file* (a binary stream) as UTF-8 OBO text.

        Layout: optional header frame first, then term frames, then typedef
        frames, with a single blank line between consecutive frames.

        NOTE(review): frames are emitted in the insertion order of the
        ``self.ont._terms`` / ``self.ont._relationships`` dicts, so output
        ordering depends on how the ontology was populated.
        NOTE(review): ``writer`` wraps *file* but is only ever detached; all
        writes go directly to *file* as encoded bytes.
        """
        writer = io.TextIOWrapper(file)
        try:
            # dump the header
            if self.ont.metadata:
                header = self._to_header_frame(self.ont.metadata)
                file.write(str(header).encode("utf-8"))
                if self.ont._terms or self.ont._relationships:
                    file.write(b"\n")
            # dump terms
            if self.ont._terms:
                for i, (id, data) in enumerate(self.ont._terms.items()):
                    frame = self._to_term_frame(Term(self.ont, data))
                    file.write(str(frame).encode("utf-8"))
                    if i < len(self.ont._terms) - 1 or self.ont._relationships:
                        file.write(b"\n")
            # dump typedefs
            if self.ont._relationships:
                for i, (id, data) in enumerate(self.ont._relationships.items()):
                    frame = self._to_typedef_frame(Relationship(self.ont, data))
                    file.write(str(frame).encode("utf-8"))
                    if i < len(self.ont._relationships) - 1:
                        file.write(b"\n")
        finally:
            writer.detach()
| import io
from typing import BinaryIO, ClassVar
from ..term import Term, TermData
from ..relationship import Relationship, RelationshipData
from ._fastobo import FastoboSerializer
from .base import BaseSerializer
class OboSerializer(FastoboSerializer, BaseSerializer):
    """Serializer that renders an ontology in the OBO flat-file format."""
    format = "obo"
    def dump(self, file):
        """Write the ontology to *file* (a binary stream) as UTF-8 OBO text.

        Layout: optional header frame first, then term frames, then typedef
        frames, with a single blank line between consecutive frames.  Term
        and typedef ids are sorted so the output is deterministic regardless
        of the dicts' insertion order.

        NOTE(review): ``writer`` wraps *file* but is only ever detached; all
        writes go directly to *file* as encoded bytes.
        """
        writer = io.TextIOWrapper(file)
        try:
            # dump the header
            if self.ont.metadata:
                header = self._to_header_frame(self.ont.metadata)
                file.write(str(header).encode("utf-8"))
                if self.ont._terms or self.ont._relationships:
                    file.write(b"\n")
            # dump terms
            if self.ont._terms:
                for i, id in enumerate(sorted(self.ont._terms)):
                    data = self.ont._terms[id]
                    frame = self._to_term_frame(Term(self.ont, data))
                    file.write(str(frame).encode("utf-8"))
                    if i < len(self.ont._terms) - 1 or self.ont._relationships:
                        file.write(b"\n")
            # dump typedefs
            if self.ont._relationships:
                for i, id in enumerate(sorted(self.ont._relationships)):
                    data = self.ont._relationships[id]
                    frame = self._to_typedef_frame(Relationship(self.ont, data))
                    file.write(str(frame).encode("utf-8"))
                    if i < len(self.ont._relationships) - 1:
                        file.write(b"\n")
        finally:
            writer.detach()
| Fix OBO serializer assuming `Ontology._terms` is properly ordered | Fix OBO serializer assuming `Ontology._terms` is properly ordered
| Python | mit | althonos/pronto | ---
+++
@@ -22,14 +22,16 @@
file.write(b"\n")
# dump terms
if self.ont._terms:
- for i, (id, data) in enumerate(self.ont._terms.items()):
+ for i, id in enumerate(sorted(self.ont._terms)):
+ data = self.ont._terms[id]
frame = self._to_term_frame(Term(self.ont, data))
file.write(str(frame).encode("utf-8"))
if i < len(self.ont._terms) - 1 or self.ont._relationships:
file.write(b"\n")
# dump typedefs
if self.ont._relationships:
- for i, (id, data) in enumerate(self.ont._relationships.items()):
+ for i, id in enumerate(sorted(self.ont._relationships)):
+ data = self.ont._relationships[id]
frame = self._to_typedef_frame(Relationship(self.ont, data))
file.write(str(frame).encode("utf-8"))
if i < len(self.ont._relationships) - 1: |
47d329691e11ea332c17931ca40a822de788acfb | sanic_sentry.py | sanic_sentry.py | import logging
import sanic
import raven
import raven_aiohttp
from raven.handlers.logging import SentryHandler
class SanicSentry:
    """Sanic plugin that forwards log records at/above a level to Sentry."""
    def __init__(self, app=None):
        # Deferred-init pattern: attributes are populated by init_app().
        self.app = None
        self.handler = None
        self.client = None
        if app is not None:
            self.init_app(app)
    def init_app(self, app: sanic.Sanic):
        """Create the Raven client/handler and attach it to the 'sanic' logger.

        Reads ``SENTRY_DSN`` (required) and ``SENTRY_LEVEL`` (default:
        ``logging.ERROR``) from ``app.config``.
        """
        self.client = raven.Client(
            dsn=app.config['SENTRY_DSN'],
            transport=raven_aiohttp.AioHttpTransport,
        )
        self.handler = SentryHandler(client=self.client, level=app.config.get('SENTRY_LEVEL', logging.ERROR))
        logger = logging.getLogger('sanic')
        logger.addHandler(self.handler)
        self.app = app
        self.app.sentry = self
| import logging
import sanic
from sanic.log import logger
import raven
import raven_aiohttp
from raven.handlers.logging import SentryHandler
class SanicSentry:
    """Sanic plugin that ships log records to Sentry via a Raven client."""

    def __init__(self, app=None):
        # Support both immediate and deferred (init_app) initialisation.
        self.app = self.handler = self.client = None
        if app is not None:
            self.init_app(app)

    def init_app(self, app: sanic.Sanic):
        """Wire a Raven client and logging handler into Sanic's logger.

        Reads ``SENTRY_DSN`` (required) and ``SENTRY_LEVEL`` (default:
        ``logging.ERROR``) from ``app.config``.
        """
        dsn = app.config['SENTRY_DSN']
        level = app.config.get('SENTRY_LEVEL', logging.ERROR)
        self.client = raven.Client(dsn=dsn, transport=raven_aiohttp.AioHttpTransport)
        self.handler = SentryHandler(client=self.client, level=level)
        logger.addHandler(self.handler)
        self.app = app
        self.app.sentry = self
| Fix to work on Sanic 0.7 | Fix to work on Sanic 0.7
Sanic 0.7 changed the logger name and is now using the 'root' logger (instead of 'sanic').
I think it is better to import it directly from Sanic than to use `logging.getLogger` to avoid this kind of problem in the future... | Python | mit | serathius/sanic-sentry | ---
+++
@@ -1,6 +1,7 @@
import logging
import sanic
+from sanic.log import logger
import raven
import raven_aiohttp
@@ -21,7 +22,6 @@
transport=raven_aiohttp.AioHttpTransport,
)
self.handler = SentryHandler(client=self.client, level=app.config.get('SENTRY_LEVEL', logging.ERROR))
- logger = logging.getLogger('sanic')
logger.addHandler(self.handler)
self.app = app
self.app.sentry = self |
aee26ebb12ddcc410ad1b0eccf8fd740c6b9b39a | demo.py | demo.py | if __name__ == '__main__':
    # Demo pipeline: load a Brainbow TIFF, preprocess it, cluster, write output.
    from NeuroIO import NeuroIO
    from PreProcess import PreProcess
    from Cluster import Cluster
    # Load the image stack and remember where it came from.
    neuroread = NeuroIO(r"C:\Users\USERNAME\Downloads\Brainbow-demo.tif")
    img_data = neuroread.img_data_return()[0]
    img_path = neuroread.img_data_return()[1]
    # Preprocess (multiprocessing enabled): blur, auto threshold, watershed.
    pre_processed_data = PreProcess(im_data=img_data, filepath=img_path, snapshotpath=r"C:\UROP\\", multiprocess=True,
                                    cores="auto")
    pre_processed_data.blur(blur_sigma=0.5, xyz_scale=(1, 1, 1))
    pre_processed_data.find_threshold(method="isodata", snapshot=True)
    refined_mask = pre_processed_data.sobel_watershed(threshold="last", snapshot=True)
    # Switch to lab mode (presumably CIELAB colour -- confirm) before clustering.
    pre_processed_data.lab_mode()
    img_lab = pre_processed_data.return_data()
    segment = Cluster(img_data=img_lab, mask=refined_mask)
    # DBSCAN over frames 0-100, weighting spatial distance vs colour distance.
    cluster_results = segment.DBSCAN(start_frame=0, end_frame=100, size_threshold=75, max_dist=19, min_samp=10,
                                     dist_weight=3.0,
                                     color_weight=18.0, metric="euclidean", algo="ball_tree")
    # Write the clustered output to the desktop.
    neuroread.img_data_write(cluster_results, "C:\Users\USERNAME\Desktop\\")
| if __name__ == '__main__':
    # Demo pipeline: load a Brainbow TIFF, preprocess it, cluster, write output.
    from NeuroIO import NeuroIO
    from PreProcess import PreProcess
    from Cluster import Cluster
    # Load the image stack and remember where it came from.
    neuroread = NeuroIO(r"C:\Users\USERNAME\Downloads\Brainbow-demo.tif")
    img_data = neuroread.img_data_return()[0]
    img_path = neuroread.img_data_return()[1]
    # Preprocess (multiprocessing enabled): blur, auto threshold, watershed.
    pre_processed_data = PreProcess(im_data=img_data, filepath=img_path, snapshotpath=r"C:\UROP\\",
                                    multiprocess=True, cores="auto")
    pre_processed_data.blur(blur_sigma=0.5, xyz_scale=(1, 1, 1))
    pre_processed_data.find_threshold(method="isodata", snapshot=True)
    refined_mask = pre_processed_data.sobel_watershed(threshold="last", snapshot=True)
    # Switch to lab mode (presumably CIELAB colour -- confirm) before clustering.
    pre_processed_data.lab_mode()
    img_lab = pre_processed_data.return_data()
    segment = Cluster(img_data=img_lab, mask=refined_mask)
    # Parallel super-pixel clustering over frames 0-100, split into 4 slices.
    cluster_results = segment.super_pixel(start_frame=0, end_frame=100, size_threshold=75, max_dist=19, min_samp=10,
                                          dist_weight=3.0, color_weight=18.0, metric="euclidean", algo="auto",
                                          multiprocess=True, num_cores="auto", num_slices=4)
    # Write the clustered output to the desktop.
    neuroread.img_data_write(cluster_results, "C:\Users\USERNAME\Desktop\\")
| Enable support for the parallel dbscan. | Enable support for the parallel dbscan.
| Python | apache-2.0 | aluo-x/GRIDFIRE | ---
+++
@@ -6,15 +6,15 @@
neuroread = NeuroIO(r"C:\Users\USERNAME\Downloads\Brainbow-demo.tif")
img_data = neuroread.img_data_return()[0]
img_path = neuroread.img_data_return()[1]
- pre_processed_data = PreProcess(im_data=img_data, filepath=img_path, snapshotpath=r"C:\UROP\\", multiprocess=True,
- cores="auto")
+ pre_processed_data = PreProcess(im_data=img_data, filepath=img_path, snapshotpath=r"C:\UROP\\",
+ multiprocess=True, cores="auto")
pre_processed_data.blur(blur_sigma=0.5, xyz_scale=(1, 1, 1))
pre_processed_data.find_threshold(method="isodata", snapshot=True)
refined_mask = pre_processed_data.sobel_watershed(threshold="last", snapshot=True)
pre_processed_data.lab_mode()
img_lab = pre_processed_data.return_data()
segment = Cluster(img_data=img_lab, mask=refined_mask)
- cluster_results = segment.DBSCAN(start_frame=0, end_frame=100, size_threshold=75, max_dist=19, min_samp=10,
- dist_weight=3.0,
- color_weight=18.0, metric="euclidean", algo="ball_tree")
+ cluster_results = segment.super_pixel(start_frame=0, end_frame=100, size_threshold=75, max_dist=19, min_samp=10,
+ dist_weight=3.0, color_weight=18.0, metric="euclidean", algo="auto",
+ multiprocess=True, num_cores="auto", num_slices=4)
neuroread.img_data_write(cluster_results, "C:\Users\USERNAME\Desktop\\") |
8c2db8786a0dd08c7ca039f491260f9407eb946c | dodo.py | dodo.py | # coding: utf8
# doit configuration: run no tasks unless one is named explicitly.
DOIT_CONFIG = {'default_tasks': []}

# CiteULike group whose shared bibliography backs the docs.
CITEULIKE_GROUP = 19073
# Where the downloaded BibTeX file is stored.
BIBFILE = 'docs/pyfssa.bib'


def task_download_bib():
    """Download bibliography from CiteULike group"""
    url = 'http://www.citeulike.org/bibtex/group/{}?incl_amazon=0&key_type=4'.format(CITEULIKE_GROUP)
    return {
        'actions': ['wget -O {} "{}"'.format(BIBFILE, url)],
        # 'file_dep': [CITEULIKE_COOKIES],
        'targets': [BIBFILE],
    }
| # coding: utf8
import os
# doit configuration: run no tasks unless one is named explicitly.
DOIT_CONFIG = {'default_tasks': []}

# CiteULike group whose shared bibliography backs the docs.
CITEULIKE_GROUP = 19073
# Where the downloaded BibTeX file is stored.
BIBFILE = 'docs/pyfssa.bib'


def task_download_bib():
    """Download bibliography from CiteULike group"""
    url = 'http://www.citeulike.org/bibtex/group/{}?incl_amazon=0&key_type=4'.format(CITEULIKE_GROUP)
    return {
        'actions': ['wget -O {} "{}"'.format(BIBFILE, url)],
        # 'file_dep': [CITEULIKE_COOKIES],
        'targets': [BIBFILE],
    }


def task_upload_doc():
    """Upload built html documentation to GitHub pages"""
    ghp_command = [
        'ghp-import',
        '-n',  # Include a .nojekyll file in the branch.
        '-p',  # Push the branch to origin/{branch} after committing.
        os.path.join('docs', '_build', 'html'),
    ]
    return {'actions': [ghp_command]}
| Add task to upload documentation to github pages | Add task to upload documentation to github pages
| Python | isc | andsor/pyfssa,andsor/pyfssa | ---
+++
@@ -1,4 +1,6 @@
# coding: utf8
+
+import os
DOIT_CONFIG = {'default_tasks': []}
@@ -17,3 +19,16 @@
# 'file_dep': [CITEULIKE_COOKIES],
'targets': [BIBFILE],
}
+
+
+def task_upload_doc():
+ """Upload built html documentation to GitHub pages"""
+
+ return {
+ 'actions': [[
+ 'ghp-import',
+ '-n', # Include a .nojekyll file in the branch.
+ '-p', # Push the branch to origin/{branch} after committing.
+ os.path.join('docs', '_build', 'html')
+ ]],
+ } |
a52c1669a843e8afcf629de819e8144d6832bc7b | sensors_test.py | sensors_test.py | from TSL2561 import TSL2561
from MCP9808 import MCP9808
import time
def main():
    """Poll the TSL2561 light sensor and MCP9808 temperature sensor once a
    second, printing each reading; loops forever until interrupted."""
    tsl = TSL2561(debug=0)
    mcp = MCP9808(debug=0)
    #tsl.set_gain(16)
    while True:
        full = tsl.read_full()  # raw "full" channel reading
        ir = tsl.read_IR()  # raw IR channel reading
        lux = tsl.read_lux()  # computed illuminance
        print("%d,%d = %d lux" % (full, ir, lux))
        temp = mcp.read_temp()
        print("%0.2f degC" % temp)
        time.sleep(1)
if __name__ == '__main__':
    main()
| from TSL2561 import TSL2561
from MCP9808 import MCP9808
import time
import wiringpi2 as wiringpi
def main():
    """Poll the light/temperature sensors once a second, printing readings
    and driving the PWM output on GPIO pin 18 from the measured lux."""
    wiringpi.wiringPiSetupGpio()
    wiringpi.pinMode(18,2) # enable PWM mode on pin 18
    tsl = TSL2561(debug=0)
    mcp = MCP9808(debug=0)
    #tsl.set_gain(16)
    while True:
        full = tsl.read_full()  # raw "full" channel reading
        ir = tsl.read_IR()  # raw IR channel reading
        lux = tsl.read_lux()  # computed illuminance
        print("%d,%d = %d lux" % (full, ir, lux))
        temp = mcp.read_temp()
        print("%0.2f degC" % temp)
        # Clamp to 1024 (presumably the PWM duty-cycle maximum -- confirm).
        wiringpi.pwmWrite(18, min(int(lux), 1024))
        time.sleep(1)
if __name__ == '__main__':
    main()
| Add light PWM control to test code. | Add light PWM control to test code.
| Python | mit | liffiton/ATLeS,liffiton/ATLeS,liffiton/ATLeS,liffiton/ATLeS | ---
+++
@@ -1,8 +1,12 @@
from TSL2561 import TSL2561
from MCP9808 import MCP9808
import time
+import wiringpi2 as wiringpi
+
def main():
+ wiringpi.wiringPiSetupGpio()
+ wiringpi.pinMode(18,2) # enable PWM mode on pin 18
tsl = TSL2561(debug=0)
mcp = MCP9808(debug=0)
#tsl.set_gain(16)
@@ -13,8 +17,8 @@
print("%d,%d = %d lux" % (full, ir, lux))
temp = mcp.read_temp()
print("%0.2f degC" % temp)
+ wiringpi.pwmWrite(18, min(int(lux), 1024))
time.sleep(1)
-
if __name__ == '__main__': |
49645ca7f579e5499f21e2192d16f4eed1271e82 | tests/integration/cli/sync_test.py | tests/integration/cli/sync_test.py | from mock import patch
from ...testcases import DustyIntegrationTestCase
from ...fixtures import busybox_single_app_bundle_fixture
class TestSyncCLI(DustyIntegrationTestCase):
    """Integration tests for the ``dusty sync`` CLI command."""
    def setUp(self):
        super(TestSyncCLI, self).setUp()
        # Bring up a single busybox app so there is a container to sync into.
        busybox_single_app_bundle_fixture()
        self.run_command('bundles activate busyboxa')
        self.run_command('up')
    def test_sync_repo(self):
        # Delete the synced repo inside the container, then check that
        # `sync` restores its contents.
        self.exec_in_container('busyboxa', 'rm -rf /repo')
        self.assertFileNotInContainer('busyboxa', '/repo/README.md')
        self.run_command('sync fake-repo')
        self.assertFileContentsInContainer('busyboxa',
                                           '/repo/README.md',
                                           '# fake-repo')
| from ...testcases import DustyIntegrationTestCase
from ...fixtures import busybox_single_app_bundle_fixture
class TestSyncCLI(DustyIntegrationTestCase):
    """Integration tests for the ``dusty sync`` CLI command."""
    def setUp(self):
        super(TestSyncCLI, self).setUp()
        # Bring up a single busybox app so there is a container to sync into.
        busybox_single_app_bundle_fixture()
        self.run_command('bundles activate busyboxa')
        self.run_command('up')
    def tearDown(self):
        super(TestSyncCLI, self).tearDown()
        # Undo setUp so later tests start from a clean slate; `stop` may fail
        # when the containers are already down, which is fine to ignore.
        self.run_command('bundles deactivate busyboxa')
        try:
            self.run_command('stop')
        except Exception:
            pass
    def test_sync_repo(self):
        # Delete the synced repo inside the container, then check that
        # `sync` restores its contents.
        self.exec_in_container('busyboxa', 'rm -rf /repo')
        self.assertFileNotInContainer('busyboxa', '/repo/README.md')
        self.run_command('sync fake-repo')
        self.assertFileContentsInContainer('busyboxa',
                                           '/repo/README.md',
                                           '# fake-repo')
| Clean up after sync integration tests | Clean up after sync integration tests
| Python | mit | gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty | ---
+++
@@ -1,5 +1,3 @@
-from mock import patch
-
from ...testcases import DustyIntegrationTestCase
from ...fixtures import busybox_single_app_bundle_fixture
@@ -10,6 +8,14 @@
self.run_command('bundles activate busyboxa')
self.run_command('up')
+ def tearDown(self):
+ super(TestSyncCLI, self).tearDown()
+ self.run_command('bundles deactivate busyboxa')
+ try:
+ self.run_command('stop')
+ except Exception:
+ pass
+
def test_sync_repo(self):
self.exec_in_container('busyboxa', 'rm -rf /repo')
self.assertFileNotInContainer('busyboxa', '/repo/README.md') |
2643562c03f057d91a325492e4561ce7676dc6b6 | thinglang/parser/tokens/classes.py | thinglang/parser/tokens/classes.py | from thinglang.lexer.symbols import LexicalGroupEnd
from thinglang.lexer.symbols.base import LexicalIdentifier
from thinglang.lexer.symbols.functions import LexicalDeclarationConstructor
from thinglang.parser.tokens import DefinitionPairToken, BaseToken
from thinglang.parser.tokens.functions import ArgumentList
class ThingDefinition(DefinitionPairToken):
    """Parsed `thing` declaration; children are looked up by their `.value`."""

    def __contains__(self, item):
        # Membership test over the children's values.
        for child in self.children:
            if child.value == item:
                return True
        return False

    def __getitem__(self, item):
        # First child whose value equals `item`; raises IndexError when absent.
        matches = [child for child in self.children if child.value == item]
        return matches[0]

    def describe(self):
        return self.name
class MethodDefinition(BaseToken):
    """Parsed method declaration: either a named method or a constructor."""
    def __init__(self, slice):
        # NOTE(review): the parameter `slice` shadows the builtin of the same
        # name (kept as-is since callers may pass it by keyword).
        super(MethodDefinition, self).__init__(slice)
        if isinstance(slice[0], LexicalDeclarationConstructor):
            # Constructors carry no explicit name token.
            self.name = LexicalIdentifier.constructor()
            argument_list = slice[1]
        else:
            self.name = slice[1]
            argument_list = slice[2]
        # The argument list token is optional; default to an empty list.
        if isinstance(argument_list, ArgumentList):
            self.arguments = argument_list
        else:
            self.arguments = ArgumentList()
    def describe(self):
        return '{}, args={}'.format(self.name, self.arguments)
class MemberDefinition(BaseToken):
def __init__(self, slice):
super(MemberDefinition, self).__init__(slice)
_, self.type, self.name = slice
def describe(self):
return 'has {} {}'.format(self.type, self.name)
| from thinglang.lexer.symbols import LexicalGroupEnd
from thinglang.lexer.symbols.base import LexicalIdentifier
from thinglang.lexer.symbols.functions import LexicalDeclarationConstructor
from thinglang.parser.tokens import DefinitionPairToken, BaseToken
from thinglang.parser.tokens.functions import ArgumentList
class ThingDefinition(DefinitionPairToken):
def __contains__(self, item):
return any(child.name == item for child in self.children)
def __getitem__(self, item):
return [child for child in self.children if child.name == item][0]
def describe(self):
return self.name
class MethodDefinition(BaseToken):
def __init__(self, slice):
super(MethodDefinition, self).__init__(slice)
if isinstance(slice[0], LexicalDeclarationConstructor):
self.name = LexicalIdentifier.constructor()
argument_list = slice[1]
else:
self.name = slice[1]
argument_list = slice[2]
if isinstance(argument_list, ArgumentList):
self.arguments = argument_list
else:
self.arguments = ArgumentList()
def describe(self):
return '{}, args={}'.format(self.name, self.arguments)
class MemberDefinition(BaseToken):
def __init__(self, slice):
super(MemberDefinition, self).__init__(slice)
_, self.type, self.name = slice
def describe(self):
return 'has {} {}'.format(self.type, self.name)
| Fix value/name ambiguity in ThingDefinition | Fix value/name ambiguity in ThingDefinition
| Python | mit | ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang | ---
+++
@@ -8,10 +8,10 @@
class ThingDefinition(DefinitionPairToken):
def __contains__(self, item):
- return any(child.value == item for child in self.children)
+ return any(child.name == item for child in self.children)
def __getitem__(self, item):
- return [child for child in self.children if child.value == item][0]
+ return [child for child in self.children if child.name == item][0]
def describe(self):
return self.name |
d86b537a3820b23d66b5a8d52d15ae5d11c2b34b | spacy/lang/da/__init__.py | spacy/lang/da/__init__.py | # coding: utf8
from __future__ import unicode_literals
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from .norm_exceptions import NORM_EXCEPTIONS
from .stop_words import STOP_WORDS
from .lex_attrs import LEX_ATTRS
from .morph_rules import MORPH_RULES
from ..tag_map import TAG_MAP
from ..tokenizer_exceptions import BASE_EXCEPTIONS
from ..norm_exceptions import BASE_NORMS
from ...language import Language
from ...attrs import LANG, NORM
from ...util import update_exc, add_lookups
class DanishDefaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters.update(LEX_ATTRS)
lex_attr_getters[LANG] = lambda text: 'da'
lex_attr_getters[NORM] = add_lookups(Language.Defaults.lex_attr_getters[NORM],
BASE_NORMS, NORM_EXCEPTIONS)
tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
# morph_rules = MORPH_RULES
tag_map = TAG_MAP
stop_words = STOP_WORDS
class Danish(Language):
lang = 'da'
Defaults = DanishDefaults
__all__ = ['Danish']
| # coding: utf8
from __future__ import unicode_literals
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from .norm_exceptions import NORM_EXCEPTIONS
from .stop_words import STOP_WORDS
from .lex_attrs import LEX_ATTRS
from .morph_rules import MORPH_RULES
from ..tag_map import TAG_MAP
from ..tokenizer_exceptions import BASE_EXCEPTIONS
from ..norm_exceptions import BASE_NORMS
from ...language import Language
from ...attrs import LANG, NORM
from ...util import update_exc, add_lookups
class DanishDefaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters.update(LEX_ATTRS)
lex_attr_getters[LANG] = lambda text: 'da'
lex_attr_getters[NORM] = add_lookups(Language.Defaults.lex_attr_getters[NORM],
BASE_NORMS, NORM_EXCEPTIONS)
tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
morph_rules = MORPH_RULES
tag_map = TAG_MAP
stop_words = STOP_WORDS
class Danish(Language):
lang = 'da'
Defaults = DanishDefaults
__all__ = ['Danish']
| Enable morph rules for Danish | Enable morph rules for Danish
| Python | mit | explosion/spaCy,recognai/spaCy,aikramer2/spaCy,explosion/spaCy,honnibal/spaCy,recognai/spaCy,aikramer2/spaCy,explosion/spaCy,recognai/spaCy,spacy-io/spaCy,spacy-io/spaCy,spacy-io/spaCy,spacy-io/spaCy,recognai/spaCy,aikramer2/spaCy,aikramer2/spaCy,honnibal/spaCy,recognai/spaCy,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy,explosion/spaCy,aikramer2/spaCy,spacy-io/spaCy,explosion/spaCy,honnibal/spaCy,aikramer2/spaCy,recognai/spaCy | ---
+++
@@ -22,7 +22,7 @@
lex_attr_getters[NORM] = add_lookups(Language.Defaults.lex_attr_getters[NORM],
BASE_NORMS, NORM_EXCEPTIONS)
tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
- # morph_rules = MORPH_RULES
+ morph_rules = MORPH_RULES
tag_map = TAG_MAP
stop_words = STOP_WORDS
|
62ea03ce0de1a0ddc0879416a93ebc82ec30ecdd | examples/flask_example/example/settings.py | examples/flask_example/example/settings.py | from example import app
app.debug = False
SECRET_KEY = 'random-secret-key'
SESSION_COOKIE_NAME = 'psa_session'
DEBUG = False
SQLALCHEMY_DATABASE_URI = 'sqlite:///test.db'
DEBUG_TB_INTERCEPT_REDIRECTS = False
SESSION_PROTECTION = 'strong'
| from example import app
app.debug = True
SECRET_KEY = 'random-secret-key'
SESSION_COOKIE_NAME = 'psa_session'
DEBUG = False
SQLALCHEMY_DATABASE_URI = 'sqlite:///test.db'
DEBUG_TB_INTERCEPT_REDIRECTS = False
SESSION_PROTECTION = 'strong'
| Set app in debug mode by default | Set app in debug mode by default
| Python | bsd-3-clause | henocdz/python-social-auth,iruga090/python-social-auth,muhammad-ammar/python-social-auth,nirmalvp/python-social-auth,merutak/python-social-auth,barseghyanartur/python-social-auth,alrusdi/python-social-auth,mrwags/python-social-auth,msampathkumar/python-social-auth,cjltsod/python-social-auth,mrwags/python-social-auth,cmichal/python-social-auth,drxos/python-social-auth,VishvajitP/python-social-auth,S01780/python-social-auth,wildtetris/python-social-auth,lamby/python-social-auth,MSOpenTech/python-social-auth,mchdks/python-social-auth,python-social-auth/social-core,ByteInternet/python-social-auth,python-social-auth/social-app-django,imsparsh/python-social-auth,webjunkie/python-social-auth,alrusdi/python-social-auth,DhiaEddineSaidi/python-social-auth,falcon1kr/python-social-auth,michael-borisov/python-social-auth,robbiet480/python-social-auth,msampathkumar/python-social-auth,mark-adams/python-social-auth,cjltsod/python-social-auth,garrett-schlesinger/python-social-auth,lawrence34/python-social-auth,wildtetris/python-social-auth,frankier/python-social-auth,ononeor12/python-social-auth,rsalmaso/python-social-auth,MSOpenTech/python-social-auth,yprez/python-social-auth,mark-adams/python-social-auth,ariestiyansyah/python-social-auth,chandolia/python-social-auth,daniula/python-social-auth,fearlessspider/python-social-auth,hsr-ba-fs15-dat/python-social-auth,michael-borisov/python-social-auth,rsteca/python-social-auth,nirmalvp/python-social-auth,jeyraof/python-social-auth,tkajtoch/python-social-auth,MSOpenTech/python-social-auth,JerzySpendel/python-social-auth,python-social-auth/social-app-django,lneoe/python-social-auth,mchdks/python-social-auth,python-social-auth/social-core,tkajtoch/python-social-auth,ByteInternet/python-social-auth,bjorand/python-social-auth,python-social-auth/social-app-cherrypy,JerzySpendel/python-social-auth,ariestiyansyah/python-social-auth,webjunkie/python-social-auth,python-social-auth/social-app-django,rsteca/python-social-au
th,chandolia/python-social-auth,yprez/python-social-auth,JJediny/python-social-auth,msampathkumar/python-social-auth,lneoe/python-social-auth,mark-adams/python-social-auth,jameslittle/python-social-auth,falcon1kr/python-social-auth,tkajtoch/python-social-auth,mathspace/python-social-auth,SeanHayes/python-social-auth,firstjob/python-social-auth,jeyraof/python-social-auth,lawrence34/python-social-auth,SeanHayes/python-social-auth,muhammad-ammar/python-social-auth,jeyraof/python-social-auth,joelstanner/python-social-auth,drxos/python-social-auth,barseghyanartur/python-social-auth,degs098/python-social-auth,jameslittle/python-social-auth,DhiaEddineSaidi/python-social-auth,clef/python-social-auth,tutumcloud/python-social-auth,fearlessspider/python-social-auth,JerzySpendel/python-social-auth,drxos/python-social-auth,imsparsh/python-social-auth,JJediny/python-social-auth,duoduo369/python-social-auth,bjorand/python-social-auth,michael-borisov/python-social-auth,jameslittle/python-social-auth,wildtetris/python-social-auth,daniula/python-social-auth,lamby/python-social-auth,nirmalvp/python-social-auth,yprez/python-social-auth,DhiaEddineSaidi/python-social-auth,noodle-learns-programming/python-social-auth,contracode/python-social-auth,firstjob/python-social-auth,tutumcloud/python-social-auth,hsr-ba-fs15-dat/python-social-auth,VishvajitP/python-social-auth,cmichal/python-social-auth,python-social-auth/social-storage-sqlalchemy,jneves/python-social-auth,python-social-auth/social-docs,muhammad-ammar/python-social-auth,chandolia/python-social-auth,contracode/python-social-auth,mchdks/python-social-auth,robbiet480/python-social-auth,mathspace/python-social-auth,imsparsh/python-social-auth,daniula/python-social-auth,nvbn/python-social-auth,tobias47n9e/social-core,ariestiyansyah/python-social-auth,iruga090/python-social-auth,merutak/python-social-auth,mathspace/python-social-auth,ByteInternet/python-social-auth,contracode/python-social-auth,Andygmb/python-social-auth,jneves/python-so
cial-auth,bjorand/python-social-auth,fearlessspider/python-social-auth,clef/python-social-auth,JJediny/python-social-auth,falcon1kr/python-social-auth,Andygmb/python-social-auth,S01780/python-social-auth,duoduo369/python-social-auth,ononeor12/python-social-auth,lneoe/python-social-auth,barseghyanartur/python-social-auth,henocdz/python-social-auth,mrwags/python-social-auth,jneves/python-social-auth,degs098/python-social-auth,S01780/python-social-auth,Andygmb/python-social-auth,firstjob/python-social-auth,merutak/python-social-auth,alrusdi/python-social-auth,webjunkie/python-social-auth,san-mate/python-social-auth,lawrence34/python-social-auth,ononeor12/python-social-auth,VishvajitP/python-social-auth,iruga090/python-social-auth,henocdz/python-social-auth,noodle-learns-programming/python-social-auth,robbiet480/python-social-auth,degs098/python-social-auth,hsr-ba-fs15-dat/python-social-auth,nvbn/python-social-auth,joelstanner/python-social-auth,noodle-learns-programming/python-social-auth,joelstanner/python-social-auth,rsalmaso/python-social-auth,cmichal/python-social-auth,frankier/python-social-auth,lamby/python-social-auth,rsteca/python-social-auth,san-mate/python-social-auth,garrett-schlesinger/python-social-auth,san-mate/python-social-auth,clef/python-social-auth | ---
+++
@@ -1,7 +1,7 @@
from example import app
-app.debug = False
+app.debug = True
SECRET_KEY = 'random-secret-key'
SESSION_COOKIE_NAME = 'psa_session' |
2a9406968552de04c5b3fdd5796dc3693af08a07 | src/engine/file_loader.py | src/engine/file_loader.py | import os
import json
'''
data_dir = os.path.join(
os.path.split(
os.path.split(os.path.dirname(__file__))[0])[0], 'data')
'''
data_dir = os.path.join(os.environ['PORTER'], 'data')
def read_and_parse_json(data_type):
sub_dir = os.path.join(data_dir, data_type)
elements = []
def full_path(file_name):
return os.path.join(sub_dir, file_name)
def only_json(file_name):
return file_name.endswith('.json')
for json_file_name in filter(only_json, map(full_path, os.listdir(sub_dir))):
with open(json_file_name) as json_file:
elements.append(json.load(json_file))
return elements
| import os
import json
data_dir = os.path.join(os.environ['PORTER'], 'data')
def read_and_parse_json(data_type):
sub_dir = os.path.join(data_dir, data_type)
elements = []
def full_path(file_name):
return os.path.join(sub_dir, file_name)
def only_json(file_name):
return file_name.endswith('.json')
for json_file_name in filter(only_json, map(full_path, os.listdir(sub_dir))):
with open(json_file_name) as json_file:
elements.append(json.load(json_file))
return elements
| Remove commented file loading code | Remove commented file loading code
| Python | mit | Tactique/game_engine,Tactique/game_engine | ---
+++
@@ -1,11 +1,6 @@
import os
import json
-'''
-data_dir = os.path.join(
- os.path.split(
- os.path.split(os.path.dirname(__file__))[0])[0], 'data')
-'''
data_dir = os.path.join(os.environ['PORTER'], 'data')
|
0c593e993cb8fb4ea6b3031454ac359efa6aaf5c | states-pelican.py | states-pelican.py | import salt.exceptions
import subprocess
def build_site(name, output="/srv/www"):
# /srv/salt/_states/pelican.py
# Generates static site with pelican -o $output $name
# Sorry.
# -- Jadon Bennett, 2015
ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}
# I don't know how to make this work. But it's cool.
#current_state = __salt__['pelican.current_state'](name)
current_state = "cool"
if __opts__['test'] == True:
ret['comment'] = 'Markdown files from "{0}" will be converted to HTML and put in "{1}"'.format(name,output)
ret['changes'] = {
'old': current_state,
'new': 'New!',
}
ret['result'] = None
return ret
subprocess.call(['pelican', '-o', output, name])
ret['comment'] = 'Static site generated from "{0}".'.format(name)
ret['changes'] = {
'old': current_state,
'new': 'Whoopee!',
}
ret['result'] = True
return ret
| import salt.exceptions
import subprocess
def build_site(name, output="/srv/www"):
# /srv/salt/_states/pelican.py
# Generates static site with pelican -o $output $name
# Sorry.
# -- Jadon Bennett, 2015
ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}
current_state = __salt__['pelican.current_state'](name)
if __opts__['test'] == True:
ret['comment'] = 'Markdown files from "{0}" will be converted to HTML and put in "{1}"'.format(name,output)
ret['changes'] = {
'old': current_state,
'new': 'New!',
}
ret['result'] = None
return ret
new_state = __salt__['pelican.generate'](output,path)
ret['comment'] = 'Static site generated from "{0}".'.format(name)
ret['changes'] = {
'old': current_state,
'new': new_state,
}
ret['result'] = True
return ret
| Move subprocess stuff to an execution module for Pelican. | Move subprocess stuff to an execution module for Pelican.
| Python | mit | lvl1/salt-formulas | ---
+++
@@ -9,9 +9,7 @@
ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}
- # I don't know how to make this work. But it's cool.
- #current_state = __salt__['pelican.current_state'](name)
- current_state = "cool"
+ current_state = __salt__['pelican.current_state'](name)
if __opts__['test'] == True:
ret['comment'] = 'Markdown files from "{0}" will be converted to HTML and put in "{1}"'.format(name,output)
@@ -23,13 +21,13 @@
return ret
- subprocess.call(['pelican', '-o', output, name])
+ new_state = __salt__['pelican.generate'](output,path)
ret['comment'] = 'Static site generated from "{0}".'.format(name)
ret['changes'] = {
'old': current_state,
- 'new': 'Whoopee!',
+ 'new': new_state,
}
ret['result'] = True |
4530325e460d38086201573d85a9ae95fc877a4c | webvtt/exceptions.py | webvtt/exceptions.py |
class MalformedFileError(Exception):
"""Error raised when the file is not well formatted"""
|
class MalformedFileError(Exception):
"""Error raised when the file is not well formatted"""
class MalformedCaptionError(Exception):
"""Error raised when a caption is not well formatted"""
| Add new exception for malformed captions | Add new exception for malformed captions
| Python | mit | glut23/webvtt-py,sampattuzzi/webvtt-py | ---
+++
@@ -2,3 +2,7 @@
class MalformedFileError(Exception):
"""Error raised when the file is not well formatted"""
+
+
+class MalformedCaptionError(Exception):
+ """Error raised when a caption is not well formatted""" |
7d9c4a9f173b856e92e5586d1b961d876fb212a4 | test/dependencies_test.py | test/dependencies_test.py | import luigi
import sciluigi as sl
import os
TESTFILE_PATH = '/tmp/test.out'
class TestTask(sl.Task):
def out_data(self):
return sl.TargetInfo(self, TESTFILE_PATH)
def run(self):
with self.out_data().open('w') as outfile:
outfile.write('File written by luigi\n')
class TestRunTask():
def setup(self):
self.t = sl.new_task(TestTask)
def teardown(self):
self.t = None
os.remove(TESTFILE_PATH)
def test_run(self):
# Run a task with a luigi worker
w = luigi.worker.Worker()
w.add(self.t)
w.run()
w.stop()
assert os.path.isfile(TESTFILE_PATH)
| import luigi
import sciluigi as sl
import os
TESTFILE_PATH = '/tmp/test.out'
class TestTask(sl.Task):
def out_data(self):
return sl.TargetInfo(self, TESTFILE_PATH)
def run(self):
with self.out_data().open('w') as outfile:
outfile.write('File written by luigi\n')
class TestRunTask():
def setup(self):
self.t = sl.new_task(self, TestTask, 'testtask')
def teardown(self):
self.t = None
os.remove(TESTFILE_PATH)
def test_run(self):
# Run a task with a luigi worker
w = luigi.worker.Worker()
w.add(self.t)
w.run()
w.stop()
assert os.path.isfile(TESTFILE_PATH)
| Make sure test works with new API | Make sure test works with new API
| Python | mit | pharmbio/sciluigi,pharmbio/sciluigi,samuell/sciluigi | ---
+++
@@ -16,7 +16,7 @@
class TestRunTask():
def setup(self):
- self.t = sl.new_task(TestTask)
+ self.t = sl.new_task(self, TestTask, 'testtask')
def teardown(self):
self.t = None |
321258a01b735d432fcc103e17c7eb3031c6153f | scheduler.py | scheduler.py | from apscheduler.schedulers.blocking import BlockingScheduler
import logging
import warner
import archiver
import announcer
import flagger
import os
logging.basicConfig()
sched = BlockingScheduler()
@sched.scheduled_job("cron", hour=4)
#@sched.scheduled_job("cron", hour="*", minute="*/10") # for testing
def destalinate_job():
print("Destalinating")
if "SB_TOKEN" not in os.environ or "API_TOKEN" not in os.environ:
print("ERR: Missing at least one Slack environment variable.")
else:
scheduled_warner = warner.Warner()
scheduled_archiver = archiver.Archiver()
scheduled_announcer = announcer.Announcer()
scheduled_flagger = flagger.Flagger()
print("Warning")
scheduled_warner.warn()
print("Archiving")
scheduled_archiver.archive()
print("Announcing")
scheduled_announcer.announce()
print("Flagging")
scheduled_flagger.flag()
print("OK: destalinated")
print("END: destalinate_job")
sched.start()
| from apscheduler.schedulers.blocking import BlockingScheduler
import logging
import warner
import archiver
import announcer
import flagger
import os
# When testing changes, set the "TEST_SCHEDULE" envvar to run more often
if os.getenv("TEST_SCHEDULE"):
schedule_kwargs = {"hour": "*", "minute": "*/10"}
else:
schedule_kwargs = {"hour": 4}
logging.basicConfig()
sched = BlockingScheduler()
@sched.scheduled_job("cron", **schedule_kwargs)
def destalinate_job():
print("Destalinating")
if "SB_TOKEN" not in os.environ or "API_TOKEN" not in os.environ:
print("ERR: Missing at least one Slack environment variable.")
else:
scheduled_warner = warner.Warner()
scheduled_archiver = archiver.Archiver()
scheduled_announcer = announcer.Announcer()
scheduled_flagger = flagger.Flagger()
print("Warning")
scheduled_warner.warn()
print("Archiving")
scheduled_archiver.archive()
print("Announcing")
scheduled_announcer.announce()
print("Flagging")
scheduled_flagger.flag()
print("OK: destalinated")
print("END: destalinate_job")
sched.start()
| Switch to a test schedule based on the environment | Switch to a test schedule based on the environment
Switching an environment variable and kicking the `clock` process feels
like a neater solution than commenting out one line, uncommenting
another, and redeploying.
| Python | apache-2.0 | randsleadershipslack/destalinator,TheConnMan/destalinator,royrapoport/destalinator,underarmour/destalinator,royrapoport/destalinator,TheConnMan/destalinator,randsleadershipslack/destalinator | ---
+++
@@ -6,11 +6,17 @@
import flagger
import os
+
+# When testing changes, set the "TEST_SCHEDULE" envvar to run more often
+if os.getenv("TEST_SCHEDULE"):
+ schedule_kwargs = {"hour": "*", "minute": "*/10"}
+else:
+ schedule_kwargs = {"hour": 4}
+
logging.basicConfig()
sched = BlockingScheduler()
-@sched.scheduled_job("cron", hour=4)
-#@sched.scheduled_job("cron", hour="*", minute="*/10") # for testing
+@sched.scheduled_job("cron", **schedule_kwargs)
def destalinate_job():
print("Destalinating")
if "SB_TOKEN" not in os.environ or "API_TOKEN" not in os.environ: |
da281daf8f83b745dd128b0270dd26d80c952b9e | tests/settings.py | tests/settings.py | """
"""
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
SECRET_KEY = 'hrzeqwz0@nps2#ns3_qkqz*#5=)1bxcdwa*h__hta0f1bqr2e!'
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
TEMPLATE_DIRS = ("tests/templates", )
INSTALLED_APPS = (
'django_nose',
)
for dir in os.listdir("tests/apps"):
if os.path.isfile("tests/apps/%s/urls.py" % dir):
INSTALLED_APPS += ( "tests.apps.%s" % dir, )
MIDDLEWARE_CLASSES = (
)
ROOT_URLCONF = 'tests.urls'
WSGI_APPLICATION = 'tests.wsgi.application'
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
| """
"""
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
SECRET_KEY = 'hrzeqwz0@nps2#ns3_qkqz*#5=)1bxcdwa*h__hta0f1bqr2e!'
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
TEMPLATE_DIRS = ("tests/templates", )
INSTALLED_APPS = (
'django_nose',
'django.contrib.contenttypes',
'django.contrib.auth'
)
for dir in os.listdir("tests/apps"):
if os.path.isfile("tests/apps/%s/urls.py" % dir):
INSTALLED_APPS += ( "tests.apps.%s" % dir, )
MIDDLEWARE_CLASSES = (
)
ROOT_URLCONF = 'tests.urls'
WSGI_APPLICATION = 'tests.wsgi.application'
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
| Add missing installed apps for tests | Add missing installed apps for tests
| Python | mit | alisaifee/djlimiter,alisaifee/djlimiter | ---
+++
@@ -14,6 +14,8 @@
INSTALLED_APPS = (
'django_nose',
+ 'django.contrib.contenttypes',
+ 'django.contrib.auth'
)
for dir in os.listdir("tests/apps"):
if os.path.isfile("tests/apps/%s/urls.py" % dir): |
a7e1b1961d14306f16c97e66982f4aef5b203e0a | tests/test_acf.py | tests/test_acf.py | import io
import pytest
from steamfiles import acf
test_file_name = 'tests/test_data/appmanifest_202970.acf'
@pytest.yield_fixture
def acf_data():
with open(test_file_name, 'rt') as f:
yield f.read()
@pytest.mark.usefixtures('acf_data')
def test_loads_dumps(acf_data):
assert acf.dumps(acf.loads(acf_data)) == acf_data
@pytest.mark.usefixtures('acf_data')
def test_load_dump(acf_data):
with open(test_file_name, 'rt') as in_file:
out_file = io.StringIO()
obj = acf.load(in_file)
acf.dump(out_file, obj)
# Rewind to the beginning
out_file.seek(0)
assert out_file.read() == acf_data
| import io
import os
import pytest
from steamfiles import acf
test_file_name = os.path.join(os.path.dirname(__file__), 'test_data/appmanifest_202970.acf')
@pytest.yield_fixture
def acf_data():
with open(test_file_name, 'rt') as f:
yield f.read()
@pytest.mark.usefixtures('acf_data')
def test_loads_dumps(acf_data):
assert acf.dumps(acf.loads(acf_data)) == acf_data
@pytest.mark.usefixtures('acf_data')
def test_load_dump(acf_data):
with open(test_file_name, 'rt') as in_file:
out_file = io.StringIO()
obj = acf.load(in_file)
acf.dump(out_file, obj)
# Rewind to the beginning
out_file.seek(0)
assert out_file.read() == acf_data
| Fix relative path not working properly 50% of the time… | Fix relative path not working properly 50% of the time…
| Python | mit | leovp/steamfiles | ---
+++
@@ -1,9 +1,9 @@
import io
+import os
import pytest
from steamfiles import acf
-test_file_name = 'tests/test_data/appmanifest_202970.acf'
-
+test_file_name = os.path.join(os.path.dirname(__file__), 'test_data/appmanifest_202970.acf')
@pytest.yield_fixture
def acf_data(): |
e1bdfbb226795f4dd15fefb109ece2aa9659f421 | tests/test_ssl.py | tests/test_ssl.py | from nose.tools import assert_true, assert_false, assert_equal, \
assert_list_equal, raises
import datajoint as dj
from . import CONN_INFO
from pymysql.err import OperationalError
class TestSSL:
@staticmethod
def test_secure_connection():
result = dj.conn(**CONN_INFO, reset=True).query(
"SHOW STATUS LIKE 'Ssl_cipher';").fetchone()[1]
assert_true(len(result) > 0)
@staticmethod
def test_insecure_connection():
result = dj.conn(**CONN_INFO, ssl=False, reset=True).query(
"SHOW STATUS LIKE 'Ssl_cipher';").fetchone()[1]
assert_equal(result, '')
@staticmethod
@raises(OperationalError)
def test_reject_insecure():
result = dj.conn(
CONN_INFO['host'], user='djssl', password='djssl',
ssl=False, reset=True
).query("SHOW STATUS LIKE 'Ssl_cipher';").fetchone()[1]
| from nose.tools import assert_true, assert_false, assert_equal, \
assert_list_equal, raises
import datajoint as dj
from . import CONN_INFO
from pymysql.err import OperationalError
class TestSSL:
# @staticmethod
# def test_secure_connection():
# result = dj.conn(**CONN_INFO, reset=True).query(
# "SHOW STATUS LIKE 'Ssl_cipher';").fetchone()[1]
# assert_true(len(result) > 0)
@staticmethod
def test_insecure_connection():
result = dj.conn(**CONN_INFO, ssl=False, reset=True).query(
"SHOW STATUS LIKE 'Ssl_cipher';").fetchone()[1]
assert_equal(result, '')
@staticmethod
@raises(OperationalError)
def test_reject_insecure():
result = dj.conn(
CONN_INFO['host'], user='djssl', password='djssl',
ssl=False, reset=True
).query("SHOW STATUS LIKE 'Ssl_cipher';").fetchone()[1]
| Disable secure test until new test rig complete. | Disable secure test until new test rig complete.
| Python | lgpl-2.1 | eywalker/datajoint-python,datajoint/datajoint-python,dimitri-yatsenko/datajoint-python | ---
+++
@@ -7,11 +7,11 @@
class TestSSL:
- @staticmethod
- def test_secure_connection():
- result = dj.conn(**CONN_INFO, reset=True).query(
- "SHOW STATUS LIKE 'Ssl_cipher';").fetchone()[1]
- assert_true(len(result) > 0)
+ # @staticmethod
+ # def test_secure_connection():
+ # result = dj.conn(**CONN_INFO, reset=True).query(
+ # "SHOW STATUS LIKE 'Ssl_cipher';").fetchone()[1]
+ # assert_true(len(result) > 0)
@staticmethod
def test_insecure_connection(): |
6eff3cc2fba257e685dadbb19dda8aa667cb799c | tests/mongodb_settings.py | tests/mongodb_settings.py |
from test_project.settings import *
DATABASES['mongo'] = {
'ENGINE' : 'django_mongodb_engine',
'NAME' : 'mutant',
'OPTIONS': {
'OPERATIONS': {
'save' : {'safe' : True},
}
}
}
SOUTH_DATABASE_ADAPTERS = {'mongo': 'django_mongodb_engine.south'}
INSTALLED_APPS.extend(['django_mongodb_engine', 'djangotoolbox'])
# FK and M2M are not supported for nonrel db so we make sure to avoid
# loading mutant.contrib.related
INSTALLED_APPS.remove('mutant.contrib.related')
# But we can test the non rel fields
INSTALLED_APPS.append('mutant.contrib.nonrel')
DATABASE_ROUTERS = (
'mongodb_router.MongoRouter',
)
|
from test_project.settings import *
DATABASES['mongo'] = {
'ENGINE' : 'django_mongodb_engine',
'NAME' : 'mutant',
'OPTIONS': {
'OPERATIONS': {
'save' : {'safe' : True},
}
}
}
SOUTH_DATABASE_ADAPTERS = {'mongo': 'django_mongodb_engine.south_adapter'}
INSTALLED_APPS.extend(['django_mongodb_engine', 'djangotoolbox'])
# FK and M2M are not supported for nonrel db so we make sure to avoid
# loading mutant.contrib.related
INSTALLED_APPS.remove('mutant.contrib.related')
# But we can test the non rel fields
INSTALLED_APPS.append('mutant.contrib.nonrel')
DATABASE_ROUTERS = (
'mongodb_router.MongoRouter',
)
| Make sure to load the new mongo south adapter | Make sure to load the new mongo south adapter
| Python | mit | charettes/django-mutant | ---
+++
@@ -11,7 +11,7 @@
}
}
-SOUTH_DATABASE_ADAPTERS = {'mongo': 'django_mongodb_engine.south'}
+SOUTH_DATABASE_ADAPTERS = {'mongo': 'django_mongodb_engine.south_adapter'}
INSTALLED_APPS.extend(['django_mongodb_engine', 'djangotoolbox'])
|
96469afbd9d01b0e4f43e93a0edfd3dc84bfa2f3 | Monstr/Core/Config.py | Monstr/Core/Config.py | import ConfigParser
Config = ConfigParser.ConfigParser()
import os
print os.getcwd()
try:
Config.read('/opt/monstr/current.cfg')
except Exception as e:
print 'WARNING! Configuration is missing. Using test_conf.cfg'
Config.read('test.cfg')
def get_section(section):
result = {}
if section in Config.sections():
options = Config.options(section)
for option in options:
result[option] = Config.get(section, option)
return result
else:
raise Exception('Requested section is absent in configuration')
| import ConfigParser
import os
CONFIG_PATH = '/opt/monstr/current.cfg'
Config = ConfigParser.ConfigParser()
print os.getcwd()
if os.path.isfile(CONFIG_PATH):
Config.read(CONFIG_PATH)
else:
print 'WARNING! Configuration is missing. Using test_conf.cfg'
Config.read('test.cfg')
def get_section(section):
result = {}
if section in Config.sections():
options = Config.options(section)
for option in options:
result[option] = Config.get(section, option)
return result
else:
raise Exception('Requested section is absent in configuration')
| Check whether current config exists usinf os and if | FIX: Check whether current config exists usinf os and if
| Python | apache-2.0 | tier-one-monitoring/monstr,tier-one-monitoring/monstr | ---
+++
@@ -1,11 +1,14 @@
import ConfigParser
+import os
+
+CONFIG_PATH = '/opt/monstr/current.cfg'
+
+
Config = ConfigParser.ConfigParser()
-
-import os
print os.getcwd()
-try:
- Config.read('/opt/monstr/current.cfg')
-except Exception as e:
+if os.path.isfile(CONFIG_PATH):
+ Config.read(CONFIG_PATH)
+else:
print 'WARNING! Configuration is missing. Using test_conf.cfg'
Config.read('test.cfg')
|
77dbd2dc061e70a414dcd509a79bdb54491274aa | src/index.py | src/index.py | """
SmartAPI Web Server Entry Point
> python index.py
"""
import os.path
from tornado.ioloop import IOLoop, PeriodicCallback
from utils.api_monitor import update_uptime_status
from utils.versioning import backup_and_refresh
import config
from biothings.web.index_base import main
from biothings.web.settings import BiothingESWebSettings
WEB_SETTINGS = BiothingESWebSettings(config=config)
if __name__ == '__main__':
(SRC_PATH, _) = os.path.split(os.path.abspath(__file__))
STATIC_PATH = os.path.join(SRC_PATH, 'static')
IOLoop.current().add_callback(backup_and_refresh)
IOLoop.current().add_callback(update_uptime_status)
PeriodicCallback(backup_and_refresh, 24*60*60*1000).start()
PeriodicCallback(update_uptime_status, 24*60*60*1000, 0.1).start()
main(WEB_SETTINGS.generate_app_list(),
app_settings={"cookie_secret": config.COOKIE_SECRET},
debug_settings={"static_path": STATIC_PATH},
use_curl=True)
| """
SmartAPI Web Server Entry Point
> python index.py
"""
import datetime
import logging
import os.path
from tornado.ioloop import IOLoop
from utils.api_monitor import update_uptime_status
from utils.versioning import backup_and_refresh
import config
from biothings.web.index_base import main
from biothings.web.settings import BiothingESWebSettings
WEB_SETTINGS = BiothingESWebSettings(config=config)
def schedule_daily_job():
tomorrow = datetime.datetime.today() + datetime.timedelta(days=1)
midnight = datetime.datetime.combine(tomorrow, datetime.time.min)
def wrapper():
try:
backup_and_refresh()
update_uptime_status()
except BaseException:
logging.exception("Failed daily job.")
schedule_daily_job()
IOLoop.current().add_timeout(midnight.timestamp(), wrapper)
if __name__ == '__main__':
(SRC_PATH, _) = os.path.split(os.path.abspath(__file__))
STATIC_PATH = os.path.join(SRC_PATH, 'static')
schedule_daily_job()
main(WEB_SETTINGS.generate_app_list(),
app_settings={"cookie_secret": config.COOKIE_SECRET},
debug_settings={"static_path": STATIC_PATH},
use_curl=True)
| Allow accurate daily task time scheduling | Allow accurate daily task time scheduling
| Python | mit | Network-of-BioThings/smartAPI,Network-of-BioThings/smartAPI,Network-of-BioThings/smartAPI,Network-of-BioThings/smartAPI,Network-of-BioThings/smartAPI | ---
+++
@@ -5,9 +5,11 @@
"""
+import datetime
+import logging
import os.path
-from tornado.ioloop import IOLoop, PeriodicCallback
+from tornado.ioloop import IOLoop
from utils.api_monitor import update_uptime_status
from utils.versioning import backup_and_refresh
@@ -17,13 +19,25 @@
WEB_SETTINGS = BiothingESWebSettings(config=config)
+
+def schedule_daily_job():
+ tomorrow = datetime.datetime.today() + datetime.timedelta(days=1)
+ midnight = datetime.datetime.combine(tomorrow, datetime.time.min)
+
+ def wrapper():
+ try:
+ backup_and_refresh()
+ update_uptime_status()
+ except BaseException:
+ logging.exception("Failed daily job.")
+ schedule_daily_job()
+ IOLoop.current().add_timeout(midnight.timestamp(), wrapper)
+
+
if __name__ == '__main__':
(SRC_PATH, _) = os.path.split(os.path.abspath(__file__))
STATIC_PATH = os.path.join(SRC_PATH, 'static')
- IOLoop.current().add_callback(backup_and_refresh)
- IOLoop.current().add_callback(update_uptime_status)
- PeriodicCallback(backup_and_refresh, 24*60*60*1000).start()
- PeriodicCallback(update_uptime_status, 24*60*60*1000, 0.1).start()
+ schedule_daily_job()
main(WEB_SETTINGS.generate_app_list(),
app_settings={"cookie_secret": config.COOKIE_SECRET},
debug_settings={"static_path": STATIC_PATH}, |
7f109dc3e5b1d7ecacc6810aaee456359c70ad40 | validation/base.py | validation/base.py | import functools
_undefined = object()
def validator(f):
@functools.wraps(f)
def wrapper(value=_undefined, **kwargs):
required = kwargs.pop('required', True)
def validate(value_):
if value_ is None:
if required:
raise TypeError()
return
f(value, **kwargs)
if value is not _undefined:
validate(value, **kwargs)
else:
return validate
return wrapper
| import functools
_undefined = object()
def validator(f):
@functools.wraps(f)
def wrapper(value=_undefined, **kwargs):
required = kwargs.pop('required', True)
def validate(value):
if value is None:
if required:
raise TypeError()
return
f(value, **kwargs)
if value is not _undefined:
validate(value, **kwargs)
else:
return validate
return wrapper
| Fix name of first argument to decorated validators | Fix name of first argument to decorated validators
| Python | apache-2.0 | JOIVY/validation | ---
+++
@@ -9,8 +9,8 @@
def wrapper(value=_undefined, **kwargs):
required = kwargs.pop('required', True)
- def validate(value_):
- if value_ is None:
+ def validate(value):
+ if value is None:
if required:
raise TypeError()
return |
9272fd30c70e946bfcc003a2936f57efdaa05bd7 | bindings/jupyroot/python/JupyROOT/__init__.py | bindings/jupyroot/python/JupyROOT/__init__.py | #-----------------------------------------------------------------------------
# Author: Danilo Piparo <Danilo.Piparo@cern.ch> CERN
# Author: Enric Tejedor <enric.tejedor.saavedra@cern.ch> CERN
#-----------------------------------------------------------------------------
################################################################################
# Copyright (C) 1995-2020, Rene Brun and Fons Rademakers. #
# All rights reserved. #
# #
# For the licensing terms see $ROOTSYS/LICENSE. #
# For the list of contributors see $ROOTSYS/README/CREDITS. #
################################################################################
from JupyROOT.helpers import cppcompleter, utils
if '__IPYTHON__' in __builtins__ and __IPYTHON__:
cppcompleter.load_ipython_extension(get_ipython())
utils.iPythonize()
| #-----------------------------------------------------------------------------
# Author: Danilo Piparo <Danilo.Piparo@cern.ch> CERN
# Author: Enric Tejedor <enric.tejedor.saavedra@cern.ch> CERN
#-----------------------------------------------------------------------------
################################################################################
# Copyright (C) 1995-2020, Rene Brun and Fons Rademakers. #
# All rights reserved. #
# #
# For the licensing terms see $ROOTSYS/LICENSE. #
# For the list of contributors see $ROOTSYS/README/CREDITS. #
################################################################################
from JupyROOT.helpers import cppcompleter, utils
# Check if we are in the IPython shell
try:
import builtins
except ImportError:
import __builtin__ as builtins # Py2
_is_ipython = hasattr(builtins, '__IPYTHON__')
if _is_ipython:
cppcompleter.load_ipython_extension(get_ipython())
utils.iPythonize()
| Update logic to check for IPython | [JupyROOT] Update logic to check for IPython
To sync it with what was already introduced in ROOT/__init__.py
| Python | lgpl-2.1 | olifre/root,olifre/root,root-mirror/root,olifre/root,olifre/root,olifre/root,root-mirror/root,root-mirror/root,olifre/root,root-mirror/root,root-mirror/root,root-mirror/root,olifre/root,olifre/root,root-mirror/root,root-mirror/root,root-mirror/root,root-mirror/root,olifre/root,olifre/root,root-mirror/root,olifre/root | ---
+++
@@ -13,6 +13,13 @@
from JupyROOT.helpers import cppcompleter, utils
-if '__IPYTHON__' in __builtins__ and __IPYTHON__:
+# Check if we are in the IPython shell
+try:
+ import builtins
+except ImportError:
+ import __builtin__ as builtins # Py2
+_is_ipython = hasattr(builtins, '__IPYTHON__')
+
+if _is_ipython:
cppcompleter.load_ipython_extension(get_ipython())
utils.iPythonize() |
7ef287f2bb7a783146d5360eb9729aa4e273f7d9 | passgen.py | passgen.py | #!/usr/bin/env python3
import argparse
import random
import string
import sys
def main():
# Set defaults
default_length = 10
default_seed = None
default_population = string.ascii_letters + string.digits + '!%+=.,'
# Set up and parse arguments
p = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
p.add_argument('length', default=default_length, type=int,
help="Length of the password.")
p.add_argument('--population', '-p', default=default_population,
help="List of characters to generate the password from.")
p.add_argument('--seed', '-s', default=default_seed,
help="Seed for the pseudo-random number generator. If "
"omitted, the current system time is used.")
args = p.parse_args()
# Seed the number generator
random.seed(args.seed)
# Generate password
pw = ''
for i in range(1, args.length):
pw += random.choice(args.population)
# Print password
print(pw)
if __name__ == '__main__':
sys.exit(main())
| #!/usr/bin/env python3
import argparse
import random
import string
import sys
def main():
# Set defaults
default_length = 10
default_seed = None
default_population = string.ascii_letters + string.digits + '!%+=.,'
# Set up and parse arguments
p = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
p.add_argument('length', default=default_length, type=int,
help="Length of the password.")
p.add_argument('--population', '-p', default=default_population,
help="List of characters to generate the password from.")
p.add_argument('--seed', '-s', default=default_seed,
help="Seed for the pseudo-random number generator. If "
"omitted, the current system time is used.")
args = p.parse_args()
# Seed the number generator
random.seed(args.seed)
# Generate password
pw = ''
for i in range(0, args.length):
pw += random.choice(args.population)
# Print password
print(pw)
if __name__ == '__main__':
sys.exit(main())
| Fix length (was 1 too short). | Fix length (was 1 too short).
| Python | unlicense | sloede/passgen | ---
+++
@@ -28,7 +28,7 @@
# Generate password
pw = ''
- for i in range(1, args.length):
+ for i in range(0, args.length):
pw += random.choice(args.population)
# Print password |
cee60151acf606a4e22a92c51066b7fb720f35a3 | application/models.py | application/models.py | """
Database Emulator for the teammetrics project
Temporarily the data is generated by accessing data available at http://blend.debian.org/liststats
"""
import urllib2
import logging
def extractMetrics(team, metric):
"""
Parses the data available at the url into a data structure.
"""
url = "http://blends.debian.net/liststats/"+metric+"_"+team+"_year.txt"
lines = urllib2.urlopen(url).readlines()
ll = len(lines)
names = lines[0].split('\t')
results = list()
for i in range (1,ll):
data = lines[i].split('\t')
year = data[0]
results.append(dict());
results[len(results)-1]["year"]=year;
results[len(results)-1]["userdata"]=list();
lw = len(data)
yeardata=dict()
for j in range(1,lw):
results[len(results)-1]["userdata"].append(dict())
results[len(results)-1]["userdata"][len(results[len(results)-1]["userdata"])-1]["user"]=names[j]
results[len(results)-1]["userdata"][len(results[len(results)-1]["userdata"])-1]["data"]=data[j]
metricresult = dict()
metricresult["metric"]=metric
metricresult["data"]=results;
return metricresult
| """
Database Emulator for the teammetrics project
Temporarily the data is generated by accessing data available at http://blend.debian.org/liststats
"""
import urllib2
import logging
def extractMetrics(team, metric):
"""
Parses the data available at the url into a data structure.
"""
url = "http://blends.debian.net/liststats/"+metric+"_"+team+"_year.txt"
lines = urllib2.urlopen(url).readlines()
ll = len(lines)
names = lines[0].split('\t')
results = list()
for i in range(1,ll):
data = lines[i].split('\t')
ld = len(data)
for j in range (1,ld):
if(i==1):
results.append(dict())
results[j-1]["user"]=names[j].strip();
results[j-1]["userdata"]=list()
results[j-1]["userdata"].append(dict())
results[j-1]["userdata"][i-1]["year"]=data[0].strip()
results[j-1]["userdata"][i-1]["data"]=data[j].strip()
metricresult = dict()
metricresult["metric"]=metric
metricresult["data"]=results;
return metricresult
| Update the API to make it more semantic. | Update the API to make it more semantic.
| Python | mit | swvist/debmetrics,swvist/debmetrics | ---
+++
@@ -15,21 +15,19 @@
ll = len(lines)
names = lines[0].split('\t')
results = list()
- for i in range (1,ll):
- data = lines[i].split('\t')
- year = data[0]
- results.append(dict());
- results[len(results)-1]["year"]=year;
- results[len(results)-1]["userdata"]=list();
+ for i in range(1,ll):
+ data = lines[i].split('\t')
+ ld = len(data)
- lw = len(data)
- yeardata=dict()
- for j in range(1,lw):
- results[len(results)-1]["userdata"].append(dict())
- results[len(results)-1]["userdata"][len(results[len(results)-1]["userdata"])-1]["user"]=names[j]
- results[len(results)-1]["userdata"][len(results[len(results)-1]["userdata"])-1]["data"]=data[j]
+ for j in range (1,ld):
+ if(i==1):
+ results.append(dict())
+ results[j-1]["user"]=names[j].strip();
+ results[j-1]["userdata"]=list()
+ results[j-1]["userdata"].append(dict())
+ results[j-1]["userdata"][i-1]["year"]=data[0].strip()
+ results[j-1]["userdata"][i-1]["data"]=data[j].strip()
metricresult = dict()
metricresult["metric"]=metric
metricresult["data"]=results;
return metricresult
- |
5c6f6b63450651b2860e960a4fb16c787537d3ed | kcm.py | kcm.py | #!/usr/bin/env python
"""kcm.
Usage:
kcm (-h | --help)
kcm --version
kcm (init | describe | reconcile) [--conf-dir=<dir>]
kcm isolate [--conf-dir=<dir>] --pool=<pool> <command> [-- <args> ...]
Options:
-h --help Show this screen.
--version Show version.
--conf-dir=<dir> KCM configuration directory [default: /etc/kcm].
--pool=<pool> Pool name: either INFRA, CONTROLPLANE or DATAPLANE.
"""
from intel import config, util
from docopt import docopt
import json
def main():
args = docopt(__doc__, version="KCM 0.1.0")
if args["init"]:
init(args["--conf-dir"])
return
if args["describe"]:
describe(args["--conf-dir"])
return
def init(conf_dir):
util.check_hugepages()
def describe(conf_dir):
c = config.Config(conf_dir)
with c.lock():
print(json.dumps(c.as_dict(), sort_keys=True, indent=2))
if __name__ == "__main__":
main()
| #!/usr/bin/env python
"""kcm.
Usage:
kcm (-h | --help)
kcm --version
kcm (init | describe | reconcile) [--conf-dir=<dir>]
kcm isolate [--conf-dir=<dir>] --pool=<pool> <command> [-- <args> ...]
Options:
-h --help Show this screen.
--version Show version.
--conf-dir=<dir> KCM configuration directory [default: /etc/kcm].
--pool=<pool> Pool name: either infra, controlplane or dataplane.
"""
from intel import config, util
from docopt import docopt
import json
def main():
args = docopt(__doc__, version="KCM 0.1.0")
if args["init"]:
init(args["--conf-dir"])
return
if args["describe"]:
describe(args["--conf-dir"])
return
def init(conf_dir):
util.check_hugepages()
def describe(conf_dir):
c = config.Config(conf_dir)
with c.lock():
print(json.dumps(c.as_dict(), sort_keys=True, indent=2))
if __name__ == "__main__":
main()
| Fix pool names in KCM help. | Fix pool names in KCM help.
| Python | apache-2.0 | Intel-Corp/CPU-Manager-for-Kubernetes,Intel-Corp/CPU-Manager-for-Kubernetes,Intel-Corp/CPU-Manager-for-Kubernetes | ---
+++
@@ -13,7 +13,7 @@
-h --help Show this screen.
--version Show version.
--conf-dir=<dir> KCM configuration directory [default: /etc/kcm].
- --pool=<pool> Pool name: either INFRA, CONTROLPLANE or DATAPLANE.
+ --pool=<pool> Pool name: either infra, controlplane or dataplane.
"""
from intel import config, util
from docopt import docopt |
cd5f4c65777253d265a620194f553f5f4b76881d | l10n_ch_payment_slip/report/__init__.py | l10n_ch_payment_slip/report/__init__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010 Camptocamp SA (http://www.camptocamp.com)
# All Right Reserved
#
# Author : Nicolas Bessi (Camptocamp)
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from . import payment_slip_from_invoice
| # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010 Camptocamp SA (http://www.camptocamp.com)
# All Right Reserved
#
# Author : Nicolas Bessi (Camptocamp)
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from . import payment_slip_from_invoice
from . import reports_common
| Add common in import statement | Add common in import statement
| Python | agpl-3.0 | brain-tec/l10n-switzerland,BT-jmichaud/l10n-switzerland,brain-tec/l10n-switzerland,brain-tec/l10n-switzerland | ---
+++
@@ -29,3 +29,4 @@
#
##############################################################################
from . import payment_slip_from_invoice
+from . import reports_common |
5c7b33574550d37454b4362fa0896a4dad6e98d1 | aesthetic/output/gif.py | aesthetic/output/gif.py | from PIL import Image
from PIL import ImageDraw
def render(animation, out, scale=8):
images = [render_frame(colors, scale=scale) for colors in animation]
save_gif(out, *images)
def render_frame(colors, scale=8):
led_count = 53
size = (led_count * scale, scale)
im = Image.new("RGB", size, "black")
d = ImageDraw.Draw(im)
for idx, color in enumerate(colors):
color = tuple(map(int, color))
x0 = scale * idx
y0 = 0
x1 = scale * (idx + 1)
y1 = scale
d.rectangle((x0, y0, x1, y1), fill=color)
return im
def save_gif(out, image, *more_images):
image.save(out, save_all=True,
append_images=list(more_images),
loop=1000,
duration=50)
| from PIL import Image
from PIL import ImageDraw
def render(animation, out, scale=8):
images = [render_frame(colors, scale=scale) for colors in animation]
save_gif(out, *images)
def render_frame(colors, scale=8):
led_count = 53
size = (led_count * scale, scale)
im = Image.new("RGB", size, "black")
d = ImageDraw.Draw(im)
for idx, color in enumerate(colors):
color = tuple(map(int, color))
x0 = scale * idx
y0 = 0
x1 = scale * (idx + 1)
y1 = scale
d.rectangle((x0, y0, x1, y1), fill=color)
return im
def save_gif(out, image, *more_images):
delay_ms = 1000 * 0.035
image.save(out, save_all=True,
append_images=list(more_images),
duration=delay_ms, optimize=True)
| Optimize GIF palette (too many colors right now), better GIF timing options. | Optimize GIF palette (too many colors right now), better GIF timing options.
| Python | apache-2.0 | gnoack/aesthetic | ---
+++
@@ -22,7 +22,7 @@
return im
def save_gif(out, image, *more_images):
+ delay_ms = 1000 * 0.035
image.save(out, save_all=True,
append_images=list(more_images),
- loop=1000,
- duration=50)
+ duration=delay_ms, optimize=True) |
612810cd1acbffe925a74e005e766b09349d2606 | src/nodemgr/database_nodemgr/common.py | src/nodemgr/database_nodemgr/common.py | #
# Copyright (c) 2016 Juniper Networks, Inc. All rights reserved.
#
import subprocess
class CassandraManager(object):
def __init__(self, cassandra_repair_logdir):
self.cassandra_repair_logdir = cassandra_repair_logdir
def status(self):
subprocess.Popen(["contrail-cassandra-status",
"--log-file", "/var/log/cassandra/status.log",
"--debug"])
def repair(self):
logdir = self.cassandra_repair_logdir + "repair.log"
subprocess.Popen(["contrail-cassandra-repair",
"--log-file", logdir,
"--debug"])
| #
# Copyright (c) 2016 Juniper Networks, Inc. All rights reserved.
#
import subprocess
class CassandraManager(object):
def __init__(self, cassandra_repair_logdir):
self.cassandra_repair_logdir = cassandra_repair_logdir
def status(self):
subprocess.Popen(["contrail-cassandra-status",
"--log-file", "/var/log/cassandra/status.log",
"--debug"], close_fds=True)
def repair(self):
logdir = self.cassandra_repair_logdir + "repair.log"
subprocess.Popen(["contrail-cassandra-repair",
"--log-file", logdir,
"--debug"], close_fds=True)
| Fix issue with config-nodemgr and cassandra-repair listening on same port | Fix issue with config-nodemgr and cassandra-repair listening on same port
contrail-config-nodemgr spawns contrail-cassandra-repair using
subprocess.Popen and thus contrail-cassandra-repair inherits all
the fds including the listening fd. Then when contrail-config-nodemgr
is restarted/killed, contrail-cassandra-repair will still have the
listening fd and hence contrail-config-nodemgr will not be able to
come up and listen on the same port. Fix is to use - close_fds=True
in the Popen call since the parent and child don't really need to
communicate or share anything using fds and this will close all the
fds shared from parent in the child.
Change-Id: Icef9f981c6447dd013cebeab727f49c5e2cf74f8
Closes-Bug: #1643466
| Python | apache-2.0 | eonpatapon/contrail-controller,rombie/contrail-controller,rombie/contrail-controller,rombie/contrail-controller,nischalsheth/contrail-controller,eonpatapon/contrail-controller,eonpatapon/contrail-controller,rombie/contrail-controller,nischalsheth/contrail-controller,eonpatapon/contrail-controller,nischalsheth/contrail-controller,nischalsheth/contrail-controller,eonpatapon/contrail-controller,nischalsheth/contrail-controller,nischalsheth/contrail-controller,nischalsheth/contrail-controller,eonpatapon/contrail-controller,rombie/contrail-controller,rombie/contrail-controller,rombie/contrail-controller,nischalsheth/contrail-controller,eonpatapon/contrail-controller | ---
+++
@@ -12,10 +12,10 @@
def status(self):
subprocess.Popen(["contrail-cassandra-status",
"--log-file", "/var/log/cassandra/status.log",
- "--debug"])
+ "--debug"], close_fds=True)
def repair(self):
logdir = self.cassandra_repair_logdir + "repair.log"
subprocess.Popen(["contrail-cassandra-repair",
"--log-file", logdir,
- "--debug"])
+ "--debug"], close_fds=True) |
cb0c7ba021a3896e7ad726178bc686775829de34 | appengine/components/components/machine_provider/utils.py | appengine/components/components/machine_provider/utils.py | # Copyright 2015 The Swarming Authors. All rights reserved.
# Use of this source code is governed by the Apache v2.0 license that can be
# found in the LICENSE file.
"""Helper functions for working with the Machine Provider."""
import logging
from components import net
from components import utils
MACHINE_PROVIDER_API_URL = 'https://machine-provider.appspot.com/_ah/api'
CATALOG_BASE_URL = '%s/catalog/v1' % MACHINE_PROVIDER_API_URL
MACHINE_PROVIDER_BASE_URL = '%s/machine_provider/v1' % MACHINE_PROVIDER_API_URL
MACHINE_PROVIDER_SCOPES = (
'https://www.googleapis.com/auth/userinfo.email',
)
def add_machines(requests):
"""Add machines to the Machine Provider's Catalog.
Args:
requests: A list of rpc_messages.CatalogMachineAdditionRequest instances.
"""
logging.info('Sending batched add_machines request')
return net.json_request(
'%s/add_machines' % CATALOG_BASE_URL,
method='POST',
payload=utils.to_json_encodable({'requests': requests}),
scopes=MACHINE_PROVIDER_SCOPES,
)
| # Copyright 2015 The Swarming Authors. All rights reserved.
# Use of this source code is governed by the Apache v2.0 license that can be
# found in the LICENSE file.
"""Helper functions for working with the Machine Provider."""
import logging
from google.appengine.ext import ndb
from components import net
from components import utils
from components.datastore_utils import config
MACHINE_PROVIDER_SCOPES = (
'https://www.googleapis.com/auth/userinfo.email',
)
class MachineProviderConfiguration(config.GlobalConfig):
"""Configuration for talking to the Machine Provider."""
# URL of the Machine Provider instance to use.
instance_url = ndb.StringProperty(required=True)
@classmethod
def get_instance_url(cls):
"""Returns the URL of the Machine Provider instance."""
return cls.cached().instance_url
def set_defaults(self):
"""Sets default values used to initialize the config."""
self.instance_url = 'https://machine-provider.appspot.com'
def add_machines(requests):
"""Add machines to the Machine Provider's Catalog.
Args:
requests: A list of rpc_messages.CatalogMachineAdditionRequest instances.
"""
logging.info('Sending batched add_machines request')
return net.json_request(
'%s/_ah/api/catalog/v1/add_machines' %
MachineProviderConfiguration.get_instance_url(),
method='POST',
payload=utils.to_json_encodable({'requests': requests}),
scopes=MACHINE_PROVIDER_SCOPES,
)
| Allow users of the Machine Provider to specify the dev instance for API calls | Allow users of the Machine Provider to specify the dev instance for API calls
BUG=489837
Review URL: https://codereview.chromium.org/1572793002
| Python | apache-2.0 | luci/luci-py,luci/luci-py,luci/luci-py,luci/luci-py | ---
+++
@@ -6,17 +6,31 @@
import logging
+from google.appengine.ext import ndb
+
from components import net
from components import utils
+from components.datastore_utils import config
-MACHINE_PROVIDER_API_URL = 'https://machine-provider.appspot.com/_ah/api'
-
-CATALOG_BASE_URL = '%s/catalog/v1' % MACHINE_PROVIDER_API_URL
-MACHINE_PROVIDER_BASE_URL = '%s/machine_provider/v1' % MACHINE_PROVIDER_API_URL
MACHINE_PROVIDER_SCOPES = (
'https://www.googleapis.com/auth/userinfo.email',
)
+
+
+class MachineProviderConfiguration(config.GlobalConfig):
+ """Configuration for talking to the Machine Provider."""
+ # URL of the Machine Provider instance to use.
+ instance_url = ndb.StringProperty(required=True)
+
+ @classmethod
+ def get_instance_url(cls):
+ """Returns the URL of the Machine Provider instance."""
+ return cls.cached().instance_url
+
+ def set_defaults(self):
+ """Sets default values used to initialize the config."""
+ self.instance_url = 'https://machine-provider.appspot.com'
def add_machines(requests):
@@ -27,8 +41,9 @@
"""
logging.info('Sending batched add_machines request')
return net.json_request(
- '%s/add_machines' % CATALOG_BASE_URL,
+ '%s/_ah/api/catalog/v1/add_machines' %
+ MachineProviderConfiguration.get_instance_url(),
method='POST',
payload=utils.to_json_encodable({'requests': requests}),
scopes=MACHINE_PROVIDER_SCOPES,
- )
+ ) |
4fe55df3bb668a2eafdb65a3a31ad27ffa5dc3c2 | pytable.py | pytable.py | # -*- coding: utf-8 -*-
from __future__ import print_function
from operator import itemgetter
import monoidal_tables as mt
from monoidal_tables import renderers
if __name__ == '__main__':
table = (mt.integer('X', itemgetter('x')) +
mt.integer('Y', itemgetter('y')) +
mt.align_center(mt.column('Name', itemgetter('name'))))
data = [
{'x': 0, 'y': 0, 'name': 'Origin'},
{'x': 5, 'y': 5, 'name': 'Diagonal'},
{'x': 12, 'y': 8, 'name': 'Up'},
]
table.render(data, renderer=renderers.FancyRenderer)
| # -*- coding: utf-8 -*-
from __future__ import print_function
from operator import itemgetter
import monoidal_tables as mt
from monoidal_tables import renderers
if __name__ == '__main__':
table = (mt.integer('X', itemgetter('x')) +
mt.set_class(mt.integer('Y', itemgetter('y')), 'col-y') +
mt.align_center(mt.column('Name', itemgetter('name'))))
data = [
{'x': 0, 'y': 0, 'name': 'Origin'},
{'x': 5, 'y': 5, 'name': 'Diagonal'},
{'x': 12, 'y': 8, 'name': 'Up'},
]
table.render(data, renderer=renderers.FancyRenderer)
| Update example to show HTML class | Update example to show HTML class
| Python | bsd-3-clause | lubomir/monoidal-tables | ---
+++
@@ -10,7 +10,7 @@
if __name__ == '__main__':
table = (mt.integer('X', itemgetter('x')) +
- mt.integer('Y', itemgetter('y')) +
+ mt.set_class(mt.integer('Y', itemgetter('y')), 'col-y') +
mt.align_center(mt.column('Name', itemgetter('name'))))
data = [
{'x': 0, 'y': 0, 'name': 'Origin'}, |
85feafe002dfdce67cc4b29125f656e55867d088 | telegrambot/bot_views/generic/base.py | telegrambot/bot_views/generic/base.py | from telegrambot.bot_views.generic.responses import TextResponse, KeyboardResponse
from telegram import ParseMode
import sys
import traceback
import logging
logger = logging.getLogger(__name__)
class TemplateCommandView(object):
template_text = None
template_keyboard = None
def get_context(self, bot, update, **kwargs):
return None
def handle(self, bot, update, **kwargs):
try:
ctx = self.get_context(bot, update, **kwargs)
text = TextResponse(self.template_text, ctx).render()
keyboard = KeyboardResponse(self.template_keyboard, ctx).render()
# logger.debug("Text:" + str(text.encode('utf-8')))
# logger.debug("Keyboard:" + str(keyboard))
if text:
bot.send_message(chat_id=update.message.chat_id, text=text.encode('utf-8'), reply_markup=keyboard, parse_mode=ParseMode.MARKDOWN)
else:
logger.info("No text response for update %s" % str(update))
except:
exc_info = sys.exc_info()
traceback.print_exception(*exc_info)
raise
@classmethod
def as_command_view(cls, **initkwargs):
def view(bot, update, **kwargs):
self = cls(**initkwargs)
return self.handle(bot, update, **kwargs)
return view | from telegrambot.bot_views.generic.responses import TextResponse, KeyboardResponse
from telegram import ParseMode
import sys
import traceback
import logging
logger = logging.getLogger(__name__)
PY3 = sys.version_info > (3,)
class TemplateCommandView(object):
template_text = None
template_keyboard = None
def get_context(self, bot, update, **kwargs):
return None
def handle(self, bot, update, **kwargs):
try:
ctx = self.get_context(bot, update, **kwargs)
text = TextResponse(self.template_text, ctx).render()
keyboard = KeyboardResponse(self.template_keyboard, ctx).render()
# logger.debug("Text:" + str(text.encode('utf-8')))
# logger.debug("Keyboard:" + str(keyboard))
if text:
if not PY3:
text = text.encode('utf-8')
bot.send_message(chat_id=update.message.chat_id, text=text, reply_markup=keyboard, parse_mode=ParseMode.MARKDOWN)
else:
logger.info("No text response for update %s" % str(update))
except:
exc_info = sys.exc_info()
traceback.print_exception(*exc_info)
raise
@classmethod
def as_command_view(cls, **initkwargs):
def view(bot, update, **kwargs):
self = cls(**initkwargs)
return self.handle(bot, update, **kwargs)
return view
| Fix encoding bug in TemplateCommandView | Fix encoding bug in TemplateCommandView
| Python | bsd-3-clause | jlmadurga/django-telegram-bot,jlmadurga/django-telegram-bot | ---
+++
@@ -5,7 +5,7 @@
import logging
logger = logging.getLogger(__name__)
-
+PY3 = sys.version_info > (3,)
class TemplateCommandView(object):
template_text = None
@@ -22,7 +22,9 @@
# logger.debug("Text:" + str(text.encode('utf-8')))
# logger.debug("Keyboard:" + str(keyboard))
if text:
- bot.send_message(chat_id=update.message.chat_id, text=text.encode('utf-8'), reply_markup=keyboard, parse_mode=ParseMode.MARKDOWN)
+ if not PY3:
+ text = text.encode('utf-8')
+ bot.send_message(chat_id=update.message.chat_id, text=text, reply_markup=keyboard, parse_mode=ParseMode.MARKDOWN)
else:
logger.info("No text response for update %s" % str(update))
except: |
444e1951950e77f2b0e35d2921026bcadff6881b | backend/breach/forms.py | backend/breach/forms.py | from django.forms import ModelForm
from breach.models import Target
class TargetForm(ModelForm):
class Meta:
model = Target
fields = (
'name',
'endpoint',
'prefix',
'alphabet',
'secretlength',
'alignmentalphabet',
'recordscardinality',
'method'
)
| from django.forms import ModelForm
from breach.models import Target, Victim
class TargetForm(ModelForm):
class Meta:
model = Target
fields = (
'name',
'endpoint',
'prefix',
'alphabet',
'secretlength',
'alignmentalphabet',
'recordscardinality',
'method'
)
class VictimForm(ModelForm):
class Meta:
model = Victim
fields = (
'sourceip',
)
| Add form validation for victim | Add form validation for victim
| Python | mit | dionyziz/rupture,dimriou/rupture,dimkarakostas/rupture,esarafianou/rupture,dionyziz/rupture,dionyziz/rupture,dimkarakostas/rupture,dimkarakostas/rupture,dimriou/rupture,dimkarakostas/rupture,dionyziz/rupture,dimriou/rupture,dimriou/rupture,esarafianou/rupture,dionyziz/rupture,dimkarakostas/rupture,dimriou/rupture,esarafianou/rupture,esarafianou/rupture | ---
+++
@@ -1,5 +1,5 @@
from django.forms import ModelForm
-from breach.models import Target
+from breach.models import Target, Victim
class TargetForm(ModelForm):
@@ -15,3 +15,11 @@
'recordscardinality',
'method'
)
+
+
+class VictimForm(ModelForm):
+ class Meta:
+ model = Victim
+ fields = (
+ 'sourceip',
+ ) |
fe2ce4e21530daffacbd654790a161019dd2de83 | backend/breach/forms.py | backend/breach/forms.py | from django.forms import ModelForm
from breach.models import Target, Victim
class TargetForm(ModelForm):
class Meta:
model = Target
fields = (
'name',
'endpoint',
'prefix',
'alphabet',
'secretlength',
'alignmentalphabet',
'recordscardinality',
'method'
)
class VictimForm(ModelForm):
class Meta:
model = Victim
fields = (
'sourceip',
)
| from django.forms import ModelForm
from breach.models import Target, Victim
class TargetForm(ModelForm):
class Meta:
model = Target
fields = (
'name',
'endpoint',
'prefix',
'alphabet',
'secretlength',
'alignmentalphabet',
'recordscardinality',
'method'
)
class VictimForm(ModelForm):
class Meta:
model = Victim
fields = (
'sourceip',
)
class AttackForm(ModelForm):
class Meta:
model = Victim
fields = (
'id',
)
| Add form for /attack with victim id | Add form for /attack with victim id
| Python | mit | dimriou/rupture,dionyziz/rupture,esarafianou/rupture,dimkarakostas/rupture,esarafianou/rupture,dionyziz/rupture,esarafianou/rupture,dimriou/rupture,dimkarakostas/rupture,dionyziz/rupture,dimkarakostas/rupture,dimriou/rupture,esarafianou/rupture,dimkarakostas/rupture,dimriou/rupture,dionyziz/rupture,dionyziz/rupture,dimriou/rupture,dimkarakostas/rupture | ---
+++
@@ -23,3 +23,11 @@
fields = (
'sourceip',
)
+
+
+class AttackForm(ModelForm):
+ class Meta:
+ model = Victim
+ fields = (
+ 'id',
+ ) |
d4398d068d4fdf6364869cd01237f53438e2674c | blinkylib/blinkytape.py | blinkylib/blinkytape.py | import blinkycolor
import serial
class BlinkyTape(object):
def __init__(self, port, baud_rate = 115200, pixel_count = 60):
self._serial = serial.Serial(port, baud_rate)
self._pixel_count = pixel_count
self._pixels = [blinkycolor.BLACK] * self._pixel_count
@property
def pixel_count(self):
return self._pixel_count
def set_pixel(self, index, color):
if index >= self._pixel_count: raise IndexError
self._pixels[index] = color
def set_pixels(self, pixels):
if len(pixels) != self._pixel_count: raise ValueError
self._pixels = pixels
def update(self):
UPDATE_VALUE = [0, 0, 255]
for pixel in self._pixels:
self._serial.write(pixel.raw)
self._serial.write(UPDATE_VALUE)
self._serial.flush()
| import blinkycolor
import serial
class BlinkyTape(object):
def __init__(self, port, baud_rate = 115200, pixel_count = 60):
self._serial = serial.Serial(port, baud_rate)
self._pixel_count = pixel_count
self._pixels = [blinkycolor.BLACK] * self._pixel_count
@property
def pixel_count(self):
return self._pixel_count
def set_pixel(self, index, color):
if index >= self._pixel_count: raise IndexError
self._pixels[index] = color
def set_pixels(self, pixels):
if len(pixels) != self._pixel_count: raise ValueError
self._pixels = pixels
def update(self):
UPDATE_VALUE = [0, 0, 255]
for pixel in self._pixels:
self._serial.write(pixel.raw)
self._serial.write(UPDATE_VALUE)
self._serial.flush()
self._serial.flushInput()
| Fix the slow-mo gradient bug by flushing BlinkyTape response on updates | Fix the slow-mo gradient bug by flushing BlinkyTape response on updates
| Python | mit | jonspeicher/blinkyfun | ---
+++
@@ -25,3 +25,4 @@
self._serial.write(pixel.raw)
self._serial.write(UPDATE_VALUE)
self._serial.flush()
+ self._serial.flushInput() |
5723dbbf2dbebf349c61a00ee4ea665b4009bd18 | spur/io.py | spur/io.py | import threading
class IoHandler(object):
def __init__(self, in_out_pairs, read_all):
self._handlers = [
OutputHandler(file_in, file_out)
for file_in, file_out
in in_out_pairs
]
self._read_all = read_all
def wait(self):
handler_result = [handler.wait() for handler in self._handlers]
read_all_result = self._read_all()
return [
handler_output or read_all_output
for handler_output, read_all_output
in zip(handler_result, read_all_result)
]
class OutputHandler(object):
def __init__(self, stdout_in, stdout_out):
self._stdout_in = stdout_in
self._stdout_out = stdout_out
self._output = []
if stdout_out:
self._stdout_thread = threading.Thread(target=self._capture_stdout)
self._stdout_thread.daemon = True
self._stdout_thread.start()
else:
self._stdout_thread = None
def wait(self):
if self._stdout_thread:
self._stdout_thread.join()
return "".join(self._output)
def _capture_stdout(self):
while True:
output = self._stdout_in.read(1)
if output:
self._stdout_out.write(output)
self._output.append(output)
else:
return
| import threading
class IoHandler(object):
def __init__(self, in_out_pairs, read_all):
self._handlers = [
OutputHandler(file_in, file_out)
for file_in, file_out
in in_out_pairs
]
self._read_all = read_all
def wait(self):
handler_result = [handler.wait() for handler in self._handlers]
read_all_result = self._read_all()
return [
handler_output or read_all_output
for handler_output, read_all_output
in zip(handler_result, read_all_result)
]
class OutputHandler(object):
def __init__(self, file_in, file_out):
self._file_in = file_in
self._file_out = file_out
self._output = []
if file_out:
self._thread = threading.Thread(target=self._capture_output)
self._thread.daemon = True
self._thread.start()
else:
self._thread = None
def wait(self):
if self._thread:
self._thread.join()
return "".join(self._output)
def _capture_output (self):
while True:
output = self._file_in.read(1)
if output:
self._file_out.write(output)
self._output.append(output)
else:
return
| Remove references to stdout in OutputHandler | Remove references to stdout in OutputHandler
| Python | bsd-2-clause | mwilliamson/spur.py | ---
+++
@@ -21,28 +21,28 @@
class OutputHandler(object):
- def __init__(self, stdout_in, stdout_out):
- self._stdout_in = stdout_in
- self._stdout_out = stdout_out
+ def __init__(self, file_in, file_out):
+ self._file_in = file_in
+ self._file_out = file_out
self._output = []
- if stdout_out:
- self._stdout_thread = threading.Thread(target=self._capture_stdout)
- self._stdout_thread.daemon = True
- self._stdout_thread.start()
+ if file_out:
+ self._thread = threading.Thread(target=self._capture_output)
+ self._thread.daemon = True
+ self._thread.start()
else:
- self._stdout_thread = None
+ self._thread = None
def wait(self):
- if self._stdout_thread:
- self._stdout_thread.join()
+ if self._thread:
+ self._thread.join()
return "".join(self._output)
- def _capture_stdout(self):
+ def _capture_output (self):
while True:
- output = self._stdout_in.read(1)
+ output = self._file_in.read(1)
if output:
- self._stdout_out.write(output)
+ self._file_out.write(output)
self._output.append(output)
else:
return |
800ffecbed76f306806642546ed949153c8414c3 | astropy/vo/samp/tests/test_hub_proxy.py | astropy/vo/samp/tests/test_hub_proxy.py | from ..hub_proxy import SAMPHubProxy
from ..hub import SAMPHubServer
from ..client import SAMPClient
class TestHubProxy(object):
def setup_method(self, method):
self.hub = SAMPHubServer(web_profile=False)
self.hub.start()
self.proxy = SAMPHubProxy()
self.proxy.connect()
def teardown_method(self, method):
if self.proxy.is_connected:
self.proxy.disconnect()
self.hub.stop()
def test_is_connected(self):
assert self.proxy.is_connected
def test_disconnect(self):
self.proxy.disconnect()
def test_get_running_hubs(self):
SAMPHubProxy.get_running_hubs()
def test_ping(self):
self.proxy.ping()
def test_registration(self):
result = self.proxy.register(self.proxy.lockfile["samp.secret"])
self.proxy.unregister(result['samp.private-key'])
| import os
import tempfile
from ..hub_proxy import SAMPHubProxy
from ..hub import SAMPHubServer
from ..client import SAMPClient
class TestHubProxy(object):
def setup_method(self, method):
fileobj, self.lockfile = tempfile.mkstemp()
self.hub = SAMPHubServer(web_profile=False,
lockfile=self.lockfile)
self.hub.start()
os.environ['SAMP_HUB'] = "std-lockurl:file://" + os.path.abspath(self.lockfile)
self.proxy = SAMPHubProxy()
self.proxy.connect()
def teardown_method(self, method):
del os.environ['SAMP_HUB'] # hacky
if self.proxy.is_connected:
self.proxy.disconnect()
self.hub.stop()
if os.path.exists(self.lockfile):
os.remove(self.lockfile)
def test_is_connected(self):
assert self.proxy.is_connected
def test_disconnect(self):
self.proxy.disconnect()
def test_get_running_hubs(self):
SAMPHubProxy.get_running_hubs()
def test_ping(self):
self.proxy.ping()
def test_registration(self):
result = self.proxy.register(self.proxy.lockfile["samp.secret"])
self.proxy.unregister(result['samp.private-key'])
| Use temporary SAMP lock file | Use temporary SAMP lock file
| Python | bsd-3-clause | saimn/astropy,DougBurke/astropy,joergdietrich/astropy,AustereCuriosity/astropy,lpsinger/astropy,lpsinger/astropy,tbabej/astropy,kelle/astropy,tbabej/astropy,mhvk/astropy,kelle/astropy,dhomeier/astropy,larrybradley/astropy,DougBurke/astropy,larrybradley/astropy,joergdietrich/astropy,dhomeier/astropy,stargaser/astropy,astropy/astropy,StuartLittlefair/astropy,larrybradley/astropy,tbabej/astropy,aleksandr-bakanov/astropy,kelle/astropy,astropy/astropy,AustereCuriosity/astropy,funbaker/astropy,stargaser/astropy,pllim/astropy,funbaker/astropy,funbaker/astropy,pllim/astropy,lpsinger/astropy,mhvk/astropy,astropy/astropy,bsipocz/astropy,bsipocz/astropy,StuartLittlefair/astropy,dhomeier/astropy,MSeifert04/astropy,MSeifert04/astropy,DougBurke/astropy,aleksandr-bakanov/astropy,mhvk/astropy,mhvk/astropy,kelle/astropy,kelle/astropy,saimn/astropy,dhomeier/astropy,AustereCuriosity/astropy,tbabej/astropy,stargaser/astropy,saimn/astropy,StuartLittlefair/astropy,larrybradley/astropy,joergdietrich/astropy,pllim/astropy,aleksandr-bakanov/astropy,dhomeier/astropy,AustereCuriosity/astropy,MSeifert04/astropy,aleksandr-bakanov/astropy,joergdietrich/astropy,pllim/astropy,saimn/astropy,pllim/astropy,DougBurke/astropy,saimn/astropy,lpsinger/astropy,larrybradley/astropy,lpsinger/astropy,funbaker/astropy,AustereCuriosity/astropy,mhvk/astropy,bsipocz/astropy,MSeifert04/astropy,tbabej/astropy,bsipocz/astropy,joergdietrich/astropy,StuartLittlefair/astropy,StuartLittlefair/astropy,astropy/astropy,stargaser/astropy,astropy/astropy | ---
+++
@@ -1,3 +1,6 @@
+import os
+import tempfile
+
from ..hub_proxy import SAMPHubProxy
from ..hub import SAMPHubServer
from ..client import SAMPClient
@@ -7,16 +10,28 @@
def setup_method(self, method):
- self.hub = SAMPHubServer(web_profile=False)
+ fileobj, self.lockfile = tempfile.mkstemp()
+
+ self.hub = SAMPHubServer(web_profile=False,
+ lockfile=self.lockfile)
self.hub.start()
+
+ os.environ['SAMP_HUB'] = "std-lockurl:file://" + os.path.abspath(self.lockfile)
self.proxy = SAMPHubProxy()
self.proxy.connect()
def teardown_method(self, method):
+
+ del os.environ['SAMP_HUB'] # hacky
+
if self.proxy.is_connected:
self.proxy.disconnect()
+
self.hub.stop()
+
+ if os.path.exists(self.lockfile):
+ os.remove(self.lockfile)
def test_is_connected(self):
assert self.proxy.is_connected |
9a5aee262b5a89e5a22e9e1390e23898a5373627 | byceps/util/jobqueue.py | byceps/util/jobqueue.py | """
byceps.util.jobqueue
~~~~~~~~~~~~~~~~~~~~
An asynchronously processed job queue based on Redis_ and RQ_.
.. _Redis: http://redis.io/
.. _RQ: http://python-rq.org/
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from contextlib import contextmanager
from rq import Connection, Queue
from byceps.redis import redis
@contextmanager
def connection():
with Connection(redis.client):
yield
def get_queue(app):
is_async = app.config['JOBS_ASYNC']
return Queue(is_async=is_async)
def enqueue(*args, **kwargs):
"""Add the function call to the queue as a job."""
with connection():
queue = get_queue()
queue.enqueue(*args, **kwargs)
| """
byceps.util.jobqueue
~~~~~~~~~~~~~~~~~~~~
An asynchronously processed job queue based on Redis_ and RQ_.
.. _Redis: http://redis.io/
.. _RQ: http://python-rq.org/
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from contextlib import contextmanager
from flask import current_app
from rq import Connection, Queue
from byceps.redis import redis
@contextmanager
def connection():
with Connection(redis.client):
yield
def get_queue(app):
is_async = app.config['JOBS_ASYNC']
return Queue(is_async=is_async)
def enqueue(*args, **kwargs):
"""Add the function call to the queue as a job."""
with connection():
queue = get_queue(current_app)
queue.enqueue(*args, **kwargs)
| Fix `get_queue` call in `enqueue` | Fix `get_queue` call in `enqueue`
| Python | bsd-3-clause | homeworkprod/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps | ---
+++
@@ -13,6 +13,7 @@
from contextlib import contextmanager
+from flask import current_app
from rq import Connection, Queue
from byceps.redis import redis
@@ -32,5 +33,5 @@
def enqueue(*args, **kwargs):
"""Add the function call to the queue as a job."""
with connection():
- queue = get_queue()
+ queue = get_queue(current_app)
queue.enqueue(*args, **kwargs) |
41139b20b78550982ee8242c18e24ad81e2d13ae | api/caching/tasks.py | api/caching/tasks.py | import urlparse
import celery
import requests
from celery.utils.log import get_task_logger
from django.conf import settings
from framework.tasks import app as celery_app
from framework.tasks.utils import logged
logger = get_task_logger(__name__)
class VarnishTask(celery.Task):
abstract = True
max_retries = 5
def get_varnish_servers():
# TODO: this should get the varnish servers from HAProxy or a setting
return settings.VARNISH_SERVERS
@celery_app.task(base=VarnishTask, name='caching_tasks.ban_url')
# @logged('ban_url')
def ban_url(url):
if settings.ENABLE_VARNISH:
parsed_url = urlparse.urlparse(url)
for host in get_varnish_servers():
varnish_parsed_url = urlparse.urlparse(host)
ban_url = '{scheme}://{netloc}{path}.*'.format(
scheme=varnish_parsed_url.scheme,
netloc=varnish_parsed_url.netloc,
path=parsed_url.path
)
response = requests.request('BAN', ban_url, headers=dict(
Host=parsed_url.hostname
))
if not response.ok:
logger.error('Banning {} failed with message {}'.format(
url,
response.text
))
| import urlparse
import celery
import requests
from celery.utils.log import get_task_logger
from django.conf import settings
from framework.tasks import app as celery_app
logger = get_task_logger(__name__)
class VarnishTask(celery.Task):
abstract = True
max_retries = 5
def get_varnish_servers():
# TODO: this should get the varnish servers from HAProxy or a setting
return settings.VARNISH_SERVERS
@celery_app.task(base=VarnishTask, name='caching_tasks.ban_url')
# @logged('ban_url')
def ban_url(url):
if settings.ENABLE_VARNISH:
parsed_url = urlparse.urlparse(url)
for host in get_varnish_servers():
varnish_parsed_url = urlparse.urlparse(host)
ban_url = '{scheme}://{netloc}{path}.*'.format(
scheme=varnish_parsed_url.scheme,
netloc=varnish_parsed_url.netloc,
path=parsed_url.path
)
response = requests.request('BAN', ban_url, headers=dict(
Host=parsed_url.hostname
))
if not response.ok:
logger.error('Banning {} failed with message {}'.format(
url,
response.text
))
| Remove unused import because Travis is picky | Remove unused import because Travis is picky
| Python | apache-2.0 | emetsger/osf.io,Nesiehr/osf.io,wearpants/osf.io,amyshi188/osf.io,caseyrollins/osf.io,GageGaskins/osf.io,Nesiehr/osf.io,caneruguz/osf.io,brianjgeiger/osf.io,jnayak1/osf.io,CenterForOpenScience/osf.io,abought/osf.io,amyshi188/osf.io,CenterForOpenScience/osf.io,samchrisinger/osf.io,adlius/osf.io,sloria/osf.io,brandonPurvis/osf.io,felliott/osf.io,baylee-d/osf.io,monikagrabowska/osf.io,wearpants/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,samchrisinger/osf.io,asanfilippo7/osf.io,pattisdr/osf.io,felliott/osf.io,asanfilippo7/osf.io,emetsger/osf.io,chrisseto/osf.io,DanielSBrown/osf.io,alexschiller/osf.io,asanfilippo7/osf.io,zamattiac/osf.io,saradbowman/osf.io,cwisecarver/osf.io,zamattiac/osf.io,TomBaxter/osf.io,binoculars/osf.io,SSJohns/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,acshi/osf.io,kwierman/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,GageGaskins/osf.io,chrisseto/osf.io,HalcyonChimera/osf.io,pattisdr/osf.io,GageGaskins/osf.io,erinspace/osf.io,mfraezz/osf.io,TomBaxter/osf.io,kwierman/osf.io,icereval/osf.io,RomanZWang/osf.io,chennan47/osf.io,abought/osf.io,billyhunt/osf.io,DanielSBrown/osf.io,DanielSBrown/osf.io,caneruguz/osf.io,hmoco/osf.io,brandonPurvis/osf.io,monikagrabowska/osf.io,billyhunt/osf.io,TomHeatwole/osf.io,SSJohns/osf.io,pattisdr/osf.io,brandonPurvis/osf.io,zachjanicki/osf.io,crcresearch/osf.io,samchrisinger/osf.io,aaxelb/osf.io,mluo613/osf.io,cwisecarver/osf.io,TomHeatwole/osf.io,mluo613/osf.io,Nesiehr/osf.io,RomanZWang/osf.io,TomHeatwole/osf.io,zachjanicki/osf.io,GageGaskins/osf.io,amyshi188/osf.io,cwisecarver/osf.io,chennan47/osf.io,mluke93/osf.io,alexschiller/osf.io,zachjanicki/osf.io,billyhunt/osf.io,acshi/osf.io,binoculars/osf.io,icereval/osf.io,monikagrabowska/osf.io,brianjgeiger/osf.io,asanfilippo7/osf.io,brandonPurvis/osf.io,Johnetordoff/osf.io,jnayak1/osf.io,adlius/osf.io,mattclark/osf.io,caneruguz/osf.io,jnayak1/osf.io,kwierman/osf.io,brianjgeiger/osf.io,amyshi188/osf.io,caneruguz/osf.io,le
b2dg/osf.io,TomHeatwole/osf.io,laurenrevere/osf.io,Johnetordoff/osf.io,mluo613/osf.io,cslzchen/osf.io,felliott/osf.io,aaxelb/osf.io,KAsante95/osf.io,KAsante95/osf.io,leb2dg/osf.io,doublebits/osf.io,samchrisinger/osf.io,rdhyee/osf.io,wearpants/osf.io,erinspace/osf.io,HalcyonChimera/osf.io,hmoco/osf.io,rdhyee/osf.io,rdhyee/osf.io,alexschiller/osf.io,emetsger/osf.io,icereval/osf.io,doublebits/osf.io,monikagrabowska/osf.io,mluo613/osf.io,chennan47/osf.io,SSJohns/osf.io,saradbowman/osf.io,rdhyee/osf.io,mluke93/osf.io,jnayak1/osf.io,cslzchen/osf.io,cwisecarver/osf.io,acshi/osf.io,aaxelb/osf.io,TomBaxter/osf.io,mattclark/osf.io,abought/osf.io,kch8qx/osf.io,Johnetordoff/osf.io,leb2dg/osf.io,KAsante95/osf.io,brianjgeiger/osf.io,kch8qx/osf.io,kwierman/osf.io,acshi/osf.io,HalcyonChimera/osf.io,RomanZWang/osf.io,zachjanicki/osf.io,billyhunt/osf.io,hmoco/osf.io,KAsante95/osf.io,caseyrollins/osf.io,alexschiller/osf.io,mluke93/osf.io,caseyrollins/osf.io,chrisseto/osf.io,emetsger/osf.io,RomanZWang/osf.io,kch8qx/osf.io,felliott/osf.io,binoculars/osf.io,mattclark/osf.io,sloria/osf.io,Nesiehr/osf.io,wearpants/osf.io,baylee-d/osf.io,billyhunt/osf.io,erinspace/osf.io,KAsante95/osf.io,HalcyonChimera/osf.io,abought/osf.io,RomanZWang/osf.io,kch8qx/osf.io,mfraezz/osf.io,cslzchen/osf.io,SSJohns/osf.io,baylee-d/osf.io,DanielSBrown/osf.io,alexschiller/osf.io,monikagrabowska/osf.io,doublebits/osf.io,kch8qx/osf.io,zamattiac/osf.io,doublebits/osf.io,doublebits/osf.io,aaxelb/osf.io,hmoco/osf.io,crcresearch/osf.io,acshi/osf.io,GageGaskins/osf.io,crcresearch/osf.io,mluke93/osf.io,mfraezz/osf.io,cslzchen/osf.io,leb2dg/osf.io,brandonPurvis/osf.io,mluo613/osf.io,laurenrevere/osf.io,chrisseto/osf.io,zamattiac/osf.io,sloria/osf.io | ---
+++
@@ -6,7 +6,6 @@
from django.conf import settings
from framework.tasks import app as celery_app
-from framework.tasks.utils import logged
logger = get_task_logger(__name__)
|
4a125d2455e1c31043c66835c60cc0e55f9990e9 | core/network.py | core/network.py | import codecs
from string import Template
import os
import networkx as nx
from networkx.readwrite import json_graph
path = os.path.dirname(os.path.abspath(__file__))
def create_network(data):
G = nx.DiGraph()
for node in data:
G.add_node( encode_utf8( node['creator'] ) )
if '___comments' in node:
for comment in node['___comments']:
G.add_edge( encode_utf8( comment['from']['name'] ), encode_utf8( node['creator'] ) )
d = json_graph.node_link_data(G)
html_template = Template( codecs.open( path + '/html/network.html', 'r').read() )
js_template_type = 'svg' if len(d['nodes']) < 500 else 'canvas'
js_text_template = Template( codecs.open( path + '/js/network_' + js_template_type +'.js', 'r').read() )
css_text = codecs.open( path + '/css/network.css', 'r').read()
js_text = js_text_template.substitute({'nodes' : d['nodes'], 'links' : d['links']})
return html_template.substitute( {'css': css_text, 'js': js_text} )
def encode_utf8( string ):
try:
return string.encode('utf8')
except UnicodeDecodeError:
return string
| import codecs
from string import Template
import os
import networkx as nx
from networkx.readwrite import json_graph
path = os.path.dirname(os.path.abspath(__file__))
def create_network(data):
G = nx.DiGraph()
for node in data:
G.add_node( encode_utf8( node['creator'] ) )
if '_comments' in node:
for comment in node['_comments']:
G.add_edge( encode_utf8( comment['from']['name'] ), encode_utf8( node['creator'] ) )
d = json_graph.node_link_data(G)
html_template = Template( codecs.open( path + '/html/network.html', 'r').read() )
js_template_type = 'svg' if len(d['nodes']) < 500 else 'canvas'
js_text_template = Template( codecs.open( path + '/js/network_' + js_template_type +'.js', 'r').read() )
css_text = codecs.open( path + '/css/network.css', 'r').read()
js_text = js_text_template.substitute({'nodes' : d['nodes'], 'links' : d['links']})
return html_template.substitute( {'css': css_text, 'js': js_text} )
def encode_utf8( string ):
try:
return string.encode('utf8')
except UnicodeDecodeError:
return string
| Fix variable naming for comments | Fix variable naming for comments
| Python | mit | HIIT/hybra-core,HIIT/hybra-core,HIIT/hybra-core,HIIT/hybra-core,HIIT/hybra-core | ---
+++
@@ -16,8 +16,8 @@
for node in data:
G.add_node( encode_utf8( node['creator'] ) )
- if '___comments' in node:
- for comment in node['___comments']:
+ if '_comments' in node:
+ for comment in node['_comments']:
G.add_edge( encode_utf8( comment['from']['name'] ), encode_utf8( node['creator'] ) )
d = json_graph.node_link_data(G) |
0bd7c3ff4bbfe6571dbb615c7bd625ab968bfd19 | app/communication.py | app/communication.py | from networktables import NetworkTables
TABLE_NAME = 'ImageProc'
DO_WORK_NAME = 'calculate'
HORIZONTAL_DATA_NAME = 'horizontal'
VERTICAL_DATA_NAME = 'vertical'
class TableManager:
def __init__(self):
self.startup()
self.vision_table = NetworkTables.getTable(TABLE_NAME)
self.do_work = self.vision_table.getBoolean(DO_WORK_NAME, False)
self.vision_table.addTableListener(self.do_work_changed, True, DO_WORK_NAME, False)
def startup(self):
"""Connection and setup of the networktables"""
NetworkTables.initialize(server='10.43.20.2')
def publish_target_data(self, horizontal_distance, horizontal_vector, vertical_distance):
"""
Publish navigation data to target
Publish the data needed to navigate to the target
Parameters
----------
horizontal_distance : int
The horizontal distance to the target
horizontal_vector : string
The side to the target. L for left, R for right
vertical_distance : int
The vertical distance to the target
Returns
-------
void
"""
self.vision_table.putString(HORIZONTAL_DATA_NAME, horizontal_vector + str(horizontal_distance))
self.vision_table.putNumber(VERTICAL_DATA_NAME, vertical_distance)
def is_do_work(self):
"""Return True if the robot requesting calculation for target navigation"""
return self.do_work
def do_work_changed(self, table, key, value, isNew):
"""Handle change in the work request indicator"""
if key == DO_WORK_NAME:
self.do_work = value
| Add table manger to handle and commit changes to the table | Add table manger to handle and commit changes to the table
| Python | mit | codeinvain/object_detection,codeinvain/object_detection | ---
+++
@@ -0,0 +1,49 @@
+from networktables import NetworkTables
+
+TABLE_NAME = 'ImageProc'
+DO_WORK_NAME = 'calculate'
+HORIZONTAL_DATA_NAME = 'horizontal'
+VERTICAL_DATA_NAME = 'vertical'
+
+class TableManager:
+ def __init__(self):
+ self.startup()
+
+ self.vision_table = NetworkTables.getTable(TABLE_NAME)
+ self.do_work = self.vision_table.getBoolean(DO_WORK_NAME, False)
+ self.vision_table.addTableListener(self.do_work_changed, True, DO_WORK_NAME, False)
+
+ def startup(self):
+ """Connection and setup of the networktables"""
+ NetworkTables.initialize(server='10.43.20.2')
+
+ def publish_target_data(self, horizontal_distance, horizontal_vector, vertical_distance):
+ """
+ Publish navigation data to target
+
+ Publish the data needed to navigate to the target
+
+ Parameters
+ ----------
+ horizontal_distance : int
+ The horizontal distance to the target
+ horizontal_vector : string
+ The side to the target. L for left, R for right
+ vertical_distance : int
+ The vertical distance to the target
+
+ Returns
+ -------
+ void
+ """
+ self.vision_table.putString(HORIZONTAL_DATA_NAME, horizontal_vector + str(horizontal_distance))
+ self.vision_table.putNumber(VERTICAL_DATA_NAME, vertical_distance)
+
+ def is_do_work(self):
+ """Return True if the robot requesting calculation for target navigation"""
+ return self.do_work
+
+ def do_work_changed(self, table, key, value, isNew):
+ """Handle change in the work request indicator"""
+ if key == DO_WORK_NAME:
+ self.do_work = value | |
8653f2c0e63fecd5617dfa063878c846ddafcf97 | tests/test_add_language/test_update_language_list.py | tests/test_add_language/test_update_language_list.py | # test_update_language_list
from __future__ import unicode_literals
import json
import os
import os.path
import nose.tools as nose
import yvs.shared as yvs
import utilities.add_language as add_lang
from tests.test_add_language import set_up, tear_down
@nose.with_setup(set_up, tear_down)
def test_update_languge_list_add():
"""should add new languages to language list"""
kln_language_id = 'kln'
kln_language_name = 'Klingon'
add_lang.update_language_list(kln_language_id, kln_language_name)
langs_path = os.path.join(yvs.PACKAGED_DATA_DIR_PATH, 'languages.json')
with open(langs_path, 'r') as langs_file:
langs = json.load(langs_file)
kln_lang = None
for lang in langs:
if lang['id'] == kln_language_id:
kln_lang = lang
nose.assert_is_not_none(kln_lang)
nose.assert_equal(kln_lang['name'], kln_language_name)
| # test_update_language_list
from __future__ import unicode_literals
import json
import os
import os.path
import nose.tools as nose
import yvs.shared as yvs
import utilities.add_language as add_lang
from tests.test_add_language import set_up, tear_down
@nose.with_setup(set_up, tear_down)
def test_update_languge_list_add():
"""should add new languages to language list"""
new_language_id = 'kln'
new_language_name = 'Klingon'
langs_path = os.path.join(yvs.PACKAGED_DATA_DIR_PATH, 'languages.json')
with open(langs_path, 'r') as langs_file:
langs = json.load(langs_file)
orig_num_langs = len(langs)
add_lang.update_language_list(new_language_id, new_language_name)
with open(langs_path, 'r') as langs_file:
langs = json.load(langs_file)
num_langs = len(langs)
nose.assert_equal(num_langs, orig_num_langs + 1)
new_lang = None
for lang in langs:
if lang['id'] == new_language_id:
new_lang = lang
nose.assert_is_not_none(new_lang)
nose.assert_equal(new_lang['name'], new_language_name)
| Add additional checks to update_language_list test | Add additional checks to update_language_list test
Also make language variable names independent of their actual values.
| Python | mit | caleb531/youversion-suggest,caleb531/youversion-suggest | ---
+++
@@ -16,15 +16,20 @@
@nose.with_setup(set_up, tear_down)
def test_update_languge_list_add():
"""should add new languages to language list"""
- kln_language_id = 'kln'
- kln_language_name = 'Klingon'
- add_lang.update_language_list(kln_language_id, kln_language_name)
+ new_language_id = 'kln'
+ new_language_name = 'Klingon'
langs_path = os.path.join(yvs.PACKAGED_DATA_DIR_PATH, 'languages.json')
with open(langs_path, 'r') as langs_file:
langs = json.load(langs_file)
- kln_lang = None
+ orig_num_langs = len(langs)
+ add_lang.update_language_list(new_language_id, new_language_name)
+ with open(langs_path, 'r') as langs_file:
+ langs = json.load(langs_file)
+ num_langs = len(langs)
+ nose.assert_equal(num_langs, orig_num_langs + 1)
+ new_lang = None
for lang in langs:
- if lang['id'] == kln_language_id:
- kln_lang = lang
- nose.assert_is_not_none(kln_lang)
- nose.assert_equal(kln_lang['name'], kln_language_name)
+ if lang['id'] == new_language_id:
+ new_lang = lang
+ nose.assert_is_not_none(new_lang)
+ nose.assert_equal(new_lang['name'], new_language_name) |
13b387af53edcce78f95adc2ad96e87bb6df75e6 | beetle_preview/__init__.py | beetle_preview/__init__.py | from http import server
from socketserver import TCPServer
import os
class Server:
def __init__(self, own_config, config, builder):
self.directory = config.folders['output']
self.port = own_config['port']
self.builder = builder
def serve(self):
os.chdir(self.directory)
request_handler = server.SimpleHTTPRequestHandler
httpd = TCPServer(('', self.port), request_handler)
try:
httpd.serve_forever()
except KeyboardInterrupt:
httpd.shutdown()
def register(plugin_config, config, commander, builder, content_renderer):
server = Server(plugin_config, config, builder)
commander.add('preview', server.serve, 'Serve the rendered site')
| from http import server
from socketserver import TCPServer
import os
class Server:
def __init__(self, own_config, config, builder):
self.directory = config.folders['output']
self.port = own_config.get('port', 5000)
self.builder = builder
def serve(self):
os.chdir(self.directory)
request_handler = server.SimpleHTTPRequestHandler
httpd = TCPServer(('', self.port), request_handler)
try:
httpd.serve_forever()
except KeyboardInterrupt:
httpd.shutdown()
def register(plugin_config, config, commander, builder, content_renderer):
server = Server(plugin_config, config, builder)
commander.add('preview', server.serve, 'Serve the rendered site')
| Set up a default port of 5000 so it won't fail if you forget to specify one in config.yaml | Set up a default port of 5000 so it won't fail if you forget to specify one in config.yaml
| Python | mit | cknv/beetle-preview | ---
+++
@@ -6,7 +6,7 @@
class Server:
def __init__(self, own_config, config, builder):
self.directory = config.folders['output']
- self.port = own_config['port']
+ self.port = own_config.get('port', 5000)
self.builder = builder
def serve(self): |
a32270be3ef07fa4a8289374d779ec44f834834c | examples/chr12_plot.py | examples/chr12_plot.py | import tadtool.tad as tad
import tadtool.plot as tp
# load regions data set
regions = tad.HicRegionFileReader().regions("chr12_20-35Mb_regions.bed")
# load matrix
matrix = tad.HicMatrixFileReader().matrix("chr12_20-35Mb.matrix.txt")
# prepare plot
tad_plot = tp.TADtoolPlot(matrix, regions, norm='lin', max_dist=1000000, algorithm='insulation')
fig, axes = tad_plot.plot('chr12:31000000-34000000')
# show plot
fig.show()
| import tadtool.tad as tad
import tadtool.plot as tp
# load regions data set
regions = tad.HicRegionFileReader().regions("chr12_20-35Mb_regions.bed")
# load matrix
matrix = tad.HicMatrixFileReader().matrix("chr12_20-35Mb.matrix.txt")
# prepare plot
tad_plot = tp.TADtoolPlot(matrix, regions, norm='lin', max_dist=1000000, algorithm='insulation')
fig, axes = tad_plot.plot('chr12:31000000-34000000')
# show plot
import matplotlib.pyplot as plt
plt.show()
| Make sure example also runs if executed as scipt | Make sure example also runs if executed as scipt
| Python | mit | vaquerizaslab/tadtool | ---
+++
@@ -12,5 +12,6 @@
fig, axes = tad_plot.plot('chr12:31000000-34000000')
# show plot
-fig.show()
+import matplotlib.pyplot as plt
+plt.show()
|
97645cf2d1dec9b59f30a460de7f142d1f6bc01b | bin/purge_database_json.py | bin/purge_database_json.py | from pymongo import MongoClient
import json
import sys
from emission.core.get_database import get_db, get_section_db
from emission.tests import common
def purgeData(userName):
Sections=get_section_db()
common.purgeData(Sections)
def purgeAllData():
db = get_db()
common.dropAllCollections(db)
if __name__ == '__main__':
if len(sys.argv) == 0:
print "USAGE: %s [userName]" % sys.argv[0]
exit(1)
if len(sys.argv) == 1:
purgeAllData()
else:
purgeData(sys.argv[1])
| from pymongo import MongoClient
import json
import sys
from emission.core.get_database import get_db, get_section_db
import emission.tests.common as etc
def purgeAllData():
db = get_db()
etc.dropAllCollections(db)
if __name__ == '__main__':
if len(sys.argv) != 1:
print "USAGE: %s" % sys.argv[0]
exit(1)
purgeAllData()
| Fix obsolete code + import | Fix obsolete code + import
I can now run this without anything crashing
```
C02KT61MFFT0:e-mission-server shankari$ ./e-mission-py.bash bin/purge_database_json.py localhost
USAGE: bin/purge_database_json.py
C02KT61MFFT0:e-mission-server shankari$ ./e-mission-py.bash bin/purge_database_json.py
C02KT61MFFT0:e-mission-server shankari$
```
| Python | bsd-3-clause | e-mission/e-mission-server,e-mission/e-mission-server,sunil07t/e-mission-server,shankari/e-mission-server,shankari/e-mission-server,sunil07t/e-mission-server,shankari/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server,sunil07t/e-mission-server,sunil07t/e-mission-server,e-mission/e-mission-server | ---
+++
@@ -2,22 +2,15 @@
import json
import sys
from emission.core.get_database import get_db, get_section_db
-from emission.tests import common
-
-def purgeData(userName):
- Sections=get_section_db()
- common.purgeData(Sections)
+import emission.tests.common as etc
def purgeAllData():
db = get_db()
- common.dropAllCollections(db)
+ etc.dropAllCollections(db)
if __name__ == '__main__':
- if len(sys.argv) == 0:
- print "USAGE: %s [userName]" % sys.argv[0]
+ if len(sys.argv) != 1:
+ print "USAGE: %s" % sys.argv[0]
exit(1)
- if len(sys.argv) == 1:
- purgeAllData()
- else:
- purgeData(sys.argv[1])
+ purgeAllData() |
3f62fb788beea1ac32d514d549fdaeaaae0f3292 | mesonbuild/scripts/__init__.py | mesonbuild/scripts/__init__.py | #!/usr/bin/env python3
# Copyright 2016 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def destdir_join(d1, d2):
# c:\destdir + c:\prefix must produce c:\destdir\prefix
if len(d1) > 1 and d1[1] == ':' and \
len(d2) > 1 and d2[1] == ':':
return d1 + d2[2:]
return d1 + d2
| # Copyright 2016 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def destdir_join(d1, d2):
# c:\destdir + c:\prefix must produce c:\destdir\prefix
if len(d1) > 1 and d1[1] == ':' and \
len(d2) > 1 and d2[1] == ':':
return d1 + d2[2:]
return d1 + d2
| Remove unneeded shebang line that was triggering some linters. | Remove unneeded shebang line that was triggering some linters.
| Python | apache-2.0 | ernestask/meson,aaronp24/meson,rhd/meson,mesonbuild/meson,centricular/meson,mesonbuild/meson,wberrier/meson,rhd/meson,mesonbuild/meson,fmuellner/meson,trhd/meson,mesonbuild/meson,MathieuDuponchelle/meson,centricular/meson,centricular/meson,QuLogic/meson,jeandet/meson,trhd/meson,MathieuDuponchelle/meson,thiblahute/meson,pexip/meson,trhd/meson,pexip/meson,trhd/meson,thiblahute/meson,aaronp24/meson,jeandet/meson,fmuellner/meson,aaronp24/meson,aaronp24/meson,QuLogic/meson,trhd/meson,QuLogic/meson,becm/meson,trhd/meson,pexip/meson,fmuellner/meson,ernestask/meson,rhd/meson,jeandet/meson,centricular/meson,pexip/meson,aaronp24/meson,wberrier/meson,QuLogic/meson,fmuellner/meson,ernestask/meson,ernestask/meson,trhd/meson,thiblahute/meson,aaronp24/meson,MathieuDuponchelle/meson,QuLogic/meson,trhd/meson,pexip/meson,aaronp24/meson,ernestask/meson,jpakkane/meson,becm/meson,wberrier/meson,jpakkane/meson,mesonbuild/meson,trhd/meson,mesonbuild/meson,thiblahute/meson,jpakkane/meson,MathieuDuponchelle/meson,fmuellner/meson,thiblahute/meson,jpakkane/meson,QuLogic/meson,jeandet/meson,aaronp24/meson,wberrier/meson,centricular/meson,pexip/meson,fmuellner/meson,MathieuDuponchelle/meson,becm/meson,becm/meson,becm/meson,jeandet/meson,mesonbuild/meson,rhd/meson,ernestask/meson,pexip/meson,thiblahute/meson,becm/meson,QuLogic/meson,pexip/meson,wberrier/meson,MathieuDuponchelle/meson,centricular/meson,QuLogic/meson,jpakkane/meson,mesonbuild/meson,pexip/meson,ernestask/meson,rhd/meson,mesonbuild/meson,MathieuDuponchelle/meson,MathieuDuponchelle/meson,rhd/meson,jpakkane/meson,becm/meson,ernestask/meson,MathieuDuponchelle/meson,ernestask/meson,rhd/meson,aaronp24/meson,centricular/meson,jeandet/meson,wberrier/meson,jeandet/meson,QuLogic/meson,jeandet/meson,jpakkane/meson,jpakkane/meson,fmuellner/meson,jeandet/meson,thiblahute/meson,becm/meson,centricular/meson,rhd/meson,fmuellner/meson,fmuellner/meson,jpakkane/meson,wberrier/meson,becm/meson,wberrier/meson,mesonbuild/meson,pe
xip/meson,becm/meson,thiblahute/meson,thiblahute/meson | ---
+++
@@ -1,5 +1,3 @@
-#!/usr/bin/env python3
-
# Copyright 2016 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License"); |
8266b46f8710e48cf93778a90cc0c82f4f9dcbe8 | l10n_br_nfe/models/__init__.py | l10n_br_nfe/models/__init__.py | # License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from . import res_country_state
from . import res_partner
from . import res_company
from . import product_product
from . import document_related
from . import document
from . import document_line
from . import res_city
from . import res_config_settings
from . import cfop
from . import document_cancel
from . import document_correction
from . import document_invalidate_number
from . import spec_mixin
| # License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from . import res_country_state
from . import res_partner
from . import res_company
from . import product_product
from . import document_related
from . import document
from . import document_line
from . import res_city
from . import res_config_settings
from . import cfop
# from . import document_cancel
# from . import document_correction
from . import document_invalidate_number
from . import spec_mixin
| Disable import of document_cancel and document_correction | [REF] Disable import of document_cancel and document_correction
| Python | agpl-3.0 | OCA/l10n-brazil,OCA/l10n-brazil,OCA/l10n-brazil | ---
+++
@@ -10,7 +10,7 @@
from . import res_city
from . import res_config_settings
from . import cfop
-from . import document_cancel
-from . import document_correction
+# from . import document_cancel
+# from . import document_correction
from . import document_invalidate_number
from . import spec_mixin |
7805dbadd44c262223ae02d358aa251b4df5d0b0 | astropy/table/__init__.py | astropy/table/__init__.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from .table import Column, Table, TableColumns, Row, MaskedColumn
from .np_utils import TableMergeError
from .operations import join, hstack, vstack
# Import routines that connect readers/writers to astropy.table
from ..io.ascii import connect
from ..io.fits import connect
from ..io.misc import connect
from ..io.votable import connect
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
from .table import Column, Table, TableColumns, Row, MaskedColumn, GroupedTable
from .np_utils import TableMergeError
from .operations import join, hstack, vstack
# Import routines that connect readers/writers to astropy.table
from ..io.ascii import connect
from ..io.fits import connect
from ..io.misc import connect
from ..io.votable import connect
| Add GroupedTable to the top-level table classes | Add GroupedTable to the top-level table classes
| Python | bsd-3-clause | bsipocz/astropy,StuartLittlefair/astropy,joergdietrich/astropy,saimn/astropy,stargaser/astropy,larrybradley/astropy,lpsinger/astropy,pllim/astropy,funbaker/astropy,mhvk/astropy,joergdietrich/astropy,AustereCuriosity/astropy,funbaker/astropy,pllim/astropy,larrybradley/astropy,mhvk/astropy,AustereCuriosity/astropy,pllim/astropy,kelle/astropy,AustereCuriosity/astropy,funbaker/astropy,DougBurke/astropy,tbabej/astropy,AustereCuriosity/astropy,aleksandr-bakanov/astropy,astropy/astropy,lpsinger/astropy,saimn/astropy,stargaser/astropy,StuartLittlefair/astropy,astropy/astropy,aleksandr-bakanov/astropy,funbaker/astropy,dhomeier/astropy,mhvk/astropy,StuartLittlefair/astropy,saimn/astropy,stargaser/astropy,astropy/astropy,pllim/astropy,MSeifert04/astropy,pllim/astropy,dhomeier/astropy,tbabej/astropy,joergdietrich/astropy,larrybradley/astropy,DougBurke/astropy,kelle/astropy,StuartLittlefair/astropy,mhvk/astropy,dhomeier/astropy,lpsinger/astropy,larrybradley/astropy,tbabej/astropy,AustereCuriosity/astropy,kelle/astropy,saimn/astropy,bsipocz/astropy,tbabej/astropy,MSeifert04/astropy,mhvk/astropy,dhomeier/astropy,bsipocz/astropy,kelle/astropy,lpsinger/astropy,lpsinger/astropy,tbabej/astropy,StuartLittlefair/astropy,MSeifert04/astropy,aleksandr-bakanov/astropy,saimn/astropy,astropy/astropy,bsipocz/astropy,dhomeier/astropy,joergdietrich/astropy,joergdietrich/astropy,kelle/astropy,aleksandr-bakanov/astropy,stargaser/astropy,DougBurke/astropy,DougBurke/astropy,astropy/astropy,MSeifert04/astropy,larrybradley/astropy | ---
+++
@@ -1,5 +1,5 @@
# Licensed under a 3-clause BSD style license - see LICENSE.rst
-from .table import Column, Table, TableColumns, Row, MaskedColumn
+from .table import Column, Table, TableColumns, Row, MaskedColumn, GroupedTable
from .np_utils import TableMergeError
from .operations import join, hstack, vstack
|
357af01554cca6197d07a4a408c02921e70a14eb | cozify/multisensor.py | cozify/multisensor.py | import time
from influxdb import InfluxDBClient
from influxdb import SeriesHelper
from . import config
# expects Cozify devices type json data
def getMultisensorData(data):
out = []
for device in data:
state=data[device]['state']
devtype = state['type']
if devtype == 'STATE_MULTI_SENSOR':
name=data[device]['name']
if 'lastSeen' in state:
timestamp=state['lastSeen']
else:
# if no time of measurement is known we must make a reasonable assumption
# Stored here in milliseconds to match accuracy of what the hub will give you
timestamp = time.time() * 1000
if 'temperature' in state:
temperature=state['temperature']
else:
temperature=None
if 'humidity' in state:
humidity=state['humidity']
else:
humidity=None
out.append({
'name': name,
'time': timestamp,
'temperature': temperature,
'humidity': humidity
})
return out
| import time
from . import config
# expects Cozify devices type json data
def getMultisensorData(data):
out = []
for device in data:
state=data[device]['state']
devtype = state['type']
if devtype == 'STATE_MULTI_SENSOR':
name=data[device]['name']
if 'lastSeen' in state:
timestamp=state['lastSeen']
else:
# if no time of measurement is known we must make a reasonable assumption
# Stored here in milliseconds to match accuracy of what the hub will give you
timestamp = time.time() * 1000
if 'temperature' in state:
temperature=state['temperature']
else:
temperature=None
if 'humidity' in state:
humidity=state['humidity']
else:
humidity=None
out.append({
'name': name,
'time': timestamp,
'temperature': temperature,
'humidity': humidity
})
return out
| Remove outdated imports, oops sorry. | Remove outdated imports, oops sorry.
| Python | mit | Artanicus/python-cozify,Artanicus/python-cozify | ---
+++
@@ -1,7 +1,4 @@
import time
-
-from influxdb import InfluxDBClient
-from influxdb import SeriesHelper
from . import config
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.