commit
stringlengths 40
40
| old_file
stringlengths 4
150
| new_file
stringlengths 4
150
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
501
| message
stringlengths 15
4.06k
| lang
stringclasses 4
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
| diff
stringlengths 0
4.35k
|
|---|---|---|---|---|---|---|---|---|---|---|
fe771659b876bfe23e5b16b9648ab7ede5b314e9
|
comics/crawler/crawlers/questionablecontent.py
|
comics/crawler/crawlers/questionablecontent.py
|
from comics.crawler.crawlers import BaseComicCrawler
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.feed_url = 'http://www.questionablecontent.net/QCRSS.xml'
self.parse_feed()
for entry in self.feed['entries']:
if self.timestamp_to_date(entry['updated_parsed']) == self.pub_date:
self.title = entry['title']
pieces = entry['summary'].split('"')
for i, piece in enumerate(pieces):
if piece.count('src='):
self.url = pieces[i + 1]
return
|
from comics.crawler.crawlers import BaseComicCrawler
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.feed_url = 'http://www.questionablecontent.net/QCRSS.xml'
self.parse_feed()
for entry in self.feed.entries:
if ('updated_parsed' in entry and
self.timestamp_to_date(entry.updated_parsed) == self.pub_date):
self.title = entry.title
pieces = entry.summary.split('"')
for i, piece in enumerate(pieces):
if piece.count('src='):
self.url = pieces[i + 1]
return
|
Fix error in Questionable Content crawler when feed entry does not contain date
|
Fix error in Questionable Content crawler when feed entry does not contain date
|
Python
|
agpl-3.0
|
datagutten/comics,klette/comics,klette/comics,datagutten/comics,jodal/comics,jodal/comics,datagutten/comics,datagutten/comics,klette/comics,jodal/comics,jodal/comics
|
---
+++
@@ -5,10 +5,11 @@
self.feed_url = 'http://www.questionablecontent.net/QCRSS.xml'
self.parse_feed()
- for entry in self.feed['entries']:
- if self.timestamp_to_date(entry['updated_parsed']) == self.pub_date:
- self.title = entry['title']
- pieces = entry['summary'].split('"')
+ for entry in self.feed.entries:
+ if ('updated_parsed' in entry and
+ self.timestamp_to_date(entry.updated_parsed) == self.pub_date):
+ self.title = entry.title
+ pieces = entry.summary.split('"')
for i, piece in enumerate(pieces):
if piece.count('src='):
self.url = pieces[i + 1]
|
523293a2785df1229159ad5d0d430195404b9334
|
arc_distance/__init__.py
|
arc_distance/__init__.py
|
# Authors: Yuancheng Peng
# License: MIT
"""Computes the arc distance between a collection of points
This code is challenging because it requires efficient vectorisation of
trigonometric functions that are note natively supported in SSE/AVX. The numpy
version makes use of numpy.tile and transpose, which proves to be challenging
too.
See also http://en.wikipedia.org/wiki/Great-circle_distance
"""
import numpy as np
def make_env(n=100):
rng = np.random.RandomState(42)
a = rng.rand(n, 2)
b = rng.rand(n, 2)
return (a, b), {}
|
# Authors: Yuancheng Peng
# License: MIT
"""Computes the arc distance between a collection of points
This code is challenging because it requires efficient vectorisation of
trigonometric functions that are note natively supported in SSE/AVX. The numpy
version makes use of numpy.tile and transpose, which proves to be challenging
too.
See also http://en.wikipedia.org/wiki/Great-circle_distance
"""
import numpy as np
def make_env(n=1000):
rng = np.random.RandomState(42)
a = rng.rand(n, 2)
b = rng.rand(n, 2)
return (a, b), {}
|
Make arc distance test size bigger to better show the difference.
|
Make arc distance test size bigger to better show the difference.
Now on the web site, 5 on the 7 test case have speed of 0.001, the minimal value.
|
Python
|
mit
|
numfocus/python-benchmarks,numfocus/python-benchmarks
|
---
+++
@@ -13,7 +13,7 @@
import numpy as np
-def make_env(n=100):
+def make_env(n=1000):
rng = np.random.RandomState(42)
a = rng.rand(n, 2)
b = rng.rand(n, 2)
|
620bb416b0e44cc002679e001f1f0b8ab7792685
|
bmi_tester/tests_pytest/test_grid.py
|
bmi_tester/tests_pytest/test_grid.py
|
from nose.tools import (assert_is_instance, assert_less_equal, assert_equal,
assert_greater, assert_in)
# from nose import with_setup
# from .utils import setup_func, teardown_func, all_names, all_grids, new_bmi
from .utils import all_names, all_grids
VALID_GRID_TYPES = (
"scalar",
"unstructured",
"unstructured_triangular",
"rectilinear",
"structured_quadrilateral",
"uniform_rectilinear",
"uniform_rectilinear_grid",
)
def test_valid_grid_rank(new_bmi, gid):
"Test grid rank for grid {gid}".format(gid=gid)
rank = new_bmi.get_grid_rank(gid)
assert isinstance(rank, int)
assert rank <= 3
def test_get_grid_size(new_bmi, gid):
"Test grid size for grid {gid}".format(gid=gid)
size = new_bmi.get_grid_size(gid)
assert isinstance(size, int)
assert size > 0
def test_get_grid_type(new_bmi, gid):
"Test grid is known for grid {gid}".format(gid=gid)
gtype = new_bmi.get_grid_type(gid)
assert isinstance(gtype, str)
assert gtype in VALID_GRID_TYPES
|
from nose.tools import (assert_is_instance, assert_less_equal, assert_equal,
assert_greater, assert_in)
# from nose import with_setup
# from .utils import setup_func, teardown_func, all_names, all_grids, new_bmi
from .utils import all_names, all_grids
VALID_GRID_TYPES = (
"scalar",
"vector",
"unstructured",
"unstructured_triangular",
"rectilinear",
"structured_quadrilateral",
"uniform_rectilinear",
"uniform_rectilinear_grid",
)
def test_valid_grid_rank(new_bmi, gid):
"Test grid rank for grid {gid}".format(gid=gid)
rank = new_bmi.get_grid_rank(gid)
assert isinstance(rank, int)
assert rank <= 3
def test_get_grid_size(new_bmi, gid):
"Test grid size for grid {gid}".format(gid=gid)
size = new_bmi.get_grid_size(gid)
assert isinstance(size, int)
assert size > 0
def test_get_grid_type(new_bmi, gid):
"Test grid is known for grid {gid}".format(gid=gid)
gtype = new_bmi.get_grid_type(gid)
assert isinstance(gtype, str)
assert gtype in VALID_GRID_TYPES
|
Add vector as valid grid type.
|
Add vector as valid grid type.
|
Python
|
mit
|
csdms/bmi-tester
|
---
+++
@@ -8,6 +8,7 @@
VALID_GRID_TYPES = (
"scalar",
+ "vector",
"unstructured",
"unstructured_triangular",
"rectilinear",
|
d7f744cfe542fffc398c3301699541190087ccbd
|
src/musicbrainz2/__init__.py
|
src/musicbrainz2/__init__.py
|
"""A collection of classes for MusicBrainz.
This package contains the following modules:
1. L{model}: The MusicBrainz domain model, containing classes like
L{Artist <model.Artist>}, L{Release <model.Release>}, or
L{Track <model.Track>}
2. L{webservice}: An interface to the MusicBrainz XML web service.
3. L{wsxml}: A parser for the web service XML format.
4. L{disc}: Functions for creating and submitting DiscIDs.
5. L{utils}: Utilities for working with URIs and other commonly needed tools.
To get started quickly, have a look at L{webservice.Query} and the examples
there. The source distribution also contains example code you might find
interesting.
@author: Matthias Friedrich <matt@mafr.de>
"""
__revision__ = '$Id$'
__version__ = '0.2.1'
# EOF
|
"""A collection of classes for MusicBrainz.
This package contains the following modules:
1. L{model}: The MusicBrainz domain model, containing classes like
L{Artist <model.Artist>}, L{Release <model.Release>}, or
L{Track <model.Track>}
2. L{webservice}: An interface to the MusicBrainz XML web service.
3. L{wsxml}: A parser for the web service XML format.
4. L{disc}: Functions for creating and submitting DiscIDs.
5. L{utils}: Utilities for working with URIs and other commonly needed tools.
To get started quickly, have a look at L{webservice.Query} and the examples
there. The source distribution also contains example code you might find
interesting.
@author: Matthias Friedrich <matt@mafr.de>
"""
__revision__ = '$Id$'
__version__ = '0.3.0'
# EOF
|
Set the version number to 0.3.0.
|
Set the version number to 0.3.0.
git-svn-id: f25caaa641ea257ccb5bc415e08f7c71e4161381@214 b0b80210-5d09-0410-99dd-b4bd03f891c0
|
Python
|
bsd-3-clause
|
mineo/python-musicbrainz2
|
---
+++
@@ -21,6 +21,6 @@
@author: Matthias Friedrich <matt@mafr.de>
"""
__revision__ = '$Id$'
-__version__ = '0.2.1'
+__version__ = '0.3.0'
# EOF
|
85c7784982e70b2962af0ae82d65fb0a6c12fa78
|
integrations/node_js/my_first_test.py
|
integrations/node_js/my_first_test.py
|
from seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_basic(self):
self.open('http://xkcd.com/353/')
self.assert_element('img[alt="Python"]')
self.click('a[rel="license"]')
text = self.get_text("div center")
self.assertTrue("reuse any of my drawings" in text)
self.open('http://xkcd.com/1481/')
title = self.get_attribute('#comic img', 'title')
self.assertTrue('connections to the server' in title)
self.click_link_text('Blag')
self.assert_text('The blag of the webcomic', 'h2')
self.update_text('input#s', 'Robots!\n')
self.assert_text('Hooray robots!', '#content')
self.open('http://xkcd.com/1319/')
self.assert_text('Automation', 'div#ctitle')
|
from seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_basic(self):
self.open('http://xkcd.com/353/')
self.assert_element('img[alt="Python"]')
self.click('a[rel="license"]')
text = self.get_text("div center")
self.assertTrue("reuse any of my drawings" in text)
self.open('http://xkcd.com/1481/')
title = self.get_attribute('#comic img', 'title')
self.assertTrue('connections to the server' in title)
self.click('link=Blag')
self.assert_text('The blag of the webcomic', 'h2')
self.update_text('input#s', 'Robots!\n')
self.assert_text('Hooray robots!', '#content')
self.open('http://xkcd.com/1319/')
self.assert_text('Automation', 'div#ctitle')
|
Update a click in a test
|
Update a click in a test
|
Python
|
mit
|
seleniumbase/SeleniumBase,mdmintz/seleniumspot,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/seleniumspot,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase
|
---
+++
@@ -12,7 +12,7 @@
self.open('http://xkcd.com/1481/')
title = self.get_attribute('#comic img', 'title')
self.assertTrue('connections to the server' in title)
- self.click_link_text('Blag')
+ self.click('link=Blag')
self.assert_text('The blag of the webcomic', 'h2')
self.update_text('input#s', 'Robots!\n')
self.assert_text('Hooray robots!', '#content')
|
832f0887eb617691dc50688a35a0bef04e4e3346
|
fmcapi/__init__.py
|
fmcapi/__init__.py
|
"""
The fmcapi __init__.py file is called whenever someone imports the package into their program.
"""
# from .fmc import *
# from .api_objects import *
# from .helper_functions import *
import logging
# logging.getLogger(__name__).addHandler(logging.NullHandler())
# Its always good to set up a log file.
logging_format = '%(asctime)s - %(levelname)s:%(filename)s:%(lineno)s - %(message)s'
logging_dateformat = '%Y/%m/%d-%H:%M:%S'
# Logging level options are logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR, logging.CRITICAL
logging_level = logging.INFO
# ogging_level = logging.DEBUG
logging_filename = 'output.log'
logging.basicConfig(format=logging_format,
datefmt=logging_dateformat,
filename=logging_filename,
filemode='w',
level=logging_level)
logging.debug("In the fmcapi __init__.py file.")
def __authorship__():
"""In the FMC __authorship__() class method:
***********************************************************************************************************************
This python module was created by Dax Mickelson along with LOTs of help from Ryan Malloy and Neil Patel.
Feel free to send me comments/suggestions/improvements. Either by email: dmickels@cisco.com or more importantly
via a Pull request from the github repository: https://github.com/daxm/fmcapi.
***********************************************************************************************************************
"""
logging.debug(__authorship__.__doc__)
__authorship__()
|
"""
The fmcapi __init__.py file is called whenever someone imports the package into their program.
"""
# from .fmc import *
# from .api_objects import *
# from .helper_functions import *
import logging
logging.debug("In the fmcapi __init__.py file.")
def __authorship__():
"""In the FMC __authorship__() class method:
***********************************************************************************************************************
This python module was created by Dax Mickelson along with LOTs of help from Ryan Malloy and Neil Patel.
Feel free to send me comments/suggestions/improvements. Either by email: dmickels@cisco.com or more importantly
via a Pull request from the github repository: https://github.com/daxm/fmcapi.
***********************************************************************************************************************
"""
logging.debug(__authorship__.__doc__)
__authorship__()
|
Remove file logger enabled by default
|
Remove file logger enabled by default
|
Python
|
bsd-3-clause
|
daxm/fmcapi,daxm/fmcapi
|
---
+++
@@ -6,22 +6,6 @@
# from .api_objects import *
# from .helper_functions import *
import logging
-
-# logging.getLogger(__name__).addHandler(logging.NullHandler())
-
-# Its always good to set up a log file.
-logging_format = '%(asctime)s - %(levelname)s:%(filename)s:%(lineno)s - %(message)s'
-logging_dateformat = '%Y/%m/%d-%H:%M:%S'
-# Logging level options are logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR, logging.CRITICAL
-logging_level = logging.INFO
-# ogging_level = logging.DEBUG
-logging_filename = 'output.log'
-logging.basicConfig(format=logging_format,
- datefmt=logging_dateformat,
- filename=logging_filename,
- filemode='w',
- level=logging_level)
-
logging.debug("In the fmcapi __init__.py file.")
|
3cd3e40f84036dbb12f2281e58696f9104653ecc
|
src/adhocracy/lib/app_globals.py
|
src/adhocracy/lib/app_globals.py
|
"""The application's Globals object"""
import logging
import memcache
log = logging.getLogger(__name__)
class Globals(object):
"""Globals acts as a container for objects available throughout the
life of the application
"""
def __init__(self, config):
"""One instance of Globals is created during application
initialization and is available during requests via the
'app_globals' variable
"""
if 'memcached.server' in config:
self.cache = memcache.Client([config['memcached.server']])
log.info("Memcache set up")
log.debug("Flushing cache")
self.cache.flush_all()
else:
log.warn("Skipped memcache, no results caching will take place.")
self.cache = None
if 'adhocracy.instance' in config:
self.single_instance = config.get('adhocracy.instance')
else:
self.single_instance = None
|
"""The application's Globals object"""
import logging
import memcache
log = logging.getLogger(__name__)
class Globals(object):
"""Globals acts as a container for objects available throughout the
life of the application
"""
def __init__(self, config):
"""One instance of Globals is created during application
initialization and is available during requests via the
'app_globals' variable
"""
if 'memcached.server' in config:
self.cache = memcache.Client([config['memcached.server']])
log.debug("Memcache set up")
log.debug("Flushing cache")
self.cache.flush_all()
else:
log.warn("Skipped memcache, no results caching will take place.")
self.cache = None
if 'adhocracy.instance' in config:
self.single_instance = config.get('adhocracy.instance')
else:
self.single_instance = None
|
Decrease log level for memcache setup
|
Decrease log level for memcache setup
|
Python
|
agpl-3.0
|
DanielNeugebauer/adhocracy,phihag/adhocracy,DanielNeugebauer/adhocracy,liqd/adhocracy,phihag/adhocracy,liqd/adhocracy,DanielNeugebauer/adhocracy,liqd/adhocracy,phihag/adhocracy,DanielNeugebauer/adhocracy,DanielNeugebauer/adhocracy,alkadis/vcv,phihag/adhocracy,alkadis/vcv,liqd/adhocracy,alkadis/vcv,alkadis/vcv,alkadis/vcv,phihag/adhocracy
|
---
+++
@@ -22,7 +22,7 @@
"""
if 'memcached.server' in config:
self.cache = memcache.Client([config['memcached.server']])
- log.info("Memcache set up")
+ log.debug("Memcache set up")
log.debug("Flushing cache")
self.cache.flush_all()
else:
|
c7172405b835920d553aa3d5ac6d415da2253d0d
|
oneflow/core/social_pipeline.py
|
oneflow/core/social_pipeline.py
|
# -*- coding: utf-8 -*-
u"""
Copyright 2013-2014 Olivier Cortès <oc@1flow.io>.
This file is part of the 1flow project.
It provides {python,django}-social-auth pipeline helpers.
1flow is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.
1flow is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public
License along with 1flow. If not, see http://www.gnu.org/licenses/
"""
import logging
# from constance import config
# from django.shortcuts import redirect
from social_auth.backends.facebook import FacebookBackend
from social_auth.backends.twitter import TwitterBackend
from social_auth.backends import google
from models import (
TwitterAccount,
# FacebookAccount, FacebookFeed,
)
LOGGER = logging.getLogger(__name__)
def check_feeds(social_user, user, details, request, response, backend,
is_new=False, *args, **kwargs):
""" Create Accounts & feeds associated with social networks. """
try:
if isinstance(backend, FacebookBackend):
pass
elif isinstance(backend, google.GoogleOAuth2Backend):
pass
elif isinstance(backend, TwitterBackend):
TwitterAccount.check_social_user(social_user, user, backend)
except:
LOGGER.exception(u'Could not check feeds for user %s from '
u'backend %s.', user, social_user)
|
# -*- coding: utf-8 -*-
u"""
Copyright 2013-2014 Olivier Cortès <oc@1flow.io>.
This file is part of the 1flow project.
It provides {python,django}-social-auth pipeline helpers.
1flow is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.
1flow is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public
License along with 1flow. If not, see http://www.gnu.org/licenses/
"""
import logging
# from constance import config
# from django.shortcuts import redirect
# from social_auth.backends.facebook import FacebookBackend
# from social_auth.backends.twitter import TwitterBackend
# from social_auth.backends import google
# from models import (
# TwitterAccount,
# # FacebookAccount, FacebookFeed,
# )
LOGGER = logging.getLogger(__name__)
|
Remove useless/obsolete social pipeline function (it's done in social_auth post_save()+task to make pipeline independant and faster).
|
Remove useless/obsolete social pipeline function (it's done in social_auth post_save()+task to make pipeline independant and faster).
|
Python
|
agpl-3.0
|
1flow/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,1flow/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,WillianPaiva/1flow
|
---
+++
@@ -26,34 +26,14 @@
# from django.shortcuts import redirect
-from social_auth.backends.facebook import FacebookBackend
-from social_auth.backends.twitter import TwitterBackend
-from social_auth.backends import google
+# from social_auth.backends.facebook import FacebookBackend
+# from social_auth.backends.twitter import TwitterBackend
+# from social_auth.backends import google
-from models import (
- TwitterAccount,
- # FacebookAccount, FacebookFeed,
-)
+# from models import (
+# TwitterAccount,
+# # FacebookAccount, FacebookFeed,
+# )
LOGGER = logging.getLogger(__name__)
-
-
-def check_feeds(social_user, user, details, request, response, backend,
- is_new=False, *args, **kwargs):
- """ Create Accounts & feeds associated with social networks. """
-
- try:
-
- if isinstance(backend, FacebookBackend):
- pass
-
- elif isinstance(backend, google.GoogleOAuth2Backend):
- pass
-
- elif isinstance(backend, TwitterBackend):
- TwitterAccount.check_social_user(social_user, user, backend)
-
- except:
- LOGGER.exception(u'Could not check feeds for user %s from '
- u'backend %s.', user, social_user)
|
44c174807d7362b5d7959f122f2a74ae9ccb7b38
|
coney/request.py
|
coney/request.py
|
from .exceptions import MalformedRequestException
class Request(object):
def __init__(self, version, metadata, **kwargs):
self._version = version
self._metadata = metadata
self._arguments = kwargs
@property
def version(self):
return self._version
@property
def arguments(self):
return self._arguments
@property
def metadata(self):
return self._metadata
@staticmethod
def loads(s, serializer):
try:
l = serializer.loads(s)
except(ValueError, TypeError):
raise MalformedRequestException(serializer.__name__, s)
try:
version, metadata, args = l[0:3]
except ValueError:
raise MalformedRequestException(serializer.__name__, s)
else:
return Request(version, metadata, args)
@staticmethod
def dumps(obj, serializer):
return serializer.dumps([obj.version, obj.metadata, obj.arguments])
|
from .exceptions import MalformedRequestException
class Request(object):
def __init__(self, version, metadata, arguments):
self._version = version
self._metadata = metadata
self._arguments = arguments
@property
def version(self):
return self._version
@property
def arguments(self):
return self._arguments
@property
def metadata(self):
return self._metadata
@staticmethod
def loads(s, serializer):
try:
l = serializer.loads(s)
except(ValueError, TypeError):
raise MalformedRequestException(serializer.__name__, s)
try:
version, metadata, args = l[0:3]
except ValueError:
raise MalformedRequestException(serializer.__name__, s)
else:
return Request(version, metadata, args)
@staticmethod
def dumps(obj, serializer):
return serializer.dumps([obj.version, obj.metadata, obj.arguments])
|
Fix rpc argument handling when constructing a Request
|
Fix rpc argument handling when constructing a Request
|
Python
|
mit
|
cbigler/jackrabbit
|
---
+++
@@ -2,10 +2,10 @@
class Request(object):
- def __init__(self, version, metadata, **kwargs):
+ def __init__(self, version, metadata, arguments):
self._version = version
self._metadata = metadata
- self._arguments = kwargs
+ self._arguments = arguments
@property
def version(self):
|
033773dce75dc2c352d657443cf415775e3b30cc
|
erudite/components/knowledge_provider.py
|
erudite/components/knowledge_provider.py
|
"""
Knowledge provider that will respond to requests made by the rdf publisher or another bot.
"""
from sleekxmpp.plugins.base import base_plugin
from rhobot.components.storage.client import StoragePayload
from rdflib.namespace import FOAF
from rhobot.namespace import RHO
import logging
logger = logging.getLogger(__name__)
class KnowledgeProvider(base_plugin):
name = 'knowledge_provider'
description = 'Knowledge Provider'
dependencies = {'rho_bot_storage_client', 'rho_bot_rdf_publish', }
type_requirements = {str(FOAF.Person), str(RHO.Owner), }
def plugin_init(self):
pass
def post_init(self):
base_plugin.post_init(self)
self.xmpp['rho_bot_rdf_publish'].add_message_handler(self._rdf_request_message)
def _rdf_request_message(self, rdf_payload):
logger.info('Looking up knowledge')
form = rdf_payload['form']
payload = StoragePayload(form)
intersection = self.type_requirements.intersection(set(payload.types()))
if len(intersection) == len(payload.types()):
results = self.xmpp['rho_bot_storage_client'].find_nodes(payload)
if len(results.results()):
return results
return None
knowledge_provider = KnowledgeProvider
|
"""
Knowledge provider that will respond to requests made by the rdf publisher or another bot.
"""
from sleekxmpp.plugins.base import base_plugin
from rhobot.components.storage.client import StoragePayload
from rdflib.namespace import FOAF
from rhobot.namespace import RHO
import logging
logger = logging.getLogger(__name__)
class KnowledgeProvider(base_plugin):
name = 'knowledge_provider'
description = 'Knowledge Provider'
dependencies = {'rho_bot_storage_client', 'rho_bot_rdf_publish', }
type_requirements = {str(FOAF.Person), str(RHO.Owner), }
def plugin_init(self):
pass
def post_init(self):
base_plugin.post_init(self)
self.xmpp['rho_bot_rdf_publish'].add_request_handler(self._rdf_request_message)
def _rdf_request_message(self, rdf_payload):
logger.info('Looking up knowledge')
form = rdf_payload['form']
payload = StoragePayload(form)
intersection = self.type_requirements.intersection(set(payload.types()))
if len(intersection) == len(payload.types()):
results = self.xmpp['rho_bot_storage_client'].find_nodes(payload)
if len(results.results()):
return results
return None
knowledge_provider = KnowledgeProvider
|
Update knowledge provider to work with API changes.
|
Update knowledge provider to work with API changes.
|
Python
|
bsd-3-clause
|
rerobins/rho_erudite
|
---
+++
@@ -22,7 +22,7 @@
def post_init(self):
base_plugin.post_init(self)
- self.xmpp['rho_bot_rdf_publish'].add_message_handler(self._rdf_request_message)
+ self.xmpp['rho_bot_rdf_publish'].add_request_handler(self._rdf_request_message)
def _rdf_request_message(self, rdf_payload):
logger.info('Looking up knowledge')
|
b4399f3dfb8f15f1a811fbcc31453575ad83d277
|
byceps/services/snippet/transfer/models.py
|
byceps/services/snippet/transfer/models.py
|
"""
byceps.services.snippet.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from enum import Enum
from typing import NewType
from uuid import UUID
from attr import attrib, attrs
from ...site.transfer.models import SiteID
from ....typing import BrandID
@attrs(frozen=True, slots=True)
class Scope:
type_ = attrib(type=str)
name = attrib(type=str)
@classmethod
def for_brand(cls, brand_id: BrandID):
return cls('brand', str(brand_id))
@classmethod
def for_site(cls, site_id: SiteID):
return cls('site', str(site_id))
SnippetID = NewType('SnippetID', UUID)
SnippetType = Enum('SnippetType', ['document', 'fragment'])
SnippetVersionID = NewType('SnippetVersionID', UUID)
MountpointID = NewType('MountpointID', UUID)
|
"""
byceps.services.snippet.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from enum import Enum
from typing import NewType
from uuid import UUID
from attr import attrib, attrs
from ...site.transfer.models import SiteID
from ....typing import BrandID
@attrs(frozen=True, slots=True)
class Scope:
type_ = attrib(type=str)
name = attrib(type=str)
@classmethod
def for_brand(cls, brand_id: BrandID) -> 'Scope':
return cls('brand', str(brand_id))
@classmethod
def for_site(cls, site_id: SiteID) -> 'Scope':
return cls('site', str(site_id))
SnippetID = NewType('SnippetID', UUID)
SnippetType = Enum('SnippetType', ['document', 'fragment'])
SnippetVersionID = NewType('SnippetVersionID', UUID)
MountpointID = NewType('MountpointID', UUID)
|
Add missing return types to scope factory methods
|
Add missing return types to scope factory methods
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps
|
---
+++
@@ -23,11 +23,11 @@
name = attrib(type=str)
@classmethod
- def for_brand(cls, brand_id: BrandID):
+ def for_brand(cls, brand_id: BrandID) -> 'Scope':
return cls('brand', str(brand_id))
@classmethod
- def for_site(cls, site_id: SiteID):
+ def for_site(cls, site_id: SiteID) -> 'Scope':
return cls('site', str(site_id))
|
329fa135faca80bd9dee74989110aa6222e44e2b
|
landlab/io/vtk/vti.py
|
landlab/io/vtk/vti.py
|
#! /bin/env python
from landlab.io.vtk.writer import VtkWriter
from landlab.io.vtk.vtktypes import VtkUniformRectilinear
from landlab.io.vtk.vtkxml import (
VtkRootElement,
VtkGridElement,
VtkPieceElement,
VtkPointDataElement,
VtkCellDataElement,
VtkExtent,
VtkOrigin,
VtkSpacing,
)
class VtkUniformRectilinearWriter(VtkWriter):
_vtk_grid_type = VtkUniformRectilinear
def construct_field_elements(self, field):
extent = VtkExtent(field.shape[::-1])
origin = VtkOrigin(field.origin[::-1], field.spacing[::-1])
spacing = VtkSpacing(field.spacing[::-1])
element = {
'VTKFile':
VtkRootElement(VtkUniformRectilinear),
'Grid':
VtkGridElement(VtkUniformRectilinear, WholeExtent=extent,
Origin=origin, Spacing=spacing),
'Piece':
VtkPieceElement(Extent=extent),
'PointData':
VtkPointDataElement(field.at_node, append=self.data,
encoding=self.encoding),
'CellData':
VtkCellDataElement(field.at_cell, append=data,
encoding=encoding),
}
return element
|
#! /bin/env python
from landlab.io.vtk.writer import VtkWriter
from landlab.io.vtk.vtktypes import VtkUniformRectilinear
from landlab.io.vtk.vtkxml import (
VtkRootElement,
VtkGridElement,
VtkPieceElement,
VtkPointDataElement,
VtkCellDataElement,
VtkExtent,
VtkOrigin,
VtkSpacing,
)
class VtkUniformRectilinearWriter(VtkWriter):
_vtk_grid_type = VtkUniformRectilinear
def construct_field_elements(self, field):
extent = VtkExtent(field.shape[::-1])
origin = VtkOrigin(field.origin[::-1], field.spacing[::-1])
spacing = VtkSpacing(field.spacing[::-1])
element = {
'VTKFile':
VtkRootElement(VtkUniformRectilinear),
'Grid':
VtkGridElement(VtkUniformRectilinear, WholeExtent=extent,
Origin=origin, Spacing=spacing),
'Piece':
VtkPieceElement(Extent=extent),
'PointData':
VtkPointDataElement(field.at_node, append=self.data,
encoding=self.encoding),
'CellData':
VtkCellDataElement(field.at_cell, append=self.data,
encoding=self.encoding),
}
return element
|
Fix typos: encoding, data -> self.encoding, self.data
|
Fix typos: encoding, data -> self.encoding, self.data
|
Python
|
mit
|
landlab/landlab,landlab/landlab,cmshobe/landlab,cmshobe/landlab,amandersillinois/landlab,cmshobe/landlab,amandersillinois/landlab,landlab/landlab
|
---
+++
@@ -34,8 +34,8 @@
VtkPointDataElement(field.at_node, append=self.data,
encoding=self.encoding),
'CellData':
- VtkCellDataElement(field.at_cell, append=data,
- encoding=encoding),
+ VtkCellDataElement(field.at_cell, append=self.data,
+ encoding=self.encoding),
}
return element
|
1716d38b995638c6060faa0925861bd8ab4c0e2b
|
statsmodels/stats/tests/test_outliers_influence.py
|
statsmodels/stats/tests/test_outliers_influence.py
|
from numpy.testing import assert_almost_equal
from statsmodels.datasets import statecrime
from statsmodels.regression.linear_model import OLS
from statsmodels.stats.outliers_influence import reset_ramsey
from statsmodels.tools import add_constant
data = statecrime.load_pandas().data
def test_reset_stata():
mod = OLS(data.violent, add_constant(data[['murder', 'hs_grad']]))
res = mod.fit()
stat = reset_ramsey(res, degree=4)
assert_almost_equal(stat.fvalue[0, 0], 1.52, decimal=2)
assert_almost_equal(stat.pvalue, 0.2221, decimal=4)
|
from numpy.testing import assert_almost_equal
from statsmodels.datasets import statecrime, get_rdataset
from statsmodels.regression.linear_model import OLS
from statsmodels.stats.outliers_influence import reset_ramsey
from statsmodels.stats.outliers_influence import variance_inflation_factor
from statsmodels.tools import add_constant
import numpy as np
data = statecrime.load_pandas().data
def test_reset_stata():
mod = OLS(data.violent, add_constant(data[['murder', 'hs_grad']]))
res = mod.fit()
stat = reset_ramsey(res, degree=4)
assert_almost_equal(stat.fvalue[0, 0], 1.52, decimal=2)
assert_almost_equal(stat.pvalue, 0.2221, decimal=4)
exog_idx = list(data.columns).index('urban')
X_arr = np.asarray(data)
vif = variance_inflation_factor(X_arr, exog_idx)
assert_almost_equal(vif, 16.4394, decimal=4)
|
Add pandas dataframe capability in variance_inflation_factor
|
ENH: Add pandas dataframe capability in variance_inflation_factor
|
Python
|
bsd-3-clause
|
josef-pkt/statsmodels,statsmodels/statsmodels,josef-pkt/statsmodels,statsmodels/statsmodels,bashtage/statsmodels,josef-pkt/statsmodels,josef-pkt/statsmodels,josef-pkt/statsmodels,bashtage/statsmodels,statsmodels/statsmodels,josef-pkt/statsmodels,bashtage/statsmodels,statsmodels/statsmodels,bashtage/statsmodels,bashtage/statsmodels,statsmodels/statsmodels,statsmodels/statsmodels,bashtage/statsmodels
|
---
+++
@@ -1,9 +1,12 @@
from numpy.testing import assert_almost_equal
-from statsmodels.datasets import statecrime
+from statsmodels.datasets import statecrime, get_rdataset
from statsmodels.regression.linear_model import OLS
from statsmodels.stats.outliers_influence import reset_ramsey
+from statsmodels.stats.outliers_influence import variance_inflation_factor
from statsmodels.tools import add_constant
+
+import numpy as np
data = statecrime.load_pandas().data
@@ -14,3 +17,8 @@
stat = reset_ramsey(res, degree=4)
assert_almost_equal(stat.fvalue[0, 0], 1.52, decimal=2)
assert_almost_equal(stat.pvalue, 0.2221, decimal=4)
+
+ exog_idx = list(data.columns).index('urban')
+ X_arr = np.asarray(data)
+ vif = variance_inflation_factor(X_arr, exog_idx)
+ assert_almost_equal(vif, 16.4394, decimal=4)
|
c7e4fc5038cb2069193aa888c4978e9aeff995f7
|
source/segue/backend/processor/background.py
|
source/segue/backend/processor/background.py
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import subprocess
import pickle
import base64
try:
from shlex import quote
except ImportError:
from pipes import quote
from .base import Processor
from .. import pickle_support
class BackgroundProcessor(Processor):
'''Local background processor.'''
def process(self, command, args=None, kw=None):
'''Process *command* with *args* and *kw*.'''
if args is None:
args = ()
if kw is None:
kw = {}
serialised = base64.b64encode(
pickle.dumps(
{'command': command, 'args': args, 'kw': kw},
pickle.HIGHEST_PROTOCOL
)
)
python_statement = (
'import pickle;'
'import base64;'
'data = base64.b64decode(\'{0}\');'
'data = pickle.loads(data);'
'data[\'command\'](*data[\'args\'], **data[\'kw\'])'
).format(serialised.replace("'", r"\'"))
command = ' '.join(['python', '-c', '"{0}"'.format(python_statement)])
process = subprocess.Popen(command)
return 'Background process started: {0}'.format(process.pid)
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import subprocess
import pickle
import base64
try:
from shlex import quote
except ImportError:
from pipes import quote
from .base import Processor
from .. import pickle_support
class BackgroundProcessor(Processor):
'''Local background processor.'''
def process(self, command, args=None, kw=None):
'''Process *command* with *args* and *kw*.'''
if args is None:
args = ()
if kw is None:
kw = {}
serialised = base64.b64encode(
pickle.dumps(
{'command': command, 'args': args, 'kw': kw},
pickle.HIGHEST_PROTOCOL
)
)
python_statement = (
'import pickle;'
'import base64;'
'data = base64.b64decode(\'{0}\');'
'data = pickle.loads(data);'
'data[\'command\'](*data[\'args\'], **data[\'kw\'])'
).format(serialised.replace("'", r"\'"))
command = ['python', '-c', python_statement]
process = subprocess.Popen(command)
return 'Background process started: {0}'.format(process.pid)
|
Fix failing command on Linux.
|
Fix failing command on Linux.
|
Python
|
apache-2.0
|
4degrees/segue
|
---
+++
@@ -41,7 +41,7 @@
'data[\'command\'](*data[\'args\'], **data[\'kw\'])'
).format(serialised.replace("'", r"\'"))
- command = ' '.join(['python', '-c', '"{0}"'.format(python_statement)])
+ command = ['python', '-c', python_statement]
process = subprocess.Popen(command)
return 'Background process started: {0}'.format(process.pid)
|
f2e770ec86fe60c6d1c2b5d7b606bd6c576d167d
|
common/djangoapps/enrollment/urls.py
|
common/djangoapps/enrollment/urls.py
|
"""
URLs for the Enrollment API
"""
from django.conf import settings
from django.conf.urls import patterns, url
from .views import (
EnrollmentView,
EnrollmentListView,
EnrollmentCourseDetailView
)
USERNAME_PATTERN = settings.USERNAME_PATTERN
urlpatterns = patterns(
'enrollment.views',
url(
r'^enrollment/{username},{course_key}$'.format(username=USERNAME_PATTERN,
course_key=settings.COURSE_ID_PATTERN),
EnrollmentView.as_view(),
name='courseenrollment'
),
url(
r'^enrollment/{course_key}'.format(course_key=settings.COURSE_ID_PATTERN),
EnrollmentView.as_view(),
name='courseenrollment'
),
url(r'^enrollment$', EnrollmentListView.as_view(), name='courseenrollments'),
url(
r'^course/{course_key}$'.format(course_key=settings.COURSE_ID_PATTERN),
EnrollmentCourseDetailView.as_view(),
name='courseenrollmentdetails'
),
)
|
"""
URLs for the Enrollment API
"""
from django.conf import settings
from django.conf.urls import patterns, url
from .views import (
EnrollmentView,
EnrollmentListView,
EnrollmentCourseDetailView
)
USERNAME_PATTERN = settings.USERNAME_PATTERN
urlpatterns = patterns(
'enrollment.views',
url(
r'^enrollment/{username},{course_key}$'.format(username=USERNAME_PATTERN,
course_key=settings.COURSE_ID_PATTERN),
EnrollmentView.as_view(),
name='courseenrollment'
),
url(
r'^enrollment/{course_key}$'.format(course_key=settings.COURSE_ID_PATTERN),
EnrollmentView.as_view(),
name='courseenrollment'
),
url(r'^enrollment$', EnrollmentListView.as_view(), name='courseenrollments'),
url(
r'^course/{course_key}$'.format(course_key=settings.COURSE_ID_PATTERN),
EnrollmentCourseDetailView.as_view(),
name='courseenrollmentdetails'
),
)
|
Revert "enrollment api endpoint has been updated to accept trailing forward slashes"
|
Revert "enrollment api endpoint has been updated to accept trailing forward slashes"
|
Python
|
agpl-3.0
|
Edraak/edx-platform,Edraak/edx-platform,Edraak/edx-platform,Edraak/edx-platform,Edraak/edx-platform
|
---
+++
@@ -22,7 +22,7 @@
name='courseenrollment'
),
url(
- r'^enrollment/{course_key}'.format(course_key=settings.COURSE_ID_PATTERN),
+ r'^enrollment/{course_key}$'.format(course_key=settings.COURSE_ID_PATTERN),
EnrollmentView.as_view(),
name='courseenrollment'
),
|
9a221d5b0ca59a3384b3580c996aa518aaa90b0c
|
stand/runner/stand_server.py
|
stand/runner/stand_server.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import gettext
import eventlet
import os
from stand.socketio_events import StandSocketIO
locales_path = os.path.join(os.path.dirname(__file__), '..', 'i18n', 'locales')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--config", type=str,
help="Config file", required=True)
parser.add_argument("--lang", help="Minion messages language (i18n)",
required=False, default="en_US")
args = parser.parse_args()
eventlet.monkey_patch(all=True)
from stand.factory import create_app, create_babel_i18n, \
create_redis_store
t = gettext.translation('messages', locales_path, [args.lang],
fallback=True)
t.install(str=True)
app = create_app(config_file=args.config)
babel = create_babel_i18n(app)
# socketio, socketio_app = create_socket_io_app(app)
stand_socket_io = StandSocketIO(app)
redis_store = create_redis_store(app)
if app.debug:
app.run(debug=True)
else:
port = int(app.config['STAND_CONFIG'].get('port', 5000))
# noinspection PyUnresolvedReferences
eventlet.wsgi.server(eventlet.listen(('', port)),
stand_socket_io.socket_app)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import gettext
import eventlet
import os
from stand.socketio_events import StandSocketIO
locales_path = os.path.join(os.path.dirname(__file__), '..', 'i18n', 'locales')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--config", type=str,
help="Config file", required=True)
parser.add_argument("--lang", help="Minion messages language (i18n)",
required=False, default="en_US")
args = parser.parse_args()
eventlet.monkey_patch(all=True)
from stand.factory import create_app, create_babel_i18n, \
create_redis_store
t = gettext.translation('messages', locales_path, [args.lang],
fallback=True)
t.install()
app = create_app(config_file=args.config)
babel = create_babel_i18n(app)
# socketio, socketio_app = create_socket_io_app(app)
stand_socket_io = StandSocketIO(app)
redis_store = create_redis_store(app)
port = int(app.config['STAND_CONFIG'].get('port', 5000))
if app.debug:
app.run(debug=True, port=port)
else:
# noinspection PyUnresolvedReferences
eventlet.wsgi.server(eventlet.listen(('', port)),
stand_socket_io.socket_app)
|
Fix problem with Python 2 to 3
|
Fix problem with Python 2 to 3
|
Python
|
apache-2.0
|
eubr-bigsea/stand,eubr-bigsea/stand
|
---
+++
@@ -25,7 +25,7 @@
t = gettext.translation('messages', locales_path, [args.lang],
fallback=True)
- t.install(str=True)
+ t.install()
app = create_app(config_file=args.config)
babel = create_babel_i18n(app)
@@ -33,11 +33,10 @@
stand_socket_io = StandSocketIO(app)
redis_store = create_redis_store(app)
+ port = int(app.config['STAND_CONFIG'].get('port', 5000))
if app.debug:
- app.run(debug=True)
+ app.run(debug=True, port=port)
else:
- port = int(app.config['STAND_CONFIG'].get('port', 5000))
-
# noinspection PyUnresolvedReferences
eventlet.wsgi.server(eventlet.listen(('', port)),
stand_socket_io.socket_app)
|
4490e59bfe54874e17d3afd00ede0ad410dc7957
|
numba/cuda/tests/cudapy/test_userexc.py
|
numba/cuda/tests/cudapy/test_userexc.py
|
from numba.cuda.testing import unittest, SerialMixin, skip_on_cudasim
from numba import cuda
from numba.core import config
class MyError(Exception):
pass
regex_pattern = (
r'In function [\'"]test_exc[\'"], file [\.\/\\\-a-zA-Z_0-9]+, line \d+'
)
class TestUserExc(SerialMixin, unittest.TestCase):
def test_user_exception(self):
@cuda.jit("void(int32)", debug=True)
def test_exc(x):
if x == 1:
raise MyError
elif x == 2:
raise MyError("foo")
test_exc(0) # no raise
with self.assertRaises(MyError) as cm:
test_exc(1)
if not config.ENABLE_CUDASIM:
self.assertRegexpMatches(str(cm.exception), regex_pattern)
self.assertIn("tid=[0, 0, 0] ctaid=[0, 0, 0]", str(cm.exception))
with self.assertRaises(MyError) as cm:
test_exc(2)
if not config.ENABLE_CUDASIM:
self.assertRegexpMatches(str(cm.exception), regex_pattern)
self.assertRegexpMatches(str(cm.exception), regex_pattern)
self.assertIn("tid=[0, 0, 0] ctaid=[0, 0, 0]: foo", str(cm.exception))
if __name__ == '__main__':
unittest.main()
|
from numba.cuda.testing import unittest, SerialMixin, skip_on_cudasim
from numba import cuda
from numba.core import config
class MyError(Exception):
pass
regex_pattern = (
r'In function [\'"]test_exc[\'"], file [\:\.\/\\\-a-zA-Z_0-9]+, line \d+'
)
class TestUserExc(SerialMixin, unittest.TestCase):
def test_user_exception(self):
@cuda.jit("void(int32)", debug=True)
def test_exc(x):
if x == 1:
raise MyError
elif x == 2:
raise MyError("foo")
test_exc(0) # no raise
with self.assertRaises(MyError) as cm:
test_exc(1)
if not config.ENABLE_CUDASIM:
self.assertRegexpMatches(str(cm.exception), regex_pattern)
self.assertIn("tid=[0, 0, 0] ctaid=[0, 0, 0]", str(cm.exception))
with self.assertRaises(MyError) as cm:
test_exc(2)
if not config.ENABLE_CUDASIM:
self.assertRegexpMatches(str(cm.exception), regex_pattern)
self.assertRegexpMatches(str(cm.exception), regex_pattern)
self.assertIn("tid=[0, 0, 0] ctaid=[0, 0, 0]: foo", str(cm.exception))
if __name__ == '__main__':
unittest.main()
|
Add in windows drive pattern match.
|
Add in windows drive pattern match.
As title.
|
Python
|
bsd-2-clause
|
gmarkall/numba,stuartarchibald/numba,stuartarchibald/numba,stonebig/numba,sklam/numba,seibert/numba,numba/numba,stonebig/numba,stuartarchibald/numba,sklam/numba,stuartarchibald/numba,cpcloud/numba,IntelLabs/numba,numba/numba,sklam/numba,cpcloud/numba,stonebig/numba,stonebig/numba,seibert/numba,gmarkall/numba,seibert/numba,seibert/numba,gmarkall/numba,IntelLabs/numba,gmarkall/numba,IntelLabs/numba,stonebig/numba,numba/numba,cpcloud/numba,sklam/numba,seibert/numba,gmarkall/numba,sklam/numba,IntelLabs/numba,numba/numba,cpcloud/numba,numba/numba,stuartarchibald/numba,IntelLabs/numba,cpcloud/numba
|
---
+++
@@ -8,7 +8,7 @@
regex_pattern = (
- r'In function [\'"]test_exc[\'"], file [\.\/\\\-a-zA-Z_0-9]+, line \d+'
+ r'In function [\'"]test_exc[\'"], file [\:\.\/\\\-a-zA-Z_0-9]+, line \d+'
)
|
15a792e38152e9c7aa6a10bbc251e9b5f0df1341
|
aurora/optim/sgd.py
|
aurora/optim/sgd.py
|
import numpy as np
from .base import Base
class SGD(Base):
def __init__(self, cost, params, lr=0.1, momentum=0.9):
super().__init__(cost, params, lr)
self.momentum = momentum
self.velocity = self._init_velocity_vec(params)
def step(self, feed_dict):
exe_output = self.executor.run(feed_dict)
for i in range(len(self.params)):
self.velocity[i] = self.momentum * self.velocity[i] - self.lr * exe_output[1 + i]
self.params[i].const += self.velocity[i]
return exe_output[0]
@staticmethod
def _init_velocity_vec(params):
vector = []
for param in params:
vector.append(np.zeros_like(param.const))
return vector
|
import numpy as np
from .base import Base
class SGD(Base):
def __init__(self, cost, params, lr=0.1, momentum=0.9):
super().__init__(cost, params, lr)
self.momentum = momentum
self.velocity = [np.zeros_like(param.const)for param in params]
def step(self, feed_dict):
exe_output = self.executor.run(feed_dict)
for i in range(len(self.params)):
self.velocity[i] = self.momentum * self.velocity[i] - self.lr * exe_output[1 + i]
self.params[i].const += self.velocity[i]
return exe_output[0]
|
Improve velocity list initialisation in SGD
|
Improve velocity list initialisation in SGD
|
Python
|
apache-2.0
|
upul/Aurora,upul/Aurora,upul/Aurora
|
---
+++
@@ -6,7 +6,7 @@
def __init__(self, cost, params, lr=0.1, momentum=0.9):
super().__init__(cost, params, lr)
self.momentum = momentum
- self.velocity = self._init_velocity_vec(params)
+ self.velocity = [np.zeros_like(param.const)for param in params]
def step(self, feed_dict):
exe_output = self.executor.run(feed_dict)
@@ -15,9 +15,3 @@
self.params[i].const += self.velocity[i]
return exe_output[0]
- @staticmethod
- def _init_velocity_vec(params):
- vector = []
- for param in params:
- vector.append(np.zeros_like(param.const))
- return vector
|
79dc15a4db8f74bed5e06d19c5c4f8d895c04189
|
flaskext/debugtoolbar/panels/versions.py
|
flaskext/debugtoolbar/panels/versions.py
|
import pkg_resources
from flaskext.debugtoolbar.panels import DebugPanel
_ = lambda x: x
flask_version = pkg_resources.working_set.require('flask')[0].version
class VersionDebugPanel(DebugPanel):
"""
Panel that displays the Django version.
"""
name = 'Version'
has_content = False
def nav_title(self):
return _('Versions')
def nav_subtitle(self):
return 'Flask %s' % flask_version
def url(self):
return ''
def title(self):
return _('Versions')
def content(self):
return None
|
import pkg_resources
from flaskext.debugtoolbar.panels import DebugPanel
_ = lambda x: x
flask_version = pkg_resources.get_distribution('Flask').version
class VersionDebugPanel(DebugPanel):
"""
Panel that displays the Django version.
"""
name = 'Version'
has_content = False
def nav_title(self):
return _('Versions')
def nav_subtitle(self):
return 'Flask %s' % flask_version
def url(self):
return ''
def title(self):
return _('Versions')
def content(self):
return None
|
Modify the flask version retrieval (thanks donri)
|
Modify the flask version retrieval (thanks donri)
|
Python
|
bsd-3-clause
|
dianchang/flask-debugtoolbar,lepture/flask-debugtoolbar,lepture/flask-debugtoolbar,dianchang/flask-debugtoolbar,dianchang/flask-debugtoolbar
|
---
+++
@@ -4,7 +4,7 @@
_ = lambda x: x
-flask_version = pkg_resources.working_set.require('flask')[0].version
+flask_version = pkg_resources.get_distribution('Flask').version
class VersionDebugPanel(DebugPanel):
"""
|
5351ad8324fa8388ea3b82425d03f43ac16d7313
|
app.py
|
app.py
|
#!/usr/bin/env python
import os, requests, getSchedule
from flask import Flask, request, jsonify, render_template, abort
app = Flask(__name__)
@app.route('/')
def root():
return render_template('index.html')
@app.route('/m')
def mobileView():
stop = request.args.get('stop', 1, type=int)
route = requests.get('http://nextride.alykhan.com/api?stop='+str(stop)).json()
if route:
path = route
else:
abort(400)
return render_template('index.html', path=path)
@app.route('/api')
def api():
stop = request.args.get('stop', 1, type=int)
schedule = getSchedule.parseSchedule(getSchedule.getRides(stop))
if schedule:
response = jsonify(meta=dict(status=200, message='OK'),data=schedule)
else:
abort(400)
return response
@app.errorhandler(400)
def bad_request(error):
response = jsonify(meta=dict(status=error.code, message=error.message))
return response, error.code
if __name__ == "__main__":
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
#!/usr/bin/env python
import os, requests, getSchedule
from flask import Flask, request, jsonify, render_template, abort
app = Flask(__name__)
@app.route('/')
def root():
return render_template('index.html')
@app.route('/m')
def mobileView():
stop = request.args.get('stop', 1, type=int)
payload = {'stop': stop}
r = requests.get('http://nextride.alykhan.com/api', params=payload)
if r.status_code == requests.codes.ok:
path = r.json()
else:
abort(400)
return render_template('index.html', path=path)
@app.route('/api')
def api():
stop = request.args.get('stop', 1, type=int)
schedule = getSchedule.parseSchedule(getSchedule.getRides(stop))
if schedule:
response = jsonify(meta=dict(status=200, message='OK'),data=schedule)
else:
abort(400)
return response
@app.errorhandler(400)
def bad_request(error):
response = jsonify(meta=dict(status=error.code, message=error.message))
return response, error.code
if __name__ == "__main__":
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
Use Requests to encode stop as query param, verify API status code.
|
Use Requests to encode stop as query param, verify API status code.
|
Python
|
mit
|
alykhank/NextRide,alykhank/NextRide
|
---
+++
@@ -12,9 +12,10 @@
@app.route('/m')
def mobileView():
stop = request.args.get('stop', 1, type=int)
- route = requests.get('http://nextride.alykhan.com/api?stop='+str(stop)).json()
- if route:
- path = route
+ payload = {'stop': stop}
+ r = requests.get('http://nextride.alykhan.com/api', params=payload)
+ if r.status_code == requests.codes.ok:
+ path = r.json()
else:
abort(400)
return render_template('index.html', path=path)
|
36b37cc3439b1b99b2496c9a8037de9e412ad151
|
account_payment_partner/models/account_move_line.py
|
account_payment_partner/models/account_move_line.py
|
# Copyright 2016 Akretion (http://www.akretion.com/)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import fields, models
class AccountMoveLine(models.Model):
_inherit = 'account.move.line'
payment_mode_id = fields.Many2one(
'account.payment.mode',
string='Payment Mode',
domain="[('company_id', '=', company_id)]",
ondelete='restrict'
)
|
# Copyright 2016 Akretion (http://www.akretion.com/)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import fields, models
class AccountMoveLine(models.Model):
_inherit = 'account.move.line'
payment_mode_id = fields.Many2one(
'account.payment.mode',
string='Payment Mode',
domain="[('company_id', '=', company_id)]",
ondelete='restrict',
index=True,
)
|
Add indexes on account payment models
|
Add indexes on account payment models
The fields where the indexes are added are used in searches in
account_payment_order, which becomes really slow when a database have
many lines.
|
Python
|
agpl-3.0
|
OCA/bank-payment,OCA/bank-payment
|
---
+++
@@ -11,5 +11,6 @@
'account.payment.mode',
string='Payment Mode',
domain="[('company_id', '=', company_id)]",
- ondelete='restrict'
+ ondelete='restrict',
+ index=True,
)
|
d0fe2fd4bc619a45d18c3e5ba911b15045366849
|
api/tests/test_small_scripts.py
|
api/tests/test_small_scripts.py
|
"""This module tests the small scripts - admin, model, and wsgi."""
import unittest
class SmallScriptsTest(unittest.TestCase):
def test_admin(self):
import api.admin
def test_models(self):
import api.models
def test_wsgi(self):
import apel_rest.wsgi
|
"""This module tests the small scripts - admin, model, and wsgi."""
# Using unittest and not django.test as no need for overhead of database
import unittest
class SmallScriptsTest(unittest.TestCase):
def test_admin(self):
"""Check that admin is importable."""
import api.admin
def test_models(self):
"""Check that models is importable."""
import api.models
def test_wsgi(self):
"""Check that wsgi is importable."""
import apel_rest.wsgi
|
Add docstrings and comment to small scripts test
|
Add docstrings and comment to small scripts test
|
Python
|
apache-2.0
|
apel/rest,apel/rest
|
---
+++
@@ -1,14 +1,18 @@
"""This module tests the small scripts - admin, model, and wsgi."""
+# Using unittest and not django.test as no need for overhead of database
import unittest
class SmallScriptsTest(unittest.TestCase):
def test_admin(self):
+ """Check that admin is importable."""
import api.admin
def test_models(self):
+ """Check that models is importable."""
import api.models
def test_wsgi(self):
+ """Check that wsgi is importable."""
import apel_rest.wsgi
|
01b17ee30889afe1eadf8ec98c187ca9b856d0f7
|
connector/views.py
|
connector/views.py
|
from django.conf import settings
from django.template import RequestContext
from django.http import HttpResponse, HttpResponseNotFound
from django.template import Template
from cancer_browser.core.http import HttpResponseSendFile
from django.core.urlresolvers import reverse
import os, re
def client_vars(request, base):
return {
'settings': 'enable',
'jslogging': settings.JSLOGGING,
'ga_id': settings.GA_ID,
'baseurl': base,
}
types = {
'js': 'application/javascript',
'png': 'image/png',
'css': 'text/css'
}
def content(request, filename):
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), filename)
ext = os.path.splitext(filename)[1][1:]
if os.path.exists(path):
return HttpResponseSendFile(path, types[ext])
return HttpResponseNotFound()
def drop_last(path):
return re.sub(r"[^/]+/$", "", path)
def page(request):
from django.middleware.csrf import get_token
get_token(request) # force csrf
cvars = client_vars(request, drop_last(reverse(page)))
dirname = os.path.dirname(os.path.realpath(__file__))
t = Template(open(os.path.join(dirname, 'index.html')).read());
c = RequestContext(request, cvars)
return HttpResponse(t.render(c))
|
from django.conf import settings
from django.template import RequestContext
from django.http import HttpResponse, HttpResponseNotFound
from django.template import Template
from cancer_browser.core.http import HttpResponseSendFile
from django.core.urlresolvers import reverse
import os, re
def client_vars(request, base):
return {
'settings': 'enable',
'jslogging': settings.JSLOGGING,
'ga_id': settings.GA_ID,
'baseurl': base,
}
types = {
'js': 'application/javascript',
'png': 'image/png',
'css': 'text/css',
'map': 'application/json'
}
def content(request, filename):
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), filename)
ext = os.path.splitext(filename)[1][1:]
if os.path.exists(path):
return HttpResponseSendFile(path, types[ext])
return HttpResponseNotFound()
def drop_last(path):
return re.sub(r"[^/]+/$", "", path)
def page(request):
from django.middleware.csrf import get_token
get_token(request) # force csrf
cvars = client_vars(request, drop_last(reverse(page)))
dirname = os.path.dirname(os.path.realpath(__file__))
t = Template(open(os.path.join(dirname, 'index.html')).read());
c = RequestContext(request, cvars)
return HttpResponse(t.render(c))
|
Add mime type for sourcemaps.
|
Add mime type for sourcemaps.
|
Python
|
apache-2.0
|
ucscXena/ucsc-xena-client,ucscXena/ucsc-xena-client,acthp/ucsc-xena-client,ucscXena/ucsc-xena-client,ucscXena/ucsc-xena-client,ucscXena/ucsc-xena-client,acthp/ucsc-xena-client,acthp/ucsc-xena-client
|
---
+++
@@ -18,7 +18,8 @@
types = {
'js': 'application/javascript',
'png': 'image/png',
- 'css': 'text/css'
+ 'css': 'text/css',
+ 'map': 'application/json'
}
|
0bf7bf5ee30ddfd1510d50f189d3bb581ec5048d
|
tangled/website/resources.py
|
tangled/website/resources.py
|
from tangled.web import Resource, config
from tangled.site.resources.entry import Entry
class Docs(Entry):
@config('text/html', template='tangled.website:templates/docs.mako')
def GET(self):
static_dirs = self.app.get_all('static_directory', as_dict=True)
links = []
for prefix, dir_app in static_dirs.items():
if prefix[0] == 'docs':
links.append({
'href': '/'.join(prefix),
'text': prefix[1],
})
self.urlvars['id'] = 'docs'
data = super().GET()
data['links'] = sorted(links, key=lambda i: i['text'])
return data
|
from tangled.web import Resource, config
from tangled.site.resources.entry import Entry
class Docs(Entry):
@config('text/html', template='tangled.website:templates/docs.mako')
def GET(self):
static_dirs = self.app.get_all('static_directory', as_dict=True)
links = []
for prefix, dir_app in static_dirs.items():
if prefix[0] == 'docs':
links.append({
'href': '/'.join(prefix) + '/',
'text': prefix[1],
})
self.urlvars['id'] = 'docs'
data = super().GET()
data['links'] = sorted(links, key=lambda i: i['text'])
return data
|
Add trailing slashes to docs links
|
Add trailing slashes to docs links
This avoids hitting the app only to have it redirect back to nginx.
|
Python
|
mit
|
TangledWeb/tangled.website
|
---
+++
@@ -12,7 +12,7 @@
for prefix, dir_app in static_dirs.items():
if prefix[0] == 'docs':
links.append({
- 'href': '/'.join(prefix),
+ 'href': '/'.join(prefix) + '/',
'text': prefix[1],
})
self.urlvars['id'] = 'docs'
|
ba6b70be6bd329e952491eae387281c613794718
|
pyledgertools/plugins/download/ofx.py
|
pyledgertools/plugins/download/ofx.py
|
"""OFX downloader."""
from ofxtools.Client import OFXClient, BankAcct
from ofxtools.Types import DateTime
from yapsy.IPlugin import IPlugin
def make_date_kwargs(config):
return {k:DateTime().convert(v) for k,v in config.items() if k.startswith('dt')}
class OFXDownload(IPlugin):
"""OFX plugin class."""
def download(self, config):
"""Setup account info and credentials."""
client = OFXClient(
config['url'],
config['org'],
config['fid'],
version=config['version'],
appid=config['appid'],
appver=config['appver']
)
account = [BankAcct(config['bankid'], config['acctnum'], config['type'])]
kwargs = make_date_kwargs(config)
request = client.statement_request(
config['ofxuser'],
config['ofxpswd'],
account,
**kwargs
)
response = client.download(request)
fname = '{}_{}.ofx'.format(config['bankid'], config['acctnum'])
with open(fname, 'w') as ofxfile:
print(response.text, file=ofxfile)
return fname
|
"""OFX downloader."""
from ofxtools.Client import OFXClient, BankAcct
from ofxtools.Types import DateTime
from yapsy.IPlugin import IPlugin
def make_date_kwargs(config):
return {k:DateTime().convert(v) for k,v in config.items() if k.startswith('dt')}
class OFXDownload(IPlugin):
"""OFX plugin class."""
def download(self, config):
"""Setup account info and credentials."""
client = OFXClient(
config['url'],
config['org'],
config['fid'],
version=config['version'],
appid=config['appid'],
appver=config['appver']
)
account = [BankAcct(config['fid'], config['acctnum'], config['type'])]
kwargs = make_date_kwargs(config)
request = client.statement_request(
config['ofxuser'],
config['ofxpswd'],
account,
**kwargs
)
response = client.download(request)
fname = '{}_{}.ofx'.format(config['fid'], config['acctnum'])
with open(fname, 'w') as ofxfile:
print(response.text, file=ofxfile)
return fname
|
Replace bankid with fid to avoid duplicate config options.
|
Replace bankid with fid to avoid duplicate config options.
|
Python
|
unlicense
|
cgiacofei/pyledgertools,cgiacofei/pyledgertools
|
---
+++
@@ -24,7 +24,7 @@
appver=config['appver']
)
- account = [BankAcct(config['bankid'], config['acctnum'], config['type'])]
+ account = [BankAcct(config['fid'], config['acctnum'], config['type'])]
kwargs = make_date_kwargs(config)
request = client.statement_request(
@@ -35,7 +35,7 @@
)
response = client.download(request)
- fname = '{}_{}.ofx'.format(config['bankid'], config['acctnum'])
+ fname = '{}_{}.ofx'.format(config['fid'], config['acctnum'])
with open(fname, 'w') as ofxfile:
print(response.text, file=ofxfile)
|
392f58abf7b163bb34e395f5818daa0a13d05342
|
pyscriptic/tests/instructions_test.py
|
pyscriptic/tests/instructions_test.py
|
from unittest import TestCase
from pyscriptic.instructions import PipetteOp, TransferGroup, PrePostMix
class PipetteOpTests(TestCase):
def setUp(self):
self.mix = PrePostMix(
volume="5:microliter",
speed="1:microliter/second",
repetitions=10,
)
def test_transfer(self):
op = PipetteOp(
groups=[
TransferGroup(
from_well="plate/A1",
to_well="plate/A2",
volume="20:microliter",
aspirate_speed="1:microliter/second",
dispense_speed="1:microliter/second",
mix_before=self.mix,
mix_after=self.mix,
),
],
)
def test_distribute(self):
pass
def test_consolidate(self):
pass
def test_mix(self):
pass
|
from unittest import TestCase
from pyscriptic.instructions import PipetteOp, TransferGroup, PrePostMix
from pyscriptic.submit import pyobj_to_std_types
class PipetteOpTests(TestCase):
def setUp(self):
self.mix = PrePostMix(
volume="5:microliter",
speed="0.5:microliter/second",
repetitions=10,
)
def test_transfer(self):
op = PipetteOp(
groups=[
TransferGroup(
from_well="plate/A1",
to_well="plate/A2",
volume="20:microliter",
aspirate_speed="1:microliter/second",
dispense_speed="2:microliter/second",
mix_before=self.mix,
mix_after=self.mix,
),
],
)
self.assertEqual(
pyobj_to_std_types(op),
{
"op": "pipette",
"groups": [{
"transfer": [{
"from": "plate/A1",
"to": "plate/A2",
"volume": "20:microliter",
"aspirate_speed": "1:microliter/second",
"dispense_speed": "2:microliter/second",
"mix_after": {
"volume": "5:microliter",
"speed": "0.5:microliter/second",
"repetitions": 10,
},
"mix_before": {
"volume": "5:microliter",
"speed": "0.5:microliter/second",
"repetitions": 10,
},
}]
}],
},
)
def test_distribute(self):
pass
def test_consolidate(self):
pass
def test_mix(self):
pass
|
Test conversion of Transfer to standard types works
|
Test conversion of Transfer to standard types works
|
Python
|
bsd-2-clause
|
naderm/pytranscriptic,naderm/pytranscriptic
|
---
+++
@@ -2,12 +2,13 @@
from unittest import TestCase
from pyscriptic.instructions import PipetteOp, TransferGroup, PrePostMix
+from pyscriptic.submit import pyobj_to_std_types
class PipetteOpTests(TestCase):
def setUp(self):
self.mix = PrePostMix(
volume="5:microliter",
- speed="1:microliter/second",
+ speed="0.5:microliter/second",
repetitions=10,
)
@@ -19,11 +20,36 @@
to_well="plate/A2",
volume="20:microliter",
aspirate_speed="1:microliter/second",
- dispense_speed="1:microliter/second",
+ dispense_speed="2:microliter/second",
mix_before=self.mix,
mix_after=self.mix,
),
],
+ )
+ self.assertEqual(
+ pyobj_to_std_types(op),
+ {
+ "op": "pipette",
+ "groups": [{
+ "transfer": [{
+ "from": "plate/A1",
+ "to": "plate/A2",
+ "volume": "20:microliter",
+ "aspirate_speed": "1:microliter/second",
+ "dispense_speed": "2:microliter/second",
+ "mix_after": {
+ "volume": "5:microliter",
+ "speed": "0.5:microliter/second",
+ "repetitions": 10,
+ },
+ "mix_before": {
+ "volume": "5:microliter",
+ "speed": "0.5:microliter/second",
+ "repetitions": 10,
+ },
+ }]
+ }],
+ },
)
def test_distribute(self):
|
5bb4a72f9541fa59fa3770a52da6edb619f5a897
|
submodules-to-glockfile.py
|
submodules-to-glockfile.py
|
#!/usr/bin/python
import re
import subprocess
def main():
source = open(".gitmodules").read()
paths = re.findall(r"path = (.*)", source)
for path in paths:
print "{repo} {sha}".format(
repo = path[7:],
sha = path_sha1(path)
)
def path_sha1(path):
cmd = "cd {} && git rev-parse HEAD".format(path)
sp = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
sha1 = sp.stdout.read()[:-1]
return sha1
if __name__ == "__main__": main()
|
#!/usr/bin/python
import re
import subprocess
def main():
source = open(".gitmodules").read()
paths = re.findall(r"path = (.*)", source)
print "github.com/localhots/satan {}".format(path_sha1("."))
for path in paths:
print "{repo} {sha}".format(
repo = path[7:],
sha = path_sha1(path)
)
def path_sha1(path):
cmd = "cd {} && git rev-parse HEAD".format(path)
sp = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
sha1 = sp.stdout.read()[:-1]
return sha1
if __name__ == "__main__": main()
|
Add satan sha to glockfile script
|
Add satan sha to glockfile script
|
Python
|
mit
|
localhots/satan,localhots/satan,localhots/satan,localhots/satan
|
---
+++
@@ -7,6 +7,7 @@
source = open(".gitmodules").read()
paths = re.findall(r"path = (.*)", source)
+ print "github.com/localhots/satan {}".format(path_sha1("."))
for path in paths:
print "{repo} {sha}".format(
repo = path[7:],
|
e72b6272469c382f14a6732514777aacbd457322
|
rest_framework_json_api/exceptions.py
|
rest_framework_json_api/exceptions.py
|
from django.utils import encoding
from django.utils.translation import ugettext_lazy as _
from rest_framework import status
from rest_framework.exceptions import APIException
from rest_framework.views import exception_handler as drf_exception_handler
from rest_framework_json_api.utils import format_value
def exception_handler(exc, context):
response = drf_exception_handler(exc, context)
errors = []
# handle generic errors. ValidationError('test') in a view for example
if isinstance(response.data, list):
for message in response.data:
errors.append({
'detail': message,
'source': {
'pointer': '/data',
},
'status': encoding.force_text(response.status_code),
})
# handle all errors thrown from serializers
else:
for field, error in response.data.items():
field = format_value(field)
pointer = '/data/attributes/{}'.format(field)
# see if they passed a dictionary to ValidationError manually
if isinstance(error, dict):
errors.append(error)
else:
for message in error:
errors.append({
'detail': message,
'source': {
'pointer': pointer,
},
'status': encoding.force_text(response.status_code),
})
context['view'].resource_name = 'errors'
response.data = errors
return response
class Conflict(APIException):
status_code = status.HTTP_409_CONFLICT
default_detail = _('Conflict.')
|
from django.utils import encoding
from django.utils.translation import ugettext_lazy as _
from rest_framework import status
from rest_framework.exceptions import APIException
from rest_framework.views import exception_handler as drf_exception_handler
from rest_framework_json_api.utils import format_value
def exception_handler(exc, context):
response = drf_exception_handler(exc, context)
errors = []
# handle generic errors. ValidationError('test') in a view for example
if isinstance(response.data, list):
for message in response.data:
errors.append({
'detail': message,
'source': {
'pointer': '/data',
},
'status': encoding.force_text(response.status_code),
})
# handle all errors thrown from serializers
else:
for field, error in response.data.items():
field = format_value(field)
pointer = '/data/attributes/{}'.format(field)
# see if they passed a dictionary to ValidationError manually
if isinstance(error, dict):
errors.append(error)
else:
if isinstance(error, list):
for message in error:
errors.append({
'detail': message,
'source': {
'pointer': pointer,
},
'status': encoding.force_text(response.status_code),
})
else:
errors.append({
'detail': message,
'source': {
'pointer': pointer,
},
'status': encoding.force_text(response.status_code),
})
context['view'].resource_name = 'errors'
response.data = errors
return response
class Conflict(APIException):
status_code = status.HTTP_409_CONFLICT
default_detail = _('Conflict.')
|
Fix for some error messages that were split into several messages
|
Fix for some error messages that were split into several messages
The exception handler expects the error to be a list on line 33. In my
case they were a string, which lead to the split of the string into
multiple errors containing one character
|
Python
|
bsd-2-clause
|
django-json-api/rest_framework_ember,Instawork/django-rest-framework-json-api,leifurhauks/django-rest-framework-json-api,hnakamur/django-rest-framework-json-api,martinmaillard/django-rest-framework-json-api,pombredanne/django-rest-framework-json-api,lukaslundgren/django-rest-framework-json-api,leo-naeka/rest_framework_ember,schtibe/django-rest-framework-json-api,scottfisk/django-rest-framework-json-api,django-json-api/django-rest-framework-json-api,django-json-api/django-rest-framework-json-api,kaldras/django-rest-framework-json-api,leo-naeka/django-rest-framework-json-api,abdulhaq-e/django-rest-framework-json-api,grapo/django-rest-framework-json-api
|
---
+++
@@ -30,7 +30,16 @@
if isinstance(error, dict):
errors.append(error)
else:
- for message in error:
+ if isinstance(error, list):
+ for message in error:
+ errors.append({
+ 'detail': message,
+ 'source': {
+ 'pointer': pointer,
+ },
+ 'status': encoding.force_text(response.status_code),
+ })
+ else:
errors.append({
'detail': message,
'source': {
@@ -38,6 +47,7 @@
},
'status': encoding.force_text(response.status_code),
})
+
context['view'].resource_name = 'errors'
response.data = errors
|
385e9c0b8af79de58efd3cf43b1981b7981d0a53
|
sympy/geometry/__init__.py
|
sympy/geometry/__init__.py
|
"""
A geometry module for the SymPy library. This module contains all of the
entities and functions needed to construct basic geometrical data and to
perform simple informational queries.
Usage:
======
Notes:
======
Currently the geometry module is restricted to the 2-dimensional
Euclidean space.
Examples
========
"""
from sympy.geometry.point import Point
from sympy.geometry.line import Line, Ray, Segment
from sympy.geometry.ellipse import Ellipse, Circle
from sympy.geometry.polygon import Polygon, RegularPolygon, Triangle, rad, deg
from sympy.geometry.util import *
from sympy.geometry.exceptions import *
from sympy.geometry.curve import Curve
|
"""
A geometry module for the SymPy library. This module contains all of the
entities and functions needed to construct basic geometrical data and to
perform simple informational queries.
Usage:
======
Notes:
======
Currently the geometry module is restricted to the 2-dimensional
Euclidean space.
Examples
========
"""
from sympy.geometry.point import Point
from sympy.geometry.line import Line, Ray, Segment
from sympy.geometry.ellipse import Ellipse, Circle
from sympy.geometry.polygon import Polygon, RegularPolygon, Triangle, rad, deg
from sympy.geometry.util import are_similar, centroid, convex_hull, idiff, \
intersection
from sympy.geometry.exceptions import GeometryError
from sympy.geometry.curve import Curve
|
Remove glob imports from sympy.geometry.
|
Remove glob imports from sympy.geometry.
|
Python
|
bsd-3-clause
|
postvakje/sympy,Mitchkoens/sympy,farhaanbukhsh/sympy,sampadsaha5/sympy,kumarkrishna/sympy,MechCoder/sympy,lindsayad/sympy,maniteja123/sympy,yashsharan/sympy,sahilshekhawat/sympy,MechCoder/sympy,rahuldan/sympy,yashsharan/sympy,kevalds51/sympy,Designist/sympy,jaimahajan1997/sympy,emon10005/sympy,skidzo/sympy,mcdaniel67/sympy,kaushik94/sympy,bukzor/sympy,beni55/sympy,Curious72/sympy,lindsayad/sympy,ga7g08/sympy,jaimahajan1997/sympy,amitjamadagni/sympy,sampadsaha5/sympy,AkademieOlympia/sympy,kaichogami/sympy,asm666/sympy,atsao72/sympy,aktech/sympy,hargup/sympy,kumarkrishna/sympy,kaichogami/sympy,emon10005/sympy,Arafatk/sympy,kaushik94/sympy,Sumith1896/sympy,AunShiLord/sympy,sahilshekhawat/sympy,debugger22/sympy,drufat/sympy,Titan-C/sympy,jamesblunt/sympy,wanglongqi/sympy,cccfran/sympy,rahuldan/sympy,dqnykamp/sympy,maniteja123/sympy,jerli/sympy,Designist/sympy,hargup/sympy,asm666/sympy,drufat/sympy,wyom/sympy,madan96/sympy,kevalds51/sympy,mafiya69/sympy,liangjiaxing/sympy,farhaanbukhsh/sympy,Davidjohnwilson/sympy,hargup/sympy,jbbskinny/sympy,shikil/sympy,jamesblunt/sympy,jerli/sympy,ahhda/sympy,shipci/sympy,garvitr/sympy,sunny94/temp,saurabhjn76/sympy,shikil/sympy,skirpichev/omg,meghana1995/sympy,abloomston/sympy,vipulroxx/sympy,Arafatk/sympy,atreyv/sympy,Davidjohnwilson/sympy,atsao72/sympy,drufat/sympy,Gadal/sympy,debugger22/sympy,lidavidm/sympy,mcdaniel67/sympy,ChristinaZografou/sympy,hrashk/sympy,sampadsaha5/sympy,Shaswat27/sympy,Mitchkoens/sympy,sahmed95/sympy,bukzor/sympy,pbrady/sympy,atreyv/sympy,iamutkarshtiwari/sympy,souravsingh/sympy,garvitr/sympy,Shaswat27/sympy,Designist/sympy,yashsharan/sympy,ChristinaZografou/sympy,moble/sympy,debugger22/sympy,pandeyadarsh/sympy,ga7g08/sympy,Sumith1896/sympy,AunShiLord/sympy,maniteja123/sympy,AkademieOlympia/sympy,Davidjohnwilson/sympy,dqnykamp/sympy,Gadal/sympy,shipci/sympy,moble/sympy,kaushik94/sympy,mcdaniel67/sympy,meghana1995/sympy,jamesblunt/sympy,grevutiu-gabriel/sympy,VaibhavAgarwalVA/sympy,garvitr/sympy,pandeyadarsh
/sympy,ga7g08/sympy,sahmed95/sympy,MridulS/sympy,rahuldan/sympy,aktech/sympy,madan96/sympy,skidzo/sympy,oliverlee/sympy,asm666/sympy,dqnykamp/sympy,Sumith1896/sympy,ahhda/sympy,emon10005/sympy,chaffra/sympy,yukoba/sympy,MridulS/sympy,farhaanbukhsh/sympy,jbbskinny/sympy,saurabhjn76/sympy,souravsingh/sympy,sunny94/temp,abloomston/sympy,Vishluck/sympy,kaichogami/sympy,mafiya69/sympy,Arafatk/sympy,Curious72/sympy,mafiya69/sympy,liangjiaxing/sympy,Titan-C/sympy,shikil/sympy,madan96/sympy,abhiii5459/sympy,hrashk/sympy,abhiii5459/sympy,kmacinnis/sympy,Vishluck/sympy,kmacinnis/sympy,jbbskinny/sympy,oliverlee/sympy,MridulS/sympy,jaimahajan1997/sympy,cccfran/sympy,atsao72/sympy,aktech/sympy,souravsingh/sympy,sunny94/temp,beni55/sympy,moble/sympy,ahhda/sympy,MechCoder/sympy,toolforger/sympy,cswiercz/sympy,vipulroxx/sympy,chaffra/sympy,jerli/sympy,AkademieOlympia/sympy,postvakje/sympy,meghana1995/sympy,iamutkarshtiwari/sympy,VaibhavAgarwalVA/sympy,Mitchkoens/sympy,abloomston/sympy,pbrady/sympy,yukoba/sympy,Gadal/sympy,kmacinnis/sympy,pbrady/sympy,lidavidm/sympy,grevutiu-gabriel/sympy,toolforger/sympy,diofant/diofant,kumarkrishna/sympy,ChristinaZografou/sympy,kevalds51/sympy,AunShiLord/sympy,flacjacket/sympy,liangjiaxing/sympy,postvakje/sympy,pandeyadarsh/sympy,oliverlee/sympy,beni55/sympy,shipci/sympy,saurabhjn76/sympy,grevutiu-gabriel/sympy,wanglongqi/sympy,cswiercz/sympy,lidavidm/sympy,abhiii5459/sympy,skidzo/sympy,wanglongqi/sympy,amitjamadagni/sympy,Shaswat27/sympy,cccfran/sympy,Titan-C/sympy,sahmed95/sympy,Vishluck/sympy,wyom/sympy,wyom/sympy,cswiercz/sympy,bukzor/sympy,VaibhavAgarwalVA/sympy,sahilshekhawat/sympy,lindsayad/sympy,yukoba/sympy,Curious72/sympy,iamutkarshtiwari/sympy,atreyv/sympy,toolforger/sympy,vipulroxx/sympy,hrashk/sympy,chaffra/sympy
|
---
+++
@@ -20,6 +20,7 @@
from sympy.geometry.line import Line, Ray, Segment
from sympy.geometry.ellipse import Ellipse, Circle
from sympy.geometry.polygon import Polygon, RegularPolygon, Triangle, rad, deg
-from sympy.geometry.util import *
-from sympy.geometry.exceptions import *
+from sympy.geometry.util import are_similar, centroid, convex_hull, idiff, \
+ intersection
+from sympy.geometry.exceptions import GeometryError
from sympy.geometry.curve import Curve
|
697fcbd5135c9c3610c4131fe36b9a2723be1eeb
|
mappyfile/__init__.py
|
mappyfile/__init__.py
|
# allow high-level functions to be accessed directly from the mappyfile module
from mappyfile.utils import load, loads, find, findall, dumps, write
|
# allow high-level functions to be accessed directly from the mappyfile module
from mappyfile.utils import load, loads, find, findall, dumps, write
__version__ = "0.3.0"
|
Add version to module init
|
Add version to module init
|
Python
|
mit
|
geographika/mappyfile,geographika/mappyfile
|
---
+++
@@ -1,2 +1,4 @@
# allow high-level functions to be accessed directly from the mappyfile module
from mappyfile.utils import load, loads, find, findall, dumps, write
+
+__version__ = "0.3.0"
|
683765c26e0c852d06fd06a491e3906369ae14cd
|
votes/urls.py
|
votes/urls.py
|
from django.conf.urls import include, url
from django.views.generic import TemplateView
from votes.views import VoteView
urlpatterns = [
url(r'^(?P<vote_name>[\w-]+)$', VoteView.as_view()),
]
|
from django.conf.urls import include, url
from django.views.generic import TemplateView
from votes.views import VoteView
urlpatterns = [
url(r'^(?P<vote_name>[\w-]+)$', VoteView.as_view(), name="vote"),
]
|
Add name to vote view URL
|
Add name to vote view URL
|
Python
|
mit
|
kuboschek/jay,kuboschek/jay,OpenJUB/jay,kuboschek/jay,OpenJUB/jay,OpenJUB/jay
|
---
+++
@@ -5,5 +5,5 @@
from votes.views import VoteView
urlpatterns = [
- url(r'^(?P<vote_name>[\w-]+)$', VoteView.as_view()),
+ url(r'^(?P<vote_name>[\w-]+)$', VoteView.as_view(), name="vote"),
]
|
0a60495fc2baef1c5115cd34e2c062c363dfedc8
|
test/streamparse/cli/test_run.py
|
test/streamparse/cli/test_run.py
|
from __future__ import absolute_import, unicode_literals
import argparse
import unittest
from nose.tools import ok_
try:
from unittest.mock import patch
except ImportError:
from mock import patch
from streamparse.cli.run import main, subparser_hook
class RunTestCase(unittest.TestCase):
def test_subparser_hook(self):
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
subparser_hook(subparsers)
subcommands = parser._optionals._actions[1].choices.keys()
ok_('run' in subcommands)
@patch('streamparse.cli.run.run_local_topology', autospec=True)
def test_main_args_passed(self, run_local_mock):
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
subparser_hook(subparsers)
args = parser.parse_args('run -e my_env -n my_topo --ackers 1'.split())
main(args)
run_local_mock.assert_called_with(name='my_topo',
options={'topology.acker.executors': 1},
env_name='my_env',
time=0)
if __name__ == '__main__':
unittest.main()
|
from __future__ import absolute_import, unicode_literals
import argparse
import unittest
from nose.tools import ok_
try:
from unittest.mock import patch
except ImportError:
from mock import patch
from streamparse.cli.run import main, subparser_hook
class RunTestCase(unittest.TestCase):
def test_subparser_hook(self):
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
subparser_hook(subparsers)
subcommands = parser._optionals._actions[1].choices.keys()
ok_('run' in subcommands)
@patch('streamparse.cli.run.run_local_topology', autospec=True)
def test_main_args_passed(self, run_local_mock):
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
subparser_hook(subparsers)
args = parser.parse_args('run -e my_env -n my_topo --ackers 1'.split())
main(args)
run_local_mock.assert_called_with(name='my_topo',
options={'topology.acker.executors': 1},
env_name='my_env',
time=0,
config_file=None)
if __name__ == '__main__':
unittest.main()
|
Fix mock needing config_file variable
|
Fix mock needing config_file variable
|
Python
|
apache-2.0
|
Parsely/streamparse,Parsely/streamparse
|
---
+++
@@ -34,7 +34,8 @@
run_local_mock.assert_called_with(name='my_topo',
options={'topology.acker.executors': 1},
env_name='my_env',
- time=0)
+ time=0,
+ config_file=None)
if __name__ == '__main__':
|
b665da9bdebb6736eef08f782d7361a34dcd30c5
|
bin/import_media.py
|
bin/import_media.py
|
#!/usr/bin/python
import sys
sys.path.append('.')
from vacker.importer import Importer
importer = Importer()
# Need to obtain from arguments
importer.import_directory('../sample_photos')
|
#!/usr/bin/python
import sys
sys.path.append('.')
import argparse
from vacker.importer import Importer
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('--directory', '-d', type=str, dest='directory',
help='Directory to import', required=True)
args = parser.parse_args()
importer = Importer()
# Need to obtain from arguments
importer.import_directory(args.directory)
|
Update imported to use arg parser
|
Update imported to use arg parser
|
Python
|
apache-2.0
|
MatthewJohn/vacker,MatthewJohn/vacker,MatthewJohn/vacker
|
---
+++
@@ -2,9 +2,19 @@
import sys
sys.path.append('.')
+import argparse
from vacker.importer import Importer
+
+parser = argparse.ArgumentParser(description='Process some integers.')
+parser.add_argument('--directory', '-d', type=str, dest='directory',
+ help='Directory to import', required=True)
+
+args = parser.parse_args()
+
+
+
importer = Importer()
# Need to obtain from arguments
-importer.import_directory('../sample_photos')
+importer.import_directory(args.directory)
|
07f81307d10062cc15704a09015e542197edcafa
|
doxylink/setup.py
|
doxylink/setup.py
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as stream:
long_desc = stream.read()
requires = ['Sphinx>=0.6']
setup(
name='sphinxcontrib-doxylink',
version='0.3',
url='http://packages.python.org/sphinxcontrib-doxylink',
download_url='http://pypi.python.org/pypi/sphinxcontrib-doxylink',
license='BSD',
author='Matt Williams',
author_email='matt@milliams.com',
description='Sphinx extension doxylink',
long_description=long_desc,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Documentation',
'Topic :: Utilities',
],
platforms='any',
packages=find_packages(),
include_package_data=True,
install_requires=requires,
namespace_packages=['sphinxcontrib'],
)
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as stream:
long_desc = stream.read()
requires = ['Sphinx>=0.6', 'pyparsing']
setup(
name='sphinxcontrib-doxylink',
version='0.3',
url='http://packages.python.org/sphinxcontrib-doxylink',
download_url='http://pypi.python.org/pypi/sphinxcontrib-doxylink',
license='BSD',
author='Matt Williams',
author_email='matt@milliams.com',
description='Sphinx extension doxylink',
long_description=long_desc,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Documentation',
'Topic :: Utilities',
],
platforms='any',
packages=find_packages(),
include_package_data=True,
install_requires=requires,
namespace_packages=['sphinxcontrib'],
)
|
Add pyparsing to the dependencies.
|
Add pyparsing to the dependencies.
|
Python
|
bsd-2-clause
|
sphinx-contrib/spelling,sphinx-contrib/spelling
|
---
+++
@@ -5,7 +5,7 @@
with open('README.rst') as stream:
long_desc = stream.read()
-requires = ['Sphinx>=0.6']
+requires = ['Sphinx>=0.6', 'pyparsing']
setup(
name='sphinxcontrib-doxylink',
|
f5aa886ed3a38971fe49c115221c849eae1a8e10
|
byceps/util/instances.py
|
byceps/util/instances.py
|
# -*- coding: utf-8 -*-
"""
byceps.util.instances
~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
class ReprBuilder(object):
"""An instance representation builder."""
def __init__(self, instance):
self.instance = instance
self.attributes = []
def add_with_lookup(self, name):
"""Add the attribute with its value looked up on the instance."""
value = getattr(self.instance, name)
return self.add(name, repr(value))
def add(self, name, value):
"""Add the attribute with the given value."""
return self.add_custom('{}={}'.format(name, value))
def add_custom(self, value):
"""Add a custom value."""
self.attributes.append(value)
return self
def build(self):
"""Assemble the full textual representation."""
class_name = type(self.instance).__name__
attributes = ', '.join(self.attributes)
return '<{}({})>'.format(class_name, attributes)
|
# -*- coding: utf-8 -*-
"""
byceps.util.instances
~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
class ReprBuilder(object):
"""An instance representation builder."""
def __init__(self, instance):
self.instance = instance
self.attributes = []
def add_with_lookup(self, name):
"""Add the attribute with its value looked up on the instance."""
value = getattr(self.instance, name)
return self.add(name, value)
def add(self, name, value):
"""Add the attribute with the given value."""
return self.add_custom('{}={!r}'.format(name, value))
def add_custom(self, value):
"""Add a custom value."""
self.attributes.append(value)
return self
def build(self):
"""Assemble the full textual representation."""
class_name = type(self.instance).__name__
attributes = ', '.join(self.attributes)
return '<{}({})>'.format(class_name, attributes)
|
Apply `repr()` to values passed to `ReprBuilder.add`, too
|
Apply `repr()` to values passed to `ReprBuilder.add`, too
|
Python
|
bsd-3-clause
|
m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps
|
---
+++
@@ -19,11 +19,11 @@
def add_with_lookup(self, name):
"""Add the attribute with its value looked up on the instance."""
value = getattr(self.instance, name)
- return self.add(name, repr(value))
+ return self.add(name, value)
def add(self, name, value):
"""Add the attribute with the given value."""
- return self.add_custom('{}={}'.format(name, value))
+ return self.add_custom('{}={!r}'.format(name, value))
def add_custom(self, value):
"""Add a custom value."""
|
2d4310cab029269cd53c776a3da238fa375e2ee1
|
DebianChangesBot/mailparsers/accepted_upload.py
|
DebianChangesBot/mailparsers/accepted_upload.py
|
# -*- coding: utf-8 -*-
from DebianChangesBot import MailParser
from DebianChangesBot.messages import AcceptedUploadMessage
class AcceptedUploadParser(MailParser):
@staticmethod
def parse(headers, body):
msg = AcceptedUploadMessage()
mapping = {
'Source': 'package',
'Version': 'version',
'Distribution': 'distribution',
'Urgency': 'urgency',
'Changed-By': 'by',
'Closes': 'closes',
}
for line in body:
for field, target in mapping.iteritems():
if line.startswith('%s: ' % field):
val = line[len(field) + 2:]
setattr(msg, target, val)
del mapping[field]
break
# If we have found all the field, stop looking
if len(mapping) == 0:
break
try:
if msg.closes:
msg.closes = [int(x) for x in msg.closes.split(' ')]
except ValueError:
return
return msg
|
# -*- coding: utf-8 -*-
from DebianChangesBot import MailParser
from DebianChangesBot.messages import AcceptedUploadMessage
class AcceptedUploadParser(MailParser):
@staticmethod
def parse(headers, body):
if headers.get('List-Id', '') != '<debian-devel-changes.lists.debian.org>':
return
msg = AcceptedUploadMessage()
mapping = {
'Source': 'package',
'Version': 'version',
'Distribution': 'distribution',
'Urgency': 'urgency',
'Changed-By': 'by',
'Closes': 'closes',
}
for line in body:
for field, target in mapping.iteritems():
if line.startswith('%s: ' % field):
val = line[len(field) + 2:]
setattr(msg, target, val)
del mapping[field]
break
# If we have found all the field, stop looking
if len(mapping) == 0:
break
try:
if msg.closes:
msg.closes = [int(x) for x in msg.closes.split(' ')]
except ValueError:
return
return msg
|
Check accepted uploads List-Id, otherwise we get false +ves from bugs-dist
|
Check accepted uploads List-Id, otherwise we get false +ves from bugs-dist
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk>
|
Python
|
agpl-3.0
|
xtaran/debian-devel-changes-bot,sebastinas/debian-devel-changes-bot,xtaran/debian-devel-changes-bot,lamby/debian-devel-changes-bot,lamby/debian-devel-changes-bot,lamby/debian-devel-changes-bot
|
---
+++
@@ -7,6 +7,9 @@
@staticmethod
def parse(headers, body):
+ if headers.get('List-Id', '') != '<debian-devel-changes.lists.debian.org>':
+ return
+
msg = AcceptedUploadMessage()
mapping = {
|
4180680c9964661d3edd9eafad23b8d90699170d
|
fuzzyfinder/main.py
|
fuzzyfinder/main.py
|
# -*- coding: utf-8 -*-
import re
from . import export
@export
def fuzzyfinder(input, collection, accessor=lambda x: x):
"""
Args:
input (str): A partial string which is typically entered by a user.
collection (iterable): A collection of strings which will be filtered
based on the `input`.
Returns:
suggestions (generator): A generator object that produces a list of
suggestions narrowed down from `collection` using the `input`.
"""
suggestions = []
input = str(input) if not isinstance(input, str) else input
pat = '.*?'.join(map(re.escape, input))
regex = re.compile(pat)
for item in collection:
r = regex.search(accessor(item))
if r:
suggestions.append((len(r.group()), r.start(), item))
return (z for _, _, z in sorted(suggestions))
|
# -*- coding: utf-8 -*-
import re
from . import export
@export
def fuzzyfinder(input, collection, accessor=lambda x: x):
"""
Args:
input (str): A partial string which is typically entered by a user.
collection (iterable): A collection of strings which will be filtered
based on the `input`.
Returns:
suggestions (generator): A generator object that produces a list of
suggestions narrowed down from `collection` using the `input`.
"""
suggestions = []
input = str(input) if not isinstance(input, str) else input
pat = '.*?'.join(map(re.escape, input))
regex = re.compile(pat)
for item in collection:
r = regex.search(accessor(item))
if r:
suggestions.append((len(r.group()), r.start(), accessor(item), item))
return (z[-1] for z in sorted(suggestions))
|
Use accessor to use in sort.
|
Use accessor to use in sort.
|
Python
|
bsd-3-clause
|
amjith/fuzzyfinder
|
---
+++
@@ -21,6 +21,6 @@
for item in collection:
r = regex.search(accessor(item))
if r:
- suggestions.append((len(r.group()), r.start(), item))
+ suggestions.append((len(r.group()), r.start(), accessor(item), item))
- return (z for _, _, z in sorted(suggestions))
+ return (z[-1] for z in sorted(suggestions))
|
e80d4b35472e692f05e986116a5910e1a9612f74
|
build/android/pylib/gtest/gtest_config.py
|
build/android/pylib/gtest/gtest_config.py
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Configuration file for android gtest suites."""
# Add new suites here before upgrading them to the stable list below.
EXPERIMENTAL_TEST_SUITES = [
'TestWebKitAPI',
'sandbox_linux_unittests',
'webkit_unit_tests',
]
# Do not modify this list without approval of an android owner.
# This list determines which suites are run by default, both for local
# testing and on android trybots running on commit-queue.
STABLE_TEST_SUITES = [
'android_webview_unittests',
'base_unittests',
'cc_unittests',
'components_unittests',
'content_unittests',
'gpu_unittests',
'ipc_tests',
'media_unittests',
'net_unittests',
'sql_unittests',
'sync_unit_tests',
'ui_unittests',
'unit_tests',
'webkit_compositor_bindings_unittests',
]
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Configuration file for android gtest suites."""
# Add new suites here before upgrading them to the stable list below.
EXPERIMENTAL_TEST_SUITES = [
'sandbox_linux_unittests',
]
# Do not modify this list without approval of an android owner.
# This list determines which suites are run by default, both for local
# testing and on android trybots running on commit-queue.
STABLE_TEST_SUITES = [
'TestWebKitAPI',
'android_webview_unittests',
'base_unittests',
'cc_unittests',
'components_unittests',
'content_unittests',
'gpu_unittests',
'ipc_tests',
'media_unittests',
'net_unittests',
'sql_unittests',
'sync_unit_tests',
'ui_unittests',
'unit_tests',
'webkit_compositor_bindings_unittests',
'webkit_unit_tests',
]
|
Move andorid webkit tests to main waterfall and CQ
|
Move andorid webkit tests to main waterfall and CQ
They have been stable and fast on FYI bots for a week.
TBR=yfriedman@chromium.org
Review URL: https://codereview.chromium.org/12093034
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@179266 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,zcbenz/cefode-chromium,bright-sparks/chromium-spacewalk,dednal/chromium.src,markYoungH/chromium.src,jaruba/chromium.src,pozdnyakov/chromium-crosswalk,M4sse/chromium.src,krieger-od/nwjs_chromium.src,pozdnyakov/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,zcbenz/cefode-chromium,mogoweb/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,ondra-novak/chromium.src,nacl-webkit/chrome_deps,dushu1203/chromium.src,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,anirudhSK/chromium,Jonekee/chromium.src,Just-D/chromium-1,dednal/chromium.src,Fireblend/chromium-crosswalk,dushu1203/chromium.src,ltilve/chromium,hgl888/chromium-crosswalk,zcbenz/cefode-chromium,Just-D/chromium-1,zcbenz/cefode-chromium,pozdnyakov/chromium-crosswalk,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,timopulkkinen/BubbleFish,markYoungH/chromium.src,jaruba/chromium.src,Chilledheart/chromium,littlstar/chromium.src,timopulkkinen/BubbleFish,Just-D/chromium-1,markYoungH/chromium.src,Chilledheart/chromium,ltilve/chromium,axinging/chromium-crosswalk,anirudhSK/chromium,mogoweb/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,dednal/chromium.src,markYoungH/chromium.src,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,axinging/chromium-crosswalk,jaruba/chromium.src,mogoweb/chromium-crosswalk,Chilledheart/chromium,PeterWangIntel/chromium-crosswalk,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,littlstar/chromium.src,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk-efl,nacl-webkit/chrome_deps,mogoweb/chromium-cros
swalk,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,hujiajie/pa-chromium,mogoweb/chromium-crosswalk,dushu1203/chromium.src,nacl-webkit/chrome_deps,Jonekee/chromium.src,ChromiumWebApps/chromium,Jonekee/chromium.src,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,M4sse/chromium.src,krieger-od/nwjs_chromium.src,timopulkkinen/BubbleFish,PeterWangIntel/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,dushu1203/chromium.src,dednal/chromium.src,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,patrickm/chromium.src,hgl888/chromium-crosswalk,ChromiumWebApps/chromium,ChromiumWebApps/chromium,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,bright-sparks/chromium-spacewalk,anirudhSK/chromium,axinging/chromium-crosswalk,anirudhSK/chromium,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,bright-sparks/chromium-spacewalk,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,bright-sparks/chromium-spacewalk,Just-D/chromium-1,jaruba/chromium.src,hgl888/chromium-crosswalk,pozdnyakov/chromium-crosswalk,pozdnyakov/chromium-crosswalk,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,dednal/chromium.src,patrickm/chromium.src,jaruba/chromium.src,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,hujiajie/pa-chromium,hgl888/chromium-crosswalk,hujiajie/pa-chromium,mogoweb/chromium-crosswalk,fujunwei/chromium-crosswalk,fujunwei/chromium-crosswalk,dushu1203/chromium.src,ChromiumWebApps/chromium,Chilledheart/chromium,mogoweb/chromium-crosswalk,Pluto-tv/chromium-crosswalk,nacl-webkit/chrome_deps,Chilledheart/chromium,Jonekee/chromium.src,ChromiumWebApps/chromium,nacl-webkit/chrome_deps,Jonekee/chromium.src,patrickm/chromium.src,ltilve/chromium,bright-sparks/chromium-spacewalk,d
ednal/chromium.src,Jonekee/chromium.src,anirudhSK/chromium,zcbenz/cefode-chromium,ltilve/chromium,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,pozdnyakov/chromium-crosswalk,hujiajie/pa-chromium,ltilve/chromium,fujunwei/chromium-crosswalk,timopulkkinen/BubbleFish,timopulkkinen/BubbleFish,Pluto-tv/chromium-crosswalk,anirudhSK/chromium,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,crosswalk-project/chromium-crosswalk-efl,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,nacl-webkit/chrome_deps,Fireblend/chromium-crosswalk,M4sse/chromium.src,dednal/chromium.src,timopulkkinen/BubbleFish,dushu1203/chromium.src,littlstar/chromium.src,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,markYoungH/chromium.src,nacl-webkit/chrome_deps,anirudhSK/chromium,zcbenz/cefode-chromium,Fireblend/chromium-crosswalk,hujiajie/pa-chromium,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,timopulkkinen/BubbleFish,PeterWangIntel/chromium-crosswalk,patrickm/chromium.src,hujiajie/pa-chromium,Just-D/chromium-1,jaruba/chromium.src,Jonekee/chromium.src,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,ondra-novak/chromium.src,Fireblend/chromium-crosswalk,anirudhSK/chromium,zcbenz/cefode-chromium,littlstar/chromium.src,M4sse/chromium.src,fujunwei/chromium-crosswalk,zcbenz/cefode-chromium,hujiajie/pa-chromium,Jonekee/chromium.src,chuan9/chromium-crosswalk,M4sse/chromium.src,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,hujiajie/pa-chromium,littlstar/chromium.src,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,Chilledheart/chromium,ChromiumWebApps/chromium,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,ChromiumWebApps/chromium,TheTypoMaster/chromium-crosswalk,littlstar/chromium.src,ondra-novak/chromium.src,krieger-od/nwjs_ch
romium.src,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,markYoungH/chromium.src,anirudhSK/chromium,M4sse/chromium.src,hujiajie/pa-chromium,anirudhSK/chromium,M4sse/chromium.src,patrickm/chromium.src,ChromiumWebApps/chromium,Chilledheart/chromium,ltilve/chromium,ltilve/chromium,patrickm/chromium.src,hujiajie/pa-chromium,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,hujiajie/pa-chromium,dushu1203/chromium.src,Jonekee/chromium.src,M4sse/chromium.src,nacl-webkit/chrome_deps,timopulkkinen/BubbleFish,timopulkkinen/BubbleFish,markYoungH/chromium.src,dednal/chromium.src,krieger-od/nwjs_chromium.src,littlstar/chromium.src,patrickm/chromium.src,Just-D/chromium-1,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,dednal/chromium.src,PeterWangIntel/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,ltilve/chromium,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,patrickm/chromium.src,Fireblend/chromium-crosswalk,M4sse/chromium.src,Just-D/chromium-1,ondra-novak/chromium.src,anirudhSK/chromium,M4sse/chromium.src,fujunwei/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,zcbenz/cefode-chromium,Pluto-tv/chromium-crosswalk,ondra-novak/chromium.src,hgl888/chromium-crosswalk,zcbenz/cefode-chromium,bright-sparks/chromium-spacewalk,bright-sparks/chromium-spacewalk,Pluto-tv/chromium-crosswalk,ChromiumWebApps/chromium,anirudhSK/chromium,PeterWangIntel/chromium-crosswalk,pozdnyakov/chromium-crosswalk,markYoungH/chromium.src,chuan9/chromium-crosswalk,ondra-novak/chromium.src,mogoweb/chromium-crosswalk,nacl-webkit/chrome_deps,pozdnyakov/chromium-crosswalk,pozdnyakov/chromium-crosswalk,patrickm/chromium.src,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,timopulkkinen/BubbleFish,bright-sparks/chromium-spacewalk,nacl-webkit/chrome_deps,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,zcbenz/cefode-chro
mium,dushu1203/chromium.src,ondra-novak/chromium.src,fujunwei/chromium-crosswalk,pozdnyakov/chromium-crosswalk,axinging/chromium-crosswalk,ChromiumWebApps/chromium,ltilve/chromium,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,Chilledheart/chromium,hgl888/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Jonekee/chromium.src,axinging/chromium-crosswalk,nacl-webkit/chrome_deps,axinging/chromium-crosswalk,timopulkkinen/BubbleFish
|
---
+++
@@ -6,15 +6,14 @@
# Add new suites here before upgrading them to the stable list below.
EXPERIMENTAL_TEST_SUITES = [
- 'TestWebKitAPI',
'sandbox_linux_unittests',
- 'webkit_unit_tests',
]
# Do not modify this list without approval of an android owner.
# This list determines which suites are run by default, both for local
# testing and on android trybots running on commit-queue.
STABLE_TEST_SUITES = [
+ 'TestWebKitAPI',
'android_webview_unittests',
'base_unittests',
'cc_unittests',
@@ -29,4 +28,5 @@
'ui_unittests',
'unit_tests',
'webkit_compositor_bindings_unittests',
+ 'webkit_unit_tests',
]
|
17ef821757df8eadfe8bf4769e57503625464f7b
|
bucketeer/test/test_commit.py
|
bucketeer/test/test_commit.py
|
import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
    """Integration tests for bucketeer.commit.

    These tests talk to real S3 through boto (credentials come from the
    usual boto environment/config).  setUp provisions a bucket, a local
    directory and an empty file; tearDown removes all of them, so every
    test starts from a clean slate.
    """

    # Constants - TODO move to config file
    # The 'global' statement makes the three assignments below bind
    # module-level names (not class attributes), which is why the methods
    # can reference them unqualified.
    global existing_bucket, test_dir, test_file
    existing_bucket = 'bucket.exists'
    test_dir = 'bucketeer_test_dir'
    test_file = 'bucketeer_test_file'

    def setUp(self):
        """Create the S3 bucket, the local test directory and an empty file."""
        connection = boto.connect_s3()

        # Create a bucket to test on existing bucket
        bucket = connection.create_bucket(existing_bucket)

        # Create directory to house test files
        os.makedirs(test_dir)

        # Create test file
        open(test_dir + '/' + test_file, 'w').close()
        return

    def tearDown(self):
        """Delete every uploaded object, the bucket, the file and the directory."""
        connection = boto.connect_s3()

        # Remove all files uploaded to s3
        bucket = connection.get_bucket(existing_bucket)
        for s3_file in bucket.list():
            bucket.delete_key(s3_file.key)

        # Remove bucket created to test on existing bucket
        bucket = connection.delete_bucket(existing_bucket)

        # Remove test file
        os.remove(test_dir + '/' + test_file)

        # Remove directory created to house test files
        os.rmdir(test_dir)
        return

    def testMain(self):
        # Smoke test: the commit module imported successfully and is truthy.
        self.assertTrue(commit)

    def testNewFileUploadExistingBucket(self):
        # commit_to_s3 should report success when uploading a fresh file
        # into a bucket that already exists.
        result = commit.commit_to_s3(existing_bucket, test_dir)
        self.assertTrue(result)


if __name__ == '__main__':
    unittest.main()
|
import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
    """Integration tests for bucketeer.commit against live S3 (via boto).

    Each test gets a freshly created bucket, local directory and empty
    file (setUp), all of which are removed again afterwards (tearDown).
    """

    # Constants - TODO move to config file
    # 'global' at class scope binds these as module-level names rather than
    # class attributes, letting the methods below read them unqualified.
    global existing_bucket, test_dir, test_file
    existing_bucket = 'bucket.exists'
    test_dir = 'bucketeer_test_dir'
    test_file = 'bucketeer_test_file'

    def setUp(self):
        """Provision the bucket, the local test directory and an empty file."""
        connection = boto.connect_s3()

        # Create a bucket to test on existing bucket
        bucket = connection.create_bucket(existing_bucket)

        # Create directory to house test files
        os.makedirs(test_dir)

        # Create test file
        open(test_dir + '/' + test_file, 'w').close()
        return

    def tearDown(self):
        """Tear down all S3 and local fixtures created by setUp."""
        connection = boto.connect_s3()

        # Remove all files uploaded to s3
        bucket = connection.get_bucket(existing_bucket)
        for s3_file in bucket.list():
            bucket.delete_key(s3_file.key)

        # Remove bucket created to test on existing bucket
        bucket = connection.delete_bucket(existing_bucket)

        # Remove test file
        os.remove(test_dir + '/' + test_file)

        # Remove directory created to house test files
        os.rmdir(test_dir)
        return

    def testMain(self):
        # Smoke test: the commit module imported successfully and is truthy.
        self.assertTrue(commit)

    def testNewFileUploadToExistingBucket(self):
        # Uploading a new file to an already-existing bucket should succeed.
        result = commit.commit_to_s3(existing_bucket, test_dir)
        self.assertTrue(result)


if __name__ == '__main__':
    unittest.main()
|
Refactor test name to include the word 'To'
|
Refactor test name to include the word 'To'
Previous: testNewFileUploadExistingBucket
Current: testNewFileUploadToExistingBucket
|
Python
|
mit
|
mgarbacz/bucketeer
|
---
+++
@@ -45,7 +45,7 @@
def testMain(self):
self.assertTrue(commit)
- def testNewFileUploadExistingBucket(self):
+ def testNewFileUploadToExistingBucket(self):
result = commit.commit_to_s3(existing_bucket, test_dir)
self.assertTrue(result)
|
b39ea7848141037c7829a01d789591d91a81398e
|
ceph_medic/tests/test_main.py
|
ceph_medic/tests/test_main.py
|
import pytest
import ceph_medic.main
class TestMain(object):
    """Tests for the ceph_medic.main entry point."""

    def test_main(self):
        # Smoke test: the module imported and is truthy.
        assert ceph_medic.main

    def test_invalid_ssh_config(self, capsys):
        """A nonexistent --ssh-config path must make Medic exit with an error."""
        argv = ["ceph-medic", "--ssh-config", "/does/not/exist"]
        with pytest.raises(SystemExit):
            ceph_medic.main.Medic(argv)
        out = capsys.readouterr()
        # The error message is written to stdout (captured by capsys).
        assert 'the given ssh config path does not exist' in out.out
|
import pytest
import ceph_medic.main
class TestMain(object):
    """Tests for the ceph_medic.main entry point."""

    def test_main(self):
        # Smoke test: the module imported and is truthy.
        assert ceph_medic.main

    def test_invalid_ssh_config(self, capsys):
        """A nonexistent --ssh-config path must make Medic exit with an error."""
        argv = ["ceph-medic", "--ssh-config", "/does/not/exist"]
        with pytest.raises(SystemExit):
            ceph_medic.main.Medic(argv)
        out = capsys.readouterr()
        # The error message is written to stdout (captured by capsys).
        assert 'the given ssh config path does not exist' in out.out

    def test_valid_ssh_config(self, capsys):
        """An existing ssh config is accepted silently and stored in config."""
        # NOTE(review): assumes /etc/ssh/ssh_config exists on the test host.
        ssh_config = '/etc/ssh/ssh_config'
        argv = ["ceph-medic", "--ssh-config", ssh_config]
        ceph_medic.main.Medic(argv)
        out = capsys.readouterr()
        # A valid path produces no output ...
        assert out.out == ''
        # ... and is recorded on the global config object.
        assert ssh_config == ceph_medic.main.ceph_medic.config.ssh_config
|
Add test for valid ssh_config
|
tests/main: Add test for valid ssh_config
Signed-off-by: Zack Cerza <d7cdf09fc0f0426e98c9978ee42da5d61fa54986@redhat.com>
|
Python
|
mit
|
alfredodeza/ceph-doctor
|
---
+++
@@ -12,3 +12,11 @@
ceph_medic.main.Medic(argv)
out = capsys.readouterr()
assert 'the given ssh config path does not exist' in out.out
+
+ def test_valid_ssh_config(self, capsys):
+ ssh_config = '/etc/ssh/ssh_config'
+ argv = ["ceph-medic", "--ssh-config", ssh_config]
+ ceph_medic.main.Medic(argv)
+ out = capsys.readouterr()
+ assert out.out == ''
+ assert ssh_config == ceph_medic.main.ceph_medic.config.ssh_config
|
36998345ef900286527a3896f70cf4a85414ccf8
|
rohrpost/main.py
|
rohrpost/main.py
|
import json
from functools import partial
from . import handlers # noqa
from .message import send_error
from .registry import HANDLERS
# Fields that every incoming rohrpost message must contain.
REQUIRED_FIELDS = ['type', 'id']

try:
    DECODE_ERRORS = (json.JSONDecodeError, TypeError)
except AttributeError:
    # Python 3.3 and 3.4 raise a ValueError instead of json.JSONDecodeError
    DECODE_ERRORS = (ValueError, TypeError)


def handle_rohrpost_message(message):
    """
    Handling of a rohrpost message will validate the required format:
    A valid JSON object including at least an "id" and "type" field.

    It then hands off further handling to the registered handler (if any).
    """
    # Pre-bind send_error's first three positional parameters; at this point
    # neither a message id nor a handler name is known, hence the two Nones.
    # NOTE(review): this relies on send_error's positional parameter order
    # being (message, message_id, handler, error) -- confirm in message.py.
    _send_error = partial(send_error, message, None, None)
    if not message.content['text']:
        return _send_error('Received empty message.')

    try:
        request = json.loads(message.content['text'])
    except DECODE_ERRORS as e:
        return _send_error('Could not decode JSON message. Error: {}'.format(str(e)))

    # json.loads also accepts scalars/arrays; only objects are valid here.
    if not isinstance(request, dict):
        return _send_error('Expected a JSON object as message.')

    for field in REQUIRED_FIELDS:
        if field not in request:
            return _send_error("Missing required field '{}'.".format(field))

    if not request['type'] in HANDLERS:
        # Now the id and type are known, so report them back to the sender.
        return send_error(
            message, request['id'], request['type'],
            "Unknown message type '{}'.".format(request['type']),
        )

    # Dispatch to the handler registered for this message type.
    HANDLERS[request['type']](message, request)
|
import json
from functools import partial
from . import handlers # noqa
from .message import send_error
from .registry import HANDLERS
# Fields that every incoming rohrpost message must contain.
REQUIRED_FIELDS = ['type', 'id']

try:
    DECODE_ERRORS = (json.JSONDecodeError, TypeError)
except AttributeError:
    # Python 3.3 and 3.4 raise a ValueError instead of json.JSONDecodeError
    DECODE_ERRORS = (ValueError, TypeError)


def handle_rohrpost_message(message):
    """
    Handling of a rohrpost message will validate the required format:
    A valid JSON object including at least an "id" and "type" field.

    It then hands off further handling to the registered handler (if any).
    """
    # Pre-bind the known send_error keyword arguments; no message id or
    # handler name is available yet at this stage of validation.
    _send_error = partial(send_error, message=message, message_id=None, handler=None)
    if not message.content['text']:
        return _send_error(error='Received empty message.')

    try:
        request = json.loads(message.content['text'])
    except DECODE_ERRORS as e:
        return _send_error(error='Could not decode JSON message. Error: {}'.format(str(e)))

    # json.loads also accepts scalars/arrays; only objects are valid here.
    if not isinstance(request, dict):
        return _send_error(error='Expected a JSON object as message.')

    for field in REQUIRED_FIELDS:
        if field not in request:
            return _send_error(error="Missing required field '{}'.".format(field))

    if not request['type'] in HANDLERS:
        # Now the id and type are known, so report them back to the sender.
        return send_error(
            message=message, message_id=request['id'], handler=request['type'],
            error="Unknown message type '{}'.".format(request['type']),
        )

    # Dispatch to the handler registered for this message type.
    HANDLERS[request['type']](message, request)
|
Use keyword arguments in code
|
Use keyword arguments in code
|
Python
|
mit
|
axsemantics/rohrpost,axsemantics/rohrpost
|
---
+++
@@ -21,26 +21,26 @@
A valid JSON object including at least an "id" and "type" field.
It then hands off further handling to the registered handler (if any).
"""
- _send_error = partial(send_error, message, None, None)
+ _send_error = partial(send_error, message=message, message_id=None, handler=None)
if not message.content['text']:
- return _send_error('Received empty message.')
+ return _send_error(error='Received empty message.')
try:
request = json.loads(message.content['text'])
except DECODE_ERRORS as e:
- return _send_error('Could not decode JSON message. Error: {}'.format(str(e)))
+ return _send_error(error='Could not decode JSON message. Error: {}'.format(str(e)))
if not isinstance(request, dict):
- return _send_error('Expected a JSON object as message.')
+ return _send_error(error='Expected a JSON object as message.')
for field in REQUIRED_FIELDS:
if field not in request:
- return _send_error("Missing required field '{}'.".format(field))
+ return _send_error(error="Missing required field '{}'.".format(field))
if not request['type'] in HANDLERS:
return send_error(
- message, request['id'], request['type'],
- "Unknown message type '{}'.".format(request['type']),
+ message=message, message_id=request['id'], handler=request['type'],
+ error="Unknown message type '{}'.".format(request['type']),
)
HANDLERS[request['type']](message, request)
|
d9a205dce1f67151ff896909413bb7128e54a4ec
|
dduplicated/cli.py
|
dduplicated/cli.py
|
# The client of DDuplicated tool.
from os import path as opath, getcwd
from pprint import pprint
from sys import argv
from dduplicated import commands
def get_paths(params):
    """Resolve each entry of *params* against the current working directory
    and return only those that are real (non-symlink) directories."""
    base = getcwd()
    candidates = (opath.join(base, entry) for entry in params)
    return [
        candidate
        for candidate in candidates
        if opath.exists(candidate)
        and opath.isdir(candidate)
        and not opath.islink(candidate)
    ]
def main():
    """CLI entry point: dispatch argv to the detect/delete/link commands."""
    params = argv
    processed_files = []

    # Remove the command name
    del params[0]

    if len(params) == 0 or "help" in params:
        commands.help()
        exit()
    elif "detect" in params:
        processed_files = commands.detect(get_paths(params))
    elif "delete" in params:
        # Deletion operates on the duplicates found by a fresh detect pass.
        processed_files = commands.delete(commands.detect(get_paths(params)))
    elif "link" in params:
        # Linking likewise re-detects duplicates first.
        processed_files = commands.link(commands.detect(get_paths(params)))
    else:
        commands.help()
        exit()

    if len(processed_files) > 0:
        pprint(processed_files)
    else:
        print("No duplicates found")
    print("Great! Bye!")
    exit(0)
|
# The client of DDuplicated tool.
from os import path as opath, getcwd
from pprint import pprint
from sys import argv
from dduplicated import commands
def get_paths(params):
    """Return the absolute form of every name in *params* that exists under
    the current working directory as a plain (non-symlink) directory."""
    selected = []
    cwd = getcwd()
    for name in params:
        full = opath.join(cwd, name)
        if not opath.exists(full):
            continue
        if opath.isdir(full) and not opath.islink(full):
            selected.append(full)
    return selected
def main():
    """CLI entry point: dispatch argv to the detect/delete/link commands."""
    params = argv
    processed_files = []

    # Remove the command name
    del params[0]

    if len(params) == 0 or "help" in params:
        commands.help()
        exit(0)
    elif "detect" in params:
        processed_files = commands.detect(get_paths(params))
    elif "delete" in params:
        # Deletion operates on the duplicates found by a fresh detect pass.
        processed_files = commands.delete(commands.detect(get_paths(params)))
    elif "link" in params:
        # Linking likewise re-detects duplicates first.
        processed_files = commands.link(commands.detect(get_paths(params)))
    else:
        commands.help()
        exit(0)

    if len(processed_files) > 0:
        pprint(processed_files)
    else:
        print("No duplicates found")
    print("Great! Bye!")
    exit(0)
|
Fix in output to help command.
|
Fix in output to help command.
Signed-off-by: messiasthi <8562fc1efba9a3c99753c749fdfb1b6932b70fbf@gmail.com>
|
Python
|
mit
|
messiasthi/dduplicated-cli
|
---
+++
@@ -23,7 +23,7 @@
if len(params) == 0 or "help" in params:
commands.help()
- exit()
+ exit(0)
elif "detect" in params:
processed_files = commands.detect(get_paths(params))
@@ -36,7 +36,7 @@
else:
commands.help()
- exit()
+ exit(0)
if len(processed_files) > 0:
pprint(processed_files)
|
194557f236016ec0978e5cc465ba40e7b8dff714
|
s3backup/main.py
|
s3backup/main.py
|
# -*- coding: utf-8 -*-
from s3backup.clients import compare, LocalSyncClient
def sync():
    """Synchronise the local Notebooks folder with its stored index.

    Compares the folder's current on-disk state against the last saved
    index, prints the differences, then refreshes the index.
    """
    import os  # local import keeps this fix self-contained

    # Resolve the notebooks folder relative to the running user's home
    # directory instead of hard-coding one specific user's absolute path.
    target_folder = os.path.expanduser('~/Notebooks')

    local_client = LocalSyncClient(target_folder)
    current = local_client.get_current_state()
    index = local_client.get_index_state()
    print(list(compare(current, index)))
    local_client.update_index()
|
# -*- coding: utf-8 -*-
import os
from s3backup.clients import compare, LocalSyncClient
def sync():
    """Diff the user's local Notebooks folder against its saved index,
    print the differences, and refresh the index afterwards."""
    notebooks_dir = os.path.expanduser('~/Notebooks')
    client = LocalSyncClient(notebooks_dir)

    on_disk = client.get_current_state()
    recorded = client.get_index_state()
    print(list(compare(on_disk, recorded)))

    client.update_index()
|
Use expanduser to prevent hardcoding username
|
Use expanduser to prevent hardcoding username
|
Python
|
mit
|
MichaelAquilina/s3backup,MichaelAquilina/s3backup
|
---
+++
@@ -1,10 +1,14 @@
# -*- coding: utf-8 -*-
+
+import os
from s3backup.clients import compare, LocalSyncClient
def sync():
- local_client = LocalSyncClient('/home/michael/Notebooks')
+ target_folder = os.path.expanduser('~/Notebooks')
+
+ local_client = LocalSyncClient(target_folder)
current = local_client.get_current_state()
index = local_client.get_index_state()
print(list(compare(current, index)))
|
a4a37a783efcfd1cbb21acc29077c8096a0a0198
|
spacy/lang/pl/__init__.py
|
spacy/lang/pl/__init__.py
|
# coding: utf8
from __future__ import unicode_literals
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from .stop_words import STOP_WORDS
from ..tokenizer_exceptions import BASE_EXCEPTIONS
from ...language import Language
from ...attrs import LANG
from ...util import update_exc
class Polish(Language):
    """spaCy Language subclass for Polish ('pl')."""
    lang = 'pl'

    class Defaults(Language.Defaults):
        # Copy the shared getters so the base class's dict is not mutated.
        lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
        lex_attr_getters[LANG] = lambda text: 'pl'

        # Only the shared base exceptions are used; no Polish-specific
        # tokenizer exceptions are defined here.
        tokenizer_exceptions = update_exc(BASE_EXCEPTIONS)
        stop_words = set(STOP_WORDS)


__all__ = ['Polish']
|
# coding: utf8
from __future__ import unicode_literals
from .stop_words import STOP_WORDS
from ..tokenizer_exceptions import BASE_EXCEPTIONS
from ...language import Language
from ...attrs import LANG
from ...util import update_exc
class Polish(Language):
    """spaCy Language subclass for Polish ('pl')."""
    lang = 'pl'

    class Defaults(Language.Defaults):
        # Copy the shared getters so the base class's dict is not mutated.
        lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
        lex_attr_getters[LANG] = lambda text: 'pl'

        # Only the shared base exceptions are used; no Polish-specific
        # tokenizer exceptions are defined here.
        tokenizer_exceptions = update_exc(BASE_EXCEPTIONS)
        stop_words = set(STOP_WORDS)


__all__ = ['Polish']
|
Remove import from non-existing module
|
Remove import from non-existing module
|
Python
|
mit
|
honnibal/spaCy,spacy-io/spaCy,explosion/spaCy,recognai/spaCy,spacy-io/spaCy,explosion/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,aikramer2/spaCy,recognai/spaCy,spacy-io/spaCy,honnibal/spaCy,honnibal/spaCy,recognai/spaCy,aikramer2/spaCy,honnibal/spaCy,explosion/spaCy,aikramer2/spaCy,explosion/spaCy,aikramer2/spaCy,spacy-io/spaCy,recognai/spaCy,spacy-io/spaCy,aikramer2/spaCy,aikramer2/spaCy,recognai/spaCy,recognai/spaCy
|
---
+++
@@ -1,7 +1,6 @@
# coding: utf8
from __future__ import unicode_literals
-from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from .stop_words import STOP_WORDS
from ..tokenizer_exceptions import BASE_EXCEPTIONS
|
530b1b09b7fd6215822283c22c126ce7c18ac9a9
|
services/rdio.py
|
services/rdio.py
|
from werkzeug.urls import url_decode
from oauthlib.oauth1.rfc5849 import SIGNATURE_TYPE_BODY
import foauth.providers
class Rdio(foauth.providers.OAuth1):
    """OAuth1 provider definition for the Rdio API."""

    # General info about the provider
    provider_url = 'http://www.rdio.com/'
    docs_url = 'http://developer.rdio.com/docs/REST/'
    category = 'Music'

    # URLs to interact with the API
    request_token_url = 'http://api.rdio.com/oauth/request_token'
    authorize_url = None # Provided when the request token is granted
    access_token_url = 'http://api.rdio.com/oauth/access_token'
    api_domain = 'api.rdio.com'

    available_permissions = [
        (None, 'access and manage your music'),
    ]
    https = False
    # Sign requests in the body rather than the Authorization header.
    signature_type = SIGNATURE_TYPE_BODY

    def parse_token(self, content):
        # Override standard token request to also get the authorization URL
        data = url_decode(content)
        if 'login_url' in data:
            self.authorize_url = data['login_url']
        return super(Rdio, self).parse_token(content)

    def get_user_id(self, key):
        """Return the Rdio user key of the authenticated user as unicode."""
        r = self.api(key, self.api_domain, u'/1/', method='POST', data={
            'method': 'currentUser',
        })
        # Python 2 module: uses the 'unicode' builtin and attribute-style
        # r.json (pre-1.0 requests behaviour).
        return unicode(r.json[u'result'][u'key'])
|
from werkzeug.urls import url_decode
import foauth.providers
class Rdio(foauth.providers.OAuth1):
    """OAuth1 provider definition for the Rdio API (default signing)."""

    # General info about the provider
    provider_url = 'http://www.rdio.com/'
    docs_url = 'http://developer.rdio.com/docs/REST/'
    category = 'Music'

    # URLs to interact with the API
    request_token_url = 'http://api.rdio.com/oauth/request_token'
    authorize_url = None # Provided when the request token is granted
    access_token_url = 'http://api.rdio.com/oauth/access_token'
    api_domain = 'api.rdio.com'

    available_permissions = [
        (None, 'access and manage your music'),
    ]
    https = False

    def parse_token(self, content):
        # Override standard token request to also get the authorization URL
        data = url_decode(content)
        if 'login_url' in data:
            self.authorize_url = data['login_url']
        return super(Rdio, self).parse_token(content)

    def get_user_id(self, key):
        """Return the Rdio user key of the authenticated user as unicode."""
        r = self.api(key, self.api_domain, u'/1/', method='POST', data={
            'method': 'currentUser',
        })
        # Python 2 module: uses the 'unicode' builtin and attribute-style
        # r.json (pre-1.0 requests behaviour).
        return unicode(r.json[u'result'][u'key'])
|
Allow Rdio to use default signature handling
|
Allow Rdio to use default signature handling
|
Python
|
bsd-3-clause
|
foauth/foauth.org,foauth/foauth.org,foauth/foauth.org
|
---
+++
@@ -1,5 +1,4 @@
from werkzeug.urls import url_decode
-from oauthlib.oauth1.rfc5849 import SIGNATURE_TYPE_BODY
import foauth.providers
@@ -21,7 +20,6 @@
]
https = False
- signature_type = SIGNATURE_TYPE_BODY
def parse_token(self, content):
# Override standard token request to also get the authorization URL
|
7486f423d018aaf53af94bc8af8bde6d46e73e71
|
class4/exercise6.py
|
class4/exercise6.py
|
from getpass import getpass
from netmiko import ConnectHandler
def main():
    """Run 'show arp' over SSH on three lab devices and print each output."""
    # One shared password for all devices, prompted without echo.
    password = getpass()

    # All three devices sit behind the same public IP and are distinguished
    # only by the forwarded SSH port (22, 8022, 9822).
    pynet_rtr1 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 22}
    pynet_rtr2 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 8022}
    pynet_jnpr_srx1 = {'device_type': 'juniper', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 9822}

    for ssh in pynet_rtr1, pynet_rtr2, pynet_jnpr_srx1:
        ssh_connection = ConnectHandler(**ssh)
        output = ssh_connection.send_command('show arp')
        # Python 2 print statement.
        print output

if __name__ == "__main__":
    main()
|
# Use Netmiko to execute 'show arp' on pynet-rtr1, pynet-rtr2, and juniper-srx.
from getpass import getpass
from netmiko import ConnectHandler
def main():
    """Run 'show arp' over SSH on three lab devices and print each output."""
    # One shared password for all devices, prompted without echo.
    password = getpass()

    # All three devices sit behind the same public IP and are distinguished
    # only by the forwarded SSH port (22, 8022, 9822).
    pynet_rtr1 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 22}
    pynet_rtr2 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 8022}
    pynet_jnpr_srx1 = {'device_type': 'juniper', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 9822}

    for ssh in pynet_rtr1, pynet_rtr2, pynet_jnpr_srx1:
        ssh_connection = ConnectHandler(**ssh)
        output = ssh_connection.send_command('show arp')
        # Python 2 print statement.
        print output

if __name__ == "__main__":
    main()
|
Use Netmiko to execute 'show arp' on pynet-rtr1, pynet-rtr2, and juniper-srx.
|
Use Netmiko to execute 'show arp' on pynet-rtr1, pynet-rtr2, and juniper-srx.
|
Python
|
apache-2.0
|
linkdebian/pynet_course
|
---
+++
@@ -1,3 +1,4 @@
+# Use Netmiko to execute 'show arp' on pynet-rtr1, pynet-rtr2, and juniper-srx.
from getpass import getpass
from netmiko import ConnectHandler
|
3decbd1e235a6a43541bb8e9846ea1d08bec1ef8
|
tools/linter_lib/pyflakes.py
|
tools/linter_lib/pyflakes.py
|
import argparse
from typing import List
from zulint.linters import run_pyflakes
def check_pyflakes(files: List[str], options: argparse.Namespace) -> bool:
    """Run pyflakes over *files*, suppressing known-noisy warnings.

    Each suppress pattern is a (filename-substring, message-substring)
    pair; an empty filename ('') matches every file.  When options.full
    is set, nothing is suppressed.  Returns run_pyflakes' boolean result.
    """
    suppress_patterns = [
        ("scripts/lib/pythonrc.py", "imported but unused"),
        # Intentionally imported by zerver/lib/webhooks/common.py
        ('', "'zerver.lib.exceptions.UnexpectedWebhookEventType' imported but unused"),

        # Our ipython startup pythonrc file intentionally imports *
        ("scripts/lib/pythonrc.py",
         " import *' used; unable to detect undefined names"),

        # Settings files legitimately use star imports and re-exports.
        ("settings.py", "settings import *' used; unable to detect undefined names"),
        ("settings.py", "'from .prod_settings_template import *' used; unable to detect undefined names"),
        ("settings.py", "may be undefined, or defined from star imports"),
        ("settings.py", "settings.*' imported but unused"),
        ("settings.py", "'.prod_settings_template.*' imported but unused"),

        # Sphinx adds `tags` specially to the environment when running conf.py.
        ("docs/conf.py", "undefined name 'tags'"),
    ]
    if options.full:
        suppress_patterns = []
    return run_pyflakes(files, options, suppress_patterns)
|
import argparse
from typing import List
from zulint.linters import run_pyflakes
def check_pyflakes(files: List[str], options: argparse.Namespace) -> bool:
    """Run pyflakes over *files*, suppressing known-noisy warnings.

    Each suppress pattern is a (filename-substring, message-substring)
    pair; an empty filename ('') matches every file.  When options.full
    is set, nothing is suppressed.  Returns run_pyflakes' boolean result.
    """
    suppress_patterns = [
        ("scripts/lib/pythonrc.py", "imported but unused"),
        # Intentionally imported by zerver/lib/webhooks/common.py
        ('', "'zerver.lib.exceptions.UnexpectedWebhookEventType' imported but unused"),

        # Our ipython startup pythonrc file intentionally imports *
        ("scripts/lib/pythonrc.py",
         " import *' used; unable to detect undefined names"),

        # Settings files legitimately use star imports and re-exports.
        ("settings.py", "settings import *' used; unable to detect undefined names"),
        ("settings.py", "'from .prod_settings_template import *' used; unable to detect undefined names"),
        ("settings.py", "settings.*' imported but unused"),
        ("settings.py", "'.prod_settings_template.*' imported but unused"),

        # Sphinx adds `tags` specially to the environment when running conf.py.
        ("docs/conf.py", "undefined name 'tags'"),
    ]
    if options.full:
        suppress_patterns = []
    return run_pyflakes(files, options, suppress_patterns)
|
Remove settings exemption for possibly undefined star imports.
|
lint: Remove settings exemption for possibly undefined star imports.
Signed-off-by: Anders Kaseorg <dfdb7392591db597bc41cf266a9c3bc12a2706e5@zulip.com>
|
Python
|
apache-2.0
|
timabbott/zulip,eeshangarg/zulip,synicalsyntax/zulip,eeshangarg/zulip,showell/zulip,timabbott/zulip,synicalsyntax/zulip,kou/zulip,zulip/zulip,hackerkid/zulip,brainwane/zulip,punchagan/zulip,timabbott/zulip,showell/zulip,brainwane/zulip,brainwane/zulip,eeshangarg/zulip,kou/zulip,timabbott/zulip,hackerkid/zulip,zulip/zulip,timabbott/zulip,zulip/zulip,showell/zulip,zulip/zulip,shubhamdhama/zulip,shubhamdhama/zulip,synicalsyntax/zulip,shubhamdhama/zulip,punchagan/zulip,timabbott/zulip,showell/zulip,zulip/zulip,zulip/zulip,hackerkid/zulip,punchagan/zulip,synicalsyntax/zulip,andersk/zulip,shubhamdhama/zulip,eeshangarg/zulip,punchagan/zulip,rht/zulip,brainwane/zulip,hackerkid/zulip,punchagan/zulip,punchagan/zulip,hackerkid/zulip,kou/zulip,shubhamdhama/zulip,shubhamdhama/zulip,kou/zulip,andersk/zulip,rht/zulip,hackerkid/zulip,hackerkid/zulip,showell/zulip,brainwane/zulip,brainwane/zulip,synicalsyntax/zulip,synicalsyntax/zulip,andersk/zulip,synicalsyntax/zulip,andersk/zulip,eeshangarg/zulip,rht/zulip,showell/zulip,andersk/zulip,eeshangarg/zulip,andersk/zulip,rht/zulip,punchagan/zulip,shubhamdhama/zulip,kou/zulip,zulip/zulip,brainwane/zulip,rht/zulip,kou/zulip,showell/zulip,timabbott/zulip,eeshangarg/zulip,andersk/zulip,kou/zulip,rht/zulip,rht/zulip
|
---
+++
@@ -18,7 +18,6 @@
("settings.py", "settings import *' used; unable to detect undefined names"),
("settings.py", "'from .prod_settings_template import *' used; unable to detect undefined names"),
- ("settings.py", "may be undefined, or defined from star imports"),
("settings.py", "settings.*' imported but unused"),
("settings.py", "'.prod_settings_template.*' imported but unused"),
|
5231efb00409ffd0b1b0e1cf111d81782468cdd3
|
wye/regions/forms.py
|
wye/regions/forms.py
|
from django import forms
from django.core.exceptions import ValidationError
from wye.profiles.models import UserType
from . import models
class RegionalLeadForm(forms.ModelForm):
    """ModelForm for RegionalLead with cross-field lead/location validation."""

    class Meta:
        model = models.RegionalLead
        exclude = ()

    def clean(self):
        # NOTE(review): cleaned_data['location'] / ['leads'] raise KeyError
        # when those fields failed their own validation and are therefore
        # absent from cleaned_data -- consider .get(...) guards.
        location = self.cleaned_data['location']
        error_message = []
        for u in self.cleaned_data['leads']:
            if not u.profile:
                error_message.append('Profile for user %s not found' % (u))
            elif u.profile.location != location:
                error_message.append(
                    "User %s doesn't belong to region %s" % (u, location))
        if error_message:
            # Report every collected problem at once.
            raise ValidationError(error_message)

    def save(self, force_insert=False, force_update=False, commit=True):
        # Tag every selected lead's profile with the 'lead' user type.
        # NOTE(review): the instance is created with commit=False and never
        # saved here, and the 'commit' argument is ignored -- confirm that
        # callers save the returned instance themselves.
        m = super(RegionalLeadForm, self).save(commit=False)
        for u in self.cleaned_data['leads']:
            u.profile.usertype.add(UserType.objects.get(slug='lead'))
        return m


class LocationForm(forms.ModelForm):
    """Plain ModelForm exposing every Location field."""

    class Meta:
        model = models.Location
        exclude = ()


class StateForm(forms.ModelForm):
    """Plain ModelForm exposing every State field."""

    class Meta:
        model = models.State
        exclude = ()
|
from django import forms
from django.core.exceptions import ValidationError
from wye.profiles.models import UserType
from . import models
class RegionalLeadForm(forms.ModelForm):
    """ModelForm for RegionalLead with cross-field lead/location validation."""

    class Meta:
        model = models.RegionalLead
        exclude = ()

    def clean(self):
        error_message = []
        # Only cross-validate when both fields passed their own validation;
        # otherwise the keys are missing from cleaned_data.
        if (self.cleaned_data.get('location', '') and
                self.cleaned_data.get('leads', '')):
            location = self.cleaned_data['location']
            for u in self.cleaned_data['leads']:
                if not u.profile:
                    error_message.append('Profile for user %s not found' % (u))
                elif u.profile.location != location:
                    error_message.append(
                        "User %s doesn't belong to region %s" % (u, location))
        if error_message:
            # Report every collected problem at once.
            raise ValidationError(error_message)

    def save(self, force_insert=False, force_update=False, commit=True):
        # Tag every selected lead's profile with the 'lead' user type.
        # NOTE(review): the instance is created with commit=False and never
        # saved here, and the 'commit' argument is ignored -- confirm that
        # callers save the returned instance themselves.
        m = super(RegionalLeadForm, self).save(commit=False)
        for u in self.cleaned_data['leads']:
            u.profile.usertype.add(UserType.objects.get(slug='lead'))
        return m


class LocationForm(forms.ModelForm):
    """Plain ModelForm exposing every Location field."""

    class Meta:
        model = models.Location
        exclude = ()


class StateForm(forms.ModelForm):
    """Plain ModelForm exposing every State field."""

    class Meta:
        model = models.State
        exclude = ()
|
Handle empty location and leads data
|
Handle empty location and leads data
|
Python
|
mit
|
shankig/wye,harisibrahimkv/wye,shankisg/wye,shankisg/wye,shankisg/wye,harisibrahimkv/wye,pythonindia/wye,pythonindia/wye,shankig/wye,DESHRAJ/wye,harisibrahimkv/wye,pythonindia/wye,shankig/wye,shankig/wye,shankisg/wye,DESHRAJ/wye,harisibrahimkv/wye,DESHRAJ/wye,DESHRAJ/wye,pythonindia/wye
|
---
+++
@@ -13,14 +13,16 @@
exclude = ()
def clean(self):
- location = self.cleaned_data['location']
error_message = []
- for u in self.cleaned_data['leads']:
- if not u.profile:
- error_message.append('Profile for user %s not found' % (u))
- elif u.profile.location != location:
- error_message.append(
- "User %s doesn't belong to region %s" % (u, location))
+ if (self.cleaned_data.get('location', '') and
+ self.cleaned_data.get('leads', '')):
+ location = self.cleaned_data['location']
+ for u in self.cleaned_data['leads']:
+ if not u.profile:
+ error_message.append('Profile for user %s not found' % (u))
+ elif u.profile.location != location:
+ error_message.append(
+ "User %s doesn't belong to region %s" % (u, location))
if error_message:
raise ValidationError(error_message)
|
e1514fa5bcc35df74295c254df65e8e99dc289a1
|
speeches/util.py
|
speeches/util.py
|
from speeches.tasks import transcribe_speech
from django.forms.widgets import SplitDateTimeWidget
"""Common utility functions/classes
Things that are needed by multiple bits of code but are specific enough to
this project not to be in a separate python package"""
def start_transcribing_speech(speech):
    """Kick off a celery task to transcribe a speech"""
    # We only do anything if there's no text already
    if not speech.text:
        # If someone is adding a new audio file and there's already a task
        # We need to clear it
        if speech.celery_task_id:
            # NOTE(review): 'celery' is never imported in this module (only
            # transcribe_speech is), so this branch raises NameError when a
            # task id is present -- confirm and add the missing import.
            celery.task.control.revoke(speech.celery_task_id)
        # Now we can start a new one
        result = transcribe_speech.delay(speech.id)
        # Finally, we can remember the new task in the model
        speech.celery_task_id = result.task_id
        speech.save()
class BootstrapSplitDateTimeWidget(SplitDateTimeWidget):
    """
    A Widget that splits datetime input into two <input type="text"> boxes and styles with Bootstrap
    """

    def __init__(self, attrs=None, date_format=None, time_format=None):
        # Pass-through constructor; kept only to pin the argument names.
        super(BootstrapSplitDateTimeWidget, self).__init__(attrs, date_format, time_format)

    def format_output(self, rendered_widgets):
        """Override the output formatting to return widgets with some Bootstrap niceness"""
        output = ''
        # rendered_widgets holds the date widget first, then the time widget;
        # each is wrapped in a Bootstrap "input-append" group with an icon.
        for i, widget in enumerate(rendered_widgets):
            output += '<div class="input-append">'
            output += widget
            if i == 0:
                output += '<span class="add-on"><i class="icon-calendar"></i></span>'
            else:
                output += '<span class="add-on"><i class="icon-time"></i></span>'
            output += '</div>'
        return output
|
from speeches.tasks import transcribe_speech
"""Common utility functions/classes
Things that are needed by multiple bits of code but are specific enough to
this project not to be in a separate python package"""
def start_transcribing_speech(speech):
    """Kick off a celery task to transcribe a speech"""
    # We only do anything if there's no text already
    if not speech.text:
        # If someone is adding a new audio file and there's already a task
        # We need to clear it
        if speech.celery_task_id:
            # NOTE(review): 'celery' is never imported in this module (only
            # transcribe_speech is), so this branch raises NameError when a
            # task id is present -- confirm and add the missing import.
            celery.task.control.revoke(speech.celery_task_id)
        # Now we can start a new one
        result = transcribe_speech.delay(speech.id)
        # Finally, we can remember the new task in the model
        speech.celery_task_id = result.task_id
        speech.save()
|
Remove BootstrapSplitDateTimeWidget as it's no longer needed
|
Remove BootstrapSplitDateTimeWidget as it's no longer needed
|
Python
|
agpl-3.0
|
opencorato/sayit,opencorato/sayit,opencorato/sayit,opencorato/sayit
|
---
+++
@@ -1,5 +1,4 @@
from speeches.tasks import transcribe_speech
-from django.forms.widgets import SplitDateTimeWidget
"""Common utility functions/classes
Things that are needed by multiple bits of code but are specific enough to
@@ -18,27 +17,3 @@
# Finally, we can remember the new task in the model
speech.celery_task_id = result.task_id
speech.save()
-
-class BootstrapSplitDateTimeWidget(SplitDateTimeWidget):
- """
- A Widget that splits datetime input into two <input type="text"> boxes and styles with Bootstrap
- """
-
- def __init__(self, attrs=None, date_format=None, time_format=None):
- super(BootstrapSplitDateTimeWidget, self).__init__(attrs, date_format, time_format)
-
- def format_output(self, rendered_widgets):
- """Override the output formatting to return widgets with some Bootstrap niceness"""
-
- output = ''
-
- for i, widget in enumerate(rendered_widgets):
- output += '<div class="input-append">'
- output += widget
- if i == 0:
- output += '<span class="add-on"><i class="icon-calendar"></i></span>'
- else:
- output += '<span class="add-on"><i class="icon-time"></i></span>'
- output += '</div>'
-
- return output
|
ab0fd99e1c2c336cd5ce68e5fdb8a58384bfa794
|
elasticsearch.py
|
elasticsearch.py
|
#!/usr/bin/env python
import json
import requests
ES_HOST = 'localhost'
ES_PORT = '9200'
ELASTICSEARCH = 'http://{0}:{1}'.format(ES_HOST, ES_PORT)
def find_indices():
"""Find indices created by logstash."""
url = ELASTICSEARCH + '/_search'
r = requests.get(url, params={'_q': '_index like logstash%'})
return sorted(res['_index'] for res in r.json()['hits']['hits'])
def get_number_of(loglevel, index):
"""Retrieve the number of logs with level ``loglevel``."""
url = ELASTICSEARCH + '/' + index + '/_search'
r = requests.get(url, data=json.dumps({
'query': {
'query_string': {
'query': loglevel,
'fields': ['os_level', 'message']
}
}
}))
return r.json()['hits']['total']
def main():
latest = find_indices()[-1]
num_errors = get_number_of('ERROR', latest)
num_warnings = get_number_of('WARN*', latest)
print 'metric int NUMBER_OF_LOG_ERRORS {0}'.format(num_errors)
print 'metric int NUMBER_OF_LOG_WARNINGS {0}'.format(num_warnings)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import json
import requests
ES_HOST = 'localhost'
ES_PORT = '9200'
ELASTICSEARCH = 'http://{0}:{1}'.format(ES_HOST, ES_PORT)
def find_indices():
"""Find indices created by logstash."""
url = ELASTICSEARCH + '/_search'
r = requests.get(url, params={'_q': '_index like logstash%'})
return sorted(res['_index'] for res in r.json()['hits']['hits'])
def get_number_of(loglevel, index):
"""Retrieve the number of logs with level ``loglevel``."""
url = ELASTICSEARCH + '/' + index + '/_search'
r = requests.get(url, data=json.dumps({
'query': {
'query_string': {
'query': loglevel,
'fields': ['os_level', 'message']
}
}
}))
return r.json()['hits']['total']
def main():
indices = find_indices()
if not indices:
return
latest = indices[-1]
num_errors = get_number_of('ERROR', latest)
num_warnings = get_number_of('WARN*', latest)
print 'metric int NUMBER_OF_LOG_ERRORS {0}'.format(num_errors)
print 'metric int NUMBER_OF_LOG_WARNINGS {0}'.format(num_warnings)
if __name__ == '__main__':
main()
|
Handle the case where there are no logs
|
Handle the case where there are no logs
|
Python
|
apache-2.0
|
mancdaz/rpc-openstack,busterswt/rpc-openstack,npawelek/rpc-maas,git-harry/rpc-openstack,sigmavirus24/rpc-openstack,jpmontez/rpc-openstack,mattt416/rpc-openstack,xeregin/rpc-openstack,busterswt/rpc-openstack,stevelle/rpc-openstack,xeregin/rpc-openstack,miguelgrinberg/rpc-openstack,cfarquhar/rpc-openstack,xeregin/rpc-openstack,BjoernT/rpc-openstack,cloudnull/rpc-maas,sigmavirus24/rpc-openstack,claco/rpc-openstack,cloudnull/rpc-openstack,prometheanfire/rpc-openstack,major/rpc-openstack,nrb/rpc-openstack,npawelek/rpc-maas,andymcc/rpc-openstack,stevelle/rpc-openstack,hughsaunders/rpc-openstack,stevelle/rpc-openstack,cloudnull/rpc-maas,cloudnull/rpc-maas,galstrom21/rpc-openstack,byronmccollum/rpc-openstack,prometheanfire/rpc-openstack,andymcc/rpc-openstack,nrb/rpc-openstack,mancdaz/rpc-openstack,robb-romans/rpc-openstack,major/rpc-openstack,rcbops/rpc-openstack,busterswt/rpc-openstack,byronmccollum/rpc-openstack,darrenchan/rpc-openstack,jpmontez/rpc-openstack,cfarquhar/rpc-maas,hughsaunders/rpc-openstack,sigmavirus24/rpc-openstack,miguelgrinberg/rpc-openstack,briancurtin/rpc-maas,cfarquhar/rpc-openstack,shannonmitchell/rpc-openstack,andymcc/rpc-openstack,miguelgrinberg/rpc-openstack,shannonmitchell/rpc-openstack,sigmavirus24/rpc-openstack,jpmontez/rpc-openstack,darrenchan/rpc-openstack,BjoernT/rpc-openstack,claco/rpc-openstack,darrenchan/rpc-openstack,jacobwagner/rpc-openstack,cfarquhar/rpc-maas,git-harry/rpc-openstack,npawelek/rpc-maas,cfarquhar/rpc-maas,xeregin/rpc-openstack,byronmccollum/rpc-openstack,briancurtin/rpc-maas,mattt416/rpc-openstack,mattt416/rpc-openstack,jacobwagner/rpc-openstack,claco/rpc-openstack,nrb/rpc-openstack,briancurtin/rpc-maas,galstrom21/rpc-openstack,robb-romans/rpc-openstack,rcbops/rpc-openstack,cloudnull/rpc-openstack,darrenchan/rpc-openstack
|
---
+++
@@ -29,7 +29,10 @@
def main():
- latest = find_indices()[-1]
+ indices = find_indices()
+ if not indices:
+ return
+ latest = indices[-1]
num_errors = get_number_of('ERROR', latest)
num_warnings = get_number_of('WARN*', latest)
print 'metric int NUMBER_OF_LOG_ERRORS {0}'.format(num_errors)
|
48cc6633a6020114f5b5eeaaf53ddb08085bfae5
|
models/settings.py
|
models/settings.py
|
from openedoo_project import db
from openedoo_project import config
class Setting(db.Model):
__tablename__ = 'module_employee_site_setting'
__table_args__ = {'extend_existing': True}
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
name = db.Column(db.Text)
def serialize(self):
return {
'id': self.id,
'name': self.name
}
def get_existing_name(self):
setting = self.query.limit(1).first()
return setting
def update(self, data):
setting = self.get_existing_name()
setting.name = data['name']
return db.session.commit()
|
from openedoo_project import db
class Setting(db.Model):
__tablename__ = 'module_employee_site_setting'
__table_args__ = {'extend_existing': True}
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
name = db.Column(db.Text)
def serialize(self):
return {
'id': self.id,
'name': self.name
}
def get_existing_name(self):
setting = self.query.limit(1).first()
return setting
def update(self, data):
setting = self.get_existing_name()
setting.name = data['name']
return db.session.commit()
|
Remove Unused config imported from openedoo_project, pylint.
|
Remove Unused config imported from openedoo_project, pylint.
|
Python
|
mit
|
openedoo/module_employee,openedoo/module_employee,openedoo/module_employee
|
---
+++
@@ -1,5 +1,4 @@
from openedoo_project import db
-from openedoo_project import config
class Setting(db.Model):
|
cb2746f60cd63019b41eebedb148bfc5a25c1ba0
|
indra/preassembler/make_wm_ontmap.py
|
indra/preassembler/make_wm_ontmap.py
|
from indra.sources import eidos
from indra.sources.hume.make_hume_tsv import make_file
from indra.java_vm import autoclass
eidos_package = 'org.clulab.wm.eidos'
if __name__ == '__main__':
bbn_path = 'hume_examaples.tsv'
make_file(bbn_path)
sofia_path = 'sofia_examples.tsv'
om = autoclass(eidos_package + '.apps.OntologyMapper')
eidos = autoclass(eidos_package + '.EidosSystem')
es = eidos(autoclass('java.lang.Object')())
example_weight = 0.8
parent_weight = 0.1
topn = 10
table_str = om.mapOntologies(es, bbn_path, sofia_path, example_weight,
parent_weight, topn)
|
import sys
from indra.sources import eidos
from indra.sources.hume.make_hume_tsv import make_file as mht
from indra.sources.sofia.make_sofia_tsv import make_file as mst
from indra.java_vm import autoclass
eidos_package = 'org.clulab.wm.eidos'
if __name__ == '__main__':
sofia_ont_path = sys.argv[1]
hume_path = 'hume_ontology_examaples.tsv'
mht(hume_path)
sofia_path = 'sofia_ontology_examples.tsv'
mst(sofia_ont_path, sofia_path)
om = autoclass(eidos_package + '.apps.OntologyMapper')
eidos = autoclass(eidos_package + '.EidosSystem')
es = eidos(autoclass('java.lang.Object')())
example_weight = 0.8
parent_weight = 0.1
topn = 10
table_str = om.mapOntologies(es, hume_path, sofia_path, example_weight,
parent_weight, topn)
|
Update make WM ontmap with SOFIA
|
Update make WM ontmap with SOFIA
|
Python
|
bsd-2-clause
|
pvtodorov/indra,johnbachman/indra,pvtodorov/indra,johnbachman/indra,sorgerlab/belpy,bgyori/indra,sorgerlab/indra,pvtodorov/indra,johnbachman/belpy,pvtodorov/indra,sorgerlab/indra,sorgerlab/belpy,johnbachman/belpy,johnbachman/indra,bgyori/indra,sorgerlab/belpy,sorgerlab/indra,bgyori/indra,johnbachman/belpy
|
---
+++
@@ -1,13 +1,17 @@
+import sys
from indra.sources import eidos
-from indra.sources.hume.make_hume_tsv import make_file
+from indra.sources.hume.make_hume_tsv import make_file as mht
+from indra.sources.sofia.make_sofia_tsv import make_file as mst
from indra.java_vm import autoclass
eidos_package = 'org.clulab.wm.eidos'
if __name__ == '__main__':
- bbn_path = 'hume_examaples.tsv'
- make_file(bbn_path)
- sofia_path = 'sofia_examples.tsv'
+ sofia_ont_path = sys.argv[1]
+ hume_path = 'hume_ontology_examaples.tsv'
+ mht(hume_path)
+ sofia_path = 'sofia_ontology_examples.tsv'
+ mst(sofia_ont_path, sofia_path)
om = autoclass(eidos_package + '.apps.OntologyMapper')
eidos = autoclass(eidos_package + '.EidosSystem')
@@ -16,5 +20,5 @@
example_weight = 0.8
parent_weight = 0.1
topn = 10
- table_str = om.mapOntologies(es, bbn_path, sofia_path, example_weight,
+ table_str = om.mapOntologies(es, hume_path, sofia_path, example_weight,
parent_weight, topn)
|
a3ec10088f379c25e0ab9c7b7e29abd2bf952806
|
karld/iter_utils.py
|
karld/iter_utils.py
|
from functools import partial
from itertools import imap
from itertools import islice
from operator import itemgetter
def yield_getter_of(getter_maker, iterator):
"""
Iteratively map iterator over the result of getter_maker.
:param getter_maker: function that returns a getter function.
:param iterator: An iterator.
"""
return imap(getter_maker(), iterator)
def yield_nth_of(nth, iterator):
"""
For an iterator that returns sequences,
yield the nth value of each.
:param nth: Index desired column of each sequence.
:type nth: int
:param iterator: iterator of sequences.
"""
return yield_getter_of(partial(itemgetter, nth), iterator)
def i_batch(max_size, iterable):
"""
Generator that iteratively batches items
to a max size and consumes the items iterable
as each batch is yielded.
:param max_size: Max size of each batch.
:type max_size: int
:param iterable: An iterable
:type iterable: iter
"""
iterable_items = iter(iterable)
while True:
items_batch = tuple(islice(iterable_items, max_size))
if not items_batch:
break
yield items_batch
|
from functools import partial
from itertools import imap
from itertools import islice
from operator import itemgetter
def yield_getter_of(getter_maker, iterator):
"""
Iteratively map iterator over the result of getter_maker.
:param getter_maker: function that returns a getter function.
:param iterator: An iterator.
"""
return imap(getter_maker(), iterator)
def yield_nth_of(nth, iterator):
"""
For an iterator that returns sequences,
yield the nth value of each.
:param nth: Index desired column of each sequence.
:type nth: int
:param iterator: iterator of sequences.
"""
return yield_getter_of(partial(itemgetter, nth), iterator)
def i_batch(max_size, iterable):
"""
Generator that iteratively batches items
to a max size and consumes the items iterable
as each batch is yielded.
:param max_size: Max size of each batch.
:type max_size: int
:param iterable: An iterable
:type iterable: iter
"""
iterable_items = iter(iterable)
for items_batch in iter(lambda: tuple(islice(iterable_items, max_size)),
tuple()):
yield items_batch
|
Use iter's sentinel arg instead of infinite loop
|
Use iter's sentinel arg instead of infinite loop
|
Python
|
apache-2.0
|
johnwlockwood/karl_data,johnwlockwood/stream_tap,johnwlockwood/stream_tap,johnwlockwood/iter_karld_tools
|
---
+++
@@ -38,9 +38,6 @@
:type iterable: iter
"""
iterable_items = iter(iterable)
-
- while True:
- items_batch = tuple(islice(iterable_items, max_size))
- if not items_batch:
- break
+ for items_batch in iter(lambda: tuple(islice(iterable_items, max_size)),
+ tuple()):
yield items_batch
|
67cce913a6ab960b7ddc476fa9a16adb39a69862
|
compose/__init__.py
|
compose/__init__.py
|
from __future__ import absolute_import
from __future__ import unicode_literals
__version__ = '1.25.1'
|
from __future__ import absolute_import
from __future__ import unicode_literals
__version__ = '1.26.0dev'
|
Set dev version to 1.26.0dev after releasing 1.25.1
|
Set dev version to 1.26.0dev after releasing 1.25.1
Signed-off-by: Ulysses Souza <9b58b28cc7619bff4119b8572e41bbb4dd363aab@gmail.com>
|
Python
|
apache-2.0
|
vdemeester/compose,thaJeztah/compose,vdemeester/compose,thaJeztah/compose
|
---
+++
@@ -1,4 +1,4 @@
from __future__ import absolute_import
from __future__ import unicode_literals
-__version__ = '1.25.1'
+__version__ = '1.26.0dev'
|
d8a93f06cf6d78c543607d7046017cad3acc6c32
|
tests/test_callback.py
|
tests/test_callback.py
|
import tests
class CallbackTests(tests.TestCase):
def test_hello_world(self):
result = []
def hello_world(loop):
result.append('Hello World')
loop.stop()
self.loop.call_soon(hello_world, self.loop)
self.loop.run_forever()
self.assertEqual(result, ['Hello World'])
def test_soon_stop_soon(self):
result = []
def hello():
result.append("Hello")
def world():
result.append("World")
self.loop.stop()
self.loop.call_soon(hello)
self.loop.stop()
self.loop.call_soon(world)
self.loop.run_forever()
self.assertEqual(result, ["Hello"])
self.loop.run_forever()
self.assertEqual(result, ["Hello", "World"])
def test_close_soon(self):
def func():
pass
self.loop.close()
# FIXME: calling call_soon() on a closed event loop should raise an
# exception:
# http://bugs.python.org/issue22922
self.loop.call_soon(func)
if __name__ == '__main__':
import unittest
unittest.main()
|
import tests
class CallbackTests(tests.TestCase):
def test_hello_world(self):
result = []
def hello_world(loop):
result.append('Hello World')
loop.stop()
self.loop.call_soon(hello_world, self.loop)
self.loop.run_forever()
self.assertEqual(result, ['Hello World'])
def test_soon_stop_soon(self):
result = []
def hello():
result.append("Hello")
def world():
result.append("World")
self.loop.stop()
self.loop.call_soon(hello)
self.loop.stop()
self.loop.call_soon(world)
self.loop.run_forever()
self.assertEqual(result, ["Hello"])
self.loop.run_forever()
self.assertEqual(result, ["Hello", "World"])
if __name__ == '__main__':
import unittest
unittest.main()
|
Remove a test which behaves differently depending on the the version of asyncio/trollius
|
Remove a test which behaves differently depending on the the version of asyncio/trollius
|
Python
|
apache-2.0
|
overcastcloud/aioeventlet
|
---
+++
@@ -32,16 +32,6 @@
self.loop.run_forever()
self.assertEqual(result, ["Hello", "World"])
- def test_close_soon(self):
- def func():
- pass
-
- self.loop.close()
- # FIXME: calling call_soon() on a closed event loop should raise an
- # exception:
- # http://bugs.python.org/issue22922
- self.loop.call_soon(func)
-
if __name__ == '__main__':
import unittest
|
5b6ac8301908777a69dbbf74eb85af8b505fa76f
|
download_agents.py
|
download_agents.py
|
#!/usr/bin/env python3
from __future__ import print_function
from argparse import ArgumentParser
import json
import os
from urllib.request import urlopen
import subprocess
import sys
def main():
parser = ArgumentParser()
parser.add_argument('downloads_file', metavar='downloads-file')
args = parser.parse_args()
with open(args.downloads_file) as downloads_file:
downloads = json.load(downloads_file)
for download in downloads:
path = download['path']
if os.path.isfile(path):
print('File already exists: {}'.format(path))
else:
print('Downloading: {}'.format(path), end='')
sys.stdout.flush()
with open(download['path'], 'wb') as target:
with urlopen(download['url']) as source:
while True:
chunk = source.read(102400)
if len(chunk) == 0:
break
target.write(chunk)
print('.', end='', file=sys.stderr)
sys.stderr.flush()
print('')
print('Verifying hash')
hashsum = subprocess.check_output(
['sha256sum', path]).split(b' ', 1)[0]
hashsum = hashsum.decode('ascii')
expected = download['sha256']
print(' {}\n {}'.format(hashsum, expected))
if hashsum != expected:
raise ValueError('Incorrect hash for {}'.format(path))
if __name__ == '__main__':
main()
|
#!/usr/bin/env python3
from __future__ import print_function
from argparse import ArgumentParser
import errno
import json
import os
from urllib.request import urlopen
import subprocess
import sys
def main():
parser = ArgumentParser()
parser.add_argument('downloads_file', metavar='downloads-file')
args = parser.parse_args()
with open(args.downloads_file) as downloads_file:
downloads = json.load(downloads_file)
for download in downloads:
path = download['path']
if os.path.isfile(path):
print('File already exists: {}'.format(path))
else:
print('Downloading: {}'.format(path), end='')
sys.stdout.flush()
try:
os.makedirs(os.path.dirname(download['path']))
except OSError as e:
if e.errno != errno.EEXIST:
raise
with open(download['path'], 'wb') as target:
with urlopen(download['url']) as source:
while True:
chunk = source.read(102400)
if len(chunk) == 0:
break
target.write(chunk)
print('.', end='', file=sys.stderr)
sys.stderr.flush()
print('')
print('Verifying hash')
hashsum = subprocess.check_output(
['sha256sum', path]).split(b' ', 1)[0]
hashsum = hashsum.decode('ascii')
expected = download['sha256']
print(' {}\n {}'.format(hashsum, expected))
if hashsum != expected:
raise ValueError('Incorrect hash for {}'.format(path))
if __name__ == '__main__':
main()
|
Create parent directories as needed.
|
Create parent directories as needed.
|
Python
|
agpl-3.0
|
mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju
|
---
+++
@@ -2,6 +2,7 @@
from __future__ import print_function
from argparse import ArgumentParser
+import errno
import json
import os
from urllib.request import urlopen
@@ -22,6 +23,11 @@
else:
print('Downloading: {}'.format(path), end='')
sys.stdout.flush()
+ try:
+ os.makedirs(os.path.dirname(download['path']))
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
with open(download['path'], 'wb') as target:
with urlopen(download['url']) as source:
while True:
|
af0f42b86a1e3f916041eb78a4332daf0f22531a
|
OIPA/manage.py
|
OIPA/manage.py
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "OIPA.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
import os
import sys
from dotenv import find_dotenv, load_dotenv
load_dotenv(find_dotenv())
if __name__ == "__main__":
current_settings = os.getenv("DJANGO_SETTINGS_MODULE", None)
if not current_settings:
raise Exception(
"Please configure your .env file along-side manage.py file and "
"set 'DJANGO_SETTINGS_MODULE=OIPA.settings_file' variable there!"
)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", current_settings)
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Load current settings from .env file
|
Load current settings from .env file
OIPA-645
|
Python
|
agpl-3.0
|
openaid-IATI/OIPA,zimmerman-zimmerman/OIPA,openaid-IATI/OIPA,openaid-IATI/OIPA,openaid-IATI/OIPA,zimmerman-zimmerman/OIPA,zimmerman-zimmerman/OIPA,zimmerman-zimmerman/OIPA,openaid-IATI/OIPA,zimmerman-zimmerman/OIPA
|
---
+++
@@ -2,8 +2,20 @@
import os
import sys
+from dotenv import find_dotenv, load_dotenv
+
+load_dotenv(find_dotenv())
+
if __name__ == "__main__":
- os.environ.setdefault("DJANGO_SETTINGS_MODULE", "OIPA.settings")
+ current_settings = os.getenv("DJANGO_SETTINGS_MODULE", None)
+
+ if not current_settings:
+ raise Exception(
+ "Please configure your .env file along-side manage.py file and "
+ "set 'DJANGO_SETTINGS_MODULE=OIPA.settings_file' variable there!"
+ )
+
+ os.environ.setdefault("DJANGO_SETTINGS_MODULE", current_settings)
from django.core.management import execute_from_command_line
|
c1b96a3ee94c25cfbe3d66eec76052badacfb38e
|
udata/tests/organization/test_notifications.py
|
udata/tests/organization/test_notifications.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from udata.models import MembershipRequest, Member
from udata.core.user.factories import UserFactory
from udata.core.organization.factories import OrganizationFactory
from udata.core.organization.notifications import (
membership_request_notifications
)
from .. import TestCase, DBTestMixin
class OrganizationNotificationsTest(TestCase, DBTestMixin):
def test_pending_membership_requests(self):
admin = UserFactory()
editor = UserFactory()
applicant = UserFactory()
request = MembershipRequest(user=applicant, comment='test')
members = [
Member(user=editor, role='editor'),
Member(user=admin, role='admin')
]
org = OrganizationFactory(members=members, requests=[request])
self.assertEqual(len(membership_request_notifications(applicant)), 0)
self.assertEqual(len(membership_request_notifications(editor)), 0)
notifications = membership_request_notifications(admin)
self.assertEqual(len(notifications), 1)
dt, details = notifications[0]
self.assertEqualDates(dt, request.created)
self.assertEqual(details['id'], request.id)
self.assertEqual(details['organization'], org.id)
self.assertEqual(details['user']['id'], applicant.id)
self.assertEqual(details['user']['fullname'], applicant.fullname)
self.assertEqual(details['user']['avatar'], str(applicant.avatar))
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
import pytest
from udata.models import MembershipRequest, Member
from udata.core.user.factories import UserFactory
from udata.core.organization.factories import OrganizationFactory
from udata.core.organization.notifications import (
membership_request_notifications
)
from udata.tests.helpers import assert_equal_dates
@pytest.mark.usefixtures('clean_db')
class OrganizationNotificationsTest:
def test_pending_membership_requests(self):
admin = UserFactory()
editor = UserFactory()
applicant = UserFactory()
request = MembershipRequest(user=applicant, comment='test')
members = [
Member(user=editor, role='editor'),
Member(user=admin, role='admin')
]
org = OrganizationFactory(members=members, requests=[request])
assert len(membership_request_notifications(applicant)) is 0
assert len(membership_request_notifications(editor)) is 0
notifications = membership_request_notifications(admin)
assert len(notifications) is 1
dt, details = notifications[0]
assert_equal_dates(dt, request.created)
assert details['id'] == request.id
assert details['organization'] == org.id
assert details['user']['id'] == applicant.id
assert details['user']['fullname'] == applicant.fullname
assert details['user']['avatar'] == str(applicant.avatar)
|
Migrate org notif tests to pytest
|
Migrate org notif tests to pytest
|
Python
|
agpl-3.0
|
opendatateam/udata,etalab/udata,etalab/udata,opendatateam/udata,opendatateam/udata,etalab/udata
|
---
+++
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
+
+import pytest
from udata.models import MembershipRequest, Member
@@ -9,10 +11,11 @@
membership_request_notifications
)
-from .. import TestCase, DBTestMixin
+from udata.tests.helpers import assert_equal_dates
-class OrganizationNotificationsTest(TestCase, DBTestMixin):
+@pytest.mark.usefixtures('clean_db')
+class OrganizationNotificationsTest:
def test_pending_membership_requests(self):
admin = UserFactory()
editor = UserFactory()
@@ -24,15 +27,15 @@
]
org = OrganizationFactory(members=members, requests=[request])
- self.assertEqual(len(membership_request_notifications(applicant)), 0)
- self.assertEqual(len(membership_request_notifications(editor)), 0)
+ assert len(membership_request_notifications(applicant)) is 0
+ assert len(membership_request_notifications(editor)) is 0
notifications = membership_request_notifications(admin)
- self.assertEqual(len(notifications), 1)
+ assert len(notifications) is 1
dt, details = notifications[0]
- self.assertEqualDates(dt, request.created)
- self.assertEqual(details['id'], request.id)
- self.assertEqual(details['organization'], org.id)
- self.assertEqual(details['user']['id'], applicant.id)
- self.assertEqual(details['user']['fullname'], applicant.fullname)
- self.assertEqual(details['user']['avatar'], str(applicant.avatar))
+ assert_equal_dates(dt, request.created)
+ assert details['id'] == request.id
+ assert details['organization'] == org.id
+ assert details['user']['id'] == applicant.id
+ assert details['user']['fullname'] == applicant.fullname
+ assert details['user']['avatar'] == str(applicant.avatar)
|
5a3935caab0bf720db6707bb7974eec2400f3701
|
prompt_toolkit/key_binding/bindings/auto_suggest.py
|
prompt_toolkit/key_binding/bindings/auto_suggest.py
|
"""
Key bindings for auto suggestion (for fish-style auto suggestion).
"""
from __future__ import unicode_literals
from prompt_toolkit.application.current import get_app
from prompt_toolkit.key_binding.key_bindings import KeyBindings
from prompt_toolkit.filters import Condition
__all__ = [
'load_auto_suggest_bindings',
]
def load_auto_suggest_bindings():
"""
Key bindings for accepting auto suggestion text.
(This has to come after the Vi bindings, because they also have an
implementation for the "right arrow", but we really want the suggestion
binding when a suggestion is available.)
"""
key_bindings = KeyBindings()
handle = key_bindings.add
@Condition
def suggestion_available():
app = get_app()
return (app.current_buffer.suggestion is not None and
app.current_buffer.document.is_cursor_at_the_end)
@handle('c-f', filter=suggestion_available)
@handle('c-e', filter=suggestion_available)
@handle('right', filter=suggestion_available)
def _(event):
" Accept suggestion. "
b = event.current_buffer
suggestion = b.suggestion
if suggestion:
b.insert_text(suggestion.text)
return key_bindings
|
"""
Key bindings for auto suggestion (for fish-style auto suggestion).
"""
from __future__ import unicode_literals
import re
from prompt_toolkit.application.current import get_app
from prompt_toolkit.key_binding.key_bindings import KeyBindings
from prompt_toolkit.filters import Condition, emacs_mode
__all__ = [
'load_auto_suggest_bindings',
]
def load_auto_suggest_bindings():
"""
Key bindings for accepting auto suggestion text.
(This has to come after the Vi bindings, because they also have an
implementation for the "right arrow", but we really want the suggestion
binding when a suggestion is available.)
"""
key_bindings = KeyBindings()
handle = key_bindings.add
@Condition
def suggestion_available():
app = get_app()
return (app.current_buffer.suggestion is not None and
app.current_buffer.document.is_cursor_at_the_end)
@handle('c-f', filter=suggestion_available)
@handle('c-e', filter=suggestion_available)
@handle('right', filter=suggestion_available)
def _(event):
" Accept suggestion. "
b = event.current_buffer
suggestion = b.suggestion
if suggestion:
b.insert_text(suggestion.text)
@handle('escape', 'f', filter=suggestion_available & emacs_mode)
def _(event):
" Fill partial suggestion. "
b = event.current_buffer
suggestion = b.suggestion
if suggestion:
t = re.split(r'(\S+\s+)', suggestion.text)
b.insert_text(next(x for x in t if x))
return key_bindings
|
Add alt-f binding for auto-suggestion.
|
Add alt-f binding for auto-suggestion.
|
Python
|
bsd-3-clause
|
jonathanslenders/python-prompt-toolkit
|
---
+++
@@ -2,9 +2,10 @@
Key bindings for auto suggestion (for fish-style auto suggestion).
"""
from __future__ import unicode_literals
+import re
from prompt_toolkit.application.current import get_app
from prompt_toolkit.key_binding.key_bindings import KeyBindings
-from prompt_toolkit.filters import Condition
+from prompt_toolkit.filters import Condition, emacs_mode
__all__ = [
'load_auto_suggest_bindings',
@@ -39,4 +40,14 @@
if suggestion:
b.insert_text(suggestion.text)
+ @handle('escape', 'f', filter=suggestion_available & emacs_mode)
+ def _(event):
+ " Fill partial suggestion. "
+ b = event.current_buffer
+ suggestion = b.suggestion
+
+ if suggestion:
+ t = re.split(r'(\S+\s+)', suggestion.text)
+ b.insert_text(next(x for x in t if x))
+
return key_bindings
|
ea3deb560aaddab4d66a84e840e10854cfad581d
|
nass/__init__.py
|
nass/__init__.py
|
# -*- coding: utf-8 -*-
"""
USDA National Agricultural Statistics Service API wrapper
This Python wrapper implements the public API for the USDA National
Agricultural Statistics Service. It is a very thin layer over the Requests
package.
This product uses the NASS API but is not endorsed or certified by NASS.
:copyright: (c) 2015 by Nick Frost.
:license: MIT, see LICENSE for more details.
"""
__author__ = 'Nick Frost'
__version__ = '0.1.1'
__license__ = 'MIT'
from .api import NassApi
|
# -*- coding: utf-8 -*-
"""
USDA National Agricultural Statistics Service API wrapper
This Python wrapper implements the public API for the USDA National
Agricultural Statistics Service. It is a very thin layer over the Requests
package.
This product uses the NASS API but is not endorsed or certified by NASS.
:copyright: (c) 2015 by Nick Frost.
:license: MIT, see LICENSE for more details.
"""
from .api import NassApi
__author__ = 'Nick Frost'
__version__ = '0.1.1'
__license__ = 'MIT'
|
Make package-level import at the top (pep8)
|
Make package-level import at the top (pep8)
|
Python
|
mit
|
nickfrostatx/nass
|
---
+++
@@ -12,8 +12,8 @@
:license: MIT, see LICENSE for more details.
"""
+from .api import NassApi
+
__author__ = 'Nick Frost'
__version__ = '0.1.1'
__license__ = 'MIT'
-
-from .api import NassApi
|
fd302e3f9cbc5bcf06d47600adc3e0f0df33c114
|
f8a_jobs/auth.py
|
f8a_jobs/auth.py
|
from flask import session
from flask_oauthlib.client import OAuth
import f8a_jobs.defaults as configuration
oauth = OAuth()
github = oauth.remote_app(
'github',
consumer_key=configuration.GITHUB_CONSUMER_KEY,
consumer_secret=configuration.GITHUB_CONSUMER_SECRET,
request_token_params={'scope': 'user:email'},
base_url='https://api.github.com/',
request_token_url=None,
access_token_method='POST',
access_token_url='https://github.com/login/oauth/access_token',
authorize_url='https://github.com/login/oauth/authorize'
)
@github.tokengetter
def get_github_oauth_token():
return session.get('auth_token')
|
from flask import session
from flask_oauthlib.client import OAuth
import f8a_jobs.defaults as configuration
oauth = OAuth()
github = oauth.remote_app(
'github',
consumer_key=configuration.GITHUB_CONSUMER_KEY,
consumer_secret=configuration.GITHUB_CONSUMER_SECRET,
request_token_params={'scope': 'user:email,read:org'},
base_url='https://api.github.com/',
request_token_url=None,
access_token_method='POST',
access_token_url='https://github.com/login/oauth/access_token',
authorize_url='https://github.com/login/oauth/authorize'
)
@github.tokengetter
def get_github_oauth_token():
return session.get('auth_token')
|
Add read organization scope for OAuth
|
Add read organization scope for OAuth
This will enable to access jobs service even for not public organization
members.
|
Python
|
apache-2.0
|
fabric8-analytics/fabric8-analytics-jobs,fabric8-analytics/fabric8-analytics-jobs
|
---
+++
@@ -7,7 +7,7 @@
'github',
consumer_key=configuration.GITHUB_CONSUMER_KEY,
consumer_secret=configuration.GITHUB_CONSUMER_SECRET,
- request_token_params={'scope': 'user:email'},
+ request_token_params={'scope': 'user:email,read:org'},
base_url='https://api.github.com/',
request_token_url=None,
access_token_method='POST',
|
6a2782b11bcec2c1493258957ce7e8652d6990e8
|
core/build/views.py
|
core/build/views.py
|
from core.build.subnet import build_subnet
from core.network.models import Network
from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponse
import pdb
def build_network(request, network_pk):
network = get_object_or_404(Network, pk=network_pk)
if request.GET.pop("raw", False):
DEBUG_BUILD_STRING = build_subnet(network, raw=True)
return HttpResponse(DEBUG_BUILD_STRING)
else:
DEBUG_BUILD_STRING = build_subnet(network, raw=False)
return render_to_response('build/sample_build.html',
{'data': DEBUG_BUILD_STRING, 'network': network})
|
from core.build.subnet import build_subnet
from core.network.models import Network
from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponse
import pdb
def build_network(request, network_pk):
network = get_object_or_404(Network, pk=network_pk)
if request.GET.get('raw'):
DEBUG_BUILD_STRING = build_subnet(network, raw=True)
return HttpResponse(DEBUG_BUILD_STRING)
else:
DEBUG_BUILD_STRING = build_subnet(network, raw=False)
return render_to_response('build/sample_build.html',
{'data': DEBUG_BUILD_STRING, 'network': network})
|
Revert "use pop instead of get because it doens't cause uncaught exceptions."
|
Revert "use pop instead of get because it doens't cause uncaught exceptions."
This reverts commit 7aa3e4128b9df890a2683faee0ebe2ee8e64ce33.
|
Python
|
bsd-3-clause
|
zeeman/cyder,murrown/cyder,akeym/cyder,OSU-Net/cyder,murrown/cyder,drkitty/cyder,murrown/cyder,drkitty/cyder,akeym/cyder,akeym/cyder,zeeman/cyder,drkitty/cyder,zeeman/cyder,OSU-Net/cyder,akeym/cyder,zeeman/cyder,drkitty/cyder,OSU-Net/cyder,murrown/cyder,OSU-Net/cyder
|
---
+++
@@ -8,7 +8,7 @@
def build_network(request, network_pk):
network = get_object_or_404(Network, pk=network_pk)
- if request.GET.pop("raw", False):
+ if request.GET.get('raw'):
DEBUG_BUILD_STRING = build_subnet(network, raw=True)
return HttpResponse(DEBUG_BUILD_STRING)
else:
|
5b0d308d1859920cc59e7241626472edb42c7856
|
djangosanetesting/testrunner.py
|
djangosanetesting/testrunner.py
|
from django.test.utils import setup_test_environment, teardown_test_environment
from django.db.backends.creation import create_test_db, destroy_test_db
import nose
def run_tests(test_labels, verbosity=1, interactive=True, extra_tests=[]):
""" Run tests with nose instead of defualt test runner """
setup_test_environment()
old_name = settings.DATABASE_NAME
create_test_db(verbosity, autoclobber=not interactive)
argv_backup = sys.argv
# we have to strip script name before passing to nose
sys.argv = argv_backup[0:1]
config = Config(files=all_config_files(), plugins=DefaultPluginManager())
nose.run(config=config)
sys.argv = argv_backup
destroy_test_db(old_name, verbosity)
teardown_test_environment()
run_tests.__test__ = False
|
import sys
from django.conf import settings
from django.test.utils import setup_test_environment, teardown_test_environment
import nose
from nose.config import Config, all_config_files
from nose.plugins.manager import DefaultPluginManager
def run_tests(test_labels, verbosity=1, interactive=True, extra_tests=[]):
""" Run tests with nose instead of defualt test runner """
setup_test_environment()
from django.db import connection
old_name = settings.DATABASE_NAME
connection.creation.create_test_db(verbosity, autoclobber=not interactive)
argv_backup = sys.argv
# we have to strip script name before passing to nose
sys.argv = argv_backup[0:1]
config = Config(files=all_config_files(), plugins=DefaultPluginManager())
nose.run(config=config)
sys.argv = argv_backup
connection.creation.destroy_test_db(old_name, verbosity)
teardown_test_environment()
#TODO: return len(result.failures) + len(result.errors)
run_tests.__test__ = False
|
Use database connection instead of old-style functions
|
Use database connection instead of old-style functions
|
Python
|
bsd-3-clause
|
Almad/django-sane-testing
|
---
+++
@@ -1,20 +1,32 @@
+import sys
+
+from django.conf import settings
from django.test.utils import setup_test_environment, teardown_test_environment
-from django.db.backends.creation import create_test_db, destroy_test_db
+
import nose
+from nose.config import Config, all_config_files
+from nose.plugins.manager import DefaultPluginManager
def run_tests(test_labels, verbosity=1, interactive=True, extra_tests=[]):
""" Run tests with nose instead of defualt test runner """
setup_test_environment()
+
+ from django.db import connection
old_name = settings.DATABASE_NAME
- create_test_db(verbosity, autoclobber=not interactive)
+ connection.creation.create_test_db(verbosity, autoclobber=not interactive)
argv_backup = sys.argv
+
# we have to strip script name before passing to nose
sys.argv = argv_backup[0:1]
config = Config(files=all_config_files(), plugins=DefaultPluginManager())
+
nose.run(config=config)
+
sys.argv = argv_backup
- destroy_test_db(old_name, verbosity)
+ connection.creation.destroy_test_db(old_name, verbosity)
teardown_test_environment()
+
+ #TODO: return len(result.failures) + len(result.errors)
run_tests.__test__ = False
|
e40797a40e1e8f76a48ffeaec2dcdb179b702062
|
microdrop/tests/test_dmf_device.py
|
microdrop/tests/test_dmf_device.py
|
from path import path
from nose.tools import raises
from dmf_device import DmfDevice
from utility import Version
def test_load_dmf_device():
"""
test loading DMF device files
"""
# version 0.2.0 files
for i in [0,1]:
yield load_device, (path(__file__).parent /
path('devices') /
path('device %d v%s' % (i, Version(0,2,0))))
# version 0.3.0 files
for i in [1]:
yield load_device, (path(__file__).parent /
path('devices') /
path('device %d v%s' % (i, Version(0,3,0))))
def load_device(name):
DmfDevice.load(name)
assert True
@raises(IOError)
def test_load_non_existant_dmf_device():
"""
test loading DMF device file that doesn't exist
"""
DmfDevice.load(path(__file__).parent /
path('devices') /
path('no device'))
|
from path import path
from nose.tools import raises
from dmf_device import DmfDevice
from utility import Version
def test_load_dmf_device():
"""
test loading DMF device files
"""
# version 0.2.0 files
for i in [0, 1]:
yield load_device, (path(__file__).parent /
path('devices') /
path('device %d v%s' % (i, Version(0,2,0))))
# version 0.3.0 files
for i in [0, 1]:
yield load_device, (path(__file__).parent /
path('devices') /
path('device %d v%s' % (i, Version(0,3,0))))
def load_device(name):
DmfDevice.load(name)
assert True
@raises(IOError)
def test_load_non_existant_dmf_device():
"""
test loading DMF device file that doesn't exist
"""
DmfDevice.load(path(__file__).parent /
path('devices') /
path('no device'))
|
Add test for device 0 v0.3.0
|
Add test for device 0 v0.3.0
|
Python
|
bsd-3-clause
|
wheeler-microfluidics/microdrop
|
---
+++
@@ -10,13 +10,13 @@
"""
# version 0.2.0 files
- for i in [0,1]:
+ for i in [0, 1]:
yield load_device, (path(__file__).parent /
path('devices') /
path('device %d v%s' % (i, Version(0,2,0))))
# version 0.3.0 files
- for i in [1]:
+ for i in [0, 1]:
yield load_device, (path(__file__).parent /
path('devices') /
path('device %d v%s' % (i, Version(0,3,0))))
|
b92fb486107ef6feb4def07f601e7390d80db565
|
plugins/androidapp.py
|
plugins/androidapp.py
|
"""
paragoo plugin for retrieving card on an Android app
"""
import os
import requests
from bs4 import BeautifulSoup
class AppNotFoundException(Exception):
pass
def render(site_path, params):
"""
Look up the Android app details from its Play Store listing
Format of params: <app_key>
app_key looks like com.linkbubble.license.playstore
"""
app_key = params[0]
url_full = 'https://play.google.com/store/apps/details?id=' + app_key
url = 'https://play.google.com/store/apps/details'
url_params = {'id': app_key }
result = requests.get(url, params=url_params)
if result.status_code != requests.codes.ok:
raise AppNotFoundException(params[0])
else:
soup = BeautifulSoup(result.text, 'html.parser')
# TODO: render a card(?) with the site's androidapp.html template
return '<a href="' + url_full + '">' + soup.title.text.replace(' - Android-apps op Google Play', '') + '</a>'
|
"""
paragoo plugin for retrieving card on an Android app
"""
import os
import requests
from bs4 import BeautifulSoup
class AppNotFoundException(Exception):
pass
def get_app_details(app_key):
url_full = 'https://play.google.com/store/apps/details?id=' + app_key
url = 'https://play.google.com/store/apps/details'
url_params = {'id': app_key }
result = requests.get(url, params=url_params)
if result.status_code != requests.codes.ok:
raise AppNotFoundException(params[0])
else:
soup = BeautifulSoup(result.text, 'html.parser')
return {'title': soup.title.text.replace(' - Android-apps op Google Play', ''), 'url': url_full}
def render(site_path, params):
"""
Look up the Android app details from its Play Store listing
Format of params: <app_key>:optional description
app_key looks like com.linkbubble.license.playstore
"""
app_key = params[0]
details = get_app_details(app_key)
# TODO: render a card(?) with the site's androidapp.html template
return '<a href="' + details['url'] + '">' + details['title'] + '</a>'
|
Split out the app detail lookup into function
|
Split out the app detail lookup into function
|
Python
|
apache-2.0
|
aquatix/paragoo,aquatix/paragoo
|
---
+++
@@ -10,13 +10,7 @@
pass
-def render(site_path, params):
- """
- Look up the Android app details from its Play Store listing
- Format of params: <app_key>
- app_key looks like com.linkbubble.license.playstore
- """
- app_key = params[0]
+def get_app_details(app_key):
url_full = 'https://play.google.com/store/apps/details?id=' + app_key
url = 'https://play.google.com/store/apps/details'
url_params = {'id': app_key }
@@ -25,5 +19,16 @@
raise AppNotFoundException(params[0])
else:
soup = BeautifulSoup(result.text, 'html.parser')
- # TODO: render a card(?) with the site's androidapp.html template
- return '<a href="' + url_full + '">' + soup.title.text.replace(' - Android-apps op Google Play', '') + '</a>'
+ return {'title': soup.title.text.replace(' - Android-apps op Google Play', ''), 'url': url_full}
+
+
+def render(site_path, params):
+ """
+ Look up the Android app details from its Play Store listing
+ Format of params: <app_key>:optional description
+ app_key looks like com.linkbubble.license.playstore
+ """
+ app_key = params[0]
+ details = get_app_details(app_key)
+ # TODO: render a card(?) with the site's androidapp.html template
+ return '<a href="' + details['url'] + '">' + details['title'] + '</a>'
|
5344c97e7486229f9fae40bef2b73488d5aa2ffd
|
uchicagohvz/users/tasks.py
|
uchicagohvz/users/tasks.py
|
from celery import task
from django.conf import settings
from django.core import mail
import smtplib
@task(rate_limit=0.2)
def do_sympa_update(user, listname, subscribe):
if subscribe:
body = "QUIET ADD %s %s %s" % (listname, user.email, user.get_full_name())
else:
body = "QUIET DELETE %s %s" % (listname, user.email)
email = mail.EmailMessage(subject='', body=body, from_email=settings.SYMPA_FROM_EMAIL, to=[settings.SYMPA_TO_EMAIL])
email.send()
@task
def smtp_localhost_send(from_addr, to_addrs, msg):
server = smtplib.SMTP('localhost')
server.sendmail(from_addr, to_addrs, msg)
server.quit()
|
from celery import task
from django.conf import settings
from django.core import mail
import smtplib
@task
def do_sympa_update(user, listname, subscribe):
if subscribe:
body = "QUIET ADD %s %s %s" % (listname, user.email, user.get_full_name())
else:
body = "QUIET DELETE %s %s" % (listname, user.email)
email = mail.EmailMessage(subject='', body=body, from_email=settings.SYMPA_FROM_EMAIL, to=[settings.SYMPA_TO_EMAIL])
email.send()
@task
def smtp_localhost_send(from_addr, to_addrs, msg):
server = smtplib.SMTP('localhost')
server.sendmail(from_addr, to_addrs, msg)
server.quit()
|
Remove rate limit from do_sympa_update
|
Remove rate limit from do_sympa_update
|
Python
|
mit
|
kz26/uchicago-hvz,kz26/uchicago-hvz,kz26/uchicago-hvz
|
---
+++
@@ -5,7 +5,7 @@
import smtplib
-@task(rate_limit=0.2)
+@task
def do_sympa_update(user, listname, subscribe):
if subscribe:
body = "QUIET ADD %s %s %s" % (listname, user.email, user.get_full_name())
|
5c9bc019ea1461a82b9dbdd4b3df5c55be2a8274
|
unihan_db/__about__.py
|
unihan_db/__about__.py
|
__title__ = 'unihan-db'
__package_name__ = 'unihan_db'
__description__ = 'SQLAlchemy models for UNIHAN database'
__version__ = '0.1.0'
__author__ = 'Tony Narlock'
__email__ = 'cihai@git-pull.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2017 Tony Narlock'
|
__title__ = 'unihan-db'
__package_name__ = 'unihan_db'
__description__ = 'SQLAlchemy models for UNIHAN database'
__version__ = '0.1.0'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/cihai/unihan-db'
__pypi__ = 'https://pypi.org/project/unihan-db/'
__email__ = 'cihai@git-pull.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2017- cihai software foundation'
|
Update to cihai software foundation, add github and pypi
|
Metadata: Update to cihai software foundation, add github and pypi
|
Python
|
mit
|
cihai/unihan-db
|
---
+++
@@ -3,6 +3,8 @@
__description__ = 'SQLAlchemy models for UNIHAN database'
__version__ = '0.1.0'
__author__ = 'Tony Narlock'
+__github__ = 'https://github.com/cihai/unihan-db'
+__pypi__ = 'https://pypi.org/project/unihan-db/'
__email__ = 'cihai@git-pull.com'
__license__ = 'MIT'
-__copyright__ = 'Copyright 2017 Tony Narlock'
+__copyright__ = 'Copyright 2017- cihai software foundation'
|
131129b96995c0055ea0a7e27d7491a833e46566
|
wwwhisper_auth/assets.py
|
wwwhisper_auth/assets.py
|
# wwwhisper - web access control.
# Copyright (C) 2013 Jan Wrobel <jan@mixedbit.org>
import os
from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_control
from django.views.decorators.cache import cache_page
from django.views.generic import View
from wwwhisper_auth import http
class Asset:
"""Stores a static file to be returned by requests."""
def __init__(self, prefix, *args):
assert prefix is not None
self.body = file(os.path.join(prefix, *args)).read()
class StaticFileView(View):
""" A view to serve a single static file."""
asset = None
@method_decorator(cache_control(private=True, max_age=60 * 60 * 5))
def get(self, request):
return self.do_get(self.asset.body)
class HtmlFileView(StaticFileView):
def do_get(self, body):
return http.HttpResponseOKHtml(body)
class JsFileView(StaticFileView):
def do_get(self, body):
return http.HttpResponseOKJs(body)
|
# wwwhisper - web access control.
# Copyright (C) 2013-2022 Jan Wrobel <jan@mixedbit.org>
import os
from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_control
from django.views.decorators.cache import cache_page
from django.views.generic import View
from wwwhisper_auth import http
class Asset:
"""Stores a static file to be returned by requests."""
def __init__(self, prefix, *args):
assert prefix is not None
self.body = open(os.path.join(prefix, *args)).read()
class StaticFileView(View):
""" A view to serve a single static file."""
asset = None
@method_decorator(cache_control(private=True, max_age=60 * 60 * 5))
def get(self, request):
return self.do_get(self.asset.body)
class HtmlFileView(StaticFileView):
def do_get(self, body):
return http.HttpResponseOKHtml(body)
class JsFileView(StaticFileView):
def do_get(self, body):
return http.HttpResponseOKJs(body)
|
Use 'open' instead of 'file' (no longer available in Python 3).
|
Use 'open' instead of 'file' (no longer available in Python 3).
|
Python
|
mit
|
wrr/wwwhisper,wrr/wwwhisper,wrr/wwwhisper,wrr/wwwhisper
|
---
+++
@@ -1,5 +1,5 @@
# wwwhisper - web access control.
-# Copyright (C) 2013 Jan Wrobel <jan@mixedbit.org>
+# Copyright (C) 2013-2022 Jan Wrobel <jan@mixedbit.org>
import os
@@ -15,7 +15,7 @@
def __init__(self, prefix, *args):
assert prefix is not None
- self.body = file(os.path.join(prefix, *args)).read()
+ self.body = open(os.path.join(prefix, *args)).read()
class StaticFileView(View):
|
68fe680266f705bea2b33e614d7aac2ae13b46a2
|
url_shortener/forms.py
|
url_shortener/forms.py
|
# -*- coding: utf-8 -*-
from flask_wtf import Form
from wtforms import StringField, validators
from .validation import not_spam
class ShortenedUrlForm(Form):
url = StringField(
'Url to be shortened',
[
validators.DataRequired(),
validators.URL(message="A valid url is required"),
not_spam
]
)
|
# -*- coding: utf-8 -*-
from flask_wtf import Form
from wtforms import StringField, validators
from .validation import not_blacklisted_nor_spam
class ShortenedUrlForm(Form):
url = StringField(
'Url to be shortened',
[
validators.DataRequired(),
validators.URL(message="A valid url is required"),
not_blacklisted_nor_spam
]
)
|
Replace not_spam validator with not_blacklisted_nor_spam in form class
|
Replace not_spam validator with not_blacklisted_nor_spam in form class
|
Python
|
mit
|
piotr-rusin/url-shortener,piotr-rusin/url-shortener
|
---
+++
@@ -2,7 +2,7 @@
from flask_wtf import Form
from wtforms import StringField, validators
-from .validation import not_spam
+from .validation import not_blacklisted_nor_spam
class ShortenedUrlForm(Form):
@@ -11,6 +11,6 @@
[
validators.DataRequired(),
validators.URL(message="A valid url is required"),
- not_spam
+ not_blacklisted_nor_spam
]
)
|
1cf354d834fbb81260c88718c57533a546fc9dfa
|
src/robots/actions/attitudes.py
|
src/robots/actions/attitudes.py
|
import logging; logger = logging.getLogger("robot." + __name__)
from robots.exception import RobotError
from robots.actions.look_at import sweep
from robots.action import *
###############################################################################
@action
def sorry(robot, speed = 0.5):
return sweep(robot, 45, speed)
|
import logging; logger = logging.getLogger("robot." + __name__)
import random
from robots.exception import RobotError
from robots.lowlevel import *
from robots.actions.look_at import sweep
from robots.action import *
###############################################################################
@action
@workswith(ALL)
def satisfied(robot):
actions = kb_satisfied()
return actions
@action
@same_requirements_as(sweep)
def sorry(robot, speed = 0.5):
actions = kb_sorry()
actions += sweep(robot, 45, speed)
return actions
def _generate_id():
sequence = "abcdefghijklmopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
sample = random.sample(sequence, 5)
return "".join(sample)
def _send_state(state):
state_id = _generate_id()
statements = [state_id + " rdf:type " + state,
"myself experiences " + state_id]
logger.info("Setting my mood to " + state)
return add_knowledge(statements, lifespan=10)
def kb_confused():
return _send_state("ConfusedState")
def kb_satisfied():
return _send_state("SatisfiedState")
def kb_sorry():
return _send_state("SorryState")
def kb_happy():
return _send_state("HappyState")
def kb_angry():
return _send_state("AngryState")
def kb_sad():
return _send_state("SadState")
|
Update the knowledge base according to the emotion
|
[actions/attitude] Update the knowledge base according to the emotion
|
Python
|
isc
|
chili-epfl/pyrobots,chili-epfl/pyrobots-nao
|
---
+++
@@ -1,7 +1,10 @@
import logging; logger = logging.getLogger("robot." + __name__)
+
+import random
from robots.exception import RobotError
+from robots.lowlevel import *
from robots.actions.look_at import sweep
from robots.action import *
@@ -9,6 +12,52 @@
@action
+@workswith(ALL)
+def satisfied(robot):
+ actions = kb_satisfied()
+ return actions
+
+
+@action
+@same_requirements_as(sweep)
def sorry(robot, speed = 0.5):
- return sweep(robot, 45, speed)
-
+ actions = kb_sorry()
+ actions += sweep(robot, 45, speed)
+ return actions
+
+def _generate_id():
+ sequence = "abcdefghijklmopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
+ sample = random.sample(sequence, 5)
+ return "".join(sample)
+
+
+def _send_state(state):
+
+
+ state_id = _generate_id()
+ statements = [state_id + " rdf:type " + state,
+ "myself experiences " + state_id]
+
+ logger.info("Setting my mood to " + state)
+ return add_knowledge(statements, lifespan=10)
+
+
+def kb_confused():
+ return _send_state("ConfusedState")
+
+def kb_satisfied():
+ return _send_state("SatisfiedState")
+
+def kb_sorry():
+ return _send_state("SorryState")
+
+def kb_happy():
+ return _send_state("HappyState")
+
+def kb_angry():
+ return _send_state("AngryState")
+
+def kb_sad():
+ return _send_state("SadState")
+
+
|
4a0f4bb837151a28b8c9f495db4f9bd33eb45a77
|
src/python/expedient_geni/backends.py
|
src/python/expedient_geni/backends.py
|
'''
Created on Aug 12, 2010
@author: jnaous
'''
import logging
import re
from django.contrib.auth.backends import RemoteUserBackend
from django.conf import settings
from expedient.common.permissions.shortcuts import give_permission_to
from django.contrib.auth.models import User
logger = logging.getLogger("expedient_geni.backends")
urn_matcher = re.compile(r"(?P<prefix>.*)\+(?P<role>.*)\+(?P<name>.*)")
class GENIRemoteUserBackend(RemoteUserBackend):
"""
Extends the RemoteUserBackend to create GENI users.
"""
create_unknown_user = True
def clean_username(self, username):
logger.debug("Cleaning username %s" % username)
match = urn_matcher.match(username)
if match:
if match.group("prefix") == settings.GCF_URN_PREFIX:
username = match.group("name")
else:
username = match.group("name")+"@"+match.group("prefix")
return username
else:
return username
class MagicWordBackend(object):
"""Authenticates users if the magic word "MagicWord" is given as credentials"""
MAGIC_WORD = "MagicWord"
def authenticate(self, magicword=None, user=None):
if magicword == self.MAGIC_WORD:
return user
else:
return None
|
'''
Created on Aug 12, 2010
@author: jnaous
'''
import logging
import traceback
from django.contrib.auth.backends import RemoteUserBackend
from sfa.trust.gid import GID
from expedient_geni.utils import get_user_urn, urn_to_username
from geni.util.urn_util import URN
logger = logging.getLogger("expedient_geni.backends")
class GENIRemoteUserBackend(RemoteUserBackend):
"""
Extends the RemoteUserBackend to create GENI users.
"""
create_unknown_user = True
def clean_username(self, username):
try:
# The username field should be the full certificate
gid = GID(string=username)
logger.debug("Getting username from %s" % gid.dump)
# extract the URN in the subjectAltName
urn_str = gid.get_urn()
except:
logger.warn("Failed to get certificate from username.")
logger.warn(traceback.format_exc())
return username
try:
urn = URN(urn=str(urn_str))
except ValueError:
return username
# check if this user is one of ours
home_urn = get_user_urn(urn.getName())
if home_urn == urn.urn_string():
username = urn.getName()
else:
username = urn_to_username(urn.urn_string())
return username
class MagicWordBackend(object):
"""Authenticates users if the magic word "MagicWord" is given as credentials"""
MAGIC_WORD = "MagicWord"
def authenticate(self, magicword=None, user=None):
if magicword == self.MAGIC_WORD:
return user
else:
return None
|
Use urn from certificate to create username
|
Use urn from certificate to create username
|
Python
|
bsd-3-clause
|
avlach/univbris-ocf,avlach/univbris-ocf,avlach/univbris-ocf,avlach/univbris-ocf
|
---
+++
@@ -4,15 +4,13 @@
@author: jnaous
'''
import logging
-import re
+import traceback
from django.contrib.auth.backends import RemoteUserBackend
-from django.conf import settings
-from expedient.common.permissions.shortcuts import give_permission_to
-from django.contrib.auth.models import User
+from sfa.trust.gid import GID
+from expedient_geni.utils import get_user_urn, urn_to_username
+from geni.util.urn_util import URN
logger = logging.getLogger("expedient_geni.backends")
-
-urn_matcher = re.compile(r"(?P<prefix>.*)\+(?P<role>.*)\+(?P<name>.*)")
class GENIRemoteUserBackend(RemoteUserBackend):
"""
@@ -21,17 +19,30 @@
create_unknown_user = True
def clean_username(self, username):
- logger.debug("Cleaning username %s" % username)
+ try:
+ # The username field should be the full certificate
+ gid = GID(string=username)
+ logger.debug("Getting username from %s" % gid.dump)
+
+ # extract the URN in the subjectAltName
+ urn_str = gid.get_urn()
+ except:
+ logger.warn("Failed to get certificate from username.")
+ logger.warn(traceback.format_exc())
+ return username
- match = urn_matcher.match(username)
- if match:
- if match.group("prefix") == settings.GCF_URN_PREFIX:
- username = match.group("name")
- else:
- username = match.group("name")+"@"+match.group("prefix")
+ try:
+ urn = URN(urn=str(urn_str))
+ except ValueError:
return username
+
+ # check if this user is one of ours
+ home_urn = get_user_urn(urn.getName())
+ if home_urn == urn.urn_string():
+ username = urn.getName()
else:
- return username
+ username = urn_to_username(urn.urn_string())
+ return username
class MagicWordBackend(object):
"""Authenticates users if the magic word "MagicWord" is given as credentials"""
|
8c51722bff4460b33a33d0380b75047649119175
|
pyhpeimc/__init__.py
|
pyhpeimc/__init__.py
|
#!/usr/bin/env python
# -*- coding: <encoding-name> -*-
'''
Copyright 2015 Hewlett Packard Enterprise Development LP
Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an
“AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
'''
|
#!/usr/bin/env python
# -*- coding: ascii -*-
'''
Copyright 2015 Hewlett Packard Enterprise Development LP
Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an
“AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
'''
|
Fix in groups.py for get_custom_views function.
|
Fix in groups.py for get_custom_views function.
|
Python
|
apache-2.0
|
HPNetworking/HP-Intelligent-Management-Center,HPENetworking/PYHPEIMC,netmanchris/PYHPEIMC
|
---
+++
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-# -*- coding: <encoding-name> -*-
+# -*- coding: ascii -*-
'''
Copyright 2015 Hewlett Packard Enterprise Development LP
|
cf03026a27f8f7d35430807d2295bf062c4e0ca9
|
master/skia_master_scripts/android_factory.py
|
master/skia_master_scripts/android_factory.py
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility class to build the Skia master BuildFactory's for Android buildbots.
Overrides SkiaFactory with any Android-specific steps."""
from skia_master_scripts import factory as skia_factory
class AndroidFactory(skia_factory.SkiaFactory):
"""Overrides for Android builds."""
def Build(self, clobber=None):
"""Build and return the complete BuildFactory.
clobber: boolean indicating whether we should clean before building
"""
if clobber is None:
clobber = self._default_clobber
if clobber:
self._skia_cmd_obj.AddClean()
self._skia_cmd_obj.AddRun(
run_command='../android/bin/android_make all -d xoom %s' % (
self._make_flags),
description='BuildAll')
return self._factory
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility class to build the Skia master BuildFactory's for Android buildbots.
Overrides SkiaFactory with any Android-specific steps."""
from skia_master_scripts import factory as skia_factory
class AndroidFactory(skia_factory.SkiaFactory):
"""Overrides for Android builds."""
def Build(self, clobber=None):
"""Build and return the complete BuildFactory.
clobber: boolean indicating whether we should clean before building
"""
if clobber is None:
clobber = self._default_clobber
if clobber:
self._skia_cmd_obj.AddClean()
self._skia_cmd_obj.AddRunCommand(
command='../android/bin/android_make all -d nexus_s %s' % (
self._make_flags),
description='BuildAll')
self.PushBinaryToDeviceAndRun(binary_name='tests', description='RunTests')
return self._factory
def PushBinaryToDeviceAndRun(self, binary_name, description, timeout=None):
"""Adds a build step: push a binary file to the USB-connected Android
device and run it.
binary_name: which binary to run on the device
description: text description (e.g., 'RunTests')
timeout: timeout in seconds, or None to use the default timeout
"""
path_to_adb = self.TargetPathJoin('..', 'android', 'bin', 'linux', 'adb')
command_list = [
'%s root' % path_to_adb,
'%s remount' % path_to_adb,
'%s push out/%s/%s /system/bin/%s' % (
path_to_adb, self._configuration, binary_name, binary_name),
'%s logcat -c' % path_to_adb,
'%s shell %s' % (path_to_adb, binary_name),
'%s logcat -d' % path_to_adb,
]
self._skia_cmd_obj.AddRunCommandList(
command_list=command_list, description=description)
|
Add RunTests step for Android buildbots
|
Add RunTests step for Android buildbots
Requires https://codereview.appspot.com/5966078 ('Add AddRunCommandList(), a cleaner way of running multiple shell commands as a single buildbot step') to work.
Review URL: https://codereview.appspot.com/5975072
git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@3594 2bbb7eff-a529-9590-31e7-b0007b416f81
|
Python
|
bsd-3-clause
|
google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot
|
---
+++
@@ -21,9 +21,32 @@
if clobber:
self._skia_cmd_obj.AddClean()
- self._skia_cmd_obj.AddRun(
- run_command='../android/bin/android_make all -d xoom %s' % (
+ self._skia_cmd_obj.AddRunCommand(
+ command='../android/bin/android_make all -d nexus_s %s' % (
self._make_flags),
description='BuildAll')
+ self.PushBinaryToDeviceAndRun(binary_name='tests', description='RunTests')
+
return self._factory
+
+ def PushBinaryToDeviceAndRun(self, binary_name, description, timeout=None):
+ """Adds a build step: push a binary file to the USB-connected Android
+ device and run it.
+
+ binary_name: which binary to run on the device
+ description: text description (e.g., 'RunTests')
+ timeout: timeout in seconds, or None to use the default timeout
+ """
+ path_to_adb = self.TargetPathJoin('..', 'android', 'bin', 'linux', 'adb')
+ command_list = [
+ '%s root' % path_to_adb,
+ '%s remount' % path_to_adb,
+ '%s push out/%s/%s /system/bin/%s' % (
+ path_to_adb, self._configuration, binary_name, binary_name),
+ '%s logcat -c' % path_to_adb,
+ '%s shell %s' % (path_to_adb, binary_name),
+ '%s logcat -d' % path_to_adb,
+ ]
+ self._skia_cmd_obj.AddRunCommandList(
+ command_list=command_list, description=description)
|
b3a9027940f854f84cbf8f05af79c1f98a56d349
|
pretix/settings.py
|
pretix/settings.py
|
from pretix.settings import * # noqa
SECRET_KEY = "{{secret_key}}"
LOGGING["handlers"]["mail_admins"]["include_html"] = True # noqa
STATICFILES_STORAGE = (
"django.contrib.staticfiles.storage.ManifestStaticFilesStorage" # noqa
)
DATABASES = {
"default": {
"ENGINE": "django.db.backends.postgresql",
"NAME": "{{database_name}}",
"USER": "{{database_username}}",
"PASSWORD": "{{database_password}}",
"HOST": "{{database_host}}",
"PORT": "5432",
}
}
USE_X_FORWARDED_HOST = True
SITE_URL = "https://tickets.pycon.it"
MAIL_FROM = SERVER_EMAIL = DEFAULT_FROM_EMAIL = "noreply@pycon.it"
EMAIL_HOST = "email-smtp.us-east-1.amazonaws.com"
EMAIL_PORT = 587
EMAIL_HOST_USER = "{{mail_user}}"
EMAIL_HOST_PASSWORD = "{{mail_password}}"
EMAIL_USE_TLS = True
EMAIL_USE_SSL = False
EMAIL_SUBJECT_PREFIX = "[PyCon Tickets] "
|
from pretix.settings import * # noqa
SECRET_KEY = "{{secret_key}}"
LOGGING["handlers"]["mail_admins"]["include_html"] = True # noqa
STATICFILES_STORAGE = (
"django.contrib.staticfiles.storage.ManifestStaticFilesStorage" # noqa
)
DATABASES = {
"default": {
"ENGINE": "django.db.backends.postgresql",
"NAME": "{{database_name}}",
"USER": "{{database_username}}",
"PASSWORD": "{{database_password}}",
"HOST": "{{database_host}}",
"PORT": "5432",
}
}
# Allow all the languages
# see: pretix/settings.py#L425-L435
LANGUAGES = [(k, v) for k, v in ALL_LANGUAGES] # noqa
USE_X_FORWARDED_HOST = True
SITE_URL = "https://tickets.pycon.it"
MAIL_FROM = SERVER_EMAIL = DEFAULT_FROM_EMAIL = "noreply@pycon.it"
EMAIL_HOST = "email-smtp.us-east-1.amazonaws.com"
EMAIL_PORT = 587
EMAIL_HOST_USER = "{{mail_user}}"
EMAIL_HOST_PASSWORD = "{{mail_password}}"
EMAIL_USE_TLS = True
EMAIL_USE_SSL = False
EMAIL_SUBJECT_PREFIX = "[PyCon Tickets] "
|
Allow all languages on pretix
|
Allow all languages on pretix
|
Python
|
mit
|
patrick91/pycon,patrick91/pycon
|
---
+++
@@ -18,6 +18,10 @@
}
}
+# Allow all the languages
+# see: pretix/settings.py#L425-L435
+LANGUAGES = [(k, v) for k, v in ALL_LANGUAGES] # noqa
+
USE_X_FORWARDED_HOST = True
SITE_URL = "https://tickets.pycon.it"
|
d343ba2abc476e1c6a26e273b9262aa5974b8ab5
|
fireplace/rules.py
|
fireplace/rules.py
|
"""
Base game rules (events, etc)
"""
from .actions import Attack, Damage, Destroy, Hit
from .dsl.selector import FRIENDLY_HERO, MINION, SELF
POISONOUS = Damage(MINION, None, SELF).on(Destroy(Damage.TARGETS))
class WeaponRules:
base_events = [
Attack(FRIENDLY_HERO).on(Hit(SELF, 1))
]
|
"""
Base game rules (events, etc)
"""
from .actions import Attack, Damage, Destroy, Hit
from .dsl.selector import FRIENDLY_HERO, MINION, SELF
POISONOUS = Damage(MINION, None, SELF).on(Destroy(Damage.TARGETS))
class WeaponRules:
base_events = [
Attack(FRIENDLY_HERO).after(Hit(SELF, 1))
]
|
Move Weapon durability hits to Attack.after()
|
Move Weapon durability hits to Attack.after()
|
Python
|
agpl-3.0
|
smallnamespace/fireplace,smallnamespace/fireplace,jleclanche/fireplace,amw2104/fireplace,NightKev/fireplace,beheh/fireplace,Ragowit/fireplace,Ragowit/fireplace,amw2104/fireplace
|
---
+++
@@ -10,5 +10,5 @@
class WeaponRules:
base_events = [
- Attack(FRIENDLY_HERO).on(Hit(SELF, 1))
+ Attack(FRIENDLY_HERO).after(Hit(SELF, 1))
]
|
639824dfa86b2aa98b1ae2ca3d4a5cec6ca329ea
|
nbgrader/preprocessors/__init__.py
|
nbgrader/preprocessors/__init__.py
|
from .headerfooter import IncludeHeaderFooter
from .lockcells import LockCells
from .clearsolutions import ClearSolutions
from .findstudentid import FindStudentID
from .saveautogrades import SaveAutoGrades
from .displayautogrades import DisplayAutoGrades
from .computechecksums import ComputeChecksums
from .savecells import SaveCells
from .overwritecells import OverwriteCells
from .checkcellmetadata import CheckCellMetadata
from .execute import Execute
from .getgrades import GetGrades
|
from .headerfooter import IncludeHeaderFooter
from .lockcells import LockCells
from .clearsolutions import ClearSolutions
from .saveautogrades import SaveAutoGrades
from .displayautogrades import DisplayAutoGrades
from .computechecksums import ComputeChecksums
from .savecells import SaveCells
from .overwritecells import OverwriteCells
from .checkcellmetadata import CheckCellMetadata
from .execute import Execute
from .getgrades import GetGrades
|
Remove FindStudentID from preprocessors init
|
Remove FindStudentID from preprocessors init
|
Python
|
bsd-3-clause
|
EdwardJKim/nbgrader,EdwardJKim/nbgrader,jhamrick/nbgrader,ellisonbg/nbgrader,alope107/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,dementrock/nbgrader,ellisonbg/nbgrader,EdwardJKim/nbgrader,jhamrick/nbgrader,jdfreder/nbgrader,jdfreder/nbgrader,ellisonbg/nbgrader,alope107/nbgrader,jupyter/nbgrader,modulexcite/nbgrader,modulexcite/nbgrader,jupyter/nbgrader,MatKallada/nbgrader,ellisonbg/nbgrader,EdwardJKim/nbgrader,jhamrick/nbgrader,MatKallada/nbgrader,jupyter/nbgrader,dementrock/nbgrader
|
---
+++
@@ -1,7 +1,6 @@
from .headerfooter import IncludeHeaderFooter
from .lockcells import LockCells
from .clearsolutions import ClearSolutions
-from .findstudentid import FindStudentID
from .saveautogrades import SaveAutoGrades
from .displayautogrades import DisplayAutoGrades
from .computechecksums import ComputeChecksums
|
83080df101aca13b9b044996a013794c94ab82ed
|
pronto/parsers/obo.py
|
pronto/parsers/obo.py
|
import os
import fastobo
from .base import BaseParser
from ._fastobo import FastoboParser
class OboParser(FastoboParser, BaseParser):
@classmethod
def can_parse(cls, path, buffer):
return buffer.lstrip().startswith((b"format-version:", b"[Term", b"[Typedef"))
def parse_from(self, handle):
# Load the OBO document through an iterator using fastobo
doc = fastobo.iter(handle)
# Extract metadata from the OBO header and resolve imports
self.ont.metadata = self.extract_metadata(doc.header())
self.ont.imports.update(
self.process_imports(
self.ont.metadata.imports,
self.ont.import_depth,
os.path.dirname(self.ont.path or str()),
self.ont.timeout,
)
)
# Extract frames from the current document.
try:
for frame in doc:
if isinstance(frame, fastobo.term.TermFrame):
self.enrich_term(frame)
elif isinstance(frame, fastobo.typedef.TypedefFrame):
self.enrich_relationship(frame)
except SyntaxError as s:
location = self.ont.path, s.lineno, s.offset, s.text
raise SyntaxError(s.args[0], location) from None
|
import os
import fastobo
from .base import BaseParser
from ._fastobo import FastoboParser
class OboParser(FastoboParser, BaseParser):
@classmethod
def can_parse(cls, path, buffer):
return buffer.lstrip().startswith((b"format-version:", b"[Term", b"[Typedef"))
def parse_from(self, handle):
# Load the OBO document through an iterator using fastobo
doc = fastobo.iter(handle, ordered=True)
# Extract metadata from the OBO header and resolve imports
self.ont.metadata = self.extract_metadata(doc.header())
self.ont.imports.update(
self.process_imports(
self.ont.metadata.imports,
self.ont.import_depth,
os.path.dirname(self.ont.path or str()),
self.ont.timeout,
)
)
# Extract frames from the current document.
try:
for frame in doc:
if isinstance(frame, fastobo.term.TermFrame):
self.enrich_term(frame)
elif isinstance(frame, fastobo.typedef.TypedefFrame):
self.enrich_relationship(frame)
except SyntaxError as s:
location = self.ont.path, s.lineno, s.offset, s.text
raise SyntaxError(s.args[0], location) from None
|
Make sure to parse OBO documents in order
|
Make sure to parse OBO documents in order
|
Python
|
mit
|
althonos/pronto
|
---
+++
@@ -13,7 +13,7 @@
def parse_from(self, handle):
# Load the OBO document through an iterator using fastobo
- doc = fastobo.iter(handle)
+ doc = fastobo.iter(handle, ordered=True)
# Extract metadata from the OBO header and resolve imports
self.ont.metadata = self.extract_metadata(doc.header())
|
44e062dd5f302c5eed66e2d54858e1b8f78b745b
|
src/data.py
|
src/data.py
|
import csv
import datetime
class Row(dict):
def __init__(self, *args, **kwargs):
super(Row, self).__init__(*args, **kwargs)
self._start_date = None
self._end_date = None
def _cast_date(self, s):
if not s:
return None
return datetime.datetime.strptime(s, '%m/%d/%Y').date()
def _get_date_or_cast(self, s, attr):
if getattr(self, attr) is None:
setattr(self, attr, self._cast_date(s))
return getattr(self, attr)
@property
def start_date(self):
return self._get_date_or_cast(
self['DATE ISSUED'],
'_start_date',
)
@property
def end_date(self):
return self._get_date_or_cast(
self['LICENSE TERM EXPIRATION DATE'],
'_end_date',
)
@property
def account_number(self):
return self['ACCOUNT NUMBER']
@property
def neighborhood(self):
return self['NEIGHBORHOOD']
class RawReader(csv.DictReader):
def __iter__(self, *args, **kwargs):
row = self.next()
while row:
yield Row(row)
row = self.next()
class RawWriter(csv.DictWriter):
pass
|
import csv
import datetime
class Row(dict):
def __init__(self, *args, **kwargs):
super(Row, self).__init__(*args, **kwargs)
self._start_date = None
self._end_date = None
def _cast_date(self, s):
if not s:
return None
return datetime.datetime.strptime(s, '%m/%d/%Y').date()
def _get_date_or_cast(self, s, attr):
if getattr(self, attr) is None:
setattr(self, attr, self._cast_date(s))
return getattr(self, attr)
@property
def start_date(self):
return self._get_date_or_cast(
self['DATE ISSUED'],
'_start_date',
)
@property
def end_date(self):
return self._get_date_or_cast(
self['LICENSE TERM EXPIRATION DATE'],
'_end_date',
)
@property
def application_type(self):
return self['APPLICATION TYPE']
@property
def account_number(self):
return self['ACCOUNT NUMBER']
@property
def site_number(self):
return self['SITE NUMBER']
@property
def neighborhood(self):
return self['NEIGHBORHOOD']
class RawReader(csv.DictReader):
def __iter__(self, *args, **kwargs):
row = self.next()
while row:
yield Row(row)
row = self.next()
class RawWriter(csv.DictWriter):
pass
|
Add site number and application type to properties. For better filtering of new and old biz.
|
Add site number and application type to properties. For better filtering of new and old biz.
|
Python
|
unlicense
|
datascopeanalytics/chicago-new-business,datascopeanalytics/chicago-new-business
|
---
+++
@@ -34,8 +34,16 @@
)
@property
+ def application_type(self):
+ return self['APPLICATION TYPE']
+
+ @property
def account_number(self):
return self['ACCOUNT NUMBER']
+
+ @property
+ def site_number(self):
+ return self['SITE NUMBER']
@property
def neighborhood(self):
|
fec974d5eceed68fdfc2b30e4c4a0f78dfbb8808
|
messagebird/base.py
|
messagebird/base.py
|
from datetime import datetime
class Base(object):
def load(self, data):
for name, value in data.items():
if hasattr(self, name):
setattr(self, name, value)
return self
def value_to_time(self, value):
if value != None:
return datetime.strptime(value, '%Y-%m-%dT%H:%M:%S+00:00')
|
from datetime import datetime
class Base(object):
def load(self, data):
for name, value in list(data.items()):
if hasattr(self, name):
setattr(self, name, value)
return self
def value_to_time(self, value):
if value != None:
return datetime.strptime(value, '%Y-%m-%dT%H:%M:%S+00:00')
|
Update dict.items() for Python 3 compatibility
|
Update dict.items() for Python 3 compatibility
In Python 3 `items()` return iterators, and a list is never fully
build. The `items()` method in Python 3 works like `viewitems()` in
Python 2.7.
For more information see:
https://docs.python.org/3/whatsnew/3.0.html#views-and-iterators-instead-of-lists
|
Python
|
bsd-2-clause
|
messagebird/python-rest-api
|
---
+++
@@ -2,7 +2,7 @@
class Base(object):
def load(self, data):
- for name, value in data.items():
+ for name, value in list(data.items()):
if hasattr(self, name):
setattr(self, name, value)
|
e7a09ad3e3d57291aa509cd45b8d3ae7a4cadaf8
|
scripts/delete_couchdb_collection.py
|
scripts/delete_couchdb_collection.py
|
import sys
import argparse
import os
from harvester.couchdb_init import get_couchdb
import couchdb
from harvester.couchdb_sync_db_by_collection import delete_collection
def confirm_deletion(cid):
prompt = "Are you sure you want to delete all couchdb " + \
"documents for %s? yes to confirm\n" % cid
while True:
ans = raw_input(prompt).lower()
if ans == "yes":
return True
else:
return False
if __name__=='__main__':
parser = argparse.ArgumentParser(
description='Delete all documents in given collection')
parser.add_argument('collection_id',
help='Registry id for the collection')
parser.add_argument('--yes', action='store_true',
help="Don't prompt for deletion, just do it")
args = parser.parse_args(sys.argv[1:])
if args.yes or confirm_deletion(args.collection_id):
print 'DELETING COLLECTION {}'.format(args.collection_id)
num, deleted_ids = delete_collection(args.collection_id)
print "DELETED {} DOCS".format(num)
else:
print "Exiting without deleting"
|
#! /bin/env python
import sys
import argparse
import os
from harvester.couchdb_init import get_couchdb
import couchdb
from harvester.couchdb_sync_db_by_collection import delete_collection
def confirm_deletion(cid):
prompt = "Are you sure you want to delete all couchdb " + \
"documents for %s? yes to confirm\n" % cid
while True:
ans = raw_input(prompt).lower()
if ans == "yes":
return True
else:
return False
if __name__=='__main__':
parser = argparse.ArgumentParser(
description='Delete all documents in given collection')
parser.add_argument('collection_id',
help='Registry id for the collection')
parser.add_argument('--yes', action='store_true',
help="Don't prompt for deletion, just do it")
args = parser.parse_args(sys.argv[1:])
if args.yes or confirm_deletion(args.collection_id):
print 'DELETING COLLECTION {}'.format(args.collection_id)
num, deleted_ids = delete_collection(args.collection_id)
print "DELETED {} DOCS".format(num)
else:
print "Exiting without deleting"
|
Make it runnable directly from cli, no python in front
|
Make it runnable directly from cli, no python in front
|
Python
|
bsd-3-clause
|
mredar/harvester,mredar/harvester,ucldc/harvester,barbarahui/harvester,ucldc/harvester,barbarahui/harvester
|
---
+++
@@ -1,3 +1,4 @@
+#! /bin/env python
import sys
import argparse
import os
|
733d48510c4d6d8f4b9f07b6e33075cc20d1720a
|
gewebehaken/app.py
|
gewebehaken/app.py
|
# -*- coding: utf-8 -*-
"""
Gewebehaken
~~~~~~~~~~~
The WSGI application
:Copyright: 2015 `Jochen Kupperschmidt <http://homework.nwsnet.de/>`_
:License: MIT, see LICENSE for details.
"""
import logging
from logging import FileHandler, Formatter
from flask import Flask
from .hooks.twitter import blueprint as twitter_blueprint
def create_app(log_filename=None):
"""Create the actual application."""
app = Flask(__name__)
if log_filename:
configure_logging(app, log_filename)
app.register_blueprint(twitter_blueprint)
return app
def configure_logging(app, filename):
"""Configure app to log to that file."""
handler = FileHandler(filename, encoding='utf-8')
handler.setFormatter(Formatter('%(asctime)s %(funcName)s %(message)s'))
handler.setLevel(logging.INFO)
app.logger.addHandler(handler)
|
# -*- coding: utf-8 -*-
"""
Gewebehaken
~~~~~~~~~~~
The WSGI application
:Copyright: 2015 `Jochen Kupperschmidt <http://homework.nwsnet.de/>`_
:License: MIT, see LICENSE for details.
"""
import logging
from logging import FileHandler, Formatter
from flask import Flask
from .hooks.twitter import blueprint as twitter_blueprint
def create_app(log_filename=None):
"""Create the actual application."""
app = Flask(__name__, static_folder=None)
if log_filename:
configure_logging(app, log_filename)
app.register_blueprint(twitter_blueprint)
return app
def configure_logging(app, filename):
"""Configure app to log to that file."""
handler = FileHandler(filename, encoding='utf-8')
handler.setFormatter(Formatter('%(asctime)s %(funcName)s %(message)s'))
handler.setLevel(logging.INFO)
app.logger.addHandler(handler)
|
Remove default route for serving static files from URL map.
|
Remove default route for serving static files from URL map.
|
Python
|
mit
|
homeworkprod/gewebehaken
|
---
+++
@@ -20,7 +20,7 @@
def create_app(log_filename=None):
"""Create the actual application."""
- app = Flask(__name__)
+ app = Flask(__name__, static_folder=None)
if log_filename:
configure_logging(app, log_filename)
|
75db5105d609a2b28f19ee675de866425e2c5c3e
|
salt/modules/cp.py
|
salt/modules/cp.py
|
'''
Minion side functions for salt-cp
'''
import os
def recv(files, dest):
'''
Used with salt-cp, pass the files dict, and the destination
'''
ret = {}
for path, data in files.items():
final = ''
if os.path.basename(path) == os.path.basename(dest)\
and not os.path.isdir(dest):
final = dest
elif os.path.isdir(dest):
final = os.path.join(dest, os.path.basename(path))
elif os.path.isdir(os.path.dirname(dest)):
final = dest
else:
return 'Destination not available'
try:
open(final, 'w+').write(data)
ret[final] = True
except IOError:
ret[final] = False
return ret
|
'''
Minion side functions for salt-cp
'''
# Import python libs
import os
# Import salt libs
import salt.simpleauth
def recv(files, dest):
'''
Used with salt-cp, pass the files dict, and the destination.
This function recieves small fast copy files from the master via salt-cp
'''
ret = {}
for path, data in files.items():
final = ''
if os.path.basename(path) == os.path.basename(dest)\
and not os.path.isdir(dest):
final = dest
elif os.path.isdir(dest):
final = os.path.join(dest, os.path.basename(path))
elif os.path.isdir(os.path.dirname(dest)):
final = dest
else:
return 'Destination not available'
try:
open(final, 'w+').write(data)
ret[final] = True
except IOError:
ret[final] = False
return ret
#def get_file(path, dest):
# '''
# Used to get a single file from the salt master
# '''
# auth = salt.simpleauth.SAuth(__opts__)
def get_files(paths):
'''
Used to gather many files from the master, the gathered files will be
saved in the minion cachedir reflective to the paths retrived from the
master.
'''
auth = salt.simpleauth.SAuth(__opts__)
context = zmq.Context()
socket = context.socket(zmq.REQ)
socket.connect(__opts__['master_uri'])
payload = {'enc': 'aes'}
for path in paths:
dest = os.path.join(__opts__['cachedir'], 'files', path)
dirname = os.path.dirname(dest)
if not os.path.isdir(dirname):
os.makedirs(dirname)
fn_ = open(dest, 'w+')
load = {'path': path,
'cmd': '_serve_file'}
while True:
load['loc'] = fn_.tell()
payload['load'] = self.crypticle.dumps(load)
socket.send_pyobj(payload)
data = auth.crypticle.loads(socket.recv())
if not data:
break
fn_.write(data)
|
Add in the minion module function to download files from the master
|
Add in the minion module function to download files from the master
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
---
+++
@@ -1,11 +1,17 @@
'''
Minion side functions for salt-cp
'''
+# Import python libs
import os
+
+# Import salt libs
+import salt.simpleauth
def recv(files, dest):
'''
- Used with salt-cp, pass the files dict, and the destination
+ Used with salt-cp, pass the files dict, and the destination.
+
+ This function recieves small fast copy files from the master via salt-cp
'''
ret = {}
for path, data in files.items():
@@ -27,3 +33,38 @@
ret[final] = False
return ret
+
+#def get_file(path, dest):
+# '''
+# Used to get a single file from the salt master
+# '''
+# auth = salt.simpleauth.SAuth(__opts__)
+
+
+def get_files(paths):
+ '''
+ Used to gather many files from the master, the gathered files will be
+ saved in the minion cachedir reflective to the paths retrived from the
+ master.
+ '''
+ auth = salt.simpleauth.SAuth(__opts__)
+ context = zmq.Context()
+ socket = context.socket(zmq.REQ)
+ socket.connect(__opts__['master_uri'])
+ payload = {'enc': 'aes'}
+ for path in paths:
+ dest = os.path.join(__opts__['cachedir'], 'files', path)
+ dirname = os.path.dirname(dest)
+ if not os.path.isdir(dirname):
+ os.makedirs(dirname)
+ fn_ = open(dest, 'w+')
+ load = {'path': path,
+ 'cmd': '_serve_file'}
+ while True:
+ load['loc'] = fn_.tell()
+ payload['load'] = self.crypticle.dumps(load)
+ socket.send_pyobj(payload)
+ data = auth.crypticle.loads(socket.recv())
+ if not data:
+ break
+ fn_.write(data)
|
1f3eb1c526171b0ee8d2cab05e182c067bfb6c2e
|
tests/unit/modules/defaults_test.py
|
tests/unit/modules/defaults_test.py
|
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Jayesh Kariya <jayeshk@saltstack.com>`
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing Libs
from salttesting import TestCase, skipIf
from salttesting.mock import (
MagicMock,
patch,
NO_MOCK,
NO_MOCK_REASON
)
import inspect
# Import Salt Libs
from salt.modules import defaults
# Globals
defaults.__grains__ = {}
defaults.__salt__ = {}
defaults.__opts__ = {}
@skipIf(NO_MOCK, NO_MOCK_REASON)
class DefaultsTestCase(TestCase):
'''
Test cases for salt.modules.defaults
'''
# 'get' function tests: 1
def test_get(self):
'''
Test if it execute a defaults client run and return a dict
'''
mock = MagicMock(return_value='')
with patch.dict(defaults.__salt__, {'pillar.get': mock}):
self.assertEqual(defaults.get('core:users:root'), '')
@patch('salt.modules.defaults.get',
MagicMock(return_value={'users': {'root': [0]}}))
def test_get_mock(self):
'''
Test if it execute a defaults client run and return a dict
'''
with patch.object(inspect, 'stack', MagicMock(return_value=[])):
self.assertEqual(defaults.get('core:users:root'),
{'users': {'root': [0]}})
if __name__ == '__main__':
from integration import run_tests
run_tests(DefaultsTestCase, needs_daemon=False)
|
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Jayesh Kariya <jayeshk@saltstack.com>`
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing Libs
from salttesting import TestCase, skipIf
from salttesting.mock import (
MagicMock,
patch,
NO_MOCK,
NO_MOCK_REASON
)
import inspect
# Import Salt Libs
from salt.modules import defaults
# Globals
defaults.__grains__ = {}
defaults.__salt__ = {}
defaults.__opts__ = {}
@skipIf(NO_MOCK, NO_MOCK_REASON)
class DefaultsTestCase(TestCase):
'''
Test cases for salt.modules.defaults
'''
@patch('salt.modules.defaults.get',
MagicMock(return_value={'users': {'root': [0]}}))
def test_get_mock(self):
'''
Test if it execute a defaults client run and return a dict
'''
with patch.object(inspect, 'stack', MagicMock(return_value=[])):
self.assertEqual(defaults.get('core:users:root'),
{'users': {'root': [0]}})
if __name__ == '__main__':
from integration import run_tests
run_tests(DefaultsTestCase, needs_daemon=False)
|
Remove useless mocked unit test
|
Remove useless mocked unit test
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
---
+++
@@ -30,16 +30,6 @@
'''
Test cases for salt.modules.defaults
'''
- # 'get' function tests: 1
-
- def test_get(self):
- '''
- Test if it execute a defaults client run and return a dict
- '''
- mock = MagicMock(return_value='')
- with patch.dict(defaults.__salt__, {'pillar.get': mock}):
- self.assertEqual(defaults.get('core:users:root'), '')
-
@patch('salt.modules.defaults.get',
MagicMock(return_value={'users': {'root': [0]}}))
def test_get_mock(self):
|
794a233a70ac8cdd4fc0812bd651757b35e605f2
|
tests/unit/utils/test_sanitizers.py
|
tests/unit/utils/test_sanitizers.py
|
# -*- coding: utf-8 -*-
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
from salt.ext.six import text_type as text
# Import Salt Libs
from salt.utils.sanitizers import clean
# Import Salt Testing Libs
from tests.support.unit import TestCase, skipIf
from tests.support.mock import NO_MOCK, NO_MOCK_REASON
@skipIf(NO_MOCK, NO_MOCK_REASON)
class SanitizersTestCase(TestCase):
'''
TestCase for sanitizers
'''
def test_sanitized_trim(self):
'''
Test sanitized input for trimming
'''
value = ' sample '
response = clean.trim(value)
assert response == 'sample'
assert type(response) == text
def test_sanitized_filename(self):
'''
Test sanitized input for filename
'''
value = '/absolute/path/to/the/file.txt'
response = clean.filename(value)
assert response == 'file.txt'
value = '../relative/path/to/the/file.txt'
response = clean.filename(value)
assert response == 'file.txt'
def test_sanitized_hostname(self):
'''
Test sanitized input for hostname (id)
'''
value = ' ../ ../some/dubious/hostname '
response = clean.hostname(value)
assert response == 'somedubioushostname'
test_sanitized_id = test_sanitized_hostname
|
# -*- coding: utf-8 -*-
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
from salt.ext.six import text_type as text
# Import Salt Libs
from salt.utils.sanitizers import clean, mask_args_value
# Import Salt Testing Libs
from tests.support.unit import TestCase, skipIf
from tests.support.mock import NO_MOCK, NO_MOCK_REASON
@skipIf(NO_MOCK, NO_MOCK_REASON)
class SanitizersTestCase(TestCase):
'''
TestCase for sanitizers
'''
def test_sanitized_trim(self):
'''
Test sanitized input for trimming
'''
value = ' sample '
response = clean.trim(value)
assert response == 'sample'
assert type(response) == text
def test_sanitized_filename(self):
'''
Test sanitized input for filename
'''
value = '/absolute/path/to/the/file.txt'
response = clean.filename(value)
assert response == 'file.txt'
value = '../relative/path/to/the/file.txt'
response = clean.filename(value)
assert response == 'file.txt'
def test_sanitized_hostname(self):
'''
Test sanitized input for hostname (id)
'''
value = ' ../ ../some/dubious/hostname '
response = clean.hostname(value)
assert response == 'somedubioushostname'
test_sanitized_id = test_sanitized_hostname
def test_value_masked(self):
'''
Test if the values are masked.
:return:
'''
out = mask_args_value('quantum: fluctuations', 'quant*')
assert out == 'quantum: ** hidden **'
|
Add unit test for masking key:value of YAML
|
Add unit test for masking key:value of YAML
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
---
+++
@@ -5,7 +5,7 @@
from salt.ext.six import text_type as text
# Import Salt Libs
-from salt.utils.sanitizers import clean
+from salt.utils.sanitizers import clean, mask_args_value
# Import Salt Testing Libs
from tests.support.unit import TestCase, skipIf
@@ -47,3 +47,11 @@
assert response == 'somedubioushostname'
test_sanitized_id = test_sanitized_hostname
+
+ def test_value_masked(self):
+ '''
+ Test if the values are masked.
+ :return:
+ '''
+ out = mask_args_value('quantum: fluctuations', 'quant*')
+ assert out == 'quantum: ** hidden **'
|
b222fbbbcca019a1849e70cc46b1527fa5fe2082
|
database.py
|
database.py
|
from redis import StrictRedis
class QuizDB(StrictRedis):
def get_all_quizzes(self):
return self.smembers('quiz')
|
from redis import StrictRedis
class QuizDB(StrictRedis):
def get_all_quizzes(self):
return self.smembers('quiz')
def get_question(self, quizid, questionid):
return self.hget("{0}:question".format(quizid), questionid)
|
Add function to get a question
|
Add function to get a question
|
Python
|
bsd-2-clause
|
estreeper/quizalicious,estreeper/quizalicious,estreeper/quizalicious
|
---
+++
@@ -3,3 +3,7 @@
class QuizDB(StrictRedis):
def get_all_quizzes(self):
return self.smembers('quiz')
+
+ def get_question(self, quizid, questionid):
+ return self.hget("{0}:question".format(quizid), questionid)
+
|
bf9addce584961e30456c74b767afe05ca5dbb71
|
tests/test_it.py
|
tests/test_it.py
|
import requests
def test_notifications_admin_index():
# response = requests.request("GET", "http://localhost:6012")
response = requests.request("GET", "http://notifications-admin.herokuapp.com/")
assert response.status_code == 200
assert 'GOV.UK Notify' in response.content
|
import requests
def test_notifications_admin_index():
# response = requests.request("GET", "http://localhost:6012")
response = requests.request("GET", "http://notifications-admin.herokuapp.com/")
assert response.status_code == 200
assert 'GOV.UK Notify' in str(response.content)
|
Convert bytes to str for assertion
|
Convert bytes to str for assertion
|
Python
|
mit
|
alphagov/notifications-functional-tests,alphagov/notifications-functional-tests
|
---
+++
@@ -5,4 +5,4 @@
# response = requests.request("GET", "http://localhost:6012")
response = requests.request("GET", "http://notifications-admin.herokuapp.com/")
assert response.status_code == 200
- assert 'GOV.UK Notify' in response.content
+ assert 'GOV.UK Notify' in str(response.content)
|
26d7b8a1e0fef6b32b5705634fe40504a6aa258d
|
tests/test_elsewhere_twitter.py
|
tests/test_elsewhere_twitter.py
|
from __future__ import print_function, unicode_literals
from gittip.elsewhere import twitter
from gittip.testing import Harness
class TestElsewhereTwitter(Harness):
def test_get_user_info_gets_user_info(self):
twitter.TwitterAccount(self.db, "1", {'screen_name': 'alice'}).opt_in('alice')
expected = {"screen_name": "alice"}
actual = twitter.get_user_info(self.db, 'alice')
assert actual == expected
|
from __future__ import print_function, unicode_literals
from gittip.elsewhere import twitter
from gittip.testing import Harness
class TestElsewhereTwitter(Harness):
def test_get_user_info_gets_user_info(self):
twitter.TwitterAccount(self.db, "1", {'screen_name': 'alice'}).opt_in('alice')
expected = {"screen_name": "alice"}
actual = twitter.get_user_info(self.db, 'alice')
assert actual == expected
def test_get_user_info_gets_user_info_long(self):
twitter.TwitterAccount(self.db, 2147483648, {'screen_name': 'alice'}).opt_in('alice')
expected = {"screen_name": "alice"}
actual = twitter.get_user_info(self.db, 'alice')
assert actual == expected
|
Add a test for Twitter accounts with long identifier.
|
Add a test for Twitter accounts with long identifier.
|
Python
|
mit
|
gratipay/gratipay.com,eXcomm/gratipay.com,mccolgst/www.gittip.com,studio666/gratipay.com,gratipay/gratipay.com,eXcomm/gratipay.com,eXcomm/gratipay.com,gratipay/gratipay.com,mccolgst/www.gittip.com,studio666/gratipay.com,eXcomm/gratipay.com,mccolgst/www.gittip.com,mccolgst/www.gittip.com,studio666/gratipay.com,gratipay/gratipay.com,studio666/gratipay.com
|
---
+++
@@ -11,3 +11,9 @@
expected = {"screen_name": "alice"}
actual = twitter.get_user_info(self.db, 'alice')
assert actual == expected
+
+ def test_get_user_info_gets_user_info_long(self):
+ twitter.TwitterAccount(self.db, 2147483648, {'screen_name': 'alice'}).opt_in('alice')
+ expected = {"screen_name": "alice"}
+ actual = twitter.get_user_info(self.db, 'alice')
+ assert actual == expected
|
a6bed0c1de2fc437d3ad84f0b22d27d4706eb5ab
|
presentations/urls.py
|
presentations/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'presentations.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'presentations.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('presentationsapp.urls')),
)
|
Add URL routing to app
|
Add URL routing to app
|
Python
|
mit
|
masonsbro/presentations
|
---
+++
@@ -9,4 +9,5 @@
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
+ url(r'^', include('presentationsapp.urls')),
)
|
dd2d5e96672fc7870434f030ca63f6d7111642f9
|
resources/launchers/alfanousDesktop.py
|
resources/launchers/alfanousDesktop.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import alfanousDesktop.Gui
alfanousDesktop.Gui.main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
# The paths should be generated by setup script
sys.argv.extend(
'-i', '/usr/share/alfanous-indexes/',
'-l', '/usr/locale/',
'-c', '/usr/share/alfanous-config/')
from alfanousDesktop.Gui import *
main()
|
Add resource paths to python launcher script (proxy)
|
Add resource paths to python launcher script (proxy)
Former-commit-id: 7d20874c43637f1236442333f60a88ec653f53f2
|
Python
|
agpl-3.0
|
muslih/alfanous,muslih/alfanous,muslih/alfanous,muslih/alfanous,muslih/alfanous,muslih/alfanous,muslih/alfanous
|
---
+++
@@ -1,6 +1,14 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-import alfanousDesktop.Gui
+import sys
-alfanousDesktop.Gui.main()
+# The paths should be generated by setup script
+sys.argv.extend(
+ '-i', '/usr/share/alfanous-indexes/',
+ '-l', '/usr/locale/',
+ '-c', '/usr/share/alfanous-config/')
+
+from alfanousDesktop.Gui import *
+
+main()
|
ed8139a505a93c3a99fbb147817cc5695aa0ffc7
|
service/settings/local.py
|
service/settings/local.py
|
import os
from service.settings.production import *
DEBUG = { 0: False, 1: True }[int(os.getenv('DEBUG'))]
if DEBUG:
MIDDLEWARE += [
'debug_toolbar.middleware.DebugToolbarMiddleware',
]
INSTALLED_APPS += [
'debug_toolbar',
]
INTERNAL_IPS = (
'127.0.0.1',
# Docker IPs
# '172.20.0.1',
# '172.20.0.5',
)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'SHOW_TOOLBAR_CALLBACK': 'service.settings.local.show_toolbar',
}
def is_running_in_docker(*args):
import subprocess
return 'docker' in subprocess.getoutput('cat /proc/1/cgroup')
def show_toolbar(request):
if request.is_ajax():
return False
return True
|
import os
from service.settings.production import *
DEBUG = { 0: False, 1: True }[int(os.getenv('DEBUG'))]
# SSL/HTTPS Security
## Set SECURE_SSL_REDIRECT to True, so that requests over HTTP are redirected to HTTPS.
SECURE_PROXY_SSL_HEADER = None
SECURE_SSL_REDIRECT = False
## Use ‘secure’ cookies.
SESSION_COOKIE_SECURE = False
CSRF_COOKIE_SECURE = False
## Use HTTP Strict Transport Security (HSTS)
SECURE_HSTS_SECONDS = 0
SECURE_HSTS_INCLUDE_SUBDOMAINS = False
if DEBUG:
MIDDLEWARE += [
'debug_toolbar.middleware.DebugToolbarMiddleware',
]
INSTALLED_APPS += [
'debug_toolbar',
]
INTERNAL_IPS = (
'127.0.0.1',
# Docker IPs
# '172.20.0.1',
# '172.20.0.5',
)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'SHOW_TOOLBAR_CALLBACK': 'service.settings.local.show_toolbar',
}
def is_running_in_docker(*args):
import subprocess
return 'docker' in subprocess.getoutput('cat /proc/1/cgroup')
def show_toolbar(request):
if request.is_ajax():
return False
return True
|
Disable SSL/HTTPS (reverts to default values)
|
Disable SSL/HTTPS (reverts to default values)
Needing to explicitly set something to it’s default value perhaps isn’t ideal.
|
Python
|
unlicense
|
Mystopia/fantastic-doodle
|
---
+++
@@ -4,6 +4,19 @@
DEBUG = { 0: False, 1: True }[int(os.getenv('DEBUG'))]
+# SSL/HTTPS Security
+
+## Set SECURE_SSL_REDIRECT to True, so that requests over HTTP are redirected to HTTPS.
+SECURE_PROXY_SSL_HEADER = None
+SECURE_SSL_REDIRECT = False
+
+## Use ‘secure’ cookies.
+SESSION_COOKIE_SECURE = False
+CSRF_COOKIE_SECURE = False
+
+## Use HTTP Strict Transport Security (HSTS)
+SECURE_HSTS_SECONDS = 0
+SECURE_HSTS_INCLUDE_SUBDOMAINS = False
if DEBUG:
MIDDLEWARE += [
|
7cf3741070cba4d4e0016a7175158ec5993fd7f2
|
klein/__init__.py
|
klein/__init__.py
|
from functools import wraps
from twisted.internet import reactor
from twisted.web.server import Site
from klein.decorators import expose
from klein.resource import KleinResource
routes = {}
def route(r):
def deco(f):
# Swallow self.
# XXX hilariously, staticmethod would be *great* here.
@wraps(f)
def inner(self, *args, **kwargs):
return f(*args, **kwargs)
routes[f.__name__] = expose(r)(inner)
return deco
def run(host=None, port=8080):
# Invoke the metaclass directly.
runner = KleinResource.__metaclass__("runner", (KleinResource,), routes)
site = Site(runner())
reactor.listenTCP(port, site, interface=host)
reactor.run()
|
Add a couple things for Bottle-like behavior.
|
Add a couple things for Bottle-like behavior.
|
Python
|
mit
|
macmania/klein,brighid/klein,hawkowl/klein,macmania/klein,joac/klein,alex/klein,joac/klein,brighid/klein
|
---
+++
@@ -0,0 +1,26 @@
+from functools import wraps
+
+from twisted.internet import reactor
+from twisted.web.server import Site
+
+from klein.decorators import expose
+from klein.resource import KleinResource
+
+routes = {}
+
+def route(r):
+ def deco(f):
+ # Swallow self.
+ # XXX hilariously, staticmethod would be *great* here.
+ @wraps(f)
+ def inner(self, *args, **kwargs):
+ return f(*args, **kwargs)
+ routes[f.__name__] = expose(r)(inner)
+ return deco
+
+def run(host=None, port=8080):
+ # Invoke the metaclass directly.
+ runner = KleinResource.__metaclass__("runner", (KleinResource,), routes)
+ site = Site(runner())
+ reactor.listenTCP(port, site, interface=host)
+ reactor.run()
|
|
7669e6c65d46615c8e52e53dba5a1b4812e34a02
|
soccerstats/api.py
|
soccerstats/api.py
|
"""
Blueprint implementing the API wrapper.
:author: 2013, Pascal Hartig <phartig@weluse.de>
:license: BSD
"""
import json
from flask import Blueprint, request, abort
from .utils import JSONError
from .calc import calculate_scores
api = Blueprint('api', __name__, url_prefix='/v1')
class ScoresResponse(object):
def __init__(self, scores, errors):
self.scores = scores
self.errors = errors
def to_json(self):
return {'scores': self.scores, 'errors': list(self.errors)}
@api.route('/score', methods=['POST'])
def score():
"""Calculate the score for a given result set."""
try:
results = json.loads(request.data)['results']
except (ValueError, KeyError):
abort(400)
if not isinstance(results, list):
abort(400)
try:
results = calculate_scores(results)
except Exception as err:
return JSONError('CALCULATION_ERROR', code=-1, message=str(err))\
.to_error()
return ScoresResponse(*results)
|
"""
Blueprint implementing the API wrapper.
:author: 2013, Pascal Hartig <phartig@weluse.de>
:license: BSD
"""
import json
from flask import Blueprint, request, abort
from .utils import JSONError
from .calc import calculate_scores
api = Blueprint('api', __name__, url_prefix='/v1')
class ScoresResponse(object):
def __init__(self, scores, errors):
self.scores = scores
self.errors = errors
@property
def sorted_scores(self):
# Sort by descending by value
return dict(sorted(self.scores.items(), key=lambda x: -x[1]))
def to_json(self):
return {'scores': self.sorted_scores, 'errors': list(self.errors)}
@api.route('/score', methods=['POST'])
def score():
"""Calculate the score for a given result set."""
try:
results = json.loads(request.data)['results']
except (ValueError, KeyError):
abort(400)
if not isinstance(results, list):
abort(400)
try:
results = calculate_scores(results)
except Exception as err:
return JSONError('CALCULATION_ERROR', code=-1, message=str(err))\
.to_error()
return ScoresResponse(*results)
|
Return sorted values for scores
|
Return sorted values for scores
|
Python
|
bsd-3-clause
|
passy/soccer-stats-backend
|
---
+++
@@ -19,8 +19,13 @@
self.scores = scores
self.errors = errors
+ @property
+ def sorted_scores(self):
+ # Sort by descending by value
+ return dict(sorted(self.scores.items(), key=lambda x: -x[1]))
+
def to_json(self):
- return {'scores': self.scores, 'errors': list(self.errors)}
+ return {'scores': self.sorted_scores, 'errors': list(self.errors)}
@api.route('/score', methods=['POST'])
|
3eaf0ea514b0f78906af7e614079f3a90624bcc7
|
estimate.py
|
estimate.py
|
#!/usr/bin/python3
from sys import stdin
def estimateConf(conf):
"""Estimate configuration from a string."""
confElements = [int(x) for x in conf.split(sep=" ")]
disk = confElements[0]
print(disk)
procRates = confElements[1:]
print(procRates)
def estimateConfsFromInput():
"""Parse and estimate configurations from stdin."""
for line in stdin:
confs = line.splitlines()
for conf in confs:
estimateConf(conf)
if __name__ == "__main__":
estimateConfsFromInput()
|
#!/usr/bin/python3
from sys import stdin
def calcExhaustion(disk, procRates):
"""Calculate how many seconds before the disk is filled.
procRates lists the rates at which each process fills 1 byte of disk
space."""
print(disk)
print(procRates)
def estimateConf(conf):
"""Estimate configuration from a string."""
confElements = [int(x) for x in conf.split(sep=" ")]
disk = confElements[0]
procRates = confElements[1:]
eta = calcExhaustion(disk, procRates);
def estimateConfsFromInput():
"""Parse and estimate configurations from stdin."""
for line in stdin:
confs = line.splitlines()
for conf in confs:
estimateConf(conf)
if __name__ == "__main__":
estimateConfsFromInput()
|
Create fn for calculating exhaustion
|
Create fn for calculating exhaustion
|
Python
|
mit
|
MattHeard/EstimateDiskExhaustion
|
---
+++
@@ -2,14 +2,20 @@
from sys import stdin
+def calcExhaustion(disk, procRates):
+ """Calculate how many seconds before the disk is filled.
+
+ procRates lists the rates at which each process fills 1 byte of disk
+ space."""
+ print(disk)
+ print(procRates)
def estimateConf(conf):
"""Estimate configuration from a string."""
confElements = [int(x) for x in conf.split(sep=" ")]
disk = confElements[0]
- print(disk)
procRates = confElements[1:]
- print(procRates)
+ eta = calcExhaustion(disk, procRates);
def estimateConfsFromInput():
"""Parse and estimate configurations from stdin."""
|
93ac186e90790c17014d905fd2f85e7e7dde1271
|
osbrain/__init__.py
|
osbrain/__init__.py
|
import os
import Pyro4
Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
Pyro4.config.SERIALIZERS_ACCEPTED.add('dill')
Pyro4.config.SERIALIZER = 'dill'
Pyro4.config.THREADPOOL_SIZE = 16
Pyro4.config.SERVERTYPE = 'thread'
Pyro4.config.REQUIRE_EXPOSE = False
Pyro4.config.COMMTIMEOUT = 0.
Pyro4.config.DETAILED_TRACEBACK = True
os.environ['OSBRAIN_DEFAULT_TRANSPORT'] = 'ipc'
os.environ['OSBRAIN_DEFAULT_SAFE'] = 'true'
os.environ['OSBRAIN_DEFAULT_SERIALIZER'] = 'pickle'
os.environ['OSBRAIN_DEFAULT_LINGER'] = '-1'
__version__ = '0.4.0'
from .agent import Agent, AgentProcess, run_agent
from .nameserver import run_nameserver
from .proxy import Proxy, NSProxy
from .address import SocketAddress, AgentAddress
from .logging import Logger, run_logger
|
import os
import Pyro4
Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
Pyro4.config.SERIALIZERS_ACCEPTED.add('dill')
Pyro4.config.SERIALIZER = 'dill'
Pyro4.config.THREADPOOL_SIZE = 16
Pyro4.config.SERVERTYPE = 'thread'
Pyro4.config.REQUIRE_EXPOSE = False
Pyro4.config.COMMTIMEOUT = 0.
Pyro4.config.DETAILED_TRACEBACK = True
os.environ['OSBRAIN_DEFAULT_TRANSPORT'] = 'ipc'
os.environ['OSBRAIN_DEFAULT_SAFE'] = 'true'
os.environ['OSBRAIN_DEFAULT_SERIALIZER'] = 'pickle'
os.environ['OSBRAIN_DEFAULT_LINGER'] = '1'
__version__ = '0.4.0'
from .agent import Agent, AgentProcess, run_agent
from .nameserver import run_nameserver
from .proxy import Proxy, NSProxy
from .address import SocketAddress, AgentAddress
from .logging import Logger, run_logger
|
Set default linger to 1 second
|
Set default linger to 1 second
|
Python
|
apache-2.0
|
opensistemas-hub/osbrain
|
---
+++
@@ -11,7 +11,7 @@
os.environ['OSBRAIN_DEFAULT_TRANSPORT'] = 'ipc'
os.environ['OSBRAIN_DEFAULT_SAFE'] = 'true'
os.environ['OSBRAIN_DEFAULT_SERIALIZER'] = 'pickle'
-os.environ['OSBRAIN_DEFAULT_LINGER'] = '-1'
+os.environ['OSBRAIN_DEFAULT_LINGER'] = '1'
__version__ = '0.4.0'
|
8e2596db204d2f6779280309aaa06d90872e9fb2
|
tests/test_bot_support.py
|
tests/test_bot_support.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import pytest
from .test_bot import TestBot
class TestBotSupport(TestBot):
@pytest.mark.parametrize('url,result', [
('https://google.com', ['https://google.com']),
('google.com', ['google.com']),
('google.com/search?q=instabot', ['google.com/search?q=instabot']),
('https://google.com/search?q=instabot', ['https://google.com/search?q=instabot']),
('мвд.рф', ['мвд.рф']),
('https://мвд.рф', ['https://мвд.рф']),
('http://мвд.рф/news/', ['http://мвд.рф/news/']),
('hello, google.com/search?q=test and bing.com', ['google.com/search?q=test', 'bing.com']),
])
def test_extract_urls(self, url, result):
assert self.BOT.extract_urls(url) == result
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import pytest
from .test_bot import TestBot
class TestBotSupport(TestBot):
@pytest.mark.parametrize('url,result', [
('https://google.com', ['https://google.com']),
('google.com', ['google.com']),
('google.com/search?q=instabot', ['google.com/search?q=instabot']),
('https://google.com/search?q=instabot', ['https://google.com/search?q=instabot']),
('мвд.рф', ['мвд.рф']),
('https://мвд.рф', ['https://мвд.рф']),
('http://мвд.рф/news/', ['http://мвд.рф/news/']),
('hello, google.com/search?q=test and bing.com', ['google.com/search?q=test', 'bing.com']),
])
def test_extract_urls(self, url, result):
assert self.BOT.extract_urls(url) == result
def test_check_if_file_exist(self):
test_file = open('test', 'w')
assert self.BOT.check_if_file_exists('test')
test_file.close()
os.remove('test')
def test_check_if_file_exist_fail(self):
assert not self.BOT.check_if_file_exists('test')
|
Add test on check file if exist
|
Add test on check file if exist
|
Python
|
apache-2.0
|
instagrambot/instabot,ohld/instabot,instagrambot/instabot
|
---
+++
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
+
+import os
import pytest
@@ -20,3 +22,14 @@
])
def test_extract_urls(self, url, result):
assert self.BOT.extract_urls(url) == result
+
+ def test_check_if_file_exist(self):
+ test_file = open('test', 'w')
+
+ assert self.BOT.check_if_file_exists('test')
+
+ test_file.close()
+ os.remove('test')
+
+ def test_check_if_file_exist_fail(self):
+ assert not self.BOT.check_if_file_exists('test')
|
3c00c5de9d0bd6ecf860d09b786db9625e212102
|
tools/perf_expectations/PRESUBMIT.py
|
tools/perf_expectations/PRESUBMIT.py
|
#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for perf_expectations.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into gcl.
"""
UNIT_TESTS = [
'tests.perf_expectations_unittest',
]
PERF_EXPECTATIONS = 'perf_expectations.json'
def CheckChangeOnUpload(input_api, output_api):
run_tests = False
for path in input_api.LocalPaths():
if PERF_EXPECTATIONS == input_api.os_path.basename(path):
run_tests = True
output = []
if run_tests:
output.extend(input_api.canned_checks.RunPythonUnitTests(input_api,
output_api,
UNIT_TESTS))
return output
def CheckChangeOnCommit(input_api, output_api):
run_tests = False
for path in input_api.LocalPaths():
if PERF_EXPECTATIONS == input_api.os_path.basename(path):
run_tests = True
output = []
if run_tests:
output.extend(input_api.canned_checks.RunPythonUnitTests(input_api,
output_api,
UNIT_TESTS))
output.extend(input_api.canned_checks.CheckDoNotSubmit(input_api,
output_api))
return output
|
#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for perf_expectations.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into gcl.
"""
UNIT_TESTS = [
'tests.perf_expectations_unittest',
]
PERF_EXPECTATIONS = 'tools/perf_expectations/perf_expectations.json'
def CheckChangeOnUpload(input_api, output_api):
run_tests = False
for path in input_api.LocalPaths():
if PERF_EXPECTATIONS == path:
run_tests = True
output = []
if run_tests:
output.extend(input_api.canned_checks.RunPythonUnitTests(input_api,
output_api,
UNIT_TESTS))
return output
def CheckChangeOnCommit(input_api, output_api):
run_tests = False
for path in input_api.LocalPaths():
if PERF_EXPECTATIONS == path:
run_tests = True
output = []
if run_tests:
output.extend(input_api.canned_checks.RunPythonUnitTests(input_api,
output_api,
UNIT_TESTS))
output.extend(input_api.canned_checks.CheckDoNotSubmit(input_api,
output_api))
return output
|
Use full pathname to perf_expectations in test.
|
Use full pathname to perf_expectations in test.
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/266055
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@28770 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
adobe/chromium,adobe/chromium,adobe/chromium,gavinp/chromium,yitian134/chromium,Crystalnix/house-of-life-chromium,gavinp/chromium,ropik/chromium,adobe/chromium,Crystalnix/house-of-life-chromium,ropik/chromium,gavinp/chromium,adobe/chromium,ropik/chromium,adobe/chromium,gavinp/chromium,adobe/chromium,yitian134/chromium,yitian134/chromium,yitian134/chromium,gavinp/chromium,yitian134/chromium,Crystalnix/house-of-life-chromium,Crystalnix/house-of-life-chromium,Crystalnix/house-of-life-chromium,adobe/chromium,gavinp/chromium,gavinp/chromium,adobe/chromium,ropik/chromium,ropik/chromium,yitian134/chromium,gavinp/chromium,adobe/chromium,Crystalnix/house-of-life-chromium,Crystalnix/house-of-life-chromium,gavinp/chromium,ropik/chromium,gavinp/chromium,yitian134/chromium,yitian134/chromium,Crystalnix/house-of-life-chromium,yitian134/chromium,Crystalnix/house-of-life-chromium,ropik/chromium,ropik/chromium,ropik/chromium,Crystalnix/house-of-life-chromium,yitian134/chromium,Crystalnix/house-of-life-chromium,adobe/chromium
|
---
+++
@@ -13,12 +13,12 @@
'tests.perf_expectations_unittest',
]
-PERF_EXPECTATIONS = 'perf_expectations.json'
+PERF_EXPECTATIONS = 'tools/perf_expectations/perf_expectations.json'
def CheckChangeOnUpload(input_api, output_api):
run_tests = False
for path in input_api.LocalPaths():
- if PERF_EXPECTATIONS == input_api.os_path.basename(path):
+ if PERF_EXPECTATIONS == path:
run_tests = True
output = []
@@ -32,7 +32,7 @@
def CheckChangeOnCommit(input_api, output_api):
run_tests = False
for path in input_api.LocalPaths():
- if PERF_EXPECTATIONS == input_api.os_path.basename(path):
+ if PERF_EXPECTATIONS == path:
run_tests = True
output = []
|
490ce27b6e9213cd9200b6fb42e7676af58abd58
|
zou/app/models/custom_action.py
|
zou/app/models/custom_action.py
|
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class CustomAction(db.Model, BaseMixin, SerializerMixin):
name = db.Column(db.String(80), nullable=False)
url = db.Column(db.String(400))
|
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class CustomAction(db.Model, BaseMixin, SerializerMixin):
name = db.Column(db.String(80), nullable=False)
url = db.Column(db.String(400))
entity_type = db.Column(db.String(40), default="all")
|
Add entity type column to actions
|
Add entity type column to actions
|
Python
|
agpl-3.0
|
cgwire/zou
|
---
+++
@@ -6,3 +6,4 @@
class CustomAction(db.Model, BaseMixin, SerializerMixin):
name = db.Column(db.String(80), nullable=False)
url = db.Column(db.String(400))
+ entity_type = db.Column(db.String(40), default="all")
|
76d9ff900204678423208967b4578764013984ad
|
tests/test-recipes/metadata/always_include_files_glob/run_test.py
|
tests/test-recipes/metadata/always_include_files_glob/run_test.py
|
import os
import sys
import json
def main():
prefix = os.environ['PREFIX']
info_file = os.path.join(prefix, 'conda-meta',
'always_include_files_regex-0.1-0.json')
with open(info_file, 'r') as fh:
info = json.load(fh)
if sys.platform == 'darwin':
assert sorted(info['files']) == ['lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib']
elif sys.platform.startswith('linux'):
assert sorted(info['files']) == ['lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0']
if __name__ == '__main__':
main()
|
import os
import sys
import json
def main():
prefix = os.environ['PREFIX']
info_file = os.path.join(prefix, 'conda-meta',
'always_include_files_regex-0.1-0.json')
with open(info_file, 'r') as fh:
info = json.load(fh)
if sys.platform == 'darwin':
assert set(info['files']) == {'lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib'}
elif sys.platform.startswith('linux'):
assert set(info['files']) == {'lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'}
if __name__ == '__main__':
main()
|
Test sets instead of lists
|
Test sets instead of lists
|
Python
|
bsd-3-clause
|
dan-blanchard/conda-build,mwcraig/conda-build,dan-blanchard/conda-build,sandhujasmine/conda-build,dan-blanchard/conda-build,ilastik/conda-build,frol/conda-build,mwcraig/conda-build,rmcgibbo/conda-build,shastings517/conda-build,shastings517/conda-build,shastings517/conda-build,sandhujasmine/conda-build,frol/conda-build,mwcraig/conda-build,ilastik/conda-build,ilastik/conda-build,rmcgibbo/conda-build,frol/conda-build,sandhujasmine/conda-build,rmcgibbo/conda-build
|
---
+++
@@ -11,9 +11,9 @@
info = json.load(fh)
if sys.platform == 'darwin':
- assert sorted(info['files']) == ['lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib']
+ assert set(info['files']) == {'lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib'}
elif sys.platform.startswith('linux'):
- assert sorted(info['files']) == ['lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0']
+ assert set(info['files']) == {'lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'}
if __name__ == '__main__':
main()
|
025c3f6b73c97fdb58b1a492efcb6efe44cfdab0
|
twisted/plugins/caldav.py
|
twisted/plugins/caldav.py
|
from zope.interface import implements
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.python import reflect
def serviceMakerProperty(propname):
def getProperty(self):
return getattr(reflect.namedClass(self.serviceMakerClass), propname)
return property(getProperty)
class TAP(object):
implements(IPlugin, IServiceMaker)
def __init__(self, serviceMakerClass):
self.serviceMakerClass = serviceMakerClass
self._serviceMaker = None
options = serviceMakerProperty("options")
tapname = serviceMakerProperty("tapname")
description = serviceMakerProperty("description")
def makeService(self, options):
if self._serviceMaker is None:
self._serviceMaker = reflect.namedClass(self.serviceMakerClass)()
return self._serviceMaker.makeService(options)
TwistedCalDAV = TAP("calendarserver.tap.caldav.CalDAVServiceMaker")
CalDAVNotifier = TAP("twistedcaldav.notify.NotificationServiceMaker")
CalDAVMailGateway = TAP("twistedcaldav.mail.MailGatewayServiceMaker")
|
from zope.interface import implements
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.python import reflect
from twisted.internet.protocol import Factory
Factory.noisy = False
def serviceMakerProperty(propname):
def getProperty(self):
return getattr(reflect.namedClass(self.serviceMakerClass), propname)
return property(getProperty)
class TAP(object):
implements(IPlugin, IServiceMaker)
def __init__(self, serviceMakerClass):
self.serviceMakerClass = serviceMakerClass
self._serviceMaker = None
options = serviceMakerProperty("options")
tapname = serviceMakerProperty("tapname")
description = serviceMakerProperty("description")
def makeService(self, options):
if self._serviceMaker is None:
self._serviceMaker = reflect.namedClass(self.serviceMakerClass)()
return self._serviceMaker.makeService(options)
TwistedCalDAV = TAP("calendarserver.tap.caldav.CalDAVServiceMaker")
CalDAVNotifier = TAP("twistedcaldav.notify.NotificationServiceMaker")
CalDAVMailGateway = TAP("twistedcaldav.mail.MailGatewayServiceMaker")
|
Set Factory.noisy to False by default
|
Set Factory.noisy to False by default
git-svn-id: 81e381228600e5752b80483efd2b45b26c451ea2@3933 e27351fd-9f3e-4f54-a53b-843176b1656c
|
Python
|
apache-2.0
|
trevor/calendarserver,trevor/calendarserver,trevor/calendarserver
|
---
+++
@@ -3,6 +3,10 @@
from twisted.application.service import IServiceMaker
from twisted.python import reflect
+
+from twisted.internet.protocol import Factory
+Factory.noisy = False
+
def serviceMakerProperty(propname):
def getProperty(self):
@@ -13,12 +17,13 @@
class TAP(object):
implements(IPlugin, IServiceMaker)
+
def __init__(self, serviceMakerClass):
self.serviceMakerClass = serviceMakerClass
self._serviceMaker = None
- options = serviceMakerProperty("options")
- tapname = serviceMakerProperty("tapname")
+ options = serviceMakerProperty("options")
+ tapname = serviceMakerProperty("tapname")
description = serviceMakerProperty("description")
def makeService(self, options):
|
1a16d598c902218a8112841219f89044724155da
|
smatic/templatetags/smatic_tags.py
|
smatic/templatetags/smatic_tags.py
|
import os
from commands import getstatusoutput
from django import template
from django.conf import settings
from django.utils._os import safe_join
register = template.Library()
def scss(file_path):
"""
Converts an scss file into css and returns the output
"""
input_path = safe_join(settings.SMATIC_SCSS_PATH, file_path)
if not os.path.exists(input_path):
raise Exception('File does not exist: %s\n' % input_path)
sass_dict = { 'bin' : settings.SASS_BIN, 'sass_style' : 'compact', 'input' : input_path }
cmd = "%(bin)s --scss -t %(sass_style)s -C %(input)s" % sass_dict
(status, output) = getstatusoutput(cmd)
if not status == 0:
raise Exception(output)
return output
register.simple_tag(scss)
def js(file_path):
input_path = safe_join(settings.SMATIC_JS_PATH, file_path)
if not os.path.exists(input_path):
# TODO: check if enabled on
raise Exception('File does not exist: %s\n' % input_path)
return '<script type="text/javascript" src="%sjs/%s"></script>' % (settings.STATIC_URL, file_path)
register.simple_tag(js)
|
import os
from commands import getstatusoutput
from django import template
from django.conf import settings
from django.utils._os import safe_join
register = template.Library()
@register.simple_tag
def scss(file_path):
"""
Convert an scss file into css and returns the output.
"""
input_path = safe_join(settings.SMATIC_SCSS_PATH, file_path)
if not os.path.exists(input_path):
raise Exception('File does not exist: %s\n' % input_path)
cmd = "%(bin)s --scss -t %(sass_style)s -C %(input)s" % {
'bin': getattr(settings, 'SASS_BIN', 'sass'),
'sass_style': 'compact',
'input': input_path,
}
(status, output) = getstatusoutput(cmd)
if not status == 0:
raise Exception(output)
return output
@register.simple_tag
def js(file_path):
input_path = safe_join(settings.SMATIC_JS_PATH, file_path)
if not os.path.exists(input_path):
# TODO: check if enabled on
raise Exception('File does not exist: %s\n' % input_path)
return '<script type="text/javascript" src="%sjs/%s"></script>' % (
settings.STATIC_URL, file_path
)
|
Tidy up the code, and don't make settings.SASS_BIN a requirement (default to 'sass')
|
Tidy up the code, and don't make settings.SASS_BIN a requirement (default to 'sass')
|
Python
|
bsd-3-clause
|
lincolnloop/django-smatic
|
---
+++
@@ -6,32 +6,35 @@
register = template.Library()
+
+@register.simple_tag
def scss(file_path):
"""
- Converts an scss file into css and returns the output
+ Convert an scss file into css and returns the output.
"""
input_path = safe_join(settings.SMATIC_SCSS_PATH, file_path)
if not os.path.exists(input_path):
raise Exception('File does not exist: %s\n' % input_path)
- sass_dict = { 'bin' : settings.SASS_BIN, 'sass_style' : 'compact', 'input' : input_path }
- cmd = "%(bin)s --scss -t %(sass_style)s -C %(input)s" % sass_dict
+ cmd = "%(bin)s --scss -t %(sass_style)s -C %(input)s" % {
+ 'bin': getattr(settings, 'SASS_BIN', 'sass'),
+ 'sass_style': 'compact',
+ 'input': input_path,
+ }
(status, output) = getstatusoutput(cmd)
if not status == 0:
raise Exception(output)
return output
-register.simple_tag(scss)
-
+@register.simple_tag
def js(file_path):
-
input_path = safe_join(settings.SMATIC_JS_PATH, file_path)
if not os.path.exists(input_path):
# TODO: check if enabled on
raise Exception('File does not exist: %s\n' % input_path)
- return '<script type="text/javascript" src="%sjs/%s"></script>' % (settings.STATIC_URL, file_path)
-
-register.simple_tag(js)
+ return '<script type="text/javascript" src="%sjs/%s"></script>' % (
+ settings.STATIC_URL, file_path
+ )
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.