commit stringlengths 40 40 | old_file stringlengths 4 150 | new_file stringlengths 4 150 | old_contents stringlengths 0 3.26k | new_contents stringlengths 1 4.43k | subject stringlengths 15 501 | message stringlengths 15 4.06k | lang stringclasses 4 values | license stringclasses 13 values | repos stringlengths 5 91.5k | diff stringlengths 0 4.35k |
|---|---|---|---|---|---|---|---|---|---|---|
e2fb7715d10a9a02216fa5668f482952bbb60310 | readthedocs/donate/forms.py | readthedocs/donate/forms.py | import logging
from django import forms
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
import stripe
from .models import Supporter
log = logging.getLogger(__name__)
class SupporterForm(forms.ModelForm):
class Meta:
model = Supporter
fields = (
'last_4_digits',
'stripe_id',
'name',
'email',
'dollars',
'public',
)
labels = {
'public': _('Make this donation public'),
}
help_texts = {
'public': _('Your name and gravatar will be displayed on the donation page'),
}
last_4_digits = forms.CharField(widget=forms.HiddenInput(), required=True)
stripe_id = forms.CharField(widget=forms.HiddenInput(), required=True)
def clean(self):
try:
stripe.api_key = settings.STRIPE_SECRET
stripe.Charge.create(
amount=int(self.cleaned_data['dollars']) * 100,
currency="usd",
source=self.cleaned_data['stripe_id'],
description="Read the Docs Sustained Engineering",
)
except stripe.error.CardError, e:
stripe_error = e.json_body['error']
log.error('Credit card error: %s', stripe_error['message'])
raise forms.ValidationError(
_('There was a problem processing your card: %(message)s'),
params=stripe_error)
return self.cleaned_data
| import logging
from django import forms
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
import stripe
from .models import Supporter
log = logging.getLogger(__name__)
class SupporterForm(forms.ModelForm):
class Meta:
model = Supporter
fields = (
'last_4_digits',
'stripe_id',
'name',
'email',
'dollars',
'public',
)
labels = {
'public': _('Make this donation public'),
}
help_texts = {
'public': _('Your name and gravatar will be displayed on the donation page'),
}
last_4_digits = forms.CharField(widget=forms.HiddenInput(), required=True)
stripe_id = forms.CharField(widget=forms.HiddenInput(), required=True)
def clean(self):
try:
stripe.api_key = settings.STRIPE_SECRET
stripe.Charge.create(
amount=int(self.cleaned_data['dollars']) * 100,
currency='usd',
source=self.cleaned_data['stripe_id'],
description='Read the Docs Sustained Engineering',
receipt_email=self.cleaned_data['email']
)
except stripe.error.CardError, e:
stripe_error = e.json_body['error']
log.error('Credit card error: %s', stripe_error['message'])
raise forms.ValidationError(
_('There was a problem processing your card: %(message)s'),
params=stripe_error)
return self.cleaned_data
| Add receipt_email to payment processing on donations | Add receipt_email to payment processing on donations
| Python | mit | SteveViss/readthedocs.org,rtfd/readthedocs.org,KamranMackey/readthedocs.org,safwanrahman/readthedocs.org,wijerasa/readthedocs.org,laplaceliu/readthedocs.org,jerel/readthedocs.org,pombredanne/readthedocs.org,fujita-shintaro/readthedocs.org,VishvajitP/readthedocs.org,CedarLogic/readthedocs.org,atsuyim/readthedocs.org,raven47git/readthedocs.org,VishvajitP/readthedocs.org,jerel/readthedocs.org,Carreau/readthedocs.org,hach-que/readthedocs.org,tddv/readthedocs.org,Tazer/readthedocs.org,sid-kap/readthedocs.org,mhils/readthedocs.org,istresearch/readthedocs.org,SteveViss/readthedocs.org,kenshinthebattosai/readthedocs.org,CedarLogic/readthedocs.org,emawind84/readthedocs.org,sunnyzwh/readthedocs.org,wijerasa/readthedocs.org,jerel/readthedocs.org,davidfischer/readthedocs.org,rtfd/readthedocs.org,michaelmcandrew/readthedocs.org,asampat3090/readthedocs.org,istresearch/readthedocs.org,espdev/readthedocs.org,GovReady/readthedocs.org,nikolas/readthedocs.org,sid-kap/readthedocs.org,titiushko/readthedocs.org,jerel/readthedocs.org,rtfd/readthedocs.org,dirn/readthedocs.org,asampat3090/readthedocs.org,hach-que/readthedocs.org,royalwang/readthedocs.org,laplaceliu/readthedocs.org,kdkeyser/readthedocs.org,CedarLogic/readthedocs.org,kenwang76/readthedocs.org,kenwang76/readthedocs.org,sils1297/readthedocs.org,clarkperkins/readthedocs.org,KamranMackey/readthedocs.org,laplaceliu/readthedocs.org,d0ugal/readthedocs.org,wijerasa/readthedocs.org,clarkperkins/readthedocs.org,gjtorikian/readthedocs.org,LukasBoersma/readthedocs.org,d0ugal/readthedocs.org,mhils/readthedocs.org,atsuyim/readthedocs.org,emawind84/readthedocs.org,singingwolfboy/readthedocs.org,attakei/readthedocs-oauth,wanghaven/readthedocs.org,VishvajitP/readthedocs.org,singingwolfboy/readthedocs.org,kdkeyser/readthedocs.org,clarkperkins/readthedocs.org,sunnyzwh/readthedocs.org,wanghaven/readthedocs.org,cgourlay/readthedocs.org,GovReady/readthedocs.org,Tazer/readthedocs.org,wanghaven/readthedocs.org,agjohnson/readthedocs.
org,titiushko/readthedocs.org,Tazer/readthedocs.org,sils1297/readthedocs.org,mhils/readthedocs.org,GovReady/readthedocs.org,d0ugal/readthedocs.org,takluyver/readthedocs.org,SteveViss/readthedocs.org,clarkperkins/readthedocs.org,safwanrahman/readthedocs.org,LukasBoersma/readthedocs.org,sid-kap/readthedocs.org,asampat3090/readthedocs.org,asampat3090/readthedocs.org,espdev/readthedocs.org,agjohnson/readthedocs.org,sunnyzwh/readthedocs.org,royalwang/readthedocs.org,KamranMackey/readthedocs.org,kdkeyser/readthedocs.org,agjohnson/readthedocs.org,atsuyim/readthedocs.org,fujita-shintaro/readthedocs.org,safwanrahman/readthedocs.org,stevepiercy/readthedocs.org,royalwang/readthedocs.org,michaelmcandrew/readthedocs.org,emawind84/readthedocs.org,wijerasa/readthedocs.org,kenwang76/readthedocs.org,soulshake/readthedocs.org,raven47git/readthedocs.org,wanghaven/readthedocs.org,titiushko/readthedocs.org,michaelmcandrew/readthedocs.org,kenshinthebattosai/readthedocs.org,techtonik/readthedocs.org,sunnyzwh/readthedocs.org,sid-kap/readthedocs.org,techtonik/readthedocs.org,atsuyim/readthedocs.org,VishvajitP/readthedocs.org,michaelmcandrew/readthedocs.org,hach-que/readthedocs.org,titiushko/readthedocs.org,d0ugal/readthedocs.org,istresearch/readthedocs.org,stevepiercy/readthedocs.org,kenshinthebattosai/readthedocs.org,soulshake/readthedocs.org,tddv/readthedocs.org,davidfischer/readthedocs.org,raven47git/readthedocs.org,Carreau/readthedocs.org,takluyver/readthedocs.org,attakei/readthedocs-oauth,takluyver/readthedocs.org,stevepiercy/readthedocs.org,attakei/readthedocs-oauth,fujita-shintaro/readthedocs.org,dirn/readthedocs.org,gjtorikian/readthedocs.org,dirn/readthedocs.org,royalwang/readthedocs.org,cgourlay/readthedocs.org,pombredanne/readthedocs.org,gjtorikian/readthedocs.org,espdev/readthedocs.org,cgourlay/readthedocs.org,gjtorikian/readthedocs.org,SteveViss/readthedocs.org,emawind84/readthedocs.org,Carreau/readthedocs.org,LukasBoersma/readthedocs.org,takluyver/readthedocs.org,techtonik/rea
dthedocs.org,dirn/readthedocs.org,hach-que/readthedocs.org,kenshinthebattosai/readthedocs.org,espdev/readthedocs.org,kdkeyser/readthedocs.org,rtfd/readthedocs.org,singingwolfboy/readthedocs.org,davidfischer/readthedocs.org,KamranMackey/readthedocs.org,sils1297/readthedocs.org,istresearch/readthedocs.org,pombredanne/readthedocs.org,soulshake/readthedocs.org,nikolas/readthedocs.org,CedarLogic/readthedocs.org,raven47git/readthedocs.org,attakei/readthedocs-oauth,fujita-shintaro/readthedocs.org,stevepiercy/readthedocs.org,laplaceliu/readthedocs.org,safwanrahman/readthedocs.org,mhils/readthedocs.org,sils1297/readthedocs.org,espdev/readthedocs.org,nikolas/readthedocs.org,davidfischer/readthedocs.org,nikolas/readthedocs.org,Carreau/readthedocs.org,soulshake/readthedocs.org,GovReady/readthedocs.org,Tazer/readthedocs.org,agjohnson/readthedocs.org,tddv/readthedocs.org,singingwolfboy/readthedocs.org,LukasBoersma/readthedocs.org,cgourlay/readthedocs.org,techtonik/readthedocs.org,kenwang76/readthedocs.org | ---
+++
@@ -38,9 +38,10 @@
stripe.api_key = settings.STRIPE_SECRET
stripe.Charge.create(
amount=int(self.cleaned_data['dollars']) * 100,
- currency="usd",
+ currency='usd',
source=self.cleaned_data['stripe_id'],
- description="Read the Docs Sustained Engineering",
+ description='Read the Docs Sustained Engineering',
+ receipt_email=self.cleaned_data['email']
)
except stripe.error.CardError, e:
stripe_error = e.json_body['error'] |
b9bd647cfd8def947838cb35c266b3b9ac855201 | test_apriori.py | test_apriori.py | import unittest
from itertools import chain
from apriori import subsets
class AprioriTest(unittest.TestCase):
def test_subsets_should_return_empty_subsets_if_input_empty_set(self):
result = tuple(subsets(set([])))
self.assertEqual(result, ())
def test_subsets_should_return_non_empty_subsets(self):
result = tuple(subsets(set(['beer', 'rice'])))
self.assertEqual(result[0], ('beer',))
self.assertEqual(result[1], ('rice',))
self.assertEqual(result[2], ('beer', 'rice',))
if __name__ == '__main__':
unittest.main()
| from collections import defaultdict
from itertools import chain
import unittest
from apriori import (
subsets,
returnItemsWithMinSupport,
)
class AprioriTest(unittest.TestCase):
def test_subsets_should_return_empty_subsets_if_input_empty_set(self):
result = tuple(subsets(frozenset([])))
self.assertEqual(result, ())
def test_subsets_should_return_non_empty_subsets(self):
result = tuple(subsets(frozenset(['beer', 'rice'])))
self.assertEqual(result[0], ('beer',))
self.assertEqual(result[1], ('rice',))
self.assertEqual(result[2], ('beer', 'rice',))
def test_return_items_with_min_support(self):
itemSet = set([
frozenset(['apple']),
frozenset(['beer']),
frozenset(['chicken']),
frozenset(['mango']),
frozenset(['milk']),
frozenset(['rice'])
])
transactionList = [
frozenset(['beer', 'rice', 'apple', 'chicken']),
frozenset(['beer', 'rice', 'apple']),
frozenset(['beer', 'apple']),
frozenset(['mango', 'apple']),
frozenset(['beer', 'rice', 'milk', 'chicken']),
frozenset(['beer', 'rice', 'milk']),
frozenset(['beer', 'milk']),
frozenset(['mango', 'milk'])
]
minSupport = 0.5
freqSet = defaultdict(int)
result = returnItemsWithMinSupport(
itemSet,
transactionList,
minSupport,
freqSet
)
expected = set([
frozenset(['milk']),
frozenset(['apple']),
frozenset(['beer']),
frozenset(['rice'])
])
self.assertEqual(result, expected)
expected = defaultdict(
int,
{
frozenset(['apple']): 4,
frozenset(['beer']): 6,
frozenset(['chicken']): 2,
frozenset(['mango']): 2,
frozenset(['milk']): 4,
frozenset(['rice']): 4
}
)
self.assertEqual(freqSet, expected)
if __name__ == '__main__':
unittest.main()
| Test returning items with minimum support | Test returning items with minimum support
| Python | mit | asaini/Apriori,gst-group/apriori_demo | ---
+++
@@ -1,22 +1,76 @@
+from collections import defaultdict
+from itertools import chain
import unittest
-from itertools import chain
-from apriori import subsets
+from apriori import (
+ subsets,
+ returnItemsWithMinSupport,
+)
class AprioriTest(unittest.TestCase):
def test_subsets_should_return_empty_subsets_if_input_empty_set(self):
- result = tuple(subsets(set([])))
+ result = tuple(subsets(frozenset([])))
self.assertEqual(result, ())
def test_subsets_should_return_non_empty_subsets(self):
- result = tuple(subsets(set(['beer', 'rice'])))
+ result = tuple(subsets(frozenset(['beer', 'rice'])))
self.assertEqual(result[0], ('beer',))
self.assertEqual(result[1], ('rice',))
self.assertEqual(result[2], ('beer', 'rice',))
+ def test_return_items_with_min_support(self):
+ itemSet = set([
+ frozenset(['apple']),
+ frozenset(['beer']),
+ frozenset(['chicken']),
+ frozenset(['mango']),
+ frozenset(['milk']),
+ frozenset(['rice'])
+ ])
+ transactionList = [
+ frozenset(['beer', 'rice', 'apple', 'chicken']),
+ frozenset(['beer', 'rice', 'apple']),
+ frozenset(['beer', 'apple']),
+ frozenset(['mango', 'apple']),
+ frozenset(['beer', 'rice', 'milk', 'chicken']),
+ frozenset(['beer', 'rice', 'milk']),
+ frozenset(['beer', 'milk']),
+ frozenset(['mango', 'milk'])
+ ]
+ minSupport = 0.5
+ freqSet = defaultdict(int)
+
+ result = returnItemsWithMinSupport(
+ itemSet,
+ transactionList,
+ minSupport,
+ freqSet
+ )
+
+ expected = set([
+ frozenset(['milk']),
+ frozenset(['apple']),
+ frozenset(['beer']),
+ frozenset(['rice'])
+ ])
+ self.assertEqual(result, expected)
+
+ expected = defaultdict(
+ int,
+ {
+ frozenset(['apple']): 4,
+ frozenset(['beer']): 6,
+ frozenset(['chicken']): 2,
+ frozenset(['mango']): 2,
+ frozenset(['milk']): 4,
+ frozenset(['rice']): 4
+ }
+ )
+ self.assertEqual(freqSet, expected)
+
if __name__ == '__main__':
unittest.main() |
c14bdaf3043cb38571073db7162a0899a35778ed | app/utils.py | app/utils.py | from flask import url_for
def register_template_utils(app):
"""Register Jinja 2 helpers (called from __init__.py)."""
@app.template_test()
def equalto(value, other):
return value == other
@app.template_global()
def is_hidden_field(field):
from wtforms.fields import HiddenField
return isinstance(field, HiddenField)
app.add_template_global(index_for_role)
def index_for_role(role):
return url_for(role.name + '.index')
| from flask import url_for
def register_template_utils(app):
"""Register Jinja 2 helpers (called from __init__.py)."""
@app.template_test()
def equalto(value, other):
return value == other
@app.template_global()
def is_hidden_field(field):
from wtforms.fields import HiddenField
return isinstance(field, HiddenField)
app.add_template_global(index_for_role)
def index_for_role(role):
return url_for(role.index)
| Fix index_for_role function to use index field in Role class. | Fix index_for_role function to use index field in Role class.
| Python | mit | hack4impact/reading-terminal-market,hack4impact/reading-terminal-market,hack4impact/reading-terminal-market | ---
+++
@@ -17,4 +17,4 @@
def index_for_role(role):
- return url_for(role.name + '.index')
+ return url_for(role.index) |
9cb21f98e1b6670d733940ea74d75a7a01a1b38e | misp_modules/modules/expansion/__init__.py | misp_modules/modules/expansion/__init__.py | from . import _vmray # noqa
__all__ = ['vmray_submit', 'bgpranking', 'circl_passivedns', 'circl_passivessl',
'countrycode', 'cve', 'dns', 'btc_steroids', 'domaintools', 'eupi',
'farsight_passivedns', 'ipasn', 'passivetotal', 'sourcecache', 'virustotal',
'whois', 'shodan', 'reversedns', 'geoip_country', 'wiki', 'iprep',
'threatminer', 'otx', 'threatcrowd', 'vulndb', 'crowdstrike_falcon',
'yara_syntax_validator', 'hashdd', 'onyphe', 'onyphe_full', 'rbl',
'xforceexchange', 'sigma_syntax_validator', 'stix2_pattern_syntax_validator',
'sigma_queries', 'dbl_spamhaus', 'vulners', 'yara_query', 'macaddress_io',
'intel471', 'backscatter_io', 'btc_scam_check']
| from . import _vmray # noqa
__all__ = ['vmray_submit', 'bgpranking', 'circl_passivedns', 'circl_passivessl',
'countrycode', 'cve', 'dns', 'btc_steroids', 'domaintools', 'eupi',
'farsight_passivedns', 'ipasn', 'passivetotal', 'sourcecache', 'virustotal',
'whois', 'shodan', 'reversedns', 'geoip_country', 'wiki', 'iprep',
'threatminer', 'otx', 'threatcrowd', 'vulndb', 'crowdstrike_falcon',
'yara_syntax_validator', 'hashdd', 'onyphe', 'onyphe_full', 'rbl',
'xforceexchange', 'sigma_syntax_validator', 'stix2_pattern_syntax_validator',
'sigma_queries', 'dbl_spamhaus', 'vulners', 'yara_query', 'macaddress_io',
'intel471', 'backscatter_io', 'btc_scam_check', 'hibp', 'greynoise', 'macvendors']
| Add the new module sin the list of modules availables. | fix: Add the new module sin the list of modules availables.
| Python | agpl-3.0 | VirusTotal/misp-modules,MISP/misp-modules,VirusTotal/misp-modules,VirusTotal/misp-modules,MISP/misp-modules,amuehlem/misp-modules,MISP/misp-modules,amuehlem/misp-modules,amuehlem/misp-modules | ---
+++
@@ -8,4 +8,4 @@
'yara_syntax_validator', 'hashdd', 'onyphe', 'onyphe_full', 'rbl',
'xforceexchange', 'sigma_syntax_validator', 'stix2_pattern_syntax_validator',
'sigma_queries', 'dbl_spamhaus', 'vulners', 'yara_query', 'macaddress_io',
- 'intel471', 'backscatter_io', 'btc_scam_check']
+ 'intel471', 'backscatter_io', 'btc_scam_check', 'hibp', 'greynoise', 'macvendors'] |
ba4a8815305e402b4e0d587601565d44524a114e | oslo/__init__.py | oslo/__init__.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
__import__('pkg_resources').declare_namespace(__name__)
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
__import__('pkg_resources').declare_namespace(__name__)
| Remove extraneous vim editor configuration comments | Remove extraneous vim editor configuration comments
Change-Id: Id8fb52db12695bff4488ea9e74b492a9dabbbccf
Partial-Bug: #1229324
| Python | apache-2.0 | varunarya10/oslo.db,openstack/oslo.db,akash1808/oslo.db,magic0704/oslo.db,JioCloud/oslo.db,openstack/oslo.db | ---
+++
@@ -1,5 +1,3 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at |
80bbff34e7bb8d9d3779d46b1bf9e64b62592055 | spotify/__init__.py | spotify/__init__.py | from __future__ import unicode_literals
import os
from cffi import FFI
__version__ = '2.0.0a1'
header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
header = open(header_file).read()
header += '#define SPOTIFY_API_VERSION ...\n'
ffi = FFI()
ffi.cdef(header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
def _to_unicode(chars):
return ffi.string(chars).decode('utf-8')
def enum(prefix):
def wrapper(obj):
for attr in dir(lib):
if attr.startswith(prefix):
setattr(obj, attr.replace(prefix, ''), getattr(lib, attr))
return obj
return wrapper
@enum('SP_ERROR_')
class Error(Exception):
def __init__(self, error_code):
self.error_code = error_code
message = _to_unicode(lib.sp_error_message(error_code))
super(Error, self).__init__(message)
| from __future__ import unicode_literals
import os
import cffi
__version__ = '2.0.0a1'
_header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
_header = open(_header_file).read()
_header += '#define SPOTIFY_API_VERSION ...\n'
ffi = cffi.FFI()
ffi.cdef(_header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
def _to_unicode(chars):
return ffi.string(chars).decode('utf-8')
def enum(prefix):
def wrapper(obj):
for attr in dir(lib):
if attr.startswith(prefix):
setattr(obj, attr.replace(prefix, ''), getattr(lib, attr))
return obj
return wrapper
@enum('SP_ERROR_')
class Error(Exception):
def __init__(self, error_code):
self.error_code = error_code
message = _to_unicode(lib.sp_error_message(error_code))
super(Error, self).__init__(message)
| Make header file variables private | Make header file variables private
| Python | apache-2.0 | kotamat/pyspotify,felix1m/pyspotify,kotamat/pyspotify,jodal/pyspotify,felix1m/pyspotify,mopidy/pyspotify,mopidy/pyspotify,jodal/pyspotify,jodal/pyspotify,kotamat/pyspotify,felix1m/pyspotify | ---
+++
@@ -2,18 +2,17 @@
import os
-from cffi import FFI
+import cffi
__version__ = '2.0.0a1'
-header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
-header = open(header_file).read()
-header += '#define SPOTIFY_API_VERSION ...\n'
-
-ffi = FFI()
-ffi.cdef(header)
+_header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
+_header = open(_header_file).read()
+_header += '#define SPOTIFY_API_VERSION ...\n'
+ffi = cffi.FFI()
+ffi.cdef(_header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
|
b84fe94128c308be3e95be800693eb92ec8c7d25 | keyring/py33compat.py | keyring/py33compat.py | """
Compatibility support for Python 3.3. Remove when Python 3.3 support is
no longer required.
"""
from .py27compat import builtins
def max(*args, **kwargs):
"""
Add support for 'default' kwarg.
>>> max([], default='res')
'res'
>>> max(default='res')
Traceback (most recent call last):
...
TypeError: ...
>>> max('a', 'b', default='other')
'b'
"""
missing = object()
if not 'default' in kwargs:
return builtins.max(*args, **kwargs)
default = kwargs.pop('default', missing)
try:
return builtins.max(*args, **kwargs)
except ValueError as exc:
if 'empty sequence' in str(exc) and default is not missing:
return default
raise
| """
Compatibility support for Python 3.3. Remove when Python 3.3 support is
no longer required.
"""
from .py27compat import builtins
def max(*args, **kwargs):
"""
Add support for 'default' kwarg.
>>> max([], default='res')
'res'
>>> max(default='res')
Traceback (most recent call last):
...
TypeError: ...
>>> max('a', 'b', default='other')
'b'
"""
missing = object()
default = kwargs.pop('default', missing)
try:
return builtins.max(*args, **kwargs)
except ValueError as exc:
if 'empty sequence' in str(exc) and default is not missing:
return default
raise
| Remove superfluous check for default | Remove superfluous check for default
| Python | mit | jaraco/keyring | ---
+++
@@ -22,8 +22,6 @@
'b'
"""
missing = object()
- if not 'default' in kwargs:
- return builtins.max(*args, **kwargs)
default = kwargs.pop('default', missing)
try:
return builtins.max(*args, **kwargs) |
3874ca578c52879d9861213e321f6ece9e67f10b | sopel/modules/ping.py | sopel/modules/ping.py | # coding=utf8
"""
ping.py - Sopel Ping Module
Author: Sean B. Palmer, inamidst.com
About: http://sopel.chat
"""
from __future__ import unicode_literals
import random
from sopel.module import rule, priority, thread
@rule(r'(?i)(hi|hello|hey),? $nickname[ \t]*$')
def hello(bot, trigger):
if trigger.owner:
greeting = random.choice(('Fuck off,', 'Screw you,', 'Go away'))
else:
greeting = random.choice(('Hi', 'Hey', 'Hello'))
punctuation = random.choice(('', '!'))
bot.say(greeting + ' ' + trigger.nick + punctuation)
@rule(r'(?i)(Fuck|Screw) you,? $nickname[ \t]*$')
def rude(bot, trigger):
bot.say('Watch your mouth, ' + trigger.nick + ', or I\'ll tell your mother!')
@rule('$nickname!')
@priority('high')
@thread(False)
def interjection(bot, trigger):
bot.say(trigger.nick + '!')
| # coding=utf8
"""
ping.py - Sopel Ping Module
Author: Sean B. Palmer, inamidst.com
About: http://sopel.chat
"""
from __future__ import unicode_literals
import random
from sopel.module import rule, priority, thread
@rule(r'(?i)(hi|hello|hey),? $nickname[ \t]*$')
def hello(bot, trigger):
greeting = random.choice(('Hi', 'Hey', 'Hello'))
punctuation = random.choice(('', '!'))
bot.say(greeting + ' ' + trigger.nick + punctuation)
@rule(r'(?i)(Fuck|Screw) you,? $nickname[ \t]*$')
def rude(bot, trigger):
bot.say('Watch your mouth, ' + trigger.nick + ', or I\'ll tell your mother!')
@rule('$nickname!')
@priority('high')
@thread(False)
def interjection(bot, trigger):
bot.say(trigger.nick + '!')
| Stop Sopel from relying rudely to the bot's owner. | Stop Sopel from relying rudely to the bot's owner.
| Python | mit | Uname-a/knife_scraper,Uname-a/knife_scraper,Uname-a/knife_scraper | ---
+++
@@ -12,10 +12,7 @@
@rule(r'(?i)(hi|hello|hey),? $nickname[ \t]*$')
def hello(bot, trigger):
- if trigger.owner:
- greeting = random.choice(('Fuck off,', 'Screw you,', 'Go away'))
- else:
- greeting = random.choice(('Hi', 'Hey', 'Hello'))
+ greeting = random.choice(('Hi', 'Hey', 'Hello'))
punctuation = random.choice(('', '!'))
bot.say(greeting + ' ' + trigger.nick + punctuation)
|
c22252cef5225bc71ec9f407efe39f61819bcea5 | civictechprojects/sitemaps.py | civictechprojects/sitemaps.py | from common.helpers.constants import FrontEndSection
from django.contrib.sitemaps import Sitemap
from .models import Project
from datetime import date
class SectionSitemap(Sitemap):
protocol = "https"
changefreq = "monthly"
priority = 0.5
# TODO: Update this date for each release
lastmod = date(year=2019, month=12, day=5)
sections = [FrontEndSection.AboutUs, FrontEndSection.FindProjects, FrontEndSection.PartnerWithUs, FrontEndSection.Donate,
FrontEndSection.Press, FrontEndSection.ContactUs]
def items(self):
return self.sections
def location(self, page):
return '/index/?section=' + str(page)
class ProjectSitemap(Sitemap):
protocol = "https"
changefreq = "daily"
priority = 0.5
def items(self):
return Project.objects.filter(is_searchable=True).order_by('id')
def location(self, project):
return '/index/?section=AboutProject&id=' + str(project.id)
def lastmod(self, project):
return project.project_date_modified
| from common.helpers.constants import FrontEndSection
from django.contrib.sitemaps import Sitemap
from .models import Project
from datetime import date
class SectionSitemap(Sitemap):
protocol = "https"
changefreq = "monthly"
priority = 0.5
# TODO: Update this date for each release
lastmod = date(year=2019, month=12, day=5)
sections = [FrontEndSection.AboutUs, FrontEndSection.FindProjects, FrontEndSection.PartnerWithUs, FrontEndSection.Donate,
FrontEndSection.Press, FrontEndSection.ContactUs]
def items(self):
return self.sections
def location(self, section):
return '/index/?section=' + str(section.value)
class ProjectSitemap(Sitemap):
protocol = "https"
changefreq = "daily"
priority = 0.5
def items(self):
return Project.objects.filter(is_searchable=True).order_by('id')
def location(self, project):
return '/index/?section=AboutProject&id=' + str(project.id)
def lastmod(self, project):
return project.project_date_modified
| Fix static page sitemap urls | Fix static page sitemap urls
| Python | mit | DemocracyLab/CivicTechExchange,DemocracyLab/CivicTechExchange,DemocracyLab/CivicTechExchange,DemocracyLab/CivicTechExchange | ---
+++
@@ -16,8 +16,8 @@
def items(self):
return self.sections
- def location(self, page):
- return '/index/?section=' + str(page)
+ def location(self, section):
+ return '/index/?section=' + str(section.value)
class ProjectSitemap(Sitemap): |
2bc35ef508e2a954dadb999b47df614150ebfaee | ckanext/nhsengland/helpers.py | ckanext/nhsengland/helpers.py |
def split_resources(pkg_dict):
"""
Splits the resources in the provided package based on the format field
with DOC, DOCX, PDF being returned as documents, all other formats as data
"""
document_formats = ["DOC", "DOCX", "PDF"]
documents, data = [], []
for resource in pkg_dict["resources"]:
target = documents if resource.get("format", "") in document_formats \
else data
target.append(resource)
return documents, data
|
def split_resources(pkg_dict):
"""
Splits the resources in the provided package based on the format field
with DOC, DOCX, PDF being returned as documents, all other formats as data
"""
document_formats = ["DOC", "DOCX", "PDF"]
documents, data = [], []
for resource in pkg_dict["resources"]:
target = documents if resource.get("format", "").upper() in document_formats \
else data
target.append(resource)
return documents, data
| Fix format check to ignore case. | Fix format check to ignore case. | Python | agpl-3.0 | nhsengland/ckanext-nhsengland,nhsengland/ckanext-nhsengland,nhsengland/ckanext-nhsengland | ---
+++
@@ -8,7 +8,7 @@
document_formats = ["DOC", "DOCX", "PDF"]
documents, data = [], []
for resource in pkg_dict["resources"]:
- target = documents if resource.get("format", "") in document_formats \
+ target = documents if resource.get("format", "").upper() in document_formats \
else data
target.append(resource)
return documents, data |
cf663c4961a84260e34ec422a4983d312df9e8c3 | calc.py | calc.py | """calc.py: A simple python calculator."""
import sys
def add_all(nums):
return sum(nums)
def multiply_all(nums):
return reduce(lambda a, b: a * b, nums)
if __name__ == '__main__':
command = sys.argv[1]
nums = map(float, sys.argv[2:])
if command == 'add':
print(add_all(nums))
elif command == 'multiply':
print(multiply_all(nums))
| """calc.py: A simple Python calculator."""
import sys
def add_all(nums):
return sum(nums)
def multiply_all(nums):
return reduce(lambda a, b: a * b, nums)
if __name__ == '__main__':
command = sys.argv[1]
nums = map(float, sys.argv[2:])
if command == 'add':
print(add_all(nums))
elif command == 'multiply':
print(multiply_all(nums))
elif command == 'min':
print(min(nums))
| Add support to min command | Add support to min command
| Python | bsd-3-clause | anchavesb/pyCalc | ---
+++
@@ -1,5 +1,4 @@
-"""calc.py: A simple python calculator."""
-
+"""calc.py: A simple Python calculator."""
import sys
def add_all(nums):
@@ -15,3 +14,5 @@
print(add_all(nums))
elif command == 'multiply':
print(multiply_all(nums))
+ elif command == 'min':
+ print(min(nums)) |
8e00a63f539413b39aaaad77a230f5fac5a37261 | conary/build/capsulerecipe.py | conary/build/capsulerecipe.py | #
# Copyright (c) 2009 rPath, Inc.
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.rpath.com/permanent/licenses/CPL-1.0.
#
# This program is distributed in the hope that it will be useful, but
# without any warranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#
from conary.build import defaultrecipes
from conary.build.recipe import RECIPE_TYPE_CAPSULE
from conary.build.packagerecipe import BaseRequiresRecipe, AbstractPackageRecipe
class AbstractCapsuleRecipe(AbstractPackageRecipe):
internalAbstractBaseClass = 1
internalPolicyModules = ( 'packagepolicy', 'capsulepolicy' )
_recipeType = RECIPE_TYPE_CAPSULE
def __init__(self, *args, **kwargs):
klass = self._getParentClass('AbstractPackageRecipe')
klass.__init__(self, *args, **kwargs)
from conary.build import source
self._addSourceAction('source.addCapsule', source.addCapsule)
self._addSourceAction('source.addSource', source.addSource)
exec defaultrecipes.CapsuleRecipe
| #
# Copyright (c) 2009 rPath, Inc.
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.rpath.com/permanent/licenses/CPL-1.0.
#
# This program is distributed in the hope that it will be useful, but
# without any warranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#
import inspect
from conary.build import action, defaultrecipes
from conary.build.recipe import RECIPE_TYPE_CAPSULE
from conary.build.packagerecipe import BaseRequiresRecipe, AbstractPackageRecipe
class AbstractCapsuleRecipe(AbstractPackageRecipe):
internalAbstractBaseClass = 1
internalPolicyModules = ( 'packagepolicy', 'capsulepolicy' )
_recipeType = RECIPE_TYPE_CAPSULE
def __init__(self, *args, **kwargs):
klass = self._getParentClass('AbstractPackageRecipe')
klass.__init__(self, *args, **kwargs)
from conary.build import build
for name, item in build.__dict__.items():
if inspect.isclass(item) and issubclass(item, action.Action):
self._addBuildAction(name, item)
def loadSourceActions(self):
self._loadSourceActions(lambda item: item._packageAction is True)
exec defaultrecipes.CapsuleRecipe
| Enable building hybrid capsule/non-capsule packages (CNY-3271) | Enable building hybrid capsule/non-capsule packages (CNY-3271)
| Python | apache-2.0 | sassoftware/conary,sassoftware/conary,sassoftware/conary,sassoftware/conary,sassoftware/conary | ---
+++
@@ -11,7 +11,10 @@
# or fitness for a particular purpose. See the Common Public License for
# full details.
#
-from conary.build import defaultrecipes
+
+import inspect
+
+from conary.build import action, defaultrecipes
from conary.build.recipe import RECIPE_TYPE_CAPSULE
from conary.build.packagerecipe import BaseRequiresRecipe, AbstractPackageRecipe
@@ -20,13 +23,19 @@
internalAbstractBaseClass = 1
internalPolicyModules = ( 'packagepolicy', 'capsulepolicy' )
_recipeType = RECIPE_TYPE_CAPSULE
+
def __init__(self, *args, **kwargs):
klass = self._getParentClass('AbstractPackageRecipe')
klass.__init__(self, *args, **kwargs)
- from conary.build import source
- self._addSourceAction('source.addCapsule', source.addCapsule)
- self._addSourceAction('source.addSource', source.addSource)
+ from conary.build import build
+ for name, item in build.__dict__.items():
+ if inspect.isclass(item) and issubclass(item, action.Action):
+ self._addBuildAction(name, item)
+
+ def loadSourceActions(self):
+ self._loadSourceActions(lambda item: item._packageAction is True)
+
exec defaultrecipes.CapsuleRecipe |
c0512873d1f558768c174c64faf419e03b63e24b | pijobs/flashjob.py | pijobs/flashjob.py | import scrollphat
from pijobs.scrollphatjob import ScrollphatJob
class FlashJob(ScrollphatJob):
def run(self):
scrollphat.clear()
for i in range(int(self.options['loop'])):
scrollphat.set_pixels(lambda x, y: True, True)
self.sleep_interval()
scrollphat.clear()
self.sleep_interval()
self.sleep()
| import scrollphat
from pijobs.scrollphatjob import ScrollphatJob
class FlashJob(ScrollphatJob):
def run(self):
scrollphat.clear()
for i in range(int(self.options['loop'])):
scrollphat.set_pixels(lambda x, y: True, True)
self.sleep_interval()
scrollphat.clear()
self.sleep_interval()
self.sleep()
def default_options(self):
return {
'loop': 5,
'brightness': 10,
'interval': 0.2,
}
| Add default options for FlashJob. | Add default options for FlashJob.
| Python | mit | ollej/piapi,ollej/piapi | ---
+++
@@ -11,3 +11,9 @@
self.sleep_interval()
self.sleep()
+ def default_options(self):
+ return {
+ 'loop': 5,
+ 'brightness': 10,
+ 'interval': 0.2,
+ } |
70efd2427caf52a5ba45e05eb33dc47fce2147c6 | redwind/tasks.py | redwind/tasks.py | from contextlib import contextmanager
from flask import current_app
from redis import StrictRedis
import rq
_queue = None
def get_queue():
global _queue
if _queue is None:
_queue = create_queue()
return _queue
def create_queue():
"""Connect to Redis and create the RQ. Since this is not imported
directly, it is a convenient place to mock for tests that don't
care about the queue.
"""
redis = StrictRedis(
host=current_app.config.get('REDIS_HOST','localhost'),
port=current_app.config.get('REDIS_PORT',6379),
password=current_app.config.get('REDIS_PASSWORD',None)
)
return rq.Queue('redwind:low', connection=redis)
@contextmanager
def async_app_context(app_config):
from redwind import create_app
app = create_app(app_config)
with app.app_context():
yield
| from contextlib import contextmanager
from redis import StrictRedis
import rq
_queue = None
def get_queue():
global _queue
if _queue is None:
_queue = create_queue()
return _queue
def create_queue():
"""Connect to Redis and create the RQ. Since this is not imported
directly, it is a convenient place to mock for tests that don't
care about the queue.
"""
redis = StrictRedis()
return rq.Queue('redwind:low', connection=redis)
@contextmanager
def async_app_context(app_config):
from redwind import create_app
app = create_app(app_config)
with app.app_context():
yield
| Revert "Added configuration options for Redis" | Revert "Added configuration options for Redis"
This reverts commit 80129aac1fb4471e7a519b8f86df2db0b978903a.
| Python | bsd-2-clause | Lancey6/redwind,Lancey6/redwind,Lancey6/redwind | ---
+++
@@ -1,5 +1,4 @@
from contextlib import contextmanager
-from flask import current_app
from redis import StrictRedis
import rq
@@ -19,11 +18,7 @@
directly, it is a convenient place to mock for tests that don't
care about the queue.
"""
- redis = StrictRedis(
- host=current_app.config.get('REDIS_HOST','localhost'),
- port=current_app.config.get('REDIS_PORT',6379),
- password=current_app.config.get('REDIS_PASSWORD',None)
- )
+ redis = StrictRedis()
return rq.Queue('redwind:low', connection=redis)
|
75d8c39fa65500d2bf618b10d8d74991b03a68f4 | lib/rpnpy/__init__.py | lib/rpnpy/__init__.py | import sys
import ctypes as _ct
if sys.version_info < (3,):
integer_types = (int, long,)
range = xrange
else:
integer_types = (int,)
long = int
# xrange = range
C_WCHAR2CHAR = lambda x: bytes(str(x).encode('ascii'))
C_WCHAR2CHAR.__doc__ = 'Convert str to bytes'
C_CHAR2WCHAR = lambda x: str(x.decode('ascii'))
C_CHAR2WCHAR.__doc__ = 'Convert bytes to str'
C_MKSTR = lambda x: _ct.create_string_buffer(C_WCHAR2CHAR(x))
C_MKSTR.__doc__ = 'alias to ctypes.create_string_buffer, make sure bytes are provided'
| import sys
import ctypes as _ct
if sys.version_info < (3,):
integer_types = (int, long,)
range = xrange
else:
integer_types = (int,)
long = int
range = range
C_WCHAR2CHAR = lambda x: bytes(str(x).encode('ascii'))
C_WCHAR2CHAR.__doc__ = 'Convert str to bytes'
C_CHAR2WCHAR = lambda x: str(x.decode('ascii'))
C_CHAR2WCHAR.__doc__ = 'Convert bytes to str'
C_MKSTR = lambda x: _ct.create_string_buffer(C_WCHAR2CHAR(x))
C_MKSTR.__doc__ = 'alias to ctypes.create_string_buffer, make sure bytes are provided'
| Add missing rpnpy.range reference for Python 3. | Add missing rpnpy.range reference for Python 3.
Signed-off-by: Stephane_Chamberland <1054841519c328088796c1f3c72c14f95c4efe35@science.gc.ca>
(cherry picked from commit 23860277c006d9635dedcaaa5e065c7aad199d8c)
(cherry picked from commit b613c799afbf95e15f99cee50c2f76516a264f32)
| Python | lgpl-2.1 | meteokid/python-rpn,meteokid/python-rpn,meteokid/python-rpn,meteokid/python-rpn | ---
+++
@@ -7,7 +7,7 @@
else:
integer_types = (int,)
long = int
- # xrange = range
+ range = range
C_WCHAR2CHAR = lambda x: bytes(str(x).encode('ascii'))
C_WCHAR2CHAR.__doc__ = 'Convert str to bytes' |
f68808dc85b2bb0ea8fb0d7de4669099740cdb61 | mesoblog/models.py | mesoblog/models.py | from django.db import models
# Represents a category which articles can be part of
class Category(models.Model):
name = models.CharField(max_length=255)
def __str__(self):
return self.name+" ["+str(self.id)+"]"
# Article model represents one article in the blog.
class Article(models.Model):
title = models.CharField(max_length=255)
slug = models.SlugField(max_length=255)
contents = models.TextField()
date_published = models.DateTimeField()
published = models.BooleanField()
categories = models.ManyToManyField(Category)
def __str__(self):
return self.title+" ["+str(self.id)+"]"
| from django.db import models
# Represents a category which articles can be part of
class Category(models.Model):
name = models.CharField(max_length=255)
def __str__(self):
return self.name+" ["+str(self.id)+"]"
# Article model represents one article in the blog.
class Article(models.Model):
title = models.CharField(max_length=255)
slug = models.SlugField(max_length=255)
contents = models.TextField()
date_published = models.DateTimeField()
published = models.BooleanField()
primary_category = models.ForeignKey(Category, related_name='+')
categories = models.ManyToManyField(Category)
def __str__(self):
return self.title+" ["+str(self.id)+"]"
| Add a primary category which will decide which category is shown as current in the chrome for this article. | Add a primary category which will decide which category is shown as current in the chrome for this article.
TODO: Enforce including the primary category as one of the categories for the article, both in UI and server side.
| Python | mit | grundleborg/mesosphere | ---
+++
@@ -15,6 +15,7 @@
contents = models.TextField()
date_published = models.DateTimeField()
published = models.BooleanField()
+ primary_category = models.ForeignKey(Category, related_name='+')
categories = models.ManyToManyField(Category)
def __str__(self): |
cfb0bda6096378de428a1460823626f3dc4c9059 | spyder_terminal/__init__.py | spyder_terminal/__init__.py | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) Spyder Project Contributors
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Spyder Terminal Plugin."""
from .terminalplugin import TerminalPlugin as PLUGIN_CLASS
PLUGIN_CLASS
VERSION_INFO = (0, 2, 1)
__version__ = '.'.join(map(str, VERSION_INFO))
| # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) Spyder Project Contributors
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Spyder Terminal Plugin."""
from .terminalplugin import TerminalPlugin as PLUGIN_CLASS
PLUGIN_CLASS
VERSION_INFO = (0, 3, 0, 'dev0')
__version__ = '.'.join(map(str, VERSION_INFO))
| Set package version info to 0.3.0.dev0 | Set package version info to 0.3.0.dev0
| Python | mit | spyder-ide/spyder-terminal,spyder-ide/spyder-terminal,andfoy/spyder-terminal,andfoy/spyder-terminal,andfoy/spyder-terminal,spyder-ide/spyder-terminal,spyder-ide/spyder-terminal | ---
+++
@@ -11,5 +11,5 @@
PLUGIN_CLASS
-VERSION_INFO = (0, 2, 1)
+VERSION_INFO = (0, 3, 0, 'dev0')
__version__ = '.'.join(map(str, VERSION_INFO)) |
7a362c4a516a1bf6fa91b9d8c670a5d5613e5aeb | miniraf/combine.py | miniraf/combine.py | import argparse
import astropy.io.fits as fits
import numpy as np
import sys
METHOD_MAP = {"median": lambda x: np.median(x, axis=0),
"average": lambda x: np.average(x, axis=0),
"sum": lambda x: np.sum(x, axis=0)}
def stack_fits_data(filenames):
fits_files = []
for f in filenames:
fits_files.append(fits.open(f))
stack = np.array([f[0].data for f in fits_files])
for f in fits_files:
f.close()
return stack
def create_parser(subparsers):
parser_combine = subparsers.add_parser("combine", help="combine help")
parser_combine.add_argument("-m", "--method", choices=["median", "average", "sum"], required=True)
parser_combine.add_argument("-o", "--output", metavar="OUTFILE", default=sys.stdout.buffer)
parser_combine.add_argument("file", nargs="+")
parser_combine.set_defaults(func=main)
def main(args):
image_stack = stack_fits_data(args.file)
result = METHOD_MAP[args.method](image_stack)
hdu = fits.PrimaryHDU(result)
hdu.writeto(args.output)
| import argparse
import astropy.io.fits as fits
import numpy as np
import sys
METHOD_MAP = {"median": lambda x: np.median(x, axis=0, overwrite_input=True),
"average": lambda x: np.average(x, axis=0),
"sum": lambda x: np.sum(x, axis=0)}
def stack_fits_data(filenames):
fits_files = []
for f in filenames:
fits_files.append(fits.open(f))
stack = np.array([f[0].data for f in fits_files])
for f in fits_files:
f.close()
return stack
def create_parser(subparsers):
parser_combine = subparsers.add_parser("combine", help="combine help")
parser_combine.add_argument("-m", "--method", choices=["median", "average", "sum"], required=True)
parser_combine.add_argument("-o", "--output", metavar="OUTFILE", default=sys.stdout.buffer)
parser_combine.add_argument("file", nargs="+")
parser_combine.set_defaults(func=main)
def main(args):
image_stack = stack_fits_data(args.file)
result = METHOD_MAP[args.method](image_stack)
hdu = fits.PrimaryHDU(result)
hdu.writeto(args.output)
| Allow the median method to overwrite in-memory intermediate | Allow the median method to overwrite in-memory intermediate
Signed-off-by: Lizhou Sha <d6acb26e253550574bc1141efa0eb5e6de15daeb@mit.edu>
| Python | mit | vulpicastor/miniraf | ---
+++
@@ -3,7 +3,7 @@
import numpy as np
import sys
-METHOD_MAP = {"median": lambda x: np.median(x, axis=0),
+METHOD_MAP = {"median": lambda x: np.median(x, axis=0, overwrite_input=True),
"average": lambda x: np.average(x, axis=0),
"sum": lambda x: np.sum(x, axis=0)}
|
f7ade90b6f68a4a8e71a3720ef529c228f2a035a | slackelot/slackelot.py | slackelot/slackelot.py | import time
import requests
class SlackNotificationError(Exception):
pass
def send_slack_message(message, webhook_url, pretext=None, title=None):
""" Send slack message using webhooks
Args:
message (string)
webhook_url (string), 'https://hooks.slack.com/services/{team id}/{bot or channel id}/{auth token}'
pretext (string)
title (string)
"""
if 'https://hooks.slack.com/services/' not in webhook_url:
raise SlackNotificationError(
'webhook_url is not in the correct format. It should look like this:\n\
https://hooks.slack.com/services/{team id}/{bot or channel id}/{auth token}')
payload = {
'attachments': [
{
'pretext': pretext,
'title': title,
'text': message,
'mrkdwn_in': ['text', 'pretext']
}
],
'link_names': '1',
'as_user': True
}
for i in range(10):
response = requests.post(webhook_url, json=payload)
if response.status_code == 200:
return True
else:
time.sleep(3)
# If the notification doesn't go through after 10 attempts, raise an error.
raise SlackNotificationError('Slack notification failed after 10 attempts.')
| import time
import requests
class SlackNotificationError(Exception):
pass
def send_slack_message(message, webhook_url, pretext='', title='', author_name='', color=None):
"""Send slack message using webhooks
Args:
message (string)
webhook_url (string), 'https://hooks.slack.com/services/{team id}/{bot or channel id}/{auth token}'
pretext (string)
title (string)
"""
if 'https://hooks.slack.com/services/' not in webhook_url:
raise SlackNotificationError(
'webhook_url is not in the correct format. It should look like this:\n\
https://hooks.slack.com/services/{team id}/{bot or channel id}/{auth token}')
fallback ='\n'.join([title, author_name, message])
payload = {
'attachments': [
{
'fallback': fallback,
'color': color,
'pretext': pretext,
'author_name': author_name,
'title': title,
'text': message,
'mrkdwn_in': ['text', 'pretext']
}
],
'link_names': '1',
'as_user': True
}
for i in range(10):
response = requests.post(webhook_url, json=payload)
if response.status_code == 200:
return True
else:
time.sleep(3)
# If the notification doesn't go through after 10 attempts, raise an error.
raise SlackNotificationError('Slack notification failed after 10 attempts.')
| Add author_name, color, and fallback to payload | Add author_name, color, and fallback to payload
| Python | mit | Chris-Graffagnino/slackelot | ---
+++
@@ -6,8 +6,8 @@
pass
-def send_slack_message(message, webhook_url, pretext=None, title=None):
- """ Send slack message using webhooks
+def send_slack_message(message, webhook_url, pretext='', title='', author_name='', color=None):
+ """Send slack message using webhooks
Args:
message (string)
@@ -20,10 +20,15 @@
'webhook_url is not in the correct format. It should look like this:\n\
https://hooks.slack.com/services/{team id}/{bot or channel id}/{auth token}')
+ fallback ='\n'.join([title, author_name, message])
+
payload = {
'attachments': [
{
+ 'fallback': fallback,
+ 'color': color,
'pretext': pretext,
+ 'author_name': author_name,
'title': title,
'text': message,
'mrkdwn_in': ['text', 'pretext'] |
a71f79d3966c7b3f491c2dacce721cd974af52c4 | sale_properties_dynamic_fields/__openerp__.py | sale_properties_dynamic_fields/__openerp__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': "Sale properties dynamic fields",
'version': '1.0',
'category': '',
'author': "Agile Business Group, Odoo Community Association (OCA)",
'website': 'http://www.agilebg.com',
'license': 'AGPL-3',
"depends": [
'sale_properties_easy_creation',
],
"data": [
'mrp_property_group_view.xml',
],
"demo": [
],
"test": [
'test/properties.yml',
],
"active": False,
"installable": True
}
| # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': "Sale properties dynamic fields",
'version': '1.0',
'category': '',
'author': "Agile Business Group, Odoo Community Association (OCA)",
'website': 'http://www.agilebg.com',
'license': 'AGPL-3',
"depends": [
'sale_properties_easy_creation',
],
"data": [
'mrp_property_group_view.xml',
],
"demo": [
],
"test": [
'test/properties.yml',
],
"installable": True
}
| Remove active key since is deprecated | Remove active key since is deprecated
| Python | agpl-3.0 | xpansa/sale-workflow,Antiun/sale-workflow,akretion/sale-workflow,acsone/sale-workflow,brain-tec/sale-workflow,brain-tec/sale-workflow,Eficent/sale-workflow,thomaspaulb/sale-workflow,acsone/sale-workflow,open-synergy/sale-workflow,jabibi/sale-workflow,Endika/sale-workflow,fevxie/sale-workflow,factorlibre/sale-workflow,BT-cserra/sale-workflow,diagramsoftware/sale-workflow,ddico/sale-workflow,akretion/sale-workflow | ---
+++
@@ -36,6 +36,5 @@
"test": [
'test/properties.yml',
],
- "active": False,
"installable": True
} |
3e927164b77a325a68da7c3672a83977a2c408af | source/testclusters.py | source/testclusters.py | # -*- coding: utf-8 -*-
import clusters
K = 10
blognames,labels,data = clusters.readfile("blogdata1.txt")
#coords = clusters.scaledown(data)
print "Generating clusters..."
kclust = clusters.kcluster(data, k=K)
print
print "Clusters:"
for i in range(K):
print "Cluster" + str(i)
print ", ".join([blognames[r] for r in kclust[i]])
print
#clusters.draw2d(coords,blognames,jpeg='mds2d.jpg') | # -*- coding: utf-8 -*-
import clusters
K = 8
blognames,words,data = clusters.readfile("blogdata1.txt")
coords = clusters.scaledown(data)
print "Generating clusters..."
kclust = clusters.kcluster(data, k=K)
print
print "Clusters:"
for i in range(K):
print "Cluster" + str(i)
print ", ".join([blognames[r] for r in kclust[i]])
print
clusters.draw2d(coords,blognames,jpeg='mds2d.jpg') | Test draw function. Set K to 8. | Test draw function. Set K to 8.
| Python | agpl-3.0 | JARR-aggregator/JARR,JARR/JARR,JARR-aggregator/JARR,jaesivsm/pyAggr3g470r,jaesivsm/pyAggr3g470r,JARR/JARR,JARR-aggregator/JARR,jaesivsm/JARR,JARR/JARR,cedricbonhomme/pyAggr3g470r,jaesivsm/JARR,jaesivsm/pyAggr3g470r,jaesivsm/JARR,cedricbonhomme/pyAggr3g470r,cedricbonhomme/pyAggr3g470r,JARR-aggregator/JARR,cedricbonhomme/pyAggr3g470r,jaesivsm/pyAggr3g470r,cedricbonhomme/pyAggr3g470r,JARR/JARR,jaesivsm/pyAggr3g470r | ---
+++
@@ -2,11 +2,11 @@
import clusters
-K = 10
+K = 8
-blognames,labels,data = clusters.readfile("blogdata1.txt")
+blognames,words,data = clusters.readfile("blogdata1.txt")
-#coords = clusters.scaledown(data)
+coords = clusters.scaledown(data)
print "Generating clusters..."
kclust = clusters.kcluster(data, k=K)
@@ -20,4 +20,4 @@
-#clusters.draw2d(coords,blognames,jpeg='mds2d.jpg')
+clusters.draw2d(coords,blognames,jpeg='mds2d.jpg') |
c82f025f6f49e58c2a07f29ae94026cf49991939 | modelview/urls.py | modelview/urls.py | from django.conf.urls import url
from modelview import views
from oeplatform import settings
from django.conf.urls.static import static
urlpatterns = [
url(r'^(?P<sheettype>[\w\d_]+)s/$', views.listsheets, {}, name='modellist'),
url(r'^overview/$', views.overview, {}),
url(r'^(?P<sheettype>[\w\d_]+)s/add/$', views.FSAdd.as_view(), {'method':'add'}, name='modellist'),
url(r'^(?P<sheettype>[\w\d_]+)s/download/$', views.model_to_csv, {}, name='index'),
url(r'^(?P<sheettype>[\w\d_]+)s/(?P<model_name>[\w\d_]+)/$', views.show, {}, name='index'),
url(r'^(?P<sheettype>[\w\d_]+)s/(?P<model_name>[\w\d_]+)/edit/$', views.editModel, {}, name='index'),
url(r'^(?P<sheettype>[\w\d_]+)s/(?P<pk>[\w\d_]+)/update/$', views.FSAdd.as_view(), {'method':'update'}, name='index'),
]
| from django.conf.urls import url
from modelview import views
from oeplatform import settings
from django.conf.urls.static import static
urlpatterns = [
url(r'^(?P<sheettype>[\w\d_]+)s/$', views.listsheets, {}, name='modellist'),
url(r'^overview/$', views.overview, {}),
url(r'^(?P<sheettype>[\w\d_]+)s/add/$', views.FSAdd.as_view(), {'method':'add'}, name='modellist'),
url(r'^(?P<sheettype>[\w\d_]+)s/download/$', views.model_to_csv, {}, name='index'),
url(r'^(?P<sheettype>[\w\d_]+)s/(?P<model_name>[\d]+)/$', views.show, {}, name='index'),
url(r'^(?P<sheettype>[\w\d_]+)s/(?P<model_name>[\d]+)/edit/$', views.editModel, {}, name='index'),
url(r'^(?P<sheettype>[\w\d_]+)s/(?P<pk>[\d]+)/update/$', views.FSAdd.as_view(), {'method':'update'}, name='index'),
]
| Simplify regex in url matching | Simplify regex in url matching
| Python | agpl-3.0 | openego/oeplatform,openego/oeplatform,openego/oeplatform,openego/oeplatform | ---
+++
@@ -9,8 +9,8 @@
url(r'^overview/$', views.overview, {}),
url(r'^(?P<sheettype>[\w\d_]+)s/add/$', views.FSAdd.as_view(), {'method':'add'}, name='modellist'),
url(r'^(?P<sheettype>[\w\d_]+)s/download/$', views.model_to_csv, {}, name='index'),
- url(r'^(?P<sheettype>[\w\d_]+)s/(?P<model_name>[\w\d_]+)/$', views.show, {}, name='index'),
- url(r'^(?P<sheettype>[\w\d_]+)s/(?P<model_name>[\w\d_]+)/edit/$', views.editModel, {}, name='index'),
- url(r'^(?P<sheettype>[\w\d_]+)s/(?P<pk>[\w\d_]+)/update/$', views.FSAdd.as_view(), {'method':'update'}, name='index'),
+ url(r'^(?P<sheettype>[\w\d_]+)s/(?P<model_name>[\d]+)/$', views.show, {}, name='index'),
+ url(r'^(?P<sheettype>[\w\d_]+)s/(?P<model_name>[\d]+)/edit/$', views.editModel, {}, name='index'),
+ url(r'^(?P<sheettype>[\w\d_]+)s/(?P<pk>[\d]+)/update/$', views.FSAdd.as_view(), {'method':'update'}, name='index'),
] |
15fd86ff33d4f585578ef4e67614e249b1e94d45 | jjvm.py | jjvm.py | #!/usr/bin/python
import argparse
import os
import struct
import sys
###############
### CLASSES ###
###############
class MyParser(argparse.ArgumentParser):
def error(self, message):
sys.stderr.write('error: %s\n' % message)
self.print_help()
sys.exit(2)
###################
### SUBROUTINES ###
###################
def lenCpStruct(tag):
if tag == 0xa:
return 3
else:
return -1
############
### MAIN ###
############
parser = MyParser('Run bytecode in jjvm')
parser.add_argument('path', help='path to class')
args = parser.parse_args()
with open(args.path, "rb") as c:
c.seek(8)
cpCount = struct.unpack(">H", c.read(2))[0] - 1
print "Constant pool count: %d" % cpCount;
while cpCount >= 0:
cpTag = ord(c.read(1))
print "Got tag: %d" % cpTag
cpStructSize = lenCpStruct(cpTag)
if cpStructSize < 0:
print "ERROR: cpStructSize %d for tag %d" % (cpStructSize, cpTag)
sys.exit(1)
print "Size: %d" % cpStructSize
cpCount -= 1
c.seek(cpStructSize, os.SEEK_CUR)
| #!/usr/bin/python
import argparse
import os
import struct
import sys
CP_STRUCT_SIZES = { 0xa:3 }
###############
### CLASSES ###
###############
class MyParser(argparse.ArgumentParser):
def error(self, message):
sys.stderr.write('error: %s\n' % message)
self.print_help()
sys.exit(2)
###################
### SUBROUTINES ###
###################
def lenCpStruct(tag):
if tag in CP_STRUCT_SIZES:
return CP_STRUCT_SIZES[tag]
else:
return -1
############
### MAIN ###
############
parser = MyParser('Run bytecode in jjvm')
parser.add_argument('path', help='path to class')
args = parser.parse_args()
with open(args.path, "rb") as c:
c.seek(8)
cpCount = struct.unpack(">H", c.read(2))[0] - 1
print "Constant pool count: %d" % cpCount;
while cpCount >= 0:
cpTag = ord(c.read(1))
print "Got tag: %d" % cpTag
cpStructSize = lenCpStruct(cpTag)
if cpStructSize < 0:
print "ERROR: cpStructSize %d for tag %d" % (cpStructSize, cpTag)
sys.exit(1)
print "Size: %d" % cpStructSize
cpCount -= 1
c.seek(cpStructSize, os.SEEK_CUR)
| Use constants dictionary in lenCpStruct() | Use constants dictionary in lenCpStruct()
| Python | apache-2.0 | justinccdev/jjvm | ---
+++
@@ -4,6 +4,8 @@
import os
import struct
import sys
+
+CP_STRUCT_SIZES = { 0xa:3 }
###############
### CLASSES ###
@@ -18,8 +20,8 @@
### SUBROUTINES ###
###################
def lenCpStruct(tag):
- if tag == 0xa:
- return 3
+ if tag in CP_STRUCT_SIZES:
+ return CP_STRUCT_SIZES[tag]
else:
return -1
|
9da75318a5d8cf646442912f46e89ec2e6b13fb3 | odie.py | odie.py | #! /usr/bin/env python3
import config # pylint: disable=unused-import
from functools import partial
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
app = Flask("odie", template_folder='admin/templates', static_folder='admin/static')
app.config.from_object('config.FlaskConfig')
if app.config['DEBUG']:
# allow requests from default broccoli server port
from flask.ext.cors import CORS
CORS(app, origins=['http://localhost:4200'], supports_credentials=True)
sqla = SQLAlchemy(app)
login_manager = LoginManager()
login_manager.setup_app(app)
def __unauthorized():
raise ClientError("unauthorized", status=401)
login_manager.unauthorized_handler(__unauthorized)
# sqlalchemy treats columns as nullable by default, which we don't want.
Column = partial(sqla.Column, nullable=False)
# errors that will be reported to the client
class ClientError(Exception):
def __init__(self, *errors, status=400):
super().__init__()
self.errors = errors
self.status = status
| #! /usr/bin/env python3
from functools import partial
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
app = Flask("odie", template_folder='admin/templates', static_folder='admin/static')
import config # pylint: disable=unused-import
app.config.from_object('config.FlaskConfig')
if app.config['DEBUG']:
# allow requests from default broccoli server port
from flask.ext.cors import CORS
CORS(app, origins=['http://localhost:4200'], supports_credentials=True)
sqla = SQLAlchemy(app)
login_manager = LoginManager()
login_manager.setup_app(app)
def __unauthorized():
raise ClientError("unauthorized", status=401)
login_manager.unauthorized_handler(__unauthorized)
# sqlalchemy treats columns as nullable by default, which we don't want.
Column = partial(sqla.Column, nullable=False)
# errors that will be reported to the client
class ClientError(Exception):
def __init__(self, *errors, status=400):
super().__init__()
self.errors = errors
self.status = status
| Allow config access to app object | Allow config access to app object
| Python | mit | fsmi/odie-server,fjalir/odie-server,fjalir/odie-server,Kha/odie-server,fsmi/odie-server,Kha/odie-server,fsmi/odie-server,Kha/odie-server,fjalir/odie-server | ---
+++
@@ -1,6 +1,4 @@
#! /usr/bin/env python3
-
-import config # pylint: disable=unused-import
from functools import partial
@@ -10,6 +8,7 @@
app = Flask("odie", template_folder='admin/templates', static_folder='admin/static')
+import config # pylint: disable=unused-import
app.config.from_object('config.FlaskConfig')
if app.config['DEBUG']: |
58d5f96464a4706fd88869f7a09c1dffcd9ac684 | mywebsite/urls.py | mywebsite/urls.py | from django.conf.urls import include, url
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
# Examples:
# url(r'^$', 'mywebsite.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^blog/', include('myblogapp.urls')),
url(r'^$', include('myblogapp.urls')),
]
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| from django.conf.urls import include, url
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
from mywebsite import views
urlpatterns = [
# Examples:
# url(r'^$', 'mywebsite.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^blog/', include('myblogapp.urls', namespace='blog')),
url(r'^about/', views.AboutView.as_view(), name='about'),
url(r'^$', include('myblogapp.urls')),
]
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| Add namespace for blog and add about url | Add namespace for blog and add about url
| Python | mit | TomGijselinck/mywebsite,TomGijselinck/mywebsite | ---
+++
@@ -2,6 +2,7 @@
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
+from mywebsite import views
urlpatterns = [
# Examples:
@@ -9,10 +10,10 @@
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
- url(r'^blog/', include('myblogapp.urls')),
- url(r'^$', include('myblogapp.urls')),
+ url(r'^blog/', include('myblogapp.urls', namespace='blog')),
+ url(r'^about/', views.AboutView.as_view(), name='about'),
+ url(r'^$', include('myblogapp.urls')),
]
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
- |
7cf58ed386028a616c2083364d3f5c92e0c0ade3 | examples/hello_world/hello_world.py | examples/hello_world/hello_world.py | #!/usr/bin/env python
# encoding: utf-8
"""
A Simple Example Flask Application
==================================
"""
# Third Party Libs
from flask import Flask
from flask_hal import HAL, document
app = Flask(__name__)
HAL(app) # Initialise HAL
@app.route('/foo')
def foo():
d = document.Document(data={
'foo': 'bar'
})
return d.to_json()
if __name__ == "__main__":
app.run()
| #!/usr/bin/env python
# encoding: utf-8
"""
A Simple Example Flask Application
==================================
"""
# Third Party Libs
from flask import Flask
from flask_hal import HAL, document
app = Flask(__name__)
HAL(app) # Initialise HAL
@app.route('/hello')
def foo():
return document.Document(data={
'message': 'Hello World'
})
if __name__ == "__main__":
app.run()
| Update to hello world example | Update to hello world example
| Python | unlicense | thisissoon/Flask-HAL,thisissoon/Flask-HAL | ---
+++
@@ -15,13 +15,12 @@
HAL(app) # Initialise HAL
-@app.route('/foo')
+@app.route('/hello')
def foo():
- d = document.Document(data={
- 'foo': 'bar'
+ return document.Document(data={
+ 'message': 'Hello World'
})
- return d.to_json()
if __name__ == "__main__":
app.run() |
774ece47574466f6661de469ef0f43ecf97d66f0 | molly/utils/misc.py | molly/utils/misc.py | import urllib2, sys, os.path, imp
class AnyMethodRequest(urllib2.Request):
def __init__(self, url, data=None, headers={}, origin_req_host=None, unverifiable=None, method=None):
self.method = method and method.upper() or None
urllib2.Request.__init__(self, url, data, headers, origin_req_host, unverifiable)
def get_method(self):
if not self.method is None:
return self.method
elif self.has_data():
return "POST"
else:
return "GET"
def get_norm_sys_path():
"""
Returns a normalised path that can be used for PYTHONPATH to recreate the
path used for this invocation.
"""
sys_path = sys.path[:]
# Find the path to the first package containing the settings module.
# Once we have it, normalise it and add it to our sys_path if it isn't
# already there.
project_path = imp.find_module(os.environ['DJANGO_SETTINGS_MODULE'].split('.')[0])[1]
sys_path.insert(0, os.path.join(project_path, '..'))
sys_path = [os.path.normpath(p) for p in sys_path if p != '']
# Remove duplicates. This is O(n^2), but efficiency isn't too much of an
# issue when n is small.
sys_path = [p for i,p in enumerate(sys_path) if p not in sys_path[:i]]
return sys_path | import urllib2, sys, os.path, imp
class AnyMethodRequest(urllib2.Request):
def __init__(self, url, data=None, headers={}, origin_req_host=None, unverifiable=None, method=None):
self.method = method and method.upper() or None
urllib2.Request.__init__(self, url, data, headers, origin_req_host, unverifiable)
def get_method(self):
if not self.method is None:
return self.method
elif self.has_data():
return "POST"
else:
return "GET"
def get_norm_sys_path():
"""
Returns a normalised path that can be used for PYTHONPATH to recreate the
path used for this invocation.
"""
sys_path = sys.path[:]
# Find the path to the first package containing the settings module.
# Once we have it, normalise it and add it to our sys_path if it isn't
# already there.
try:
project_path = imp.find_module(os.environ['DJANGO_SETTINGS_MODULE'].split('.')[0])[1]
except ImportError:
project_path = imp.find_module('settings')[1]
sys_path.insert(0, os.path.join(project_path, '..'))
sys_path = [os.path.normpath(p) for p in sys_path if p != '']
# Remove duplicates. This is O(n^2), but efficiency isn't too much of an
# issue when n is small.
sys_path = [p for i,p in enumerate(sys_path) if p not in sys_path[:i]]
return sys_path | Make create_crontab fall back to default settings search before explicitly searching for DJANGO_SETTINGS_MODULE | Make create_crontab fall back to default settings search before explicitly searching for DJANGO_SETTINGS_MODULE
| Python | apache-2.0 | mollyproject/mollyproject,mollyproject/mollyproject,mollyproject/mollyproject | ---
+++
@@ -24,7 +24,10 @@
# Find the path to the first package containing the settings module.
# Once we have it, normalise it and add it to our sys_path if it isn't
# already there.
- project_path = imp.find_module(os.environ['DJANGO_SETTINGS_MODULE'].split('.')[0])[1]
+ try:
+ project_path = imp.find_module(os.environ['DJANGO_SETTINGS_MODULE'].split('.')[0])[1]
+ except ImportError:
+ project_path = imp.find_module('settings')[1]
sys_path.insert(0, os.path.join(project_path, '..'))
sys_path = [os.path.normpath(p) for p in sys_path if p != ''] |
c1510244999e1e88dd66f62857d855e466cec570 | deprecated/__init__.py | deprecated/__init__.py | # -*- coding: utf-8 -*-
import functools
import inspect
import warnings
string_types = (type(b''), type(u''))
def deprecated(reason):
"""
This is a decorator which can be used to mark functions
as deprecated. It will result in a warning being emitted
when the function is used.
"""
if isinstance(reason, string_types):
# The @deprecated is used with a 'reason'.
#
# .. code-block:: python
#
# @deprecated("please, use another function")
# def old_function(x, y):
# pass
def decorator(func1):
if inspect.isclass(func1):
fmt1 = "Call to deprecated class {name} ({reason})."
else:
fmt1 = "Call to deprecated function {name} ({reason})."
@functools.wraps(func1)
def new_func1(*args, **kwargs):
warnings.simplefilter('always', DeprecationWarning)
warnings.warn(
fmt1.format(name=func1.__name__, reason=reason),
category=DeprecationWarning,
stacklevel=2
)
warnings.simplefilter('default', DeprecationWarning)
return func1(*args, **kwargs)
return new_func1
return decorator
elif inspect.isclass(reason) or inspect.isfunction(reason):
# The @deprecated is used without any 'reason'.
#
# .. code-block:: python
#
# @deprecated
# def old_function(x, y):
# pass
func2 = reason
if inspect.isclass(func2):
fmt2 = "Call to deprecated class {name}."
else:
fmt2 = "Call to deprecated function {name}."
@functools.wraps(func2)
def new_func2(*args, **kwargs):
warnings.simplefilter('always', DeprecationWarning)
warnings.warn(
fmt2.format(name=func2.__name__),
category=DeprecationWarning,
stacklevel=2
)
warnings.simplefilter('default', DeprecationWarning)
return func2(*args, **kwargs)
return new_func2
else:
raise TypeError(repr(type(reason)))
| # -*- coding: utf-8 -*-
import functools
import warnings
def deprecated(func):
"""
This is a decorator which can be used to mark functions
as deprecated. It will result in a warning being emmitted
when the function is used.
"""
@functools.wraps(func)
def new_func(*args, **kwargs):
warnings.simplefilter('always', DeprecationWarning)
warnings.warn(
"Call to deprecated function {0}.".format(func.__name__),
category=DeprecationWarning,
stacklevel=2
)
warnings.simplefilter('default', DeprecationWarning)
return func(*args, **kwargs)
return new_func
| Replace invalid string format '{}' by '{0}' for Python 2.6 compatibility. | Replace invalid string format '{}' by '{0}' for Python 2.6 compatibility.
| Python | mit | vrcmarcos/python-deprecated | ---
+++
@@ -1,80 +1,25 @@
# -*- coding: utf-8 -*-
import functools
-import inspect
import warnings
-string_types = (type(b''), type(u''))
-
-def deprecated(reason):
+def deprecated(func):
"""
This is a decorator which can be used to mark functions
- as deprecated. It will result in a warning being emitted
+ as deprecated. It will result in a warning being emmitted
when the function is used.
"""
- if isinstance(reason, string_types):
+ @functools.wraps(func)
+ def new_func(*args, **kwargs):
+ warnings.simplefilter('always', DeprecationWarning)
+ warnings.warn(
+ "Call to deprecated function {0}.".format(func.__name__),
+ category=DeprecationWarning,
+ stacklevel=2
+ )
+ warnings.simplefilter('default', DeprecationWarning)
+ return func(*args, **kwargs)
- # The @deprecated is used with a 'reason'.
- #
- # .. code-block:: python
- #
- # @deprecated("please, use another function")
- # def old_function(x, y):
- # pass
-
- def decorator(func1):
-
- if inspect.isclass(func1):
- fmt1 = "Call to deprecated class {name} ({reason})."
- else:
- fmt1 = "Call to deprecated function {name} ({reason})."
-
- @functools.wraps(func1)
- def new_func1(*args, **kwargs):
- warnings.simplefilter('always', DeprecationWarning)
- warnings.warn(
- fmt1.format(name=func1.__name__, reason=reason),
- category=DeprecationWarning,
- stacklevel=2
- )
- warnings.simplefilter('default', DeprecationWarning)
- return func1(*args, **kwargs)
-
- return new_func1
-
- return decorator
-
- elif inspect.isclass(reason) or inspect.isfunction(reason):
-
- # The @deprecated is used without any 'reason'.
- #
- # .. code-block:: python
- #
- # @deprecated
- # def old_function(x, y):
- # pass
-
- func2 = reason
-
- if inspect.isclass(func2):
- fmt2 = "Call to deprecated class {name}."
- else:
- fmt2 = "Call to deprecated function {name}."
-
- @functools.wraps(func2)
- def new_func2(*args, **kwargs):
- warnings.simplefilter('always', DeprecationWarning)
- warnings.warn(
- fmt2.format(name=func2.__name__),
- category=DeprecationWarning,
- stacklevel=2
- )
- warnings.simplefilter('default', DeprecationWarning)
- return func2(*args, **kwargs)
-
- return new_func2
-
- else:
- raise TypeError(repr(type(reason)))
+ return new_func |
d4d04b72727b5e3886255a866f984b75fe610406 | organizations/admin.py | organizations/admin.py | from django.contrib import admin
from organizations.models import (Organization, OrganizationUser,
OrganizationOwner)
class OwnerInline(admin.StackedInline):
model = OrganizationOwner
class OrganizationAdmin(admin.ModelAdmin):
inlines = [OwnerInline]
list_display = ['name', 'is_active']
prepopulated_fields = {"slug": ("name",)}
class OrganizationUserAdmin(admin.ModelAdmin):
list_display = ['user', 'organization', 'is_admin']
class OrganizationOwnerAdmin(admin.ModelAdmin):
pass
admin.site.register(Organization, OrganizationAdmin)
admin.site.register(OrganizationUser, OrganizationUserAdmin)
admin.site.register(OrganizationOwner, OrganizationOwnerAdmin)
| from django.contrib import admin
from organizations.models import (Organization, OrganizationUser,
OrganizationOwner)
class OwnerInline(admin.StackedInline):
model = OrganizationOwner
raw_id_fields = ('organization_user',)
class OrganizationAdmin(admin.ModelAdmin):
inlines = [OwnerInline]
list_display = ['name', 'is_active']
prepopulated_fields = {"slug": ("name",)}
class OrganizationUserAdmin(admin.ModelAdmin):
list_display = ['user', 'organization', 'is_admin']
raw_id_fields = ('user', 'organization')
class OrganizationOwnerAdmin(admin.ModelAdmin):
raw_id_fields = ('organization_user', 'organization')
admin.site.register(Organization, OrganizationAdmin)
admin.site.register(OrganizationUser, OrganizationUserAdmin)
admin.site.register(OrganizationOwner, OrganizationOwnerAdmin)
| Use raw ID fields for users, organizations | Use raw ID fields for users, organizations
Avoids slowing down admin sites
| Python | bsd-2-clause | bennylope/django-organizations,st8st8/django-organizations,DESHRAJ/django-organizations,GauthamGoli/django-organizations,bennylope/django-organizations,GauthamGoli/django-organizations,aptivate/django-organizations,DESHRAJ/django-organizations,aptivate/django-organizations,st8st8/django-organizations,arteria/django-ar-organizations,arteria/django-ar-organizations,aptivate/django-organizations | ---
+++
@@ -6,6 +6,7 @@
class OwnerInline(admin.StackedInline):
model = OrganizationOwner
+ raw_id_fields = ('organization_user',)
class OrganizationAdmin(admin.ModelAdmin):
@@ -16,10 +17,11 @@
class OrganizationUserAdmin(admin.ModelAdmin):
list_display = ['user', 'organization', 'is_admin']
+ raw_id_fields = ('user', 'organization')
class OrganizationOwnerAdmin(admin.ModelAdmin):
- pass
+ raw_id_fields = ('organization_user', 'organization')
admin.site.register(Organization, OrganizationAdmin) |
fb865b2ca9905c70d79e15bf41c29899c533eea6 | MyCapytain/__init__.py | MyCapytain/__init__.py | # -*- coding: utf-8 -*-
"""
.. module:: MyCapytain
:platform: Unix, Windows
:synopsis: Canonical PrototypeText Service and Canonical URN tool suite for Python
.. moduleauthor:: Thibault Clérice <leponteineptique@gmail.com>
"""
__version__ = "2.0.0b16"
| # -*- coding: utf-8 -*-
"""
.. module:: MyCapytain
:platform: Unix, Windows
:synopsis: Canonical PrototypeText Service and Canonical URN tool suite for Python
.. moduleauthor:: Thibault Clérice <leponteineptique@gmail.com>
"""
__version__ = "2.0.0b17"
| Upgrade and remove some bugs | Upgrade and remove some bugs
| Python | mpl-2.0 | Capitains/MyCapytain,Capitains/MyCapytain | ---
+++
@@ -9,4 +9,4 @@
"""
-__version__ = "2.0.0b16"
+__version__ = "2.0.0b17" |
7b78ce63a34b3f043fd79eb8f3f1d2049103e46f | flocker/control/__init__.py | flocker/control/__init__.py | # Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Control service for managing a Flocker cluster.
A Flocker cluster is composed of a number of nodes. The control service is
in charge of managing the desired configuration, and exposes a public API
for modifying and retrieving the desired configuration. The nodes are
modified by convergence agents that retrieve the desired configuration
from the control service and make necessary changes to the nodes so that
they match that configuration.
"""
| # Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Control service for managing a Flocker cluster.
A Flocker cluster is composed of a number of nodes. The control service is
in charge of managing the desired configuration and exposes a public API
for modifying and retrieving the desired configuration. The nodes are
modified by convergence agents that retrieve the desired configuration
from the control service and make necessary changes to the nodes so that
they match that configuration.
"""
| Address review comment: comma comma comma comma chameleon, it comes and goes etc. | Address review comment: comma comma comma comma chameleon, it comes and goes etc.
| Python | apache-2.0 | w4ngyi/flocker,adamtheturtle/flocker,mbrukman/flocker,LaynePeng/flocker,jml/flocker,runcom/flocker,agonzalezro/flocker,wallnerryan/flocker-profiles,AndyHuu/flocker,moypray/flocker,mbrukman/flocker,Azulinho/flocker,hackday-profilers/flocker,w4ngyi/flocker,1d4Nf6/flocker,agonzalezro/flocker,runcom/flocker,adamtheturtle/flocker,agonzalezro/flocker,adamtheturtle/flocker,moypray/flocker,hackday-profilers/flocker,AndyHuu/flocker,Azulinho/flocker,1d4Nf6/flocker,wallnerryan/flocker-profiles,lukemarsden/flocker,lukemarsden/flocker,wallnerryan/flocker-profiles,hackday-profilers/flocker,1d4Nf6/flocker,w4ngyi/flocker,LaynePeng/flocker,achanda/flocker,Azulinho/flocker,jml/flocker,LaynePeng/flocker,moypray/flocker,jml/flocker,mbrukman/flocker,AndyHuu/flocker,lukemarsden/flocker,achanda/flocker,runcom/flocker,achanda/flocker | ---
+++
@@ -4,7 +4,7 @@
Control service for managing a Flocker cluster.
A Flocker cluster is composed of a number of nodes. The control service is
-in charge of managing the desired configuration, and exposes a public API
+in charge of managing the desired configuration and exposes a public API
for modifying and retrieving the desired configuration. The nodes are
modified by convergence agents that retrieve the desired configuration
from the control service and make necessary changes to the nodes so that |
ff476b33c26a9067e6ac64b2c161d29b0febea33 | py/capnptools/examples/tests/test_books.py | py/capnptools/examples/tests/test_books.py | import unittest
from examples import books
class BooksTest(unittest.TestCase):
def test_builder(self):
book = books.MallocMessageBuilder().init_root(books.Book)
book.title = 'Moby-Dick; or, The Whale'
book.authors = ['Herman Melville']
self.assertEqual(
{
'title': 'Moby-Dick; or, The Whale',
'authors': ['Herman Melville'],
},
book._as_dict(),
)
book = book._as_reader()
self.assertEqual('Moby-Dick; or, The Whale', book.title)
self.assertEqual(['Herman Melville'], book.authors._as_dict())
self.assertEqual(
{
'title': 'Moby-Dick; or, The Whale',
'authors': ['Herman Melville'],
},
book._as_dict(),
)
if __name__ == '__main__':
unittest.main()
| import unittest
import os
import tempfile
from examples import books
class BooksTest(unittest.TestCase):
BOOK = {
'title': 'Moby-Dick; or, The Whale',
'authors': ['Herman Melville'],
}
def test_builder(self):
book = books.MallocMessageBuilder().init_root(books.Book)
book.title = self.BOOK['title']
book.authors = self.BOOK['authors']
self.assertEqual(self.BOOK, book._as_dict())
book = book._as_reader()
self.assertEqual(self.BOOK['title'], book.title)
self.assertEqual(self.BOOK['authors'], book.authors._as_dict())
self.assertEqual(self.BOOK, book._as_dict())
def test_write(self):
builder = books.MallocMessageBuilder()
book = builder.init_root(books.Book)
book.title = self.BOOK['title']
book.authors = self.BOOK['authors']
for read_cls, write_func in [
('StreamFdMessageReader', 'write_to'),
('PackedFdMessageReader', 'write_packed_to')]:
with self.subTest(read_cls=read_cls, write_func=write_func):
fd, path = tempfile.mkstemp()
try:
getattr(builder, write_func)(fd)
os.close(fd)
fd = os.open(path, os.O_RDONLY)
reader = getattr(books, read_cls)(fd)
book = reader.get_root(books.Book)
self.assertEqual(self.BOOK, book._as_dict())
finally:
os.unlink(path)
os.close(fd)
if __name__ == '__main__':
unittest.main()
| Add unit tests for write_to and write_packed_to | Add unit tests for write_to and write_packed_to
| Python | mit | clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage | ---
+++
@@ -1,32 +1,55 @@
import unittest
+
+import os
+import tempfile
from examples import books
class BooksTest(unittest.TestCase):
+ BOOK = {
+ 'title': 'Moby-Dick; or, The Whale',
+ 'authors': ['Herman Melville'],
+ }
+
def test_builder(self):
+
book = books.MallocMessageBuilder().init_root(books.Book)
- book.title = 'Moby-Dick; or, The Whale'
- book.authors = ['Herman Melville']
- self.assertEqual(
- {
- 'title': 'Moby-Dick; or, The Whale',
- 'authors': ['Herman Melville'],
- },
- book._as_dict(),
- )
+ book.title = self.BOOK['title']
+ book.authors = self.BOOK['authors']
+ self.assertEqual(self.BOOK, book._as_dict())
book = book._as_reader()
- self.assertEqual('Moby-Dick; or, The Whale', book.title)
- self.assertEqual(['Herman Melville'], book.authors._as_dict())
- self.assertEqual(
- {
- 'title': 'Moby-Dick; or, The Whale',
- 'authors': ['Herman Melville'],
- },
- book._as_dict(),
- )
+ self.assertEqual(self.BOOK['title'], book.title)
+ self.assertEqual(self.BOOK['authors'], book.authors._as_dict())
+ self.assertEqual(self.BOOK, book._as_dict())
+
+ def test_write(self):
+
+ builder = books.MallocMessageBuilder()
+ book = builder.init_root(books.Book)
+ book.title = self.BOOK['title']
+ book.authors = self.BOOK['authors']
+
+ for read_cls, write_func in [
+ ('StreamFdMessageReader', 'write_to'),
+ ('PackedFdMessageReader', 'write_packed_to')]:
+
+ with self.subTest(read_cls=read_cls, write_func=write_func):
+ fd, path = tempfile.mkstemp()
+ try:
+ getattr(builder, write_func)(fd)
+ os.close(fd)
+
+ fd = os.open(path, os.O_RDONLY)
+ reader = getattr(books, read_cls)(fd)
+ book = reader.get_root(books.Book)
+ self.assertEqual(self.BOOK, book._as_dict())
+
+ finally:
+ os.unlink(path)
+ os.close(fd)
if __name__ == '__main__': |
a5b40d9781caf74179f8c7e1f6fe4de5299b1e59 | src/osmviz/__init__.py | src/osmviz/__init__.py | # osmviz module #
# Copyright (c) 2010 Colin Bick, Robert Damphousse
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys
if sys.version_info < (3,):
raise ImportError(
"""You are running OSMViz 3.0 on Python 2.
OSMViz 3.0 and above are no longer compatible with Python 2, and you still
ended up with this version installed. That's unfortunate; sorry about that.
It should not have happened. Make sure you have pip >= 9.0 to avoid this kind
of issue, as well as setuptools >= 24.2:
$ pip install pip setuptools --upgrade
Your choices:
- Upgrade to Python 3.
- Install an older version of OSMViz:
$ pip install 'osmviz<3.0'
It would be great if you can figure out how this version ended up being
installed, and try to check how to prevent it for future users.
See the following for more up-to-date information:
https://github.com/hugovk/osmviz/issues/18
"""
)
| # osmviz module #
# Copyright (c) 2010 Colin Bick, Robert Damphousse
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
| Remove Python 2 ImportError, let python_requires handle it | Remove Python 2 ImportError, let python_requires handle it
| Python | mit | hugovk/osmviz,hugovk/osmviz | ---
+++
@@ -18,32 +18,3 @@
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
-import sys
-
-if sys.version_info < (3,):
- raise ImportError(
- """You are running OSMViz 3.0 on Python 2.
-
-OSMViz 3.0 and above are no longer compatible with Python 2, and you still
-ended up with this version installed. That's unfortunate; sorry about that.
-It should not have happened. Make sure you have pip >= 9.0 to avoid this kind
-of issue, as well as setuptools >= 24.2:
-
- $ pip install pip setuptools --upgrade
-
-Your choices:
-
-- Upgrade to Python 3.
-
-- Install an older version of OSMViz:
-
- $ pip install 'osmviz<3.0'
-
-It would be great if you can figure out how this version ended up being
-installed, and try to check how to prevent it for future users.
-
-See the following for more up-to-date information:
-
-https://github.com/hugovk/osmviz/issues/18
-"""
- ) |
787494af73a0b0e316547c3ec8536aa9ac21575e | clients.py | clients.py | #!/usr/bin/env python
from socket import *
HOST = 'localhost'
PORT = 21567
BUFSIZ = 1024
ADDR = (HOST, PORT)
while True:
tcpCliSock = socket(AF_INET, SOCK_STREAM)
tcpCliSock.connect(ADDR)
data = raw_input('> ')
if not data:
break
tcpCliSock.send('%s\r\n' % data)
data = tcpCliSock.recv(BUFSIZ)
if not data:
break
print data.strip()
tcpCliSock.close()
| #!/usr/bin/env python
import argparse
from socket import *
HOST = 'localhost'
PORT = 21567
BUFSIZ = 1024
parser = argparse.ArgumentParser(description='Allow the user to specify a hostname and a port.')
parser.add_argument('--hostname', default=HOST, help='Add hostname')
parser.add_argument('--port', default=PORT, help='Add port')
args = parser.parse_args()
ADDR = (args.hostname, int(args.port))
while True:
tcpCliSock = socket(AF_INET, SOCK_STREAM)
tcpCliSock.connect(ADDR)
data = raw_input('> ')
if not data:
break
tcpCliSock.send('%s\r\n' % data)
data = tcpCliSock.recv(BUFSIZ)
if not data:
break
print data.strip()
tcpCliSock.close()
| Allow the user to specify a hostname and port | Allow the user to specify a hostname and port
| Python | mit | ccandillo/chapter2 | ---
+++
@@ -1,11 +1,18 @@
#!/usr/bin/env python
+import argparse
from socket import *
HOST = 'localhost'
PORT = 21567
BUFSIZ = 1024
-ADDR = (HOST, PORT)
+
+parser = argparse.ArgumentParser(description='Allow the user to specify a hostname and a port.')
+parser.add_argument('--hostname', default=HOST, help='Add hostname')
+parser.add_argument('--port', default=PORT, help='Add port')
+args = parser.parse_args()
+
+ADDR = (args.hostname, int(args.port))
while True:
tcpCliSock = socket(AF_INET, SOCK_STREAM) |
540c0130ccca7d7d7cf51ddbe251652a5c46364e | registrations/admin.py | registrations/admin.py | from django.contrib import admin
from .models import Source, Registration
class RegistrationAdmin(admin.ModelAdmin):
list_display = [
"id", "stage", "validated", "mother_id", "source",
"created_at", "updated_at", "created_by", "updated_by"]
list_filter = ["source", "validated", "created_at"]
search_fields = ["mother_id", "to_addr"]
admin.site.register(Source)
admin.site.register(Registration, RegistrationAdmin)
| from django.contrib import admin
from .models import Source, Registration, SubscriptionRequest
class RegistrationAdmin(admin.ModelAdmin):
list_display = [
"id", "stage", "validated", "mother_id", "source",
"created_at", "updated_at", "created_by", "updated_by"]
list_filter = ["source", "validated", "created_at"]
search_fields = ["mother_id", "to_addr"]
class SubscriptionRequestAdmin(admin.ModelAdmin):
list_display = [
"id", "contact", "messageset", "next_sequence_number", "lang",
"schedule", "created_at", "updated_at"]
list_filter = ["messageset", "created_at"]
search_fields = ["contact"]
admin.site.register(Source)
admin.site.register(Registration, RegistrationAdmin)
admin.site.register(SubscriptionRequest, SubscriptionRequestAdmin)
| Add SubscriptionRequestAdmin - reviewed over shoulder by @gsvr | Add SubscriptionRequestAdmin - reviewed over shoulder by @gsvr
| Python | bsd-3-clause | praekelt/hellomama-registration,praekelt/hellomama-registration | ---
+++
@@ -1,5 +1,5 @@
from django.contrib import admin
-from .models import Source, Registration
+from .models import Source, Registration, SubscriptionRequest
class RegistrationAdmin(admin.ModelAdmin):
@@ -9,5 +9,15 @@
list_filter = ["source", "validated", "created_at"]
search_fields = ["mother_id", "to_addr"]
+
+class SubscriptionRequestAdmin(admin.ModelAdmin):
+ list_display = [
+ "id", "contact", "messageset", "next_sequence_number", "lang",
+ "schedule", "created_at", "updated_at"]
+ list_filter = ["messageset", "created_at"]
+ search_fields = ["contact"]
+
+
admin.site.register(Source)
admin.site.register(Registration, RegistrationAdmin)
+admin.site.register(SubscriptionRequest, SubscriptionRequestAdmin) |
20a8d57f5e3d00898c7362d650b37f7962fdfe7a | tests/test_distributions.py | tests/test_distributions.py | from __future__ import division
import sympy
from symfit import Variable, Parameter
from symfit.distributions import Gaussian, Exp
def test_gaussian():
"""
Make sure that symfit.distributions.Gaussians produces the expected
sympy expression.
"""
x0 = Parameter()
sig = Parameter(positive=True)
x = Variable()
new = sympy.exp(-(x - x0)**2/(2*sig**2))/sympy.sqrt((2*sympy.pi*sig**2))
assert isinstance(new, sympy.Expr)
g = Gaussian(x, x0, sig)
assert issubclass(g.__class__, sympy.Expr)
assert new == g
# A pdf should always integrate to 1 on its domain
assert sympy.integrate(g, (x, -sympy.oo, sympy.oo)) == 1
def test_exp():
"""
Make sure that symfit.distributions.Exp produces the expected
sympy expression.
"""
l = Parameter(positive=True)
x = Variable()
new = l * sympy.exp(- l * x)
assert isinstance(new, sympy.Expr)
e = Exp(x, l)
assert issubclass(e.__class__, sympy.Expr)
assert new == e
# A pdf should always integrate to 1 on its domain
assert sympy.integrate(e, (x, 0, sympy.oo)) == 1
| from __future__ import division
import sympy
from symfit import Variable, Parameter
from symfit.distributions import Gaussian, Exp
def test_gaussian():
"""
Make sure that symfit.distributions.Gaussians produces the expected
sympy expression.
"""
x0 = Parameter('x0')
sig = Parameter('sig', positive=True)
x = Variable('x')
new = sympy.exp(-(x - x0)**2/(2*sig**2))/sympy.sqrt((2*sympy.pi*sig**2))
assert isinstance(new, sympy.Expr)
g = Gaussian(x, x0, sig)
assert issubclass(g.__class__, sympy.Expr)
assert new == g
# A pdf should always integrate to 1 on its domain
assert sympy.integrate(g, (x, -sympy.oo, sympy.oo)) == 1
def test_exp():
"""
Make sure that symfit.distributions.Exp produces the expected
sympy expression.
"""
l = Parameter('l', positive=True)
x = Variable('x')
new = l * sympy.exp(- l * x)
assert isinstance(new, sympy.Expr)
e = Exp(x, l)
assert issubclass(e.__class__, sympy.Expr)
assert new == e
# A pdf should always integrate to 1 on its domain
assert sympy.integrate(e, (x, 0, sympy.oo)) == 1
| Add names to Parameters/Variables to surpress DepricationWarnings | Add names to Parameters/Variables to surpress DepricationWarnings
| Python | mit | tBuLi/symfit | ---
+++
@@ -11,9 +11,9 @@
Make sure that symfit.distributions.Gaussians produces the expected
sympy expression.
"""
- x0 = Parameter()
- sig = Parameter(positive=True)
- x = Variable()
+ x0 = Parameter('x0')
+ sig = Parameter('sig', positive=True)
+ x = Variable('x')
new = sympy.exp(-(x - x0)**2/(2*sig**2))/sympy.sqrt((2*sympy.pi*sig**2))
assert isinstance(new, sympy.Expr)
@@ -30,8 +30,8 @@
Make sure that symfit.distributions.Exp produces the expected
sympy expression.
"""
- l = Parameter(positive=True)
- x = Variable()
+ l = Parameter('l', positive=True)
+ x = Variable('x')
new = l * sympy.exp(- l * x)
assert isinstance(new, sympy.Expr) |
22dc2bd827143c4c73b7b122d94a64611960e9a1 | wiki/models.py | wiki/models.py | from couchdb.schema import *
from couchdb.schema import View
from wiki.auth.models import User
class Page(Document):
created_date = DateTimeField()
last_edited_date = DateTimeField()
title = TextField()
contents = TextField()
auth_user_editable = BooleanField()
user = User()
get_pages = View('pages',
'function (doc) { emit(doc.title, doc);}',
name='all')
| from couchdb.schema import *
from couchdb.schema import View
class Page(Document):
created_date = DateTimeField()
last_edited_date = DateTimeField()
title = TextField()
contents = TextField()
auth_user_editable = BooleanField()
user = DictField()
get_pages = View('pages',
'function (doc) { emit(doc.title, doc);}',
name='all')
| Fix an unreferenced module bug | Fix an unreferenced module bug
| Python | mit | theju/django-couch-wiki | ---
+++
@@ -1,6 +1,5 @@
from couchdb.schema import *
from couchdb.schema import View
-from wiki.auth.models import User
class Page(Document):
created_date = DateTimeField()
@@ -8,7 +7,7 @@
title = TextField()
contents = TextField()
auth_user_editable = BooleanField()
- user = User()
+ user = DictField()
get_pages = View('pages',
'function (doc) { emit(doc.title, doc);}', |
ff6ef6752b4aa0f561f2776d400e3c313963815c | polling/models.py | polling/models.py | from __future__ import unicode_literals
from django.db import models
# Create your models here.
| from __future__ import unicode_literals
from django.db import models
# Create your models here.
PARTY_DEMOCRATIC = 'democratic'
PARTY_GREEN = 'green'
PARTY_LIBERTARIAN = 'libertarian'
PARTY_REPUBLICAN = 'republican'
PARTIES = (
(PARTY_DEMOCRATIC, PARTY_DEMOCRATIC.title()),
(PARTY_GREEN, PARTY_GREEN.title()),
(PARTY_LIBERTARIAN, PARTY_LIBERTARIAN.title()),
(PARTY_REPUBLICAN, PARTY_REPUBLICAN.title()),
)
CANDIDATE_CLINTON = 'clinton'
CANDIDATE_JOHNSON = 'johnson'
CANDIDATE_STEIN = 'stein'
CANDIDATE_TRUMP = 'trump'
CANDIDATE_NONE = 'none'
CANDIDATES = (
(CANDIDATE_CLINTON, CANDIDATE_CLINTON.title()),
(CANDIDATE_JOHNSON, CANDIDATE_JOHNSON.title()),
(CANDIDATE_STEIN, CANDIDATE_STEIN.title()),
(CANDIDATE_TRUMP, CANDIDATE_TRUMP.title()),
(CANDIDATE_NONE, "No One"),
)
CANDIDATE_TO_PARTY = {
CANDIDATE_CLINTON: PARTY_DEMOCRATIC,
CANDIDATE_JOHNSON: PARTY_LIBERTARIAN,
CANDIDATE_STEIN: PARTY_GREEN,
CANDIDATE_TRUMP: PARTY_REPUBLICAN,
}
class State(models.Model):
"""All states (and districts) that can vote in federal elections."""
name = models.CharField()
updated = models.DateField()
abbv = models.CharField()
tipping_point_rank = models.IntegerField()
safe_for = models.CharField(choices=CANDIDATES, default=CANDIDATE_NONE)
safe_rank = models.IntegerField(default=-1)
leans = models.CharField(choices=CANDIDATES, default=CANDIDATE_NONE)
lean_rank = models.IntegerField(default=-1)
class Meta:
unique_together = ('name', 'updated')
| Add a State model and some enums | Add a State model and some enums
| Python | mit | sbuss/voteswap,sbuss/voteswap,sbuss/voteswap,sbuss/voteswap | ---
+++
@@ -3,3 +3,52 @@
from django.db import models
# Create your models here.
+
+
+PARTY_DEMOCRATIC = 'democratic'
+PARTY_GREEN = 'green'
+PARTY_LIBERTARIAN = 'libertarian'
+PARTY_REPUBLICAN = 'republican'
+
+PARTIES = (
+ (PARTY_DEMOCRATIC, PARTY_DEMOCRATIC.title()),
+ (PARTY_GREEN, PARTY_GREEN.title()),
+ (PARTY_LIBERTARIAN, PARTY_LIBERTARIAN.title()),
+ (PARTY_REPUBLICAN, PARTY_REPUBLICAN.title()),
+)
+
+
+CANDIDATE_CLINTON = 'clinton'
+CANDIDATE_JOHNSON = 'johnson'
+CANDIDATE_STEIN = 'stein'
+CANDIDATE_TRUMP = 'trump'
+CANDIDATE_NONE = 'none'
+CANDIDATES = (
+ (CANDIDATE_CLINTON, CANDIDATE_CLINTON.title()),
+ (CANDIDATE_JOHNSON, CANDIDATE_JOHNSON.title()),
+ (CANDIDATE_STEIN, CANDIDATE_STEIN.title()),
+ (CANDIDATE_TRUMP, CANDIDATE_TRUMP.title()),
+ (CANDIDATE_NONE, "No One"),
+)
+
+CANDIDATE_TO_PARTY = {
+ CANDIDATE_CLINTON: PARTY_DEMOCRATIC,
+ CANDIDATE_JOHNSON: PARTY_LIBERTARIAN,
+ CANDIDATE_STEIN: PARTY_GREEN,
+ CANDIDATE_TRUMP: PARTY_REPUBLICAN,
+}
+
+
+class State(models.Model):
+ """All states (and districts) that can vote in federal elections."""
+ name = models.CharField()
+ updated = models.DateField()
+ abbv = models.CharField()
+ tipping_point_rank = models.IntegerField()
+ safe_for = models.CharField(choices=CANDIDATES, default=CANDIDATE_NONE)
+ safe_rank = models.IntegerField(default=-1)
+ leans = models.CharField(choices=CANDIDATES, default=CANDIDATE_NONE)
+ lean_rank = models.IntegerField(default=-1)
+
+ class Meta:
+ unique_together = ('name', 'updated') |
2293cc41ef1fada91c1d71c2f75ebf3e5609bd9e | admin/metrics/views.py | admin/metrics/views.py | from django.views.generic import TemplateView
from django.contrib.auth.mixins import PermissionRequiredMixin
from admin.base.settings import KEEN_CREDENTIALS
class MetricsView(PermissionRequiredMixin, TemplateView):
template_name = 'metrics/osf_metrics.html'
permission_required = 'common_auth.view_metrics'
raise_exception = True
def get_context_data(self, **kwargs):
kwargs.update(KEEN_CREDENTIALS.copy())
return super(MetricsView, self).get_context_data(**kwargs)
| from django.views.generic import TemplateView
from django.contrib.auth.mixins import PermissionRequiredMixin
from admin.base.settings import KEEN_CREDENTIALS
class MetricsView(PermissionRequiredMixin, TemplateView):
template_name = 'metrics/osf_metrics.html'
permission_required = 'osf.view_metrics'
raise_exception = True
def get_context_data(self, **kwargs):
kwargs.update(KEEN_CREDENTIALS.copy())
return super(MetricsView, self).get_context_data(**kwargs)
| Update permission required to match new permission name | Update permission required to match new permission name
| Python | apache-2.0 | HalcyonChimera/osf.io,caseyrollins/osf.io,chennan47/osf.io,hmoco/osf.io,crcresearch/osf.io,felliott/osf.io,saradbowman/osf.io,chennan47/osf.io,caseyrollins/osf.io,cslzchen/osf.io,icereval/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io,CenterForOpenScience/osf.io,HalcyonChimera/osf.io,adlius/osf.io,sloria/osf.io,baylee-d/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,cslzchen/osf.io,TomBaxter/osf.io,felliott/osf.io,Nesiehr/osf.io,chrisseto/osf.io,cwisecarver/osf.io,binoculars/osf.io,hmoco/osf.io,cwisecarver/osf.io,chennan47/osf.io,leb2dg/osf.io,adlius/osf.io,caneruguz/osf.io,brianjgeiger/osf.io,leb2dg/osf.io,laurenrevere/osf.io,erinspace/osf.io,caneruguz/osf.io,Johnetordoff/osf.io,chrisseto/osf.io,Nesiehr/osf.io,TomBaxter/osf.io,brianjgeiger/osf.io,mattclark/osf.io,felliott/osf.io,aaxelb/osf.io,caneruguz/osf.io,icereval/osf.io,Nesiehr/osf.io,Johnetordoff/osf.io,binoculars/osf.io,adlius/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,pattisdr/osf.io,aaxelb/osf.io,erinspace/osf.io,sloria/osf.io,crcresearch/osf.io,TomBaxter/osf.io,binoculars/osf.io,baylee-d/osf.io,mattclark/osf.io,laurenrevere/osf.io,HalcyonChimera/osf.io,crcresearch/osf.io,icereval/osf.io,CenterForOpenScience/osf.io,erinspace/osf.io,felliott/osf.io,hmoco/osf.io,Nesiehr/osf.io,Johnetordoff/osf.io,cwisecarver/osf.io,cslzchen/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,laurenrevere/osf.io,chrisseto/osf.io,sloria/osf.io,mfraezz/osf.io,cwisecarver/osf.io,aaxelb/osf.io,pattisdr/osf.io,caneruguz/osf.io,pattisdr/osf.io,baylee-d/osf.io,saradbowman/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,chrisseto/osf.io,hmoco/osf.io,adlius/osf.io,leb2dg/osf.io,leb2dg/osf.io | ---
+++
@@ -6,7 +6,7 @@
class MetricsView(PermissionRequiredMixin, TemplateView):
template_name = 'metrics/osf_metrics.html'
- permission_required = 'common_auth.view_metrics'
+ permission_required = 'osf.view_metrics'
raise_exception = True
def get_context_data(self, **kwargs): |
e9a8d67115295e852e376d17bd159f5b9789bb4d | videos/module/chapters/models.py | videos/module/chapters/models.py | from django.db import models
from django.utils.translation import ugettext_lazy as _
class Chapter(models.Model):
"""
Video section.
"""
video = models.ForeignKey('videos.Video')
title = models.CharField(max_length=255)
timecode = models.TimeField(help_text='hh:mm:ss')
preview = models.ImageField(upload_to='videos/chapter/', null=True, blank=True, help_text=_('Preview image for this chapter.'))
class Meta:
app_label = 'videos'
ordering = ('timecode',)
def __unicode__(self):
return self.title
@property
def seconds(self):
return self.timecode.hour*3600+self.timecode.minute*60+self.timecode.second
| import datetime
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
@python_2_unicode_compatible
class Chapter(models.Model):
"""Video section"""
video = models.ForeignKey('videos.Video')
title = models.CharField(max_length=255)
timecode = models.TimeField(help_text='hh:mm:ss')
preview = models.ImageField(upload_to='videos/chapter/', null=True, blank=True, help_text=_('Preview image for this chapter.'))
class Meta:
app_label = 'videos'
ordering = ('timecode',)
def __str__(self):
return self.title
@property
def seconds(self):
timecode = self.timecode
return datetime.timedelta(
hours=timecode.hour,
minutes=timecode.minute,
seconds=timecode.second,
).total_seconds()
| Refactor for python 3 and shinyness | Refactor for python 3 and shinyness
| Python | bsd-2-clause | incuna/incuna-videos,incuna/incuna-videos | ---
+++
@@ -1,10 +1,13 @@
+import datetime
+
from django.db import models
+from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
+
+@python_2_unicode_compatible
class Chapter(models.Model):
- """
- Video section.
- """
+ """Video section"""
video = models.ForeignKey('videos.Video')
title = models.CharField(max_length=255)
timecode = models.TimeField(help_text='hh:mm:ss')
@@ -14,10 +17,14 @@
app_label = 'videos'
ordering = ('timecode',)
- def __unicode__(self):
+ def __str__(self):
return self.title
@property
def seconds(self):
- return self.timecode.hour*3600+self.timecode.minute*60+self.timecode.second
-
+ timecode = self.timecode
+ return datetime.timedelta(
+ hours=timecode.hour,
+ minutes=timecode.minute,
+ seconds=timecode.second,
+ ).total_seconds() |
e00ad93c1a769a920c7f61eeccec582272766b26 | badgify/templatetags/badgify_tags.py | badgify/templatetags/badgify_tags.py | # -*- coding: utf-8 -*-
from django import template
from ..models import Badge, Award
from ..compat import get_user_model
register = template.Library()
@register.assignment_tag
def badgify_badges(**kwargs):
"""
Returns all badges or only awarded badges for the given user.
"""
User = get_user_model()
user = kwargs.get('user', None)
username = kwargs.get('username', None)
if username:
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
pass
if user:
awards = Award.objects.filter(user=user)
badges = [award.badge for award in awards]
return badges
return Badge.objects.all()
| # -*- coding: utf-8 -*-
from django import template
from ..models import Badge, Award
from ..compat import get_user_model
register = template.Library()
@register.assignment_tag
def badgify_badges(**kwargs):
"""
Returns all badges or only awarded badges for the given user.
"""
User = get_user_model()
user = kwargs.get('user', None)
username = kwargs.get('username', None)
if username:
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
pass
if user:
awards = Award.objects.filter(user=user).select_related('badge')
badges = [award.badge for award in awards]
return badges
return Badge.objects.all()
| Add missing select_related to badgify_badges | Add missing select_related to badgify_badges
| Python | mit | ulule/django-badgify,ulule/django-badgify | ---
+++
@@ -21,7 +21,7 @@
except User.DoesNotExist:
pass
if user:
- awards = Award.objects.filter(user=user)
+ awards = Award.objects.filter(user=user).select_related('badge')
badges = [award.badge for award in awards]
return badges
return Badge.objects.all() |
ef8c04be4e2a312b977cbc3d84a45c20c7895147 | tests/window/window_util.py | tests/window/window_util.py | #!/usr/bin/python
# $Id:$
from pyglet.gl import *
def draw_client_border(window):
glClearColor(0, 0, 0, 1)
glClear(GL_COLOR_BUFFER_BIT)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glOrtho(0, window.width, 0, window.height, -1, 1)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
def rect(x1, y1, x2, y2):
glBegin(GL_LINE_LOOP)
glVertex2f(x1, y1)
glVertex2f(x2, y1)
glVertex2f(x2, y2)
glVertex2f(x1, y2)
glEnd()
glColor3f(1, 0, 0)
rect(-1, -1, window.width + 1, window.height - 1)
glColor3f(0, 1, 0)
rect(0, 0, window.width - 1, window.height - 1)
| #!/usr/bin/python
# $Id:$
from pyglet.gl import *
def draw_client_border(window):
glClearColor(0, 0, 0, 1)
glClear(GL_COLOR_BUFFER_BIT)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glOrtho(0, window.width, 0, window.height, -1, 1)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
def rect(x1, y1, x2, y2):
glBegin(GL_LINE_LOOP)
glVertex2f(x1, y1)
glVertex2f(x2, y1)
glVertex2f(x2, y2)
glVertex2f(x1, y2)
glEnd()
glColor3f(1, 0, 0)
rect(-1, -1, window.width, window.height)
glColor3f(0, 1, 0)
rect(0, 0, window.width - 1, window.height - 1)
| Fix window test border _again_ (more fixed). | Fix window test border _again_ (more fixed).
--HG--
extra : convert_revision : svn%3A14d46d22-621c-0410-bb3d-6f67920f7d95/trunk%401383
| Python | bsd-3-clause | infowantstobeseen/pyglet-darwincore,infowantstobeseen/pyglet-darwincore,infowantstobeseen/pyglet-darwincore,infowantstobeseen/pyglet-darwincore,infowantstobeseen/pyglet-darwincore | ---
+++
@@ -22,7 +22,7 @@
glEnd()
glColor3f(1, 0, 0)
- rect(-1, -1, window.width + 1, window.height - 1)
+ rect(-1, -1, window.width, window.height)
glColor3f(0, 1, 0)
rect(0, 0, window.width - 1, window.height - 1) |
0c57cbcf31d0f2e413d68fb822a8e43d16670dc1 | apnsclient/__init__.py | apnsclient/__init__.py | # Copyright 2013 Getlogic BV, Sardar Yumatov
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__title__ = 'APNS client'
__version__ = "0.1.1"
__author__ = "Sardar Yumatov"
__contact__ = "ja.doma@gmail.com"
__license__ = "Apache 2.0"
__homepage__ = "https://bitbucket.org/sardarnl/apns-client/"
__copyright__ = 'Copyright 2013 Getlogic BV, Sardar Yumatov'
from apnsclient.apns import *
| # Copyright 2013 Getlogic BV, Sardar Yumatov
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__title__ = 'APNS client'
__version__ = "0.1.5"
__author__ = "Sardar Yumatov"
__contact__ = "ja.doma@gmail.com"
__license__ = "Apache 2.0"
__homepage__ = "https://bitbucket.org/sardarnl/apns-client/"
__copyright__ = 'Copyright 2013 Getlogic BV, Sardar Yumatov'
from apnsclient.apns import *
| Adjust the module __version__ to match the version advertised in PyPI. | Adjust the module __version__ to match the version advertised in PyPI.
| Python | apache-2.0 | marcinkaszynski/apnsclient | ---
+++
@@ -14,7 +14,7 @@
__title__ = 'APNS client'
-__version__ = "0.1.1"
+__version__ = "0.1.5"
__author__ = "Sardar Yumatov"
__contact__ = "ja.doma@gmail.com"
__license__ = "Apache 2.0" |
c21eaccbee53f2b915fc35b85bf665e84b81dc8c | app/celery/__init__.py | app/celery/__init__.py | from celery import Celery
class NewAcropolisCelery(Celery):
def init_app(self, app):
super(NewAcropolisCelery, self).__init__(
app.import_name,
broker=app.config['CELERY_BROKER_URL'],
)
app.logger.info('Setting up celery: %s', app.config['CELERY_BROKER_URL'])
self.conf.update(app.config)
class ContextTask(self.Task):
def __call__(self, *args, **kwargs): # noqa
with app.app_context():
return self.run(*args, **kwargs)
self.Task = ContextTask
| from celery import Celery
class NewAcropolisCelery(Celery):
def init_app(self, app):
if not app.config['CELERY_BROKER_URL']:
app.logger.info('Celery broker URL not set')
return
super(NewAcropolisCelery, self).__init__(
app.import_name,
broker=app.config['CELERY_BROKER_URL'],
)
app.logger.info('Setting up celery: %s', app.config['CELERY_BROKER_URL'])
self.conf.update(app.config)
class ContextTask(self.Task):
def __call__(self, *args, **kwargs): # noqa
with app.app_context():
return self.run(*args, **kwargs)
self.Task = ContextTask
| Add logging for missing CELERY_BROKER_URL | Add logging for missing CELERY_BROKER_URL
| Python | mit | NewAcropolis/api,NewAcropolis/api,NewAcropolis/api | ---
+++
@@ -3,6 +3,10 @@
class NewAcropolisCelery(Celery):
def init_app(self, app):
+ if not app.config['CELERY_BROKER_URL']:
+ app.logger.info('Celery broker URL not set')
+ return
+
super(NewAcropolisCelery, self).__init__(
app.import_name,
broker=app.config['CELERY_BROKER_URL'], |
df03a2d9543b392fb9ea9c027b93f4ed736e6788 | pyked/_version.py | pyked/_version.py | __version_info__ = (0, 1, 1)
__version__ = '.'.join(map(str, __version_info__))
| __version_info__ = (0, 1, 1, 'a1')
__version__ = '.'.join(map(str, __version_info__[:3]))
if len(__version_info__) == 4:
__version__ += __version_info__[-1]
| Allow alpha versions in the versioning string | Allow alpha versions in the versioning string
| Python | bsd-3-clause | bryanwweber/PyKED,pr-omethe-us/PyKED | ---
+++
@@ -1,2 +1,4 @@
-__version_info__ = (0, 1, 1)
-__version__ = '.'.join(map(str, __version_info__))
+__version_info__ = (0, 1, 1, 'a1')
+__version__ = '.'.join(map(str, __version_info__[:3]))
+if len(__version_info__) == 4:
+ __version__ += __version_info__[-1] |
27d8c3e0944056d23f1d25c651f7988f6cbf8353 | froide/helper/tasks.py | froide/helper/tasks.py | from django.conf import settings
from django.utils import translation
from celery.task import task
from haystack import site
@task
def delayed_update(instance_pk, model):
""" Only index stuff that is known to be public """
translation.activate(settings.LANGUAGE_CODE)
try:
instance = model.published.get(pk=instance_pk)
except (model.DoesNotExist, AttributeError):
return
site.update_object(instance)
@task
def delayed_remove(instance_pk, model):
translation.activate(settings.LANGUAGE_CODE)
try:
instance = model.published.get(pk=instance_pk)
except (model.DoesNotExist, AttributeError):
return
site.remove_object(instance)
| from django.conf import settings
from django.utils import translation
from celery.task import task
from haystack import site
@task
def delayed_update(instance_pk, model):
""" Only index stuff that is known to be public """
translation.activate(settings.LANGUAGE_CODE)
try:
instance = model.published.get(pk=instance_pk)
except (model.DoesNotExist, AttributeError):
return
site.update_object(instance)
@task
def delayed_remove(instance_pk, model):
translation.activate(settings.LANGUAGE_CODE)
# Fake an instance (real one is already gone from the DB)
fake_instance = model()
fake_instance.pk = instance_pk
site.remove_object(fake_instance)
| Rewrite delayed_remove task: instance is gone, so cannot retrieve. Fake instance with proper pk and let haystack remove it from index. | Rewrite delayed_remove task: instance is gone, so cannot retrieve. Fake instance with proper pk and let haystack remove it from index.
| Python | mit | CodeforHawaii/froide,CodeforHawaii/froide,LilithWittmann/froide,catcosmo/froide,ryankanno/froide,stefanw/froide,okfse/froide,catcosmo/froide,okfse/froide,ryankanno/froide,stefanw/froide,ryankanno/froide,CodeforHawaii/froide,CodeforHawaii/froide,LilithWittmann/froide,fin/froide,catcosmo/froide,catcosmo/froide,LilithWittmann/froide,okfse/froide,ryankanno/froide,fin/froide,okfse/froide,LilithWittmann/froide,stefanw/froide,stefanw/froide,okfse/froide,catcosmo/froide,LilithWittmann/froide,CodeforHawaii/froide,fin/froide,fin/froide,ryankanno/froide,stefanw/froide | ---
+++
@@ -17,8 +17,7 @@
@task
def delayed_remove(instance_pk, model):
translation.activate(settings.LANGUAGE_CODE)
- try:
- instance = model.published.get(pk=instance_pk)
- except (model.DoesNotExist, AttributeError):
- return
- site.remove_object(instance)
+ # Fake an instance (real one is already gone from the DB)
+ fake_instance = model()
+ fake_instance.pk = instance_pk
+ site.remove_object(fake_instance) |
ca97a29dded7278b40785fe88b5e8c9ceb542d86 | urllib3/util/wait.py | urllib3/util/wait.py | from .selectors import (
HAS_SELECT,
DefaultSelector,
EVENT_READ,
EVENT_WRITE
)
def _wait_for_io_events(socks, events, timeout=None):
""" Waits for IO events to be available from a list of sockets
or optionally a single socket if passed in. Returns a list of
sockets that can be interacted with immediately. """
if not HAS_SELECT:
raise ValueError('Platform does not have a selector')
if not isinstance(socks, list):
# Probably just a single socket.
if hasattr(socks, "fileno"):
socks = [socks]
# Otherwise it might be a non-list iterable.
else:
socks = list(socks)
selector = DefaultSelector()
for sock in socks:
selector.register(sock, events)
return [key[0].fileobj for key in
selector.select(timeout) if key[1] & events]
def wait_for_read(socks, timeout=None):
""" Waits for reading to be available from a list of sockets
or optionally a single socket if passed in. Returns a list of
sockets that can be read from immediately. """
return _wait_for_io_events(socks, EVENT_READ, timeout)
def wait_for_write(socks, timeout=None):
""" Waits for writing to be available from a list of sockets
or optionally a single socket if passed in. Returns a list of
sockets that can be written to immediately. """
return _wait_for_io_events(socks, EVENT_WRITE, timeout)
| from .selectors import (
HAS_SELECT,
DefaultSelector,
EVENT_READ,
EVENT_WRITE
)
def _wait_for_io_events(socks, events, timeout=None):
""" Waits for IO events to be available from a list of sockets
or optionally a single socket if passed in. Returns a list of
sockets that can be interacted with immediately. """
if not HAS_SELECT:
raise ValueError('Platform does not have a selector')
if not isinstance(socks, list):
# Probably just a single socket.
if hasattr(socks, "fileno"):
socks = [socks]
# Otherwise it might be a non-list iterable.
else:
socks = list(socks)
with DefaultSelector() as selector:
for sock in socks:
selector.register(sock, events)
return [key[0].fileobj for key in
selector.select(timeout) if key[1] & events]
def wait_for_read(socks, timeout=None):
""" Waits for reading to be available from a list of sockets
or optionally a single socket if passed in. Returns a list of
sockets that can be read from immediately. """
return _wait_for_io_events(socks, EVENT_READ, timeout)
def wait_for_write(socks, timeout=None):
""" Waits for writing to be available from a list of sockets
or optionally a single socket if passed in. Returns a list of
sockets that can be written to immediately. """
return _wait_for_io_events(socks, EVENT_WRITE, timeout)
| Use DefaultSelector as context manager. | Use DefaultSelector as context manager. | Python | mit | sigmavirus24/urllib3,Lukasa/urllib3,Disassem/urllib3,Lukasa/urllib3,Disassem/urllib3,urllib3/urllib3,urllib3/urllib3,sigmavirus24/urllib3 | ---
+++
@@ -19,11 +19,11 @@
# Otherwise it might be a non-list iterable.
else:
socks = list(socks)
- selector = DefaultSelector()
- for sock in socks:
- selector.register(sock, events)
- return [key[0].fileobj for key in
- selector.select(timeout) if key[1] & events]
+ with DefaultSelector() as selector:
+ for sock in socks:
+ selector.register(sock, events)
+ return [key[0].fileobj for key in
+ selector.select(timeout) if key[1] & events]
def wait_for_read(socks, timeout=None): |
785717703baf0c8fd9234058a6c9845a6838a8bf | salt/modules/key.py | salt/modules/key.py | # -*- coding: utf-8 -*-
'''
Functions to view the minion's public key information
'''
from __future__ import absolute_import
# Import python libs
import os
# Import Salt libs
import salt.utils
def finger():
'''
Return the minion's public key fingerprint
CLI Example:
.. code-block:: bash
salt '*' key.finger
'''
return salt.utils.pem_finger(
os.path.join(__opts__['pki_dir'], 'minion.pub'),
sum_type=__opts__['hash_type']
)
def finger_master():
'''
Return the fingerprint of the master's public key on the minion.
CLI Example:
.. code-block:: bash
salt '*' key.finger_master
'''
return salt.utils.pem_finger(
os.path.join(__opts__['pki_dir'], 'minion_master.pub'),
sum_type=__opts__['hash_type']
)
| # -*- coding: utf-8 -*-
'''
Functions to view the minion's public key information
'''
from __future__ import absolute_import
# Import python libs
import os
# Import Salt libs
import salt.utils
def finger():
'''
Return the minion's public key fingerprint
CLI Example:
.. code-block:: bash
salt '*' key.finger
'''
return salt.utils.pem_finger(os.path.join(__opts__['pki_dir'], 'minion.pub'),
sum_type=__opts__.get('hash_type', 'md5'))
def finger_master():
'''
Return the fingerprint of the master's public key on the minion.
CLI Example:
.. code-block:: bash
salt '*' key.finger_master
'''
return salt.utils.pem_finger(os.path.join(__opts__['pki_dir'], 'minion_master.pub'),
sum_type=__opts__.get('hash_type', 'md5'))
| Use configurable hash_type for general Key fingerprinting | Use configurable hash_type for general Key fingerprinting
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | ---
+++
@@ -21,10 +21,8 @@
salt '*' key.finger
'''
- return salt.utils.pem_finger(
- os.path.join(__opts__['pki_dir'], 'minion.pub'),
- sum_type=__opts__['hash_type']
- )
+ return salt.utils.pem_finger(os.path.join(__opts__['pki_dir'], 'minion.pub'),
+ sum_type=__opts__.get('hash_type', 'md5'))
def finger_master():
@@ -37,7 +35,5 @@
salt '*' key.finger_master
'''
- return salt.utils.pem_finger(
- os.path.join(__opts__['pki_dir'], 'minion_master.pub'),
- sum_type=__opts__['hash_type']
- )
+ return salt.utils.pem_finger(os.path.join(__opts__['pki_dir'], 'minion_master.pub'),
+ sum_type=__opts__.get('hash_type', 'md5')) |
3ef70e2e4e319414441b69b0f96ca9d52df9929e | runner/update_manifest.py | runner/update_manifest.py | import imp
import json
import os
import sys
here = os.path.dirname(__file__)
localpaths = imp.load_source("localpaths", os.path.abspath(os.path.join(here, os.pardir, "localpaths.py")))
root = localpaths.repo_root
import manifest
def main(request, response):
path = os.path.join(root, "MANIFEST.json")
manifest_file = manifest.manifest.load(path)
manifest.update.update(root, "/", manifest_file)
manifest.manifest.write(manifest_file, path)
return [("Content-Type", "application/json")], json.dumps({"url": "/MANIFEST.json"})
| import imp
import json
import os
import sys
here = os.path.dirname(__file__)
localpaths = imp.load_source("localpaths", os.path.abspath(os.path.join(here, os.pardir, "localpaths.py")))
root = localpaths.repo_root
import manifest
def main(request, response):
path = os.path.join(root, "MANIFEST.json")
manifest_file = manifest.manifest.load(root, path)
manifest.update.update(root, "/", manifest_file)
manifest.manifest.write(manifest_file, path)
return [("Content-Type", "application/json")], json.dumps({"url": "/MANIFEST.json"})
| Fix the manifest.load call in the test runner. | Fix the manifest.load call in the test runner.
| Python | bsd-3-clause | wpt-on-tv-tf/wpt-tools,wpt-on-tv-tf/wpt-tools,vivliostyle/wpt-tools,frewsxcv/wpt-tools,frewsxcv/wpt-tools,UprootStaging/wpt-tools,frewsxcv/wpt-tools,UprootStaging/wpt-tools,wpt-on-tv-tf/wpt-tools,kaixinjxq/wpt-tools,vivliostyle/wpt-tools,UprootStaging/wpt-tools,vivliostyle/wpt-tools,kaixinjxq/wpt-tools,kaixinjxq/wpt-tools | ---
+++
@@ -12,7 +12,7 @@
def main(request, response):
path = os.path.join(root, "MANIFEST.json")
- manifest_file = manifest.manifest.load(path)
+ manifest_file = manifest.manifest.load(root, path)
manifest.update.update(root, "/", manifest_file)
manifest.manifest.write(manifest_file, path)
|
373b0210483839b7ac5b4fd8eb0bcfdfe8d63d83 | begood_sites/fields.py | begood_sites/fields.py | from django.db import models
from django.contrib.sites.models import Site
class MultiSiteField(models.ManyToManyField):
def __init__(self, **kwargs):
defaults = {
'blank': False,
}
defaults.update(kwargs)
super(MultiSiteField, self).__init__(Site, **defaults)
class SingleSiteField(models.ForeignKey):
def __init__(self, **kwargs):
super(SingleSiteField, self).__init__(Site, **kwargs)
# Make sure South migrations work
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^begood_sites\.fields\.MultiSiteField"])
add_introspection_rules([], ["^begood_sites\.fields\.SingleSiteField"])
except:
pass
| from django.db import models
from django.contrib.sites.models import Site
class MultiSiteField(models.ManyToManyField):
def __init__(self, **kwargs):
defaults = {
'blank': False,
}
defaults.update(kwargs)
if 'to' in defaults:
del defaults['to']
super(MultiSiteField, self).__init__(Site, **defaults)
class SingleSiteField(models.ForeignKey):
def __init__(self, **kwargs):
if 'to' in kwargs:
del kwargs['to']
super(SingleSiteField, self).__init__(Site, **kwargs)
# Make sure South migrations work
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^begood_sites\.fields\.MultiSiteField"])
add_introspection_rules([], ["^begood_sites\.fields\.SingleSiteField"])
except:
pass
| Fix problem with South migrations. | Fix problem with South migrations.
| Python | mit | AGoodId/begood-sites | ---
+++
@@ -9,12 +9,16 @@
'blank': False,
}
defaults.update(kwargs)
+ if 'to' in defaults:
+ del defaults['to']
super(MultiSiteField, self).__init__(Site, **defaults)
class SingleSiteField(models.ForeignKey):
def __init__(self, **kwargs):
+ if 'to' in kwargs:
+ del kwargs['to']
super(SingleSiteField, self).__init__(Site, **kwargs)
|
a5deca67d654fa1113db3e224efefc85831146dc | mailviews/tests/__init__.py | mailviews/tests/__init__.py | import django
from django.conf import settings
if not settings.configured:
settings.configure(
DEBUG=True,
INSTALLED_APPS=(
'mailviews',
'mailviews.tests',
),
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
},
ROOT_URLCONF='mailviews.tests.urls',
STATIC_URL='/static/',
LOGGING={
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'': {
'handler': ['console'],
'level': 'DEBUG',
},
},
},
)
if hasattr(django, 'setup'):
django.setup()
from mailviews.tests.tests import * # NOQA
if __name__ == '__main__':
from mailviews.tests.__main__ import __main__
__main__()
| import django
from django.conf import settings
if not settings.configured:
settings.configure(
DEBUG=True,
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'mailviews',
'mailviews.tests',
),
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
},
ROOT_URLCONF='mailviews.tests.urls',
STATIC_URL='/static/',
LOGGING={
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'': {
'handler': ['console'],
'level': 'DEBUG',
},
},
},
)
if hasattr(django, 'setup'):
django.setup()
from mailviews.tests.tests import * # NOQA
if __name__ == '__main__':
from mailviews.tests.__main__ import __main__
__main__()
| Add auth and contentypes django apps on tests to prevent RuntimeError on Django 1.9 | Add auth and contentypes django apps on tests to prevent RuntimeError on Django 1.9
| Python | apache-2.0 | disqus/django-mailviews,joshowen/django-mailviews,disqus/django-mailviews,danxshap/django-mailviews,joshowen/django-mailviews,danxshap/django-mailviews | ---
+++
@@ -5,6 +5,8 @@
settings.configure(
DEBUG=True,
INSTALLED_APPS=(
+ 'django.contrib.auth',
+ 'django.contrib.contenttypes',
'mailviews',
'mailviews.tests',
), |
022fdeed936c0ad74a2510e568bdc385b4ebb13d | masters/master.chromium/master_android_cfg.py | masters/master.chromium/master_android_cfg.py | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from master import master_config
from master.factory import chromium_factory
defaults = {}
helper = master_config.Helper(defaults)
B = helper.Builder
F = helper.Factory
S = helper.Scheduler
T = helper.Triggerable
def linux_android(): return chromium_factory.ChromiumFactory(
'', 'linux2', nohooks_on_update=True, target_os='android')
defaults['category'] = '5android'
#
# Main release scheduler for src/
#
S('android', branch='src', treeStableTimer=60)
#
# Triggerable scheduler for the builder
#
T('android_trigger')
#
# Android Builder
#
B('Android Builder', 'dbg', 'android', 'android', notify_on_missing=True)
F('dbg', linux_android().ChromiumAnnotationFactory(
target='Debug',
annotation_script='src/build/android/buildbot/bb_main.sh',
factory_properties={'trigger': 'android_trigger'}))
def Update(config_arg, active_master, c):
return helper.Update(c)
| # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from master import master_config
from master.factory import chromium_factory
defaults = {}
helper = master_config.Helper(defaults)
B = helper.Builder
F = helper.Factory
S = helper.Scheduler
T = helper.Triggerable
def linux_android(): return chromium_factory.ChromiumFactory(
'', 'linux2', nohooks_on_update=True, target_os='android')
defaults['category'] = '5android'
#
# Main release scheduler for src/
#
S('android', branch='src', treeStableTimer=60)
#
# Triggerable scheduler for the builder
#
T('android_trigger')
#
# Android Builder
#
B('Android Builder', 'dbg', 'android', 'android', notify_on_missing=True)
F('dbg', linux_android().ChromiumAnnotationFactory(
target='Debug',
annotation_script='src/build/android/buildbot/bb_main_builder.sh',
factory_properties={'trigger': 'android_trigger'}))
def Update(config_arg, active_master, c):
return helper.Update(c)
| Fix typo in annotation script location | Fix typo in annotation script location
In 2a07fd1866 I erroneously pointed to the wrong location
for the annotation script for main waterfall. This fixes that.
TBR=nsylvain@chromium.org
BUG=
Review URL: https://chromiumcodereview.appspot.com/10917129
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@155426 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | eunchong/build,eunchong/build,eunchong/build,eunchong/build | ---
+++
@@ -34,7 +34,7 @@
B('Android Builder', 'dbg', 'android', 'android', notify_on_missing=True)
F('dbg', linux_android().ChromiumAnnotationFactory(
target='Debug',
- annotation_script='src/build/android/buildbot/bb_main.sh',
+ annotation_script='src/build/android/buildbot/bb_main_builder.sh',
factory_properties={'trigger': 'android_trigger'}))
|
5dfd723b37e208c1b81e65cd2df1b7d9226493b3 | numpy/_array_api/_sorting_functions.py | numpy/_array_api/_sorting_functions.py | def argsort(x, /, *, axis=-1, descending=False, stable=True):
from .. import argsort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = argsort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
def sort(x, /, *, axis=-1, descending=False, stable=True):
from .. import sort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = sort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
| def argsort(x, /, *, axis=-1, descending=False, stable=True):
from .. import argsort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = argsort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
return res
def sort(x, /, *, axis=-1, descending=False, stable=True):
from .. import sort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = sort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
return res
| Add missing returns to the array API sorting functions | Add missing returns to the array API sorting functions
| Python | mit | cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy | ---
+++
@@ -6,6 +6,7 @@
res = argsort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
+ return res
def sort(x, /, *, axis=-1, descending=False, stable=True):
from .. import sort
@@ -15,3 +16,4 @@
res = sort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
+ return res |
b2470140f7fb33bee2af34cdc51695406a4073ec | utils/layers_test.py | utils/layers_test.py | # Lint as: python3
"""Tests for spectral."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import numpy as np
import layers
class LayersTest(tf.test.TestCase):
def test_conv_transpose_shape(self):
inputs = np.random.normal(size=(10, 5, 2))
conv_transpose = layers.Conv1DTranspose(filters=2,
kernel_size=1,
strides=1)
outputs = conv_transpose(inputs)
self.assertShapeEqual(inputs, outputs)
if __name__ == '__main__':
tf.test.main()
| # Lint as: python3
"""Tests for spectral."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import numpy as np
import os
import layers
class LayersTest(tf.test.TestCase):
def test_conv_transpose_shape(self):
inputs = np.random.normal(size=(10, 5, 2)).astype(np.float32)
conv_transpose = layers.Conv1DTranspose(
filters=2, kernel_size=1, strides=1
)
outputs = conv_transpose(inputs)
self.assertShapeEqual(inputs, outputs)
if __name__ == '__main__':
os.environ["CUDA_VISIBLE_DEVICES"] = ''
tf.test.main()
| Resolve Minor Issues with Test Ensure that the tensorflow tests run on the CPU | Resolve Minor Issues with Test
Ensure that the tensorflow tests run on the CPU
| Python | apache-2.0 | googleinterns/audio_synthesis | ---
+++
@@ -7,20 +7,23 @@
import tensorflow as tf
import numpy as np
+import os
import layers
class LayersTest(tf.test.TestCase):
- def test_conv_transpose_shape(self):
- inputs = np.random.normal(size=(10, 5, 2))
- conv_transpose = layers.Conv1DTranspose(filters=2,
- kernel_size=1,
- strides=1)
- outputs = conv_transpose(inputs)
- self.assertShapeEqual(inputs, outputs)
+ def test_conv_transpose_shape(self):
+ inputs = np.random.normal(size=(10, 5, 2)).astype(np.float32)
+ conv_transpose = layers.Conv1DTranspose(
+ filters=2, kernel_size=1, strides=1
+ )
+
+ outputs = conv_transpose(inputs)
+ self.assertShapeEqual(inputs, outputs)
if __name__ == '__main__':
+ os.environ["CUDA_VISIBLE_DEVICES"] = ''
tf.test.main() |
5c4db9dc32eb4918866af5ae7037220b6f651a7d | fabfile.py | fabfile.py | from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('/', 'var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
| from fabric.api import cd, sudo, env
import os
PROJECT = os.environ.get('PROJECT', 'go-rts-zambia')
DEPLOY_USER = os.environ.get('DEPLOY_USER', 'jmbo')
env.path = os.path.join('/', 'var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=DEPLOY_USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=DEPLOY_USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=DEPLOY_USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=DEPLOY_USER)
| Fix USER in fabric file (the deploy user and ssh user aren't necessarily the same). Set default value for PROJECT (this is the go-rts-zambia repo after all). | Fix USER in fabric file (the deploy user and ssh user aren't necessarily the same). Set default value for PROJECT (this is the go-rts-zambia repo after all).
| Python | bsd-3-clause | praekelt/go-rts-zambia | ---
+++
@@ -1,17 +1,8 @@
from fabric.api import cd, sudo, env
import os
-expected_vars = [
- 'PROJECT',
-]
-
-for var in expected_vars:
- if var not in os.environ:
- raise Exception('Please specify %s environment variable' % (
- var,))
-
-PROJECT = os.environ['PROJECT']
-USER = os.environ.get('USER', 'jmbo')
+PROJECT = os.environ.get('PROJECT', 'go-rts-zambia')
+DEPLOY_USER = os.environ.get('DEPLOY_USER', 'jmbo')
env.path = os.path.join('/', 'var', 'praekelt', PROJECT)
@@ -23,14 +14,14 @@
def deploy():
with cd(env.path):
- sudo('git pull', user=USER)
+ sudo('git pull', user=DEPLOY_USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
- user=USER)
+ user=DEPLOY_USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
- user=USER)
+ user=DEPLOY_USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
- '--upgrade' if force else '',), user=USER)
+ '--upgrade' if force else '',), user=DEPLOY_USER) |
3b99493e606a04a6338d8ee2fc299595d19b2a44 | fabfile.py | fabfile.py | from fabric.api import local, cd
def docs():
local("./bin/docs")
local("./bin/python setup.py upload_sphinx --upload-dir=docs/html")
def release():
# update version id in setup.py, changelog and docs/source/conf.py
local("python setup.py sdist --formats=gztar,zip upload")
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from fabric.api import local, cd
def docs():
local("./bin/docs")
local("./bin/python setup.py upload_sphinx --upload-dir=docs/html")
def release():
"""Update version id in setup.py, changelog and docs/source/conf.py."""
local(("python setup.py bdist_egg sdist --formats=bztar,gztar,zip "
"upload --show-response"))
| Add BzTar and EGG format to Fabric script | Add BzTar and EGG format to Fabric script
| Python | bsd-3-clause | janusnic/importd,pombredanne/importd,arpitremarkable/importd,pombredanne/importd,akshar-raaj/importd,hitul007/importd,akshar-raaj/importd,hitul007/importd,janusnic/importd,arpitremarkable/importd | ---
+++
@@ -1,9 +1,16 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+
from fabric.api import local, cd
+
def docs():
local("./bin/docs")
local("./bin/python setup.py upload_sphinx --upload-dir=docs/html")
+
def release():
- # update version id in setup.py, changelog and docs/source/conf.py
- local("python setup.py sdist --formats=gztar,zip upload")
+ """Update version id in setup.py, changelog and docs/source/conf.py."""
+ local(("python setup.py bdist_egg sdist --formats=bztar,gztar,zip "
+ "upload --show-response")) |
19debebae3368be479b8368e96e17458e9a18d23 | setup.py | setup.py | import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='windpowerlib',
version='0.1.3',
description='Creating time series of wind power plants.',
url='http://github.com/wind-python/windpowerlib',
author='oemof developer group',
author_email='windpowerlib@rl-institut.de',
license=None,
packages=['windpowerlib'],
package_data={
'windpowerlib': [os.path.join('data', '*.csv')]},
long_description=read('README.rst'),
zip_safe=False,
install_requires=['pandas >= 0.19.1, < 0.26',
'requests < 3.0'],
extras_require={
'dev': ['pytest', 'jupyter', 'sphinx_rtd_theme', 'nbformat']})
| import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='windpowerlib',
version='0.1.3',
description='Creating time series of wind power plants.',
url='http://github.com/wind-python/windpowerlib',
author='oemof developer group',
author_email='windpowerlib@rl-institut.de',
license=None,
packages=['windpowerlib'],
package_data={
'windpowerlib': [os.path.join('data', '*.csv')]},
long_description=read('README.rst'),
long_description_content_type='text/rst',
zip_safe=False,
install_requires=['pandas >= 0.19.1, < 0.26',
'requests < 3.0'],
extras_require={
'dev': ['pytest', 'jupyter', 'sphinx_rtd_theme', 'nbformat']})
| Add content type of long description | Add content type of long description
| Python | mit | wind-python/windpowerlib | ---
+++
@@ -17,6 +17,7 @@
package_data={
'windpowerlib': [os.path.join('data', '*.csv')]},
long_description=read('README.rst'),
+ long_description_content_type='text/rst',
zip_safe=False,
install_requires=['pandas >= 0.19.1, < 0.26',
'requests < 3.0'], |
22c818fdb813fbd3b7ea48bb0fd3c7324ebdd0ba | setup.py | setup.py | from setuptools import setup
setup(
name='dblp',
version='0.1.0',
author='Sebastian Gehrmann',
author_email='gehrmann@seas.harvard.edu',
packages=['dblp'],
url='https://github.com/sebastianGehrmann/dblp-pub/tree/master',
license='LICENSE.txt',
description='Downloads and formats search results from dblp',
long_description=open('README.md').read(),
install_requires=[
"beautifulsoup4>=4.3.2",
"pandas>=0.16.2",
"requests>=2.7.0"
],
) | from setuptools import setup
setup(
name='dblp',
version='0.1.0',
author='Sebastian Gehrmann',
author_email='gehrmann@seas.harvard.edu',
packages=['dblp'],
url='https://github.com/sebastianGehrmann/dblp-pub/tree/master',
license='LICENSE.txt',
description='Downloads and formats search results from dblp',
long_description=open('README.md').read(),
long_description_content_type='text/markdown',
install_requires=[
"beautifulsoup4>=4.3.2",
"pandas>=0.16.2",
"requests>=2.7.0"
],
) | Use long_description_content_type': 'text/markdown' in README | Use long_description_content_type': 'text/markdown' in README
| Python | mit | sebastianGehrmann/dblp-pub | ---
+++
@@ -10,6 +10,7 @@
license='LICENSE.txt',
description='Downloads and formats search results from dblp',
long_description=open('README.md').read(),
+ long_description_content_type='text/markdown',
install_requires=[
"beautifulsoup4>=4.3.2",
"pandas>=0.16.2", |
116d3837aa817dee6e15bdc24f20eac1e56066dd | buildtools/__init__.py | buildtools/__init__.py | __all__ = ['Config', 'Chdir', 'cmd', 'log','Properties','replace_vars','cmd']
from buildtools.config import Config, Properties, replace_vars
from buildtools.os_utils import Chdir, cmd, ENV, BuildEnv
from buildtools.bt_logging import log
| __all__ = ['Config', 'Chdir', 'cmd', 'log','Properties','replace_vars','cmd','ENV','BuildEnv']
from buildtools.config import Config, Properties, replace_vars
from buildtools.os_utils import Chdir, cmd, ENV, BuildEnv
from buildtools.bt_logging import log
| Add BuildEnv and ENV to __all__ | Add BuildEnv and ENV to __all__
| Python | mit | N3X15/python-build-tools,N3X15/python-build-tools,N3X15/python-build-tools | ---
+++
@@ -1,4 +1,4 @@
-__all__ = ['Config', 'Chdir', 'cmd', 'log','Properties','replace_vars','cmd']
+__all__ = ['Config', 'Chdir', 'cmd', 'log','Properties','replace_vars','cmd','ENV','BuildEnv']
from buildtools.config import Config, Properties, replace_vars
from buildtools.os_utils import Chdir, cmd, ENV, BuildEnv |
8a9c6c9df8f29f5ff6f9bb1a6200536f08cf08a9 | getlost.py | getlost.py | from os import environ
from urllib2 import urlopen
from flask import Flask, json, make_response
app = Flask(__name__)
url = 'http://open.mapquestapi.com/directions/v2/route'
params = '?key={apikey}&ambiguities=ignore&routeType=pedestrian'
rel = url + params + '&from={flat},{flng}&to={tlat},{tlng}'
@app.route("/route/<from_lat>,<from_lng>/<to_lat>,<to_lng>")
def get_coords(from_lat, from_lng, to_lat, to_lng):
resp = urlopen(rel.format(apikey=environ['MAPQUEST_API_KEY'],
flat=from_lat, flng=from_lng, tlat=to_lat, tlng=to_lng))
resp_dict = json.loads(resp.read().decode("utf-8"))
res = [(man['startPoint']['lat'], man['startPoint']['lng'])
for leg in resp_dict['route']['legs']
for man in leg['maneuvers']]
return make_response(json.dumps(res))
if __name__ == "__main__":
app.run(debug=environ.get('FLASK_DEBUG', False))
| from os import environ
from urllib2 import urlopen
from flask import Flask, json, jsonify
app = Flask(__name__)
from hip import get_ranking_array
url = 'http://open.mapquestapi.com/directions/v2/route'
params = '?key={apikey}&ambiguities=ignore&routeType=pedestrian'
rel = url + params + '&from={flat},{flng}&to={tlat},{tlng}'
@app.route("/route/<from_lat>,<from_lng>/<to_lat>,<to_lng>")
def route(from_lat, from_lng, to_lat, to_lng):
resp = urlopen(rel.format(apikey=environ['MAPQUEST_API_KEY'],
flat=from_lat, flng=from_lng, tlat=to_lat, tlng=to_lng))
route = json.loads(resp.read().decode("utf-8"))
coords = [(man['startPoint']['lat'], man['startPoint']['lng'])
for leg in route['route']['legs']
for man in leg['maneuvers']]
hip_rank, total_rank = get_ranking_array(coords)
return jsonify(route=route, hip_rank=list(hip_rank), total_rank=total_rank)
if __name__ == "__main__":
app.run(debug=environ.get('FLASK_DEBUG', False))
| Return route, hip rank and total rank | Return route, hip rank and total rank
| Python | apache-2.0 | kynan/GetLost | ---
+++
@@ -1,8 +1,10 @@
from os import environ
from urllib2 import urlopen
-from flask import Flask, json, make_response
+from flask import Flask, json, jsonify
app = Flask(__name__)
+
+from hip import get_ranking_array
url = 'http://open.mapquestapi.com/directions/v2/route'
params = '?key={apikey}&ambiguities=ignore&routeType=pedestrian'
@@ -10,15 +12,16 @@
@app.route("/route/<from_lat>,<from_lng>/<to_lat>,<to_lng>")
-def get_coords(from_lat, from_lng, to_lat, to_lng):
+def route(from_lat, from_lng, to_lat, to_lng):
resp = urlopen(rel.format(apikey=environ['MAPQUEST_API_KEY'],
flat=from_lat, flng=from_lng, tlat=to_lat, tlng=to_lng))
- resp_dict = json.loads(resp.read().decode("utf-8"))
- res = [(man['startPoint']['lat'], man['startPoint']['lng'])
- for leg in resp_dict['route']['legs']
- for man in leg['maneuvers']]
- return make_response(json.dumps(res))
+ route = json.loads(resp.read().decode("utf-8"))
+ coords = [(man['startPoint']['lat'], man['startPoint']['lng'])
+ for leg in route['route']['legs']
+ for man in leg['maneuvers']]
+ hip_rank, total_rank = get_ranking_array(coords)
+ return jsonify(route=route, hip_rank=list(hip_rank), total_rank=total_rank)
if __name__ == "__main__":
app.run(debug=environ.get('FLASK_DEBUG', False)) |
3617400a7c0915920384d15ff273aa4c8a805d9c | core/byzantinerandomizedconsensus.py | core/byzantinerandomizedconsensus.py | from base.consensus import Consensus
class ByzantineRandomizedConsensus(Consensus):
"""
Implements a Byzantine Fault Tolerant Randomized Consensus Broadcast protocol.
"""
def propose(self, message):
pass
def decide(self):
pass | Update Byzantine Randomized Consensus protocol class | Update Byzantine Randomized Consensus protocol class
| Python | mit | koevskinikola/ByzantineRandomizedConsensus | ---
+++
@@ -0,0 +1,13 @@
+from base.consensus import Consensus
+
+
+class ByzantineRandomizedConsensus(Consensus):
+ """
+ Implements a Byzantine Fault Tolerant Randomized Consensus Broadcast protocol.
+ """
+
+ def propose(self, message):
+ pass
+
+ def decide(self):
+ pass | |
819380c964ca30f9d3e480a61371d502b7976abe | tests/basics/subclass_native_cmp.py | tests/basics/subclass_native_cmp.py | # Test calling non-special method inherited from native type
class mytuple(tuple):
pass
t = mytuple((1, 2, 3))
print(t)
print(t == (1, 2, 3))
print((1, 2, 3) == t)
| # Test calling non-special method inherited from native type
class mytuple(tuple):
pass
t = mytuple((1, 2, 3))
print(t)
print(t == (1, 2, 3))
print((1, 2, 3) == t)
print(t < (1, 2, 3), t < (1, 2, 4))
print((1, 2, 3) <= t, (1, 2, 4) < t)
| Add test for tuple compare with class derived from tuple. | tests/basics: Add test for tuple compare with class derived from tuple.
Only the "==" operator was tested by the test suite in for such arguments.
Other comparison operators like "<" take a different path in the code so
need to be tested separately.
| Python | mit | adafruit/circuitpython,MrSurly/micropython,tobbad/micropython,tobbad/micropython,bvernoux/micropython,MrSurly/micropython,kerneltask/micropython,tobbad/micropython,pramasoul/micropython,bvernoux/micropython,selste/micropython,kerneltask/micropython,pozetroninc/micropython,henriknelson/micropython,henriknelson/micropython,pramasoul/micropython,pozetroninc/micropython,tobbad/micropython,MrSurly/micropython,bvernoux/micropython,adafruit/circuitpython,adafruit/circuitpython,selste/micropython,pozetroninc/micropython,tobbad/micropython,selste/micropython,henriknelson/micropython,pramasoul/micropython,pramasoul/micropython,henriknelson/micropython,adafruit/circuitpython,bvernoux/micropython,kerneltask/micropython,adafruit/circuitpython,MrSurly/micropython,bvernoux/micropython,kerneltask/micropython,selste/micropython,henriknelson/micropython,kerneltask/micropython,pozetroninc/micropython,MrSurly/micropython,pozetroninc/micropython,pramasoul/micropython,adafruit/circuitpython,selste/micropython | ---
+++
@@ -7,3 +7,6 @@
print(t)
print(t == (1, 2, 3))
print((1, 2, 3) == t)
+
+print(t < (1, 2, 3), t < (1, 2, 4))
+print((1, 2, 3) <= t, (1, 2, 4) < t) |
bf84052d391774b13b8333acc06533e4ec9cde9e | MS2/visualize/dna-summary.py | MS2/visualize/dna-summary.py | #!/usr/bin/python
"""
Print log for given DNA program run
"""
import os
import sys
from readevtlog import *
home = os.getenv('HOME')
logdir = os.path.join(home,'_dna','logs',sys.argv[1])
for d in sorted(os.listdir(logdir)) :
print "====",d,"================"
[nm] = os.listdir(os.path.join(logdir,d))
nm = os.path.join(logdir,d,nm)
for l in read_log_entries(stream_eventlog(nm)) :
print l
| #!/usr/bin/python
"""
Print log for given DNA program run
"""
import os
import sys
from readevtlog import *
class EntryFilter :
"""
Filter for log entries
"""
def __init__(self, strs) :
self.neg = [s[1:] for s in strs if s[0] == '-']
self.pos = [s[1:] for s in strs if s[0] == '+']
def isOK(self, e) :
def and_(xs) : return reduce( lambda x, y: x and y, xs, True )
def or_ (xs) : return reduce( lambda x, y: x or y, xs, False)
neg = [pat == e.tag for pat in self.neg]
pos = [pat == e.tag for pat in self.pos]
return not (or_(neg)) and and_(pos)
def filter_evt(f, items) :
for i in items :
if f.isOK(i) :
yield i
home = os.getenv('HOME')
logdir = os.path.join(home,'_dna','logs',sys.argv[1])
filters = EntryFilter( sys.argv[2:] )
for d in sorted(os.listdir(logdir)) :
print "====",d,"================"
[nm] = os.listdir(os.path.join(logdir,d))
nm = os.path.join(logdir,d,nm)
for l in filter_evt(filters, read_log_entries(stream_eventlog(nm))) :
print l
| Allow some simplt matching on message tags | Allow some simplt matching on message tags
| Python | apache-2.0 | SKA-ScienceDataProcessor/RC,SKA-ScienceDataProcessor/RC,SKA-ScienceDataProcessor/RC,SKA-ScienceDataProcessor/RC,SKA-ScienceDataProcessor/RC | ---
+++
@@ -7,11 +7,32 @@
import sys
from readevtlog import *
-home = os.getenv('HOME')
-logdir = os.path.join(home,'_dna','logs',sys.argv[1])
+class EntryFilter :
+ """
+ Filter for log entries
+ """
+ def __init__(self, strs) :
+ self.neg = [s[1:] for s in strs if s[0] == '-']
+ self.pos = [s[1:] for s in strs if s[0] == '+']
+ def isOK(self, e) :
+ def and_(xs) : return reduce( lambda x, y: x and y, xs, True )
+ def or_ (xs) : return reduce( lambda x, y: x or y, xs, False)
+ neg = [pat == e.tag for pat in self.neg]
+ pos = [pat == e.tag for pat in self.pos]
+ return not (or_(neg)) and and_(pos)
+
+def filter_evt(f, items) :
+ for i in items :
+ if f.isOK(i) :
+ yield i
+
+home = os.getenv('HOME')
+logdir = os.path.join(home,'_dna','logs',sys.argv[1])
+filters = EntryFilter( sys.argv[2:] )
+
for d in sorted(os.listdir(logdir)) :
print "====",d,"================"
[nm] = os.listdir(os.path.join(logdir,d))
nm = os.path.join(logdir,d,nm)
- for l in read_log_entries(stream_eventlog(nm)) :
+ for l in filter_evt(filters, read_log_entries(stream_eventlog(nm))) :
print l |
67f8acd6d0d9580dc346f4ddd6f765c24d687bcc | cached_counts/tests.py | cached_counts/tests.py | from django.test import TestCase
from .models import CachedCount
class CachedCountTechCase(TestCase):
def setUp(self):
initial_counts = (
{
'count_type': 'constituency',
'name': 'South Norfolk',
'count': 10,
'object_id': '65666'
},
{
'count_type': 'party',
'name': 'Labour',
'count': 0,
'object_id': 'party:53'
},
)
for count in initial_counts:
CachedCount(**count).save()
def test_object_urls(self):
for count in CachedCount.objects.filter(count_type='constituency'):
self.assertTrue(count.object_url)
| import unittest
from django.test import TestCase
from candidates.tests.test_create_person import mock_create_person
from .models import CachedCount
class CachedCountTechCase(TestCase):
def setUp(self):
initial_counts = (
{
'count_type': 'constituency',
'name': 'Dulwich and West Norwood',
'count': 10,
'object_id': '65808'
},
{
'count_type': 'party',
'name': 'Labour',
'count': 0,
'object_id': 'party:53'
},
)
for count in initial_counts:
CachedCount(**count).save()
def test_object_urls(self):
for count in CachedCount.objects.filter(count_type='constituency'):
self.assertTrue(count.object_url)
def test_increment_count(self):
self.assertEqual(CachedCount.objects.get(object_id='party:53').count, 0)
self.assertEqual(CachedCount.objects.get(object_id='65808').count, 10)
mock_create_person()
self.assertEqual(CachedCount.objects.get(object_id='65808').count, 11)
self.assertEqual(CachedCount.objects.get(object_id='party:53').count, 1)
| Use mock_create_person to test increments work | Use mock_create_person to test increments work
| Python | agpl-3.0 | neavouli/yournextrepresentative,mysociety/yournextmp-popit,datamade/yournextmp-popit,mysociety/yournextrepresentative,datamade/yournextmp-popit,mysociety/yournextmp-popit,openstate/yournextrepresentative,YoQuieroSaber/yournextrepresentative,DemocracyClub/yournextrepresentative,openstate/yournextrepresentative,mysociety/yournextrepresentative,YoQuieroSaber/yournextrepresentative,openstate/yournextrepresentative,datamade/yournextmp-popit,DemocracyClub/yournextrepresentative,datamade/yournextmp-popit,neavouli/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextrepresentative,DemocracyClub/yournextrepresentative,openstate/yournextrepresentative,neavouli/yournextrepresentative,openstate/yournextrepresentative,datamade/yournextmp-popit,YoQuieroSaber/yournextrepresentative,YoQuieroSaber/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextmp-popit,neavouli/yournextrepresentative,YoQuieroSaber/yournextrepresentative,neavouli/yournextrepresentative,mysociety/yournextrepresentative | ---
+++
@@ -1,4 +1,8 @@
+import unittest
+
from django.test import TestCase
+
+from candidates.tests.test_create_person import mock_create_person
from .models import CachedCount
@@ -7,9 +11,9 @@
initial_counts = (
{
'count_type': 'constituency',
- 'name': 'South Norfolk',
+ 'name': 'Dulwich and West Norwood',
'count': 10,
- 'object_id': '65666'
+ 'object_id': '65808'
},
{
'count_type': 'party',
@@ -24,3 +28,10 @@
def test_object_urls(self):
for count in CachedCount.objects.filter(count_type='constituency'):
self.assertTrue(count.object_url)
+
+ def test_increment_count(self):
+ self.assertEqual(CachedCount.objects.get(object_id='party:53').count, 0)
+ self.assertEqual(CachedCount.objects.get(object_id='65808').count, 10)
+ mock_create_person()
+ self.assertEqual(CachedCount.objects.get(object_id='65808').count, 11)
+ self.assertEqual(CachedCount.objects.get(object_id='party:53').count, 1) |
cbae962b77b7277f5904279a5418a53e38148f2c | karspexet/show/models.py | karspexet/show/models.py | from django.db import models
import datetime
class Production(models.Model):
name = models.CharField(max_length=100)
description = models.TextField(blank=True)
def __str__(self):
return self.name
class Show(models.Model):
production = models.ForeignKey(Production, on_delete=models.PROTECT)
date = models.DateTimeField()
venue = models.ForeignKey('venue.Venue', on_delete=models.PROTECT)
@staticmethod
def upcoming():
return Show.objects.filter(date__gte=datetime.date.today())
def date_string(self):
return self.date.strftime("%Y-%m-%d %H:%M")
def __str__(self):
return self.production.name + " " + self.date_string()
class Meta:
ordering = ('date',)
| from django.db import models
import datetime
class Production(models.Model):
name = models.CharField(max_length=100)
description = models.TextField(blank=True)
def __str__(self):
return self.name
class Show(models.Model):
production = models.ForeignKey(Production, on_delete=models.PROTECT)
date = models.DateTimeField()
venue = models.ForeignKey('venue.Venue', on_delete=models.PROTECT)
@staticmethod
def upcoming():
return Show.objects.filter(date__gte=datetime.date.today())
@staticmethod
def ticket_coverage():
return Show.objects.raw("""
select show.id,
show.production_id,
show.venue_id,
venue.name as venue_name,
production.name as production_name,
show.date,
count(distinct(ticket.id)) as ticket_count,
count(distinct(seat.id)) as seat_count,
100 * (count(distinct(ticket.id))::float / count(distinct(seat.id))) as sales_percentage
from show_show show
left outer join ticket_ticket ticket on ticket.show_id = show.id
left join venue_venue venue on show.venue_id = venue.id
left join venue_seatinggroup sg on sg.venue_id = venue.id
left join venue_seat seat on sg.id = seat.group_id
left join show_production production on show.production_id = production.id
group by show.id, venue.name, production.name
order by show.date desc
""")
def date_string(self):
return self.date.strftime("%Y-%m-%d %H:%M")
def __str__(self):
return self.production.name + " " + self.date_string()
class Meta:
ordering = ('date',)
| Add Show.ticket_coverage() to get statistics on coverage | Add Show.ticket_coverage() to get statistics on coverage
Very left join, much SQL, wow.
| Python | mit | Karspexet/Karspexet,Karspexet/Karspexet,Karspexet/Karspexet,Karspexet/Karspexet,Karspexet/Karspexet | ---
+++
@@ -19,6 +19,28 @@
def upcoming():
return Show.objects.filter(date__gte=datetime.date.today())
+ @staticmethod
+ def ticket_coverage():
+ return Show.objects.raw("""
+ select show.id,
+ show.production_id,
+ show.venue_id,
+ venue.name as venue_name,
+ production.name as production_name,
+ show.date,
+ count(distinct(ticket.id)) as ticket_count,
+ count(distinct(seat.id)) as seat_count,
+ 100 * (count(distinct(ticket.id))::float / count(distinct(seat.id))) as sales_percentage
+ from show_show show
+ left outer join ticket_ticket ticket on ticket.show_id = show.id
+ left join venue_venue venue on show.venue_id = venue.id
+ left join venue_seatinggroup sg on sg.venue_id = venue.id
+ left join venue_seat seat on sg.id = seat.group_id
+ left join show_production production on show.production_id = production.id
+ group by show.id, venue.name, production.name
+ order by show.date desc
+ """)
+
def date_string(self):
return self.date.strftime("%Y-%m-%d %H:%M")
|
118eabf049db8804635001b2348fcb81c8a2a4f4 | openstack_dashboard/dashboards/admin/routers/ports/tables.py | openstack_dashboard/dashboards/admin/routers/ports/tables.py | # Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import pgettext_lazy
from django.utils.translation import ugettext_lazy as _
from horizon import tables
from openstack_dashboard.dashboards.project.networks.ports \
import tables as networks_tables
from openstack_dashboard.dashboards.project.routers.ports \
import tables as routers_tables
DISPLAY_CHOICES = (
("UP", pgettext_lazy("Admin state of a Network", u"UP")),
("DOWN", pgettext_lazy("Admin state of a Network", u"DOWN")),
)
class PortsTable(tables.DataTable):
name = tables.Column("name",
verbose_name=_("Name"),
link="horizon:admin:networks:ports:detail")
fixed_ips = tables.Column(networks_tables.get_fixed_ips,
verbose_name=_("Fixed IPs"))
status = tables.Column("status", verbose_name=_("Status"))
device_owner = tables.Column(routers_tables.get_device_owner,
verbose_name=_("Type"))
admin_state = tables.Column("admin_state",
verbose_name=_("Admin State"),
display_choices=DISPLAY_CHOICES)
def get_object_display(self, port):
return port.id
class Meta(object):
name = "interfaces"
verbose_name = _("Interfaces")
| # Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import tables
from openstack_dashboard.dashboards.project.routers.ports \
import tables as routers_tables
class PortsTable(routers_tables.PortsTable):
name = tables.Column("name_or_id",
verbose_name=_("Name"),
link="horizon:admin:networks:ports:detail")
class Meta(object):
name = "interfaces"
verbose_name = _("Interfaces")
| Fix router details's name empty and change inheritance project table | Fix router details's name empty and change inheritance project table
In admin router details page, the name column is empty,
change to if no name show id. And change to inheritance
from port table of project.
Change-Id: I54d4ad95bd04db2432eb47f848917a452c5f54e9
Closes-bug:#1417948
| Python | apache-2.0 | j4/horizon,yeming233/horizon,henaras/horizon,yeming233/horizon,damien-dg/horizon,tqtran7/horizon,BiznetGIO/horizon,Hodorable/0602,tqtran7/horizon,RudoCris/horizon,dan1/horizon-x509,agileblaze/OpenStackTwoFactorAuthentication,kfox1111/horizon,maestro-hybrid-cloud/horizon,NeCTAR-RC/horizon,redhat-openstack/horizon,Tesora/tesora-horizon,vladryk/horizon,agileblaze/OpenStackTwoFactorAuthentication,yjxtogo/horizon,NCI-Cloud/horizon,wolverineav/horizon,Solinea/horizon,ChameleonCloud/horizon,saydulk/horizon,bac/horizon,mdavid/horizon,damien-dg/horizon,tellesnobrega/horizon,tqtran7/horizon,redhat-openstack/horizon,ChameleonCloud/horizon,openstack/horizon,Tesora/tesora-horizon,Metaswitch/horizon,icloudrnd/automation_tools,BiznetGIO/horizon,django-leonardo/horizon,tellesnobrega/horizon,luhanhan/horizon,dan1/horizon-proto,Tesora/tesora-horizon,mandeepdhami/horizon,tellesnobrega/horizon,yeming233/horizon,blueboxgroup/horizon,RudoCris/horizon,icloudrnd/automation_tools,mandeepdhami/horizon,Solinea/horizon,newrocknj/horizon,sandvine/horizon,endorphinl/horizon-fork,Mirantis/mos-horizon,Metaswitch/horizon,VaneCloud/horizon,FNST-OpenStack/horizon,liyitest/rr,pranavtendolkr/horizon,philoniare/horizon,coreycb/horizon,mandeepdhami/horizon,icloudrnd/automation_tools,henaras/horizon,redhat-cip/horizon,CiscoSystems/horizon,kfox1111/horizon,dan1/horizon-x509,Mirantis/mos-horizon,Mirantis/mos-horizon,Tesora/tesora-horizon,yeming233/horizon,promptworks/horizon,yjxtogo/horizon,xinwu/horizon,newrocknj/horizon,BiznetGIO/horizon,promptworks/horizon,pranavtendolkr/horizon,newrocknj/horizon,NeCTAR-RC/horizon,pranavtendolkr/horizon,CiscoSystems/horizon,vladryk/horizon,philoniare/horizon,anthonydillon/horizon,luhanhan/horizon,luhanhan/horizon,xinwu/horizon,sandvine/horizon,eayunstack/horizon,xinwu/horizon,bac/horizon,dan1/horizon-proto,j4/horizon,eayunstack/horizon,izadorozhna/dashboard_integration_tests,newrocknj/horizon,henaras/horizon,philoniare/horizon,anthonydillon/horizo
n,Metaswitch/horizon,mdavid/horizon,kfox1111/horizon,blueboxgroup/horizon,icloudrnd/automation_tools,gerrive/horizon,tqtran7/horizon,j4/horizon,django-leonardo/horizon,redhat-cip/horizon,endorphinl/horizon,noironetworks/horizon,dan1/horizon-x509,tellesnobrega/horizon,agileblaze/OpenStackTwoFactorAuthentication,endorphinl/horizon-fork,Dark-Hacker/horizon,ChameleonCloud/horizon,takeshineshiro/horizon,coreycb/horizon,mdavid/horizon,anthonydillon/horizon,Dark-Hacker/horizon,NCI-Cloud/horizon,karthik-suresh/horizon,luhanhan/horizon,blueboxgroup/horizon,philoniare/horizon,Daniex/horizon,endorphinl/horizon,RudoCris/horizon,bigswitch/horizon,redhat-cip/horizon,FNST-OpenStack/horizon,openstack/horizon,endorphinl/horizon-fork,noironetworks/horizon,CiscoSystems/horizon,noironetworks/horizon,saydulk/horizon,wangxiangyu/horizon,openstack/horizon,NeCTAR-RC/horizon,davidcusatis/horizon,NCI-Cloud/horizon,wolverineav/horizon,wolverineav/horizon,redhat-openstack/horizon,doug-fish/horizon,gerrive/horizon,liyitest/rr,bac/horizon,wangxiangyu/horizon,endorphinl/horizon,yjxtogo/horizon,wolverineav/horizon,agileblaze/OpenStackTwoFactorAuthentication,endorphinl/horizon,FNST-OpenStack/horizon,takeshineshiro/horizon,BiznetGIO/horizon,mdavid/horizon,dan1/horizon-proto,takeshineshiro/horizon,henaras/horizon,xinwu/horizon,saydulk/horizon,maestro-hybrid-cloud/horizon,j4/horizon,davidcusatis/horizon,bigswitch/horizon,coreycb/horizon,karthik-suresh/horizon,wangxiangyu/horizon,VaneCloud/horizon,watonyweng/horizon,vladryk/horizon,promptworks/horizon,mandeepdhami/horizon,maestro-hybrid-cloud/horizon,idjaw/horizon,dan1/horizon-proto,endorphinl/horizon-fork,NeCTAR-RC/horizon,davidcusatis/horizon,Daniex/horizon,Mirantis/mos-horizon,saydulk/horizon,RudoCris/horizon,django-leonardo/horizon,gerrive/horizon,Daniex/horizon,izadorozhna/dashboard_integration_tests,liyitest/rr,yjxtogo/horizon,idjaw/horizon,FNST-OpenStack/horizon,karthik-suresh/horizon,bigswitch/horizon,idjaw/horizon,Solinea/horizon,damien-dg/hor
izon,VaneCloud/horizon,maestro-hybrid-cloud/horizon,sandvine/horizon,eayunstack/horizon,karthik-suresh/horizon,coreycb/horizon,vladryk/horizon,Hodorable/0602,CiscoSystems/horizon,ChameleonCloud/horizon,gerrive/horizon,openstack/horizon,dan1/horizon-x509,Metaswitch/horizon,Dark-Hacker/horizon,redhat-openstack/horizon,django-leonardo/horizon,liyitest/rr,Solinea/horizon,wangxiangyu/horizon,Hodorable/0602,damien-dg/horizon,bigswitch/horizon,VaneCloud/horizon,idjaw/horizon,watonyweng/horizon,sandvine/horizon,promptworks/horizon,doug-fish/horizon,pranavtendolkr/horizon,bac/horizon,Hodorable/0602,doug-fish/horizon,doug-fish/horizon,blueboxgroup/horizon,watonyweng/horizon,watonyweng/horizon,takeshineshiro/horizon,noironetworks/horizon,Dark-Hacker/horizon,anthonydillon/horizon,Daniex/horizon,davidcusatis/horizon,kfox1111/horizon,NCI-Cloud/horizon,redhat-cip/horizon | ---
+++
@@ -12,37 +12,17 @@
# License for the specific language governing permissions and limitations
# under the License.
-from django.utils.translation import pgettext_lazy
from django.utils.translation import ugettext_lazy as _
from horizon import tables
-from openstack_dashboard.dashboards.project.networks.ports \
- import tables as networks_tables
from openstack_dashboard.dashboards.project.routers.ports \
import tables as routers_tables
-DISPLAY_CHOICES = (
- ("UP", pgettext_lazy("Admin state of a Network", u"UP")),
- ("DOWN", pgettext_lazy("Admin state of a Network", u"DOWN")),
-)
-
-
-class PortsTable(tables.DataTable):
- name = tables.Column("name",
+class PortsTable(routers_tables.PortsTable):
+ name = tables.Column("name_or_id",
verbose_name=_("Name"),
link="horizon:admin:networks:ports:detail")
- fixed_ips = tables.Column(networks_tables.get_fixed_ips,
- verbose_name=_("Fixed IPs"))
- status = tables.Column("status", verbose_name=_("Status"))
- device_owner = tables.Column(routers_tables.get_device_owner,
- verbose_name=_("Type"))
- admin_state = tables.Column("admin_state",
- verbose_name=_("Admin State"),
- display_choices=DISPLAY_CHOICES)
-
- def get_object_display(self, port):
- return port.id
class Meta(object):
name = "interfaces" |
4c267c78a09128dc80be821654dd60b174d53d41 | setuptools/logging.py | setuptools/logging.py | import sys
import logging
import distutils.log
from . import monkey
def _not_warning(record):
return record.levelno < logging.WARNING
def configure():
"""
Configure logging to emit warning and above to stderr
and everything else to stdout. This behavior is provided
for compatibility with distutils.log but may change in
the future.
"""
err_handler = logging.StreamHandler()
err_handler.setLevel(logging.WARNING)
out_handler = logging.StreamHandler(sys.stdout)
out_handler.addFilter(_not_warning)
handlers = err_handler, out_handler
logging.basicConfig(
format="{message}", style='{', handlers=handlers, level=logging.DEBUG)
if hasattr(distutils.log, 'Log'):
monkey.patch_func(set_threshold, distutils.log, 'set_threshold')
# For some reason `distutils.log` module is getting cached in `distutils.dist`
# and then loaded again when patched,
# implying: id(distutils.log) != id(distutils.dist.log).
# Make sure the same module object is used everywhere:
distutils.dist.log = distutils.log
def set_threshold(level):
logging.root.setLevel(level*10)
return set_threshold.unpatched(level)
| import sys
import inspect
import logging
import distutils.log
from . import monkey
def _not_warning(record):
return record.levelno < logging.WARNING
def configure():
"""
Configure logging to emit warning and above to stderr
and everything else to stdout. This behavior is provided
for compatibility with distutils.log but may change in
the future.
"""
err_handler = logging.StreamHandler()
err_handler.setLevel(logging.WARNING)
out_handler = logging.StreamHandler(sys.stdout)
out_handler.addFilter(_not_warning)
handlers = err_handler, out_handler
logging.basicConfig(
format="{message}", style='{', handlers=handlers, level=logging.DEBUG)
if inspect.ismodule(distutils.dist.log):
monkey.patch_func(set_threshold, distutils.log, 'set_threshold')
# For some reason `distutils.log` module is getting cached in `distutils.dist`
# and then loaded again when patched,
# implying: id(distutils.log) != id(distutils.dist.log).
# Make sure the same module object is used everywhere:
distutils.dist.log = distutils.log
def set_threshold(level):
logging.root.setLevel(level*10)
return set_threshold.unpatched(level)
| Replace condition to patch distutils.dist.log | Replace condition to patch distutils.dist.log
As `distutils.log.Log` was backfilled for compatibility we no longer can
use this as a condition.
| Python | mit | pypa/setuptools,pypa/setuptools,pypa/setuptools | ---
+++
@@ -1,4 +1,5 @@
import sys
+import inspect
import logging
import distutils.log
from . import monkey
@@ -22,7 +23,7 @@
handlers = err_handler, out_handler
logging.basicConfig(
format="{message}", style='{', handlers=handlers, level=logging.DEBUG)
- if hasattr(distutils.log, 'Log'):
+ if inspect.ismodule(distutils.dist.log):
monkey.patch_func(set_threshold, distutils.log, 'set_threshold')
# For some reason `distutils.log` module is getting cached in `distutils.dist`
# and then loaded again when patched, |
92e96d8b352f608a0d04521a140bdd3dcbe5ebbc | test/tools/service.py | test/tools/service.py | from os import environ as env
from glanceclient import Client as glclient
import keystoneclient.v2_0.client as ksclient
import novaclient.v2.client as nvclient
client_args = {
}
nova = nvclient.Client(auth_url=env['OS_AUTH_URL'],
username=env['OS_USERNAME'],
api_key=env['OS_PASSWORD'],
project_id=env['OS_TENANT_NAME'])
keystone = ksclient.Client(auth_url=env['OS_AUTH_URL'],
username=env['OS_USERNAME'],
password=env['OS_PASSWORD'],
tenant_name=env['OS_TENANT_NAME'],)
glance_endpoint = keystone.service_catalog.url_for(service_type='image')
glance = glclient(version=2, endpoint=glance_endpoint, token=keystone.auth_token)
| from os import environ as env
from glanceclient import Client as glclient
import keystoneclient.v2_0.client as ksclient
import novaclient.client as nvclient
client_args = {
}
nova = nvclient.Client('2', env['OS_USERNAME'], env['OS_PASSWORD'], env['OS_TENANT_NAME'], env['OS_AUTH_URL'])
keystone = ksclient.Client(auth_url=env['OS_AUTH_URL'],
username=env['OS_USERNAME'],
password=env['OS_PASSWORD'],
tenant_name=env['OS_TENANT_NAME'],)
glance_endpoint = keystone.service_catalog.url_for(service_type='image')
glance = glclient(version=2, endpoint=glance_endpoint, token=keystone.auth_token)
| Fix nova client usage, it should not be initialized this way | Fix nova client usage, it should not be initialized this way
| Python | apache-2.0 | pellaeon/bsd-cloudinit-installer,pellaeon/bsd-cloudinit-installer | ---
+++
@@ -2,16 +2,13 @@
from glanceclient import Client as glclient
import keystoneclient.v2_0.client as ksclient
-import novaclient.v2.client as nvclient
+import novaclient.client as nvclient
client_args = {
}
-nova = nvclient.Client(auth_url=env['OS_AUTH_URL'],
- username=env['OS_USERNAME'],
- api_key=env['OS_PASSWORD'],
- project_id=env['OS_TENANT_NAME'])
+nova = nvclient.Client('2', env['OS_USERNAME'], env['OS_PASSWORD'], env['OS_TENANT_NAME'], env['OS_AUTH_URL'])
keystone = ksclient.Client(auth_url=env['OS_AUTH_URL'],
username=env['OS_USERNAME'], |
3d524f00269026bc66dcd2b085c60c0d43242ee5 | changes/api/jobstep_deallocate.py | changes/api/jobstep_deallocate.py | from __future__ import absolute_import, division, unicode_literals
from changes.api.base import APIView
from changes.constants import Status
from changes.config import db
from changes.jobs.sync_job_step import sync_job_step
from changes.models import JobStep
class JobStepDeallocateAPIView(APIView):
def post(self, step_id):
to_deallocate = JobStep.query.get(step_id)
if to_deallocate is None:
return '', 404
if to_deallocate.status != Status.allocated:
return {
"error": "Only {0} job steps may be deallocated.",
"actual_status": to_deallocate.status.name
}, 400
to_deallocate.status = Status.pending_allocation
to_deallocate.date_started = None
to_deallocate.date_finished = None
db.session.add(to_deallocate)
db.session.commit()
sync_job_step.delay(
step_id=to_deallocate.id.hex,
task_id=to_deallocate.id.hex,
parent_task_id=to_deallocate.job_id.hex,
)
return self.respond(to_deallocate)
| from __future__ import absolute_import, division, unicode_literals
from changes.api.base import APIView
from changes.constants import Result, Status
from changes.config import db
from changes.jobs.sync_job_step import sync_job_step
from changes.models import JobStep
class JobStepDeallocateAPIView(APIView):
def post(self, step_id):
to_deallocate = JobStep.query.get(step_id)
if to_deallocate is None:
return '', 404
if to_deallocate.status not in (Status.allocated, Status.in_progress):
return {
"error": "Only allocated and in_progress job steps may be deallocated.",
"actual_status": to_deallocate.status.name
}, 400
to_deallocate.status = Status.pending_allocation
to_deallocate.result = Result.unknown
to_deallocate.date_started = None
to_deallocate.date_finished = None
db.session.add(to_deallocate)
db.session.commit()
sync_job_step.delay(
step_id=to_deallocate.id.hex,
task_id=to_deallocate.id.hex,
parent_task_id=to_deallocate.job_id.hex,
)
return self.respond(to_deallocate)
| Allow deallocation of running jobsteps | Allow deallocation of running jobsteps
| Python | apache-2.0 | bowlofstew/changes,bowlofstew/changes,dropbox/changes,bowlofstew/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes,wfxiang08/changes,bowlofstew/changes,dropbox/changes | ---
+++
@@ -1,7 +1,7 @@
from __future__ import absolute_import, division, unicode_literals
from changes.api.base import APIView
-from changes.constants import Status
+from changes.constants import Result, Status
from changes.config import db
from changes.jobs.sync_job_step import sync_job_step
from changes.models import JobStep
@@ -15,15 +15,17 @@
if to_deallocate is None:
return '', 404
- if to_deallocate.status != Status.allocated:
+ if to_deallocate.status not in (Status.allocated, Status.in_progress):
return {
- "error": "Only {0} job steps may be deallocated.",
+ "error": "Only allocated and in_progress job steps may be deallocated.",
"actual_status": to_deallocate.status.name
}, 400
to_deallocate.status = Status.pending_allocation
+ to_deallocate.result = Result.unknown
to_deallocate.date_started = None
to_deallocate.date_finished = None
+
db.session.add(to_deallocate)
db.session.commit()
|
0409580aed43b6a0556fcc4b8e6e9252d9f082ea | froide/publicbody/management/commands/validate_publicbodies.py | froide/publicbody/management/commands/validate_publicbodies.py | from io import StringIO
from contextlib import contextmanager
from django.conf import settings
from django.core.management.base import BaseCommand
from django.utils import translation
from django.utils.translation import ugettext_lazy as _
from froide.helper.email_sending import send_mail
from ...validators import PublicBodyValidator
from ...models import PublicBody
class Command(BaseCommand):
help = "Validates public bodies"
def add_arguments(self, parser):
parser.add_argument('filename', type=str, nargs='?', default=None)
@contextmanager
def get_stream(self, filename):
if filename is None:
stream = StringIO()
else:
if filename == '-':
stream = self.stdout
else:
stream = open(filename, 'w')
yield stream
if filename is not None and filename != '-':
stream.close()
def handle(self, *args, **options):
translation.activate(settings.LANGUAGE_CODE)
filename = options['filename']
pbs = PublicBody.objects.all().iterator()
validator = PublicBodyValidator(pbs)
with self.get_stream(filename) as stream:
validator.write_csv(stream)
if filename is None and not validator.is_valid:
for name, email in settings.MANAGERS:
send_mail(
_('Public body validation results'),
_('Please find attached the results of the public body validation'),
email,
attachments=[
('validation_result.csv', stream.getvalue().encode('utf-8'), 'text/csv')
]
)
| from io import StringIO
from contextlib import contextmanager
from django.conf import settings
from django.core.management.base import BaseCommand
from django.utils import translation
from django.utils.translation import ugettext_lazy as _
from froide.helper.email_sending import send_mail
from ...validators import PublicBodyValidator
from ...models import PublicBody
class Command(BaseCommand):
help = "Validates public bodies"
def add_arguments(self, parser):
parser.add_argument('filename', type=str, nargs='?', default=None)
@contextmanager
def get_stream(self, filename):
if filename is None:
stream = StringIO()
else:
if filename == '-':
stream = self.stdout
else:
stream = open(filename, 'w')
yield stream
if filename is not None and filename != '-':
stream.close()
def handle(self, *args, **options):
translation.activate(settings.LANGUAGE_CODE)
filename = options['filename']
pbs = PublicBody.objects.all()
validator = PublicBodyValidator(pbs)
with self.get_stream(filename) as stream:
validator.write_csv(stream)
if filename is None and not validator.is_valid:
for name, email in settings.MANAGERS:
send_mail(
_('Public body validation results'),
_('Please find attached the results of the public body validation'),
email,
attachments=[
('validation_result.csv', stream.getvalue().encode('utf-8'), 'text/csv')
]
)
| Use queryset in validate publicbodies command | Use queryset in validate publicbodies command | Python | mit | stefanw/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide,fin/froide,stefanw/froide,fin/froide,fin/froide | ---
+++
@@ -35,7 +35,7 @@
translation.activate(settings.LANGUAGE_CODE)
filename = options['filename']
- pbs = PublicBody.objects.all().iterator()
+ pbs = PublicBody.objects.all()
validator = PublicBodyValidator(pbs)
with self.get_stream(filename) as stream: |
62f3368eabc54c2cb2a78308fac639e65f55e989 | hello.py | hello.py | #!/usr/bin/python -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
"""A tiny Python program to check that Python is working.
Try running this program from the command line like this:
python hello.py
python hello.py Alice
That should print:
Hello World -or- Hello Alice
Try changing the 'Hello' to 'Howdy' and run again.
Once you have that working, you're ready for class -- you can edit
and run Python code; now you just need to learn Python!
"""
import sys
# Define a main() function that prints a little greeting.
def main():
# Get the name from the command line, using 'World' as a fallback.
if len(sys.argv) >= 2:
name = sys.argv[1]
else:
name = 'World'
print 'Howdy', name
print 'yay'
# This is the standard boilerplate that calls the main() function.
if __name__ == '__main__':
main()
| #!/usr/bin/python -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
"""A tiny Python program to check that Python is working.
Try running this program from the command line like this:
python hello.py
python hello.py Alice
That should print:
Hello World -or- Hello Alice
Try changing the 'Hello' to 'Howdy' and run again.
Once you have that working, you're ready for class -- you can edit
and run Python code; now you just need to learn Python!
"""
import sys
# Define a main() function that prints a little greeting.
def main():
# Get the name from the command line, using 'World' as a fallback.
if len(sys.argv) >= 2:
# by convention the script name is put at argv[0]
# the person's name is argv[1]
name = sys.argv[1]
else:
name = 'World'
# print inserts a space between arguments
print 'Howdy', name
print 'yay'
# This is the standard boilerplate that calls the main() function.
if __name__ == '__main__':
main()
| Change indentation from 2 spaces (Google standard) to 4 spaces (PEP-8). | Change indentation from 2 spaces (Google standard) to 4 spaces (PEP-8).
Add comments. | Python | apache-2.0 | beepscore/google-python-exercises,beepscore/google-python-exercises | ---
+++
@@ -21,14 +21,18 @@
# Define a main() function that prints a little greeting.
def main():
- # Get the name from the command line, using 'World' as a fallback.
- if len(sys.argv) >= 2:
- name = sys.argv[1]
- else:
- name = 'World'
- print 'Howdy', name
- print 'yay'
+ # Get the name from the command line, using 'World' as a fallback.
+ if len(sys.argv) >= 2:
+ # by convention the script name is put at argv[0]
+ # the person's name is argv[1]
+ name = sys.argv[1]
+ else:
+ name = 'World'
+
+ # print inserts a space between arguments
+ print 'Howdy', name
+ print 'yay'
# This is the standard boilerplate that calls the main() function.
if __name__ == '__main__':
- main()
+ main() |
b211306824db0a10a79cdab4153c457813b44bca | linter.py | linter.py | #
# linter.py
# Markdown Linter for SublimeLinter, a code checking framework
# for Sublime Text 3
#
# Written by Jon LaBelle
# Copyright (c) 2018 Jon LaBelle
#
# License: MIT
#
"""This module exports the Markdownlint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class MarkdownLint(NodeLinter):
"""Provides an interface to markdownlint."""
syntax = ('markdown', 'markdown gfm', 'multimarkdown', 'markdown extended')
cmd = ('markdownlint', '${args}', '${file}')
npm_name = 'markdownlint'
config_file = ('--config', '.markdownlintrc')
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 0.6.0'
check_version = True
regex = r'.+?[:]\s(?P<line>\d+)[:]\s(?P<error>MD\d+)?[/]?(?P<message>.+)'
multiline = False
line_col_base = (1, 1)
tempfile_suffix = '-'
error_stream = util.STREAM_STDERR
word_re = None
comment_re = r'\s*/[/*]'
| #
# linter.py
# Markdown Linter for SublimeLinter, a code checking framework
# for Sublime Text 3
#
# Written by Jon LaBelle
# Copyright (c) 2018 Jon LaBelle
#
# License: MIT
#
"""This module exports the Markdownlint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class MarkdownLint(NodeLinter):
"""Provides an interface to markdownlint."""
syntax = ('markdown', 'markdown gfm', 'multimarkdown', 'markdown extended')
cmd = ('markdownlint', '${args}', '${file}')
npm_name = 'markdownlint'
config_file = ('--config', '.markdownlintrc')
regex = r'.+?[:]\s(?P<line>\d+)[:]\s(?P<error>MD\d+)?[/]?(?P<message>.+)'
multiline = False
line_col_base = (1, 1)
tempfile_suffix = '-'
error_stream = util.STREAM_STDERR
word_re = None
comment_re = r'\s*/[/*]'
| Remove deprecated version requirement settings | Remove deprecated version requirement settings
Linter plugins can no longer set version requirements.
https://github.com/SublimeLinter/SublimeLinter/issues/1087 | Python | mit | jonlabelle/SublimeLinter-contrib-markdownlint,jonlabelle/SublimeLinter-contrib-markdownlint | ---
+++
@@ -21,10 +21,6 @@
cmd = ('markdownlint', '${args}', '${file}')
npm_name = 'markdownlint'
config_file = ('--config', '.markdownlintrc')
- version_args = '--version'
- version_re = r'(?P<version>\d+\.\d+\.\d+)'
- version_requirement = '>= 0.6.0'
- check_version = True
regex = r'.+?[:]\s(?P<line>\d+)[:]\s(?P<error>MD\d+)?[/]?(?P<message>.+)'
multiline = False
line_col_base = (1, 1) |
fa2d26f6c7652f1c4964ff5df076bf9dcdd3a493 | webvtt/exceptions.py | webvtt/exceptions.py |
class MalformedFileError(Exception):
"""Error raised when the file is not well formatted"""
class MalformedCaptionError(Exception):
"""Error raised when a caption is not well formatted"""
|
class MalformedFileError(Exception):
"""Error raised when the file is not well formatted"""
class MalformedCaptionError(Exception):
"""Error raised when a caption is not well formatted"""
class InvalidCaptionsError(Exception):
"""Error raised when passing wrong captions to the segmenter""" | Add exception for invalid captions | Add exception for invalid captions
| Python | mit | sampattuzzi/webvtt-py,glut23/webvtt-py | ---
+++
@@ -6,3 +6,7 @@
class MalformedCaptionError(Exception):
"""Error raised when a caption is not well formatted"""
+
+
+class InvalidCaptionsError(Exception):
+ """Error raised when passing wrong captions to the segmenter""" |
d5c296197c7f5b422f44e58f8e58ead5fdc5c2ad | reports/models.py | reports/models.py | from datetime import datetime
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Report(models.Model):
addressed_to = models.TextField()
reported_from = models.ForeignKey('members.User')
content = models.TextField()
created_at = models.DateField(_("Date"), default=datetime.now())
copies = models.ManyToManyField('protocols.Topic')
signed_from = models.CharField(max_length=64)
def __unicode__(self):
return self.addressed_to
def get_copies(self):
return "\n".join([c.name for c in self.copies.all()])
| from datetime import datetime
from django.db import models
class Report(models.Model):
addressed_to = models.TextField()
reported_from = models.ForeignKey('members.User')
content = models.TextField()
created_at = models.DateField(default=datetime.now)
copies = models.ManyToManyField('protocols.Topic')
signed_from = models.CharField(max_length=64)
def __unicode__(self):
return self.addressed_to
def get_copies(self):
return "\n".join([c.name for c in self.copies.all()])
| Add new initial migration for reports | Add new initial migration for reports
| Python | mit | Hackfmi/Diaphanum,Hackfmi/Diaphanum | ---
+++
@@ -1,14 +1,12 @@
from datetime import datetime
-
from django.db import models
-from django.utils.translation import ugettext_lazy as _
class Report(models.Model):
addressed_to = models.TextField()
reported_from = models.ForeignKey('members.User')
content = models.TextField()
- created_at = models.DateField(_("Date"), default=datetime.now())
+ created_at = models.DateField(default=datetime.now)
copies = models.ManyToManyField('protocols.Topic')
signed_from = models.CharField(max_length=64)
|
a542cfe1462869c2b64b2202d7316a0af1e3a613 | core/modules/can_access.py | core/modules/can_access.py | import requests
def can_access(url):
try:
current_page = (requests.get(url).text, 'lxml')
answer = "SL"
except requests.exceptions.ConnectionError:
answer = "PL"
return answer | import requests
def can_access(url):
try:
current_page = (requests.get(url).text, 'lxml')
answer = "SL"
except requests.exceptions.ConnectionError:
print("ERROR: Page is inaccessible, return U and move to next case.")
answer = "U"
return answer | Change handling for inaccessible pages | Change handling for inaccessible pages
| Python | bsd-2-clause | mjkim610/phishing-detection,jaeyung1001/phishing_site_detection | ---
+++
@@ -5,5 +5,6 @@
current_page = (requests.get(url).text, 'lxml')
answer = "SL"
except requests.exceptions.ConnectionError:
- answer = "PL"
+ print("ERROR: Page is inaccessible, return U and move to next case.")
+ answer = "U"
return answer |
bb14d53e8db76686a3a93a814b1258933083dc0b | resin/__init__.py | resin/__init__.py | """
Welcome to the Resin Python SDK documentation.
This document aims to describe all the functions supported by the SDK, as well as
showing examples of their expected usage.
Install the Resin SDK:
From Pip:
```
pip install resin-sdk
```
From Source (In case, you want to test a development branch):
```
https://github.com/resin-io/resin-sdk-python
```
If you feel something is missing, not clear or could be improved, [please don't
hesitate to open an issue in GitHub](https://github.com/resin-io/resin-sdk-python/issues), we'll be happy to help.
"""
from .base_request import BaseRequest
from .auth import Auth
from .logs import Logs
from .settings import Settings
from .models import Models
from .twofactor_auth import TwoFactorAuth
__version__ = '4.4.0'
class Resin(object):
"""
This class implements all functions supported by the Python SDK.
Attributes:
settings (Settings): configuration settings for Resin Python SDK.
logs (Logs): logs from devices working on Resin.
auth (Auth): authentication handling.
models (Models): all models in Resin Python SDK.
"""
def __init__(self):
self.settings = Settings()
self.logs = Logs()
self.auth = Auth()
self.models = Models()
self.twofactor_auth = TwoFactorAuth()
| """
Welcome to the Resin Python SDK documentation.
This document aims to describe all the functions supported by the SDK, as well as
showing examples of their expected usage.
Install the Resin SDK:
From Pip:
```
pip install resin-sdk
```
From Source (In case, you want to test a development branch):
```
https://github.com/resin-io/resin-sdk-python
```
Getting started:
```python
>>> from resin import Resin
>>> resin = Resin()
>>> credentials = {'username':<your email>, 'password':<your password>}
>>> resin.auth.login(**credentials)
...
```
If you feel something is missing, not clear or could be improved, [please don't
hesitate to open an issue in GitHub](https://github.com/resin-io/resin-sdk-python/issues), we'll be happy to help.
"""
from .base_request import BaseRequest
from .auth import Auth
from .logs import Logs
from .settings import Settings
from .models import Models
from .twofactor_auth import TwoFactorAuth
__version__ = '4.4.0'
class Resin(object):
"""
This class implements all functions supported by the Python SDK.
Attributes:
settings (Settings): configuration settings for Resin Python SDK.
logs (Logs): logs from devices working on Resin.
auth (Auth): authentication handling.
models (Models): all models in Resin Python SDK.
"""
def __init__(self):
self.settings = Settings()
self.logs = Logs()
self.auth = Auth()
self.models = Models()
self.twofactor_auth = TwoFactorAuth()
| Add getting started section to the docs. | Add getting started section to the docs.
| Python | apache-2.0 | resin-io/resin-sdk-python,resin-io/resin-sdk-python | ---
+++
@@ -13,6 +13,16 @@
From Source (In case, you want to test a development branch):
```
https://github.com/resin-io/resin-sdk-python
+```
+
+Getting started:
+
+```python
+>>> from resin import Resin
+>>> resin = Resin()
+>>> credentials = {'username':<your email>, 'password':<your password>}
+>>> resin.auth.login(**credentials)
+...
```
If you feel something is missing, not clear or could be improved, [please don't |
e2ffc3d09157a0bfba4a0bcaca98691d99d04d77 | Scripted/CIP_/CIP/ui/__init__.py | Scripted/CIP_/CIP/ui/__init__.py | from .CIP_EditorWidget import CIP_EditorWidget
from .CIP_EditBox import *
from .CaseReportsWidget import *
from .PreProcessingWidget import *
from .MIPViewerWidget import *
from .CollapsibleMultilineText import *
from .PdfReporter import *
#from ACIL_GetImage.CaseNavigatorWidget import *
#from AutoUpdateWidget import AutoUpdateWidget
# import os
# CIP_ICON_DIR = os.path.realpath(os.path.dirname(os.path.realpath(__file__)) + '/../Resources/Icons')
# del os
| # from .CIP_EditorWidget import CIP_EditorWidget
# from .CIP_EditBox import *
from .CaseReportsWidget import *
from .PreProcessingWidget import *
from .MIPViewerWidget import *
from .CollapsibleMultilineText import *
from .PdfReporter import *
#from ACIL_GetImage.CaseNavigatorWidget import *
#from AutoUpdateWidget import AutoUpdateWidget
# import os
# CIP_ICON_DIR = os.path.realpath(os.path.dirname(os.path.realpath(__file__)) + '/../Resources/Icons')
# del os
| Make CIP compatible with Slicer 5 | ENH: Make CIP compatible with Slicer 5
- remove CIP UI Editor includes to enable CIP loading in Slicer preview
| Python | bsd-3-clause | acil-bwh/SlicerCIP,acil-bwh/SlicerCIP,acil-bwh/SlicerCIP,acil-bwh/SlicerCIP | ---
+++
@@ -1,5 +1,5 @@
-from .CIP_EditorWidget import CIP_EditorWidget
-from .CIP_EditBox import *
+# from .CIP_EditorWidget import CIP_EditorWidget
+# from .CIP_EditBox import *
from .CaseReportsWidget import *
from .PreProcessingWidget import *
from .MIPViewerWidget import * |
5c116f4559520083d65848b3bde8bb95621a1633 | moksha/hub/amqp/__init__.py | moksha/hub/amqp/__init__.py | """
Here is where we configure which AMQP hub implementation we are going to use.
"""
from qpid010 import QpidAMQPHub
AMQPHub = QpidAMQPHub
#from pyamqplib import AMQPLibHub
#AMQPHub = AMQPLibHub
| """
Here is where we configure which AMQP hub implementation we are going to use.
"""
try:
from qpid010 import QpidAMQPHub
AMQPHub = QpidAMQPHub
except ImportError:
print "Unable to import qpid module"
class FakeHub(object):
pass
AMQPHub = FakeHub
#from pyamqplib import AMQPLibHub
#AMQPHub = AMQPLibHub
| Handle working without the qpid module | Handle working without the qpid module
| Python | apache-2.0 | mokshaproject/moksha,mokshaproject/moksha,pombredanne/moksha,pombredanne/moksha,ralphbean/moksha,mokshaproject/moksha,lmacken/moksha,lmacken/moksha,ralphbean/moksha,ralphbean/moksha,lmacken/moksha,mokshaproject/moksha,pombredanne/moksha,pombredanne/moksha | ---
+++
@@ -2,8 +2,14 @@
Here is where we configure which AMQP hub implementation we are going to use.
"""
-from qpid010 import QpidAMQPHub
-AMQPHub = QpidAMQPHub
+try:
+ from qpid010 import QpidAMQPHub
+ AMQPHub = QpidAMQPHub
+except ImportError:
+ print "Unable to import qpid module"
+ class FakeHub(object):
+ pass
+ AMQPHub = FakeHub
#from pyamqplib import AMQPLibHub
#AMQPHub = AMQPLibHub |
eff9c3865f06eb0fe2100b48274c88db63c2cf4c | websocket_server/__init__.py | websocket_server/__init__.py | # websocket_server -- WebSocket server library
# https://github.com/CylonicRaider/websocket-server
"""
websocket_server -- WebSocket server library
This is a little stand-alone library for WebSocket servers.
It integrates neatly with the standard library, providing easily set-up
servers for both WebSockets and other content. It is intended for
small-scale usages, where installing a proper framework would require
too much work.
TLS support is out of scope of this package; for setting up a TLS-enabled
WebSocket server, refer to online sources on how to achieve that using
HTTPServer.
For an example usage, see the examples module, for reference
documentation, see the wsfile and server modules.
"""
__version__ = '1.0'
# Auxillary modules
from . import compat, constants, exceptions, tools
# Main modules
from . import examples, quick, server, wsfile
__all__ = exceptions.__all__ + ['WebSocketFile', 'wrap',
'WebSocketRequestHandler']
from .exceptions import *
from .wsfile import WebSocketFile, wrap
from .server import WebSocketRequestHandler
| # websocket_server -- WebSocket server library
# https://github.com/CylonicRaider/websocket-server
"""
websocket_server -- WebSocket server library
This is a little stand-alone library for WebSocket servers.
It integrates neatly with the standard library, providing easily set-up
servers for both WebSockets and other content. It is intended for
small-scale usages, where installing a proper framework would require
too much work.
TLS support is out of scope of this package; for setting up a TLS-enabled
WebSocket server, refer to online sources on how to achieve that using
HTTPServer.
For an example usage, see the examples module, for reference
documentation, see the wsfile and server modules.
"""
__version__ = '1.0'
# Auxillary modules
from . import compat, constants, exceptions, tools
# Main modules
from . import examples, quick, server, wsfile
__all__ = constants.__all__ + exceptions.__all__
__all__ += ['WebSocketFile', 'wrap', 'WebSocketRequestHandler']
from .constants import *
from .exceptions import *
from .wsfile import WebSocketFile, wrap
from .server import WebSocketRequestHandler
| Embed constants into toplevel module | Embed constants into toplevel module
| Python | mit | CylonicRaider/websocket-server,CylonicRaider/websocket-server | ---
+++
@@ -25,9 +25,10 @@
# Main modules
from . import examples, quick, server, wsfile
-__all__ = exceptions.__all__ + ['WebSocketFile', 'wrap',
- 'WebSocketRequestHandler']
+__all__ = constants.__all__ + exceptions.__all__
+__all__ += ['WebSocketFile', 'wrap', 'WebSocketRequestHandler']
+from .constants import *
from .exceptions import *
from .wsfile import WebSocketFile, wrap
from .server import WebSocketRequestHandler |
1bfc91af9d3ef59e39e6b4693457e00e2877f321 | rtrss/database.py | rtrss/database.py | import logging
from contextlib import contextmanager
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.exc import SQLAlchemyError
from rtrss import OperationInterruptedException
from rtrss import config
_logger = logging.getLogger(__name__)
engine = create_engine(config.SQLALCHEMY_DATABASE_URI, client_encoding='utf8')
Session = sessionmaker(bind=engine)
@contextmanager
def session_scope(SessionFactory=None):
"""Provide a transactional scope around a series of operations."""
if SessionFactory is None:
SessionFactory = Session
session = SessionFactory()
try:
yield session
except SQLAlchemyError as e:
_logger.error("Database error %s", e)
session.rollback()
raise OperationInterruptedException(e)
else:
session.commit()
finally:
session.close()
def init_db(conn=None):
from rtrss.models import Base
if conn is None:
from database import engine as conn
Base.metadata.create_all(bind=conn)
def clear_db(conn=None):
from rtrss.models import Base
if conn is None:
from database import engine as conn
Base.metadata.drop_all(bind=conn)
| import logging
from contextlib import contextmanager
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.exc import SQLAlchemyError
from rtrss import OperationInterruptedException
from rtrss import config
_logger = logging.getLogger(__name__)
engine = create_engine(config.SQLALCHEMY_DATABASE_URI, client_encoding='utf8')
Session = sessionmaker(bind=engine)
@contextmanager
def session_scope(SessionFactory=None):
"""Provide a transactional scope around a series of operations."""
if SessionFactory is None:
SessionFactory = Session
session = SessionFactory()
try:
yield session
except SQLAlchemyError as e:
_logger.error("Database error %s", e)
session.rollback()
raise OperationInterruptedException(e)
else:
session.commit()
finally:
session.close()
def init_db(conn=None):
_logger.info('Initializing database')
from rtrss.models import Base
Base.metadata.create_all(bind=conn)
def clear_db(conn=None):
_logger.info('Clearing database')
from rtrss.models import Base
Base.metadata.drop_all(bind=conn)
| Remove obsolete imports, add logging | Remove obsolete imports, add logging
| Python | apache-2.0 | notapresent/rtrss,notapresent/rtrss,notapresent/rtrss,notapresent/rtrss | ---
+++
@@ -7,6 +7,7 @@
from rtrss import config
_logger = logging.getLogger(__name__)
+
engine = create_engine(config.SQLALCHEMY_DATABASE_URI, client_encoding='utf8')
Session = sessionmaker(bind=engine)
@@ -31,16 +32,12 @@
def init_db(conn=None):
+ _logger.info('Initializing database')
from rtrss.models import Base
- if conn is None:
- from database import engine as conn
-
Base.metadata.create_all(bind=conn)
def clear_db(conn=None):
+ _logger.info('Clearing database')
from rtrss.models import Base
- if conn is None:
- from database import engine as conn
-
Base.metadata.drop_all(bind=conn) |
83db8d5eb376304b1482b3f46f0b6a800571f50c | non_iterable_example/_5_context.py | non_iterable_example/_5_context.py |
if random:
numbers = 1
print_numbers(numbers)
else:
numbers = 1, 2, 3
print_numbers(numbers)
def print_numbers(numbers):
for n in numbers:
print(n)
|
def print_numbers(numbers):
    """Write every item of *numbers* to stdout, one per line, in order."""
    values = iter(numbers)
    for value in values:
        print(value)
if random:
numbers = 1
print_numbers(numbers)
else:
numbers = 1, 2, 3
print_numbers(numbers)
| Make function exist when called. | Make function exist when called.
| Python | unlicense | markshannon/buggy_code | ---
+++
@@ -1,3 +1,7 @@
+
+def print_numbers(numbers):
+ for n in numbers:
+ print(n)
if random:
numbers = 1
@@ -6,8 +10,4 @@
numbers = 1, 2, 3
print_numbers(numbers)
-def print_numbers(numbers):
- for n in numbers:
- print(n)
- |
f3617d42b5ac9234f2c245dedbba87cf39c816dc | tests/test_accounts.py | tests/test_accounts.py | import unittest
from mock import Mock, patch
from tools import create_mock_json
from twilio.rest.resources import Account
class AccountTest(unittest.TestCase):
@patch("twilio.rest.resources.base.make_twilio_request")
def test_usage_records_subresource(self, request):
resp = create_mock_json("tests/resources/usage_records_list.json")
request.return_value = resp
mock = Mock()
mock.uri = "/base"
account = Account(mock, 'AC123')
account.load_subresources()
records = account.usage_records.list()
self.assertEquals(len(records), 2)
@patch("twilio.rest.resources.base.make_twilio_request")
def test_usage_triggers_subresource(self, request):
resp = create_mock_json("tests/resources/usage_triggers_list.json")
request.return_value = resp
mock = Mock()
mock.uri = "/base"
account = Account(mock, 'AC123')
account.load_subresources()
triggers = account.usage_triggers.list()
self.assertEquals(len(triggers), 2)
| import unittest
from mock import Mock, patch
from tools import create_mock_json
from twilio.rest.resources import Account
class AccountTest(unittest.TestCase):
@patch("twilio.rest.resources.base.make_twilio_request")
def test_usage_records_subresource(self, request):
resp = create_mock_json("tests/resources/usage_records_list.json")
request.return_value = resp
mock = Mock()
mock.uri = "/base"
account = Account(mock, 'AC123')
account.load_subresources()
records = account.usage_records.list()
self.assertEquals(len(records), 2)
@patch("twilio.rest.resources.base.make_twilio_request")
def test_usage_triggers_subresource(self, request):
resp = create_mock_json("tests/resources/usage_triggers_list.json")
request.return_value = resp
mock = Mock()
mock.uri = "/base"
account = Account(mock, 'AC123')
account.load_subresources()
triggers = account.usage_triggers.list()
self.assertEquals(len(triggers), 2)
| Update file for pep8 compliance | Update file for pep8 compliance
| Python | mit | Stackdriver/twilio-python,cinemapub/bright-response,tysonholub/twilio-python,Mobii/twilio-python,supermanheng21/twilio-python,cinemapub/bright-response,bcorwin/twilio-python,johannakate/twilio-python,clearcare/twilio-python,Stackdriver/twilio-python,YeelerG/twilio-python,Rosy-S/twilio-python,twilio/twilio-python | ---
+++
@@ -4,6 +4,7 @@
from tools import create_mock_json
from twilio.rest.resources import Account
+
class AccountTest(unittest.TestCase):
@@ -19,6 +20,7 @@
records = account.usage_records.list()
self.assertEquals(len(records), 2)
+
@patch("twilio.rest.resources.base.make_twilio_request")
def test_usage_triggers_subresource(self, request):
resp = create_mock_json("tests/resources/usage_triggers_list.json") |
17e80f746fd634f84f3050b8ef613537a62c1f73 | reddit.py | reddit.py | #!/usr/bin/env python
import sys,requests
REDDIT=sys.argv[1]
CHANNEL=sys.argv[2]
FEED=sys.argv[3]
STATEFILE="/home/ircbot/state/reddit-%s-%s-storyids"%(CHANNEL,REDDIT)
seen = set(open(STATEFILE).read().split("\n"))
data = requests.get("http://www.reddit.com/r/%s/%s.json" %(REDDIT,FEED)).json()
new=[]
writer=open("/home/ircbot/irc/irc.mozilla.org/%s/in"%CHANNEL, "a")
for post in data["data"]["children"]:
post = post['data']
if not post["id"] in seen:
writer.write(post["title"]+"\n")
if post["domain"] == "self.%s" % REDDIT:
writer.write(post["url"]+"\n")
else:
writer.write(post["url"]+" "+post["permalink"]+"\n")
new.append(post["id"])
if len(new) != 0:
f = open(STATEFILE, "a")
f.write("\n".join(new))
| #!/usr/bin/env python
import sys,requests,json
REDDIT=sys.argv[1]
CHANNEL=sys.argv[2]
FEED=sys.argv[3]
# Test mode:
if len(sys.argv) == 5:
print "running in test mode"
data = json.loads(open(sys.argv[4]).read())
writer=sys.stdout
else:
req = requests.get("http://www.reddit.com/r/%s/%s.json" %(REDDIT,FEED))
if req.status_code != 200:
print "Kabloom!"
print req.text
sys.exit(1)
data = req.json()
writer=open("/home/ircbot/irc/irc.mozilla.org/%s/in"%CHANNEL, "a")
STATEFILE="/home/ircbot/state/reddit-%s-%s-storyids"%(CHANNEL,REDDIT)
sf = open(STATEFILE)
seen = set(sf.read().split("\n"))
sf.close()
new=[]
for post in data["data"]["children"]:
post = post['data']
if not post["id"] in seen:
writer.write(post["title"]+"\n")
if post["domain"] == "self.%s" % REDDIT:
writer.write(post["url"]+"\n")
else:
writer.write(post["url"]+" "+post["permalink"]+"\n")
new.append(post["id"])
if len(new) != 0:
f = open(STATEFILE, "a")
for new in new:
f.write(new+"\n")
f.close()
| Add a test mode, handle 429 errors, stop corrupting state file | Add a test mode, handle 429 errors, stop corrupting state file
| Python | unlicense | iibot-irc/irc-reddit,iibot-irc/irc-reddit | ---
+++
@@ -1,17 +1,30 @@
#!/usr/bin/env python
-import sys,requests
+import sys,requests,json
REDDIT=sys.argv[1]
CHANNEL=sys.argv[2]
FEED=sys.argv[3]
+# Test mode:
+if len(sys.argv) == 5:
+ print "running in test mode"
+ data = json.loads(open(sys.argv[4]).read())
+ writer=sys.stdout
+else:
+ req = requests.get("http://www.reddit.com/r/%s/%s.json" %(REDDIT,FEED))
+ if req.status_code != 200:
+ print "Kabloom!"
+ print req.text
+ sys.exit(1)
+ data = req.json()
+ writer=open("/home/ircbot/irc/irc.mozilla.org/%s/in"%CHANNEL, "a")
STATEFILE="/home/ircbot/state/reddit-%s-%s-storyids"%(CHANNEL,REDDIT)
-seen = set(open(STATEFILE).read().split("\n"))
+sf = open(STATEFILE)
+seen = set(sf.read().split("\n"))
+sf.close()
-data = requests.get("http://www.reddit.com/r/%s/%s.json" %(REDDIT,FEED)).json()
new=[]
-writer=open("/home/ircbot/irc/irc.mozilla.org/%s/in"%CHANNEL, "a")
for post in data["data"]["children"]:
post = post['data']
if not post["id"] in seen:
@@ -23,4 +36,6 @@
new.append(post["id"])
if len(new) != 0:
f = open(STATEFILE, "a")
- f.write("\n".join(new))
+ for new in new:
+ f.write(new+"\n")
+ f.close() |
189f62b93382a1db0f2780678bbd4ad9c7769623 | util.py | util.py | import sys
def format_cols(cols):
    """Build a printf-style format string that left-justifies each column.

    cols -- a list of rows, each a sequence of strings of equal length;
            every column's width is the longest value seen in it.
    Returns a pattern like "%-05s %-03s" suitable for ``fmt % tuple(row)``.
    """
    widths = [0] * len(cols[0])
    for row in cols:
        for idx, val in enumerate(row):
            widths[idx] = max(len(val), widths[idx])
    # One left-justified conversion per column, joined by single spaces.
    # (Removed the unused local `f` from the original implementation.)
    return " ".join("%%-0%ds" % (w,) for w in widths)
def column_report(title, fields, cols):
l = []
l.append("[" + title + "]")
l.append("")
f = format_cols([fields] + cols)
header = f % tuple(fields)
l.append(header)
l.append("-" * len(header))
for i in cols:
l.append(f % tuple(i))
l.append("")
l.append("")
return "\n".join(l)
def basename(uri):
return uri.rstrip("/").split("/")[-1]
def step(desc):
print desc
print "=" * len(desc)
print
def end_step():
raw_input("Press enter to run the next step.")
print
print
def check_response(r, expected_statuses=None):
if expected_statuses == None:
expected_statuses = [200]
ok = False
for i in expected_statuses:
if r.status_code == i:
ok = True
break
if not ok:
print "Request failed to succeed:"
print "Status: %s" % (r.status_code,)
print r.content
sys.exit(1)
| # As a hack, disable SSL warnings.
import urllib3
urllib3.disable_warnings()
import sys
def format_cols(cols):
    """Build a printf-style format string that left-justifies each column.

    cols -- a list of rows, each a sequence of strings of equal length;
            every column's width is the longest value seen in it.
    Returns a pattern like "%-05s %-03s" suitable for ``fmt % tuple(row)``.
    """
    widths = [0] * len(cols[0])
    for row in cols:
        for idx, val in enumerate(row):
            widths[idx] = max(len(val), widths[idx])
    # One left-justified conversion per column, joined by single spaces.
    # (Removed the unused local `f` from the original implementation.)
    return " ".join("%%-0%ds" % (w,) for w in widths)
def column_report(title, fields, cols):
    """Render a titled, column-aligned text table.

    Output is: "[title]", a blank line, the header row, a dash rule the
    width of the header, one line per row in *cols*, and two blank lines.
    """
    fmt = format_cols([fields] + cols)
    header = fmt % tuple(fields)
    lines = ["[" + title + "]", "", header, "-" * len(header)]
    lines.extend(fmt % tuple(row) for row in cols)
    lines.extend(["", ""])
    return "\n".join(lines)
def basename(uri):
    """Return the last path segment of *uri*, ignoring trailing slashes."""
    trimmed = uri.rstrip("/")
    return trimmed.rpartition("/")[2]
def step(desc):
    """Print *desc* as an underlined heading introducing a demo step.

    Python 2 module (uses print statements).
    """
    print desc
    print "=" * len(desc)
    print
def end_step():
    """Block until the user presses enter, then print two blank separator lines."""
    raw_input("Press enter to run the next step.")
    print
    print
def check_response(r, expected_statuses=None):
if expected_statuses == None:
expected_statuses = [200]
ok = False
for i in expected_statuses:
if r.status_code == i:
ok = True
break
if not ok:
print "Request failed to succeed:"
print "Status: %s" % (r.status_code,)
print r.content
sys.exit(1)
| Disable SSL warnings by default. | Disable SSL warnings by default.
| Python | mit | lightcrest/kahu-api-demo | ---
+++
@@ -1,3 +1,7 @@
+# As a hack, disable SSL warnings.
+import urllib3
+urllib3.disable_warnings()
+
import sys
def format_cols(cols): |
9195060fa66b2a068bca4680716a0d5b7be9b13d | wake.py | wake.py | import couchdb
from been.couch import CouchStore
from flask import Flask, render_template
app = Flask(__name__)
app.jinja_env.trim_blocks = True
store = CouchStore()
store.load()
@app.route('/')
def wake():
return render_template('stream.html', events=store.events())
if __name__ == '__main__':
app.run(debug=True)
| from been.couch import CouchStore
from flask import Flask, render_template
app = Flask(__name__)
app.jinja_env.trim_blocks = True
store = CouchStore()
store.load()
@app.route('/')
def wake():
return render_template('stream.html', events=store.events())
if __name__ == '__main__':
app.run(debug=True)
| Clean up imports and whitespace. | Clean up imports and whitespace. | Python | bsd-3-clause | chromakode/wake | ---
+++
@@ -1,7 +1,6 @@
-import couchdb
from been.couch import CouchStore
+from flask import Flask, render_template
-from flask import Flask, render_template
app = Flask(__name__)
app.jinja_env.trim_blocks = True
|
8161ec1fc511ba948451ce121c863ca878ef482d | tests/test_pool.py | tests/test_pool.py | import random
import unittest
from aioes.pool import RandomSelector
class TestRandomSelector(unittest.TestCase):
def setUp(self):
random.seed(123456)
def tearDown(self):
random.seed(None)
def test_select(self):
s = RandomSelector()
r = s.select([1, 2, 3])
self.assertEqual(2, r)
| import random
import unittest
from aioes.pool import RandomSelector, RoundRobinSelector
class TestRandomSelector(unittest.TestCase):
def setUp(self):
random.seed(123456)
def tearDown(self):
random.seed(None)
def test_select(self):
s = RandomSelector()
r = s.select([1, 2, 3])
self.assertEqual(2, r)
class TestRoundRobinSelector(unittest.TestCase):
    # Exercises aioes.pool.RoundRobinSelector: repeated select() calls over
    # the same list should walk it in order and wrap around.

    def test_select(self):
        """Selector cycles 2 -> 3 -> 1 -> 2 over a three-item list."""
        s = RoundRobinSelector()
        # NOTE(review): the first pick is the second element, which implies
        # the selector's internal counter starts at 1 — confirm in aioes.pool.
        r = s.select([1, 2, 3])
        self.assertEqual(2, r)
        r = s.select([1, 2, 3])
        self.assertEqual(3, r)
        r = s.select([1, 2, 3])
        self.assertEqual(1, r)
        # Fourth call wraps back to the start of the cycle.
        r = s.select([1, 2, 3])
        self.assertEqual(2, r)
| Add test for round-robin selector | Add test for round-robin selector
| Python | apache-2.0 | aio-libs/aioes | ---
+++
@@ -1,7 +1,7 @@
import random
import unittest
-from aioes.pool import RandomSelector
+from aioes.pool import RandomSelector, RoundRobinSelector
class TestRandomSelector(unittest.TestCase):
@@ -16,3 +16,17 @@
s = RandomSelector()
r = s.select([1, 2, 3])
self.assertEqual(2, r)
+
+
+class TestRoundRobinSelector(unittest.TestCase):
+
+ def test_select(self):
+ s = RoundRobinSelector()
+ r = s.select([1, 2, 3])
+ self.assertEqual(2, r)
+ r = s.select([1, 2, 3])
+ self.assertEqual(3, r)
+ r = s.select([1, 2, 3])
+ self.assertEqual(1, r)
+ r = s.select([1, 2, 3])
+ self.assertEqual(2, r) |
2190d5a08a49fb4d9980a744a8cf6e6b26651aaf | addons/event_sale/__openerp__.py | addons/event_sale/__openerp__.py | # -*- coding: utf-8 -*-
{
'name': 'Events Sales',
'version': '1.1',
'category': 'Tools',
'website': 'https://www.odoo.com/page/events',
'description': """
Creating registration with sale orders.
=======================================
This module allows you to automate and connect your registration creation with
your main sale flow and therefore, to enable the invoicing feature of registrations.
It defines a new kind of service products that offers you the possibility to
choose an event category associated with it. When you encode a sale order for
that product, you will be able to choose an existing event of that category and
when you confirm your sale order it will automatically create a registration for
this event.
""",
'depends': ['event', 'sale_crm'],
'data': [
'views/event.xml',
'views/product.xml',
'views/sale_order.xml',
'event_sale_data.xml',
'report/event_event_templates.xml',
'security/ir.model.access.csv',
'wizard/event_edit_registration.xml',
],
'demo': ['event_demo.xml'],
'test': ['test/confirm.yml'],
'installable': True,
'auto_install': True
}
| # -*- coding: utf-8 -*-
{
'name': 'Events Sales',
'version': '1.1',
'category': 'Tools',
'website': 'https://www.odoo.com/page/events',
'description': """
Creating registration with sale orders.
=======================================
This module allows you to automate and connect your registration creation with
your main sale flow and therefore, to enable the invoicing feature of registrations.
It defines a new kind of service products that offers you the possibility to
choose an event category associated with it. When you encode a sale order for
that product, you will be able to choose an existing event of that category and
when you confirm your sale order it will automatically create a registration for
this event.
""",
'depends': ['event', 'sale'],
'data': [
'views/event.xml',
'views/product.xml',
'views/sale_order.xml',
'event_sale_data.xml',
'report/event_event_templates.xml',
'security/ir.model.access.csv',
'wizard/event_edit_registration.xml',
],
'demo': ['event_demo.xml'],
'test': ['test/confirm.yml'],
'installable': True,
'auto_install': True
}
| Remove crm as dependency of event_sale | Remove crm as dependency of event_sale
There is no need for CRM to have event management. Introduced at 64d63ffaa6f5f2e72084ea6aedc6f2c2b7130ca6 for probably no reason... | Python | agpl-3.0 | sysadminmatmoz/OCB,microcom/odoo,Elico-Corp/odoo_OCB,laslabs/odoo,storm-computers/odoo,stephen144/odoo,bplancher/odoo,laslabs/odoo,sysadminmatmoz/OCB,stephen144/odoo,hip-odoo/odoo,microcom/odoo,ygol/odoo,optima-ict/odoo,storm-computers/odoo,storm-computers/odoo,ygol/odoo,bplancher/odoo,bplancher/odoo,stephen144/odoo,ygol/odoo,ygol/odoo,optima-ict/odoo,hip-odoo/odoo,stephen144/odoo,sysadminmatmoz/OCB,dfang/odoo,bplancher/odoo,Elico-Corp/odoo_OCB,stephen144/odoo,dfang/odoo,microcom/odoo,Elico-Corp/odoo_OCB,ygol/odoo,optima-ict/odoo,optima-ict/odoo,storm-computers/odoo,bplancher/odoo,dfang/odoo,hip-odoo/odoo,laslabs/odoo,Elico-Corp/odoo_OCB,hip-odoo/odoo,ygol/odoo,dfang/odoo,dfang/odoo,microcom/odoo,hip-odoo/odoo,dfang/odoo,laslabs/odoo,storm-computers/odoo,storm-computers/odoo,hip-odoo/odoo,optima-ict/odoo,ygol/odoo,laslabs/odoo,optima-ict/odoo,sysadminmatmoz/OCB,laslabs/odoo,microcom/odoo,microcom/odoo,sysadminmatmoz/OCB,bplancher/odoo,sysadminmatmoz/OCB,Elico-Corp/odoo_OCB,sysadminmatmoz/OCB,Elico-Corp/odoo_OCB,stephen144/odoo | ---
+++
@@ -18,7 +18,7 @@
when you confirm your sale order it will automatically create a registration for
this event.
""",
- 'depends': ['event', 'sale_crm'],
+ 'depends': ['event', 'sale'],
'data': [
'views/event.xml',
'views/product.xml', |
7fe3eae251f3b0b4d433cf2d14f619306159ae43 | manage.py | manage.py | import os
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from app import create_app, db
app = create_app(os.getenv("FLASK_CONFIG") or "default")
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command("db", MigrateCommand)
if __name__ == "__main__":
manager.run() | import os
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from app import create_app, db
app = create_app(os.getenv("MYDICTIONARY_CONFIG") or "default")
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command("db", MigrateCommand)
if __name__ == "__main__":
manager.run() | Change environment variable for app configuration | Change environment variable for app configuration
| Python | mit | Encrylize/MyDictionary,Encrylize/MyDictionary,Encrylize/MyDictionary | ---
+++
@@ -4,7 +4,7 @@
from app import create_app, db
-app = create_app(os.getenv("FLASK_CONFIG") or "default")
+app = create_app(os.getenv("MYDICTIONARY_CONFIG") or "default")
migrate = Migrate(app, db)
|
9f8a8321fbed1008f0eec608ba7bce9b08897e40 | manage.py | manage.py | import os
import unittest
from flask.ext.script import Manager
from server import app
from server.models import db
manager = Manager(app)
@manager.command
def init_db():
""" Initialize database: drop and create all columns """
db.drop_all()
db.create_all()
@manager.command
def test():
tests_path = os.path.join(os.path.dirname(__file__), 'server', 'tests')
tests = unittest.defaultTestLoader.discover(tests_path)
runner = unittest.TextTestRunner()
runner.run(tests)
if __name__ == '__main__':
manager.run()
| import os
import unittest
from flask.ext.script import Manager
from server import app
from server.models import db
from server.models import Lecturer, Course, Lecture, Comment
manager = Manager(app)
@manager.command
def init_db():
""" Initialize database: drop and create all columns """
db.drop_all()
db.create_all()
@manager.command
def mock_db():
    """Reset the database and insert a small fixture data set.

    Calls init_db() first, so ALL existing data is dropped. Seeds one
    lecturer, one course, one lecture and two comments, then commits
    everything in a single transaction.
    """
    init_db()
    simon = Lecturer('Simon', 'McCallum')
    db.session.add(simon)
    imt3601 = Course('IMT3601 - Game Programming', simon)
    db.session.add(imt3601)
    imt3601_l1 = Lecture('Lecture 1', imt3601)
    db.session.add(imt3601_l1)
    imt3601_l1_c1 = Comment('This is boring', imt3601_l1)
    db.session.add(imt3601_l1_c1)
    imt3601_l1_c2 = Comment('This is fun!', imt3601_l1)
    db.session.add(imt3601_l1_c2)
    # Single commit so the fixtures land atomically.
    db.session.commit()
@manager.command
def test():
tests_path = os.path.join(os.path.dirname(__file__), 'server', 'tests')
tests = unittest.defaultTestLoader.discover(tests_path)
runner = unittest.TextTestRunner()
runner.run(tests)
if __name__ == '__main__':
manager.run()
| Add command to mock some db data | Add command to mock some db data
| Python | mit | MACSIFS/IFS,MACSIFS/IFS,MACSIFS/IFS,MACSIFS/IFS | ---
+++
@@ -5,6 +5,8 @@
from server import app
from server.models import db
+
+from server.models import Lecturer, Course, Lecture, Comment
manager = Manager(app)
@@ -17,6 +19,28 @@
@manager.command
+def mock_db():
+ """ Insert mock data into database """
+ init_db()
+
+ simon = Lecturer('Simon', 'McCallum')
+ db.session.add(simon)
+
+ imt3601 = Course('IMT3601 - Game Programming', simon)
+ db.session.add(imt3601)
+
+ imt3601_l1 = Lecture('Lecture 1', imt3601)
+ db.session.add(imt3601_l1)
+
+ imt3601_l1_c1 = Comment('This is boring', imt3601_l1)
+ db.session.add(imt3601_l1_c1)
+ imt3601_l1_c2 = Comment('This is fun!', imt3601_l1)
+ db.session.add(imt3601_l1_c2)
+
+ db.session.commit()
+
+
+@manager.command
def test():
tests_path = os.path.join(os.path.dirname(__file__), 'server', 'tests')
tests = unittest.defaultTestLoader.discover(tests_path) |
abcd2bd21a9033353e04514edd78a2a0a06292de | manage.py | manage.py | from flask.ext.script import Manager, Shell
from app import create_app, db
from settings import DevConfig, ProdConfig
import os
if os.environ.get("ENV") == 'prod':
app = create_app(ProdConfig)
else:
app = create_app(DevConfig)
def _context():
"""
Expose shell session access to the app and db modules.
Returns:
dict: Exposing access to 'app' and 'db'.
"""
return {'app': app, 'db': db}
manager = Manager(app)
manager.add_command('shell', Shell(make_context=_context))
if __name__ == '__main__':
manager.run()
| from flask.ext.script import Manager, Shell, prompt, prompt_pass
from app import create_app, models, db
from settings import DevConfig, ProdConfig
import os
if os.environ.get("ENV") == 'prod':
app = create_app(ProdConfig)
else:
app = create_app(DevConfig)
manager = Manager(app)
@manager.command
def init_db():
"""
Creates the database tables from SQLAlchemy models.
Note: Opted to not use Flask-Migrate as it's quite heavyweight compared.
"""
db.drop_all()
db.create_all()
db.session.commit()
@manager.command
def create_user():
"""
Creates a user in the database.
"""
uname = prompt('Please enter a username')
pword = prompt_pass('Please enter a password')
db.session.add(models.User(username=uname, password=pword))
db.session.commit()
def _context():
"""
Expose shell session access to the app and db modules.
Returns:
dict: Exposing access to 'app' and 'db'.
"""
return {'app': app, 'db': db}
manager.add_command('shell', Shell(make_context=_context))
if __name__ == '__main__':
manager.run()
| Add commands for database and user creation | Add commands for database and user creation
| Python | mit | jawrainey/atc,jawrainey/atc | ---
+++
@@ -1,5 +1,5 @@
-from flask.ext.script import Manager, Shell
-from app import create_app, db
+from flask.ext.script import Manager, Shell, prompt, prompt_pass
+from app import create_app, models, db
from settings import DevConfig, ProdConfig
import os
@@ -7,6 +7,31 @@
app = create_app(ProdConfig)
else:
app = create_app(DevConfig)
+
+manager = Manager(app)
+
+
+@manager.command
+def init_db():
+ """
+ Creates the database tables from SQLAlchemy models.
+
+ Note: Opted to not use Flask-Migrate as it's quite heavyweight compared.
+ """
+ db.drop_all()
+ db.create_all()
+ db.session.commit()
+
+
+@manager.command
+def create_user():
+ """
+ Creates a user in the database.
+ """
+ uname = prompt('Please enter a username')
+ pword = prompt_pass('Please enter a password')
+ db.session.add(models.User(username=uname, password=pword))
+ db.session.commit()
def _context():
@@ -18,7 +43,6 @@
"""
return {'app': app, 'db': db}
-manager = Manager(app)
manager.add_command('shell', Shell(make_context=_context))
if __name__ == '__main__': |
43002f30c23f0a5d739a096ec8e9c445a9502f97 | manage.py | manage.py | #!/usr/bin/env python
import os
from app import create_app
from flask.ext.script import Manager, Server
from flask.ext.script.commands import ShowUrls, Clean
from waitress import serve
# default to dev config because no one should use this in
# production anyway
env = os.environ.get('APP_ENV', 'dev')
app = create_app('app.settings.%sConfig' % env.capitalize(), env=env)
manager = Manager(app)
manager.add_command("server", Server())
manager.add_command("show-urls", ShowUrls())
manager.add_command("clean", Clean())
@manager.shell
def make_shell_context():
""" Creates a python REPL with several default imports
in the context of the app
"""
return dict(app=app)
if __name__ == "__main__":
serve(manager)
| #!/usr/bin/env python
import os
from app import app, assets
from flask.ext.script import Manager, Server
from flask.ext.assets import ManageAssets
from flask.ext.script.commands import ShowUrls, Clean
# default to dev config because no one should use this in
# production anyway
env = os.environ.get('APP_ENV', 'dev')
manager = Manager(app)
assets.environment = app.jinja_env.assets_environment
manager.add_command('assets', ManageAssets(assets))
manager.add_command('server', Server())
manager.add_command('show-urls', ShowUrls())
manager.add_command('clean', Clean())
if __name__ == '__main__':
manager.run()
| Add assets command and misc clean-up | Add assets command and misc clean-up
| Python | mit | doomspork/seancallan.com,doomspork/seancallan.com,doomspork/seancallan.com | ---
+++
@@ -2,29 +2,23 @@
import os
-from app import create_app
+from app import app, assets
from flask.ext.script import Manager, Server
+from flask.ext.assets import ManageAssets
from flask.ext.script.commands import ShowUrls, Clean
-from waitress import serve
# default to dev config because no one should use this in
# production anyway
env = os.environ.get('APP_ENV', 'dev')
-app = create_app('app.settings.%sConfig' % env.capitalize(), env=env)
+
manager = Manager(app)
-manager.add_command("server", Server())
-manager.add_command("show-urls", ShowUrls())
-manager.add_command("clean", Clean())
+assets.environment = app.jinja_env.assets_environment
+manager.add_command('assets', ManageAssets(assets))
+manager.add_command('server', Server())
+manager.add_command('show-urls', ShowUrls())
+manager.add_command('clean', Clean())
-@manager.shell
-def make_shell_context():
- """ Creates a python REPL with several default imports
- in the context of the app
- """
-
- return dict(app=app)
-
-if __name__ == "__main__":
- serve(manager)
+if __name__ == '__main__':
+ manager.run() |
084a923d928996022936e5c942e69876dc409b5e | edx_data_research/cli/commands.py | edx_data_research/cli/commands.py | """
In this module we define the interface between the cli input provided
by the user and the analytics required by the user
"""
from edx_data_research import parsing
from edx_data_research import reporting
def cmd_report_basic(args):
"""
Run basic analytics
"""
edx_obj = reporting.Basic(args)
getattr(edx_obj, args.basic.replace('-', '_'))()
def cmd_report_problem_ids(args):
edx_obj = reporting.ProblemIds(args)
getattr(edx_obj, args.report.replace('-', '_'))()
def cmd_report_stats(args):
edx_obj = reporting.Stats(args)
getattr(edx_obj, args.report.replace('-', '_'))()
def cmd_parse_sql(args):
edx_obj = parsing.SQL(args)
edx_obj.migrate()
def cmd_parse_forum(args):
edx_obj = parsing.Forum(args)
edx_obj.migrate()
def cmd_parse_problem_ids(args):
edx_obj = parsing.ProblemIds(args)
edx_obj.migrate()
def cmd_parse_course_structure(args):
edx_obj = parsing.CourseStructure(args)
edx_obj.migrate()
def cmd_parse_tracking(args):
edx_obj = parsing.Tracking(args)
edx_obj.migrate()
| """
In this module we define the interface between the cli input provided
by the user and the analytics required by the user
"""
from edx_data_research import parsing
from edx_data_research import reporting
def cmd_report_basic(args):
"""
Run basic analytics
"""
edx_obj = reporting.Basic(args)
getattr(edx_obj, args.basic.replace('-', '_'))()
def cmd_report_problem_ids(args):
edx_obj = reporting.ProblemIds(args)
getattr(edx_obj, args.report.replace('-', '_'))()
def cmd_report_stats(args):
edx_obj = reporting.Stats(args)
getattr(edx_obj, args.report.replace('-', '_'))()
def cmd_parse_sql(args):
edx_obj = parsing.SQL(args)
edx_obj.migrate()
def cmd_parse_forum(args):
edx_obj = parsing.Forum(args)
edx_obj.migrate()
def cmd_parse_problem_ids(args):
edx_obj = parsing.ProblemIds(args)
edx_obj.migrate()
def cmd_parse_course_structure(args):
edx_obj = parsing.CourseStructure(args)
edx_obj.migrate()
def cmd_parse_tracking(args):
edx_obj = parsing.Tracking(args)
edx_obj.migrate()
def cmd_parse_course_tracking(args):
edx_obj = parsing.CourseTracking(args)
edx_obj.migrate()
| Define proxy function for course specific tracking logs | Define proxy function for course specific tracking logs | Python | mit | McGillX/edx_data_research,McGillX/edx_data_research,McGillX/edx_data_research | ---
+++
@@ -39,3 +39,7 @@
def cmd_parse_tracking(args):
edx_obj = parsing.Tracking(args)
edx_obj.migrate()
+
+def cmd_parse_course_tracking(args):
+ edx_obj = parsing.CourseTracking(args)
+ edx_obj.migrate() |
db960486f223e04fe08a8f2b9619aa887dcafeda | yuno.py | yuno.py | #!/usr/bin/env python3
import os
import re
import sys
from yuno.core import cli, config
from yuno.core.util import working_dir
def main(argv=None):
# Figure out where Yuno lives so plugins can cd correctly if they need to.
yuno_home = os.path.abspath(os.path.dirname(__file__))
config.update('YUNO_HOME', yuno_home)
with working_dir(yuno_home):
args, subcommand_args = cli.get_cli_args()
load_settings(args.runtime_settings, args.command)
program = __import__(
'yuno.{command}.{command}'.format(command=args.command),
fromlist=['yuno.' + args.command]
)
program.main(subcommand_args)
def load_settings(runtime_settings, plugin_name):
plugin_name = re.sub('[^a-z0-9_]', '', plugin_name, flags=re.I)
plugin_settings_file = 'yuno/%s/settings/config.json' % plugin_name
config.load_default()
if os.path.isfile(plugin_settings_file):
config.load_json(plugin_settings_file)
for override in runtime_settings or []:
key = override[0]
if isinstance(getattr(config.config, key), list):
value = override[1:]
else:
value = override[1]
config.update(key, value)
if __name__ == '__main__':
main()
| #!/usr/bin/env python3
import os
import re
import sys
from yuno.core import cli, config
from yuno.core.util import working_dir
def main(argv=None):
# Figure out where Yuno lives so plugins can cd correctly if they need to.
yuno_home = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
config.update('YUNO_HOME', yuno_home)
with working_dir(yuno_home):
args, subcommand_args = cli.get_cli_args()
load_settings(args.runtime_settings, args.command)
program = __import__(
'yuno.{command}.{command}'.format(command=args.command),
fromlist=['yuno.' + args.command]
)
program.main(subcommand_args)
def load_settings(runtime_settings, plugin_name):
plugin_name = re.sub('[^a-z0-9_]', '', plugin_name, flags=re.I)
plugin_settings_file = 'yuno/%s/settings/config.json' % plugin_name
config.load_default()
if os.path.isfile(plugin_settings_file):
config.load_json(plugin_settings_file)
for override in runtime_settings or []:
key = override[0]
if isinstance(getattr(config.config, key), list):
value = override[1:]
else:
value = override[1]
config.update(key, value)
if __name__ == '__main__':
main()
| Resolve symlinks when detecting YUNO_HOME. | Resolve symlinks when detecting YUNO_HOME.
| Python | mit | bulatb/yuno,bulatb/yuno | ---
+++
@@ -10,7 +10,7 @@
def main(argv=None):
# Figure out where Yuno lives so plugins can cd correctly if they need to.
- yuno_home = os.path.abspath(os.path.dirname(__file__))
+ yuno_home = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
config.update('YUNO_HOME', yuno_home)
with working_dir(yuno_home): |
0adc28fffafbce685dc74009891ced9333b76eb9 | minify.py | minify.py | """A Pelican plugin which minifies HTML pages."""
from logging import getLogger
from os import walk
from os.path import join
from htmlmin.minify import html_minify as min
from pelican import signals
logger = getLogger(__name__)
def minify_html(pelican):
"""Minify all HTML files.
:param pelican: The Pelican instance.
"""
for dirpath, _, filenames in walk(pelican.settings['OUTPUT_PATH']):
for name in filenames:
if name.endswith('.html'):
filepath = join(dirpath, name)
create_minified_file(filepath)
def create_minified_file(filename):
"""Create a minified HTML file, overwriting the original.
:param str filename: The file to minify.
"""
uncompressed = open(filename).read()
with open(filename, 'wb') as f:
try:
logger.debug('Minifying: %s' % filename)
compressed = min(uncompressed)
f.write(compressed)
except Exception, ex:
logger.critical('HTML Minification failed: %s' % ex)
finally:
f.close()
def register():
"""Run the HTML minification stuff after all articles have been generated,
at the very end of the processing loop.
"""
signals.finalized.connect(minify_html)
| """A Pelican plugin which minifies HTML pages."""
from logging import getLogger
from os import walk
from os.path import join
import sys
from htmlmin.minify import html_minify as min
from pelican import signals
# we need save unicode strings to files
if sys.version_info[0] < 3:
import codecs
_open_func_bak = open # Make a back up, just in case
open = codecs.open
logger = getLogger(__name__)
def minify_html(pelican):
"""Minify all HTML files.
:param pelican: The Pelican instance.
"""
for dirpath, _, filenames in walk(pelican.settings['OUTPUT_PATH']):
for name in filenames:
if name.endswith('.html'):
filepath = join(dirpath, name)
create_minified_file(filepath)
def create_minified_file(filename):
"""Create a minified HTML file, overwriting the original.
:param str filename: The file to minify.
"""
uncompressed = open(filename).read()
with open(filename, 'w', encoding='utf-8') as f:
try:
logger.debug('Minifying: %s' % filename)
compressed = min(uncompressed)
f.write(compressed)
except Exception, ex:
logger.critical('HTML Minification failed: %s' % ex)
finally:
f.close()
def register():
"""Run the HTML minification stuff after all articles have been generated,
at the very end of the processing loop.
"""
signals.finalized.connect(minify_html)
| Save unicode strings to file | Save unicode strings to file
Fix for #1 'ascii' codec can't encode character... | Python | unlicense | rdegges/pelican-minify | ---
+++
@@ -4,10 +4,16 @@
from logging import getLogger
from os import walk
from os.path import join
+import sys
from htmlmin.minify import html_minify as min
from pelican import signals
+# we need save unicode strings to files
+if sys.version_info[0] < 3:
+ import codecs
+ _open_func_bak = open # Make a back up, just in case
+ open = codecs.open
logger = getLogger(__name__)
@@ -30,7 +36,7 @@
:param str filename: The file to minify.
"""
uncompressed = open(filename).read()
- with open(filename, 'wb') as f:
+ with open(filename, 'w', encoding='utf-8') as f:
try:
logger.debug('Minifying: %s' % filename)
compressed = min(uncompressed) |
395d35eafba14b52a301cb2cf0e7345ae7e0430c | src/argparser.py | src/argparser.py | """ArgumentParser with Italian translation."""
import argparse
import sys
def _callable(obj):
return hasattr(obj, '__call__') or hasattr(obj, '__bases__')
class ArgParser(argparse.ArgumentParser):
def __init__(self, **kwargs):
if kwargs.get('parent', None) is None:
kwargs['parents'] = []
super().__init__(**kwargs)
def set_default_subparser(self, name, args=None):
"""
Default subparser selection.
name: is the name of the subparser to call by default
args: if set is the argument list handed to parse_args()
"""
subparser_found = False
for arg in sys.argv[1:]:
if arg in ['-h', '--help']: # global help if no subparser
break
else:
for x in self._subparsers._actions:
if not isinstance(x, argparse._SubParsersAction):
continue
for sp_name in x._name_parser_map.keys():
if sp_name in sys.argv[1:]:
subparser_found = True
if not subparser_found:
# insert default in first position, this implies no
# global options without a sub_parsers specified
if args is None:
sys.argv.insert(1, name)
else:
args.insert(0, name) | """ArgumentParser with Italian translation."""
import argparse
import sys
def _callable(obj):
return hasattr(obj, '__call__') or hasattr(obj, '__bases__')
class ArgParser(argparse.ArgumentParser):
def __init__(self, **kwargs):
kwargs.setdefault('parents', [])
super().__init__(**kwargs)
def set_default_subparser(self, name, args=None):
"""
Default subparser selection.
name: is the name of the subparser to call by default
args: if set is the argument list handed to parse_args()
"""
subparser_found = False
for arg in sys.argv[1:]:
if arg in ['-h', '--help']: # global help if no subparser
break
else:
for x in self._subparsers._actions:
if not isinstance(x, argparse._SubParsersAction):
continue
for sp_name in x._name_parser_map.keys():
if sp_name in sys.argv[1:]:
subparser_found = True
if not subparser_found:
# insert default in first position, this implies no
# global options without a sub_parsers specified
if args is None:
sys.argv.insert(1, name)
else:
args.insert(0, name)
| FIx wrong key name and simplify the assignment | FIx wrong key name and simplify the assignment | Python | mit | claudio-unipv/pvcheck,claudio-unipv/pvcheck | ---
+++
@@ -10,8 +10,7 @@
class ArgParser(argparse.ArgumentParser):
def __init__(self, **kwargs):
- if kwargs.get('parent', None) is None:
- kwargs['parents'] = []
+ kwargs.setdefault('parents', [])
super().__init__(**kwargs)
|
12096f2961f72e250e16b168c053e89277e442a5 | test.py | test.py | import dis
import time
import pyte
# Create a new consts value.
consts = pyte.create_consts(None, "Hello, world!")
# New varnames values
varnames = pyte.create_varnames()
# Create names (for globals)
names = pyte.create_names("print")
bc = [pyte.call.CALL_FUNCTION(names[0], consts[1]),
pyte.tokens.RETURN_VALUE]
# Compile the code.
func = pyte.compile(bc, consts, names, varnames, stack_size=99)
print("==================================================")
print(dis.code_info(func))
print("\nFunction disassembly: ")
dis.dis(func)
print("\n==================================================\n")
time.sleep(0.05)
a = func()
| import dis
import time
import pyte
# Create a new consts value.
consts = pyte.create_consts(None, "Hello, world!")
# New varnames values
varnames = pyte.create_varnames()
# Create names (for globals)
names = pyte.create_names("print")
bc = [pyte.ops.CALL_FUNCTION(names[0], consts[1]),
pyte.ops.END_FUNCTION(consts[0])]
# Compile the code.
func = pyte.compile(bc, consts, names, varnames, stack_size=99)
print("==================================================")
print(dis.code_info(func))
print("\nFunction disassembly: ")
dis.dis(func)
print("\n==================================================\n")
time.sleep(0.05)
a = func()
| Add instruction for returning a value | Add instruction for returning a value
| Python | mit | SunDwarf/Pyte | ---
+++
@@ -10,8 +10,8 @@
# Create names (for globals)
names = pyte.create_names("print")
-bc = [pyte.call.CALL_FUNCTION(names[0], consts[1]),
- pyte.tokens.RETURN_VALUE]
+bc = [pyte.ops.CALL_FUNCTION(names[0], consts[1]),
+ pyte.ops.END_FUNCTION(consts[0])]
# Compile the code.
func = pyte.compile(bc, consts, names, varnames, stack_size=99) |
0bd1865730106d2573acb04d95b23290e935f4c4 | util.py | util.py | from math import sin, cos, asin, sqrt
def hav(lona, lonb, lata, latb):
# ported from latlontools
# assume latitude and longitudes are in radians
diff_lat = lata - latb
diff_lon = lona - lonb
a = sin(diff_lat/2)**2 + cos(lona) * cos(latb) * sin(diff_lon/2)**2
c = 2 * asin(sqrt(a))
r = 6371 # radius of earth in km
return c * r | from math import sin, cos, asin, sqrt
def hav(lonlata, lonlatb):
# ported from latlontools
# assume latitude and longitudes are in radians
lona = lonlata[0]
lata = lonlata[1]
lonb = lonlatb[0]
latb = lonlatb[1]
diff_lat = lata - latb
diff_lon = lona - lonb
a = sin(diff_lat/2)**2 + cos(lona) * cos(latb) * sin(diff_lon/2)**2
c = 2 * asin(sqrt(a))
r = 6371 # radius of earth in km
return c * r | Change call signature of hav to 2 pairs | Change call signature of hav to 2 pairs
| Python | bsd-3-clause | LemonPi/Pathtreker,LemonPi/Pathtreker,LemonPi/Pathtreker | ---
+++
@@ -1,8 +1,14 @@
from math import sin, cos, asin, sqrt
-def hav(lona, lonb, lata, latb):
+def hav(lonlata, lonlatb):
# ported from latlontools
# assume latitude and longitudes are in radians
+ lona = lonlata[0]
+ lata = lonlata[1]
+
+ lonb = lonlatb[0]
+ latb = lonlatb[1]
+
diff_lat = lata - latb
diff_lon = lona - lonb
|
de7219dd9d40f316dc0dd6f6c2cad68e66898762 | tests/test_live.py | tests/test_live.py | """
Tests run against live mail providers.
These aren't generally run as part of the test suite.
"""
import os
import pytest
from aiosmtplib import SMTP, SMTPAuthenticationError, SMTPStatus
pytestmark = [
pytest.mark.skipif(
os.environ.get("CI") == "true",
reason="No tests against real servers on CI servers",
),
pytest.mark.asyncio(),
]
async def test_starttls_gmail():
client = SMTP(hostname="smtp.gmail.com", port=587, use_tls=False)
await client.connect(timeout=1.0)
await client.ehlo()
await client.starttls(validate_certs=False)
response = await client.ehlo()
assert response.code == SMTPStatus.completed
assert "smtp.gmail.com at your service" in response.message
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
@pytest.mark.asyncio()
async def test_qq_login():
client = SMTP(hostname="smtp.qq.com", port=587, use_tls=False)
await client.connect(timeout=2.0)
await client.ehlo()
await client.starttls(validate_certs=False)
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
| """
Tests run against live mail providers.
These aren't generally run as part of the test suite.
"""
import os
import pytest
from aiosmtplib import SMTP, SMTPAuthenticationError, SMTPStatus
pytestmark = [
pytest.mark.skipif(
os.environ.get("AIOSMTPLIB_LIVE_TESTS") != "true",
reason="No tests against real servers unless requested",
),
pytest.mark.asyncio(),
]
async def test_starttls_gmail():
client = SMTP(hostname="smtp.gmail.com", port=587, use_tls=False)
await client.connect(timeout=1.0)
await client.ehlo()
await client.starttls(validate_certs=False)
response = await client.ehlo()
assert response.code == SMTPStatus.completed
assert "smtp.gmail.com at your service" in response.message
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
@pytest.mark.asyncio()
async def test_qq_login():
client = SMTP(hostname="smtp.qq.com", port=587, use_tls=False)
await client.connect(timeout=2.0)
await client.ehlo()
await client.starttls(validate_certs=False)
with pytest.raises(SMTPAuthenticationError):
await client.login("test", "test")
| Disable live tests by default | Disable live tests by default
| Python | mit | cole/aiosmtplib | ---
+++
@@ -12,8 +12,8 @@
pytestmark = [
pytest.mark.skipif(
- os.environ.get("CI") == "true",
- reason="No tests against real servers on CI servers",
+ os.environ.get("AIOSMTPLIB_LIVE_TESTS") != "true",
+ reason="No tests against real servers unless requested",
),
pytest.mark.asyncio(),
] |
94475ea2ed73e57870e8947a5b3ed474a70447e4 | src/sentry/signals.py | src/sentry/signals.py | from __future__ import absolute_import
from functools import wraps
from django.dispatch import Signal
class BetterSignal(Signal):
def connect(self, receiver=None, **kwargs):
"""
Support decorator syntax:
>>> @signal.connect(sender=type)
>>> def my_receiver(**kwargs):
>>> pass
"""
def wrapped(func):
return super(BetterSignal, self).connect(func, **kwargs)
if receiver is None:
return wrapped
return wraps(receiver)(wrapped(receiver))
regression_signal = BetterSignal(providing_args=["instance"])
buffer_incr_complete = BetterSignal(providing_args=["model", "columns", "extra", "result"])
event_received = BetterSignal(providing_args=["ip"])
pending_delete = BetterSignal(providing_args=["instance"])
event_processed = BetterSignal(providing_args=['project', 'group', 'event'])
# Organization Onboarding Signals
project_created = BetterSignal(providing_args=["project", "user"])
first_event_pending = BetterSignal(providing_args=["project", "user"])
first_event_received = BetterSignal(providing_args=["project", "group"])
member_invited = BetterSignal(providing_args=["member", "user"])
member_joined = BetterSignal(providing_args=["member"])
issue_tracker_used = BetterSignal(providing_args=["plugin", "project", "user"])
plugin_enabled = BetterSignal(providing_args=["plugin", "project", "user"])
| from __future__ import absolute_import
from functools import wraps
from django.dispatch import Signal
class BetterSignal(Signal):
def connect(self, receiver=None, **kwargs):
"""
Support decorator syntax:
>>> @signal.connect(sender=type)
>>> def my_receiver(**kwargs):
>>> pass
"""
def wrapped(func):
return super(BetterSignal, self).connect(func, **kwargs)
if receiver is None:
return wrapped
return wraps(receiver)(wrapped(receiver))
regression_signal = BetterSignal(providing_args=["instance"])
buffer_incr_complete = BetterSignal(providing_args=["model", "columns", "extra", "result"])
event_received = BetterSignal(providing_args=["ip", "auth", "data"])
pending_delete = BetterSignal(providing_args=["instance"])
event_processed = BetterSignal(providing_args=['project', 'group', 'event'])
# Organization Onboarding Signals
project_created = BetterSignal(providing_args=["project", "user"])
first_event_pending = BetterSignal(providing_args=["project", "user"])
first_event_received = BetterSignal(providing_args=["project", "group"])
member_invited = BetterSignal(providing_args=["member", "user"])
member_joined = BetterSignal(providing_args=["member"])
issue_tracker_used = BetterSignal(providing_args=["plugin", "project", "user"])
plugin_enabled = BetterSignal(providing_args=["plugin", "project", "user"])
| Add missing args to event_received | Add missing args to event_received
| Python | bsd-3-clause | jean/sentry,mitsuhiko/sentry,ifduyue/sentry,zenefits/sentry,JamesMura/sentry,mvaled/sentry,beeftornado/sentry,fotinakis/sentry,JamesMura/sentry,jean/sentry,mitsuhiko/sentry,gencer/sentry,jean/sentry,zenefits/sentry,BuildingLink/sentry,JackDanger/sentry,looker/sentry,looker/sentry,zenefits/sentry,beeftornado/sentry,mvaled/sentry,ifduyue/sentry,zenefits/sentry,jean/sentry,looker/sentry,BuildingLink/sentry,gencer/sentry,fotinakis/sentry,BuildingLink/sentry,JamesMura/sentry,JamesMura/sentry,alexm92/sentry,alexm92/sentry,mvaled/sentry,fotinakis/sentry,beeftornado/sentry,gencer/sentry,gencer/sentry,JackDanger/sentry,mvaled/sentry,ifduyue/sentry,fotinakis/sentry,JackDanger/sentry,BuildingLink/sentry,jean/sentry,BuildingLink/sentry,JamesMura/sentry,mvaled/sentry,ifduyue/sentry,mvaled/sentry,ifduyue/sentry,looker/sentry,alexm92/sentry,looker/sentry,zenefits/sentry,gencer/sentry | ---
+++
@@ -25,7 +25,7 @@
regression_signal = BetterSignal(providing_args=["instance"])
buffer_incr_complete = BetterSignal(providing_args=["model", "columns", "extra", "result"])
-event_received = BetterSignal(providing_args=["ip"])
+event_received = BetterSignal(providing_args=["ip", "auth", "data"])
pending_delete = BetterSignal(providing_args=["instance"])
event_processed = BetterSignal(providing_args=['project', 'group', 'event'])
|
8ce70e6b1af50f59102ccc33086a47fb36037fec | py2js.py | py2js.py | #! /usr/bin/env python
from optparse import OptionParser
from compiler import convert_py2js
def main():
parser = OptionParser(usage="%prog [options] filename",
description="Python to JavaScript compiler.")
parser.add_option("--include-builtins",
action="store_true", dest="include_builtins",
default=False, help="include py-builtins.js library in the output")
options, args = parser.parse_args()
if len(args) == 1:
filename = args[0]
s = open(filename).read() #unsafe for large files!
builtins = open("py-builtins.js").read() # unsafe fro large files!
js = convert_py2js(s)
if options.include_builtins:
print builtins
print js
else:
parser.print_help()
if __name__ == '__main__':
main()
| #! /usr/bin/env python
from optparse import OptionParser
from compiler import convert_py2js
def main():
parser = OptionParser(usage="%prog [options] filename",
description="Python to JavaScript compiler.")
parser.add_option("--include-builtins",
action="store_true", dest="include_builtins",
default=False, help="include py-builtins.js library in the output")
options, args = parser.parse_args()
if len(args) == 1:
filename = args[0]
s = open(filename).read() #unsafe for large files!
builtins = open("py-builtins.js").read() # unsafe for large files!
js = convert_py2js(s)
if options.include_builtins:
print builtins
print js
else:
parser.print_help()
if __name__ == '__main__':
main()
| Fix a typo "fro" -> "for" | Fix a typo "fro" -> "for"
| Python | mit | buchuki/pyjaco,buchuki/pyjaco,qsnake/py2js,chrivers/pyjaco,qsnake/py2js,buchuki/pyjaco,chrivers/pyjaco,chrivers/pyjaco | ---
+++
@@ -13,7 +13,7 @@
if len(args) == 1:
filename = args[0]
s = open(filename).read() #unsafe for large files!
- builtins = open("py-builtins.js").read() # unsafe fro large files!
+ builtins = open("py-builtins.js").read() # unsafe for large files!
js = convert_py2js(s)
if options.include_builtins:
print builtins |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.