| Column        | Type          | Range      |
|---------------|---------------|------------|
| commit        | stringlengths | 40–40      |
| old_file      | stringlengths | 4–150      |
| new_file      | stringlengths | 4–150      |
| old_contents  | stringlengths | 0–3.26k    |
| new_contents  | stringlengths | 1–4.43k    |
| subject       | stringlengths | 15–501     |
| message       | stringlengths | 15–4.06k   |
| lang          | stringclasses | 4 values   |
| license       | stringclasses | 13 values  |
| repos         | stringlengths | 5–91.5k    |
| diff          | stringlengths | 0–4.35k    |
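The raw records below follow this schema, one field value per line (embedded newlines in code, diffs, and commit messages are collapsed in this dump). As a rough illustration of how such records could be loaded and filtered, here is a minimal sketch assuming a Hugging Face `datasets`-style table with the columns listed above; the dataset path in the example is a hypothetical placeholder, not the dump's real identifier.

```python
# Minimal sketch only. The dataset path "user/commit-diffs" is a hypothetical
# placeholder, not the real identifier for this dump; the column names are the
# ones listed in the schema table above.
from datasets import load_dataset

ds = load_dataset("user/commit-diffs", split="train")  # hypothetical path

# Keep Python rows that actually carry a diff (diff length may be 0 per the schema).
python_rows = ds.filter(lambda row: row["lang"] == "Python" and row["diff"])

for row in python_rows.select(range(3)):
    print(row["commit"], row["old_file"], "->", row["new_file"])
    print(row["subject"])
    print(row["diff"].splitlines()[:5])  # head of the unified diff
```

Filtering on `lang` and a non-empty `diff` mirrors the schema bounds above, where `lang` is one of 4 classes and `diff` can be empty.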
a2582b3352582034af1b8dff99d4ac39a15d9b54
shuffler.py
shuffler.py
#!/usr/bin/env python3 import argparse import random import sys DESCRIPTION = '''Shuffle the arguments received, if called without arguments the lines read from stdin will be shuffled and printed to stdout''' def get_list(): return sys.stdin.readlines() def print_list(list_): for elem in list_: print(elem.rstrip()) def main(): parser = argparse.ArgumentParser(description=DESCRIPTION) (args, list_) = parser.parse_known_args() r = random.SystemRandom() if not list_: list_ = get_list() r.shuffle(list_) print_list(list_) if __name__ == '__main__': main()
#!/usr/bin/env python3 import argparse import random import sys DESCRIPTION = '''Shuffle the arguments received, if called without arguments the lines read from stdin will be shuffled and printed to stdout''' def get_list(): return sys.stdin.readlines() def print_list(list_): for elem in list_: print(elem.rstrip()) def main(): parser = argparse.ArgumentParser(description=DESCRIPTION) (args, list_) = parser.parse_known_args() r = random.SystemRandom() if not list_: list_ = get_list() r.shuffle(list_) print_list(list_) if __name__ == '__main__': main()
Fix script to pass pep8
Fix script to pass pep8
Python
apache-2.0
aebm/shuffler
--- +++ @@ -4,16 +4,19 @@ import random import sys -DESCRIPTION = '''Shuffle the arguments received, if called without arguments +DESCRIPTION = '''Shuffle the arguments received, if called without arguments the lines read from stdin will be shuffled and printed to stdout''' + def get_list(): return sys.stdin.readlines() + def print_list(list_): for elem in list_: print(elem.rstrip()) + def main(): parser = argparse.ArgumentParser(description=DESCRIPTION) @@ -24,5 +27,6 @@ r.shuffle(list_) print_list(list_) + if __name__ == '__main__': main()
6a83c4808d7f1104aba832f53bcd25fb98be1686
pycrs/__init__.py
pycrs/__init__.py
""" # PyCRS PyCRS is a pure Python GIS package for reading, writing, and converting between various common coordinate reference system (CRS) string and data source formats. - [Home Page](http://github.com/karimbahgat/PyCRS) - [API Documentation](http://pythonhosted.org/PyCRS) """ __version__ = "0.1.4" from . import loader from . import parser from . import utils from .elements.containers import CRS
""" # PyCRS PyCRS is a pure Python GIS package for reading, writing, and converting between various common coordinate reference system (CRS) string and data source formats. - [Home Page](http://github.com/karimbahgat/PyCRS) - [API Documentation](http://pythonhosted.org/PyCRS) """ __version__ = "1.0.0-dev" from . import loader from . import parser from . import utils from .elements.containers import CRS
Bump to 1.0 dev version
Bump to 1.0 dev version
Python
mit
karimbahgat/PyCRS
--- +++ @@ -9,7 +9,7 @@ """ -__version__ = "0.1.4" +__version__ = "1.0.0-dev" from . import loader
a510d20cebe2aff86a6bf842d063b5df8937a7ec
raven/contrib/pylons/__init__.py
raven/contrib/pylons/__init__.py
""" raven.contrib.pylons ~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from raven.middleware import Sentry as Middleware from raven.base import Client class Sentry(Middleware): def __init__(self, app, config): if not config.get('sentry.servers'): raise TypeError('The sentry.servers config variable is required') client = Client( servers=config['sentry.servers'].split(), name=config.get('sentry.name'), key=config.get('sentry.key'), public_key=config.get('sentry.public_key'), secret_key=config.get('sentry.secret_key'), project=config.get('sentry.site_project'), site=config.get('sentry.site_name'), include_paths=config.get( 'sentry.include_paths', '').split() or None, exclude_paths=config.get( 'sentry.exclude_paths', '').split() or None, ) super(Sentry, self).__init__(app, client)
""" raven.contrib.pylons ~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from raven.middleware import Sentry as Middleware from raven.base import Client def list_from_setting(config, setting): value = config.get(setting) if not value: return None return value.split() class Sentry(Middleware): def __init__(self, app, config): if not config.get('sentry.servers'): raise TypeError('The sentry.servers config variable is required') servers = config.get('sentry_servers') if servers: servers = servers.split() client = Client( dsn=config.get('sentry.dsn'), servers=list_from_setting(config, 'sentry.servers'), name=config.get('sentry.name'), key=config.get('sentry.key'), public_key=config.get('sentry.public_key'), secret_key=config.get('sentry.secret_key'), project=config.get('sentry.project'), site=config.get('sentry.site'), include_paths=list_from_setting(config, 'sentry.include_paths'), exclude_paths=list_from_setting(config, 'sentry.exclude_paths'), ) super(Sentry, self).__init__(app, client)
Update site and project names for pylons integration. Fix behavior of empty lists. Add DSN.
Update site and project names for pylons integration. Fix behavior of empty lists. Add DSN.
Python
bsd-3-clause
tarkatronic/opbeat_python,tarkatronic/opbeat_python,inspirehep/raven-python,ticosax/opbeat_python,jbarbuto/raven-python,jmagnusson/raven-python,ronaldevers/raven-python,akalipetis/raven-python,collective/mr.poe,patrys/opbeat_python,arthurlogilab/raven-python,percipient/raven-python,inspirehep/raven-python,ronaldevers/raven-python,icereval/raven-python,jmp0xf/raven-python,Photonomie/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,nikolas/raven-python,patrys/opbeat_python,patrys/opbeat_python,dbravender/raven-python,akheron/raven-python,lopter/raven-python-old,akheron/raven-python,inspirehep/raven-python,akheron/raven-python,smarkets/raven-python,johansteffner/raven-python,arthurlogilab/raven-python,lepture/raven-python,danriti/raven-python,getsentry/raven-python,arthurlogilab/raven-python,johansteffner/raven-python,arthurlogilab/raven-python,openlabs/raven,recht/raven-python,beniwohli/apm-agent-python,ewdurbin/raven-python,smarkets/raven-python,alex/raven,jmagnusson/raven-python,jbarbuto/raven-python,beniwohli/apm-agent-python,someonehan/raven-python,hzy/raven-python,icereval/raven-python,danriti/raven-python,someonehan/raven-python,jmp0xf/raven-python,akalipetis/raven-python,icereval/raven-python,ewdurbin/raven-python,ticosax/opbeat_python,nikolas/raven-python,ewdurbin/raven-python,jmp0xf/raven-python,nikolas/raven-python,patrys/opbeat_python,daikeren/opbeat_python,Photonomie/raven-python,dbravender/raven-python,getsentry/raven-python,beniwohli/apm-agent-python,ronaldevers/raven-python,icereval/raven-python,jbarbuto/raven-python,johansteffner/raven-python,dirtycoder/opbeat_python,recht/raven-python,danriti/raven-python,hzy/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,akalipetis/raven-python,smarkets/raven-python,hzy/raven-python,lepture/raven-python,inspirehep/raven-python,daikeren/opbeat_python,percipient/raven-python,someonehan/raven-python,dbravender/raven-python,recht/raven-python,percipient/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,jbarbuto/raven-python,jmagnusson/raven-python,daikeren/opbeat_python,Goldmund-Wyldebeast-Wunderliebe/raven-python,smarkets/raven-python,dirtycoder/opbeat_python,tarkatronic/opbeat_python,nikolas/raven-python,Photonomie/raven-python,getsentry/raven-python,dirtycoder/opbeat_python,beniwohli/apm-agent-python,ticosax/opbeat_python,lepture/raven-python
--- +++ @@ -9,23 +9,33 @@ from raven.base import Client +def list_from_setting(config, setting): + value = config.get(setting) + if not value: + return None + return value.split() + + class Sentry(Middleware): def __init__(self, app, config): if not config.get('sentry.servers'): raise TypeError('The sentry.servers config variable is required') + servers = config.get('sentry_servers') + if servers: + servers = servers.split() + client = Client( - servers=config['sentry.servers'].split(), + dsn=config.get('sentry.dsn'), + servers=list_from_setting(config, 'sentry.servers'), name=config.get('sentry.name'), key=config.get('sentry.key'), public_key=config.get('sentry.public_key'), secret_key=config.get('sentry.secret_key'), - project=config.get('sentry.site_project'), - site=config.get('sentry.site_name'), - include_paths=config.get( - 'sentry.include_paths', '').split() or None, - exclude_paths=config.get( - 'sentry.exclude_paths', '').split() or None, + project=config.get('sentry.project'), + site=config.get('sentry.site'), + include_paths=list_from_setting(config, 'sentry.include_paths'), + exclude_paths=list_from_setting(config, 'sentry.exclude_paths'), ) super(Sentry, self).__init__(app, client)
a3570205c90dd8757a833aed4f4069fbd33028e0
course/views.py
course/views.py
from django.shortcuts import render, redirect, get_object_or_404 from django.urls import reverse from mainmodels.models import Category, Course, CourseInCategory from django.contrib.auth.models import User # Create your views here. def createCourse(req): if req.method == 'POST': try: courseName = req.POST['courseName'] courseCategory = req.POST['courseCategory'] courseDesc = req.POST['courseDesc'] courseThumbnail = req.FILES['courseThumbnail'] coursePrice = req.POST['coursePrice'] owner = User.objects.get(username='nut') newCourse = Course(courseName=courseName, courseDesc=courseDesc,courseThumbnail=courseThumbnail, owner=owner, coursePrice=coursePrice, isDelete=False) newCourse.save() category = Category.objects.get(categoryID=courseCategory) newCourseCategory = CourseInCategory(category=category, course=newCourse) newCourseCategory.save() return render(req, 'course/createCourse.html', {'courseCategory':courseCategory, 'success': True, 'message': 'Create course successfully.'}) except: return render(req, 'course/createCourse.html', {'courseCategory':courseCategory, 'success': False, 'message': 'Create course failed.'}) else: courseCategory = Category.objects.all() return render(req, 'course/createCourse.html', {'courseCategory':courseCategory})
from django.shortcuts import render, redirect, get_object_or_404 from django.urls import reverse from mainmodels.models import Category, Course, CourseInCategory # Create your views here. def createCourse(req): if req.method == 'POST': try: courseName = req.POST['courseName'] courseCategory = req.POST['courseCategory'] courseDesc = req.POST['courseDesc'] courseThumbnail = req.FILES['courseThumbnail'] coursePrice = req.POST['coursePrice'] owner = req.user newCourse = Course(courseName=courseName, courseDesc=courseDesc,courseThumbnail=courseThumbnail, owner=owner, coursePrice=coursePrice, isDelete=False) newCourse.save() category = Category.objects.get(categoryID=courseCategory) newCourseCategory = CourseInCategory(category=category, course=newCourse) newCourseCategory.save() return render(req, 'course/createCourse.html', {'courseCategory':courseCategory, 'success': True, 'message': 'Create course successfully.'}) except: return render(req, 'course/createCourse.html', {'courseCategory':courseCategory, 'success': False, 'message': 'Create course failed.'}) else: courseCategory = Category.objects.all() return render(req, 'course/createCourse.html', {'courseCategory':courseCategory})
Remove fixed owner and make loged in user instead
Remove fixed owner and make loged in user instead
Python
apache-2.0
PNNutkung/Coursing-Field,PNNutkung/Coursing-Field,PNNutkung/Coursing-Field
--- +++ @@ -1,7 +1,6 @@ from django.shortcuts import render, redirect, get_object_or_404 from django.urls import reverse from mainmodels.models import Category, Course, CourseInCategory -from django.contrib.auth.models import User # Create your views here. def createCourse(req): if req.method == 'POST': @@ -11,7 +10,7 @@ courseDesc = req.POST['courseDesc'] courseThumbnail = req.FILES['courseThumbnail'] coursePrice = req.POST['coursePrice'] - owner = User.objects.get(username='nut') + owner = req.user newCourse = Course(courseName=courseName, courseDesc=courseDesc,courseThumbnail=courseThumbnail, owner=owner, coursePrice=coursePrice, isDelete=False) newCourse.save()
6f7890c8b29670f613b6a551ebac2b383f3a7a64
tests/test_recipes.py
tests/test_recipes.py
import unittest from brew.constants import IMPERIAL_UNITS from brew.constants import SI_UNITS from brew.recipes import Recipe from fixtures import grain_additions from fixtures import hop_additions from fixtures import recipe class TestRecipe(unittest.TestCase): def setUp(self): # Define Grains self.grain_additions = grain_additions # Define Hops self.hop_additions = hop_additions # Define Recipes self.recipe = recipe def test_str(self): out = str(self.recipe) self.assertEquals(out, 'pale ale') def test_set_units(self): self.assertEquals(self.recipe.units, IMPERIAL_UNITS) self.recipe.set_units(SI_UNITS) self.assertEquals(self.recipe.units, SI_UNITS) def test_set_raises(self): with self.assertRaises(Exception): self.recipe.set_units('bad') def test_validate(self): data = self.recipe.to_dict() Recipe.validate(data)
import unittest from brew.constants import IMPERIAL_UNITS from brew.constants import SI_UNITS from brew.recipes import Recipe from fixtures import grain_additions from fixtures import hop_additions from fixtures import recipe from fixtures import yeast class TestRecipe(unittest.TestCase): def setUp(self): # Define Grains self.grain_additions = grain_additions # Define Hops self.hop_additions = hop_additions # Define Yeast self.yeast = yeast # Define Recipes self.recipe = recipe def test_str(self): out = str(self.recipe) self.assertEquals(out, 'pale ale') def test_set_units(self): self.assertEquals(self.recipe.units, IMPERIAL_UNITS) self.recipe.set_units(SI_UNITS) self.assertEquals(self.recipe.units, SI_UNITS) def test_set_raises(self): with self.assertRaises(Exception): self.recipe.set_units('bad') def test_grains_units_mismatch_raises(self): grain_additions = [g.change_units() for g in self.grain_additions] with self.assertRaises(Exception): Recipe(name='pale ale', grain_additions=grain_additions, hop_additions=self.hop_additions, yeast=self.yeast) def test_hops_units_mismatch_raises(self): hop_additions = [h.change_units() for h in self.hop_additions] with self.assertRaises(Exception): Recipe(name='pale ale', grain_additions=self.grain_additions, hop_additions=hop_additions, yeast=self.yeast) def test_validate(self): data = self.recipe.to_dict() Recipe.validate(data)
Test units mismatch in recipe
Test units mismatch in recipe
Python
mit
chrisgilmerproj/brewday,chrisgilmerproj/brewday
--- +++ @@ -6,6 +6,7 @@ from fixtures import grain_additions from fixtures import hop_additions from fixtures import recipe +from fixtures import yeast class TestRecipe(unittest.TestCase): @@ -16,6 +17,9 @@ # Define Hops self.hop_additions = hop_additions + + # Define Yeast + self.yeast = yeast # Define Recipes self.recipe = recipe @@ -33,6 +37,22 @@ with self.assertRaises(Exception): self.recipe.set_units('bad') + def test_grains_units_mismatch_raises(self): + grain_additions = [g.change_units() for g in self.grain_additions] + with self.assertRaises(Exception): + Recipe(name='pale ale', + grain_additions=grain_additions, + hop_additions=self.hop_additions, + yeast=self.yeast) + + def test_hops_units_mismatch_raises(self): + hop_additions = [h.change_units() for h in self.hop_additions] + with self.assertRaises(Exception): + Recipe(name='pale ale', + grain_additions=self.grain_additions, + hop_additions=hop_additions, + yeast=self.yeast) + def test_validate(self): data = self.recipe.to_dict() Recipe.validate(data)
f0c374eba55cdeb56bf3526ea0da041556f6ffe2
tests/test_yamlmod.py
tests/test_yamlmod.py
import os import sys from nose.tools import * def setup_yamlmod(): import yamlmod reload(yamlmod) def teardown_yamlmod(): import yamlmod for hook in sys.meta_path: if isinstance(hook, yamlmod.YamlImportHook): sys.meta_path.remove(hook) break @with_setup(setup_yamlmod, teardown_yamlmod) def test_import_installs_hook(): import yamlmod hooks = [] for hook in sys.meta_path: if isinstance(hook, yamlmod.YamlImportHook): hooks.append(hook) eq_(len(hooks), 1, 'did not find exactly one hook') @with_setup(setup_yamlmod, teardown_yamlmod) def test_import_fixture(): import fixture eq_(fixture.debug, True) eq_(fixture.domain, 'example.com') eq_(fixture.users, ['alice', 'bob', 'cathy']) @with_setup(setup_yamlmod, teardown_yamlmod) def test_hidden_attributes(): import fixture eq_(fixture.__name__, 'fixture') eq_(fixture.__file__, os.path.join(os.path.dirname(__file__), 'fixture.yml'))
import os import sys from nose.tools import * try: from importlib import reload except ImportError: pass def setup_yamlmod(): import yamlmod reload(yamlmod) def teardown_yamlmod(): import yamlmod for hook in sys.meta_path: if isinstance(hook, yamlmod.YamlImportHook): sys.meta_path.remove(hook) break @with_setup(setup_yamlmod, teardown_yamlmod) def test_import_installs_hook(): import yamlmod hooks = [] for hook in sys.meta_path: if isinstance(hook, yamlmod.YamlImportHook): hooks.append(hook) eq_(len(hooks), 1, 'did not find exactly one hook') @with_setup(setup_yamlmod, teardown_yamlmod) def test_import_fixture(): import fixture eq_(fixture.debug, True) eq_(fixture.domain, 'example.com') eq_(fixture.users, ['alice', 'bob', 'cathy']) @with_setup(setup_yamlmod, teardown_yamlmod) def test_hidden_attributes(): import fixture eq_(fixture.__name__, 'fixture') eq_(fixture.__file__, os.path.join(os.path.dirname(__file__), 'fixture.yml'))
Fix tests on python 3
Fix tests on python 3
Python
mit
sciyoshi/yamlmod
--- +++ @@ -2,6 +2,11 @@ import sys from nose.tools import * + +try: + from importlib import reload +except ImportError: + pass def setup_yamlmod(): import yamlmod
931a858dc1cfde1652d21e1ccd60a82dde683ce3
moxie/butterfield.py
moxie/butterfield.py
import os import json import asyncio from butterfield.utils import at_bot from aiodocker import Docker from aiocore import Service WEB_ROOT = os.environ.get("MOXIE_WEB_URL", "http://localhost:8888") @asyncio.coroutine def events(bot): docker = Docker() events = docker.events events.saferun() stream = events.listen() while True: el = yield from stream.get() yield from bot.post("#cron", "`{}`".format(str(el))) @asyncio.coroutine @at_bot def run(bot, message: "message"): runner = Service.resolve("moxie.cores.run.RunService") text = message.get("text", "") if text == "": yield from bot.post(message['channel'], "Invalid request") cmd, arg = text.split(" ", 1) if cmd == "run": job = arg yield from bot.post( message['channel'], "Doing bringup of {}".format(job)) try: yield from runner.run(job) except ValueError as e: yield from bot.post( message['channel'], "Gah, {job} failed - {e}".format(e=e, job=job) ) return yield from bot.post(message['channel'], "Job {job} online - {webroot}/container/{job}/".format( webroot=WEB_ROOT, job=job))
import os import json import asyncio from butterfield.utils import at_bot from aiodocker import Docker from aiocore import Service WEB_ROOT = os.environ.get("MOXIE_WEB_URL", "http://localhost:8888") @asyncio.coroutine def events(bot): docker = Docker() events = docker.events events.saferun() stream = events.listen() while True: el = yield from stream.get() yield from bot.post("#cron", "`{}`".format(str(el))) @asyncio.coroutine @at_bot def run(bot, message: "message"): runner = Service.resolve("moxie.cores.run.RunService") text = message.get("text", "") if text == "": yield from bot.post(message['channel'], "Invalid request") cmd, arg = text.split(" ", 1) if cmd == "run": job = arg yield from bot.post( message['channel'], "Doing bringup of {}".format(job)) try: yield from runner.run(job) except ValueError as e: yield from bot.post( message['channel'], "Gah, {job} failed - {e}".format(e=e, job=job) ) return yield from bot.post(message['channel'], "Job {job} online - {webroot}/container/{job}/".format( webroot=WEB_ROOT, job=job)) elif cmd == "yo": yield from bot.post( message['channel'], "Yo {}".format(message['user']))
Add simple "yo" bot command
Add simple "yo" bot command
Python
mit
paultag/moxie,loandy/moxie,mileswwatkins/moxie,mileswwatkins/moxie,paultag/moxie,loandy/moxie,loandy/moxie,paultag/moxie,rshorey/moxie,rshorey/moxie,rshorey/moxie,mileswwatkins/moxie
--- +++ @@ -48,3 +48,6 @@ yield from bot.post(message['channel'], "Job {job} online - {webroot}/container/{job}/".format( webroot=WEB_ROOT, job=job)) + elif cmd == "yo": + yield from bot.post( + message['channel'], "Yo {}".format(message['user']))
69baf66b13331d5936e8c540a2bb7eccb1d64cb7
invoice/views.py
invoice/views.py
from django.shortcuts import get_object_or_404 from invoice.models import Invoice from invoice.pdf import draw_pdf from invoice.utils import pdf_response def pdf_view(request, pk): invoice = get_object_or_404(Invoice, pk=pk) return pdf_response(draw_pdf, invoice.file_name(), invoice)
from django.shortcuts import get_object_or_404 from invoice.models import Invoice from invoice.pdf import draw_pdf from invoice.utils import pdf_response def pdf_view(request, pk): invoice = get_object_or_404(Invoice, pk=pk) return pdf_response(draw_pdf, invoice.file_name(), invoice) def pdf_user_view(request, invoice_id): invoice = get_object_or_404(Invoice, invoice_id=invoice_id, user=request.user) return pdf_response(draw_pdf, invoice.file_name(), invoice)
Add view for user to download invoice
Add view for user to download invoice
Python
bsd-3-clause
Chris7/django-invoice,Chris7/django-invoice,simonluijk/django-invoice
--- +++ @@ -7,3 +7,8 @@ def pdf_view(request, pk): invoice = get_object_or_404(Invoice, pk=pk) return pdf_response(draw_pdf, invoice.file_name(), invoice) + + +def pdf_user_view(request, invoice_id): + invoice = get_object_or_404(Invoice, invoice_id=invoice_id, user=request.user) + return pdf_response(draw_pdf, invoice.file_name(), invoice)
94f795dbbac32cbe1c83da1fb3cd3e55fdceef11
testFile.py
testFile.py
__author__ = 'adrie_000' import numpy as np import numpy.matrixlib as nm p = nm.matrix([[3, 3, 3], [3, 5, 7]]) q = p - p.mean(1) print q r = [np.array([q.getA()[0][k], q.getA()[1][k]]) for k in range(q.shape[1])] print r d = [np.linalg.norm(w) for w in r] print d
__author__ = 'adrie_000' import numpy as np import numpy.matrixlib as nm 'test' p = nm.matrix([[3, 3, 3], [3, 5, 7]]) q = p - p.mean(1) print q r = [np.array([q.getA()[0][k], q.getA()[1][k]]) for k in range(q.shape[1])] print r d = [np.linalg.norm(w) for w in r] print d
Test de pycharm par Simon.
Test de pycharm par Simon.
Python
apache-2.0
adrien-bellaiche/ia-cdf-rob-2015
--- +++ @@ -2,7 +2,7 @@ import numpy as np import numpy.matrixlib as nm - +'test' p = nm.matrix([[3, 3, 3], [3, 5, 7]]) q = p - p.mean(1) print q
013048e1d68174e71d4579e28efd0339144ce186
2017/11.07/python/jya_homework2.py
2017/11.07/python/jya_homework2.py
```python i = -1 sum = 0 one_to_ten = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] while i < 10: i += 1 sum += one_to_ten[i] print(sum) ```
```python i = -1 sum = 0 one_to_ten = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] while i < 10: i += 1 sum += one_to_ten[i] print(sum) ``` **Make function!** `like:` ```python def sum(number): # .. TODO return 0 ``` `I want to reuse this function, like:` ```python one_to_ten = sum(range(1,11)) one_to_five = sum(range(1,6)) five_to_ten = sum(range(5,11)) ```
Make function! and Use 4 tabs, not 8 tabs.
Make function! and Use 4 tabs, not 8 tabs.
Python
mit
Yokan-Study/study,Yokan-Study/study,Yokan-Study/study
--- +++ @@ -12,3 +12,18 @@ print(sum) ``` + +**Make function!** +`like:` +```python +def sum(number): + # .. TODO + return 0 +``` + +`I want to reuse this function, like:` +```python +one_to_ten = sum(range(1,11)) +one_to_five = sum(range(1,6)) +five_to_ten = sum(range(5,11)) +```
f7b351a43d99a6063c49dfdf8db60c654fd89b74
scrapi/processing/postgres.py
scrapi/processing/postgres.py
from __future__ import absolute_import import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "webview.settings") import logging from api.webview.models import Document from scrapi import events from scrapi.processing.base import BaseProcessor logger = logging.getLogger(__name__) class PostgresProcessor(BaseProcessor): NAME = 'postgres' @events.logged(events.PROCESSING, 'raw.postgres') def process_raw(self, raw_doc): source, docID = raw_doc['source'], raw_doc['docID'] document = self._get_by_source_id(Document, source, docID) or Document(source=source, docID=docID) document.raw = raw_doc.attributes document.save() @events.logged(events.PROCESSING, 'normalized.postgres') def process_normalized(self, raw_doc, normalized): source, docID = raw_doc['source'], raw_doc['docID'] document = self._get_by_source_id(Document, source, docID) or Document(source=source, docID=docID) document.normalized = normalized.attributes document.providerUpdatedDateTime = normalized['providerUpdatedDateTime'] document.save() def _get_by_source_id(self, model, source, docID): return Document.objects.filter(source=source, docID=docID)
from __future__ import absolute_import import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "webview.settings") import django import logging from api.webview.models import Document from scrapi import events from scrapi.processing.base import BaseProcessor django.setup() logger = logging.getLogger(__name__) class PostgresProcessor(BaseProcessor): NAME = 'postgres' @events.logged(events.PROCESSING, 'raw.postgres') def process_raw(self, raw_doc): source, docID = raw_doc['source'], raw_doc['docID'] document = self._get_by_source_id(Document, source, docID) or Document(source=source, docID=docID) document.raw = raw_doc.attributes document.save() @events.logged(events.PROCESSING, 'normalized.postgres') def process_normalized(self, raw_doc, normalized): source, docID = raw_doc['source'], raw_doc['docID'] document = self._get_by_source_id(Document, source, docID) or Document(source=source, docID=docID) document.normalized = normalized.attributes document.providerUpdatedDateTime = normalized['providerUpdatedDateTime'] document.save() def _get_by_source_id(self, model, source, docID): return Document.objects.filter(source=source, docID=docID)
Add django setup for some initialization
Add django setup for some initialization
Python
apache-2.0
CenterForOpenScience/scrapi,fabianvf/scrapi,erinspace/scrapi,CenterForOpenScience/scrapi,mehanig/scrapi,erinspace/scrapi,felliott/scrapi,mehanig/scrapi,fabianvf/scrapi,felliott/scrapi
--- +++ @@ -3,11 +3,14 @@ import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "webview.settings") +import django import logging from api.webview.models import Document from scrapi import events from scrapi.processing.base import BaseProcessor + +django.setup() logger = logging.getLogger(__name__)
0ea1153438c1d98232a921c8d14d401a541e95fd
examples/regex/regex_parser.py
examples/regex/regex_parser.py
# -*- coding: utf-8 -*- from __future__ import print_function, division, absolute_import, unicode_literals from parser_base import RegexParser import model class RegexSemantics(object): def __init__(self): super(RegexSemantics, self).__init__() self._count = 0 def START(self, ast): return model.Regex(ast) def CHOICE(self, ast): return model.Choice(ast.opts) def SEQUENCE(self, ast): if not ast.terms: return model.Empty() elif len(ast.terms) < 2: return ast.terms[0] else: return model.Sequence(ast.terms) def CLOSURE(self, ast): return model.Closure(ast) def SUBEXP(self, ast): return ast def LITERAL(self, ast): return model.Literal(ast) def translate(regex, trace=False): parser = RegexParser(trace=trace, semantics=RegexSemantics()) model = parser.parse(regex, 'START') model.set_rule_numbers() return model.render()
# -*- coding: utf-8 -*- from __future__ import print_function, division, absolute_import, unicode_literals from parser_base import RegexParser import model class RegexSemantics(object): def __init__(self): super(RegexSemantics, self).__init__() self._count = 0 def START(self, ast): return model.Regex(ast) def CHOICE(self, ast): return model.Choice(ast.opts) def SEQUENCE(self, ast): if not ast.terms: return model.Empty() elif len(ast.terms) < 2: return ast.terms[0] else: return model.Sequence(ast.terms) def CLOSURE(self, ast): return model.Closure(ast) def SUBEXP(self, ast): return ast def LITERAL(self, ast): return model.Literal(ast) def translate(regex, trace=False): parser = RegexParser(trace=trace, semantics=RegexSemantics()) model = parser.parse(regex, 'START') model.set_rule_numbers() return model.render().encode("ascii")
Fix regex example, the model must not be a unicode string.
Fix regex example, the model must not be a unicode string.
Python
bsd-2-clause
vmuriart/grako,frnknglrt/grako
--- +++ @@ -37,4 +37,4 @@ parser = RegexParser(trace=trace, semantics=RegexSemantics()) model = parser.parse(regex, 'START') model.set_rule_numbers() - return model.render() + return model.render().encode("ascii")
0da74f42f7d7311859a340b0e72c1b8902287d5c
Python/tigre/utilities/sample_loader.py
Python/tigre/utilities/sample_loader.py
from __future__ import division import os import numpy as np import scipy.io import scipy.ndimage.interpolation def load_head_phantom(number_of_voxels=None): if number_of_voxels is None: number_of_voxels = np.array((128, 128, 128)) dirname = os.path.dirname(__file__) dirname = os.path.join(dirname,'../../../Common/data/head.mat') test_data = scipy.io.loadmat(dirname) # Loads data in F_CONTIGUOUS MODE (column major), convert to Row major image = test_data['img'].transpose(2,1,0).copy() image_dimensions = image.shape zoom_x = number_of_voxels[0] / image_dimensions[0] zoom_y = number_of_voxels[1] / image_dimensions[1] zoom_z = number_of_voxels[2] / image_dimensions[2] # TODO: add test for this is resizing and not simply zooming resized_image = scipy.ndimage.interpolation.zoom(image, (zoom_x, zoom_y, zoom_z), order=3, prefilter=False) return resized_image
from __future__ import division import os import numpy as np import scipy.io import scipy.ndimage.interpolation def load_head_phantom(number_of_voxels=None): if number_of_voxels is None: number_of_voxels = np.array((128, 128, 128)) dirname = os.path.dirname(__file__) dirname = os.path.join(dirname,'../../../Common/data/head.mat') if not os.path.isfile(dirname): dirname = os.path.dirname(__file__) dirname = os.path.join(dirname,'./../../data/head.mat') test_data = scipy.io.loadmat(dirname) # Loads data in F_CONTIGUOUS MODE (column major), convert to Row major image = test_data['img'].transpose(2,1,0).copy() image_dimensions = image.shape zoom_x = number_of_voxels[0] / image_dimensions[0] zoom_y = number_of_voxels[1] / image_dimensions[1] zoom_z = number_of_voxels[2] / image_dimensions[2] # TODO: add test for this is resizing and not simply zooming resized_image = scipy.ndimage.interpolation.zoom(image, (zoom_x, zoom_y, zoom_z), order=3, prefilter=False) return resized_image
Allow for local run of example and demos installed with tool.
Allow for local run of example and demos installed with tool.
Python
bsd-3-clause
CERN/TIGRE,CERN/TIGRE,CERN/TIGRE,CERN/TIGRE
--- +++ @@ -10,6 +10,9 @@ number_of_voxels = np.array((128, 128, 128)) dirname = os.path.dirname(__file__) dirname = os.path.join(dirname,'../../../Common/data/head.mat') + if not os.path.isfile(dirname): + dirname = os.path.dirname(__file__) + dirname = os.path.join(dirname,'./../../data/head.mat') test_data = scipy.io.loadmat(dirname) # Loads data in F_CONTIGUOUS MODE (column major), convert to Row major
fcd85a1b15ca8b82f892bba171c21f9a1b4f6e4a
SOAPpy/tests/alanbushTest.py
SOAPpy/tests/alanbushTest.py
#!/usr/bin/env python # Copyright (c) 2001 actzero, inc. All rights reserved. import sys sys.path.insert (1, '..') import SOAP ident = '$Id$' SoapEndpointURL = 'http://www.alanbushtrust.org.uk/soap/compositions.asp' MethodNamespaceURI = 'urn:alanbushtrust-org-uk:soap:methods' SoapAction = MethodNamespaceURI + "#GetCategories" server = SOAP.SOAPProxy( SoapEndpointURL, namespace=MethodNamespaceURI, soapaction=SoapAction ) print "server level>>", server.GetCategories()
#!/usr/bin/env python # Copyright (c) 2001 actzero, inc. All rights reserved. import sys sys.path.insert (1, '..') import SOAP ident = '$Id$' SoapEndpointURL = 'http://www.alanbushtrust.org.uk/soap/compositions.asp' MethodNamespaceURI = 'urn:alanbushtrust-org-uk:soap.methods' SoapAction = MethodNamespaceURI + ".GetCategories" server = SOAP.SOAPProxy( SoapEndpointURL, namespace=MethodNamespaceURI, soapaction=SoapAction ) for category in server.GetCategories(): print category
Correct URI and list categories
Correct URI and list categories git-svn-id: c4afb4e777bcbfe9afa898413b708b5abcd43877@69 7150bf37-e60d-0410-b93f-83e91ef0e581
Python
mit
acigna/pywez,acigna/pywez,acigna/pywez
--- +++ @@ -11,8 +11,9 @@ ident = '$Id$' SoapEndpointURL = 'http://www.alanbushtrust.org.uk/soap/compositions.asp' -MethodNamespaceURI = 'urn:alanbushtrust-org-uk:soap:methods' -SoapAction = MethodNamespaceURI + "#GetCategories" +MethodNamespaceURI = 'urn:alanbushtrust-org-uk:soap.methods' +SoapAction = MethodNamespaceURI + ".GetCategories" server = SOAP.SOAPProxy( SoapEndpointURL, namespace=MethodNamespaceURI, soapaction=SoapAction ) -print "server level>>", server.GetCategories() +for category in server.GetCategories(): + print category
c5970991ed2d3285e6a3ef9badb6e73756ff876b
tests/test_session.py
tests/test_session.py
# Local imports from uplink import session def test_base_url(uplink_builder_mock): # Setup uplink_builder_mock.base_url = "https://api.github.com" sess = session.Session(uplink_builder_mock) # Run & Verify assert uplink_builder_mock.base_url == sess.base_url def test_headers(uplink_builder_mock): # Setup sess = session.Session(uplink_builder_mock) # Run sess.headers["key"] = "value" # Verify uplink_builder_mock.add_hook.assert_called() assert sess.headers == {"key": "value"} def test_params(uplink_builder_mock): # Setup sess = session.Session(uplink_builder_mock) # Run sess.params["key"] = "value" # Verify uplink_builder_mock.add_hook.assert_called() assert sess.params == {"key": "value"} def test_auth(uplink_builder_mock): # Setup uplink_builder_mock.auth = ("username", "password") sess = session.Session(uplink_builder_mock) # Run & Verify assert uplink_builder_mock.auth == sess.auth def test_auth_set(uplink_builder_mock): # Setup sess = session.Session(uplink_builder_mock) # Run sess.auth = ("username", "password") # Verify assert ("username", "password") == uplink_builder_mock.auth
# Local imports from uplink import session def test_base_url(uplink_builder_mock): # Setup uplink_builder_mock.base_url = "https://api.github.com" sess = session.Session(uplink_builder_mock) # Run & Verify assert uplink_builder_mock.base_url == sess.base_url def test_headers(uplink_builder_mock): # Setup sess = session.Session(uplink_builder_mock) # Run sess.headers["key"] = "value" # Verify assert uplink_builder_mock.add_hook.called assert sess.headers == {"key": "value"} def test_params(uplink_builder_mock): # Setup sess = session.Session(uplink_builder_mock) # Run sess.params["key"] = "value" # Verify uplink_builder_mock.add_hook.assert_called() assert sess.params == {"key": "value"} def test_auth(uplink_builder_mock): # Setup uplink_builder_mock.auth = ("username", "password") sess = session.Session(uplink_builder_mock) # Run & Verify assert uplink_builder_mock.auth == sess.auth def test_auth_set(uplink_builder_mock): # Setup sess = session.Session(uplink_builder_mock) # Run sess.auth = ("username", "password") # Verify assert ("username", "password") == uplink_builder_mock.auth
Fix `assert_called` usage for Python 3.5 build
Fix `assert_called` usage for Python 3.5 build The `assert_called` method seems to invoke a bug caused by a type in the unittest mock module. (The bug was ultimately tracked and fix here: https://bugs.python.org/issue24656)
Python
mit
prkumar/uplink
--- +++ @@ -19,7 +19,7 @@ sess.headers["key"] = "value" # Verify - uplink_builder_mock.add_hook.assert_called() + assert uplink_builder_mock.add_hook.called assert sess.headers == {"key": "value"}
3412b0db76f77a4772cf76c7794fbe61b58f5a25
bakery/views.py
bakery/views.py
# -*- coding: utf-8 -*- from django.core.urlresolvers import reverse from django.views.generic import ListView, TemplateView, RedirectView from django.contrib import auth from bakery.cookies.models import Cookie from bakery.socialize.models import Vote class HomeView(ListView): model = Cookie template_name = 'home.html' def get_context_data(self, **kwargs): context = super(HomeView, self).get_context_data(**kwargs) user_votes = Vote.objects.get_for_user(self.request.user.id) voted_cookie_ids = user_votes.values_list('pk', flat=True).all() context['voted_cookie_ids'] = voted_cookie_ids return context home = HomeView.as_view() class StylesView(TemplateView): template_name = 'styles.html' styles = StylesView.as_view() class LoginErrorView(TemplateView): template_name = 'error.html' login_error = LoginErrorView.as_view() class LogoutView(RedirectView): permanent = False def get_redirect_url(self, **kwargs): auth.logout(self.request) return reverse('home') logout = LogoutView.as_view()
# -*- coding: utf-8 -*- from django.core.urlresolvers import reverse from django.views.generic import ListView, TemplateView, RedirectView from django.contrib import auth from bakery.cookies.models import Cookie from bakery.socialize.models import Vote class HomeView(ListView): model = Cookie template_name = 'home.html' def get_context_data(self, **kwargs): context = super(HomeView, self).get_context_data(**kwargs) user_votes = Vote.objects.get_for_user(self.request.user.id) voted_cookie_ids = user_votes.values_list('cookie_id', flat=True).all() context['voted_cookie_ids'] = voted_cookie_ids return context home = HomeView.as_view() class StylesView(TemplateView): template_name = 'styles.html' styles = StylesView.as_view() class LoginErrorView(TemplateView): template_name = 'error.html' login_error = LoginErrorView.as_view() class LogoutView(RedirectView): permanent = False def get_redirect_url(self, **kwargs): auth.logout(self.request) return reverse('home') logout = LogoutView.as_view()
Select correct for vote detection
Select correct for vote detection
Python
bsd-3-clause
muffins-on-dope/bakery,muffins-on-dope/bakery,muffins-on-dope/bakery
--- +++ @@ -15,7 +15,7 @@ def get_context_data(self, **kwargs): context = super(HomeView, self).get_context_data(**kwargs) user_votes = Vote.objects.get_for_user(self.request.user.id) - voted_cookie_ids = user_votes.values_list('pk', flat=True).all() + voted_cookie_ids = user_votes.values_list('cookie_id', flat=True).all() context['voted_cookie_ids'] = voted_cookie_ids return context
d635fc9129bc4ccfd5384be6958ae1c14e9916ec
scripts/merge_translations.py
scripts/merge_translations.py
import sys import yaml def main(base_file, new_file, overwrite_language): old = yaml.load(file(base_file).read()) new = yaml.load(file(new_file).read()) assert len(overwrite_language) == 2 for o, n in zip(old, new): if overwrite_language in n['text']: o['text'][overwrite_language] = n['text'][overwrite_language] if o['type'] == 'multiple_choice': for oo, on in zip(o['options'], n['options']): if 'details' in oo and overwrite_language in on['details']: oo['details'][overwrite_language] = on['details'][overwrite_language] sys.stdout.write(yaml.safe_dump(old, allow_unicode=True, default_flow_style=False, encoding='utf-8', width=10000)) if __name__ == '__main__': main(sys.argv[1], sys.argv[2], sys.argv[3])
import sys import yaml def persona(old, new, overwrite_language): old_t = old['translations'] new_t = new['translations'] for key in old_t: if key in new_t and overwrite_language in new_t[key]: old_t[key][overwrite_language] = new_t[key][overwrite_language] def questions(old, new, overwrite_language): for o, n in zip(old, new): if overwrite_language in n['text']: o['text'][overwrite_language] = n['text'][overwrite_language] if overwrite_language in n['explanation']: o['explanation'][overwrite_language] = n['explanation'][overwrite_language] if overwrite_language in n['explanationmore']: o['explanationmore'][overwrite_language] = n['explanationmore'][overwrite_language] if o['type'] == 'multiple_choice': for oo, on in zip(o['options'], n['options']): if 'details' in oo and overwrite_language in on['details']: oo['details'][overwrite_language] = on['details'][overwrite_language] def main(mode, base_file, new_file, overwrite_language): old = yaml.load(file(base_file).read()) new = yaml.load(file(new_file).read()) assert len(overwrite_language) == 2 if mode == 'persona': persona(old, new, overwrite_language) elif mode == 'questions': questions(old, new, overwrite_language) sys.stdout.write(yaml.safe_dump(old, allow_unicode=True, default_flow_style=False, encoding='utf-8', width=10000)) if __name__ == '__main__': persona(*sys.argv)
Add persona merging to translation merge script
Add persona merging to translation merge script
Python
mit
okfde/eucopyright,okfde/eucopyright,okfde/eucopyright
--- +++ @@ -3,21 +3,40 @@ import yaml -def main(base_file, new_file, overwrite_language): - old = yaml.load(file(base_file).read()) - new = yaml.load(file(new_file).read()) +def persona(old, new, overwrite_language): + old_t = old['translations'] + new_t = new['translations'] - assert len(overwrite_language) == 2 + for key in old_t: + if key in new_t and overwrite_language in new_t[key]: + old_t[key][overwrite_language] = new_t[key][overwrite_language] + +def questions(old, new, overwrite_language): for o, n in zip(old, new): if overwrite_language in n['text']: o['text'][overwrite_language] = n['text'][overwrite_language] + if overwrite_language in n['explanation']: + o['explanation'][overwrite_language] = n['explanation'][overwrite_language] + if overwrite_language in n['explanationmore']: + o['explanationmore'][overwrite_language] = n['explanationmore'][overwrite_language] if o['type'] == 'multiple_choice': for oo, on in zip(o['options'], n['options']): if 'details' in oo and overwrite_language in on['details']: oo['details'][overwrite_language] = on['details'][overwrite_language] + +def main(mode, base_file, new_file, overwrite_language): + old = yaml.load(file(base_file).read()) + new = yaml.load(file(new_file).read()) + + assert len(overwrite_language) == 2 + if mode == 'persona': + persona(old, new, overwrite_language) + elif mode == 'questions': + questions(old, new, overwrite_language) + sys.stdout.write(yaml.safe_dump(old, allow_unicode=True, default_flow_style=False, encoding='utf-8', width=10000)) if __name__ == '__main__': - main(sys.argv[1], sys.argv[2], sys.argv[3]) + persona(*sys.argv)
03a78a509c0213f8f95223a7926a1bce547f05fe
rotterdam/proc.py
rotterdam/proc.py
import logging import os import signal from setproctitle import setproctitle class Proc(object): signal_map = {} def __init__(self): self.logger = logging.getLogger(self.__module__) self.pid = None @property def name(self): return self.__class__.__name__.lower() def setup(self): self.pid = os.getpid() self.parent_pid = os.getppid() self.setup_signals() setproctitle("rotterdam: %s" % self.name) def run(self): self.setup() self.logger.info("Starting %s (%d)", self.name, int(self.pid)) def setup_signals(self): for signal_name, handler_name in self.signal_map.iteritems(): signal.signal( getattr(signal, "SIG%s" % signal_name.upper()), getattr(self, handler_name) )
import logging import os import signal from setproctitle import setproctitle class Proc(object): signal_map = {} def __init__(self): self.logger = logging.getLogger(self.__module__) self.pid = None self.parent_pid = None @property def name(self): return self.__class__.__name__.lower() def setup(self): self.pid = os.getpid() self.parent_pid = os.getppid() self.setup_signals() setproctitle("rotterdam: %s" % self.name) def run(self): self.setup() self.logger.info("Starting %s (%d)", self.name, int(self.pid)) def setup_signals(self): for signal_name, handler_name in self.signal_map.iteritems(): signal.signal( getattr(signal, "SIG%s" % signal_name.upper()), getattr(self, handler_name) )
Initialize parent_pid in the Proc class init.
Initialize parent_pid in the Proc class init.
Python
mit
wglass/rotterdam
--- +++ @@ -13,6 +13,7 @@ self.logger = logging.getLogger(self.__module__) self.pid = None + self.parent_pid = None @property def name(self):
ce6a3a3833d498fa32a5317fd95e206cad9d5a83
alg_gcd.py
alg_gcd.py
def gcd(m, n): """Greatest Common Divisor (GCD) by Euclid's Algorithm. Time complexity: O(m%n). """ while n != 0: m, n = n, m % n return m def main(): print(gcd(4, 2)) print(gcd(2, 4)) print(gcd(10, 4)) print(gcd(4, 10)) print(gcd(10, 1)) print(gcd(1, 10)) if __name__ == '__main__': main()
from __future__ import absolute_import from __future__ import division from __future__ import print_function def gcd(m, n): """Greatest Common Divisor (GCD) by Euclid's Algorithm. Time complexity: O(m%n). """ while n != 0: m, n = n, m % n return m def main(): print('gcd(4, 2): {}'.format(gcd(4, 2))) print('gcd(2, 4): {}'.format(gcd(2, 4))) print('gcd(10, 4): {}'.format(gcd(10, 4))) print('gcd(4, 10): {}'.format(gcd(4, 10))) print('gcd(3, 4): {}'.format(gcd(3, 4))) print('gcd(4, 3): {}'.format(gcd(4, 3))) if __name__ == '__main__': main()
Add importing from __future__ & revise main()
Add importing from __future__ & revise main()
Python
bsd-2-clause
bowen0701/algorithms_data_structures
--- +++ @@ -1,3 +1,8 @@ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + + def gcd(m, n): """Greatest Common Divisor (GCD) by Euclid's Algorithm. @@ -9,14 +14,14 @@ def main(): - print(gcd(4, 2)) - print(gcd(2, 4)) + print('gcd(4, 2): {}'.format(gcd(4, 2))) + print('gcd(2, 4): {}'.format(gcd(2, 4))) - print(gcd(10, 4)) - print(gcd(4, 10)) + print('gcd(10, 4): {}'.format(gcd(10, 4))) + print('gcd(4, 10): {}'.format(gcd(4, 10))) - print(gcd(10, 1)) - print(gcd(1, 10)) + print('gcd(3, 4): {}'.format(gcd(3, 4))) + print('gcd(4, 3): {}'.format(gcd(4, 3))) if __name__ == '__main__':
0b45ec48955f73a0e88422660af82ff6fd89333b
tools/crons/newwoz.py
tools/crons/newwoz.py
import os import django os.environ.setdefault("DJANGO_SETTINGS_MODULE", "museum.settings") django.setup() print("NEW WOZZT TEST") from museum_site.wozzt_queue import WoZZT_Queue # noqa: E402 def main(): entry = WoZZT_Queue.objects.all().order_by("-priority", "id")[0] entry.send_tweet() entry.delete_image() entry.delete() print("Well that was easy.") if __name__ == "__main__": main()
import os import django from datetime import datetime os.environ.setdefault("DJANGO_SETTINGS_MODULE", "museum.settings") django.setup() from museum_site.wozzt_queue import WoZZT_Queue # noqa: E402 def main(): now = datetime.now() if now.weekday() == 1: # Tuesday entry = WoZZT_Queue.objects.filter(category="tuesday") else: entry = WoZZT_Queue.objects.all(category="wozzt") entry = entry.order_by("-priority", "id")[0] entry.send_tweet() entry.delete_image() entry.delete() print("Done.") if __name__ == "__main__": main()
Use Tuesday branch on Tuesdays
Use Tuesday branch on Tuesdays
Python
mit
DrDos0016/z2,DrDos0016/z2,DrDos0016/z2
--- +++ @@ -2,21 +2,27 @@ import django +from datetime import datetime + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "museum.settings") django.setup() - -print("NEW WOZZT TEST") from museum_site.wozzt_queue import WoZZT_Queue # noqa: E402 def main(): - entry = WoZZT_Queue.objects.all().order_by("-priority", "id")[0] + now = datetime.now() + if now.weekday() == 1: # Tuesday + entry = WoZZT_Queue.objects.filter(category="tuesday") + else: + entry = WoZZT_Queue.objects.all(category="wozzt") + + entry = entry.order_by("-priority", "id")[0] entry.send_tweet() entry.delete_image() entry.delete() - print("Well that was easy.") + print("Done.") if __name__ == "__main__":
ed89c92ac56e89648bf965ea3aa8963e840e3a5c
tests/test_excuses.py
tests/test_excuses.py
# Copyright 2017 Starbot Discord Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import unittest from api import message, plugin from plugins import excuses class TestExcuseSuite(unittest.TestCase): def testExcuseMsg(self): msg = message.message(body="") msg.command = "excuse" fullExcuseList = [] for sorry in excuses.sorryList: for excuse in excuses.excuseList: fullExcuseList.append('*{} {}*'.format(sorryList[sorry], excuseList[excuse]) result=excuses.onCommand(msg) self.assertEqual(type(result), type(msg)) self.assertEqual(result.body in fullExcuseList, True)
# Copyright 2017 Starbot Discord Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import unittest from api import message, plugin from plugins import excuses class TestExcuseSuite(unittest.TestCase): def testExcuseMsg(self): msg = message.message(body="") msg.command = "excuse" excuseList = ["I have an appointment with a robot.", "I was abducted by robots.", "I didn’t know what day it was because I was looking at the Robotic Calendar.", "My robot threw up on my source code.", "I need to take my robot for a walk.", "I had to get a cybernetic head and couldn't get anything done.", "My Robot Assistant blue-screened.", "A kernel panic erased my work.", "Somebody used up the data limit watching YouTube."] sorryList = ["Please excuse me,", "I'm sorry, but", "I hope you forgive me, because"] fullExcuseList = [] for sorry in sorryList: for excuse in excuseList: fullExcuseList.append('*{} {}*'.format(sorry, excuse)) result=excuses.onCommand(msg) print(result.body) self.assertEqual(type(result), type(msg)) self.assertEqual(result.body in fullExcuseList, True)
Fix the unit test :)
Fix the unit test :)
Python
apache-2.0
StarbotDiscord/Starbot,dhinakg/BitSTAR,StarbotDiscord/Starbot,dhinakg/BitSTAR
--- +++ @@ -22,12 +22,23 @@ msg = message.message(body="") msg.command = "excuse" + excuseList = ["I have an appointment with a robot.", + "I was abducted by robots.", + "I didn’t know what day it was because I was looking at the Robotic Calendar.", + "My robot threw up on my source code.", + "I need to take my robot for a walk.", + "I had to get a cybernetic head and couldn't get anything done.", + "My Robot Assistant blue-screened.", + "A kernel panic erased my work.", + "Somebody used up the data limit watching YouTube."] + sorryList = ["Please excuse me,", "I'm sorry, but", "I hope you forgive me, because"] fullExcuseList = [] - for sorry in excuses.sorryList: - for excuse in excuses.excuseList: - fullExcuseList.append('*{} {}*'.format(sorryList[sorry], excuseList[excuse]) + for sorry in sorryList: + for excuse in excuseList: + fullExcuseList.append('*{} {}*'.format(sorry, excuse)) result=excuses.onCommand(msg) + print(result.body) self.assertEqual(type(result), type(msg)) self.assertEqual(result.body in fullExcuseList, True)
38749a0033c2acc6c9fd8971749c68f93cb9c0db
virtualenv/__init__.py
virtualenv/__init__.py
from __future__ import absolute_import, division, print_function from virtualenv.__about__ import ( __author__, __copyright__, __email__, __license__, __summary__, __title__, __uri__, __version__ ) __all__ = [ "__title__", "__summary__", "__uri__", "__version__", "__author__", "__email__", "__license__", "__copyright__", ]
from __future__ import absolute_import, division, print_function from virtualenv.__about__ import ( __author__, __copyright__, __email__, __license__, __summary__, __title__, __uri__, __version__ ) from virtualenv.core import create __all__ = [ "__title__", "__summary__", "__uri__", "__version__", "__author__", "__email__", "__license__", "__copyright__", "create", ]
Add virtualenv.create function to enable easy virtualenv creation
Add virtualenv.create function to enable easy virtualenv creation
Python
mit
ionelmc/virtualenv,ionelmc/virtualenv,ionelmc/virtualenv
--- +++ @@ -4,9 +4,11 @@ __author__, __copyright__, __email__, __license__, __summary__, __title__, __uri__, __version__ ) +from virtualenv.core import create __all__ = [ "__title__", "__summary__", "__uri__", "__version__", "__author__", "__email__", "__license__", "__copyright__", + "create", ]
3d813e9960a44bfacd94551b95caf489b85ea3bb
trac/upgrades/db11.py
trac/upgrades/db11.py
import os.path import shutil sql = """ -- Remove empty values from the milestone list DELETE FROM milestone WHERE COALESCE(name,'')=''; -- Add a description column to the version table, and remove unnamed versions CREATE TEMP TABLE version_old AS SELECT * FROM version; DROP TABLE version; CREATE TABLE version ( name text PRIMARY KEY, time integer, description text ); INSERT INTO version(name,time,description) SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>''; -- Add a description column to the component table, and remove unnamed components CREATE TEMP TABLE component_old AS SELECT * FROM component; DROP TABLE component; CREATE TABLE component ( name text PRIMARY KEY, owner text, description text ); INSERT INTO component(name,owner,description) SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>''; """ def do_upgrade(env, ver, cursor): cursor.execute(sql) # Copy the new default wiki macros over to the environment from trac.siteconfig import __default_macro_dir__ as macro_dir for f in os.listdir(macro_dir): if not f.endswith('.py'): continue src = os.path.join(macro_dir, f) dst = os.path.join(env.path, 'wiki-macros', f) if not os.path.isfile(dst): shutil.copy2(src, dst)
import os.path import shutil sql = """ -- Remove empty values from the milestone list DELETE FROM milestone WHERE COALESCE(name,'')=''; -- Add a description column to the version table, and remove unnamed versions CREATE TEMP TABLE version_old AS SELECT * FROM version; DROP TABLE version; CREATE TABLE version ( name text PRIMARY KEY, time integer, description text ); INSERT INTO version(name,time,description) SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>''; -- Add a description column to the component table, and remove unnamed components CREATE TEMP TABLE component_old AS SELECT * FROM component; DROP TABLE component; CREATE TABLE component ( name text PRIMARY KEY, owner text, description text ); INSERT INTO component(name,owner,description) SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>''; """ def do_upgrade(env, ver, cursor): cursor.execute(sql) # Copy the new default wiki macros over to the environment from trac.siteconfig import __default_macros_dir__ as macros_dir for f in os.listdir(macros_dir): if not f.endswith('.py'): continue src = os.path.join(macros_dir, f) dst = os.path.join(env.path, 'wiki-macros', f) if not os.path.isfile(dst): shutil.copy2(src, dst)
Fix typo in upgrade script
Fix typo in upgrade script git-svn-id: f68c6b3b1dcd5d00a2560c384475aaef3bc99487@1647 af82e41b-90c4-0310-8c96-b1721e28e2e2
Python
bsd-3-clause
exocad/exotrac,dafrito/trac-mirror,dokipen/trac,dokipen/trac,dokipen/trac,exocad/exotrac,exocad/exotrac,dafrito/trac-mirror,moreati/trac-gitsvn,moreati/trac-gitsvn,dafrito/trac-mirror,exocad/exotrac,moreati/trac-gitsvn,dafrito/trac-mirror,moreati/trac-gitsvn
--- +++ @@ -32,11 +32,11 @@ cursor.execute(sql) # Copy the new default wiki macros over to the environment - from trac.siteconfig import __default_macro_dir__ as macro_dir - for f in os.listdir(macro_dir): + from trac.siteconfig import __default_macros_dir__ as macros_dir + for f in os.listdir(macros_dir): if not f.endswith('.py'): continue - src = os.path.join(macro_dir, f) + src = os.path.join(macros_dir, f) dst = os.path.join(env.path, 'wiki-macros', f) if not os.path.isfile(dst): shutil.copy2(src, dst)
e521b16844efc2853c0db9014098cb3e37f6eb04
numpy/_array_api/_sorting_functions.py
numpy/_array_api/_sorting_functions.py
def argsort(x, /, *, axis=-1, descending=False, stable=True): from .. import argsort from .. import flip # Note: this keyword argument is different, and the default is different. kind = 'stable' if stable else 'quicksort' res = argsort(x, axis=axis, kind=kind) if descending: res = flip(res, axis=axis) def sort(x, /, *, axis=-1, descending=False, stable=True): from .. import sort from .. import flip # Note: this keyword argument is different, and the default is different. kind = 'stable' if stable else 'quicksort' res = sort(x, axis=axis, kind=kind) if descending: res = flip(res, axis=axis)
def argsort(x, /, *, axis=-1, descending=False, stable=True): from .. import argsort from .. import flip # Note: this keyword argument is different, and the default is different. kind = 'stable' if stable else 'quicksort' res = argsort(x, axis=axis, kind=kind) if descending: res = flip(res, axis=axis) return res def sort(x, /, *, axis=-1, descending=False, stable=True): from .. import sort from .. import flip # Note: this keyword argument is different, and the default is different. kind = 'stable' if stable else 'quicksort' res = sort(x, axis=axis, kind=kind) if descending: res = flip(res, axis=axis) return res
Add missing returns to the array API sorting functions
Add missing returns to the array API sorting functions
Python
bsd-3-clause
jakirkham/numpy,numpy/numpy,mhvk/numpy,seberg/numpy,pdebuyl/numpy,numpy/numpy,numpy/numpy,jakirkham/numpy,anntzer/numpy,charris/numpy,simongibbons/numpy,endolith/numpy,simongibbons/numpy,mhvk/numpy,seberg/numpy,mattip/numpy,mattip/numpy,jakirkham/numpy,mhvk/numpy,charris/numpy,seberg/numpy,simongibbons/numpy,pdebuyl/numpy,simongibbons/numpy,rgommers/numpy,mattip/numpy,endolith/numpy,numpy/numpy,anntzer/numpy,rgommers/numpy,pdebuyl/numpy,anntzer/numpy,charris/numpy,endolith/numpy,seberg/numpy,rgommers/numpy,mhvk/numpy,endolith/numpy,mhvk/numpy,mattip/numpy,charris/numpy,pdebuyl/numpy,jakirkham/numpy,jakirkham/numpy,rgommers/numpy,anntzer/numpy,simongibbons/numpy
--- +++ @@ -6,6 +6,7 @@ res = argsort(x, axis=axis, kind=kind) if descending: res = flip(res, axis=axis) + return res def sort(x, /, *, axis=-1, descending=False, stable=True): from .. import sort @@ -15,3 +16,4 @@ res = sort(x, axis=axis, kind=kind) if descending: res = flip(res, axis=axis) + return res
f57511df7de901f3694fa4225af458e192975299
pdf_parser/pdf_types/compound_types.py
pdf_parser/pdf_types/compound_types.py
from .common import PdfType class PdfArray(PdfType, list): def __init__(self, *args, **kwargs): PdfType.__init__(self) list.__init__(self, *args, **kwargs) class PdfDict(PdfType, dict): def __init__(self, *args, **kwargs): PdfType.__init__(self) dict.__init__(self, *args, **kwargs) def __getattribute__(self, name): try: return self[name].parsed_object except AttributeError: return self[name] except KeyError: raise AttributeError('Object has no attribute "%s"'%name)
from .common import PdfType class PdfArray(PdfType, list): def __init__(self, *args, **kwargs): PdfType.__init__(self) list.__init__(self, *args, **kwargs) class PdfDict(PdfType, dict): def __init__(self, *args, **kwargs): PdfType.__init__(self) dict.__init__(self, *args, **kwargs) def __getattr__(self, name): try: return self[name].parsed_object except AttributeError: return self[name] except KeyError: raise AttributeError('Object has no attribute "%s"'%name)
Fix PdfDict: __getattr__, not __getattribute__
Fix PdfDict: __getattr__, not __getattribute__
Python
mit
ajmarks/gymnast,ajmarks/gymnast
--- +++ @@ -10,7 +10,7 @@ PdfType.__init__(self) dict.__init__(self, *args, **kwargs) - def __getattribute__(self, name): + def __getattr__(self, name): try: return self[name].parsed_object except AttributeError:
ec15cfe29eb4f99725315d54c97d99d81e3b3ccc
elevator/config.py
elevator/config.py
# -*- coding: utf-8 -*- # Copyright (c) 2012 theo crevon # # See the file LICENSE for copying permission. from ConfigParser import ConfigParser from utils.snippets import items_to_dict class Config(dict): """ Unix shells like environment class. Implements add, get, load, flush methods. Handles lists of values too. Basically Acts like a basic key/value store. """ def __init__(self, f, *args, **kwargs): if f: self.update_with_file(f) # Has to be called last! self.update(kwargs) dict.__init__(self, *args, **kwargs) def update_with_file(self, f): """ Updates the environment using an ini file containing key/value descriptions. """ config = ConfigParser() with open(f, 'r') as f: config.readfp(f) for section in config.sections(): self.update(items_to_dict(config.items(section))) def reload_from_file(self, f=''): self.flush(f) self.load(f) def update_with_args(self, args): """Loads argparse kwargs into environment, as `section`""" for (arg, value) in args: if value is not None: self[arg] = value def flush(self): """ Flushes the environment from it's manually set attributes. """ for attr in self.attributes: delattr(self, attr)
# -*- coding: utf-8 -*- # Copyright (c) 2012 theo crevon # # See the file LICENSE for copying permission. from ConfigParser import ConfigParser from utils.snippets import items_to_dict class Config(dict): """ Unix shells like environment class. Implements add, get, load, flush methods. Handles lists of values too. Basically Acts like a basic key/value store. """ def __init__(self, f=None, *args, **kwargs): if f: self.update_with_file(f) # Has to be called last! self.update(kwargs) dict.__init__(self, *args, **kwargs) def update_with_file(self, f): """ Updates the environment using an ini file containing key/value descriptions. """ config = ConfigParser() with open(f, 'r') as f: config.readfp(f) for section in config.sections(): self.update(items_to_dict(config.items(section))) def reload_from_file(self, f=''): self.flush(f) self.load(f) def update_with_args(self, args): """Loads argparse kwargs into environment, as `section`""" for (arg, value) in args: if value is not None: self[arg] = value def flush(self): """ Flushes the environment from it's manually set attributes. """ for attr in self.attributes: delattr(self, attr)
Enable Config instantiation from kwargs only
Fix: Enable Config instantiation from kwargs only
Python
mit
oleiade/Elevator
--- +++ @@ -15,7 +15,7 @@ get, load, flush methods. Handles lists of values too. Basically Acts like a basic key/value store. """ - def __init__(self, f, *args, **kwargs): + def __init__(self, f=None, *args, **kwargs): if f: self.update_with_file(f) # Has to be called last!
f3cc2de83c88f01f7ec554ae6223132c284b4ad4
kotti_site_gallery/__init__.py
kotti_site_gallery/__init__.py
from fanstatic import Library from fanstatic import Resource from kotti.resources import Image import kotti.static as ks lib_kotti_site_gallery = Library('kotti_site_gallery', 'static') view_css = Resource(lib_kotti_site_gallery, "kotti_site_gallery.css", minified="kotti_site_gallery.min.css", depends=[ks.view_css]) def kotti_configure(settings): settings['kotti.available_types'] += ' kotti_site_gallery.resources.Site' settings['kotti.available_types'] += ' kotti_site_gallery.resources.SiteGallery' settings['pyramid.includes'] += ' kotti_site_gallery.includeme' settings['pyramid.includes'] += ' kotti_site_gallery.views.includeme' Image.type_info.addable_to.append(u'Site') def includeme(config): ks.view_needed.add(view_css)
from __future__ import absolute_import from fanstatic import Library from fanstatic import Resource from kotti.resources import Image from kotti.fanstatic import view_css from kotti.fanstatic import view_needed lib_kotti_site_gallery = Library('kotti_site_gallery', 'static') ksg_view_css = Resource(lib_kotti_site_gallery, "kotti_site_gallery.css", minified="kotti_site_gallery.min.css", depends=[view_css]) def kotti_configure(settings): settings['kotti.available_types'] += ' kotti_site_gallery.resources.Site' settings['kotti.available_types'] += ' kotti_site_gallery.resources.SiteGallery' settings['pyramid.includes'] += ' kotti_site_gallery.includeme' settings['pyramid.includes'] += ' kotti_site_gallery.views.includeme' Image.type_info.addable_to.append(u'Site') def includeme(config): view_needed.add(ksg_view_css)
Fix import for Kotti > 0.8x.
Fix import for Kotti > 0.8x.
Python
bsd-2-clause
Kotti/kotti_site_gallery,Kotti/kotti_site_gallery
--- +++ @@ -1,14 +1,17 @@ +from __future__ import absolute_import + from fanstatic import Library from fanstatic import Resource from kotti.resources import Image -import kotti.static as ks +from kotti.fanstatic import view_css +from kotti.fanstatic import view_needed lib_kotti_site_gallery = Library('kotti_site_gallery', 'static') -view_css = Resource(lib_kotti_site_gallery, +ksg_view_css = Resource(lib_kotti_site_gallery, "kotti_site_gallery.css", minified="kotti_site_gallery.min.css", - depends=[ks.view_css]) + depends=[view_css]) def kotti_configure(settings): @@ -22,4 +25,4 @@ def includeme(config): - ks.view_needed.add(view_css) + view_needed.add(ksg_view_css)
a4c5782158e7d3fa696fc4532836355457f48cc0
versebot/webparser.py
versebot/webparser.py
""" VerseBot for reddit By Matthieu Grieger parser.py Copyright (c) 2015 Matthieu Grieger (MIT License) """ from bs4 import BeautifulSoup from urllib.request import urlopen class Parser: """ Parser class for BibleGateway parsing methods. """ def __init__(self): """ Initializes translations attribute and checks if there are any new translations to add to the database. """ self.translations = self.find_supported_translations().sort(key=len, reverse=True) def find_supported_translations(self): """ Retrieves a list of supported translations from BibleGateway's translation page. """ url = "http://www.biblegateway.com/versions/" translations = list() page = urlopen(url) soup = BeautifulSoup(page.read()) translations = soup.find("select", {"class":"search-translation-select"}) trans = translations.findAll("option") for t in trans: if t.has_attr("value") and not t.has_attr("class"): cur_trans = t["value"] translations.append(cur_trans) # Add local translations to supported translations list translations.append("NJPS") return translations
""" VerseBot for reddit By Matthieu Grieger webparser.py Copyright (c) 2015 Matthieu Grieger (MIT License) """ from bs4 import BeautifulSoup from urllib.request import urlopen class Parser: """ Parser class for BibleGateway parsing methods. """ def __init__(self): """ Initializes translations attribute and checks if there are any new translations to add to the database. """ trans = self.find_supported_translations() if trans is None: self.translations = None else: self.translations = trans.sort(key=len, reverse=True) def find_supported_translations(self): """ Retrieves a list of supported translations from BibleGateway's translation page. """ url = "http://www.biblegateway.com/versions/" translations = list() page = urlopen(url) soup = BeautifulSoup(page.read()) # It seems that BibleGateway has changed the layout of their versions page. This needs # to be redone! translations = soup.find("select", {"class":"search-translation-select"}) trans = translations.findAll("option") for t in trans: if t.has_attr("value") and not t.has_attr("class"): cur_trans = t["value"] translations.append(cur_trans) # Add local translations to supported translations list translations.append("NJPS") return translations
Add note about BibleGateway layout
Add note about BibleGateway layout
Python
mit
Matthew-Arnold/slack-versebot,Matthew-Arnold/slack-versebot
--- +++ @@ -1,7 +1,7 @@ """ VerseBot for reddit By Matthieu Grieger -parser.py +webparser.py Copyright (c) 2015 Matthieu Grieger (MIT License) """ @@ -14,7 +14,11 @@ def __init__(self): """ Initializes translations attribute and checks if there are any new translations to add to the database. """ - self.translations = self.find_supported_translations().sort(key=len, reverse=True) + trans = self.find_supported_translations() + if trans is None: + self.translations = None + else: + self.translations = trans.sort(key=len, reverse=True) def find_supported_translations(self): """ Retrieves a list of supported translations from BibleGateway's translation @@ -24,7 +28,9 @@ page = urlopen(url) soup = BeautifulSoup(page.read()) - + + # It seems that BibleGateway has changed the layout of their versions page. This needs + # to be redone! translations = soup.find("select", {"class":"search-translation-select"}) trans = translations.findAll("option") for t in trans:
41b5a95a5c396c131d1426dd926e0a1a4beccc86
mrp_workorder_sequence/models/mrp_production.py
mrp_workorder_sequence/models/mrp_production.py
# Copyright 2019-20 ForgeFlow S.L. (https://www.forgeflow.com) # License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html). from odoo import models class MrpProduction(models.Model): _inherit = "mrp.production" def _reset_work_order_sequence(self): for rec in self: current_sequence = 1 for work in rec.workorder_ids: work.sequence = current_sequence current_sequence += 1 def _generate_workorders(self, exploded_boms): res = super()._generate_workorders(exploded_boms) self._reset_work_order_sequence() return res
# Copyright 2019-20 ForgeFlow S.L. (https://www.forgeflow.com) # License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html). from odoo import models class MrpProduction(models.Model): _inherit = "mrp.production" def _reset_work_order_sequence(self): for rec in self: current_sequence = 1 for work in rec.workorder_ids: work.sequence = current_sequence current_sequence += 1 def _create_workorder(self): res = super()._create_workorder() self._reset_work_order_sequence() return res
Call method changed on v14
[FIX] mrp_workorder_sequence: Call method changed on v14
Python
agpl-3.0
OCA/manufacture,OCA/manufacture
--- +++ @@ -14,7 +14,7 @@ work.sequence = current_sequence current_sequence += 1 - def _generate_workorders(self, exploded_boms): - res = super()._generate_workorders(exploded_boms) + def _create_workorder(self): + res = super()._create_workorder() self._reset_work_order_sequence() return res
54b147e59d1dfd4b65643a3f8a56098eb5a99451
tests/kafka_cluster_manager/decommission_test.py
tests/kafka_cluster_manager/decommission_test.py
from __future__ import unicode_literals from argparse import Namespace import mock import pytest from kafka_utils.kafka_cluster_manager.cluster_info \ .partition_count_balancer import PartitionCountBalancer from kafka_utils.kafka_cluster_manager.cmds import decommission from tests.kafka_cluster_manager.helper import broker_range @pytest.fixture def command_instance(): cmd = decommission.DecommissionCmd() cmd.args = mock.Mock(spec=Namespace) cmd.args.force_progress = False cmd.args.broker_ids = [] cmd.args.auto_max_movement_size = True return cmd def test_decommission_no_partitions_to_move(command_instance, create_cluster_topology): cluster_one_broker_empty = create_cluster_topology( assignment={('topic', 0): [0, 1]}, brokers=broker_range(3), ) command_instance.args.brokers_ids = [2] balancer = PartitionCountBalancer(cluster_one_broker_empty, command_instance.args) command_instance.run_command(cluster_one_broker_empty, balancer)
from __future__ import unicode_literals from argparse import Namespace import mock import pytest from kafka_utils.kafka_cluster_manager.cluster_info \ .partition_count_balancer import PartitionCountBalancer from kafka_utils.kafka_cluster_manager.cmds import decommission from tests.kafka_cluster_manager.helper import broker_range @pytest.fixture def command_instance(): cmd = decommission.DecommissionCmd() cmd.args = mock.Mock(spec=Namespace) cmd.args.force_progress = False cmd.args.broker_ids = [] cmd.args.auto_max_movement_size = True cmd.args.max_partition_movements = 10 cmd.args.max_leader_changes = 10 return cmd def test_decommission_no_partitions_to_move(command_instance, create_cluster_topology): cluster_one_broker_empty = create_cluster_topology( assignment={('topic', 0): [0, 1]}, brokers=broker_range(3), ) command_instance.args.brokers_ids = [2] balancer = PartitionCountBalancer(cluster_one_broker_empty, command_instance.args) command_instance.run_command(cluster_one_broker_empty, balancer)
Add more default args so tests pass in py3+
Add more default args so tests pass in py3+
Python
apache-2.0
Yelp/kafka-utils,Yelp/kafka-utils
--- +++ @@ -18,6 +18,8 @@ cmd.args.force_progress = False cmd.args.broker_ids = [] cmd.args.auto_max_movement_size = True + cmd.args.max_partition_movements = 10 + cmd.args.max_leader_changes = 10 return cmd
a805d414557a14a8588b6e34a9fbb93cc87651df
lib/scheduler.py
lib/scheduler.py
import time import logging log = logging.getLogger(__name__) class TaskSchedule(): def __init__(self): self.tasks = [] log.debug("TaskSchedule created") def add(self, task, ttl_skew=0.8): offset = ((ttl_skew * task.ttl) - task.skew()) log.info("Scheduling '%s' for %ss from now" % (task.name, offset)) if task.skew() > (task.ttl * 0.5): log.warning("Task skew of %s is > 50%% of TTL(%s) for '%s'" % (task.skew(), task.ttl, task.name)) else: log.debug("Task skew for '%s' is %s" % ( task.name, task.skew())) deadline = time.time() + offset self.tasks.append((task, deadline)) def update(self): self.tasks.sort(key=lambda task: task[1], reverse=True) def next(self): task, deadline = self.tasks.pop() log.info("Next task is '%s' scheduled to run in %ss" % (task.name, deadline-time.time())) return (task, deadline) def ready(self, deadline, grace=1.1): now = time.time() return (deadline - now) < grace def waiting(self): self.update() return len([t for t in self.tasks if self.ready(t[1])])
import time import logging log = logging.getLogger(__name__) class TaskSchedule(): def __init__(self): self.tasks = [] log.debug("TaskSchedule created") def add(self, task, ttl_skew=0.5): offset = ((ttl_skew * task.ttl) - task.skew()) log.info("Scheduling '%s' for %ss from now" % (task.name, offset)) if task.skew() > (task.ttl * ttl_skew): log.warning("Task skew of %s is > %s%% of TTL(%s) for '%s'" % (task.skew(), (ttl_skew*100), task.ttl, task.name)) else: log.debug("Task skew for '%s' is %s" % ( task.name, task.skew())) deadline = time.time() + offset self.tasks.append((task, deadline)) def update(self): self.tasks.sort(key=lambda task: task[1], reverse=True) def next(self): task, deadline = self.tasks.pop() log.info("Next task is '%s' scheduled to run in %ss" % (task.name, deadline-time.time())) return (task, deadline) def ready(self, deadline, grace=1.1): now = time.time() return (deadline - now) < grace def waiting(self): self.update() return len([t for t in self.tasks if self.ready(t[1])])
Change ttl_skew to 50%, to allow a task to run twice before riemann notices that it never checked in.
Change ttl_skew to 50%, to allow a task to run twice before riemann notices that it never checked in.
Python
mit
crashlytics/riemann-sumd
--- +++ @@ -8,13 +8,13 @@ self.tasks = [] log.debug("TaskSchedule created") - def add(self, task, ttl_skew=0.8): + def add(self, task, ttl_skew=0.5): offset = ((ttl_skew * task.ttl) - task.skew()) log.info("Scheduling '%s' for %ss from now" % (task.name, offset)) - if task.skew() > (task.ttl * 0.5): - log.warning("Task skew of %s is > 50%% of TTL(%s) for '%s'" % (task.skew(), task.ttl, task.name)) + if task.skew() > (task.ttl * ttl_skew): + log.warning("Task skew of %s is > %s%% of TTL(%s) for '%s'" % (task.skew(), (ttl_skew*100), task.ttl, task.name)) else: log.debug("Task skew for '%s' is %s" % ( task.name, task.skew()))
159aea1c97b8e8de45802cace031e7206c3c8fec
thecure/sprites/tile.py
thecure/sprites/tile.py
from thecure.resources import load_spritesheet_frame from thecure.sprites import Sprite class Tile(Sprite): NAME = 'tile' WIDTH = 64 HEIGHT = 64 NEED_TICKS = False def __init__(self, filename, tile_offset): super(Tile, self).__init__() self.filename = filename self.tile_offset = tile_offset self.rect.size = (self.WIDTH, self.HEIGHT) def update_image(self): self.image = load_spritesheet_frame(self.filename, self.tile_offset, frame_size=(self.WIDTH, self.HEIGHT)) assert self.image
from thecure.resources import load_spritesheet_frame from thecure.sprites import Sprite class Tile(Sprite): NAME = 'tile' WIDTH = 64 HEIGHT = 64 NEED_TICKS = False def __init__(self, filename, tile_offset): super(Tile, self).__init__() self.filename = filename self.tile_offset = tile_offset self.rect.size = (self.WIDTH, self.HEIGHT) def __str__(self): return 'Tile %s:%s at %s' % (self.filename, self.tile_offset, self.rect.topleft) def update_image(self): self.image = load_spritesheet_frame(self.filename, self.tile_offset, frame_size=self.rect.size) assert self.image
Add a __str__ for Tile.
Add a __str__ for Tile.
Python
mit
chipx86/the-cure
--- +++ @@ -16,8 +16,11 @@ self.tile_offset = tile_offset self.rect.size = (self.WIDTH, self.HEIGHT) + def __str__(self): + return 'Tile %s:%s at %s' % (self.filename, self.tile_offset, + self.rect.topleft) + def update_image(self): self.image = load_spritesheet_frame(self.filename, self.tile_offset, - frame_size=(self.WIDTH, - self.HEIGHT)) + frame_size=self.rect.size) assert self.image
def66bc381f03970640a61d64b49ad5de9ef3879
ocaml/build-in.py
ocaml/build-in.py
# Needed because ocamlbuild 3.12.1 doesn't support absolute pathnames (4.00.1 does) import sys import os from os.path import relpath ocaml_build_dir = relpath(sys.argv[1], '.') os.execvp("make", ["make", 'OCAML_BUILDDIR=' + ocaml_build_dir, "ocaml"])
# Needed because ocamlbuild 3.12.1 doesn't support absolute pathnames (4.00.1 does) import sys import os from os.path import relpath ocaml_build_dir = relpath(sys.argv[1], '.') # Hack: when we can depend on a full OCaml feed with the build tools, we can remove this. # Until then, we need to avoid trying to compile against the limited runtime environment. if 'OCAMLLIB' in os.environ: del os.environ['OCAMLLIB'] os.execvp("make", ["make", 'OCAML_BUILDDIR=' + ocaml_build_dir, "ocaml"])
Remove OCAMLLIB from build environment
Remove OCAMLLIB from build environment This is a temporary hack: when we can depend on a full OCaml feed with the build tools, we can remove this. Until then, we need to avoid trying to compile against the limited runtime environment.
Python
lgpl-2.1
0install/0install,afb/0install,afb/0install,afb/0install,gasche/0install,bastianeicher/0install,bhilton/0install,fdopen/0install,gasche/0install,0install/0install,jaychoo/0install,dbenamy/0install,gfxmonk/0install,jaychoo/0install,dbenamy/0install,DarkGreising/0install,bastianeicher/0install,fdopen/0install,bhilton/0install,bhilton/0install,bartbes/0install,gasche/0install,bastianeicher/0install,dbenamy/0install,DarkGreising/0install,bartbes/0install,fdopen/0install,gasche/0install,HoMeCracKeR/0install,jaychoo/0install,pombreda/0install,gfxmonk/0install,bartbes/0install,afb/0install,HoMeCracKeR/0install,HoMeCracKeR/0install,gfxmonk/0install,DarkGreising/0install,pombreda/0install,pombreda/0install,0install/0install
--- +++ @@ -3,4 +3,10 @@ import os from os.path import relpath ocaml_build_dir = relpath(sys.argv[1], '.') + +# Hack: when we can depend on a full OCaml feed with the build tools, we can remove this. +# Until then, we need to avoid trying to compile against the limited runtime environment. +if 'OCAMLLIB' in os.environ: + del os.environ['OCAMLLIB'] + os.execvp("make", ["make", 'OCAML_BUILDDIR=' + ocaml_build_dir, "ocaml"])
b297ad6b4d52b688a1c50ffc2a5574d8061c5ce0
csvdiff/records.py
csvdiff/records.py
# -*- coding: utf-8 -*- # # records.py # csvdiff # import csv class InvalidKeyError(Exception): pass def load(file_or_stream): istream = (open(file_or_stream) if not hasattr(file_or_stream, 'read') else file_or_stream) return csv.DictReader(istream) def index(record_seq, index_columns): try: return { tuple(r[i] for i in index_columns): r for r in record_seq } except KeyError as k: raise InvalidKeyError('invalid column name {k} as key'.format(k=k)) def save(record_seq, fieldnames, ostream): writer = csv.DictWriter(ostream, fieldnames) writer.writeheader() for r in record_seq: writer.writerow(r) def sort(recs): return sorted(recs, key=_record_key) def _record_key(r): return sorted(r.items())
# -*- coding: utf-8 -*- # # records.py # csvdiff # import csv from . import error class InvalidKeyError(Exception): pass def load(file_or_stream): istream = (open(file_or_stream) if not hasattr(file_or_stream, 'read') else file_or_stream) return _safe_iterator(csv.DictReader(istream)) def _safe_iterator(reader): for lineno, r in enumerate(reader, 2): if any(k is None for k in r): error.abort('CSV parse error on line {}'.format(lineno)) yield r def index(record_seq, index_columns): try: return { tuple(r[i] for i in index_columns): r for r in record_seq } except KeyError as k: raise InvalidKeyError('invalid column name {k} as key'.format(k=k)) def save(record_seq, fieldnames, ostream): writer = csv.DictWriter(ostream, fieldnames) writer.writeheader() for r in record_seq: writer.writerow(r) def sort(recs): return sorted(recs, key=_record_key) def _record_key(r): return sorted(r.items())
Check for errors parsing the CSV as we go.
Check for errors parsing the CSV as we go. When rows bleed into each other, we can get keys that are None in the records. Now we get a user error in this case.
Python
bsd-3-clause
larsyencken/csvdiff
--- +++ @@ -5,6 +5,8 @@ # import csv + +from . import error class InvalidKeyError(Exception): @@ -16,7 +18,15 @@ if not hasattr(file_or_stream, 'read') else file_or_stream) - return csv.DictReader(istream) + return _safe_iterator(csv.DictReader(istream)) + + +def _safe_iterator(reader): + for lineno, r in enumerate(reader, 2): + if any(k is None for k in r): + error.abort('CSV parse error on line {}'.format(lineno)) + + yield r def index(record_seq, index_columns):
75e65f6144820ba216166eee4417912394f8cdca
tools/bundle.py
tools/bundle.py
#!/usr/bin/env python import os import sys import glob import getopt def file_list(path): files = [] if os.path.isfile(path): return [path] for f in os.listdir(path): new_dir = os.path.join(path, f) if os.path.isdir(new_dir) and not os.path.islink(new_dir): files.extend(file_list(new_dir)) else: if f.endswith('.lua'): files.append(path + '/' + f) return files def generate_bundle_map(module_name, path, is_base=False): t = [] for os_filename in file_list(path): bundle_filename = (os_filename.replace(path, '')[1:]) if is_base: bundle_filename = 'modules/' + bundle_filename else: bundle_filename = module_name + '/' + bundle_filename t.append({ 'os_filename': os_filename, 'bundle_filename': bundle_filename }) return t try: opts, args = getopt.getopt(sys.argv[1:], 'lb', []) except: sys.exit(2) if __name__ == '__main__': module_path = args[0] module_name = os.path.basename(module_path) for o, a in opts: if o == '-l': for path in args: print('\n'.join(file_list(path))) elif o == '-b': for path in args: print(generate_bundle_map(module_name, path))
#!/usr/bin/env python import os import sys import glob import getopt def file_list(path): files = [] if os.path.isfile(path): return [path] for f in os.listdir(path): new_dir = path + '/' + f if os.path.isdir(new_dir) and not os.path.islink(new_dir): files.extend(file_list(new_dir)) else: if f.endswith('.lua'): files.append(path + '/' + f) return files def generate_bundle_map(module_name, path, is_base=False): t = [] for os_filename in file_list(path): bundle_filename = (os_filename.replace(path, '')[1:]) if is_base: bundle_filename = 'modules/' + bundle_filename else: bundle_filename = module_name + '/' + bundle_filename t.append({ 'os_filename': os_filename, 'bundle_filename': bundle_filename }) return t try: opts, args = getopt.getopt(sys.argv[1:], 'lb', []) except: sys.exit(2) if __name__ == '__main__': module_path = args[0] module_name = os.path.basename(module_path) for o, a in opts: if o == '-l': for path in args: print('\n'.join(file_list(path))) elif o == '-b': for path in args: print(generate_bundle_map(module_name, path))
Stop using os.path.join, because Visual Studio can actually handle forward slash style paths, and the os.path method was creating mixed \\ and / style paths, b0rking everything.
Stop using os.path.join, because Visual Studio can actually handle forward slash style paths, and the os.path method was creating mixed \\ and / style paths, b0rking everything.
Python
apache-2.0
kans/birgo,kans/birgo,kans/birgo,kans/birgo,kans/birgo
--- +++ @@ -12,7 +12,7 @@ return [path] for f in os.listdir(path): - new_dir = os.path.join(path, f) + new_dir = path + '/' + f if os.path.isdir(new_dir) and not os.path.islink(new_dir): files.extend(file_list(new_dir)) else:
e7356e6ca1968d930f4fd348b48dcdd1cb9a02ab
taggit/admin.py
taggit/admin.py
from django.contrib import admin from taggit.models import Tag, TaggedItem class TaggedItemInline(admin.StackedInline): model = TaggedItem extra = 0 class TagAdmin(admin.ModelAdmin): inlines = [ TaggedItemInline ] ordering = ['name'] admin.site.register(Tag, TagAdmin)
from django.contrib import admin from taggit.models import Tag, TaggedItem class TaggedItemInline(admin.StackedInline): model = TaggedItem extra = 0 class TagAdmin(admin.ModelAdmin): inlines = [ TaggedItemInline ] ordering = ['name'] search_fields = ['name'] admin.site.register(Tag, TagAdmin)
Add ability to search tags by name
Add ability to search tags by name
Python
bsd-3-clause
theatlantic/django-taggit2,decibyte/django-taggit,theatlantic/django-taggit,decibyte/django-taggit,theatlantic/django-taggit,theatlantic/django-taggit2
--- +++ @@ -12,5 +12,6 @@ TaggedItemInline ] ordering = ['name'] + search_fields = ['name'] admin.site.register(Tag, TagAdmin)
3aba7e7f654e492fb689b8030615658cae93c2d1
txircd/modules/umode_o.py
txircd/modules/umode_o.py
from txircd.modbase import Mode class OperMode(Mode): def checkSet(self, target, param): return False # Should only be set by the OPER command; hence, reject any normal setting of the mode def checkWhoFilter(self, user, targetUser, filters, fields, channel, udata): if "o" in filters and not udata["oper"]: return {} return udata class Spawner(object): def __init__(self, ircd): self.ircd = ircd self.oper_mode = None def spawn(self): self.oper_mode = OperMode() return { "modes": { "uno": self.oper_mode }, "actions": { "wholinemodify": self.oper_mode.checkWhoFilter }, "common": True }
from txircd.modbase import Mode class OperMode(Mode): def checkSet(self, user, target, param): user.sendMessage(irc.ERR_NOPRIVILEGES, ":Permission denied - User mode o may not be set") return False # Should only be set by the OPER command; hence, reject any normal setting of the mode def checkWhoFilter(self, user, targetUser, filters, fields, channel, udata): if "o" in filters and not udata["oper"]: return {} return udata class Spawner(object): def __init__(self, ircd): self.ircd = ircd self.oper_mode = None def spawn(self): self.oper_mode = OperMode() return { "modes": { "uno": self.oper_mode }, "actions": { "wholinemodify": self.oper_mode.checkWhoFilter }, "common": True }
Fix crashing when a user attempts to set usermode +o without /oper
Fix crashing when a user attempts to set usermode +o without /oper
Python
bsd-3-clause
Heufneutje/txircd,DesertBus/txircd,ElementalAlchemist/txircd
--- +++ @@ -1,7 +1,8 @@ from txircd.modbase import Mode class OperMode(Mode): - def checkSet(self, target, param): + def checkSet(self, user, target, param): + user.sendMessage(irc.ERR_NOPRIVILEGES, ":Permission denied - User mode o may not be set") return False # Should only be set by the OPER command; hence, reject any normal setting of the mode def checkWhoFilter(self, user, targetUser, filters, fields, channel, udata):
c3b0cc681b06ab5b8d64612d5c35fb27da56beeb
spk/sabnzbd/src/app/sabnzbd.cgi.py
spk/sabnzbd/src/app/sabnzbd.cgi.py
#!/usr/local/sabnzbd/env/bin/python import os import configobj config = configobj.ConfigObj('/usr/local/sabnzbd/var/config.ini') protocol = 'https' if int(config['misc']['enable_https']) else 'http' port = int(config['misc']['https_port']) if int(config['misc']['enable_https']) else int(config['misc']['port']) print 'Location: %s://%s:%d' % (protocol, os.environ['SERVER_NAME'], port) print
#!/usr/local/sabnzbd/env/bin/python import os import configobj config = configobj.ConfigObj('/usr/local/sabnzbd/var/config.ini') protocol = 'https' if int(config['misc']['enable_https']) else 'http' https_port = int(config['misc']['port']) if len(config['misc']['https_port']) == 0 else int(config['misc']['https_port']) port = https_port if protocol == 'https' else int(config['misc']['port']) print 'Location: %s://%s:%d' % (protocol, os.environ['SERVER_NAME'], port) print
Fix port number detection in sabnzbd
Fix port number detection in sabnzbd Thanks DcR-NL
Python
bsd-3-clause
Decipher/spksrc,astroganga/spksrc,mjoe/spksrc,markbastiaans/spksrc,Decipher/spksrc,mreppen/spksrc,saschpe/spksrc,schumi2004/spksrc,hadess/spksrc,mirweb/spksrc,mirweb/spksrc,jdierkse/spksrc,adrien-delhorme/spksrc,sea3pea0/spksrc,thunfischbrot/spksrc,mirweb/spksrc,Foncekar/spksrc,thunfischbrot/spksrc,lysin/spksrc,hmflash/spksrc,mirweb/spksrc,mreppen/spksrc,andyblac/spksrc,GaetanCambier/spksrc,Grimthorr/spksrc,mjoe/spksrc,mjoe/spksrc,momiji/spksrc,xtech9/spksrc,Decipher/spksrc,mirweb/spksrc,Foncekar/spksrc,andyblac/spksrc,bwynants/spksrc,Foncekar/spksrc,riverful/spksrc,saschpe/spksrc,lost-carrier/spksrc,Pyrex-FWI/spksrc,cdcabrera/spksrc,Pyrex-FWI/spksrc,saschpe/spksrc,lost-carrier/spksrc,Foncekar/spksrc,sea3pea0/spksrc,GoodOmens83/spksrc,nickbroon/spksrc,phoenix741/spksrc,nickbroon/spksrc,hmflash/spksrc,thunfischbrot/spksrc,Foncekar/spksrc,mjoe/spksrc,demorfi/spksrc,Grimthorr/spksrc,xtech9/spksrc,phoenix741/spksrc,momiji/spksrc,astroganga/spksrc,bwynants/spksrc,lysin/spksrc,Grimthorr/spksrc,jdel/syno-packages,schumi2004/spksrc,adrien-delhorme/spksrc,markbastiaans/spksrc,sea3pea0/spksrc,mreppen/spksrc,demorfi/spksrc,mreppen/spksrc,riverful/spksrc,jdel/syno-packages,thunfischbrot/spksrc,xtech9/spksrc,xtech9/spksrc,Pyrex-FWI/spksrc,markbastiaans/spksrc,hadess/spksrc,nickbroon/spksrc,jdierkse/spksrc,Decipher/spksrc,xtech9/spksrc,jdierkse/spksrc,Decipher/spksrc,cdcabrera/spksrc,jdel/syno-packages,lost-carrier/spksrc,bwynants/spksrc,Pyrex-FWI/spksrc,GaetanCambier/spksrc,jdel/syno-packages,sangood/spksrc,lost-carrier/spksrc,JasOXIII/spksrc,bwynants/spksrc,markbastiaans/spksrc,JasOXIII/spksrc,astroganga/spksrc,mirweb/spksrc,xtech9/spksrc,lysin/spksrc,Foncekar/spksrc,riverful/spksrc,hmflash/spksrc,nickbroon/spksrc,Decipher/spksrc,mreppen/spksrc,sea3pea0/spksrc,sea3pea0/spksrc,jdierkse/spksrc,adrien-delhorme/spksrc,riverful/spksrc,lost-carrier/spksrc,mirweb/spksrc,sea3pea0/spksrc,sangood/spksrc,lost-carrier/spksrc,lost-carrier/spksrc,sangood/spksrc,Decipher/spksrc,demorfi/spksrc,sea3pea0/spksrc,mreppen/spksrc,momiji/spksrc,Dr-Bean/spksrc,mirweb/spksrc,bwynants/spksrc,lost-carrier/spksrc,cdcabrera/spksrc,Grimthorr/spksrc,adrien-delhorme/spksrc,hadess/spksrc,demorfi/spksrc,jdierkse/spksrc,Dr-Bean/spksrc,riverful/spksrc,markbastiaans/spksrc,thunfischbrot/spksrc,astroganga/spksrc,jdierkse/spksrc,GaetanCambier/spksrc,Decipher/spksrc,phoenix741/spksrc,saschpe/spksrc,Dr-Bean/spksrc,nickbroon/spksrc,cdcabrera/spksrc,bwynants/spksrc,GoodOmens83/spksrc,demorfi/spksrc,mirweb/spksrc,Foncekar/spksrc,saschpe/spksrc,riverful/spksrc,hmflash/spksrc,Grimthorr/spksrc,hadess/spksrc,Foncekar/spksrc,schumi2004/spksrc,GoodOmens83/spksrc,bwynants/spksrc,bwynants/spksrc,GaetanCambier/spksrc,astroganga/spksrc,Pyrex-FWI/spksrc,JasOXIII/spksrc,andyblac/spksrc,phoenix741/spksrc,phoenix741/spksrc,mreppen/spksrc,andyblac/spksrc,Foncekar/spksrc,sea3pea0/spksrc,cdcabrera/spksrc,Grimthorr/spksrc,Dr-Bean/spksrc,andyblac/spksrc,saschpe/spksrc,sangood/spksrc,saschpe/spksrc,lysin/spksrc,Dr-Bean/spksrc,sangood/spksrc,mjoe/spksrc,nickbroon/spksrc,astroganga/spksrc,mjoe/spksrc,bwynants/spksrc,GoodOmens83/spksrc,demorfi/spksrc,hadess/spksrc,demorfi/spksrc,astroganga/spksrc,sangood/spksrc,hmflash/spksrc,adrien-delhorme/spksrc,andyblac/spksrc,lysin/spksrc,phoenix741/spksrc,jdierkse/spksrc,hadess/spksrc,mjoe/spksrc,markbastiaans/spksrc,hadess/spksrc,thunfischbrot/spksrc,nickbroon/spksrc,riverful/spksrc,adrien-delhorme/spksrc,demorfi/spksrc,momiji/spksrc,mreppen/spksrc,Dr-Bean/spksrc,adrien-delhorme/spksrc,lysin/spksrc,phoenix741/spksrc,GoodOmens83
/spksrc,Pyrex-FWI/spksrc,momiji/spksrc,GaetanCambier/spksrc,mirweb/spksrc,JasOXIII/spksrc,GaetanCambier/spksrc,nickbroon/spksrc,riverful/spksrc,sangood/spksrc,saschpe/spksrc,cdcabrera/spksrc,JasOXIII/spksrc,phoenix741/spksrc,astroganga/spksrc,andyblac/spksrc,riverful/spksrc,thunfischbrot/spksrc,GoodOmens83/spksrc,saschpe/spksrc,sea3pea0/spksrc,GaetanCambier/spksrc,schumi2004/spksrc,sangood/spksrc,Pyrex-FWI/spksrc,lysin/spksrc,Pyrex-FWI/spksrc,lost-carrier/spksrc,schumi2004/spksrc,JasOXIII/spksrc,nickbroon/spksrc,JasOXIII/spksrc,Dr-Bean/spksrc,momiji/spksrc,GaetanCambier/spksrc,Grimthorr/spksrc,mjoe/spksrc,Pyrex-FWI/spksrc,schumi2004/spksrc,adrien-delhorme/spksrc,cdcabrera/spksrc,jdierkse/spksrc,thunfischbrot/spksrc,xtech9/spksrc,GoodOmens83/spksrc,phoenix741/spksrc,lysin/spksrc,markbastiaans/spksrc,GoodOmens83/spksrc,GaetanCambier/spksrc,cdcabrera/spksrc,schumi2004/spksrc,momiji/spksrc,schumi2004/spksrc,schumi2004/spksrc,GoodOmens83/spksrc,lysin/spksrc,thunfischbrot/spksrc,andyblac/spksrc,xtech9/spksrc,mreppen/spksrc,hadess/spksrc,andyblac/spksrc,hadess/spksrc,adrien-delhorme/spksrc,Grimthorr/spksrc,JasOXIII/spksrc,mjoe/spksrc,hmflash/spksrc,hmflash/spksrc,markbastiaans/spksrc,demorfi/spksrc,sangood/spksrc,Decipher/spksrc,hmflash/spksrc,Dr-Bean/spksrc,astroganga/spksrc,JasOXIII/spksrc,jdierkse/spksrc,hmflash/spksrc,xtech9/spksrc,markbastiaans/spksrc,cdcabrera/spksrc,Grimthorr/spksrc,momiji/spksrc
--- +++ @@ -5,7 +5,8 @@ config = configobj.ConfigObj('/usr/local/sabnzbd/var/config.ini') protocol = 'https' if int(config['misc']['enable_https']) else 'http' -port = int(config['misc']['https_port']) if int(config['misc']['enable_https']) else int(config['misc']['port']) +https_port = int(config['misc']['port']) if len(config['misc']['https_port']) == 0 else int(config['misc']['https_port']) +port = https_port if protocol == 'https' else int(config['misc']['port']) print 'Location: %s://%s:%d' % (protocol, os.environ['SERVER_NAME'], port) print
550dee3e13a0ee80d0bd9338c281e51fefdcfdc8
slack_log_handler/__init__.py
slack_log_handler/__init__.py
import traceback from logging import Handler from slacker import Chat class SlackLogHandler(Handler): def __init__(self, api_key, channel, stack_trace=False, username='Python logger', icon_url=None, icon_emoji=None): Handler.__init__(self) self.slack_chat = Chat(api_key) self.channel = channel self.stack_trace = stack_trace self.username = username self.icon_url = icon_url self.icon_emoji = icon_emoji if (icon_emoji or icon_url) else ':heavy_exclamation_mark:' if not self.channel.startswith('#'): self.channel = '#' + self.channel def emit(self, record): message = '{}'.format(record.getMessage()) if self.stack_trace and record.exc_info: message += '\n' message += '\n'.join(traceback.format_exception(*record.exc_info)) self.slack_chat.post_message( text=message, channel=self.channel, username=self.username, icon_url=self.icon_url, icon_emoji=self.icon_emoji )
import json import traceback from logging import Handler from slacker import Slacker class SlackLogHandler(Handler): def __init__(self, api_key, channel, stack_trace=False, username='Python logger', icon_url=None, icon_emoji=None): Handler.__init__(self) self.slack_chat = Slacker(api_key) self.channel = channel self.stack_trace = stack_trace self.username = username self.icon_url = icon_url self.icon_emoji = icon_emoji if (icon_emoji or icon_url) else ':heavy_exclamation_mark:' if not self.channel.startswith('#'): self.channel = '#' + self.channel def emit(self, record): message = '{}'.format(record.getMessage()) if self.stack_trace and record.exc_info: message += '\n' message += '\n'.join(traceback.format_exception(*record.exc_info)) attachments = [{ 'fallback': self.username, 'color': 'danger', 'author_name': self.username, 'title': self.username, 'text': message }] self.slack_chat.chat.post_message( channel=self.channel, username=self.username, icon_url=self.icon_url, icon_emoji=self.icon_emoji, attachments=json.dumps(attachments) )
Add format with slack attachments.
Add format with slack attachments.
Python
apache-2.0
mathiasose/slacker_log_handler
--- +++ @@ -1,13 +1,14 @@ +import json import traceback from logging import Handler -from slacker import Chat +from slacker import Slacker class SlackLogHandler(Handler): def __init__(self, api_key, channel, stack_trace=False, username='Python logger', icon_url=None, icon_emoji=None): Handler.__init__(self) - self.slack_chat = Chat(api_key) + self.slack_chat = Slacker(api_key) self.channel = channel self.stack_trace = stack_trace self.username = username @@ -23,10 +24,17 @@ message += '\n' message += '\n'.join(traceback.format_exception(*record.exc_info)) - self.slack_chat.post_message( - text=message, + attachments = [{ + 'fallback': self.username, + 'color': 'danger', + 'author_name': self.username, + 'title': self.username, + 'text': message + }] + self.slack_chat.chat.post_message( channel=self.channel, username=self.username, icon_url=self.icon_url, - icon_emoji=self.icon_emoji + icon_emoji=self.icon_emoji, + attachments=json.dumps(attachments) )
ed68bd18b88f349a7348006a2e14cdddbc993da7
script/lib/config.py
script/lib/config.py
#!/usr/bin/env python import platform import sys NODE_VERSION = 'v0.11.13' BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent' LIBCHROMIUMCONTENT_COMMIT = 'afb4570ceee2ad10f3caf5a81335a2ee11ec68a5' ARCH = { 'cygwin': '32bit', 'darwin': '64bit', 'linux2': platform.architecture()[0], 'win32': '32bit', }[sys.platform] DIST_ARCH = { '32bit': 'ia32', '64bit': 'x64', }[ARCH] TARGET_PLATFORM = { 'cygwin': 'win32', 'darwin': 'darwin', 'linux2': 'linux', 'win32': 'win32', }[sys.platform]
#!/usr/bin/env python import platform import sys NODE_VERSION = 'v0.11.13' BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent' LIBCHROMIUMCONTENT_COMMIT = 'ea1a7e85a3de1878e5656110c76f4d2d8af41c6e' ARCH = { 'cygwin': '32bit', 'darwin': '64bit', 'linux2': platform.architecture()[0], 'win32': '32bit', }[sys.platform] DIST_ARCH = { '32bit': 'ia32', '64bit': 'x64', }[ARCH] TARGET_PLATFORM = { 'cygwin': 'win32', 'darwin': 'darwin', 'linux2': 'linux', 'win32': 'win32', }[sys.platform]
Upgrade libchromiumcontent to Chrome 37.
Upgrade libchromiumcontent to Chrome 37.
Python
mit
beni55/electron,gbn972/electron,chrisswk/electron,mrwizard82d1/electron,Andrey-Pavlov/electron,yan-foto/electron,roadev/electron,abhishekgahlot/electron,MaxWhere/electron,nicholasess/electron,Jonekee/electron,synaptek/electron,edulan/electron,tincan24/electron,faizalpribadi/electron,davazp/electron,tylergibson/electron,gstack/infinium-shell,the-ress/electron,astoilkov/electron,yalexx/electron,gabrielPeart/electron,jjz/electron,gabriel/electron,dongjoon-hyun/electron,miniak/electron,jaanus/electron,ankitaggarwal011/electron,ianscrivener/electron,davazp/electron,gbn972/electron,chriskdon/electron,jannishuebl/electron,abhishekgahlot/electron,rhencke/electron,destan/electron,arusakov/electron,nagyistoce/electron-atom-shell,thingsinjars/electron,mirrh/electron,baiwyc119/electron,shaundunne/electron,arusakov/electron,pirafrank/electron,Neron-X5/electron,fritx/electron,etiktin/electron,Evercoder/electron,tonyganch/electron,Zagorakiss/electron,soulteary/electron,matiasinsaurralde/electron,chrisswk/electron,bobwol/electron,adamjgray/electron,tinydew4/electron,wolfflow/electron,icattlecoder/electron,howmuchcomputer/electron,DivyaKMenon/electron,simonfork/electron,vipulroxx/electron,mattotodd/electron,oiledCode/electron,systembugtj/electron,smczk/electron,maxogden/atom-shell,jlhbaseball15/electron,deepak1556/atom-shell,shaundunne/electron,tomashanacek/electron,jonatasfreitasv/electron,fireball-x/atom-shell,vipulroxx/electron,natgolov/electron,carsonmcdonald/electron,maxogden/atom-shell,ianscrivener/electron,bwiggs/electron,mirrh/electron,nicobot/electron,Ivshti/electron,kenmozi/electron,takashi/electron,anko/electron,bobwol/electron,edulan/electron,IonicaBizauKitchen/electron,jhen0409/electron,Zagorakiss/electron,renaesop/electron,leftstick/electron,tincan24/electron,astoilkov/electron,yalexx/electron,deed02392/electron,digideskio/electron,christian-bromann/electron,nicholasess/electron,gabriel/electron,lrlna/electron,mhkeller/electron,wan-qy/electron,cos2004/electron,zhakui/electron,Faiz7412/electron,matiasinsaurralde/electron,John-Lin/electron,brenca/electron,electron/electron,evgenyzinoviev/electron,shennushi/electron,mubassirhayat/electron,preco21/electron,tylergibson/electron,bobwol/electron,DivyaKMenon/electron,hokein/atom-shell,mjaniszew/electron,BionicClick/electron,etiktin/electron,jsutcodes/electron,LadyNaggaga/electron,gerhardberger/electron,etiktin/electron,bwiggs/electron,bpasero/electron,kokdemo/electron,bbondy/electron,subblue/electron,webmechanicx/electron,IonicaBizauKitchen/electron,chriskdon/electron,dkfiresky/electron,synaptek/electron,jiaz/electron,posix4e/electron,digideskio/electron,stevemao/electron,matiasinsaurralde/electron,gamedevsam/electron,egoist/electron,anko/electron,adamjgray/electron,mrwizard82d1/electron,beni55/electron,JussMee15/electron,cos2004/electron,adcentury/electron,tomashanacek/electron,takashi/electron,Floato/electron,gbn972/electron,medixdev/electron,cqqccqc/electron,eriser/electron,vHanda/electron,abhishekgahlot/electron,mirrh/electron,astoilkov/electron,robinvandernoord/electron,MaxWhere/electron,soulteary/electron,natgolov/electron,mattotodd/electron,brave/electron,jlhbaseball15/electron,electron/electron,electron/electron,brave/electron,MaxGraey/electron,bright-sparks/electron,jannishuebl/electron,Evercoder/electron,Rokt33r/electron,roadev/electron,dahal/electron,GoooIce/electron,aliib/electron,yalexx/electron,tylergibson/electron,nekuz0r/electron,leolujuyi/electron,faizalpribadi/electron,Faiz7412/electron,neutrous/electron,yan-foto/electron,deed02392/e
lectron,wolfflow/electron,shockone/electron,rsvip/electron,lzpfmh/electron,yan-foto/electron,adcentury/electron,greyhwndz/electron,jonatasfreitasv/electron,simonfork/electron,rajatsingla28/electron,roadev/electron,gstack/infinium-shell,dongjoon-hyun/electron,felixrieseberg/electron,fomojola/electron,miniak/electron,LadyNaggaga/electron,rhencke/electron,mattdesl/electron,chriskdon/electron,edulan/electron,kostia/electron,Rokt33r/electron,jaanus/electron,fritx/electron,sircharleswatson/electron,mhkeller/electron,noikiy/electron,farmisen/electron,vipulroxx/electron,iftekeriba/electron,soulteary/electron,destan/electron,shockone/electron,jaanus/electron,jcblw/electron,John-Lin/electron,twolfson/electron,RIAEvangelist/electron,soulteary/electron,Zagorakiss/electron,jtburke/electron,edulan/electron,saronwei/electron,pandoraui/electron,IonicaBizauKitchen/electron,brave/electron,seanchas116/electron,seanchas116/electron,deepak1556/atom-shell,astoilkov/electron,neutrous/electron,trankmichael/electron,jjz/electron,astoilkov/electron,mjaniszew/electron,dongjoon-hyun/electron,sshiting/electron,kikong/electron,bright-sparks/electron,coderhaoxin/electron,RIAEvangelist/electron,stevekinney/electron,LadyNaggaga/electron,michaelchiche/electron,mattdesl/electron,rreimann/electron,brenca/electron,saronwei/electron,farmisen/electron,pirafrank/electron,robinvandernoord/electron,bbondy/electron,voidbridge/electron,stevemao/electron,lrlna/electron,shiftkey/electron,bright-sparks/electron,natgolov/electron,bright-sparks/electron,gamedevsam/electron,jacksondc/electron,renaesop/electron,mattdesl/electron,leethomas/electron,bruce/electron,eric-seekas/electron,preco21/electron,simonfork/electron,Rokt33r/electron,beni55/electron,Zagorakiss/electron,leftstick/electron,bbondy/electron,shiftkey/electron,dongjoon-hyun/electron,arturts/electron,kokdemo/electron,neutrous/electron,posix4e/electron,greyhwndz/electron,JesselJohn/electron,fabien-d/electron,fireball-x/atom-shell,biblerule/UMCTelnetHub,jjz/electron,howmuchcomputer/electron,bpasero/electron,bpasero/electron,MaxWhere/electron,Gerhut/electron,lrlna/electron,joneit/electron,icattlecoder/electron,webmechanicx/electron,gbn972/electron,Floato/electron,wolfflow/electron,kazupon/electron,rajatsingla28/electron,Evercoder/electron,aaron-goshine/electron,pirafrank/electron,mjaniszew/electron,biblerule/UMCTelnetHub,shaundunne/electron,arturts/electron,webmechanicx/electron,jiaz/electron,simonfork/electron,MaxGraey/electron,electron/electron,christian-bromann/electron,RIAEvangelist/electron,egoist/electron,fomojola/electron,benweissmann/electron,Faiz7412/electron,mirrh/electron,gabrielPeart/electron,chriskdon/electron,nicholasess/electron,robinvandernoord/electron,jiaz/electron,shennushi/electron,bruce/electron,cqqccqc/electron,bobwol/electron,minggo/electron,JesselJohn/electron,JussMee15/electron,greyhwndz/electron,nekuz0r/electron,Jacobichou/electron,eriser/electron,xiruibing/electron,benweissmann/electron,matiasinsaurralde/electron,systembugtj/electron,IonicaBizauKitchen/electron,gstack/infinium-shell,dkfiresky/electron,takashi/electron,gamedevsam/electron,carsonmcdonald/electron,lrlna/electron,shockone/electron,joaomoreno/atom-shell,benweissmann/electron,dkfiresky/electron,baiwyc119/electron,bruce/electron,yalexx/electron,thomsonreuters/electron,vipulroxx/electron,setzer777/electron,jhen0409/electron,sky7sea/electron,stevemao/electron,yan-foto/electron,jcblw/electron,voidbridge/electron,mirrh/electron,jcblw/electron,kcrt/electron,rsvip/electron,zhakui/electron,fomojola/elec
tron,shockone/electron,rprichard/electron,evgenyzinoviev/electron,noikiy/electron,aliib/electron,yalexx/electron,Faiz7412/electron,icattlecoder/electron,thomsonreuters/electron,yan-foto/electron,zhakui/electron,pombredanne/electron,fireball-x/atom-shell,brave/muon,biblerule/UMCTelnetHub,greyhwndz/electron,gbn972/electron,cos2004/electron,adcentury/electron,noikiy/electron,destan/electron,farmisen/electron,MaxGraey/electron,tonyganch/electron,nicobot/electron,Evercoder/electron,coderhaoxin/electron,abhishekgahlot/electron,kenmozi/electron,christian-bromann/electron,fritx/electron,xfstudio/electron,rreimann/electron,howmuchcomputer/electron,natgolov/electron,farmisen/electron,fabien-d/electron,Ivshti/electron,adamjgray/electron,GoooIce/electron,SufianHassan/electron,fffej/electron,SufianHassan/electron,shennushi/electron,voidbridge/electron,wan-qy/electron,rprichard/electron,pandoraui/electron,tylergibson/electron,the-ress/electron,pombredanne/electron,bitemyapp/electron,nicobot/electron,Jonekee/electron,renaesop/electron,systembugtj/electron,bitemyapp/electron,benweissmann/electron,MaxGraey/electron,mubassirhayat/electron,aecca/electron,fabien-d/electron,ianscrivener/electron,dkfiresky/electron,tylergibson/electron,sircharleswatson/electron,seanchas116/electron,thingsinjars/electron,shennushi/electron,evgenyzinoviev/electron,leethomas/electron,simongregory/electron,arusakov/electron,JussMee15/electron,davazp/electron,baiwyc119/electron,timruffles/electron,ankitaggarwal011/electron,beni55/electron,shennushi/electron,xfstudio/electron,jacksondc/electron,digideskio/electron,matiasinsaurralde/electron,mattotodd/electron,Jacobichou/electron,twolfson/electron,kenmozi/electron,biblerule/UMCTelnetHub,joneit/electron,aliib/electron,pombredanne/electron,the-ress/electron,preco21/electron,nagyistoce/electron-atom-shell,Neron-X5/electron,jtburke/electron,joaomoreno/atom-shell,electron/electron,takashi/electron,lzpfmh/electron,iftekeriba/electron,miniak/electron,oiledCode/electron,digideskio/electron,stevemao/electron,gerhardberger/electron,smczk/electron,etiktin/electron,nicholasess/electron,leethomas/electron,gerhardberger/electron,wan-qy/electron,icattlecoder/electron,fabien-d/electron,natgolov/electron,rajatsingla28/electron,trankmichael/electron,brenca/electron,dongjoon-hyun/electron,JussMee15/electron,vHanda/electron,michaelchiche/electron,tinydew4/electron,minggo/electron,xiruibing/electron,zhakui/electron,nekuz0r/electron,vaginessa/electron,RobertJGabriel/electron,nagyistoce/electron-atom-shell,faizalpribadi/electron,jcblw/electron,aliib/electron,fomojola/electron,christian-bromann/electron,d-salas/electron,leftstick/electron,jlord/electron,thingsinjars/electron,gabrielPeart/electron,thingsinjars/electron,kenmozi/electron,shiftkey/electron,mattdesl/electron,bitemyapp/electron,fireball-x/atom-shell,tomashanacek/electron,Floato/electron,gstack/infinium-shell,aecca/electron,jacksondc/electron,jonatasfreitasv/electron,dahal/electron,robinvandernoord/electron,leftstick/electron,biblerule/UMCTelnetHub,RobertJGabriel/electron,leethomas/electron,neutrous/electron,micalan/electron,jjz/electron,vHanda/electron,trankmichael/electron,michaelchiche/electron,leethomas/electron,oiledCode/electron,wolfflow/electron,meowlab/electron,kikong/electron,leftstick/electron,jcblw/electron,jlord/electron,dkfiresky/electron,bbondy/electron,davazp/electron,SufianHassan/electron,saronwei/electron,posix4e/electron,trankmichael/electron,sky7sea/electron,jacksondc/electron,destan/electron,xfstudio/electron,Evercoder/electron,t
inydew4/electron,noikiy/electron,jtburke/electron,abhishekgahlot/electron,jsutcodes/electron,Ivshti/electron,setzer777/electron,felixrieseberg/electron,seanchas116/electron,rreimann/electron,mattotodd/electron,noikiy/electron,felixrieseberg/electron,aichingm/electron,tinydew4/electron,xfstudio/electron,rhencke/electron,arturts/electron,BionicClick/electron,systembugtj/electron,kikong/electron,voidbridge/electron,John-Lin/electron,roadev/electron,kazupon/electron,John-Lin/electron,icattlecoder/electron,lzpfmh/electron,evgenyzinoviev/electron,GoooIce/electron,simongregory/electron,jhen0409/electron,rreimann/electron,thingsinjars/electron,BionicClick/electron,eric-seekas/electron,darwin/electron,d-salas/electron,tonyganch/electron,preco21/electron,beni55/electron,michaelchiche/electron,anko/electron,arusakov/electron,kcrt/electron,nekuz0r/electron,gamedevsam/electron,cqqccqc/electron,jhen0409/electron,ervinb/electron,faizalpribadi/electron,edulan/electron,shockone/electron,mhkeller/electron,trigrass2/electron,simonfork/electron,kcrt/electron,sky7sea/electron,bpasero/electron,destan/electron,Evercoder/electron,xiruibing/electron,the-ress/electron,kazupon/electron,deepak1556/atom-shell,MaxWhere/electron,ankitaggarwal011/electron,trigrass2/electron,JussMee15/electron,vaginessa/electron,takashi/electron,trigrass2/electron,Jonekee/electron,mubassirhayat/electron,webmechanicx/electron,tomashanacek/electron,bwiggs/electron,pandoraui/electron,jaanus/electron,nekuz0r/electron,xiruibing/electron,tonyganch/electron,gerhardberger/electron,rhencke/electron,gabriel/electron,renaesop/electron,smczk/electron,simongregory/electron,sshiting/electron,Rokt33r/electron,greyhwndz/electron,chrisswk/electron,shennushi/electron,SufianHassan/electron,michaelchiche/electron,shiftkey/electron,carsonmcdonald/electron,coderhaoxin/electron,pandoraui/electron,JesselJohn/electron,Neron-X5/electron,adamjgray/electron,takashi/electron,rprichard/electron,Andrey-Pavlov/electron,iftekeriba/electron,xiruibing/electron,iftekeriba/electron,smczk/electron,leolujuyi/electron,minggo/electron,soulteary/electron,bruce/electron,fomojola/electron,wan-qy/electron,stevekinney/electron,shiftkey/electron,setzer777/electron,jacksondc/electron,gerhardberger/electron,Floato/electron,timruffles/electron,trigrass2/electron,robinvandernoord/electron,seanchas116/electron,arturts/electron,soulteary/electron,jiaz/electron,DivyaKMenon/electron,jonatasfreitasv/electron,joaomoreno/atom-shell,Gerhut/electron,davazp/electron,jjz/electron,John-Lin/electron,miniak/electron,eriser/electron,mattotodd/electron,jlord/electron,setzer777/electron,deepak1556/atom-shell,sircharleswatson/electron,thompsonemerson/electron,chrisswk/electron,medixdev/electron,arturts/electron,timruffles/electron,felixrieseberg/electron,bitemyapp/electron,IonicaBizauKitchen/electron,pombredanne/electron,Jonekee/electron,simongregory/electron,Gerhut/electron,nicobot/electron,anko/electron,deed02392/electron,zhakui/electron,vaginessa/electron,xfstudio/electron,vaginessa/electron,micalan/electron,meowlab/electron,the-ress/electron,minggo/electron,aaron-goshine/electron,twolfson/electron,aichingm/electron,JesselJohn/electron,JesselJohn/electron,stevekinney/electron,meowlab/electron,jhen0409/electron,nagyistoce/electron-atom-shell,Zagorakiss/electron,micalan/electron,leolujuyi/electron,shiftkey/electron,sky7sea/electron,neutrous/electron,thompsonemerson/electron,tonyganch/electron,jannishuebl/electron,pirafrank/electron,Jacobichou/electron,systembugtj/electron,christian-bromann/electron,fritx/e
lectron,Zagorakiss/electron,sshiting/electron,vHanda/electron,felixrieseberg/electron,gstack/infinium-shell,xfstudio/electron,JussMee15/electron,Jonekee/electron,eriser/electron,Ivshti/electron,jlord/electron,ianscrivener/electron,gbn972/electron,kikong/electron,joaomoreno/atom-shell,oiledCode/electron,BionicClick/electron,webmechanicx/electron,nekuz0r/electron,Jonekee/electron,gabriel/electron,mjaniszew/electron,egoist/electron,bruce/electron,darwin/electron,destan/electron,DivyaKMenon/electron,nicholasess/electron,shockone/electron,dahal/electron,lzpfmh/electron,davazp/electron,simongregory/electron,zhakui/electron,kazupon/electron,sshiting/electron,roadev/electron,d-salas/electron,seanchas116/electron,jaanus/electron,neutrous/electron,Faiz7412/electron,vaginessa/electron,posix4e/electron,fffej/electron,micalan/electron,twolfson/electron,aliib/electron,aichingm/electron,thomsonreuters/electron,RobertJGabriel/electron,stevemao/electron,chriskdon/electron,gabrielPeart/electron,mhkeller/electron,kostia/electron,kcrt/electron,nicholasess/electron,aichingm/electron,etiktin/electron,leolujuyi/electron,voidbridge/electron,thompsonemerson/electron,saronwei/electron,beni55/electron,RIAEvangelist/electron,faizalpribadi/electron,rsvip/electron,sshiting/electron,Gerhut/electron,synaptek/electron,electron/electron,kazupon/electron,RobertJGabriel/electron,bobwol/electron,thomsonreuters/electron,Jacobichou/electron,mrwizard82d1/electron,Jacobichou/electron,gabrielPeart/electron,rreimann/electron,arturts/electron,brave/electron,fffej/electron,Andrey-Pavlov/electron,ankitaggarwal011/electron,mubassirhayat/electron,coderhaoxin/electron,robinvandernoord/electron,joneit/electron,voidbridge/electron,meowlab/electron,matiasinsaurralde/electron,brenca/electron,GoooIce/electron,cos2004/electron,bitemyapp/electron,digideskio/electron,lzpfmh/electron,michaelchiche/electron,Gerhut/electron,rreimann/electron,dahal/electron,thomsonreuters/electron,brave/muon,ianscrivener/electron,aaron-goshine/electron,darwin/electron,d-salas/electron,kikong/electron,trankmichael/electron,sky7sea/electron,LadyNaggaga/electron,jlhbaseball15/electron,timruffles/electron,brave/muon,ankitaggarwal011/electron,Andrey-Pavlov/electron,rsvip/electron,dongjoon-hyun/electron,mhkeller/electron,stevekinney/electron,vaginessa/electron,kostia/electron,jiaz/electron,Gerhut/electron,bwiggs/electron,nicobot/electron,wolfflow/electron,timruffles/electron,ankitaggarwal011/electron,yalexx/electron,carsonmcdonald/electron,jsutcodes/electron,hokein/atom-shell,kenmozi/electron,leftstick/electron,GoooIce/electron,stevekinney/electron,mjaniszew/electron,anko/electron,pandoraui/electron,aichingm/electron,Rokt33r/electron,kcrt/electron,eriser/electron,sky7sea/electron,LadyNaggaga/electron,sshiting/electron,joneit/electron,aecca/electron,sircharleswatson/electron,synaptek/electron,kcrt/electron,setzer777/electron,gamedevsam/electron,MaxWhere/electron,fffej/electron,preco21/electron,ianscrivener/electron,leethomas/electron,carsonmcdonald/electron,bright-sparks/electron,RobertJGabriel/electron,electron/electron,the-ress/electron,subblue/electron,shaundunne/electron,tincan24/electron,fffej/electron,DivyaKMenon/electron,synaptek/electron,kokdemo/electron,nagyistoce/electron-atom-shell,eric-seekas/electron,mrwizard82d1/electron,Andrey-Pavlov/electron,gabrielPeart/electron,etiktin/electron,rsvip/electron,fomojola/electron,thompsonemerson/electron,eric-seekas/electron,aecca/electron,jtburke/electron,howmuchcomputer/electron,howmuchcomputer/electron,faizalpribadi/electr
on,bwiggs/electron,ervinb/electron,brave/electron,adcentury/electron,jannishuebl/electron,mattotodd/electron,sircharleswatson/electron,the-ress/electron,chriskdon/electron,renaesop/electron,lrlna/electron,Neron-X5/electron,wan-qy/electron,baiwyc119/electron,kokdemo/electron,setzer777/electron,bobwol/electron,joaomoreno/atom-shell,ervinb/electron,twolfson/electron,shaundunne/electron,hokein/atom-shell,Neron-X5/electron,kenmozi/electron,jlhbaseball15/electron,bright-sparks/electron,brave/muon,hokein/atom-shell,smczk/electron,jlord/electron,bpasero/electron,meowlab/electron,fireball-x/atom-shell,Jacobichou/electron,mattdesl/electron,edulan/electron,howmuchcomputer/electron,evgenyzinoviev/electron,sircharleswatson/electron,chrisswk/electron,SufianHassan/electron,bbondy/electron,lrlna/electron,medixdev/electron,brave/electron,hokein/atom-shell,ervinb/electron,aaron-goshine/electron,GoooIce/electron,aaron-goshine/electron,jiaz/electron,tincan24/electron,Neron-X5/electron,maxogden/atom-shell,gabriel/electron,adamjgray/electron,roadev/electron,kokdemo/electron,tomashanacek/electron,rajatsingla28/electron,mhkeller/electron,webmechanicx/electron,Floato/electron,rajatsingla28/electron,saronwei/electron,nicobot/electron,systembugtj/electron,jhen0409/electron,brave/muon,coderhaoxin/electron,egoist/electron,mjaniszew/electron,wolfflow/electron,minggo/electron,vipulroxx/electron,christian-bromann/electron,iftekeriba/electron,LadyNaggaga/electron,aecca/electron,icattlecoder/electron,subblue/electron,SufianHassan/electron,tinydew4/electron,meowlab/electron,subblue/electron,mubassirhayat/electron,kokdemo/electron,jannishuebl/electron,RIAEvangelist/electron,arusakov/electron,cqqccqc/electron,darwin/electron,tomashanacek/electron,gamedevsam/electron,deepak1556/atom-shell,bitemyapp/electron,natgolov/electron,ervinb/electron,John-Lin/electron,maxogden/atom-shell,farmisen/electron,IonicaBizauKitchen/electron,adcentury/electron,thompsonemerson/electron,yan-foto/electron,joneit/electron,medixdev/electron,astoilkov/electron,dahal/electron,BionicClick/electron,jsutcodes/electron,d-salas/electron,kostia/electron,mattdesl/electron,oiledCode/electron,noikiy/electron,aaron-goshine/electron,RobertJGabriel/electron,bpasero/electron,subblue/electron,gabriel/electron,twolfson/electron,d-salas/electron,renaesop/electron,smczk/electron,stevemao/electron,eric-seekas/electron,kostia/electron,mirrh/electron,trigrass2/electron,jtburke/electron,aecca/electron,fritx/electron,kazupon/electron,deed02392/electron,jannishuebl/electron,pirafrank/electron,bruce/electron,deed02392/electron,biblerule/UMCTelnetHub,trigrass2/electron,abhishekgahlot/electron,brenca/electron,iftekeriba/electron,cos2004/electron,tinydew4/electron,felixrieseberg/electron,thomsonreuters/electron,trankmichael/electron,simonfork/electron,joneit/electron,RIAEvangelist/electron,greyhwndz/electron,baiwyc119/electron,Rokt33r/electron,medixdev/electron,aliib/electron,Ivshti/electron,cos2004/electron,rhencke/electron,dkfiresky/electron,micalan/electron,xiruibing/electron,shaundunne/electron,tincan24/electron,deed02392/electron,jlhbaseball15/electron,synaptek/electron,carsonmcdonald/electron,baiwyc119/electron,adamjgray/electron,thompsonemerson/electron,benweissmann/electron,jjz/electron,posix4e/electron,jonatasfreitasv/electron,jlhbaseball15/electron,brenca/electron,cqqccqc/electron,JesselJohn/electron,bpasero/electron,rhencke/electron,joaomoreno/atom-shell,Floato/electron,jsutcodes/electron,subblue/electron,Andrey-Pavlov/electron,gerhardberger/electron,aichingm/electron,
pandoraui/electron,cqqccqc/electron,fabien-d/electron,jsutcodes/electron,preco21/electron,pombredanne/electron,medixdev/electron,maxogden/atom-shell,brave/muon,bbondy/electron,tincan24/electron,evgenyzinoviev/electron,arusakov/electron,jacksondc/electron,digideskio/electron,rajatsingla28/electron,simongregory/electron,jcblw/electron,benweissmann/electron,rprichard/electron,posix4e/electron,coderhaoxin/electron,miniak/electron,anko/electron,tonyganch/electron,mrwizard82d1/electron,dahal/electron,bwiggs/electron,ervinb/electron,pirafrank/electron,adcentury/electron,darwin/electron,MaxWhere/electron,miniak/electron,tylergibson/electron,egoist/electron,mrwizard82d1/electron,thingsinjars/electron,vHanda/electron,fffej/electron,fritx/electron,minggo/electron,eric-seekas/electron,kostia/electron,vipulroxx/electron,stevekinney/electron,BionicClick/electron,egoist/electron,jaanus/electron,lzpfmh/electron,jonatasfreitasv/electron,wan-qy/electron,pombredanne/electron,saronwei/electron,micalan/electron,leolujuyi/electron,DivyaKMenon/electron,jtburke/electron,oiledCode/electron,farmisen/electron,eriser/electron,leolujuyi/electron,gerhardberger/electron,MaxGraey/electron,vHanda/electron
--- +++ @@ -5,7 +5,7 @@ NODE_VERSION = 'v0.11.13' BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent' -LIBCHROMIUMCONTENT_COMMIT = 'afb4570ceee2ad10f3caf5a81335a2ee11ec68a5' +LIBCHROMIUMCONTENT_COMMIT = 'ea1a7e85a3de1878e5656110c76f4d2d8af41c6e' ARCH = { 'cygwin': '32bit',
f20156beb47f860646f31b46ff69879e190d220d
scripts/postbuild.py
scripts/postbuild.py
#!/usr/bin/python3 import sys import jenkins from firebase import firebase JENKINS_URL = '' # Enter Jenkins URL like http://localhost:8080 JENKINS_USERNAME = '' # Enter available Jenkins username JENKINS_APITOKEN = '' # Enter Jenkins API token (or password if Jenkins < 1.5) FIREBASE_DSN = '' # Enter your firebase domain FIREBASE_INVALID_CHARSET = '.$#[]/' if __name__ == "__main__": build_number = int(sys.argv[1]) job_name = sys.argv[2] server = jenkins.Jenkins(JENKINS_URL, username=JENKINS_USERNAME, password=JENKINS_APITOKEN) build_info = server.get_build_info(job_name, build_number) firebase = firebase.FirebaseApplication(FIREBASE_DSN) # Remove invalid character for firebase firebase_job_name = job_name for ic in FIREBASE_INVALID_CHARSET: if ic in firebase_job_name: firebase_job_name = firebase_job_name.replace(ic, '') # Post new job result to firebase data = {'result': build_info['result'], 'timestamp': build_info['timestamp']} firebase.put('/job/' + firebase_job_name, build_number, data)
#!/usr/bin/python3 import sys import jenkins from firebase import firebase JENKINS_URL = '' # Enter Jenkins URL like http://localhost:8080 JENKINS_USERNAME = '' # Enter available Jenkins username JENKINS_APITOKEN = '' # Enter Jenkins API token (or password if Jenkins < 1.5) FIREBASE_DSN = '' # Enter your firebase domain FIREBASE_INVALID_CHARSET = '.$#[]/' if __name__ == "__main__": build_number = int(sys.argv[1]) job_name = sys.argv[2] server = jenkins.Jenkins(JENKINS_URL, username=JENKINS_USERNAME, password=JENKINS_APITOKEN) build_info = server.get_build_info(job_name, build_number) console_output = server.get_build_console_output(job_name, build_number) firebase = firebase.FirebaseApplication(FIREBASE_DSN) # Remove invalid character for firebase firebase_job_name = job_name for ic in FIREBASE_INVALID_CHARSET: if ic in firebase_job_name: firebase_job_name = firebase_job_name.replace(ic, '') # Post new job result to firebase data = {'result': build_info['result'], 'timestamp': build_info['timestamp']} firebase.put('/job/' + firebase_job_name, build_number, data) # Post new job console output to firebase data = {'output': console_output} firebase.put('/job_console/' + firebase_job_name, build_number, data)
Add job console output to firebase
Add job console output to firebase
Python
mpl-2.0
MDTsai/webcompat-system-addon-autotest
--- +++ @@ -16,6 +16,7 @@ server = jenkins.Jenkins(JENKINS_URL, username=JENKINS_USERNAME, password=JENKINS_APITOKEN) build_info = server.get_build_info(job_name, build_number) + console_output = server.get_build_console_output(job_name, build_number) firebase = firebase.FirebaseApplication(FIREBASE_DSN) @@ -28,3 +29,7 @@ # Post new job result to firebase data = {'result': build_info['result'], 'timestamp': build_info['timestamp']} firebase.put('/job/' + firebase_job_name, build_number, data) + + # Post new job console output to firebase + data = {'output': console_output} + firebase.put('/job_console/' + firebase_job_name, build_number, data)
374bd4881e00c2605f28ea816fa94468a76f2621
jps/utils.py
jps/utils.py
import json from .publisher import Publisher from .common import DEFAULT_PUB_PORT from .common import DEFAULT_HOST from .env import get_master_host class JsonMultiplePublisher(object): '''publish multiple topics by one json message Example: >>> p = JsonMultiplePublisher() >>> p.publish('{"topic1": 1.0, "topic2": {"x": 0.1}}') ''' def __init__(self, host=get_master_host(), pub_port=DEFAULT_PUB_PORT): self._pub = Publisher('*', host=host, pub_port=pub_port) def publish(self, json_msg): ''' json_msg = '{"topic1": 1.0, "topic2": {"x": 0.1}}' ''' pyobj = json.loads(json_msg) for topic, value in pyobj.items(): msg = '{topic} {data}'.format(topic=topic, data=json.dumps(value)) self._pub.publish(msg)
import json from .publisher import Publisher from .common import DEFAULT_PUB_PORT from .common import DEFAULT_HOST from .env import get_master_host class JsonMultiplePublisher(object): '''publish multiple topics by one json message Example: >>> p = JsonMultiplePublisher() >>> p.publish('{"topic1": 1.0, "topic2": {"x": 0.1}}') ''' def __init__(self, host=get_master_host(), pub_port=DEFAULT_PUB_PORT): self._pub = Publisher('*', host=host, pub_port=pub_port) def publish(self, json_msg): ''' json_msg = '{"topic1": 1.0, "topic2": {"x": 0.1}}' ''' pyobj = json.loads(json_msg) for topic, value in pyobj.items(): msg = '{topic} {data}'.format(topic=topic, data=json.dumps(value)) self._pub.publish(msg) class MultiplePublisher(object): def __init__(self, base_topic_name): self._publishers = {} self._base_topic_name = base_topic_name def publish(self, msg, topic_suffix=''): if topic_suffix not in self._publishers: self._publishers[topic_suffix] = Publisher(self._base_topic_name + topic_suffix) self._publishers[topic_suffix].publish(msg)
Add MultiplePublisher to handle topic name suffix
Add MultiplePublisher to handle topic name suffix
Python
apache-2.0
OTL/jps
--- +++ @@ -27,3 +27,14 @@ for topic, value in pyobj.items(): msg = '{topic} {data}'.format(topic=topic, data=json.dumps(value)) self._pub.publish(msg) + + +class MultiplePublisher(object): + def __init__(self, base_topic_name): + self._publishers = {} + self._base_topic_name = base_topic_name + + def publish(self, msg, topic_suffix=''): + if topic_suffix not in self._publishers: + self._publishers[topic_suffix] = Publisher(self._base_topic_name + topic_suffix) + self._publishers[topic_suffix].publish(msg)
48dc53c5c92a49fca959a962434bee6c2810a0c8
tensorboard_plugin_wit/pip_package/setup.py
tensorboard_plugin_wit/pip_package/setup.py
# Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== from __future__ import absolute_import from __future__ import division from __future__ import print_function import setuptools setuptools.setup( name="tensorboard_plugin_wit", version="1.6.0", description="What-If Tool TensorBoard plugin.", packages=setuptools.find_packages(), package_data={ "tensorboard_plugin_wit": ["static/**"], }, entry_points={ "tensorboard_plugins": [ "wit = tensorboard_plugin_wit.wit_plugin_loader:WhatIfToolPluginLoader", ], }, )
# Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== from __future__ import absolute_import from __future__ import division from __future__ import print_function import setuptools setuptools.setup( name="tensorboard_plugin_wit", version="1.6.0", description="What-If Tool TensorBoard plugin.", packages=setuptools.find_packages(), license='Apache 2.0', package_data={ "tensorboard_plugin_wit": ["static/**"], }, entry_points={ "tensorboard_plugins": [ "wit = tensorboard_plugin_wit.wit_plugin_loader:WhatIfToolPluginLoader", ], }, )
Add missing license to pip package
Add missing license to pip package
Python
apache-2.0
PAIR-code/what-if-tool,PAIR-code/what-if-tool,PAIR-code/what-if-tool,pair-code/what-if-tool,pair-code/what-if-tool,PAIR-code/what-if-tool,pair-code/what-if-tool,pair-code/what-if-tool,PAIR-code/what-if-tool,pair-code/what-if-tool
--- +++ @@ -25,6 +25,7 @@ version="1.6.0", description="What-If Tool TensorBoard plugin.", packages=setuptools.find_packages(), + license='Apache 2.0', package_data={ "tensorboard_plugin_wit": ["static/**"], },
937a5e32c77ca57917d60a891616fbcf19ab19f9
respite/utils.py
respite/utils.py
from django import forms def generate_form(model): """ Generate a form from a model. Arguments: model -- A Django model. """ _model = model class Form(forms.ModelForm): class Meta: model = _model return Form def parse_http_accept_header(header): """ Return a list of content types listed in the HTTP Accept header ordered by quality. Arguments: header -- A string describing the contents of the HTTP Accept header. """ components = header.split(',') l = [] for component in components: if ';' in component: subcomponents = component.split(';') l.append( ( subcomponents[0], # eg. 'text/html' subcomponents[1][2:] # eg. 'q=0.9' ) ) else: l.append((component, '1')) l.sort( key = lambda i: i[1], reverse = True ) content_types = [] for i in l: content_types.append(i[0]) return content_types
from django import forms def generate_form(model): """ Generate a form from a model. Arguments: model -- A Django model. """ _model = model class Form(forms.ModelForm): class Meta: model = _model return Form def parse_http_accept_header(header): """ Return a list of content types listed in the HTTP Accept header ordered by quality. Arguments: header -- A string describing the contents of the HTTP Accept header. """ components = [item.strip() for item in header.split(',')] l = [] for component in components: if ';' in component: subcomponents = [item.strip() for item in component.split(';')] l.append( ( subcomponents[0], # eg. 'text/html' subcomponents[1][2:] # eg. 'q=0.9' ) ) else: l.append((component, '1')) l.sort( key = lambda i: i[1], reverse = True ) content_types = [] for i in l: content_types.append(i[0]) return content_types
Fix a bug that caused HTTP Accept headers with whitespace to be parsed incorrectly
Fix a bug that caused HTTP Accept headers with whitespace to be parsed incorrectly
Python
mit
jgorset/django-respite,jgorset/django-respite,jgorset/django-respite
--- +++ @@ -21,12 +21,12 @@ Arguments: header -- A string describing the contents of the HTTP Accept header. """ - components = header.split(',') + components = [item.strip() for item in header.split(',')] l = [] for component in components: if ';' in component: - subcomponents = component.split(';') + subcomponents = [item.strip() for item in component.split(';')] l.append( ( subcomponents[0], # eg. 'text/html'
7c3ac5adc33d2048f28a96d8145e71a4c12518cc
udata/__init__.py
udata/__init__.py
#!/usr/bin/env python # -*- coding: utf-8 -*- ''' uData ''' from __future__ import unicode_literals __version__ = '1.5.1.dev' __description__ = 'Open data portal'
#!/usr/bin/env python # -*- coding: utf-8 -*- ''' uData ''' from __future__ import unicode_literals __version__ = '1.6.0.dev' __description__ = 'Open data portal'
Set base version to 1.6
Set base version to 1.6
Python
agpl-3.0
etalab/udata,opendatateam/udata,opendatateam/udata,opendatateam/udata,etalab/udata,etalab/udata
--- +++ @@ -5,5 +5,5 @@ ''' from __future__ import unicode_literals -__version__ = '1.5.1.dev' +__version__ = '1.6.0.dev' __description__ = 'Open data portal'
a021f279341eb15c17597200d4bbe97a98034c54
fate_flow/examples/test_inference.py
fate_flow/examples/test_inference.py
# -*-coding:utf8 -*- import json import requests import time import uuid import datetime import time ids = ["18576635456", "13512345432"] url1 = "http://172.16.153.71:8059/federation/1.0/inference" for i in range(2): request_data_tmp = { "head": { "serviceId": "test_model_service", "applyId": "209090900991", }, "body": { "featureData": { "phone_num": ids[i], }, "sendToRemoteFeatureData": { "device_type": "imei", "phone_num": ids[i], "encrypt_type": "raw" } } } headers = {"Content-Type": "application/json"} response = requests.post(url1, json=request_data_tmp, headers=headers) print("url地址:", url1) print("请求信息:\n", request_data_tmp) print() print("响应信息:\n", response.text) print() #time.sleep(0.1)
# -*-coding:utf8 -*- import json import requests import time import uuid import datetime import time ids = ["18576635456", "13512345432"] url1 = "http://127.0.0.1:8059/federation/1.0/inference" for i in range(2): request_data_tmp = { "head": { "serviceId": "test_model_service", "applyId": "209090900991", }, "body": { "featureData": { "phone_num": ids[i], }, "sendToRemoteFeatureData": { "device_type": "imei", "phone_num": ids[i], "encrypt_type": "raw" } } } headers = {"Content-Type": "application/json"} response = requests.post(url1, json=request_data_tmp, headers=headers) print("url地址:", url1) print("请求信息:\n", request_data_tmp) print() print("响应信息:\n", response.text) print() #time.sleep(0.1)
Remove sensitive information from the example
Remove sensitive information from the example
Python
apache-2.0
FederatedAI/FATE,FederatedAI/FATE,FederatedAI/FATE
--- +++ @@ -8,7 +8,7 @@ ids = ["18576635456", "13512345432"] -url1 = "http://172.16.153.71:8059/federation/1.0/inference" +url1 = "http://127.0.0.1:8059/federation/1.0/inference" for i in range(2): request_data_tmp = {
c9392a6578b0894dff7a5407410e8892e9f3ae6d
win_unc/validators.py
win_unc/validators.py
from win_unc.sanitizors import sanitize_username, sanitize_unc_path def is_valid_drive_letter(string): """ Drive letters are one character in length and between "A" and "Z". Case does not matter. """ return (len(string) == 1 and string[0].isalpha()) def is_valid_unc_path(string): """ Valid UNC paths are at least three characters long, begin with "\\", do not start or end with whitepsace, and do not contain certain invalid characters (see `sanitize_unc_path`). """ return (len(string) > 2 and string.startswith('\\\\') and string == string.strip() and string == sanitize_unc_path(string)) def is_valid_username(string): """ A valid Windows username (logon) is a non-empty string that does not start or end with whitespace, and does not contain certain invalid characters (see `sanitize_username`). """ return (len(string) > 0 and string == string.strip() and string == sanitize_username(string))
from win_unc.internal.utils import take_while from win_unc.sanitizors import sanitize_username, sanitize_unc_path def is_valid_drive_letter(string): """ Drive letters are one character in length and between "A" and "Z". Case does not matter. """ return (len(string) == 1 and string[0].isalpha()) def is_valid_unc_path(string): """ Valid UNC paths are at least three characters long, begin with exactly two backslashes, not start or end with whitepsace, and do not contain certain invalid characters (see `sanitize_unc_path`). """ return (len(string) > 2 and len(take_while(lambda c: c == '\\', string)) == 2 and string == string.strip() and string == sanitize_unc_path(string)) def is_valid_username(string): """ A valid Windows username (logon) is a non-empty string that does not start or end with whitespace, and does not contain certain invalid characters (see `sanitize_username`). """ return (len(string) > 0 and string == string.strip() and string == sanitize_username(string))
Fix bad refactor of is_valid_unc_path
Fix bad refactor of is_valid_unc_path
Python
mit
nithinphilips/py_win_unc,CovenantEyes/py_win_unc
--- +++ @@ -1,3 +1,4 @@ +from win_unc.internal.utils import take_while from win_unc.sanitizors import sanitize_username, sanitize_unc_path @@ -11,11 +12,12 @@ def is_valid_unc_path(string): """ - Valid UNC paths are at least three characters long, begin with "\\", do not start or end with - whitepsace, and do not contain certain invalid characters (see `sanitize_unc_path`). + Valid UNC paths are at least three characters long, begin with exactly two backslashes, not + start or end with whitepsace, and do not contain certain invalid characters + (see `sanitize_unc_path`). """ return (len(string) > 2 - and string.startswith('\\\\') + and len(take_while(lambda c: c == '\\', string)) == 2 and string == string.strip() and string == sanitize_unc_path(string))
1d13bd71ff105d540c3af166056cb0b8731a3417
wooey/migrations/0037_populate-jsonfield.py
wooey/migrations/0037_populate-jsonfield.py
# -*- coding: utf-8 -*- # Generated by Django 1.9.13 on 2018-03-04 23:14 from __future__ import unicode_literals import json from django.db import migrations def populate_default(apps, schema_editor): ScriptParameter = apps.get_model('wooey', 'ScriptParameter') for obj in ScriptParameter.objects.all(): try: obj.default = json.loads(obj._default) except Exception: obj.default = obj._default obj.save() class Migration(migrations.Migration): dependencies = [ ('wooey', '0036_add-jsonfield'), ] operations = [ migrations.RunPython(populate_default) ]
# -*- coding: utf-8 -*- # Generated by Django 1.9.13 on 2018-03-04 23:14 from __future__ import unicode_literals import json from django.db import migrations def populate_default(apps, schema_editor): ScriptParameter = apps.get_model('wooey', 'ScriptParameter') for obj in ScriptParameter.objects.all(): try: obj.default = json.loads(obj._default) except Exception: obj.default = obj._default obj.save() def reverse_populate_default(apps, schema_editor): ScriptParameter = apps.get_model('wooey', 'ScriptParameter') for obj in ScriptParameter.objects.all(): obj._default = json.dumps(obj.default) obj.save() class Migration(migrations.Migration): dependencies = [ ('wooey', '0036_add-jsonfield'), ] operations = [ migrations.RunPython(populate_default, reverse_populate_default) ]
Add reverse to data migration
Add reverse to data migration
Python
bsd-3-clause
wooey/Wooey,wooey/Wooey,wooey/Wooey,wooey/Wooey
--- +++ @@ -16,6 +16,13 @@ obj.save() +def reverse_populate_default(apps, schema_editor): + ScriptParameter = apps.get_model('wooey', 'ScriptParameter') + for obj in ScriptParameter.objects.all(): + obj._default = json.dumps(obj.default) + obj.save() + + class Migration(migrations.Migration): dependencies = [ @@ -23,5 +30,5 @@ ] operations = [ - migrations.RunPython(populate_default) + migrations.RunPython(populate_default, reverse_populate_default) ]
fea2c0bc02a8323ad6c759ca63663499a538186e
onnx/__init__.py
onnx/__init__.py
from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from .onnx_ml_pb2 import * # noqa from .version import version as __version__ # noqa import sys def load(obj): ''' Loads a binary protobuf that stores onnx graph @params Takes a file-like object (has "read" function) or a string containing a file name @return ONNX ModelProto object ''' model = ModelProto() if hasattr(obj, 'read') and callable(obj.read): model.ParseFromString(obj.read()) else: with open(obj, 'rb') as f: model.ParseFromString(f.read()) return model
from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from .onnx_ml_pb2 import * # noqa from .version import version as __version__ # noqa # Import common subpackages so they're available when you 'import onnx' import onnx.helper # noqa import onnx.checker # noqa import onnx.defs # noqa import sys def load(obj): ''' Loads a binary protobuf that stores onnx graph @params Takes a file-like object (has "read" function) or a string containing a file name @return ONNX ModelProto object ''' model = ModelProto() if hasattr(obj, 'read') and callable(obj.read): model.ParseFromString(obj.read()) else: with open(obj, 'rb') as f: model.ParseFromString(f.read()) return model
Undo BC-breaking change, restore 'import onnx' providing submodules.
Undo BC-breaking change, restore 'import onnx' providing submodules. Signed-off-by: Edward Z. Yang <dbd597f5635f432486c5d365e9bb585b3eaa1853@fb.com>
Python
apache-2.0
onnx/onnx,onnx/onnx,onnx/onnx,onnx/onnx
--- +++ @@ -5,6 +5,11 @@ from .onnx_ml_pb2 import * # noqa from .version import version as __version__ # noqa + +# Import common subpackages so they're available when you 'import onnx' +import onnx.helper # noqa +import onnx.checker # noqa +import onnx.defs # noqa import sys
81faa7704fb355dd16674d4ed089e0ced34c24c6
rflo/start.py
rflo/start.py
import ioflo.app.run import os class Manager(object): ''' Manage the main ioflo process ''' def __init__(self): self.behaviors = ['rflo.config', 'rflo.roads'] self.floscript = os.path.join(os.path.dirname(__file__), 'raft.flo') def start(self): ioflo.app.run.start( name='rflo', period=0.01, stamp=0.0, filepath=self.floscript, behaviors=self.behaviors, verbose=2, )
import ioflo.app.run import os class Manager(object): ''' Manage the main ioflo process ''' def __init__(self): self.behaviors = ['rflo.config', 'rflo.roads', 'rflo.router'] self.floscript = os.path.join(os.path.dirname(__file__), 'raft.flo') def start(self): ioflo.app.run.start( name='rflo', period=0.01, stamp=0.0, filepath=self.floscript, behaviors=self.behaviors, verbose=2, )
Add router to the behaviors lookup
Add router to the behaviors lookup
Python
apache-2.0
thatch45/rflo
--- +++ @@ -7,7 +7,7 @@ Manage the main ioflo process ''' def __init__(self): - self.behaviors = ['rflo.config', 'rflo.roads'] + self.behaviors = ['rflo.config', 'rflo.roads', 'rflo.router'] self.floscript = os.path.join(os.path.dirname(__file__), 'raft.flo') def start(self):
28181d9bcf7aa597b88507871ffb31f4028eb67c
src/pretix/testutils/settings.py
src/pretix/testutils/settings.py
import atexit import os import tempfile tmpdir = tempfile.TemporaryDirectory() os.environ.setdefault('DATA_DIR', tmpdir.name) from pretix.settings import * # NOQA DATA_DIR = tmpdir.name LOG_DIR = os.path.join(DATA_DIR, 'logs') MEDIA_ROOT = os.path.join(DATA_DIR, 'media') atexit.register(tmpdir.cleanup) EMAIL_BACKEND = 'django.core.mail.outbox' COMPRESS_ENABLED = COMPRESS_OFFLINE = False PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher'] # Disable celery CELERY_ALWAYS_EAGER = True HAS_CELERY = False # Don't use redis SESSION_ENGINE = "django.contrib.sessions.backends.db" HAS_REDIS = False CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', } }
import atexit import os import tempfile tmpdir = tempfile.TemporaryDirectory() os.environ.setdefault('DATA_DIR', tmpdir.name) from pretix.settings import * # NOQA DATA_DIR = tmpdir.name LOG_DIR = os.path.join(DATA_DIR, 'logs') MEDIA_ROOT = os.path.join(DATA_DIR, 'media') atexit.register(tmpdir.cleanup) EMAIL_BACKEND = 'django.core.mail.outbox' COMPRESS_ENABLED = COMPRESS_OFFLINE = False DEBUG = True PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher'] # Disable celery CELERY_ALWAYS_EAGER = True HAS_CELERY = False # Don't use redis SESSION_ENGINE = "django.contrib.sessions.backends.db" HAS_REDIS = False CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', } }
Enable DEBUG when running the test suite
Enable DEBUG when running the test suite
Python
apache-2.0
Flamacue/pretix,Flamacue/pretix,Flamacue/pretix,Flamacue/pretix
--- +++ @@ -17,6 +17,8 @@ COMPRESS_ENABLED = COMPRESS_OFFLINE = False +DEBUG = True + PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher'] # Disable celery
c1efaefbe6a098e3f74bed20ff55a4307ec90ddd
dynamic_requirements.py
dynamic_requirements.py
install_requires = [ 'Dockets>=0.3.4,<0.4.0', 'boto>=2.26.0', ] test_requires = [ 'nose>=1.3.0,<2.0.0', 'mock>=1.0.0,<2.0.0', 'redis>=2.10.0,<3.0.0', 'moto>=0.4.1', ]
install_requires = [ 'Dockets>=0.4.0,<0.5.0', 'boto>=2.26.0', ] test_requires = [ 'nose>=1.3.0,<2.0.0', 'mock>=1.0.0,<2.0.0', 'redis>=2.10.0,<3.0.0', 'moto>=0.4.1', ]
Use new version of Dockets
Use new version of Dockets
Python
mit
gamechanger/deferrable
--- +++ @@ -1,5 +1,5 @@ install_requires = [ - 'Dockets>=0.3.4,<0.4.0', + 'Dockets>=0.4.0,<0.5.0', 'boto>=2.26.0', ]
da1a0f5e7ffcbe37cdee484b452b5376049dd1e5
python/django.py
python/django.py
""" Django middleware that enables the MDK. This is old-style (Django <1.10) middleware. Please see https://docs.djangoproject.com/en/1.10/topics/http/middleware/#upgrading-middleware if you're using Django 1.10. """ import atexit from traceback import format_exception_only from mdk import start class MDKSessionMiddleware(object): """ Add an MDK session to the Django request, as well as circuit breaker support. The request object will get a ``mdk_session`` attribute added to it. """ def __init__(self): self.mdk = start() atexit.register(self.mdk.stop) def process_request(self, request): request.mdk_session = self.mdk.join( request.META.get("HTTP_X_MDK_CONTEXT")) request.mdk_session.start_interaction() def process_response(self, request, response): request.mdk_session.finish_interaction() del request.mdk_session return response def process_exception(self, request, exception): request.mdk_session.fail_interaction( "".join(format_exception_only(exception.__class__, exception)))
""" Django middleware that enables the MDK. """ import atexit from traceback import format_exception_only from mdk import start # Django 1.10 new-style middleware compatibility: try: from django.utils.deprecation import MiddlewareMixin except ImportError: MiddlewareMixin = object class MDKSessionMiddleware(MiddlewareMixin): """ Add an MDK session to the Django request, as well as circuit breaker support. The request object will get a ``mdk_session`` attribute added to it. """ def __init__(self, *args, **kwargs): MiddlewareMixin.__init__(self, *args, **kwargs) self.mdk = start() atexit.register(self.mdk.stop) def process_request(self, request): request.mdk_session = self.mdk.join( request.META.get("HTTP_X_MDK_CONTEXT")) request.mdk_session.start_interaction() def process_response(self, request, response): request.mdk_session.finish_interaction() del request.mdk_session return response def process_exception(self, request, exception): request.mdk_session.fail_interaction( "".join(format_exception_only(exception.__class__, exception)))
Work with Django 1.10 as well.
Work with Django 1.10 as well.
Python
apache-2.0
datawire/mdk,datawire/mdk,datawire/mdk,datawire/mdk
--- +++ @@ -1,9 +1,5 @@ """ Django middleware that enables the MDK. - -This is old-style (Django <1.10) middleware. Please see -https://docs.djangoproject.com/en/1.10/topics/http/middleware/#upgrading-middleware -if you're using Django 1.10. """ import atexit @@ -11,15 +7,22 @@ from mdk import start +# Django 1.10 new-style middleware compatibility: +try: + from django.utils.deprecation import MiddlewareMixin +except ImportError: + MiddlewareMixin = object -class MDKSessionMiddleware(object): + +class MDKSessionMiddleware(MiddlewareMixin): """ Add an MDK session to the Django request, as well as circuit breaker support. The request object will get a ``mdk_session`` attribute added to it. """ - def __init__(self): + def __init__(self, *args, **kwargs): + MiddlewareMixin.__init__(self, *args, **kwargs) self.mdk = start() atexit.register(self.mdk.stop)
a3357cd4bb0859f480fa91f50604a2f129431096
eduid_signup/vccs.py
eduid_signup/vccs.py
from pwgen import pwgen from re import findall import vccs_client def generate_password(settings, credential_id, email): """ Generate a new password credential and add it to the VCCS authentication backend. The salt returned needs to be saved for use in subsequent authentications using this password. The password is returned so that it can be conveyed to the user. :param settings: settings dict :param credential_id: VCCS credential_id as string :param email: user e-mail address as string :return: (password, salt) both strings """ password = pwgen(settings.get('password_length'), no_symbols = True) factor = vccs_client.VCCSPasswordFactor(password, credential_id = credential_id) vccs = vccs_client.VCCSClient(base_url = settings.get('vccs_url')) vccs.add_credentials(email, [factor]) return (_human_readable(password), factor.salt) def _human_readable(password): """ Format a random password more readable to humans (groups of four characters). :param password: string :return: readable password as string :rtype: string """ regexp = '.{,4}' parts = findall(regexp, password) return ' '.join(parts)
from pwgen import pwgen from re import findall import vccs_client def generate_password(settings, credential_id, email): """ Generate a new password credential and add it to the VCCS authentication backend. The salt returned needs to be saved for use in subsequent authentications using this password. The password is returned so that it can be conveyed to the user. :param settings: settings dict :param credential_id: VCCS credential_id as string :param email: user e-mail address as string :return: (password, salt) both strings """ password = pwgen(settings.get('password_length'), no_capitalize = True, no_symbols = True) factor = vccs_client.VCCSPasswordFactor(password, credential_id = credential_id) vccs = vccs_client.VCCSClient(base_url = settings.get('vccs_url')) vccs.add_credentials(email, [factor]) return (_human_readable(password), factor.salt) def _human_readable(password): """ Format a random password more readable to humans (groups of four characters). :param password: string :return: readable password as string :rtype: string """ regexp = '.{,4}' parts = findall(regexp, password) return ' '.join(parts)
Exclude upper case letters from generated passwords.
Exclude upper case letters from generated passwords. 12 character passwords from the set a-z0-9 have more bits of entropy (62) than 10 character passwords from the set a-zA-Z0-9 (60), and are probably perceived as nicer by the users too (less ambiguity, easier to type on smartphones and the like).
Python
bsd-3-clause
SUNET/eduid-signup,SUNET/eduid-signup,SUNET/eduid-signup
--- +++ @@ -16,7 +16,7 @@ :param email: user e-mail address as string :return: (password, salt) both strings """ - password = pwgen(settings.get('password_length'), no_symbols = True) + password = pwgen(settings.get('password_length'), no_capitalize = True, no_symbols = True) factor = vccs_client.VCCSPasswordFactor(password, credential_id = credential_id) vccs = vccs_client.VCCSClient(base_url = settings.get('vccs_url'))
b03ed6307bd1354b931cdd993361d0a40a1d6850
api/init/graphqlapi/proxy.py
api/init/graphqlapi/proxy.py
import graphqlapi.utils as utils from graphql.parser import GraphQLParser from graphqlapi.interceptor import ExecuteBatch, TestDataSource from graphqlapi.exceptions import RequestException interceptors = [ ExecuteBatch(), TestDataSource() ] def proxy_request(payload: dict): graphql_ast = parse_query(payload['query']) # Execute request on GraphQL API status, data = utils.execute_graphql_request(payload['query']) for interceptor in interceptors: if interceptor.can_handle(graphql_ast): data = interceptor.after_request(graphql_ast, status, data) return 200 if status == 200 else 500, data def parse_query(payload_query: str): try: return GraphQLParser().parse(payload_query) except Exception: raise RequestException(400, 'Invalid GraphQL query')
import graphqlapi.utils as utils from graphqlapi.exceptions import RequestException from graphqlapi.interceptor import ExecuteBatch, TestDataSource from graphql.parser import GraphQLParser interceptors = [ ExecuteBatch(), TestDataSource() ] def proxy_request(payload: dict): graphql_ast = parse_query(payload['query']) # Execute request on GraphQL API status, data = utils.execute_graphql_request(payload['query']) for interceptor in interceptors: if interceptor.can_handle(graphql_ast): data = interceptor.after_request(graphql_ast, status, data) return 200 if status == 200 else 500, data def parse_query(payload_query: str): try: return GraphQLParser().parse(payload_query) except Exception: raise RequestException(400, 'Invalid GraphQL query')
Reorder imports in alphabetical order
Reorder imports in alphabetical order
Python
apache-2.0
alexisrolland/data-quality,alexisrolland/data-quality,alexisrolland/data-quality,alexisrolland/data-quality
--- +++ @@ -1,7 +1,7 @@ import graphqlapi.utils as utils +from graphqlapi.exceptions import RequestException +from graphqlapi.interceptor import ExecuteBatch, TestDataSource from graphql.parser import GraphQLParser -from graphqlapi.interceptor import ExecuteBatch, TestDataSource -from graphqlapi.exceptions import RequestException interceptors = [
f95aa5b36a354fe3cfd94b43d8f0f6346ec400de
soapypower/threadpool.py
soapypower/threadpool.py
import os, queue, concurrent.futures class ThreadPoolExecutor(concurrent.futures.ThreadPoolExecutor): """ThreadPoolExecutor which allows setting max. work queue size""" def __init__(self, max_workers=0, thread_name_prefix='', max_queue_size=0): super().__init__(max_workers or os.cpu_count() or 1, thread_name_prefix) self.max_queue_size = max_queue_size or self._max_workers * 10 if self.max_queue_size > 0: self._work_queue = queue.Queue(self.max_queue_size) self.max_queue_size_reached = 0 def submit(self, fn, *args, **kwargs): """Submits a callable to be executed with the given arguments. Count maximum reached work queue size in ThreadPoolExecutor.max_queue_size_reached. """ future = super().submit(fn, *args, **kwargs) work_queue_size = self._work_queue.qsize() if work_queue_size > self.max_queue_size_reached: self.max_queue_size_reached = work_queue_size return future
import os, queue, concurrent.futures class ThreadPoolExecutor(concurrent.futures.ThreadPoolExecutor): """ThreadPoolExecutor which allows setting max. work queue size""" def __init__(self, max_workers=0, thread_name_prefix='', max_queue_size=0): #super().__init__(max_workers or os.cpu_count() or 1, thread_name_prefix) super().__init__(max_workers or os.cpu_count() or 1) self.max_queue_size = max_queue_size or self._max_workers * 10 if self.max_queue_size > 0: self._work_queue = queue.Queue(self.max_queue_size) self.max_queue_size_reached = 0 def submit(self, fn, *args, **kwargs): """Submits a callable to be executed with the given arguments. Count maximum reached work queue size in ThreadPoolExecutor.max_queue_size_reached. """ future = super().submit(fn, *args, **kwargs) work_queue_size = self._work_queue.qsize() if work_queue_size > self.max_queue_size_reached: self.max_queue_size_reached = work_queue_size return future
Disable setting thread_name_prefix in ThreadPoolExecutor (only supported in Python >= 3.6)
Disable setting thread_name_prefix in ThreadPoolExecutor (only supported in Python >= 3.6)
Python
mit
xmikos/soapy_power,xmikos/soapy_power
--- +++ @@ -4,7 +4,8 @@ class ThreadPoolExecutor(concurrent.futures.ThreadPoolExecutor): """ThreadPoolExecutor which allows setting max. work queue size""" def __init__(self, max_workers=0, thread_name_prefix='', max_queue_size=0): - super().__init__(max_workers or os.cpu_count() or 1, thread_name_prefix) + #super().__init__(max_workers or os.cpu_count() or 1, thread_name_prefix) + super().__init__(max_workers or os.cpu_count() or 1) self.max_queue_size = max_queue_size or self._max_workers * 10 if self.max_queue_size > 0: self._work_queue = queue.Queue(self.max_queue_size)
aed8a831bca72268ad9fbcd2f777d91af29d61b6
message_view.py
message_view.py
import sublime import sublime_plugin PANEL_NAME = "SublimeLinter Messages" def plugin_unloaded(): for window in sublime.windows(): window.destroy_output_panel(PANEL_NAME) class SublimeLinterDisplayPanelCommand(sublime_plugin.WindowCommand): def run(self, msg=""): panel_view = self.window.create_output_panel(PANEL_NAME, True) panel_view.set_read_only(False) panel_view.run_command('append', {'characters': msg}) panel_view.set_read_only(True) panel_view.show(0) self.window.run_command("show_panel", {"panel": "output.{}".format(PANEL_NAME)}) class SublimeLinterRemovePanelCommand(sublime_plugin.WindowCommand): def run(self): self.window.destroy_output_panel(PANEL_NAME)
import sublime import sublime_plugin PANEL_NAME = "SublimeLinter Messages" OUTPUT_PANEL = "output." + PANEL_NAME def plugin_unloaded(): for window in sublime.windows(): window.destroy_output_panel(PANEL_NAME) class SublimeLinterDisplayPanelCommand(sublime_plugin.WindowCommand): def run(self, msg=""): panel_view = self.window.create_output_panel(PANEL_NAME, True) panel_view.set_read_only(False) panel_view.run_command('append', {'characters': msg}) panel_view.set_read_only(True) panel_view.show(0) self.window.run_command("show_panel", {"panel": OUTPUT_PANEL}) class SublimeLinterRemovePanelCommand(sublime_plugin.WindowCommand): def run(self): self.window.destroy_output_panel(PANEL_NAME)
Define const `OUTPUT_PANEL` for the panel name
Define const `OUTPUT_PANEL` for the panel name
Python
mit
SublimeLinter/SublimeLinter3,SublimeLinter/SublimeLinter3
--- +++ @@ -2,6 +2,7 @@ import sublime_plugin PANEL_NAME = "SublimeLinter Messages" +OUTPUT_PANEL = "output." + PANEL_NAME def plugin_unloaded(): @@ -16,7 +17,7 @@ panel_view.run_command('append', {'characters': msg}) panel_view.set_read_only(True) panel_view.show(0) - self.window.run_command("show_panel", {"panel": "output.{}".format(PANEL_NAME)}) + self.window.run_command("show_panel", {"panel": OUTPUT_PANEL}) class SublimeLinterRemovePanelCommand(sublime_plugin.WindowCommand):
958bb725cce490ecf5d9f2052e739d2b1fe84b3d
interface/backend/images/factories.py
interface/backend/images/factories.py
import factory import factory.fuzzy from backend.images import models class ImageSeriesFactory(factory.django.DjangoModelFactory): class Meta: model = models.ImageSeries patient_id = factory.Sequence(lambda n: "TEST-SERIES-%04d" % n) series_instance_uid = factory.Sequence(lambda n: "1.3.6.1.4.1.14519.5.2.1.6279.6001.%030d" % n) uri = factory.LazyAttribute(lambda f: 'file:///tmp/%s/' % f.series_instance_uid) class ImageLocationFactory(factory.django.DjangoModelFactory): class Meta: model = models.ImageLocation series = factory.LazyAttribute(lambda f: f.factory_parent.case.series) x = factory.fuzzy.FuzzyInteger(0, 511) y = factory.fuzzy.FuzzyInteger(0, 511) z = factory.fuzzy.FuzzyInteger(0, 63)
import factory import factory.fuzzy from backend.images import models class ImageSeriesFactory(factory.django.DjangoModelFactory): class Meta: model = models.ImageSeries patient_id = factory.Sequence(lambda n: "TEST-SERIES-%04d" % n) series_instance_uid = factory.Sequence(lambda n: "1.3.6.1.4.1.14519.5.2.1.6279.6001.%030d" % n) uri = factory.LazyAttribute(lambda f: 'file:///tmp/%s/' % f.series_instance_uid) class ImageLocationFactory(factory.django.DjangoModelFactory): class Meta: model = models.ImageLocation series = factory.LazyAttribute(lambda f: f.factory_parent.case.series) x = factory.fuzzy.FuzzyInteger(0, 256) y = factory.fuzzy.FuzzyInteger(0, 256) z = factory.fuzzy.FuzzyInteger(0, 16)
Make centroid factory locations a little more plausible
Make centroid factory locations a little more plausible
Python
mit
vessemer/concept-to-clinic,vessemer/concept-to-clinic,vessemer/concept-to-clinic,vessemer/concept-to-clinic
--- +++ @@ -20,8 +20,8 @@ series = factory.LazyAttribute(lambda f: f.factory_parent.case.series) - x = factory.fuzzy.FuzzyInteger(0, 511) + x = factory.fuzzy.FuzzyInteger(0, 256) - y = factory.fuzzy.FuzzyInteger(0, 511) + y = factory.fuzzy.FuzzyInteger(0, 256) - z = factory.fuzzy.FuzzyInteger(0, 63) + z = factory.fuzzy.FuzzyInteger(0, 16)
3fe0f73d9c9ca177cefd61636f10be77aa1261d0
autoentrepreneur/forms.py
autoentrepreneur/forms.py
from django.forms import ModelForm from django import forms from django.utils.translation import ugettext_lazy as _ from autoentrepreneur.models import UserProfile, AUTOENTREPRENEUR_ACTIVITY, \ AUTOENTREPRENEUR_PAYMENT_OPTION class UserProfileForm(ModelForm): company_name = forms.CharField(required=False, max_length=255, label=_('Company name')) company_id = forms.CharField(max_length=50, label=_('Company id')) # SIRET for France bank_information = forms.CharField(required=False, max_length=255, label=_('Bank information')) activity = forms.ChoiceField(choices=AUTOENTREPRENEUR_ACTIVITY, label=_('Activity')) creation_date = forms.DateField(label=_('Creation date')) creation_help = forms.BooleanField(required=False, label=_('Creation help')) # accre freeing_tax_payment = forms.BooleanField(required=False, label=_('Freeing tax payment')) # versement liberatoire payment_option = forms.ChoiceField(choices=AUTOENTREPRENEUR_PAYMENT_OPTION, label=_('Payment option')) class Meta: model = UserProfile exclude = ['user', 'address']
from django.forms import ModelForm from django import forms from django.utils.translation import ugettext_lazy as _ from autoentrepreneur.models import UserProfile, AUTOENTREPRENEUR_ACTIVITY, \ AUTOENTREPRENEUR_PAYMENT_OPTION class UserProfileForm(ModelForm): company_name = forms.CharField(required=False, max_length=255, label=_('Company name')) company_id = forms.CharField(max_length=50, label=_('Company id')) # SIRET for France activity = forms.ChoiceField(choices=AUTOENTREPRENEUR_ACTIVITY, label=_('Activity')) creation_date = forms.DateField(label=_('Creation date')) creation_help = forms.BooleanField(required=False, label=_('Creation help')) # accre freeing_tax_payment = forms.BooleanField(required=False, label=_('Freeing tax payment')) # versement liberatoire payment_option = forms.ChoiceField(choices=AUTOENTREPRENEUR_PAYMENT_OPTION, label=_('Payment option')) class Meta: model = UserProfile exclude = ['user', 'address']
Remove bank information from form.
Remove bank information from form.
Python
agpl-3.0
fgaudin/aemanager,fgaudin/aemanager,fgaudin/aemanager
--- +++ @@ -7,7 +7,6 @@ class UserProfileForm(ModelForm): company_name = forms.CharField(required=False, max_length=255, label=_('Company name')) company_id = forms.CharField(max_length=50, label=_('Company id')) # SIRET for France - bank_information = forms.CharField(required=False, max_length=255, label=_('Bank information')) activity = forms.ChoiceField(choices=AUTOENTREPRENEUR_ACTIVITY, label=_('Activity')) creation_date = forms.DateField(label=_('Creation date')) creation_help = forms.BooleanField(required=False, label=_('Creation help')) # accre
382a3f673c4d74f6ba2b8c4ccff9332445b76093
chmvh_website/resources/models.py
chmvh_website/resources/models.py
from django.db import models class Category(models.Model): """A category of resources.""" important = models.BooleanField( default=False, help_text=('categories marked important will be shown at the top of ', 'the resource list'), verbose_name='important') title = models.CharField( max_length=100, unique=True, verbose_name='title') class Meta: ordering = ('title',) verbose_name_plural = 'categories' def __str__(self): """Return the category's title""" return self.title class Resource(models.Model): """A resource containing various information.""" address = models.TextField( blank=True, verbose_name='address') category = models.ForeignKey( to='Category', verbose_name='resource category') description = models.TextField( blank=True, verbose_name='description') email = models.EmailField( blank=True, verbose_name='email address') phone = models.CharField( blank=True, max_length=50, verbose_name='phone number') title = models.CharField( max_length=100, unique=True, verbose_name='title') url = models.URLField( blank=True, verbose_name='website URL') class Meta: ordering = ('title',) def __str__(self): """Return the resource's title""" return self.title
from django.db import models class Category(models.Model): """A category of resources.""" important = models.BooleanField( default=False, help_text=('Categories marked important will be shown at the top of ' 'the resource list'), verbose_name='important') title = models.CharField( max_length=100, unique=True, verbose_name='title') class Meta: ordering = ('title',) verbose_name_plural = 'categories' def __str__(self): """Return the category's title""" return self.title class Resource(models.Model): """A resource containing various information.""" address = models.TextField( blank=True, verbose_name='address') category = models.ForeignKey( to='Category', verbose_name='resource category') description = models.TextField( blank=True, verbose_name='description') email = models.EmailField( blank=True, verbose_name='email address') phone = models.CharField( blank=True, max_length=50, verbose_name='phone number') title = models.CharField( max_length=100, unique=True, verbose_name='title') url = models.URLField( blank=True, verbose_name='website URL') class Meta: ordering = ('title',) def __str__(self): """Return the resource's title""" return self.title
Fix resource category help text.
Fix resource category help text. Fixes #15
Python
mit
cdriehuys/chmvh-website,cdriehuys/chmvh-website,cdriehuys/chmvh-website
--- +++ @@ -5,7 +5,7 @@ """A category of resources.""" important = models.BooleanField( default=False, - help_text=('categories marked important will be shown at the top of ', + help_text=('Categories marked important will be shown at the top of ' 'the resource list'), verbose_name='important') title = models.CharField(
969b2d322174392a85f6fa6fc92160cb18144594
bulbs/content/serializers.py
bulbs/content/serializers.py
from django import forms from django.contrib.auth.models import User from rest_framework import serializers from .models import Content, Tag class TagSerializer(serializers.ModelSerializer): class Meta: model = Tag class UserSerializer(serializers.ModelSerializer): class Meta: model = User exclude = ('password',) class SimpleAuthorSerializer(serializers.ModelSerializer): class Meta: model = User fields = ('id', 'first_name', 'last_name') class ContentSerializer(serializers.ModelSerializer): url = serializers.HyperlinkedIdentityField( view_name='content-detail', lookup_field='pk' ) class Meta: model = Content exclude = ('polymorphic_ctype',) class ContentSerializerReadOnly(ContentSerializer): tags = TagSerializer(many=True, required=False) authors = SimpleAuthorSerializer(many=True, required=False) class PolymorphicContentSerializerMixin(object): def to_native(self, value): if hasattr(value, 'get_serializer_class'): ThisSerializer = value.get_serializer_class() else: class ThisSerializer(serializers.ModelSerializer): class Meta: model = value.__class__ serializer = ThisSerializer(context=self.context) return serializer.to_native(value) class PolymorphicContentSerializer(ContentSerializer, PolymorphicContentSerializerMixin): pass class PolymorphicContentSerializerReadOnly(ContentSerializerReadOnly, PolymorphicContentSerializerMixin): pass
from django import forms from django.contrib.auth.models import User from rest_framework import serializers from .models import Content, Tag class TagSerializer(serializers.ModelSerializer): class Meta: model = Tag class UserSerializer(serializers.ModelSerializer): class Meta: model = User exclude = ('password',) class SimpleAuthorSerializer(serializers.ModelSerializer): class Meta: model = User fields = ('id', 'first_name', 'last_name') class ContentSerializer(serializers.ModelSerializer): url = serializers.HyperlinkedIdentityField( view_name='content-detail', lookup_field='pk' ) tags = serializers.PrimaryKeyRelatedField(many=True, required=False) authors = serializers.PrimaryKeyRelatedField(many=True, required=False) class Meta: model = Content exclude = ('polymorphic_ctype',) class ContentSerializerReadOnly(ContentSerializer): tags = TagSerializer(many=True, required=False) authors = SimpleAuthorSerializer(many=True, required=False) class PolymorphicContentSerializerMixin(object): def to_native(self, value): if hasattr(value, 'get_serializer_class'): ThisSerializer = value.get_serializer_class() else: class ThisSerializer(serializers.ModelSerializer): class Meta: model = value.__class__ serializer = ThisSerializer(context=self.context) return serializer.to_native(value) class PolymorphicContentSerializer(ContentSerializer, PolymorphicContentSerializerMixin): pass class PolymorphicContentSerializerReadOnly(ContentSerializerReadOnly, PolymorphicContentSerializerMixin): pass
Allow for empty tags and authors on `ContentSerializer`
Allow for empty tags and authors on `ContentSerializer`
Python
mit
theonion/django-bulbs,theonion/django-bulbs,theonion/django-bulbs,theonion/django-bulbs,pombredanne/django-bulbs,pombredanne/django-bulbs,theonion/django-bulbs
--- +++ @@ -27,6 +27,8 @@ view_name='content-detail', lookup_field='pk' ) + tags = serializers.PrimaryKeyRelatedField(many=True, required=False) + authors = serializers.PrimaryKeyRelatedField(many=True, required=False) class Meta: model = Content
a56c744af8c1a96631d990c2a398aedcd24a78de
rtsp2snapshot.py
rtsp2snapshot.py
#!/usr/bin/env python import io import platform import shlex import subprocess from flask import Flask, request, send_file, send_from_directory app = Flask(__name__) tmp_dir = '/tmp/' tmp_filename = 'snapshot.jpg' @app.route('/<path:url>') def snapshot(url): freebsd_platform = platform.system() == 'FreeBSD' if request.query_string: url += '?' + request.query_string # TODO: Sanitize interpolated string cmd = 'ffmpeg -rtsp_transport tcp -i "rtsp://%s" -hide_banner -loglevel quiet -ss 00:00:01.500 -f image2 -vframes 1 -y ' % (url,) if freebsd_platform: cmd += tmp_dir + tmp_filename else: cmd += '-' p = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE) p.wait() image = p.stdout.read() if freebsd_platform: return send_from_directory(tmp_dir, tmp_filename) return send_file(io.BytesIO(image), attachment_filename='snapshot.jpg', mimetype='image/jpeg') if __name__ == '__main__': app.run(debug=True, port=5000)
#!/usr/bin/env python import io import platform import shlex import subprocess from flask import Flask, request, send_file, send_from_directory app = Flask(__name__) tmp_dir = '/tmp/' tmp_filename = 'snapshot.jpg' @app.route('/<path:url>') def snapshot(url): freebsd_platform = platform.system() == 'FreeBSD' if request.query_string: url += '?' + request.query_string # TODO: Sanitize interpolated string cmd = 'ffmpeg -rtsp_transport tcp -i "rtsp://%s" -hide_banner -loglevel quiet -ss 00:00:01.500 -f image2 -vframes 1 -y ' % (url,) if freebsd_platform: cmd += tmp_dir + tmp_filename else: cmd += '-' p = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE) p.wait() image = p.stdout.read() if freebsd_platform: return send_from_directory(tmp_dir, tmp_filename, mimetype='image/jpeg') return send_file(io.BytesIO(image), attachment_filename='snapshot.jpg', mimetype='image/jpeg') if __name__ == '__main__': app.run(debug=True, port=5000)
Set the response's mimetype. Motion cannot process HTTP responses without an image mimetype.
Set the response's mimetype. Motion cannot process HTTP responses without an image mimetype.
Python
bsd-3-clause
flebel/rtsp2snapshot
--- +++ @@ -27,7 +27,9 @@ p.wait() image = p.stdout.read() if freebsd_platform: - return send_from_directory(tmp_dir, tmp_filename) + return send_from_directory(tmp_dir, + tmp_filename, + mimetype='image/jpeg') return send_file(io.BytesIO(image), attachment_filename='snapshot.jpg', mimetype='image/jpeg')
e0ddd80ea2d23f9b5fc32dd8a5ea13f9cb30da49
app/packages/__init__.py
app/packages/__init__.py
from flask import Blueprint packages = Blueprint('packages', __name__) from . import views, models from utils import github_data def post_get_single(result=None, **kw): result.update(result.pop("get_json")) result.update(github_data(result['name'], result['author'], result['url'])) # runs for search request def post_get_many(result=None, search_params=None, **kw): for item in result["objects"]: item.update(item.pop("get_json")) def api_creator(apimanager): apimanager.create_api(models.Package, primary_key='name', methods=['GET'], include_methods=['get_json'], include_columns=[], postprocessors={ 'GET_SINGLE': [post_get_single], 'GET_MANY': [post_get_many] })
from flask import Blueprint packages = Blueprint('packages', __name__) from . import views, models from utils import github_data def post_get_single(result=None, **kw): result.update(result.pop("get_json")) result.update(github_data(result['name'], result['author'], result['url'])) # runs for search request def post_get_many(result=None, search_params=None, **kw): for item in result["objects"]: item.update(item.pop("get_json")) def search_filter(search_params=None, **kw): if (search_params is None) or search_params.get("name") is None: return def filter_string(name): filter = [] filter.append(dict(name='name', val='%' + name + '%', op='like' ) ) filter.append(dict(name="keywords__name", val=name, op="any" )) return filter search_params['filters'] = [] args = search_params['name'].split() for item in args: search_params['filters'].extend(filter_string(item)) search_params['disjunction'] = True def api_creator(apimanager): apimanager.create_api(models.Package, primary_key='name', methods=['GET'], include_methods=['get_json'], include_columns=[], postprocessors={ 'GET_SINGLE': [post_get_single], 'GET_MANY': [post_get_many] }) apimanager.create_api(models.Package, primary_key='name', collection_name='search', methods=['GET'], include_methods=['get_json'], include_columns=[], preprocessors={ 'GET_MANY': [search_filter] })
Add api for package search based on name and keywords
Add api for package search based on name and keywords
Python
bsd-2-clause
NikhilKalige/atom-website,NikhilKalige/atom-website,NikhilKalige/atom-website
--- +++ @@ -18,6 +18,31 @@ item.update(item.pop("get_json")) +def search_filter(search_params=None, **kw): + if (search_params is None) or search_params.get("name") is None: + return + + def filter_string(name): + filter = [] + filter.append(dict(name='name', + val='%' + name + '%', + op='like' + ) + ) + filter.append(dict(name="keywords__name", + val=name, + op="any" + )) + return filter + + search_params['filters'] = [] + args = search_params['name'].split() + for item in args: + search_params['filters'].extend(filter_string(item)) + + search_params['disjunction'] = True + + def api_creator(apimanager): apimanager.create_api(models.Package, primary_key='name', methods=['GET'], include_methods=['get_json'], @@ -26,3 +51,11 @@ 'GET_SINGLE': [post_get_single], 'GET_MANY': [post_get_many] }) + apimanager.create_api(models.Package, primary_key='name', + collection_name='search', + methods=['GET'], + include_methods=['get_json'], + include_columns=[], + preprocessors={ + 'GET_MANY': [search_filter] + })
9bfe2dbd37fa18ed7915e82dc8dc8515d7fe9a76
alfred_collector/__main__.py
alfred_collector/__main__.py
import argparse import yaml from .process import CollectorProcess def get_config(path): with open(path) as file: return yaml.load(file) def main(): parser = argparse.ArgumentParser() parser.add_argument('config') args = parser.parse_args() config = get_config(args.config) processes = [] database_uri = config['database_uri'] for socket_address in config['collectors']: process = CollectorProcess(database_uri, socket_address) process.start() processes.append(process) for process in processes: process.join() if __name__ == '__main__': main()
import argparse import signal import yaml from functools import partial from .process import CollectorProcess def get_config(path): with open(path) as file: return yaml.load(file) def terminate_processes(processes, signum, frame): for process in processes: if process is not None and process.is_alive(): process.terminate() process.join() def main(): parser = argparse.ArgumentParser() parser.add_argument('config') args = parser.parse_args() config = get_config(args.config) processes = [] database_uri = config['database_uri'] for socket_address in config['collectors']: process = CollectorProcess(database_uri, socket_address) process.start() processes.append(process) signal.signal(signal.SIGTERM, partial(terminate_processes, processes)) for process in processes: process.join() if __name__ == '__main__': main()
Terminate child processes on SIGTERM signal
Terminate child processes on SIGTERM signal
Python
isc
alfredhq/alfred-collector
--- +++ @@ -1,11 +1,20 @@ import argparse +import signal import yaml +from functools import partial from .process import CollectorProcess def get_config(path): with open(path) as file: return yaml.load(file) + + +def terminate_processes(processes, signum, frame): + for process in processes: + if process is not None and process.is_alive(): + process.terminate() + process.join() def main(): @@ -22,6 +31,8 @@ process.start() processes.append(process) + signal.signal(signal.SIGTERM, partial(terminate_processes, processes)) + for process in processes: process.join()
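The commit above carries extra state into the SIGTERM handler by pre-binding the process list with functools.partial, since signal handlers only receive (signum, frame). Below is a minimal, self-contained sketch of that pattern; the worker body and process count are hypothetical and not taken from the repository.

import signal
import time
from functools import partial
from multiprocessing import Process


def worker():
    # Hypothetical long-running child; stands in for CollectorProcess.
    while True:
        time.sleep(1)


def terminate_processes(processes, signum, frame):
    # Same shape as the handler in the commit: terminate and reap children.
    for process in processes:
        if process is not None and process.is_alive():
            process.terminate()
            process.join()


if __name__ == '__main__':
    processes = [Process(target=worker) for _ in range(2)]
    for process in processes:
        process.start()

    # partial() bakes the process list in, leaving the (signum, frame) slots free.
    signal.signal(signal.SIGTERM, partial(terminate_processes, processes))

    for process in processes:
        process.join()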
692fe65ca9d24286d10e542c5028924a22036362
tests/test_models.py
tests/test_models.py
import pytest import turbasen @pytest.fixture def configure_dev(): turbasen.configure(ENDPOINT_URL='http://dev.nasjonalturbase.no/') @pytest.mark.skipif(turbasen.settings.Settings.API_KEY is None, reason="API key not set") def test_get(configure_dev): sted = turbasen.Sted.get('52407fb375049e561500004e') assert sted.navn == u'Tjørnbrotbu' assert sted.ssr_id == 382116 @pytest.mark.skipif(turbasen.settings.Settings.API_KEY is None, reason="API key not set") def test_lookup(configure_dev): results = turbasen.Sted.lookup(pages=2) result_list = list(results) assert len(result_list) == turbasen.settings.Settings.LIMIT * 2 assert result_list[0].object_id != ''
# encoding: utf-8 from __future__ import unicode_literals import pytest import turbasen @pytest.fixture def configure_dev(): turbasen.configure(ENDPOINT_URL='http://dev.nasjonalturbase.no/') @pytest.mark.skipif(turbasen.settings.Settings.API_KEY is None, reason="API key not set") def test_get(configure_dev): sted = turbasen.Sted.get('52407fb375049e561500004e') assert sted.navn == u'Tjørnbrotbu' assert sted.ssr_id == 382116 @pytest.mark.skipif(turbasen.settings.Settings.API_KEY is None, reason="API key not set") def test_lookup(configure_dev): results = turbasen.Sted.lookup(pages=2) result_list = list(results) assert len(result_list) == turbasen.settings.Settings.LIMIT * 2 assert result_list[0].object_id != ''
Add encoding and unicode literals import
Add encoding and unicode literals import
Python
mit
Turbasen/turbasen.py
--- +++ @@ -1,3 +1,6 @@ +# encoding: utf-8 +from __future__ import unicode_literals + import pytest import turbasen
75080e6f0da4f699ef1eb89310847befeccfab40
skimage/filter/tests/test_filter_import.py
skimage/filter/tests/test_filter_import.py
from skimage._shared.utils import all_warnings, skimage_deprecation from numpy.testing import assert_warns def import_filter(): from skimage import filter as F assert('sobel' in dir(F)) def test_filter_import(): with all_warnings(): assert_warns(skimage_deprecation, import_filter)
from numpy.testing import assert_warns from warnings import catch_warnings, simplefilter def test_import_filter(): with catch_warnings(): simplefilter('ignore') from skimage import filter as F assert('sobel' in dir(F))
Checking for deprecation on import is problematic. Rather, just check that filter can be imported normally.
Checking for deprecation on import is problematic. Rather, just check that filter can be imported normally.
Python
bsd-3-clause
michaelaye/scikit-image,warmspringwinds/scikit-image,juliusbierk/scikit-image,michaelpacer/scikit-image,ofgulban/scikit-image,vighneshbirodkar/scikit-image,oew1v07/scikit-image,chriscrosscutler/scikit-image,pratapvardhan/scikit-image,robintw/scikit-image,paalge/scikit-image,vighneshbirodkar/scikit-image,youprofit/scikit-image,newville/scikit-image,blink1073/scikit-image,youprofit/scikit-image,keflavich/scikit-image,keflavich/scikit-image,oew1v07/scikit-image,paalge/scikit-image,michaelpacer/scikit-image,juliusbierk/scikit-image,bsipocz/scikit-image,Britefury/scikit-image,robintw/scikit-image,chriscrosscutler/scikit-image,WarrenWeckesser/scikits-image,jwiggins/scikit-image,pratapvardhan/scikit-image,ClinicalGraphics/scikit-image,ajaybhat/scikit-image,rjeli/scikit-image,ofgulban/scikit-image,WarrenWeckesser/scikits-image,paalge/scikit-image,GaZ3ll3/scikit-image,vighneshbirodkar/scikit-image,emon10005/scikit-image,Midafi/scikit-image,rjeli/scikit-image,bsipocz/scikit-image,Britefury/scikit-image,jwiggins/scikit-image,ofgulban/scikit-image,warmspringwinds/scikit-image,newville/scikit-image,dpshelio/scikit-image,ajaybhat/scikit-image,Midafi/scikit-image,bennlich/scikit-image,emon10005/scikit-image,blink1073/scikit-image,rjeli/scikit-image,GaZ3ll3/scikit-image,michaelaye/scikit-image,bennlich/scikit-image,Hiyorimi/scikit-image,dpshelio/scikit-image,Hiyorimi/scikit-image,ClinicalGraphics/scikit-image
--- +++ @@ -1,10 +1,9 @@ -from skimage._shared.utils import all_warnings, skimage_deprecation from numpy.testing import assert_warns +from warnings import catch_warnings, simplefilter -def import_filter(): - from skimage import filter as F +def test_import_filter(): + with catch_warnings(): + simplefilter('ignore') + from skimage import filter as F + assert('sobel' in dir(F)) - -def test_filter_import(): - with all_warnings(): - assert_warns(skimage_deprecation, import_filter)
b8ecb1e86fcbbda0f92314c90fb319c2c50fcf94
uchicagohvz/production_settings.py
uchicagohvz/production_settings.py
from local_settings import * settings.DEBUG = False ALLOWED_HOSTS = ['uchicagohvz.org'] # Database DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'. 'NAME': 'uchicagohvz', # Or path to database file if using sqlite3. 'USER': 'user', # Not used with sqlite3. 'PASSWORD': '', # Not used with sqlite3. 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '', # Set to empty string for default. Not used with sqlite3. } } # REST framework settings REST_FRAMEWORK = { 'DEFAULT_RENDERER_CLASSES': ( 'rest_framework.renderers.JSONRenderer', ) } # Mandrill email settings EMAIL_HOST = 'smtp.mandrillapp.com' from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD EMAIL_PORT = '587' EMAIL_USE_TLS = True
from local_settings import * DEBUG = False ALLOWED_HOSTS = ['uchicagohvz.org'] # Database DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'. 'NAME': 'uchicagohvz', # Or path to database file if using sqlite3. 'USER': 'user', # Not used with sqlite3. 'PASSWORD': '', # Not used with sqlite3. 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '', # Set to empty string for default. Not used with sqlite3. } } # REST framework settings REST_FRAMEWORK = { 'DEFAULT_RENDERER_CLASSES': ( 'rest_framework.renderers.JSONRenderer', ) } # Mandrill email settings EMAIL_HOST = 'smtp.mandrillapp.com' from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD EMAIL_PORT = '587' EMAIL_USE_TLS = True
Set DEBUG = False in production
Set DEBUG = False in production
Python
mit
kz26/uchicago-hvz,kz26/uchicago-hvz,kz26/uchicago-hvz
--- +++ @@ -1,6 +1,6 @@ from local_settings import * -settings.DEBUG = False +DEBUG = False ALLOWED_HOSTS = ['uchicagohvz.org']
14a0738ec836bd3369984835797f5002813b270a
nilearn/_utils/__init__.py
nilearn/_utils/__init__.py
from niimg_conversions import is_a_niimg, _get_shape, _repr_niimgs, \ copy_niimg, check_niimg, concat_niimgs, check_niimgs from numpy_conversions import as_ndarray from cache_mixin import CacheMixin
from .niimg_conversions import is_a_niimg, _get_shape, _repr_niimgs, \ copy_niimg, check_niimg, concat_niimgs, check_niimgs from .numpy_conversions import as_ndarray from .cache_mixin import CacheMixin
Fix imports to local paths
Fix imports to local paths
Python
bsd-3-clause
abenicho/isvr
--- +++ @@ -1,8 +1,8 @@ -from niimg_conversions import is_a_niimg, _get_shape, _repr_niimgs, \ +from .niimg_conversions import is_a_niimg, _get_shape, _repr_niimgs, \ copy_niimg, check_niimg, concat_niimgs, check_niimgs -from numpy_conversions import as_ndarray +from .numpy_conversions import as_ndarray -from cache_mixin import CacheMixin +from .cache_mixin import CacheMixin
23a8df19e272bf4a48d59629976fc0cd4a1b83eb
Settings/German_Signal/ModelConfiguration.py
Settings/German_Signal/ModelConfiguration.py
""" Normally, this files contains all necessary code to execute successfully the solution of the problem but in this case (because this version is not stable) all code is in "TFModel_backup.py" file. """
""" Normally, this files contains all necessary code to execute successfully the solution of the problem but in this case (because this version is not stable) all code is in "TFModel_backup.py" file. """ # TODO Define Code """ TFBooster Code to solve problem """ setting_object = SettingsObject.Settings(Dictionary.string_settings_german_signal_path) path_train_and_test_images = [setting_object.train_path,setting_object.test_path] number_of_classes = 59 # Start in 0 percentages_sets = None # Example labels_set = [Dictionary.string_labels_type_option_hierarchy] is_an_unique_csv = False # If this variable is true, then only one CSV file will be passed and it will be treated like # trainSet, validationSet(if necessary) and testSet known_data_type = '' # Contains the type of data if the data file contains an unique type of data. Examples: # Number # or Chars. reader_features = tfr.ReaderFeatures(set_data_files = path_train_and_test_images,number_of_classes = number_of_classes, labels_set = labels_set, is_unique_csv = is_an_unique_csv,known_data_type = known_data_type, percentages_sets = percentages_sets) """ Creating Reader from ReaderFeatures """ tf_reader = tfr.Reader(reader_features = reader_features) # Reader Object with all information """ Getting train, validation (if necessary) and test set. """ test_set = tf_reader.test_set # Test Set train_set = tf_reader.train_set # Train Set del reader_features del tf_reader models = models.TFModels(input=train_set[0],test=test_set[0], input_labels=train_set[1],test_labels=test_set[1], number_of_classes=number_of_classes, setting_object=setting_object) models.convolution_model_image()
Add TFBooster code to solve the "German signal" problem
Add TFBooster code to solve the "German signal" problem
Python
apache-2.0
Gabvaztor/TFBoost
--- +++ @@ -3,3 +3,40 @@ but in this case (because this version is not stable) all code is in "TFModel_backup.py" file. """ +# TODO Define Code +""" +TFBooster Code to solve problem +""" +setting_object = SettingsObject.Settings(Dictionary.string_settings_german_signal_path) + +path_train_and_test_images = [setting_object.train_path,setting_object.test_path] +number_of_classes = 59 # Start in 0 +percentages_sets = None # Example +labels_set = [Dictionary.string_labels_type_option_hierarchy] +is_an_unique_csv = False # If this variable is true, then only one CSV file will be passed and it will be treated like +# trainSet, validationSet(if necessary) and testSet +known_data_type = '' # Contains the type of data if the data file contains an unique type of data. Examples: # Number +# or Chars. + +reader_features = tfr.ReaderFeatures(set_data_files = path_train_and_test_images,number_of_classes = number_of_classes, + labels_set = labels_set, + is_unique_csv = is_an_unique_csv,known_data_type = known_data_type, + percentages_sets = percentages_sets) + +""" +Creating Reader from ReaderFeatures +""" +tf_reader = tfr.Reader(reader_features = reader_features) # Reader Object with all information + +""" +Getting train, validation (if necessary) and test set. +""" +test_set = tf_reader.test_set # Test Set +train_set = tf_reader.train_set # Train Set +del reader_features +del tf_reader + +models = models.TFModels(input=train_set[0],test=test_set[0], + input_labels=train_set[1],test_labels=test_set[1], + number_of_classes=number_of_classes, setting_object=setting_object) +models.convolution_model_image()
532c201053ae271544270035423f690b4774794a
swimlane/core/fields/usergroup.py
swimlane/core/fields/usergroup.py
from .base import MultiSelectField from swimlane.core.resources.usergroup import UserGroup class UserGroupField(MultiSelectField): """Manages getting/setting users from record User/Group fields""" field_type = 'Core.Models.Fields.UserGroupField, Core' supported_types = [UserGroup] def cast_to_python(self, value): """Convert JSON definition to UserGroup object""" # v2.x does not provide a distinction between users and groups at the field selection level, can only return # UserGroup instances instead of specific User or Group instances if value is not None: value = UserGroup(self.record._swimlane, value) return value def cast_to_swimlane(self, value): """Dump UserGroup back to JSON representation""" if value is not None: value = value.get_usergroup_selection() return value
from .base import MultiSelectField from swimlane.core.resources.usergroup import UserGroup class UserGroupField(MultiSelectField): """Manages getting/setting users from record User/Group fields""" field_type = 'Core.Models.Fields.UserGroupField, Core' supported_types = [UserGroup] def set_swimlane(self, value): """Workaround for reports returning an empty usergroup field as a single element list with no id/name""" if value == [{"$type": "Core.Models.Utilities.UserGroupSelection, Core"}]: value = [] return super(UserGroupField, self).set_swimlane(value) def cast_to_python(self, value): """Convert JSON definition to UserGroup object""" # v2.x does not provide a distinction between users and groups at the field selection level, can only return # UserGroup instances instead of specific User or Group instances if value is not None: value = UserGroup(self.record._swimlane, value) return value def cast_to_swimlane(self, value): """Dump UserGroup back to JSON representation""" if value is not None: value = value.get_usergroup_selection() return value
Fix multiselect user/group field when retrieving results from a report
Fix multiselect user/group field when retrieving results from a report
Python
mit
Swimlane/sw-python-client
--- +++ @@ -8,6 +8,13 @@ field_type = 'Core.Models.Fields.UserGroupField, Core' supported_types = [UserGroup] + + def set_swimlane(self, value): + """Workaround for reports returning an empty usergroup field as a single element list with no id/name""" + if value == [{"$type": "Core.Models.Utilities.UserGroupSelection, Core"}]: + value = [] + + return super(UserGroupField, self).set_swimlane(value) def cast_to_python(self, value): """Convert JSON definition to UserGroup object"""
53e8c14d774131503dbdefe6528cd1e26adbf30b
azure_nosetests.py
azure_nosetests.py
#!/usr/bin/env python import os.path, nose, glob, sys packages = [os.path.dirname(p) for p in glob.glob('azure*/setup.py')] sys.path += packages nose.main()
#!/usr/bin/env python import os.path, nose, glob, sys, pkg_resources packages = [os.path.dirname(p) for p in glob.glob('azure*/setup.py')] sys.path += packages # Declare it manually, because "azure-storage" is probably installed with pip pkg_resources.declare_namespace('azure') nose.main()
Allow Travis to load tests and use azure-storage installed from pip at the same time
Allow Travis to load tests and use azure-storage installed from pip at the same time
Python
mit
Azure/azure-sdk-for-python,Azure/azure-sdk-for-python,v-iam/azure-sdk-for-python,Azure/azure-sdk-for-python,AutorestCI/azure-sdk-for-python,lmazuel/azure-sdk-for-python,Azure/azure-sdk-for-python,SUSE/azure-sdk-for-python
--- +++ @@ -1,7 +1,9 @@ #!/usr/bin/env python -import os.path, nose, glob, sys +import os.path, nose, glob, sys, pkg_resources packages = [os.path.dirname(p) for p in glob.glob('azure*/setup.py')] sys.path += packages +# Declare it manually, because "azure-storage" is probably installed with pip +pkg_resources.declare_namespace('azure') nose.main()
ed9294c7ab0abf574f076464274d83f1e39b53cd
paws/handler.py
paws/handler.py
from .request import Request from .response import response class Handler(object): ''' Simple dispatcher class. ''' def __init__(self, event, context): self.request = Request(event, context) def __call__(self, event, context): func = getattr(self, self.event['httpMethod'], self.invalid) return func(self.request, *self.event['pathParameters']) def invalid(self, *args): return response(status=405)
from .request import Request from .response import response, Response import logging log = logging.getLogger() class Handler(object): ''' Simple dispatcher class. ''' def __init__(self, event, context): self.request = Request(event, context) def __call__(self, event, context): func = getattr(self, self.event['httpMethod'].lower(), self.invalid) try: resp = func(self.request, *self.event['pathParameters']) except Exception: import traceback log.error(self) log.error(traceback.format_exc()) return response(body='Internal server Error', status=500) if isinstance(resp, Response): resp = resp.render() return resp def __str__(self): return "<Request: {%s} %s (%r)" % ( self.request.method, self.request.path, self.requeste.params, ) def invalid(self, *args): # XXX Build list of valid methods? return response(status=405)
Handle and log exceptions. Render Response objects.
Handle and log exceptions. Render Response objects.
Python
bsd-3-clause
funkybob/paws
--- +++ @@ -1,5 +1,8 @@ from .request import Request -from .response import response +from .response import response, Response + +import logging +log = logging.getLogger() class Handler(object): @@ -10,8 +13,25 @@ self.request = Request(event, context) def __call__(self, event, context): - func = getattr(self, self.event['httpMethod'], self.invalid) - return func(self.request, *self.event['pathParameters']) + func = getattr(self, self.event['httpMethod'].lower(), self.invalid) + try: + resp = func(self.request, *self.event['pathParameters']) + except Exception: + import traceback + log.error(self) + log.error(traceback.format_exc()) + return response(body='Internal server Error', status=500) + if isinstance(resp, Response): + resp = resp.render() + return resp + + def __str__(self): + return "<Request: {%s} %s (%r)" % ( + self.request.method, + self.request.path, + self.requeste.params, + ) def invalid(self, *args): + # XXX Build list of valid methods? return response(status=405)
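One detail worth flagging in the new Handler.__str__ above: it reads self.requeste.params, so the log.error(self) call in the exception path would itself raise AttributeError instead of logging the request. A small, self-contained sketch of what the method presumably intends; the Request stand-in below is invented for the example and is not the paws Request class.

class Request(object):
    # Stand-in with the three attributes __str__ needs.
    method = 'GET'
    path = '/things/1'
    params = {'page': '2'}


class Handler(object):
    def __init__(self, request):
        self.request = request

    def __str__(self):
        return "<Request: {%s} %s (%r)>" % (
            self.request.method,
            self.request.path,
            self.request.params,
        )


print(Handler(Request()))  # <Request: {GET} /things/1 ({'page': '2'})>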
b5afdd604831f985427880537d37eb7a35addaa1
tests/functional/test_python_option.py
tests/functional/test_python_option.py
import json import os from pathlib import Path from venv import EnvBuilder from tests.lib import PipTestEnvironment, TestData def test_python_interpreter( script: PipTestEnvironment, tmpdir: Path, shared_data: TestData, ) -> None: env_path = os.fsdecode(tmpdir / "venv") env = EnvBuilder(with_pip=False) env.create(env_path) result = script.pip("--python", env_path, "list", "--format=json") assert json.loads(result.stdout) == [] script.pip( "--python", env_path, "install", "-f", shared_data.find_links, "--no-index", "simplewheel==1.0", ) result = script.pip("--python", env_path, "list", "--format=json") assert json.loads(result.stdout) == [{"name": "simplewheel", "version": "1.0"}] script.pip("--python", env_path, "uninstall", "simplewheel", "--yes") result = script.pip("--python", env_path, "list", "--format=json") assert json.loads(result.stdout) == []
import json import os from pathlib import Path from venv import EnvBuilder from tests.lib import PipTestEnvironment, TestData def test_python_interpreter( script: PipTestEnvironment, tmpdir: Path, shared_data: TestData, ) -> None: env_path = os.fspath(tmpdir / "venv") env = EnvBuilder(with_pip=False) env.create(env_path) result = script.pip("--python", env_path, "list", "--format=json") before = json.loads(result.stdout) # Ideally we would assert that before==[], but there's a problem in CI # that means this isn't true. See https://github.com/pypa/pip/pull/11326 # for details. script.pip( "--python", env_path, "install", "-f", shared_data.find_links, "--no-index", "simplewheel==1.0", ) result = script.pip("--python", env_path, "list", "--format=json") installed = json.loads(result.stdout) assert {"name": "simplewheel", "version": "1.0"} in installed script.pip("--python", env_path, "uninstall", "simplewheel", "--yes") result = script.pip("--python", env_path, "list", "--format=json") assert json.loads(result.stdout) == before
Fix test to cater for packages leaked into venv
Fix test to cater for packages leaked into venv
Python
mit
pfmoore/pip,pypa/pip,sbidoul/pip,pfmoore/pip,pradyunsg/pip,sbidoul/pip,pypa/pip,pradyunsg/pip
--- +++ @@ -11,12 +11,17 @@ tmpdir: Path, shared_data: TestData, ) -> None: - env_path = os.fsdecode(tmpdir / "venv") + env_path = os.fspath(tmpdir / "venv") env = EnvBuilder(with_pip=False) env.create(env_path) result = script.pip("--python", env_path, "list", "--format=json") - assert json.loads(result.stdout) == [] + before = json.loads(result.stdout) + + # Ideally we would assert that before==[], but there's a problem in CI + # that means this isn't true. See https://github.com/pypa/pip/pull/11326 + # for details. + script.pip( "--python", env_path, @@ -26,8 +31,11 @@ "--no-index", "simplewheel==1.0", ) + result = script.pip("--python", env_path, "list", "--format=json") - assert json.loads(result.stdout) == [{"name": "simplewheel", "version": "1.0"}] + installed = json.loads(result.stdout) + assert {"name": "simplewheel", "version": "1.0"} in installed + script.pip("--python", env_path, "uninstall", "simplewheel", "--yes") result = script.pip("--python", env_path, "list", "--format=json") - assert json.loads(result.stdout) == [] + assert json.loads(result.stdout) == before
4c986e7cedde18530745dca072e06659f1fb20a9
numpy/compat/__init__.py
numpy/compat/__init__.py
""" Compatibility module. This module contains duplicated code from Python itself or 3rd party extensions, which may be included for the following reasons: * compatibility * we may only need a small subset of the copied library/module """ from . import _inspect from . import _pep440 from . import py3k from ._inspect import getargspec, formatargspec from .py3k import * __all__ = [] __all__.extend(_inspect.__all__) __all__.extend(py3k.__all__)
""" Compatibility module. This module contains duplicated code from Python itself or 3rd party extensions, which may be included for the following reasons: * compatibility * we may only need a small subset of the copied library/module """ from . import _inspect from . import py3k from ._inspect import getargspec, formatargspec from .py3k import * __all__ = [] __all__.extend(_inspect.__all__) __all__.extend(py3k.__all__)
Remove numpy.compat._pep440 from default imports
PERF: Remove numpy.compat._pep440 from default imports

The submodule numpy.compat._pep440 is removed from the default import of numpy to reduce the import time. See #22061
Python
bsd-3-clause
endolith/numpy,numpy/numpy,numpy/numpy,mattip/numpy,endolith/numpy,mattip/numpy,mhvk/numpy,charris/numpy,charris/numpy,mattip/numpy,endolith/numpy,numpy/numpy,mhvk/numpy,mhvk/numpy,mhvk/numpy,charris/numpy,charris/numpy,endolith/numpy,mhvk/numpy,mattip/numpy,numpy/numpy
--- +++ @@ -9,7 +9,6 @@ """ from . import _inspect -from . import _pep440 from . import py3k from ._inspect import getargspec, formatargspec from .py3k import *
0707d920f37edb82d16ccabe1e8413ec16c47c0b
backend/mcapi/mcdir.py
backend/mcapi/mcdir.py
import utils from os import environ import os.path MCDIR = environ.get("MCDIR") or '/mcfs/data' def for_uid(uidstr): pieces = uidstr.split('-') path = os.path.join(MCDIR, pieces[1][0:2], pieces[1][2:4]) utils.mkdirp(path) return path
import utils from os import environ import os.path MCDIR = environ.get("MCDIR") or '/mcfs/data/materialscommons' def for_uid(uidstr): pieces = uidstr.split('-') path = os.path.join(MCDIR, pieces[1][0:2], pieces[1][2:4]) utils.mkdirp(path) return path
Change the directory where data is written.
Change the directory where data is written.
Python
mit
materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org
--- +++ @@ -3,7 +3,7 @@ import os.path -MCDIR = environ.get("MCDIR") or '/mcfs/data' +MCDIR = environ.get("MCDIR") or '/mcfs/data/materialscommons' def for_uid(uidstr):
f75a151b33635cad5604cb9d7f66fc043c4f972a
saleor/core/utils/json_serializer.py
saleor/core/utils/json_serializer.py
import json from django.core.serializers.base import DeserializationError from django.core.serializers.json import ( DjangoJSONEncoder, PythonDeserializer, Serializer as JsonSerializer) from prices import Money MONEY_TYPE = 'Money' class Serializer(JsonSerializer): def _init_options(self): super()._init_options() self.json_kwargs['cls'] = CustomJsonEncoder class CustomJsonEncoder(DjangoJSONEncoder): def default(self, obj): if isinstance(obj, Money): return { '_type': MONEY_TYPE, 'amount': obj.amount, 'currency': obj.currency} return super().default(obj) def object_hook(obj): if '_type' in obj and obj['_type'] == MONEY_TYPE: return Money(obj['amount'], obj['currency']) return obj def Deserializer(stream_or_string, **options): """Deserialize a stream or string of JSON data. This is a copy of Django implementation with additional argument <object_hook> in json.loads""" if not isinstance(stream_or_string, (bytes, str)): stream_or_string = stream_or_string.read() if isinstance(stream_or_string, bytes): stream_or_string = stream_or_string.decode() try: objects = json.loads(stream_or_string, object_hook=object_hook) yield from PythonDeserializer(objects, **options) except (GeneratorExit, DeserializationError): raise except Exception as exc: raise DeserializationError() from exc
import json from django.core.serializers.base import DeserializationError from django.core.serializers.json import ( DjangoJSONEncoder, PythonDeserializer, Serializer as JsonSerializer) from prices import Money MONEY_TYPE = 'Money' class Serializer(JsonSerializer): def _init_options(self): super()._init_options() self.json_kwargs['cls'] = CustomJsonEncoder class CustomJsonEncoder(DjangoJSONEncoder): def default(self, obj): if isinstance(obj, Money): return { '_type': MONEY_TYPE, 'amount': obj.amount, 'currency': obj.currency} return super().default(obj) def object_hook(obj): if '_type' in obj and obj['_type'] == MONEY_TYPE: return Money(obj['amount'], obj['currency']) return obj def Deserializer(stream_or_string, **options): """Deserialize a stream or string of JSON data. This is a slightly modified copy of Django implementation with additional argument <object_hook> in json.loads""" if not isinstance(stream_or_string, (bytes, str)): stream_or_string = stream_or_string.read() if isinstance(stream_or_string, bytes): stream_or_string = stream_or_string.decode() try: objects = json.loads(stream_or_string, object_hook=object_hook) yield from PythonDeserializer(objects, **options) except Exception as exc: # ugly construction to overcome pylint's warning # "The except handler raises immediately" if isinstance(exc, GeneratorExit, DeserializationError): raise raise DeserializationError() from exc
Fix the pylint warning "The except handler raises immediately"
Fix the pylint warning "The except handler raises immediately"
Python
bsd-3-clause
UITools/saleor,mociepka/saleor,UITools/saleor,UITools/saleor,UITools/saleor,maferelo/saleor,maferelo/saleor,mociepka/saleor,maferelo/saleor,mociepka/saleor,UITools/saleor
--- +++ @@ -30,8 +30,9 @@ def Deserializer(stream_or_string, **options): - """Deserialize a stream or string of JSON data. This is a copy of Django - implementation with additional argument <object_hook> in json.loads""" + """Deserialize a stream or string of JSON data. This is a slightly modified + copy of Django implementation with additional argument <object_hook> in + json.loads""" if not isinstance(stream_or_string, (bytes, str)): stream_or_string = stream_or_string.read() if isinstance(stream_or_string, bytes): @@ -39,7 +40,9 @@ try: objects = json.loads(stream_or_string, object_hook=object_hook) yield from PythonDeserializer(objects, **options) - except (GeneratorExit, DeserializationError): - raise except Exception as exc: + # ugly construction to overcome pylint's warning + # "The except handler raises immediately" + if isinstance(exc, GeneratorExit, DeserializationError): + raise raise DeserializationError() from exc
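A note on the replacement guard above: isinstance(exc, GeneratorExit, DeserializationError) passes the two exception classes as separate positional arguments, but isinstance takes an object and a single class or tuple of classes, so the guard itself raises TypeError at runtime; the intended form is isinstance(exc, (GeneratorExit, DeserializationError)). A minimal, self-contained demonstration using only built-in exception types:

exc = GeneratorExit()

try:
    isinstance(exc, GeneratorExit, StopIteration)       # three positional arguments
except TypeError as err:
    print('rejected:', err)                             # isinstance expected 2 arguments

print(isinstance(exc, (GeneratorExit, StopIteration)))  # True -- tuple form works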
fb5c6d5288b6d69d16f917976ae068e9c52a6c7d
Lib/email/Iterators.py
Lib/email/Iterators.py
# Copyright (C) 2001,2002 Python Software Foundation # Author: barry@zope.com (Barry Warsaw) """Various types of useful iterators and generators. """ import sys try: from email._compat22 import body_line_iterator, typed_subpart_iterator except SyntaxError: # Python 2.1 doesn't have generators from email._compat21 import body_line_iterator, typed_subpart_iterator def _structure(msg, fp=None, level=0): """A handy debugging aid""" if fp is None: fp = sys.stdout tab = ' ' * (level * 4) print >> fp, tab + msg.get_content_type() if msg.is_multipart(): for subpart in msg.get_payload(): _structure(subpart, fp, level+1)
# Copyright (C) 2001-2004 Python Software Foundation # Author: Barry Warsaw <barry@python.org> """Various types of useful iterators and generators. """ import sys from cStringIO import StringIO # This function will become a method of the Message class def walk(self): """Walk over the message tree, yielding each subpart. The walk is performed in depth-first order. This method is a generator. """ yield self if self.is_multipart(): for subpart in self.get_payload(): for subsubpart in subpart.walk(): yield subsubpart # These two functions are imported into the Iterators.py interface module. # The Python 2.2 version uses generators for efficiency. def body_line_iterator(msg, decode=False): """Iterate over the parts, returning string payloads line-by-line. Optional decode (default False) is passed through to .get_payload(). """ for subpart in msg.walk(): payload = subpart.get_payload(decode=decode) if isinstance(payload, basestring): for line in StringIO(payload): yield line def typed_subpart_iterator(msg, maintype='text', subtype=None): """Iterate over the subparts with a given MIME type. Use `maintype' as the main MIME type to match against; this defaults to "text". Optional `subtype' is the MIME subtype to match against; if omitted, only the main type is matched. """ for subpart in msg.walk(): if subpart.get_content_maintype() == maintype: if subtype is None or subpart.get_content_subtype() == subtype: yield subpart def _structure(msg, fp=None, level=0, include_default=False): """A handy debugging aid""" if fp is None: fp = sys.stdout tab = ' ' * (level * 4) print >> fp, tab + msg.get_content_type(), if include_default: print '[%s]' % msg.get_default_type() else: print if msg.is_multipart(): for subpart in msg.get_payload(): _structure(subpart, fp, level+1, include_default)
Update to Python 2.3, getting rid of backward compatibility crud. We don't need the _compat21 or _compat22 modules either.
Update to Python 2.3, getting rid of backward compatibility crud. We don't need the _compat21 or _compat22 modules either.
Python
mit
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
--- +++ @@ -1,26 +1,70 @@ -# Copyright (C) 2001,2002 Python Software Foundation -# Author: barry@zope.com (Barry Warsaw) +# Copyright (C) 2001-2004 Python Software Foundation +# Author: Barry Warsaw <barry@python.org> """Various types of useful iterators and generators. """ import sys - -try: - from email._compat22 import body_line_iterator, typed_subpart_iterator -except SyntaxError: - # Python 2.1 doesn't have generators - from email._compat21 import body_line_iterator, typed_subpart_iterator +from cStringIO import StringIO -def _structure(msg, fp=None, level=0): +# This function will become a method of the Message class +def walk(self): + """Walk over the message tree, yielding each subpart. + + The walk is performed in depth-first order. This method is a + generator. + """ + yield self + if self.is_multipart(): + for subpart in self.get_payload(): + for subsubpart in subpart.walk(): + yield subsubpart + + + + +# These two functions are imported into the Iterators.py interface module. +# The Python 2.2 version uses generators for efficiency. +def body_line_iterator(msg, decode=False): + """Iterate over the parts, returning string payloads line-by-line. + + Optional decode (default False) is passed through to .get_payload(). + """ + for subpart in msg.walk(): + payload = subpart.get_payload(decode=decode) + if isinstance(payload, basestring): + for line in StringIO(payload): + yield line + + +def typed_subpart_iterator(msg, maintype='text', subtype=None): + """Iterate over the subparts with a given MIME type. + + Use `maintype' as the main MIME type to match against; this defaults to + "text". Optional `subtype' is the MIME subtype to match against; if + omitted, only the main type is matched. + """ + for subpart in msg.walk(): + if subpart.get_content_maintype() == maintype: + if subtype is None or subpart.get_content_subtype() == subtype: + yield subpart + + + + +def _structure(msg, fp=None, level=0, include_default=False): """A handy debugging aid""" if fp is None: fp = sys.stdout tab = ' ' * (level * 4) - print >> fp, tab + msg.get_content_type() + print >> fp, tab + msg.get_content_type(), + if include_default: + print '[%s]' % msg.get_default_type() + else: + print if msg.is_multipart(): for subpart in msg.get_payload(): - _structure(subpart, fp, level+1) + _structure(subpart, fp, level+1, include_default)
f3fcddaf7aa4e081322db6779ce2ad6d7e0db10a
pytac/device.py
pytac/device.py
"""The device class used to represent a particular function of an accelerator element. A physical element in an accelerator may have multiple devices: an example at DLS is a sextupole magnet that contains also horizontal and vertical corrector magnets and a skew quadrupole. """ class Device(object): """A representation of a property of an element associated with a field. Typically a control system will be used to set and get values on a device. """ def is_enabled(self): """Whether the device is enabled. Returns: bool: whether the device is enabled. """ raise NotImplementedError() def set_value(self, value): """Set the value on the device. Args: value (float): the value to set. """ raise NotImplementedError() def get_value(self): """Read the value from the device. Returns: float: the value of the PV. """ raise NotImplementedError()
"""The device class used to represent a particular function of an accelerator element. A physical element in an accelerator may have multiple devices: an example at DLS is a sextupole magnet that contains also horizontal and vertical corrector magnets and a skew quadrupole. """ class Device(object): """A representation of a property of an element associated with a field. Typically a control system will be used to set and get values on a device. """ def is_enabled(self): """Whether the device is enabled. Returns: bool: whether the device is enabled. """ raise NotImplementedError() def set_value(self, value): """Set the value on the device. Args: value (float): the value to set. """ raise NotImplementedError() def get_value(self): """Read the value from the device. Returns: float: the value of the PV. """ raise NotImplementedError() class BasicDevice(Device): """A basic implementation of the device class. This device does not have a pv associated with it, nor does it interact with a simulator. In short this device acts as simple storage for data that rarely changes, as it is not affected by changes to other aspects of the accelerator. """ def __init__(self, value, enabled=True): """Args: value (?): can be a number, string or a list of strings or numbers. enabled (bool-like): Whether the device is enabled. May be a PvEnabler object. """ self.value = value self._enabled = enabled def is_enabled(self): """Whether the device is enabled. Returns: bool: whether the device is enabled. """ return bool(self._enabled) def set_value(self, value): """Set the value on the device. Args: value (?): the value to set. """ self.value = value def get_value(self): """Read the value from the device. Returns: ?: the value of the PV. """ return self.value
Add code for a BasicDevice class.
Add code for a BasicDevice class.
Python
apache-2.0
willrogers/pytac,willrogers/pytac
--- +++ @@ -13,7 +13,6 @@ Typically a control system will be used to set and get values on a device. """ - def is_enabled(self): """Whether the device is enabled. @@ -37,3 +36,43 @@ float: the value of the PV. """ raise NotImplementedError() + + +class BasicDevice(Device): + """A basic implementation of the device class. This device does not have a + pv associated with it, nor does it interact with a simulator. In short + this device acts as simple storage for data that rarely changes, as it is + not affected by changes to other aspects of the accelerator. + """ + def __init__(self, value, enabled=True): + """Args: + value (?): can be a number, string or a list of strings or numbers. + enabled (bool-like): Whether the device is enabled. May be a + PvEnabler object. + """ + self.value = value + self._enabled = enabled + + def is_enabled(self): + """Whether the device is enabled. + + Returns: + bool: whether the device is enabled. + """ + return bool(self._enabled) + + def set_value(self, value): + """Set the value on the device. + + Args: + value (?): the value to set. + """ + self.value = value + + def get_value(self): + """Read the value from the device. + + Returns: + ?: the value of the PV. + """ + return self.value
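A short usage sketch for the new BasicDevice class; the import path is assumed from the file name (pytac/device.py) and the stored values are arbitrary examples.

from pytac.device import BasicDevice

device = BasicDevice(3.5)            # enabled defaults to True
print(device.is_enabled())           # True
print(device.get_value())            # 3.5

device.set_value(['a', 'b', 'c'])    # the value may also be a list, per the docstring
print(device.get_value())            # ['a', 'b', 'c']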
c3a689a1c1825058ddf7560dcbf3e8785b87b003
fdp/fdp.py
fdp/fdp.py
import connexion from fdp import config def create_app(host, port, graph_endpoint=None): config.init_fairgraph(host, port, graph_endpoint) app = connexion.FlaskApp(__name__, specification_dir='openapi/', debug=True) options = {"swagger_ui": True} app.add_api('openapi.yaml', options=options, arguments={'title': 'FDP server'}, strict_validation=True, validate_responses=True ) return app.app if __name__ == "__main__": host = '0.0.0.0' port = 80 app = create_app(host, port) app.run(host=host, port=port, debug=True)
import connexion from fdp import config def create_app(host, port, graph_endpoint=None): config.init_fairgraph(host, port, graph_endpoint) app = connexion.FlaskApp(__name__, specification_dir='openapi/', debug=True, options={"swagger_url": ""}) options = {"swagger_ui": True} app.add_api('openapi.yaml', options=options, arguments={'title': 'FDP server'}, strict_validation=True, validate_responses=True ) return app.app if __name__ == "__main__": host = '0.0.0.0' port = 80 app = create_app(host, port) app.run(host=host, port=port, debug=True)
Update the Swagger API deployment so the Swagger UI is served at the base URL instead of /ui (less confusing than having to know to go to /ui)
Update the Swagger API deployment so the Swagger UI is served at the base URL instead of /ui (less confusing than having to know to go to /ui)
Python
apache-2.0
NLeSC/ODEX-FAIRDataPoint,NLeSC/ODEX-FAIRDataPoint,NLeSC/ODEX-FAIRDataPoint,NLeSC/ODEX-FAIRDataPoint
--- +++ @@ -4,7 +4,7 @@ def create_app(host, port, graph_endpoint=None): config.init_fairgraph(host, port, graph_endpoint) - app = connexion.FlaskApp(__name__, specification_dir='openapi/', debug=True) + app = connexion.FlaskApp(__name__, specification_dir='openapi/', debug=True, options={"swagger_url": ""}) options = {"swagger_ui": True} app.add_api('openapi.yaml', options=options,
8dbe3e9e418c120d59bb95aa6ff8fb3ab382aac2
billjobs/tests/tests_export_account_email.py
billjobs/tests/tests_export_account_email.py
from django.test import TestCase from django.contrib.admin.sites import AdminSite from billjobs.admin import UserAdmin class EmailExportTestCase(TestCase): """ Tests for email account export """ def test_method_is_avaible(self): """ Test admin can select the action in dropdown list """ self.assertTrue(hasattr(UserAdmin, 'export_email')) def test_method_is_model_admin_action(self): """ Test method is an custom action for user admin """ self.assertTrue('export_email' in UserAdmin.actions)
from django.test import TestCase from django.contrib.admin.sites import AdminSite from billjobs.admin import UserAdmin class EmailExportTestCase(TestCase): """ Tests for email account export """ def test_method_is_avaible(self): """ Test admin can select the action in dropdown list """ self.assertTrue(hasattr(UserAdmin, 'export_email')) def test_method_is_model_admin_action(self): """ Test method is an custom action for user admin """ self.assertTrue('export_email' in UserAdmin.actions) def test_action_has_a_short_description(self): """ Test method has a short description """ self.assertEqual(UserAdmin.export_email.short_description, 'Export email of selected users')
Test export_email has a short description
Test export_email has a short description
Python
mit
ioO/billjobs
--- +++ @@ -12,3 +12,8 @@ def test_method_is_model_admin_action(self): """ Test method is an custom action for user admin """ self.assertTrue('export_email' in UserAdmin.actions) + + def test_action_has_a_short_description(self): + """ Test method has a short description """ + self.assertEqual(UserAdmin.export_email.short_description, + 'Export email of selected users')
3e71356de442e47bcb96ea311295bb447fd23341
bin/ogcserver-local.py
bin/ogcserver-local.py
#!/usr/bin/env python import os import sys import socket from os import path from pkg_resources import * if not len(sys.argv) > 1: sys.exit('Usage: %s <map.xml>' % os.path.basename(sys.argv[0])) sys.path.insert(0,os.path.abspath('.')) from ogcserver.wsgi import WSGIApp import ogcserver default_conf = resource_filename(ogcserver.__name__, 'default.conf') application = WSGIApp(default_conf,mapfile=sys.argv[1]) if __name__ == '__main__': from wsgiref.simple_server import make_server #if os.uname()[0] == 'Darwin': # host = socket.getfqdn() # yourname.local #else: # host = '0.0.0.0' host = '0.0.0.0' port = 8000 httpd = make_server(host, port, application) print "Listening at %s:%s...." % (host,port) httpd.serve_forever()
#!/usr/bin/env python import os import sys import socket from os import path from pkg_resources import * import argparse parser = argparse.ArgumentParser(description='Runs the ogcserver as WMS server') parser.add_argument('mapfile', type=str, help=''' A XML mapnik stylesheet ''') args = parser.parse_args() sys.path.insert(0,os.path.abspath('.')) from ogcserver.wsgi import WSGIApp import ogcserver default_conf = resource_filename(ogcserver.__name__, 'default.conf') application = WSGIApp(default_conf,args.mapfile) if __name__ == '__main__': from wsgiref.simple_server import make_server #if os.uname()[0] == 'Darwin': # host = socket.getfqdn() # yourname.local #else: # host = '0.0.0.0' host = '0.0.0.0' port = 8000 httpd = make_server(host, port, application) print "Listening at %s:%s...." % (host,port) httpd.serve_forever()
Use ArgumentParser to read parameters
Use ArgumentParser to read parameters
Python
bsd-3-clause
mapnik/OGCServer,pbabik/OGCServer,pbabik/OGCServer
--- +++ @@ -5,9 +5,15 @@ import socket from os import path from pkg_resources import * +import argparse -if not len(sys.argv) > 1: - sys.exit('Usage: %s <map.xml>' % os.path.basename(sys.argv[0])) +parser = argparse.ArgumentParser(description='Runs the ogcserver as WMS server') + +parser.add_argument('mapfile', type=str, help=''' +A XML mapnik stylesheet +''') + +args = parser.parse_args() sys.path.insert(0,os.path.abspath('.')) @@ -15,7 +21,7 @@ import ogcserver default_conf = resource_filename(ogcserver.__name__, 'default.conf') -application = WSGIApp(default_conf,mapfile=sys.argv[1]) +application = WSGIApp(default_conf,args.mapfile) if __name__ == '__main__': from wsgiref.simple_server import make_server
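For reference, the argparse parser introduced above can be exercised on its own; a standalone sketch with an illustrative argument list in place of a real command line:

import argparse

parser = argparse.ArgumentParser(description='Runs the ogcserver as WMS server')
parser.add_argument('mapfile', type=str, help='A XML mapnik stylesheet')

# Equivalent to invoking the script as: ogcserver-local.py map.xml
args = parser.parse_args(['map.xml'])
print(args.mapfile)  # map.xml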
b855dbde90bfd5842ad292f5f424957df02c2fe0
myflaskapp/myflaskapp/item/models.py
myflaskapp/myflaskapp/item/models.py
"""User models.""" import datetime as dt from flask_login import UserMixin from myflaskapp.database import Column, Model, SurrogatePK, db, reference_col, relationship from myflaskapp.extensions import bcrypt class Item(SurrogatePK, Model): __tablename__ = 'items' pass
"""User models.""" import datetime as dt from flask_login import UserMixin from myflaskapp.database import Column, Model, SurrogatePK, db, reference_col, relationship from myflaskapp.extensions import bcrypt class Item(SurrogatePK, Model): __tablename__ = 'items' text = Column(db.String(80),nullable=True)
Add text field to Item model
Add text field to Item model
Python
mit
terryjbates/test-driven-development-with-python,terryjbates/test-driven-development-with-python,terryjbates/test-driven-development-with-python,terryjbates/test-driven-development-with-python,terryjbates/test-driven-development-with-python
--- +++ @@ -8,4 +8,5 @@ class Item(SurrogatePK, Model): __tablename__ = 'items' - pass + text = Column(db.String(80),nullable=True) +
cb22f18186262bffbfa78b47f1d6e7c2a060d5ff
gearhorn/cmd.py
gearhorn/cmd.py
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import argparse import socket from gearhorn import worker def main(): parser = argparse.ArgumentParser() parser.add_argument('host', default=['localhost'], help='Gearman server(s)', nargs='*') parser.add_argument('--sqlalchemy-dsn', help='SQLAlchemy DSN for storing' ' matchmaking data') opts = parser.parse_args() w = worker.GearhornWorker(client_id='gearhorn_%s' % socket.gethostname(), dsn=opts.sqlalchemy_dsn) for host in opts.host: w.addServer(host) try: while True: w.work() except Exception as e: print(str(e)) return -1 return 0
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import argparse import socket from gearhorn import worker def main(): parser = argparse.ArgumentParser() parser.add_argument('host', default=['localhost'], help='Gearman server(s)', nargs='*') parser.add_argument('--sqlalchemy-dsn', help='SQLAlchemy DSN for storing' ' matchmaking data') opts = parser.parse_args() w = worker.GearhornWorker(client_id='gearhorn_%s' % socket.gethostname(), dsn=opts.sqlalchemy_dsn) for host in opts.host: if '/' in host: (host, port) = host.split('/') w.addServer(host, port=port) else: w.addServer(host) try: while True: w.work() except Exception as e: print(str(e)) return -1 return 0
Add port specification to gearhorn CLI
Add port specification to gearhorn CLI
Python
apache-2.0
SpamapS/gearhorn
--- +++ @@ -29,7 +29,11 @@ w = worker.GearhornWorker(client_id='gearhorn_%s' % socket.gethostname(), dsn=opts.sqlalchemy_dsn) for host in opts.host: - w.addServer(host) + if '/' in host: + (host, port) = host.split('/') + w.addServer(host, port=port) + else: + w.addServer(host) try: while True: w.work()
3020817d9edaa8b95c52ecd03f2c10e06f4fc5e7
hermes/feeds.py
hermes/feeds.py
from django.contrib.syndication.views import Feed from .models import Post from .settings import ( SYNDICATION_FEED_TITLE, SYNDICATION_FEED_LINK, SYNDICATION_FEED_DESCRIPTION, SYNDICATION_FEED_TYPE ) class LatestPostFeed(Feed): title = SYNDICATION_FEED_TITLE link = SYNDICATION_FEED_LINK description = SYNDICATION_FEED_DESCRIPTION feed_type = SYNDICATION_FEED_TYPE def items(self): return Post.objects.recent() def item_title(self, item): return item.subject def item_description(self, item): return item.short def item_pubdate(self, item): return item.created_on def item_updateddate(self, item): return item.modified_on def item_categories(self, item): return [category.title for category in item.category.hierarchy()] def item_author_name(self, item): return "{first_name} {last_name}".format( first_name=item.author.first_name, last_name=item.author.last_name, ) def item_author_email(self, item): return item.author.email
from django.contrib.syndication.views import Feed from .models import Post from .settings import ( SYNDICATION_FEED_TITLE, SYNDICATION_FEED_LINK, SYNDICATION_FEED_DESCRIPTION, SYNDICATION_FEED_TYPE ) class LatestPostFeed(Feed): title = SYNDICATION_FEED_TITLE link = SYNDICATION_FEED_LINK description = SYNDICATION_FEED_DESCRIPTION feed_type = SYNDICATION_FEED_TYPE def items(self): return Post.objects.recent() def item_title(self, item): return item.subject def item_description(self, item): return item.rendered def item_pubdate(self, item): return item.created_on def item_updateddate(self, item): return item.modified_on def item_categories(self, item): return [category.title for category in item.category.hierarchy()] def item_author_name(self, item): return "{first_name} {last_name}".format( first_name=item.author.first_name, last_name=item.author.last_name, ) def item_author_email(self, item): return item.author.email
Add full body text to RSS
Add full body text to RSS
Python
mit
DemocracyClub/django-hermes,DemocracyClub/django-hermes
--- +++ @@ -20,7 +20,7 @@ return item.subject def item_description(self, item): - return item.short + return item.rendered def item_pubdate(self, item): return item.created_on
6a68ef52ab9e762860087f701eee15e11786ca71
k3d/__init__.py
k3d/__init__.py
from ipywidgets import DOMWidget from IPython.display import display from traitlets import Unicode, Bytes, Dict from .objects import Objects from .factory import Factory import base64, json, zlib class K3D(DOMWidget, Factory): _view_module = Unicode('nbextensions/k3d_widget/view', sync=True) _view_name = Unicode('K3DView', sync=True) _model_module = Unicode('nbextensions/k3d_widget/model', sync=True) _model_name = Unicode('K3DModel', sync=True) COMPRESSION_LEVEL = 1 data = Bytes(sync=True) parameters = Dict(sync=True) def __init__(self, antialias=False, background_color=0xFFFFFF, height=512): super(K3D, self).__init__() self.__objects = Objects(self.__show) self.on_displayed(lambda x: self.__objects.flush()) self.parameters = { 'antialias': antialias, 'backgroundColor': background_color, 'height': height, } def __add__(self, obj): self.__objects.add(obj) return self def display(self): display(self) def __show(self, obj): self.data = base64.b64encode(zlib.compress(json.dumps(obj, separators=(',', ':')), self.COMPRESSION_LEVEL))
from ipywidgets import DOMWidget from IPython.display import display from traitlets import Unicode, Bytes, Dict from .objects import Objects from .factory import Factory import base64, json, zlib class K3D(DOMWidget, Factory): _view_module = Unicode('nbextensions/k3d_widget/view', sync=True) _view_name = Unicode('K3DView', sync=True) _model_module = Unicode('nbextensions/k3d_widget/model', sync=True) _model_name = Unicode('K3DModel', sync=True) COMPRESSION_LEVEL = 1 data = Bytes(sync=True) parameters = Dict(sync=True) def __init__(self, antialias=False, background_color=0xFFFFFF, height=512): super(K3D, self).__init__() self.__objects = Objects(self.__show) self.__display_strategy = self.__display self.on_displayed(lambda x: self.__objects.flush()) self.parameters = { 'antialias': antialias, 'backgroundColor': background_color, 'height': height, } def __add__(self, obj): self.__objects.add(obj) return self def display(self): self.__display_strategy() def __display(self): display(self) self.__display_strategy = self.__pass def __show(self, obj): self.data = base64.b64encode(zlib.compress(json.dumps(obj, separators=(',', ':')), self.COMPRESSION_LEVEL)) def __pass(self): pass
Fix calling "display" method multiple times
Fix calling "display" method multiple times
Python
mit
K3D-tools/K3D-jupyter,K3D-tools/K3D-jupyter,K3D-tools/K3D-jupyter,K3D-tools/K3D-jupyter
--- +++ @@ -21,6 +21,7 @@ super(K3D, self).__init__() self.__objects = Objects(self.__show) + self.__display_strategy = self.__display self.on_displayed(lambda x: self.__objects.flush()) self.parameters = { @@ -34,7 +35,14 @@ return self def display(self): + self.__display_strategy() + + def __display(self): display(self) + self.__display_strategy = self.__pass def __show(self, obj): self.data = base64.b64encode(zlib.compress(json.dumps(obj, separators=(',', ':')), self.COMPRESSION_LEVEL)) + + def __pass(self): + pass
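The fix above makes display() effectively one-shot by swapping the stored strategy method after the first call. A minimal, self-contained sketch of that pattern with a hypothetical widget class, using print as a stand-in for IPython's display():

class Widget(object):
    def __init__(self):
        self._display_strategy = self._render

    def display(self):
        self._display_strategy()

    def _render(self):
        print('rendering widget')            # stand-in for display(self)
        self._display_strategy = self._pass  # subsequent calls become no-ops

    def _pass(self):
        pass


w = Widget()
w.display()  # rendering widget
w.display()  # no output; strategy already swapped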
bc2d57bb36373eded3ae1cd82cd4b91bd5649b57
kyokai/route.py
kyokai/route.py
""" Module for Kyokai routes. """ import re class Route(object): """ A route is simply a wrapped coroutine object for a request. It takes in a regular expression as a matcher, for the path, and a list of accepted methods. """ def __init__(self, matcher: str, methods: list): """ Create a new Route. """ self.matcher = re.compile(matcher) self.allowed_methods = methods self._wrapped_coro = None def kyokai_match(self, path: str, method: str): """ Check if a given path matches the specified route. """ matched = self.matcher.match(path) in_m = method.lower() in [m.lower() for m in self.allowed_methods] return (matched and in_m) def __call__(self, coro): """ Sets the coroutine. """ self._wrapped_coro = coro async def invoke(self, request): """ Invoke the route, calling the underlying coroutine. """ # Extract match groups. matches = self.matcher.findall(request.path)
""" Module for Kyokai routes. """ import re from kyokai.exc import HTTPClientException, HTTPException class Route(object): """ A route is simply a wrapped coroutine object for a request. It takes in a regular expression as a matcher, for the path, and a list of accepted methods. """ def __init__(self, matcher: str, methods: list): """ Create a new Route. """ self.matcher = re.compile(matcher) self.allowed_methods = methods self._wrapped_coro = None def kyokai_match(self, path: str, method: str): """ Check if a given path matches the specified route. """ matched = self.matcher.match(path) in_m = method.lower() in [m.lower() for m in self.allowed_methods] return (matched and in_m) def __call__(self, coro): """ Sets the coroutine. """ self._wrapped_coro = coro async def invoke(self, request): """ Invoke the route, calling the underlying coroutine. """ # Extract match groups. matches = self.matcher.findall(request.path) # Invoke the coroutine. try: if matches: result = await self._wrapped_coro(request, *matches) else: result = await self._wrapped_coro(request) except Exception as e: if isinstance(e, HTTPClientException): raise else: raise HTTPException(500) return result
Make invoke() actually invoke the coroutine.
Make invoke() actually invoke the coroutine.
Python
mit
SunDwarf/Kyoukai
--- +++ @@ -2,6 +2,8 @@ Module for Kyokai routes. """ import re + +from kyokai.exc import HTTPClientException, HTTPException class Route(object): @@ -39,4 +41,16 @@ """ # Extract match groups. matches = self.matcher.findall(request.path) + # Invoke the coroutine. + try: + if matches: + result = await self._wrapped_coro(request, *matches) + else: + result = await self._wrapped_coro(request) + except Exception as e: + if isinstance(e, HTTPClientException): + raise + else: + raise HTTPException(500) + return result
40db9d64d616d99ec2464aff6fe5960943584ac3
rnacentral/apiv1/urls.py
rnacentral/apiv1/urls.py
""" Copyright [2009-2014] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from django.views.generic import TemplateView from django.conf.urls import patterns, url, include from rest_framework import routers from apiv1 import views router = routers.DefaultRouter() router.register(r'rna', views.RnaViewSet) router.register(r'accession', views.AccessionViewSet) urlpatterns = patterns('', url(r'^v1/', include(router.urls)), url(r'^v1/', include('rest_framework.urls', namespace='rest_framework_v1')), )
""" Copyright [2009-2014] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from django.views.generic import TemplateView from django.conf.urls import patterns, url, include from rest_framework import routers from apiv1 import views router = routers.DefaultRouter() router.register(r'rna', views.RnaViewSet) router.register(r'accession', views.AccessionViewSet) urlpatterns = patterns('', url(r'^v1/', include(router.urls)), url(r'^v1/', include('rest_framework.urls', namespace='rest_framework_v1')), url(r'^current/', include(router.urls)), url(r'^current/', include('rest_framework.urls', namespace='rest_framework_v1')), )
Add a stable url for the current API version
Add a stable url for the current API version
Python
apache-2.0
RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode
--- +++ @@ -23,4 +23,6 @@ urlpatterns = patterns('', url(r'^v1/', include(router.urls)), url(r'^v1/', include('rest_framework.urls', namespace='rest_framework_v1')), + url(r'^current/', include(router.urls)), + url(r'^current/', include('rest_framework.urls', namespace='rest_framework_v1')), )
a5898f8e5b2b25af472f1e2e5ce02626b86db5f2
tunneler/tests/test_models.py
tunneler/tests/test_models.py
from unittest import TestCase from ..models import Tunnel
from unittest import TestCase from ..models import Tunnel class TestModels(TestCase): def test_defaults(self): tunnel = Tunnel() self.assertEquals(tunnel.name, 'unnamed') self.assertEquals(tunnel.process, None) self.assertEqual(tunnel.local_port, 0) self.assertEqual(tunnel.host, 'somehost') self.assertEqual(tunnel.remote_port, 0) self.assertEqual(tunnel.user, 'somebody') self.assertEqual(tunnel.server, 'somewhere')
Add a basic test for models.
Add a basic test for models.
Python
isc
xoliver/tunneler,xoliver/tunneler
--- +++ @@ -1,3 +1,15 @@ from unittest import TestCase from ..models import Tunnel + + +class TestModels(TestCase): + def test_defaults(self): + tunnel = Tunnel() + self.assertEquals(tunnel.name, 'unnamed') + self.assertEquals(tunnel.process, None) + self.assertEqual(tunnel.local_port, 0) + self.assertEqual(tunnel.host, 'somehost') + self.assertEqual(tunnel.remote_port, 0) + self.assertEqual(tunnel.user, 'somebody') + self.assertEqual(tunnel.server, 'somewhere')
48bd50609fffb18dbab821522810ec587751434c
arguments.py
arguments.py
import argparse from settings import HONEYPORT """ Here we define command line arguments. `port` stands for port, to listen on. `-v` to increase verbose of the server """ def parse(): parser = argparse.ArgumentParser( description='Serve some sweet honey to the ubiquitous bots!', epilog='And that`s how you`d detect a sneaky chinese bot.', prog='mfh.py', ) client_group = parser.add_mutually_exclusive_group() client_group.add_argument( '-c', action='store_true', help='launch client with on port defined in settings', ) client_group.add_argument( '--client', help='port to start a client on', metavar='PORT', nargs='?', type=int, ) parser.add_argument( '-v', '--verbose', action='store_true', help='increase output verbosity', ) return parser.parse_args()
import argparse from settings import HONEYPORT """ Here we define command line arguments. `port` stands for port, to listen on. `-v` to increase verbose of the server """ def parse(): parser = argparse.ArgumentParser( description='Serve some sweet honey to the ubiquitous bots!', epilog='And that`s how you`d detect a sneaky chinese bot.', prog='mfh.py', ) client_group = parser.add_mutually_exclusive_group() client_group.add_argument( '-c', action='store_true', help='launch client with on port defined in settings', ) client_group.add_argument( '--client', help='port to start a client on', metavar='PORT', nargs='?', type=int, ) parser.add_argument( '-u', '--updater', action='store_true', help='enable self updating', ) parser.add_argument( '-v', '--verbose', action='store_true', help='increase output verbosity', ) return parser.parse_args()
Add option to enable self updating
Add option to enable self updating This is a customization option for more flexibility. If you want to autoupdate, then you can give a -u option. If not - don't.
Python
mit
Zloool/manyfaced-honeypot
--- +++ @@ -33,6 +33,13 @@ ) parser.add_argument( + '-u', + '--updater', + action='store_true', + help='enable self updating', + ) + + parser.add_argument( '-v', '--verbose', action='store_true',
db8b5bb3b596609e06adf51ed7d72b97fc786ad5
python/nagcat/__init__.py
python/nagcat/__init__.py
# Copyright 2008-2009 ITA Software, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Nagcat: The Nagios Helpful Pet""" from twisted.python import versions import twisted import coil # Make sure we have the right coil version _coil_version = getattr(coil, '__version_info__', (0,0)) if _coil_version < (0,3,14) or _coil_version <= (0,3,99): raise ImportError("coil 0.3.x >= 0.3.14 is required") # Require Twisted >= 8.2, older versions had problematic bugs if twisted.version < versions.Version('twisted', 8, 2, 0): raise ImportError("Twisted >= 8.2 is required")
# Copyright 2008-2009 ITA Software, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Nagcat: The Nagios Helpful Pet""" from twisted.python import versions import twisted import coil # Make sure we have the right coil version _coil_version = getattr(coil, '__version_info__', (0,0)) if _coil_version < (0,3,14) or _coil_version >= (0,3,99): raise ImportError("coil 0.3.x >= 0.3.14 is required") # Require Twisted >= 8.2, older versions had problematic bugs if twisted.version < versions.Version('twisted', 8, 2, 0): raise ImportError("Twisted >= 8.2 is required")
Fix my coil version check
Fix my coil version check
Python
apache-2.0
marineam/nagcat,marineam/nagcat,marineam/nagcat
--- +++ @@ -20,7 +20,7 @@ # Make sure we have the right coil version _coil_version = getattr(coil, '__version_info__', (0,0)) -if _coil_version < (0,3,14) or _coil_version <= (0,3,99): +if _coil_version < (0,3,14) or _coil_version >= (0,3,99): raise ImportError("coil 0.3.x >= 0.3.14 is required") # Require Twisted >= 8.2, older versions had problematic bugs
9f8f929b8fdc0ebfdb609621f4613d31b73639b0
sipa/utils/link_patch.py
sipa/utils/link_patch.py
import re from flask import request from markdown import Markdown from markdown.extensions import Extension from markdown.postprocessors import Postprocessor def absolute_path_replacer(match): """Correct the url in a regex match prepending the absolute path""" assert len(match.groups()) == 2 prefix = request.script_root if prefix.endswith("/"): prefix = prefix[:-1] return "{key}=\"{path}\"".format( key=match.group(1), path=prefix + match.group(2) ) class LinkPostprocessor(Postprocessor): def run(self, text): return re.sub( '(href|src)="(/[^"]*)"', absolute_path_replacer, text, flags=re.IGNORECASE, ) class AbsoluteLinkExtension(Extension): """ Add the absolute link patch to Markdown. """ def extendMarkdown(self, md: Markdown): """ Add an instance of TableProcessor to BlockParser. """ md.postprocessors.register( LinkPostprocessor(md), 'link_patch', 50, ) def makeExtension(*args, **kwargs): return AbsoluteLinkExtension(*args, **kwargs)
import re from flask import request from markdown import Markdown from markdown.extensions import Extension from markdown.postprocessors import Postprocessor def absolute_path_replacer(match): """Correct the url in a regex match prepending the absolute path""" assert len(match.groups()) == 2 prefix = request.script_root if prefix.endswith("/"): prefix = prefix[:-1] return "{key}=\"{path}\"".format( key=match.group(1), path=prefix + match.group(2) ) class LinkPostprocessor(Postprocessor): """A postprocessor fixing absolute links in the HTML result of a markdown render. This needs to be a postprocessor compared to a treeprocessor, because the link may be in a pure HTML block. Those blocks however are processed by means of the [`MarkdownInHtmlExtension`](https://python-markdown.github.io/extensions/md_in_html/), which replaces HTML by a tag in a preprocessing step and replaces this tag by the HTML in a postprocessing step. Therefore, the only way to catch these links is with a postprocessor and a regex. """ def run(self, text): return re.sub( '(href|src)="(/[^"]*)"', absolute_path_replacer, text, flags=re.IGNORECASE, ) class AbsoluteLinkExtension(Extension): """ Add the absolute link patch to Markdown. """ def extendMarkdown(self, md: Markdown): """ Add an instance of TableProcessor to BlockParser. """ # see https://python-markdown.github.io/extensions/api/#registries for what's happening here md.postprocessors.register( LinkPostprocessor(md), 'link_patch', # we need to run after `raw_html` (prio=30). See `LinkPostprocessor` docstring. 20, ) def makeExtension(*args, **kwargs): return AbsoluteLinkExtension(*args, **kwargs)
Fix priority of link postprocessor
Fix priority of link postprocessor Fixes #424
Python
mit
agdsn/sipa,agdsn/sipa,agdsn/sipa,agdsn/sipa
--- +++ @@ -21,6 +21,15 @@ class LinkPostprocessor(Postprocessor): + """A postprocessor fixing absolute links in the HTML result of a markdown render. + + This needs to be a postprocessor compared to a treeprocessor, because + the link may be in a pure HTML block. Those blocks however are processed by means + of the [`MarkdownInHtmlExtension`](https://python-markdown.github.io/extensions/md_in_html/), + which replaces HTML by a tag in a preprocessing step and replaces this tag by the HTML + in a postprocessing step. + Therefore, the only way to catch these links is with a postprocessor and a regex. + """ def run(self, text): return re.sub( '(href|src)="(/[^"]*)"', @@ -35,10 +44,12 @@ def extendMarkdown(self, md: Markdown): """ Add an instance of TableProcessor to BlockParser. """ + # see https://python-markdown.github.io/extensions/api/#registries for what's happening here md.postprocessors.register( LinkPostprocessor(md), 'link_patch', - 50, + # we need to run after `raw_html` (prio=30). See `LinkPostprocessor` docstring. + 20, )
0a0d6b87aac75f298194b43cfcea67b0d1651a97
knights/library.py
knights/library.py
from functools import partial class Library: ''' Container for registering tags and filters ''' def __init__(self): self.tags = {} self.filters = {} self.helpers = {} def tag(self, func=None, name=None): if func is None: return partial(self.tag, name=name) if name is None: name = func.__name__ self.tags[name] = func return func def helper(self, func=None, name=None): if func is None: return partial(self.helper, name=name) if name is None: name = func.__name__ self.helpers[name] = func return func
from functools import partial class Library: ''' Container for registering tags and helpers ''' def __init__(self): self.tags = {} self.helpers = {} def tag(self, func=None, name=None): if func is None: return partial(self.tag, name=name) if name is None: name = func.__name__ self.tags[name] = func return func def helper(self, func=None, name=None): if func is None: return partial(self.helper, name=name) if name is None: name = func.__name__ self.helpers[name] = func return func
Remove another reference to filter
Remove another reference to filter
Python
mit
funkybob/knights-templater,funkybob/knights-templater
--- +++ @@ -3,11 +3,10 @@ class Library: ''' - Container for registering tags and filters + Container for registering tags and helpers ''' def __init__(self): self.tags = {} - self.filters = {} self.helpers = {} def tag(self, func=None, name=None):
58caeb5953df611255f0b14b59694f60ba66345c
comics/comics/darklegacy.py
comics/comics/darklegacy.py
from comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.core.comic_data import ComicDataBase class ComicData(ComicDataBase): name = "Dark Legacy" language = "en" url = "http://www.darklegacycomics.com/" start_date = "2006-01-01" rights = "Arad Kedar" class Crawler(CrawlerBase): history_capable_days = 29 * 7 # 7 weekly releases schedule = "Su" time_zone = "US/Pacific" def crawl(self, pub_date): feed = self.parse_feed("http://www.darklegacycomics.com/feed.xml") for entry in feed.for_date(pub_date): title = entry.title page = self.parse_page(entry.link) url = page.src("img.comic-image") return CrawlerImage(url, title)
from comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.core.comic_data import ComicDataBase class ComicData(ComicDataBase): name = "Dark Legacy" language = "en" url = "http://www.darklegacycomics.com/" start_date = "2006-01-01" rights = "Arad Kedar" class Crawler(CrawlerBase): history_capable_days = 33 * 7 # 33 weekly releases schedule = "Su" time_zone = "US/Pacific" def crawl(self, pub_date): feed = self.parse_feed("http://www.darklegacycomics.com/feed.xml") for entry in feed.for_date(pub_date): title = entry.title page = self.parse_page(entry.link) url = page.src("img.comic-image") return CrawlerImage(url, title)
Adjust history capability for "Dark Legacy"
Adjust history capability for "Dark Legacy"
Python
agpl-3.0
jodal/comics,datagutten/comics,jodal/comics,datagutten/comics,jodal/comics,datagutten/comics,datagutten/comics,jodal/comics
--- +++ @@ -11,7 +11,7 @@ class Crawler(CrawlerBase): - history_capable_days = 29 * 7 # 7 weekly releases + history_capable_days = 33 * 7 # 33 weekly releases schedule = "Su" time_zone = "US/Pacific"
d1171066141418b5ca0dd6a8b996a00cb9e45d00
st2common/st2common/transport/execution.py
st2common/st2common/transport/execution.py
# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # All Exchanges and Queues related to liveaction. from kombu import Exchange, Queue from st2common.transport import publishers HISTORY_XCHG = Exchange('st2.history', type='topic') class ActionExecutionPublisher(publishers.CUDPublisher): def __init__(self, url): super(ActionExecutionPublisher, self).__init__(url, HISTORY_XCHG) def get_queue(name=None, routing_key=None, exclusive=False): return Queue(name, HISTORY_XCHG, routing_key=routing_key, exclusive=exclusive)
# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # All Exchanges and Queues related to liveaction. from kombu import Exchange, Queue from st2common.transport import publishers HISTORY_XCHG = Exchange('st2.execution', type='topic') class ActionExecutionPublisher(publishers.CUDPublisher): def __init__(self, url): super(ActionExecutionPublisher, self).__init__(url, HISTORY_XCHG) def get_queue(name=None, routing_key=None, exclusive=False): return Queue(name, HISTORY_XCHG, routing_key=routing_key, exclusive=exclusive)
Update name of rabbit queue
Update name of rabbit queue
Python
apache-2.0
alfasin/st2,pixelrebel/st2,pixelrebel/st2,Plexxi/st2,StackStorm/st2,Itxaka/st2,tonybaloney/st2,armab/st2,punalpatel/st2,peak6/st2,Plexxi/st2,nzlosh/st2,Itxaka/st2,grengojbo/st2,tonybaloney/st2,armab/st2,pinterb/st2,punalpatel/st2,StackStorm/st2,tonybaloney/st2,Itxaka/st2,Plexxi/st2,jtopjian/st2,nzlosh/st2,peak6/st2,Plexxi/st2,dennybaa/st2,alfasin/st2,StackStorm/st2,pixelrebel/st2,pinterb/st2,armab/st2,jtopjian/st2,lakshmi-kannan/st2,nzlosh/st2,jtopjian/st2,emedvedev/st2,peak6/st2,lakshmi-kannan/st2,StackStorm/st2,grengojbo/st2,pinterb/st2,punalpatel/st2,lakshmi-kannan/st2,dennybaa/st2,alfasin/st2,grengojbo/st2,emedvedev/st2,emedvedev/st2,dennybaa/st2,nzlosh/st2
--- +++ @@ -18,7 +18,7 @@ from kombu import Exchange, Queue from st2common.transport import publishers -HISTORY_XCHG = Exchange('st2.history', type='topic') +HISTORY_XCHG = Exchange('st2.execution', type='topic') class ActionExecutionPublisher(publishers.CUDPublisher):
111ea97340b600d24dd21316d55cf82fa3ece158
util/csrf.py
util/csrf.py
# Oxypanel # File: util/csrf.py # Desc: csrf protection! from uuid import uuid4 from flask import session, abort, request, Markup import config from app import app # Check all POST/PUT/DELETE's # 403 on failure @app.before_request def csrf_check(): # No check when debugging if config.DEBUG: return # TODO: Check referrer matches us # Check a valid csrf_token was presented if request.method in ['POST', 'PUT', 'DELETE']: token = session.pop('csrf_token', None) if not token or token != str(request.form.get('csrf_token')): abort(403) # Generate/store CSRF tokens def generate_csrf(): if 'csrf_token' not in session: session['csrf_token'] = str(uuid4()) return session['csrf_token'] app.jinja_env.globals['csrf_token'] = generate_csrf # Template shortcut def generate_csrf_input(): token = generate_csrf() return Markup('<input type="hidden" name="csrf_token" value="{0}" />'.format(token)) app.jinja_env.globals['csrf_input'] = generate_csrf_input
# Oxypanel # File: util/csrf.py # Desc: csrf protection! from uuid import uuid4 from flask import session, abort, request, Markup from app import app # Check all POST/PUT/DELETE's # 403 on failure @app.before_request def csrf_check(): # TODO: Check referrer matches us # Check a valid csrf_token was presented if request.method in ['POST', 'PUT', 'DELETE']: token = session.pop('csrf_token', None) if not token or token != str(request.form.get('csrf_token')): abort(401) # Generate/store CSRF tokens def generate_csrf(): if 'csrf_token' not in session: session['csrf_token'] = str(uuid4()) return session['csrf_token'] app.jinja_env.globals['csrf_token'] = generate_csrf # Template shortcut def generate_csrf_input(): token = generate_csrf() return Markup('<input type="hidden" name="csrf_token" value="{0}" />'.format(token)) app.jinja_env.globals['csrf_input'] = generate_csrf_input
Use 401 for CSRF failure. Always CSRF even in DEBUG
Use 401 for CSRF failure. Always CSRF even in DEBUG
Python
mit
oxyio/oxyio,oxyio/oxyio,oxyio/oxyio,oxyio/oxyio
--- +++ @@ -6,7 +6,6 @@ from flask import session, abort, request, Markup -import config from app import app @@ -14,17 +13,13 @@ # 403 on failure @app.before_request def csrf_check(): - # No check when debugging - if config.DEBUG: - return - # TODO: Check referrer matches us # Check a valid csrf_token was presented if request.method in ['POST', 'PUT', 'DELETE']: token = session.pop('csrf_token', None) if not token or token != str(request.form.get('csrf_token')): - abort(403) + abort(401) # Generate/store CSRF tokens
a962574f19673c17b033a50809040956a63970e5
test/pywertest.py
test/pywertest.py
#!/usr/bin/env python3 import sys import os import unittest import libaur.printer as printer import libaur.aur as aur class PrinterBadInput(unittest.TestCase): def test_string_input_dlpkgs(self): '''download_pkg should fail with string input as first arg''' self.assertRaises(TypeError, printer.download_pkgs, 'foo', '/tmp/.pywer_test_suite') def test_string_input_ppsi(self): '''pretty_print_simple_info should fail with string input as first arg''' self.assertRaises(TypeError, printer.pretty_print_simple_info, 'foo') def test_string_input_ppu(self): '''pretty_print_updpkgs should fail with string input pkgs arg''' self.assertRaises(TypeError, printer.pretty_print_updpkgs, pkgs='foo') # Add a mini-json server so that we can test output as well if __name__ == '__main__': unittest.main()
#!/usr/bin/env python3 import sys import os import unittest import libaur.printer as printer import libaur.aur as aur class PrinterBadInput(unittest.TestCase): def test_string_input_dlpkgs(self): '''download_pkg should fail with string input as first arg''' self.assertRaises(TypeError, printer.download_pkgs, 'foo', '/tmp/.pywer_test_suite') def test_string_input_ppsi(self): '''pretty_print_simple_info should fail with string input as first arg''' self.assertRaises(TypeError, printer.pretty_print_info, 'foo') def test_string_input_ppu(self): '''pretty_print_updpkgs should fail with string input pkgs arg''' self.assertRaises(TypeError, printer.pretty_print_updpkgs, pkgs='foo') # Add a mini-json server so that we can test output as well if __name__ == '__main__': unittest.main()
Change the info function name
test: Change the info function name
Python
mit
KaiSforza/pywer
--- +++ @@ -15,7 +15,7 @@ def test_string_input_ppsi(self): '''pretty_print_simple_info should fail with string input as first arg''' - self.assertRaises(TypeError, printer.pretty_print_simple_info, 'foo') + self.assertRaises(TypeError, printer.pretty_print_info, 'foo') def test_string_input_ppu(self): '''pretty_print_updpkgs should fail with string input pkgs arg'''
c56d9cded7190d524d3d4dbcd960a0b0fe8bf10c
apps/local_apps/account/middleware.py
apps/local_apps/account/middleware.py
from django.utils.cache import patch_vary_headers from django.utils import translation from account.models import Account class LocaleMiddleware(object): """ This is a very simple middleware that parses a request and decides what translation object to install in the current thread context depending on the user's account. This allows pages to be dynamically translated to the language the user desires (if the language is available, of course). """ def get_language_for_user(self, request): if request.user.is_authenticated(): try: account = Account.objects.get(user=request.user) return account.language except (Account.DoesNotExist, Account.MultipleObjectsReturned): pass return translation.get_language_from_request(request) def process_request(self, request): translation.activate(self.get_language_for_user(request)) request.LANGUAGE_CODE = translation.get_language() def process_response(self, request, response): patch_vary_headers(response, ('Accept-Language',)) response['Content-Language'] = translation.get_language() translation.deactivate() return response
from django.utils.cache import patch_vary_headers from django.utils import translation from account.models import Account class LocaleMiddleware(object): """ This is a very simple middleware that parses a request and decides what translation object to install in the current thread context depending on the user's account. This allows pages to be dynamically translated to the language the user desires (if the language is available, of course). """ def get_language_for_user(self, request): if request.user.is_authenticated(): try: account = Account.objects.get(user=request.user) return account.language except Account.DoesNotExist: pass return translation.get_language_from_request(request) def process_request(self, request): translation.activate(self.get_language_for_user(request)) request.LANGUAGE_CODE = translation.get_language() def process_response(self, request, response): patch_vary_headers(response, ('Accept-Language',)) response['Content-Language'] = translation.get_language() translation.deactivate() return response
Throw 500 error on multiple account in LocaleMiddleware so we can fix them.
Throw 500 error on multiple account in LocaleMiddleware so we can fix them. git-svn-id: 51ba99f60490c2ee9ba726ccda75a38950f5105d@1120 45601e1e-1555-4799-bd40-45c8c71eef50
Python
mit
alex/pinax,amarandon/pinax,amarandon/pinax,alex/pinax,alex/pinax,amarandon/pinax,amarandon/pinax
--- +++ @@ -16,7 +16,7 @@ try: account = Account.objects.get(user=request.user) return account.language - except (Account.DoesNotExist, Account.MultipleObjectsReturned): + except Account.DoesNotExist: pass return translation.get_language_from_request(request)
5da30efc6cbbc58db60ba29643c56448b5a79e77
test/test_pipeline/components/test_base.py
test/test_pipeline/components/test_base.py
import unittest from autosklearn.pipeline.components.base import find_components, \ AutoSklearnClassificationAlgorithm class TestBase(unittest.TestCase): def test_find_components(self): c = find_components('dummy_components', 'dummy_components', AutoSklearnClassificationAlgorithm) self.assertEqual(len(c), 2) self.assertEqual(c['dummy_component_1'].__name__, 'DummyComponent1') self.assertEqual(c['dummy_component_2'].__name__, 'DummyComponent2')
import os import sys import unittest from autosklearn.pipeline.components.base import find_components, \ AutoSklearnClassificationAlgorithm this_dir = os.path.dirname(os.path.abspath(__file__)) sys.path.append(this_dir) class TestBase(unittest.TestCase): def test_find_components(self): c = find_components('dummy_components', os.path.join(this_dir, 'dummy_components'), AutoSklearnClassificationAlgorithm) self.assertEqual(len(c), 2) self.assertEqual(c['dummy_component_1'].__name__, 'DummyComponent1') self.assertEqual(c['dummy_component_2'].__name__, 'DummyComponent2')
FIX fix unit test by fixing import paths
FIX fix unit test by fixing import paths
Python
bsd-3-clause
automl/auto-sklearn,automl/auto-sklearn
--- +++ @@ -1,13 +1,19 @@ +import os +import sys import unittest from autosklearn.pipeline.components.base import find_components, \ AutoSklearnClassificationAlgorithm +this_dir = os.path.dirname(os.path.abspath(__file__)) +sys.path.append(this_dir) + class TestBase(unittest.TestCase): def test_find_components(self): - c = find_components('dummy_components', 'dummy_components', + c = find_components('dummy_components', + os.path.join(this_dir, 'dummy_components'), AutoSklearnClassificationAlgorithm) self.assertEqual(len(c), 2) self.assertEqual(c['dummy_component_1'].__name__, 'DummyComponent1')
4f31fb7962a241c13251994c8659595fa2e0693e
cinder/__init__.py
cinder/__init__.py
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Root Cinder module.""" import os # Ensure compatibility issues are covered with pythondsn os.environ['EVENTLET_NO_GREENDNS'] = 'yes' # Make sure eventlet is loaded import eventlet # noqa
Set EVENTLET_NO_GREENDNS for IPv6 and dnspython compat
Set EVENTLET_NO_GREENDNS for IPv6 and dnspython compat If dnspython is present it causes socket.getaddrinfo() to be monkeypatched wiht code that does not work with IPv6. To get around this, the EVENTLET_NO_GREENDNS environment variable must be set before importing eventlet. This is resolved in the eventlet upstream code [1], but has not yet been released. Once our requirements have been raised past the 0.19.0 release with a version that includes this change we can remove this workaround. [1] https://github.com/eventlet/eventlet/commit/52b09becacd23f384cf69ae37d70c893c43e3b13 Change-Id: Id9fe265d67f6e9ea5090bebcacae4a7a9150c5c2 Closes-bug: #1471049
Python
apache-2.0
j-griffith/cinder,ge0rgi/cinder,openstack/cinder,j-griffith/cinder,eharney/cinder,Datera/cinder,Datera/cinder,phenoxim/cinder,mahak/cinder,openstack/cinder,phenoxim/cinder,mahak/cinder,eharney/cinder
--- +++ @@ -0,0 +1,21 @@ +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +"""Root Cinder module.""" + +import os + +# Ensure compatibility issues are covered with pythondsn +os.environ['EVENTLET_NO_GREENDNS'] = 'yes' + +# Make sure eventlet is loaded +import eventlet # noqa
22de9f9d45edbf5eddea2940549efc225c805f02
tests/ext_i18n.py
tests/ext_i18n.py
from __future__ import unicode_literals from unittest import TestCase from wtforms import TextField, validators from wtforms.ext.i18n.utils import get_translations from wtforms.ext.i18n import form as i18n_form class I18NTest(TestCase): def test_failure(self): self.assertRaises(IOError, get_translations, []) def test_us_translation(self): translations = get_translations(['en_US']) self.assertEqual(translations.gettext('Invalid Mac address.'), 'Invalid MAC address.') class FormTest(TestCase): class F(i18n_form.Form): LANGUAGES = ['en_US', 'en'] a = TextField(validators=[validators.Required()]) def test_form(self): tcache = i18n_form.translations_cache tcache.clear() form = self.F() assert ('en_US', 'en') in tcache self.assertIs(form._get_translations(), tcache[('en_US', 'en')]) assert not form.validate() self.assertEqual(form.a.errors[0], 'This field is required.') form = self.F(LANGUAGES=['es']) assert len(tcache) == 2 assert ('es', ) in tcache assert not form.validate() self.assertEqual(form.a.errors[0], 'Este campo es obligatorio.') if __name__ == '__main__': from unittest import main main()
from __future__ import unicode_literals from unittest import TestCase from wtforms import TextField, validators from wtforms.ext.i18n.utils import get_translations from wtforms.ext.i18n import form as i18n_form class I18NTest(TestCase): def test_failure(self): self.assertRaises(IOError, get_translations, []) def test_us_translation(self): translations = get_translations(['en_US']) self.assertEqual(translations.gettext('Invalid Mac address.'), 'Invalid MAC address.') class FormTest(TestCase): class F(i18n_form.Form): LANGUAGES = ['en_US', 'en'] a = TextField(validators=[validators.Required()]) def test_form(self): tcache = i18n_form.translations_cache tcache.clear() form = self.F() assert ('en_US', 'en') in tcache assert form._get_translations() is tcache[('en_US', 'en')] assert not form.validate() self.assertEqual(form.a.errors[0], 'This field is required.') form = self.F(LANGUAGES=['es']) assert len(tcache) == 2 assert ('es', ) in tcache assert not form.validate() self.assertEqual(form.a.errors[0], 'Este campo es obligatorio.') if __name__ == '__main__': from unittest import main main()
Fix assert for python 2.6 compatibility
Fix assert for python 2.6 compatibility
Python
bsd-3-clause
Xender/wtforms,Aaron1992/wtforms,jmagnusson/wtforms,pawl/wtforms,cklein/wtforms,pawl/wtforms,crast/wtforms,hsum/wtforms,Aaron1992/wtforms,skytreader/wtforms,wtforms/wtforms,subyraman/wtforms
--- +++ @@ -26,7 +26,7 @@ form = self.F() assert ('en_US', 'en') in tcache - self.assertIs(form._get_translations(), tcache[('en_US', 'en')]) + assert form._get_translations() is tcache[('en_US', 'en')] assert not form.validate() self.assertEqual(form.a.errors[0], 'This field is required.')