Dataset columns (all string-valued):

  commit         length 40 (fixed)
  old_file       length 4 to 150
  new_file       length 4 to 150
  old_contents   length 0 to 3.26k
  new_contents   length 1 to 4.43k
  subject        length 15 to 501
  message        length 15 to 4.06k
  lang           categorical, 4 distinct values
  license        categorical, 13 distinct values
  repos          length 5 to 91.5k
  diff           length 0 to 4.35k
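Each record pairs a file's contents before and after a commit with the commit's metadata and a unified diff. As a rough illustration of how these columns might be consumed, the sketch below loads the records with the Hugging Face `datasets` library and prints a few fields; the file name `commit_dataset.jsonl` is a placeholder assumption, not the dataset's real location or identifier.

```python
# A minimal sketch, assuming the records are available as a local JSON Lines
# file; "commit_dataset.jsonl" is a placeholder name, not the dataset's real
# location or identifier.
from datasets import load_dataset

ds = load_dataset("json", data_files="commit_dataset.jsonl", split="train")

record = ds[0]
print(record["commit"])    # 40-character commit hash
print(record["old_file"])  # file path before the change
print(record["subject"])   # first line of the commit message
print(record["diff"])      # unified diff from old_contents to new_contents
```

The records below follow this schema.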
9904e3843b2efca908845d57033b13f35c2e2a4d
st2auth_pam_backend/__init__.py
st2auth_pam_backend/__init__.py
# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from pam_backend import PAMAuthenticationBackend __all__ = [ 'PAMAuthenticationBackend' ] __version__ = '0.1.0'
# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import from .pam_backend import PAMAuthenticationBackend __all__ = [ 'PAMAuthenticationBackend' ] __version__ = '0.1.0'
Fix code so import works under Python 3.
Fix code so import works under Python 3.
Python
apache-2.0
StackStorm/st2-auth-backend-pam,StackStorm/st2-auth-backend-pam
--- +++ @@ -13,7 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -from pam_backend import PAMAuthenticationBackend +from __future__ import absolute_import + +from .pam_backend import PAMAuthenticationBackend __all__ = [ 'PAMAuthenticationBackend'
4b7466e3798dea0b3edf94c1e5cc376ba7615d2f
events/models.py
events/models.py
from django.db import models from django.conf import settings # Create your models here. #Events : # Des users peuvent participer à un event # Les gens peuvnet être "intéressés" # Utiliser https://github.com/thoas/django-sequere ? # API hackeragenda class Event(models.Model): STATUS_CHOICES = ( ("i", "in preparation"), ("r", "ready"), ("p", "planned"), ("j", "just an idea"), ) place = models.CharField(max_length=300) start = models.DateTimeField() stop = models.DateTimeField() title = models.CharField(max_length=300) status = models.CharField(max_length=1, choices=STATUS_CHOICES) organizer = models.ForeignKey(settings.AUTH_USER_MODEL)
from django.db import models from django.conf import settings # Create your models here. #Events : # Des users peuvent participer à un event # Les gens peuvnet être "intéressés" # Utiliser https://github.com/thoas/django-sequere ? # API hackeragenda class Event(models.Model): STATUS_CHOICES = ( ("i", "in preparation"), ("r", "ready"), ("p", "planned"), ("j", "just an idea"), ) place = models.CharField(max_length=300) start = models.DateTimeField() stop = models.DateTimeField() title = models.CharField(max_length=300) status = models.CharField(max_length=1, choices=STATUS_CHOICES) organizer = models.ForeignKey(settings.AUTH_USER_MODEL) description = models.TextField()
Add a description to an event
[add] Add a description to an event
Python
agpl-3.0
UrLab/incubator,UrLab/incubator,UrLab/incubator,UrLab/incubator
--- +++ @@ -21,3 +21,5 @@ title = models.CharField(max_length=300) status = models.CharField(max_length=1, choices=STATUS_CHOICES) organizer = models.ForeignKey(settings.AUTH_USER_MODEL) + description = models.TextField() +
ca4d5ac415c16594afff5e8c39c732f58e1e3de2
recommender/__init__.py
recommender/__init__.py
from .similarity_measure import ( cosine, euclidean_distance, pearson_correlation ) from .similar_item import ( find_similar_item, preferance_space_transform, user_match ) __all__=[ 'dataHandle', 'recommenderEngine', 'similarItem', 'similarityMeasure' ] __version__ = '1.0.0'
from .similarity_measure import ( cosine, euclidean_distance, pearson_correlation ) from .similar_item import ( find_similar_item, preference_space_transform, user_match ) __all__=[ 'dataHandle', 'recommenderEngine', 'similarItem', 'similarityMeasure' ] __version__ = '1.0.0'
Update function name to correct spelling
Update function name to correct spelling Signed-off-by: Tran Ly Vu <0555cc0f3d5a46ac8c0e84ddf31443494c66bd55@gmail.com>
Python
apache-2.0
tranlyvu/recommender
--- +++ @@ -6,7 +6,7 @@ from .similar_item import ( find_similar_item, - preferance_space_transform, + preference_space_transform, user_match )
b5acf414e9fcbecee8da15e2757a60ce10cc5c10
examples/last.py
examples/last.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from pymisp import PyMISP from keys import misp_url, misp_key import argparse import os import json # Usage for pipe masters: ./last.py -l 5h | jq . def init(url, key): return PyMISP(url, key, True, 'json') def download_last(m, last, out=None): result = m.download_last(last) if out is None: for e in result['response']: print(json.dumps(e) + '\n') else: with open(out, 'w') as f: for e in result['response']: f.write(json.dumps(e) + '\n') if __name__ == '__main__': parser = argparse.ArgumentParser(description='Download latest events from a MISP instance.') parser.add_argument("-l", "--last", required=True, help="can be defined in days, hours, minutes (for example 5d or 12h or 30m).") parser.add_argument("-o", "--output", help="Output file") args = parser.parse_args() if args.output is not None and os.path.exists(args.output): print('Output file already exists, abord.') exit(0) misp = init(misp_url, misp_key) download_last(misp, args.last, args.output)
#!/usr/bin/env python # -*- coding: utf-8 -*- from pymisp import PyMISP from keys import misp_url, misp_key import argparse import os import json # Usage for pipe masters: ./last.py -l 5h | jq . def init(url, key): return PyMISP(url, key, True, 'json') def download_last(m, last, out=None): result = m.download_last(last) if out is None: if 'response' in result: for e in result['response']: print(json.dumps(e) + '\n') else: print('No results for that time period') exit(0) else: with open(out, 'w') as f: for e in result['response']: f.write(json.dumps(e) + '\n') if __name__ == '__main__': parser = argparse.ArgumentParser(description='Download latest events from a MISP instance.') parser.add_argument("-l", "--last", required=True, help="can be defined in days, hours, minutes (for example 5d or 12h or 30m).") parser.add_argument("-o", "--output", help="Output file") args = parser.parse_args() if args.output is not None and os.path.exists(args.output): print('Output file already exists, abord.') exit(0) misp = init(misp_url, misp_key) download_last(misp, args.last, args.output)
Fix KeyError when no results in time period
Fix KeyError when no results in time period Fix a KeyError when no results were found for the specified time period.
Python
bsd-2-clause
pombredanne/PyMISP,iglocska/PyMISP
--- +++ @@ -18,8 +18,12 @@ def download_last(m, last, out=None): result = m.download_last(last) if out is None: - for e in result['response']: - print(json.dumps(e) + '\n') + if 'response' in result: + for e in result['response']: + print(json.dumps(e) + '\n') + else: + print('No results for that time period') + exit(0) else: with open(out, 'w') as f: for e in result['response']:
af21288fb4245fc56a0b182331cd4db724e05e62
app/accounts/admin.py
app/accounts/admin.py
from django.contrib import admin from .models import UserProfile admin.site.register(UserProfile)
from django.contrib import admin from .models import UserProfile @admin.register(UserProfile) class UserProfileAdmin(admin.ModelAdmin): fieldsets = [ ('User Profile', { 'fields': ('user', 'custom_auth_id', 'facebook_oauth_id', 'google_oauth_id', 'twitter_oauth_id',), 'description': 'This holds extra optional information about admin users.' }), ]
Add description for Userprofile model
Add description for Userprofile model
Python
mit
teamtaverna/core
--- +++ @@ -3,4 +3,12 @@ from .models import UserProfile -admin.site.register(UserProfile) +@admin.register(UserProfile) +class UserProfileAdmin(admin.ModelAdmin): + fieldsets = [ + ('User Profile', { + 'fields': ('user', 'custom_auth_id', 'facebook_oauth_id', + 'google_oauth_id', 'twitter_oauth_id',), + 'description': 'This holds extra optional information about admin users.' + }), + ]
4d2ef07c64603e99f05f2233382dc2a7c5bff5ba
website/members/tests.py
website/members/tests.py
from django.contrib.auth.models import User from django.test import TestCase from datetime import datetime from members.models import Member class MemberTest(TestCase): def setUp(self): self.user = User.objects.create(username='test') self.member = Member.objects.create(user=self.user) self.assertEqual(1, Member.objects.count()) def test_delete_cascade(self): self.user.delete() self.assertEqual( 0, Member.objects.count(), 'Deleting a user deletes the member information.' ) def test_user_link(self): self.assertEqual( self.user, self.member.user, 'Members are linked to a user object.' ) def test_print_person_number(self): self.member.birthday = datetime.strptime('09/07/1999', '%d/%m/%Y') self.member.person_number_ext = '1234' self.assertEqual( '19990709-1234', self.member.person_number(), 'Person numbers are printed as \'(year)(month)(day)-(ext)\'.' )
from django.contrib.auth.models import User from django.test import TestCase from datetime import datetime from members.models import Member, StudyProgram class MemberTest(TestCase): def setUp(self): self.user = User.objects.create(username='test') self.member = Member.objects.create(user=self.user) self.assertEqual(1, Member.objects.count()) def test_delete_cascade(self): self.user.delete() self.assertEqual( 0, Member.objects.count(), 'Deleting a user deletes the member information.' ) def test_user_link(self): self.assertEqual( self.user, self.member.user, 'Members are linked to a user object.' ) def test_print_person_number(self): self.member.birthday = datetime.strptime('09/07/1999', '%d/%m/%Y') self.member.person_number_ext = '1234' self.assertEqual( '19990709-1234', self.member.person_number(), 'Person numbers are printed as \'(year)(month)(day)-(ext)\'.' ) def test_study_deletion(self): study = StudyProgram.objects.create(name='subject') self.member.study = study self.member.save() study.delete() self.member.refresh_from_db() self.assertEqual( None, self.member.study, 'Deleting a study program resets the study for the members' )
Add test for StudyProgram deletion
:green_heart: Add test for StudyProgram deletion
Python
agpl-3.0
Dekker1/moore,UTNkar/moore,Dekker1/moore,UTNkar/moore,Dekker1/moore,UTNkar/moore,Dekker1/moore,UTNkar/moore
--- +++ @@ -2,7 +2,7 @@ from django.test import TestCase from datetime import datetime -from members.models import Member +from members.models import Member, StudyProgram class MemberTest(TestCase): @@ -31,3 +31,14 @@ '19990709-1234', self.member.person_number(), 'Person numbers are printed as \'(year)(month)(day)-(ext)\'.' ) + + def test_study_deletion(self): + study = StudyProgram.objects.create(name='subject') + self.member.study = study + self.member.save() + study.delete() + self.member.refresh_from_db() + self.assertEqual( + None, self.member.study, + 'Deleting a study program resets the study for the members' + )
57b45988072cdc57d90ea11d673b283a5473cd14
routes.py
routes.py
from flask import Flask, render_template, request from models import db from forms import SignupForm app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://localhost/strabo' db.init_app(app) app.secret_key = "" @app.route("/") def index(): return render_template("index.html") @app.route("/about") def about(): return render_template("about.html") @app.route("/signup", methods=["GET", "POST"]) def signup(): form = SignupForm() if request.method == "POST": if form.validate() == False: return render_template('signup.html', form=form) else: return "Success!" elif request.method == "GET": return render_template('signup.html', form=form) if __name__ == "__main__": app.run(debug=True)
from flask import Flask, render_template, request from models import db, User from forms import SignupForm app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://localhost/strabo' db.init_app(app) app.secret_key = "" @app.route("/") def index(): return render_template("index.html") @app.route("/about") def about(): return render_template("about.html") @app.route("/signup", methods=["GET", "POST"]) def signup(): form = SignupForm() if request.method == "POST": if form.validate() == False: return render_template('signup.html', form=form) else: newuser = User(form.first_name.data, form.last_name.data, form.email.data, form.password.data) db.session.add(newuser) db.session.commit() return 'Success!' elif request.method == "GET": return render_template('signup.html', form=form) if __name__ == "__main__": app.run(debug=True)
Add new user form data to db
Add new user form data to db
Python
apache-2.0
cristobal23/strabo,cristobal23/strabo
--- +++ @@ -1,5 +1,5 @@ from flask import Flask, render_template, request -from models import db +from models import db, User from forms import SignupForm app = Flask(__name__) @@ -25,7 +25,10 @@ if form.validate() == False: return render_template('signup.html', form=form) else: - return "Success!" + newuser = User(form.first_name.data, form.last_name.data, form.email.data, form.password.data) + db.session.add(newuser) + db.session.commit() + return 'Success!' elif request.method == "GET": return render_template('signup.html', form=form)
019c13489eceb315f7a0edb72296f32c35339d93
joulupukki/api/controllers/v3/v3.py
joulupukki/api/controllers/v3/v3.py
import importlib import pecan from joulupukki.api.controllers.v3.users import UsersController from joulupukki.api.controllers.v3.projects import ProjectsController from joulupukki.api.controllers.v3.stats import StatsController from joulupukki.api.controllers.v3.auth import AuthController authcontroller = importlib.import_module('joulupukki.api.controllers.v3.' + pecan.conf.auth) class V3Controller(object): """Version 3 API controller root.""" users = UsersController() projects = ProjectsController() stats = StatsController() auth = AuthController() # Handle github and gitlab auth if pecan.conf.auth is not None: try: externalservice = importlib.import_module('joulupukki.api.controllers.v3.' + pecan.conf.auth).ExternalServiceController() except Exception as exp: #TODO print(exp) pass
import importlib import pecan from joulupukki.api.controllers.v3.users import UsersController from joulupukki.api.controllers.v3.projects import ProjectsController from joulupukki.api.controllers.v3.stats import StatsController from joulupukki.api.controllers.v3.auth import AuthController class V3Controller(object): """Version 3 API controller root.""" users = UsersController() projects = ProjectsController() stats = StatsController() auth = AuthController() # Handle github and gitlab auth if pecan.conf.auth is not None: try: externalservice = importlib.import_module('joulupukki.api.controllers.v3.' + pecan.conf.auth).ExternalServiceController() except Exception as exp: #TODO print(exp) pass
Fix api starting with auth set
Fix api starting with auth set
Python
agpl-3.0
jlpk/joulupukki-api
--- +++ @@ -6,8 +6,6 @@ from joulupukki.api.controllers.v3.projects import ProjectsController from joulupukki.api.controllers.v3.stats import StatsController from joulupukki.api.controllers.v3.auth import AuthController - -authcontroller = importlib.import_module('joulupukki.api.controllers.v3.' + pecan.conf.auth) class V3Controller(object): """Version 3 API controller root."""
4257381997e8ac6968713f1bad96019f977bafc9
server.py
server.py
#!/usr/bin/env python # -*- coding: utf-8 -*- import tweepy, time, sys, os from ConfigParser import SafeConfigParser parser = SafeConfigParser() parser.read('secrets.cfg') #enter the corresponding information from your Twitter application: CONSUMER_KEY = parser.get('bug_tracker', 'CONSUMER_KEY') CONSUMER_SECRET = parser.get('bug_tracker', 'CONSUMER_SECRET') ACCESS_KEY = parser.get('bug_tracker', 'ACCESS_KEY') ACCESS_SECRET = parser.get('bug_tracker', 'ACCESS_SECRET') auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET) auth.set_access_token(ACCESS_KEY, ACCESS_SECRET) api = tweepy.API(auth) line = "Test tweet!" api.update_status(line)
#!/usr/bin/env python # -*- coding: utf-8 -*- import tweepy, time, sys, os from ConfigParser import SafeConfigParser parser = SafeConfigParser() parser.read('secrets.cfg') #enter the corresponding information from your Twitter application: CONSUMER_KEY = parser.get('Twitter', 'CONSUMER_KEY') CONSUMER_SECRET = parser.get('Twitter', 'CONSUMER_SECRET') ACCESS_KEY = parser.get('Twitter', 'ACCESS_KEY') ACCESS_SECRET = parser.get('Twitter', 'ACCESS_SECRET') auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET) auth.set_access_token(ACCESS_KEY, ACCESS_SECRET) api = tweepy.API(auth) line = "Test tweet!" api.update_status(line)
Fix config parsing. Tweeting works
Fix config parsing. Tweeting works
Python
mit
premgane/agolo-twitterbot,premgane/agolo-twitterbot
--- +++ @@ -8,10 +8,10 @@ parser.read('secrets.cfg') #enter the corresponding information from your Twitter application: -CONSUMER_KEY = parser.get('bug_tracker', 'CONSUMER_KEY') -CONSUMER_SECRET = parser.get('bug_tracker', 'CONSUMER_SECRET') -ACCESS_KEY = parser.get('bug_tracker', 'ACCESS_KEY') -ACCESS_SECRET = parser.get('bug_tracker', 'ACCESS_SECRET') +CONSUMER_KEY = parser.get('Twitter', 'CONSUMER_KEY') +CONSUMER_SECRET = parser.get('Twitter', 'CONSUMER_SECRET') +ACCESS_KEY = parser.get('Twitter', 'ACCESS_KEY') +ACCESS_SECRET = parser.get('Twitter', 'ACCESS_SECRET') auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
9f512fd6f3c7d2928c66062002b18b7bb13a5653
linter.py
linter.py
# # linter.py # Linter for SublimeLinter3, a code checking framework for Sublime Text 3 # # Written by Jon LaBelle # Copyright (c) 2018 Jon LaBelle # # License: MIT # """This module exports the Markdownlint plugin class.""" from SublimeLinter.lint import NodeLinter, util class Markdownlint(NodeLinter): """Provides an interface to markdownlint.""" syntax = ('markdown', 'markdown gfm', 'multimarkdown', 'markdown extended') cmd = 'markdownlint' npm_name = 'markdownlint-cli' version_args = '--version' version_re = r'(?P<version>\d+\.\d+\.\d+)' version_requirement = '>= 0.6.0' check_version = True regex = r'.+?[:]\s(?P<line>\d+)[:]\s(?P<error>MD\d+)?[/]?(?P<message>.+)' multiline = False line_col_base = (1, 1) tempfile_suffix = 'md' error_stream = util.STREAM_STDERR selectors = {} word_re = None defaults = {} inline_settings = None inline_overrides = None comment_re = r'\s*/[/*]' config_file = ('--config', '.markdownlintrc', '~')
# # linter.py # Markdown Linter for SublimeLinter, a code checking framework # for Sublime Text 3 # # Written by Jon LaBelle # Copyright (c) 2018 Jon LaBelle # # License: MIT # """This module exports the Markdownlint plugin class.""" from SublimeLinter.lint import NodeLinter, util class Markdownlint(NodeLinter): """Provides an interface to markdownlint.""" syntax = ('markdown', 'markdown gfm', 'multimarkdown', 'markdown extended') cmd = 'markdownlint' npm_name = 'markdownlint-cli' version_args = '--version' version_re = r'(?P<version>\d+\.\d+\.\d+)' version_requirement = '>= 0.6.0' check_version = True regex = r'.+?[:]\s(?P<line>\d+)[:]\s(?P<error>MD\d+)?[/]?(?P<message>.+)' multiline = False line_col_base = (1, 1) tempfile_suffix = 'md' error_stream = util.STREAM_STDERR selectors = {} word_re = None defaults = {} inline_settings = None inline_overrides = None comment_re = r'\s*/[/*]' config_file = ('--config', '.markdownlintrc', '~')
Remove the "3" from SublimeLinter3
Remove the "3" from SublimeLinter3
Python
mit
jonlabelle/SublimeLinter-contrib-markdownlint,jonlabelle/SublimeLinter-contrib-markdownlint
--- +++ @@ -1,6 +1,7 @@ # # linter.py -# Linter for SublimeLinter3, a code checking framework for Sublime Text 3 +# Markdown Linter for SublimeLinter, a code checking framework +# for Sublime Text 3 # # Written by Jon LaBelle # Copyright (c) 2018 Jon LaBelle
5decd7e68c6454e455bc1debe232ea37f7260c58
mixins.py
mixins.py
class DepthSerializerMixin(object): """Custom method 'get_serializer_class', set attribute 'depth' based on query parameter in the url""" def get_serializer_class(self): serializer_class = self.serializer_class query_params = self.request.QUERY_PARAMS depth = query_params.get('__depth', None) serializer_class.Meta.depth = int(depth) if(depth != None and depth.isdigit()) else 0 return serializer_class
class DepthSerializerMixin(object): """Custom method 'get_serializer_class', set attribute 'depth' based on query parameter in the url""" def get_serializer_class(self, *args, **kwargs): serializer_class = super(DepthSerializerMixin, self).get_serializer_class(*args, **kwargs) query_params = self.request.QUERY_PARAMS depth = query_params.get('__depth', None) serializer_class.Meta.depth = int(depth) if(depth != None and depth.isdigit()) else 0 return serializer_class
Call method 'get_serializer_class' of the Class parent
Call method 'get_serializer_class' of the Class parent
Python
mit
krescruz/depth-serializer-mixin
--- +++ @@ -1,8 +1,8 @@ class DepthSerializerMixin(object): """Custom method 'get_serializer_class', set attribute 'depth' based on query parameter in the url""" - def get_serializer_class(self): - serializer_class = self.serializer_class + def get_serializer_class(self, *args, **kwargs): + serializer_class = super(DepthSerializerMixin, self).get_serializer_class(*args, **kwargs) query_params = self.request.QUERY_PARAMS depth = query_params.get('__depth', None) serializer_class.Meta.depth = int(depth) if(depth != None and depth.isdigit()) else 0
bed9e520a371a99132e05511f110a141d22d2a7f
tests/integration/test_proxy.py
tests/integration/test_proxy.py
# -*- coding: utf-8 -*- '''Test using a proxy.''' # External imports import multiprocessing import SocketServer import SimpleHTTPServer import pytest requests = pytest.importorskip("requests") from six.moves.urllib.request import urlopen # Internal imports import vcr class Proxy(SimpleHTTPServer.SimpleHTTPRequestHandler): ''' Simple proxy server. (from: http://effbot.org/librarybook/simplehttpserver.htm). ''' def do_GET(self): self.copyfile(urlopen(self.path), self.wfile) @pytest.yield_fixture(scope='session') def proxy_server(httpbin): httpd = SocketServer.ForkingTCPServer(('', 0), Proxy) proxy_process = multiprocessing.Process( target=httpd.serve_forever, ) proxy_process.start() yield 'http://{}:{}'.format(*httpd.server_address) proxy_process.terminate() def test_use_proxy(tmpdir, httpbin, proxy_server): '''Ensure that it works with a proxy.''' with vcr.use_cassette(str(tmpdir.join('proxy.yaml'))): requests.get(httpbin.url, proxies={'http': proxy_server}) requests.get(httpbin.url, proxies={'http': proxy_server})
# -*- coding: utf-8 -*- '''Test using a proxy.''' # External imports import multiprocessing import pytest requests = pytest.importorskip("requests") from six.moves import socketserver, SimpleHTTPServer from six.moves.urllib.request import urlopen # Internal imports import vcr class Proxy(SimpleHTTPServer.SimpleHTTPRequestHandler): ''' Simple proxy server. (from: http://effbot.org/librarybook/simplehttpserver.htm). ''' def do_GET(self): self.copyfile(urlopen(self.path), self.wfile) @pytest.yield_fixture(scope='session') def proxy_server(httpbin): httpd = socketserver.ForkingTCPServer(('', 0), Proxy) proxy_process = multiprocessing.Process( target=httpd.serve_forever, ) proxy_process.start() yield 'http://{}:{}'.format(*httpd.server_address) proxy_process.terminate() def test_use_proxy(tmpdir, httpbin, proxy_server): '''Ensure that it works with a proxy.''' with vcr.use_cassette(str(tmpdir.join('proxy.yaml'))): requests.get(httpbin.url, proxies={'http': proxy_server}) requests.get(httpbin.url, proxies={'http': proxy_server})
Fix `socketserver` for Python 3
Fix `socketserver` for Python 3
Python
mit
graingert/vcrpy,kevin1024/vcrpy,graingert/vcrpy,kevin1024/vcrpy
--- +++ @@ -3,11 +3,10 @@ # External imports import multiprocessing -import SocketServer -import SimpleHTTPServer import pytest requests = pytest.importorskip("requests") +from six.moves import socketserver, SimpleHTTPServer from six.moves.urllib.request import urlopen # Internal imports @@ -26,7 +25,7 @@ @pytest.yield_fixture(scope='session') def proxy_server(httpbin): - httpd = SocketServer.ForkingTCPServer(('', 0), Proxy) + httpd = socketserver.ForkingTCPServer(('', 0), Proxy) proxy_process = multiprocessing.Process( target=httpd.serve_forever, )
ff618ea57b8f3d71772bcef5f7fecf9eceae4e3d
scripts/upsrv_schema.py
scripts/upsrv_schema.py
#!/usr/bin/python # Copyright (c) 2006 rPath, Inc # All rights reserved import sys import os import pwd from conary.server import schema from conary.lib import cfgtypes from conary.repository.netrepos.netserver import ServerConfig from conary import dbstore cnrPath = '/srv/conary/repository.cnr' cfg = ServerConfig() try: cfg.read(cnrPath) except cfgtypes.CfgEnvironmentError: print "Error reading %s" % cnrPath sys.exit(1) db = dbstore.connect(cfg.repositoryDB[1], cfg.repositoryDB[0]) schema.loadSchema(db, doMigrate=True) if cfg.repositoryDB[0] == 'sqlite': os.chown(cfg.repositoryDB[1], pwd.getpwnam('apache')[2], pwd.getpwnam('apache')[3])
#!/usr/bin/python # Copyright (c) 2006 rPath, Inc # All rights reserved import sys import os import pwd from conary.server import schema from conary.lib import cfgtypes, tracelog from conary.repository.netrepos.netserver import ServerConfig from conary import dbstore cnrPath = '/srv/conary/repository.cnr' cfg = ServerConfig() tracelog.initLog(filename='stdout', level=2) try: cfg.read(cnrPath) except cfgtypes.CfgEnvironmentError: print "Error reading %s" % cnrPath sys.exit(1) db = dbstore.connect(cfg.repositoryDB[1], cfg.repositoryDB[0]) schema.loadSchema(db, doMigrate=True) if cfg.repositoryDB[0] == 'sqlite': os.chown(cfg.repositoryDB[1], pwd.getpwnam('apache')[2], pwd.getpwnam('apache')[3])
Set log level to 2 when migrating so there is some indication it is running
Set log level to 2 when migrating so there is some indication it is running
Python
apache-2.0
sassoftware/rbm,sassoftware/rbm,sassoftware/rbm
--- +++ @@ -7,13 +7,15 @@ import os import pwd from conary.server import schema -from conary.lib import cfgtypes +from conary.lib import cfgtypes, tracelog from conary.repository.netrepos.netserver import ServerConfig from conary import dbstore cnrPath = '/srv/conary/repository.cnr' cfg = ServerConfig() + +tracelog.initLog(filename='stdout', level=2) try: cfg.read(cnrPath)
85e77bc7a4706ed1b25d4d53e71ca22beafed411
sidertests/test_sider.py
sidertests/test_sider.py
import doctest import os def test_doctest_types(): from sider import types assert 0 == doctest.testmod(types)[0] def test_doctest_datetime(): from sider import datetime assert 0 == doctest.testmod(datetime)[0] exttest_count = 0 def test_ext(): from sider.ext import _exttest assert _exttest.ext_loaded == 'yes' assert exttest_count == 1 from sider import ext assert ext._exttest is _exttest try: import sider.ext._noexttest except ImportError as e: assert str(e) == "No module named 'sider.ext._noexttest'" else: assert False, 'it must raise ImportError' def test_print_version(): from sider.version import VERSION printed_version = os.popen('python -m sider.version').read().strip() assert printed_version == VERSION
import os exttest_count = 0 def test_ext(): from sider.ext import _exttest assert _exttest.ext_loaded == 'yes' assert exttest_count == 1 from sider import ext assert ext._exttest is _exttest try: import sider.ext._noexttest except ImportError as e: assert str(e) == "No module named 'sider.ext._noexttest'" else: assert False, 'it must raise ImportError' def test_print_version(): from sider.version import VERSION printed_version = os.popen('python -m sider.version').read().strip() assert printed_version == VERSION
Drop useless tests that invoking doctests
Drop useless tests that invoking doctests
Python
mit
longfin/sider,dahlia/sider,longfin/sider
--- +++ @@ -1,15 +1,4 @@ -import doctest import os - - -def test_doctest_types(): - from sider import types - assert 0 == doctest.testmod(types)[0] - - -def test_doctest_datetime(): - from sider import datetime - assert 0 == doctest.testmod(datetime)[0] exttest_count = 0
7b13749feea6c798fb6221ae78ba89033fbd2c45
tests/test_actions/test_init.py
tests/test_actions/test_init.py
from tests.test_actions import * from ltk import actions, exceptions import unittest class TestInitAction(unittest.TestCase): def test_uninitialized(self): # todo create dir outside so folder not initialized self.assertRaises(exceptions.UninitializedError, actions.Action, os.getcwd()) def test_init_host(self): create_config() action = actions.Action(os.getcwd()) assert action.host cleanup() def test_init_access_token(self): create_config() action = actions.Action(os.getcwd()) assert action.access_token cleanup() def test_init_project_id(self): create_config() action = actions.Action(os.getcwd()) assert action.project_id cleanup() def test_init_path(self): create_config() action = actions.Action(os.getcwd()) assert action.path cleanup() def test_init_community_id(self): create_config() action = actions.Action(os.getcwd()) assert action.community_id cleanup() def test_init_api(self): create_config() action = actions.Action(os.getcwd()) assert action.api cleanup() def test_init_doc_manager(self): create_config() action = actions.Action(os.getcwd()) assert action.doc_manager cleanup() # if __name__ == '__main__': # unittest.main()
from tests.test_actions import * from ltk import actions, exceptions import unittest class TestInitAction(unittest.TestCase): def test_uninitialized(self): # todo create dir outside so folder not initialized os.chdir('/') self.assertRaises(exceptions.UninitializedError, actions.Action, os.getcwd()) def test_init_host(self): create_config() action = actions.Action(os.getcwd()) assert action.host cleanup() def test_init_access_token(self): create_config() action = actions.Action(os.getcwd()) assert action.access_token cleanup() def test_init_project_id(self): create_config() action = actions.Action(os.getcwd()) assert action.project_id cleanup() def test_init_path(self): create_config() action = actions.Action(os.getcwd()) assert action.path cleanup() def test_init_community_id(self): create_config() action = actions.Action(os.getcwd()) assert action.community_id cleanup() def test_init_api(self): create_config() action = actions.Action(os.getcwd()) assert action.api cleanup() def test_init_doc_manager(self): create_config() action = actions.Action(os.getcwd()) assert action.doc_manager cleanup() if __name__ == '__main__': unittest.main()
Change directory to test uninitialized project
Change directory to test uninitialized project
Python
mit
Lingotek/translation-utility,Lingotek/translation-utility,Lingotek/client,Lingotek/filesystem-connector,Lingotek/client,Lingotek/filesystem-connector
--- +++ @@ -6,6 +6,7 @@ def test_uninitialized(self): # todo create dir outside so folder not initialized + os.chdir('/') self.assertRaises(exceptions.UninitializedError, actions.Action, os.getcwd()) def test_init_host(self): @@ -50,5 +51,5 @@ assert action.doc_manager cleanup() -# if __name__ == '__main__': -# unittest.main() +if __name__ == '__main__': + unittest.main()
1010cb2c4a4930254e2586949314aa0bb6b89b3d
tests/test_solver_constraint.py
tests/test_solver_constraint.py
import pytest from gaphas.solver import Constraint, MultiConstraint, Variable @pytest.fixture def handler(): events = [] def handler(e): events.append(e) handler.events = events # type: ignore[attr-defined] return handler def test_constraint_propagates_variable_changed(handler): v = Variable() c = Constraint(v) c.add_handler(handler) v.value = 3 assert handler.events == [c] def test_multi_constraint(handler): v = Variable() c = Constraint(v) m = MultiConstraint(c) m.add_handler(handler) v.value = 3 assert handler.events == [c]
import pytest from gaphas.solver import Constraint, MultiConstraint, Variable @pytest.fixture def handler(): events = [] def handler(e): events.append(e) handler.events = events # type: ignore[attr-defined] return handler def test_constraint_propagates_variable_changed(handler): v = Variable() c = Constraint(v) c.add_handler(handler) v.value = 3 assert handler.events == [c] def test_multi_constraint(handler): v = Variable() c = Constraint(v) m = MultiConstraint(c) m.add_handler(handler) v.value = 3 assert handler.events == [c] def test_default_constraint_can_not_solve(): v = Variable() c = Constraint(v) with pytest.raises(NotImplementedError): c.solve()
Test default case for constraint.solve()
Test default case for constraint.solve()
Python
lgpl-2.1
amolenaar/gaphas
--- +++ @@ -33,3 +33,11 @@ v.value = 3 assert handler.events == [c] + + +def test_default_constraint_can_not_solve(): + v = Variable() + c = Constraint(v) + + with pytest.raises(NotImplementedError): + c.solve()
2e040a77b70b4a07227f5aa3cb3aee6b8c84f4e0
src/livedumper/common.py
src/livedumper/common.py
"Common functions that may be used everywhere" from __future__ import print_function import os import sys from distutils.util import strtobool def yes_no_query(question): """Ask the user *question* for 'yes' or 'no'; ask again until user inputs a valid option. Returns: 'True' if user answered 'y', 'yes', 't', 'true', 'on' or '1'. 'False' if user answered 'n', 'no', 'f', 'false', 'off' or '0'. """ print("{} (y/n)".format(question), end=" "), while True: try: return strtobool(input().lower()) except ValueError: print("Please respond with 'y' or 'n'.") def ask_overwrite(dest): """Check if file *dest* exists. If 'True', asks if the user wants to overwrite it (just remove the file for later overwrite). """ msg = "File '{}' already exists. Overwrite file?".format(dest) if os.path.exists(dest): if yes_no_query(msg): os.remove(dest) else: sys.exit("Cancelling operation...")
"Common functions that may be used everywhere" from __future__ import print_function import os import sys from distutils.util import strtobool try: input = raw_input except NameError: pass def yes_no_query(question): """Ask the user *question* for 'yes' or 'no'; ask again until user inputs a valid option. Returns: 'True' if user answered 'y', 'yes', 't', 'true', 'on' or '1'. 'False' if user answered 'n', 'no', 'f', 'false', 'off' or '0'. """ print("{} (y/n)".format(question), end=" "), while True: try: return strtobool(input().lower()) except ValueError: print("Please respond with 'y' or 'n'.") def ask_overwrite(dest): """Check if file *dest* exists. If 'True', asks if the user wants to overwrite it (just remove the file for later overwrite). """ msg = "File '{}' already exists. Overwrite file?".format(dest) if os.path.exists(dest): if yes_no_query(msg): os.remove(dest) else: sys.exit("Cancelling operation...")
Fix Python 2 compatibility, again
Fix Python 2 compatibility, again
Python
bsd-2-clause
m45t3r/livedumper
--- +++ @@ -5,6 +5,11 @@ import os import sys from distutils.util import strtobool + +try: + input = raw_input +except NameError: + pass def yes_no_query(question):
a76101c9ad416323b9379d48adb61c804a5454c0
localized_fields/admin.py
localized_fields/admin.py
from . import widgets from .fields import LocalizedField, LocalizedCharField, LocalizedTextField, \ LocalizedFileField FORMFIELD_FOR_LOCALIZED_FIELDS_DEFAULTS = { LocalizedField: {'widget': widgets.AdminLocalizedFieldWidget}, LocalizedCharField: {'widget': widgets.AdminLocalizedCharFieldWidget}, LocalizedTextField: {'widget': widgets.AdminLocalizedFieldWidget}, LocalizedFileField: {'widget': widgets.AdminLocalizedFileFieldWidget}, } class LocalizedFieldsAdminMixin: """Mixin for making the fancy widgets work in Django Admin.""" class Media: css = { 'all': ( 'localized_fields/localized-fields-admin.css', ) } js = ( 'localized_fields/localized-fields-admin.js', ) def __init__(self, *args, **kwargs): """Initializes a new instance of :see:LocalizedFieldsAdminMixin.""" super().__init__(*args, **kwargs) overrides = FORMFIELD_FOR_LOCALIZED_FIELDS_DEFAULTS.copy() overrides.update(self.formfield_overrides) self.formfield_overrides = overrides
from django.contrib.admin import ModelAdmin from . import widgets from .fields import LocalizedField, LocalizedCharField, LocalizedTextField, \ LocalizedFileField FORMFIELD_FOR_LOCALIZED_FIELDS_DEFAULTS = { LocalizedField: {'widget': widgets.AdminLocalizedFieldWidget}, LocalizedCharField: {'widget': widgets.AdminLocalizedCharFieldWidget}, LocalizedTextField: {'widget': widgets.AdminLocalizedFieldWidget}, LocalizedFileField: {'widget': widgets.AdminLocalizedFileFieldWidget}, } class LocalizedFieldsAdminMixin(ModelAdmin): """Mixin for making the fancy widgets work in Django Admin.""" class Media: css = { 'all': ( 'localized_fields/localized-fields-admin.css', ) } js = ( 'localized_fields/localized-fields-admin.js', ) def __init__(self, *args, **kwargs): """Initializes a new instance of :see:LocalizedFieldsAdminMixin.""" super().__init__(*args, **kwargs) overrides = FORMFIELD_FOR_LOCALIZED_FIELDS_DEFAULTS.copy() overrides.update(self.formfield_overrides) self.formfield_overrides = overrides
Fix LocalizedFieldsAdminMixin not having a base class
Fix LocalizedFieldsAdminMixin not having a base class This was a breaking change and broke a lot of projects.
Python
mit
SectorLabs/django-localized-fields,SectorLabs/django-localized-fields,SectorLabs/django-localized-fields
--- +++ @@ -1,3 +1,5 @@ +from django.contrib.admin import ModelAdmin + from . import widgets from .fields import LocalizedField, LocalizedCharField, LocalizedTextField, \ LocalizedFileField @@ -11,7 +13,7 @@ } -class LocalizedFieldsAdminMixin: +class LocalizedFieldsAdminMixin(ModelAdmin): """Mixin for making the fancy widgets work in Django Admin.""" class Media:
8ccffcf02cd5ba8352bc8182d7be13ea015332ca
plinth/utils.py
plinth/utils.py
# # This file is part of Plinth. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # """ Miscelleneous utility method. """ import importlib def import_from_gi(library, version): """Import and return a GObject introspection library.""" try: import gi as package package_name = 'gi' except ImportError: import pgi as package package_name = 'pgi' package.require_version(library, version) return importlib.import_module(package_name + '.repository.' + library)
# # This file is part of Plinth. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # """ Miscelleneous utility method. """ import importlib from django.utils.functional import lazy def import_from_gi(library, version): """Import and return a GObject introspection library.""" try: import gi as package package_name = 'gi' except ImportError: import pgi as package package_name = 'pgi' package.require_version(library, version) return importlib.import_module(package_name + '.repository.' + library) def _format_lazy(string, *args, **kwargs): """Lazily format a lazy string.""" string = str(string) return string.format(*args, **kwargs) format_lazy = lazy(_format_lazy, str)
Add utility method to lazy format lazy string
Add utility method to lazy format lazy string This method is useful to format strings that are lazy (such as those in Forms).
Python
agpl-3.0
freedomboxtwh/Plinth,harry-7/Plinth,kkampardi/Plinth,vignanl/Plinth,vignanl/Plinth,freedomboxtwh/Plinth,kkampardi/Plinth,harry-7/Plinth,harry-7/Plinth,freedomboxtwh/Plinth,vignanl/Plinth,freedomboxtwh/Plinth,kkampardi/Plinth,kkampardi/Plinth,freedomboxtwh/Plinth,vignanl/Plinth,harry-7/Plinth,harry-7/Plinth,kkampardi/Plinth,vignanl/Plinth
--- +++ @@ -20,6 +20,7 @@ """ import importlib +from django.utils.functional import lazy def import_from_gi(library, version): @@ -34,3 +35,12 @@ package.require_version(library, version) return importlib.import_module(package_name + '.repository.' + library) + + +def _format_lazy(string, *args, **kwargs): + """Lazily format a lazy string.""" + string = str(string) + return string.format(*args, **kwargs) + + +format_lazy = lazy(_format_lazy, str)
3313d611d7cc66bf607a341a5d9a6a5d96dfbec5
clowder_server/emailer.py
clowder_server/emailer.py
import os import requests from django.core.mail import send_mail from clowder_account.models import ClowderUser ADMIN_EMAIL = 'admin@clowder.io' def send_alert(company, name): for user in ClowderUser.objects.filter(company=company, allow_email_notifications=True): subject = 'FAILURE: %s' % (name) body = subject if user.company_id == 86: slack_token = os.getenv('PARKME_SLACK_TOKEN') url = 'https://hooks.slack.com/services/%s' % (slack_token) payload = {"username": "devopsbot", "text": body, "icon_emoji": ":robot_face:"} requests.post(url, json=payload) send_mail(subject, body, ADMIN_EMAIL, [user.email], fail_silently=True)
import os import requests from django.core.mail import send_mail from clowder_account.models import ClowderUser ADMIN_EMAIL = 'admin@clowder.io' def send_alert(company, name): slack_sent = False for user in ClowderUser.objects.filter(company=company, allow_email_notifications=True): subject = 'FAILURE: %s' % (name) body = subject if user.company_id == 86 and not slack_sent: slack_token = os.getenv('PARKME_SLACK_TOKEN') url = 'https://hooks.slack.com/services/%s' % (slack_token) payload = {"username": "clowder", "text": body, "icon_emoji": ":clowder:"} requests.post(url, json=payload) slack_sent = True send_mail(subject, body, ADMIN_EMAIL, [user.email], fail_silently=True)
Rename bot and prevent channel spamming
Rename bot and prevent channel spamming
Python
agpl-3.0
keithhackbarth/clowder_server,keithhackbarth/clowder_server,keithhackbarth/clowder_server,keithhackbarth/clowder_server
--- +++ @@ -7,12 +7,14 @@ ADMIN_EMAIL = 'admin@clowder.io' def send_alert(company, name): + slack_sent = False for user in ClowderUser.objects.filter(company=company, allow_email_notifications=True): subject = 'FAILURE: %s' % (name) body = subject - if user.company_id == 86: + if user.company_id == 86 and not slack_sent: slack_token = os.getenv('PARKME_SLACK_TOKEN') url = 'https://hooks.slack.com/services/%s' % (slack_token) - payload = {"username": "devopsbot", "text": body, "icon_emoji": ":robot_face:"} + payload = {"username": "clowder", "text": body, "icon_emoji": ":clowder:"} requests.post(url, json=payload) + slack_sent = True send_mail(subject, body, ADMIN_EMAIL, [user.email], fail_silently=True)
a8823bdc00c83c72352985706f6503557540ae9d
src/ocspdash/web/wsgi.py
src/ocspdash/web/wsgi.py
# -*- coding: utf-8 -*- """This file should be used to run the flask app with something like Gunicorn. For example: gunicorn -b 0.0.0.0:8000 ocspdash.web.run:app This file should NOT be imported anywhere, though, since it would instantiate the app. """ from ocspdash.web import create_application app = create_application()
# -*- coding: utf-8 -*- """This file should be used to run the flask app with something like Gunicorn. For example: gunicorn -b 0.0.0.0:8000 ocspdash.web.wsgi:app This file should NOT be imported anywhere, though, since it would instantiate the app. """ from ocspdash.web import create_application app = create_application()
Update Charlie's stupid wrong documentation.
Update Charlie's stupid wrong documentation. I can't promise it's right now, though, cuz I didn't test it either. 🎉🎉🎉
Python
mit
scolby33/OCSPdash,scolby33/OCSPdash,scolby33/OCSPdash
--- +++ @@ -2,7 +2,7 @@ """This file should be used to run the flask app with something like Gunicorn. -For example: gunicorn -b 0.0.0.0:8000 ocspdash.web.run:app +For example: gunicorn -b 0.0.0.0:8000 ocspdash.web.wsgi:app This file should NOT be imported anywhere, though, since it would instantiate the app. """
aafcc59ef14fe5af39a06e87bc44546a9da56fb6
lazy_helpers.py
lazy_helpers.py
# Lazy objects, for the serializer to find them we put them here class LazyDriver(object): _driver = None @classmethod def get(cls): if cls._driver is None: from selenium import webdriver # Configure headless mode options = webdriver.ChromeOptions() #Oops options.add_argument('headless') cls._driver = webdriver.Chrome(chrome_options=options) return cls._driver class LazyPool(object): _pool = None @classmethod def get(cls): if cls._pool is None: import urllib3 cls._pool = urllib3.PoolManager() return cls._pool
# Lazy objects, for the serializer to find them we put them here class LazyDriver(object): _driver = None @classmethod def get(cls): import os if cls._driver is None: from selenium import webdriver # Configure headless mode options = webdriver.ChromeOptions() #Oops options.add_argument('headless') chrome_options.add_argument('--ignore-certificate-errors') chrome_uptions.add_argument("--logs /tmp/chromelogpanda{0}.log".format(os.getpid())) cls._driver = webdriver.Chrome(chrome_options=options) return cls._driver class LazyPool(object): _pool = None @classmethod def get(cls): if cls._pool is None: import urllib3 cls._pool = urllib3.PoolManager() return cls._pool
Add some more arguments for chrome driver
Add some more arguments for chrome driver
Python
apache-2.0
holdenk/diversity-analytics,holdenk/diversity-analytics
--- +++ @@ -5,11 +5,14 @@ @classmethod def get(cls): + import os if cls._driver is None: from selenium import webdriver # Configure headless mode options = webdriver.ChromeOptions() #Oops options.add_argument('headless') + chrome_options.add_argument('--ignore-certificate-errors') + chrome_uptions.add_argument("--logs /tmp/chromelogpanda{0}.log".format(os.getpid())) cls._driver = webdriver.Chrome(chrome_options=options) return cls._driver
92438a5450bc644f066a941efe16ec07cf3c129a
httoop/codecs/codec.py
httoop/codecs/codec.py
# -*- coding: utf-8 -*- from httoop.util import Unicode class Codec(object): @classmethod def decode(cls, data, charset=None, mimetype=None): # pragma: no cover if isinstance(data, bytes): data = data.decode(charset) if charset is not None else data.decode() @classmethod def encode(cls, data, charset=None, mimetype=None): # pragma: no cover if isinstance(data, Unicode): data = data.encode(charset) if charset is not None else data.encode() return data @classmethod def iterencode(cls, data, charset=None, mimetype=None): # pragma: no cover return cls.encode(data, charset, mimetype) @classmethod def iterdecode(cls, data, charset=None, mimetype=None): # pragma: no cover return cls.decode(data, charset, mimetype)
# -*- coding: utf-8 -*- from httoop.util import Unicode class Codec(object): @classmethod def decode(cls, data, charset=None, mimetype=None): # pragma: no cover if isinstance(data, bytes): data = data.decode(charset or 'ascii') return data @classmethod def encode(cls, data, charset=None, mimetype=None): # pragma: no cover if isinstance(data, Unicode): data = data.encode(charset or 'ascii') return data @classmethod def iterencode(cls, data, charset=None, mimetype=None): # pragma: no cover return cls.encode(data, charset, mimetype) @classmethod def iterdecode(cls, data, charset=None, mimetype=None): # pragma: no cover return cls.decode(data, charset, mimetype)
Make encoding and decoding strict
Make encoding and decoding strict * programmers must know what kind of data they use * don't guess encodings anymore
Python
mit
spaceone/httoop,spaceone/httoop,spaceone/httoop
--- +++ @@ -7,12 +7,13 @@ @classmethod def decode(cls, data, charset=None, mimetype=None): # pragma: no cover if isinstance(data, bytes): - data = data.decode(charset) if charset is not None else data.decode() + data = data.decode(charset or 'ascii') + return data @classmethod def encode(cls, data, charset=None, mimetype=None): # pragma: no cover if isinstance(data, Unicode): - data = data.encode(charset) if charset is not None else data.encode() + data = data.encode(charset or 'ascii') return data @classmethod
c7723ff6d7f43330786e84802ef0bacf70d4ba67
instatrace/commands.py
instatrace/commands.py
# Copyright (C) 2010 Peter Teichman import logging import os import sys import time from .stats import Histogram, Statistics log = logging.getLogger("instatrace") class HistogramsCommand: @classmethod def add_subparser(cls, parser): subparser = parser.add_parser("histograms", help="Stat histograms") subparser.add_argument("file", nargs="+") subparser.set_defaults(run=cls.run) @staticmethod def run(args): stats = Statistics() for filename in args.file: count = 0 fd = open(filename) for line in fd.xreadlines(): line = line.strip() stat = line.split(" ", 2) stats.add_sample(stat[0], int(stat[1])) fd.close() names = stats.statistics.keys() names.sort() for name in names: histogram = stats.statistics.get(name) histogram.text(sys.stdout)
# Copyright (C) 2010 Peter Teichman import logging import os import sys import time from .stats import Histogram, Statistics log = logging.getLogger("instatrace") class HistogramsCommand: @classmethod def add_subparser(cls, parser): subparser = parser.add_parser("histograms", help="Stat histograms") subparser.add_argument("--filter", action="store_true", help="Filter out any lines that don't contain INSTATRACE") subparser.add_argument("file", nargs="+") subparser.set_defaults(run=cls.run, filter_marker="INSTATRACE: ") @staticmethod def run(args): stats = Statistics() for filename in args.file: count = 0 fd = open(filename) for line in fd.xreadlines(): if args.filter: pos = line.find(args.filter_marker) if pos == -1: continue line = line[pos+len(args.filter_marker):] line = line.strip() stat = line.split(" ", 2) stats.add_sample(stat[0], int(stat[1])) fd.close() names = stats.statistics.keys() names.sort() for name in names: histogram = stats.statistics.get(name) histogram.text(sys.stdout)
Add a --filter flag to histograms
Add a --filter flag to histograms This ignores any lines in the input that don't contain "INSTATRACE: " and removes anything preceding that string before handling the sample.
Python
mit
pteichman/instatrace
--- +++ @@ -13,8 +13,11 @@ @classmethod def add_subparser(cls, parser): subparser = parser.add_parser("histograms", help="Stat histograms") + subparser.add_argument("--filter", action="store_true", + help="Filter out any lines that don't contain INSTATRACE") subparser.add_argument("file", nargs="+") - subparser.set_defaults(run=cls.run) + subparser.set_defaults(run=cls.run, + filter_marker="INSTATRACE: ") @staticmethod def run(args): @@ -24,6 +27,12 @@ count = 0 fd = open(filename) for line in fd.xreadlines(): + if args.filter: + pos = line.find(args.filter_marker) + if pos == -1: + continue + line = line[pos+len(args.filter_marker):] + line = line.strip() stat = line.split(" ", 2)
504c7ad1a436af356ca73e2fe8934018e3a7547d
manage.py
manage.py
from vulyk.control import cli if __name__ == '__main__': cli()
#!/usr/bin/env python # -*- coding=utf-8 -*- from vulyk.control import cli if __name__ == '__main__': cli()
Make it more executable than it was
Make it more executable than it was
Python
bsd-3-clause
mrgambal/vulyk,mrgambal/vulyk,mrgambal/vulyk
--- +++ @@ -1,3 +1,6 @@ +#!/usr/bin/env python +# -*- coding=utf-8 -*- + from vulyk.control import cli if __name__ == '__main__':
d5a59b79a3b3d6c2209eb9dc486a40d635aa6778
solum/builder/config.py
solum/builder/config.py
# Copyright 2014 - Rackspace Hosting # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # Pecan Application Configurations app = { 'root': 'solum.builder.controllers.root.RootController', 'modules': ['solum.builder'], 'debug': True, } # Custom Configurations must be in Python dictionary format:: # # foo = {'bar':'baz'} # # All configurations are accessible at:: # pecan.conf
# Copyright 2014 - Rackspace Hosting # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from solum.api import auth # Pecan Application Configurations app = { 'root': 'solum.builder.controllers.root.RootController', 'modules': ['solum.builder'], 'debug': True, 'hooks': [auth.AuthInformationHook()] } # Custom Configurations must be in Python dictionary format:: # # foo = {'bar':'baz'} # # All configurations are accessible at:: # pecan.conf
Add missing Auth hook to image builder
Add missing Auth hook to image builder Change-Id: I73f17c17a1f4d530c0351dacc2b10fbdcf3122e0
Python
apache-2.0
gilbertpilz/solum,stackforge/solum,gilbertpilz/solum,ed-/solum,openstack/solum,gilbertpilz/solum,openstack/solum,ed-/solum,stackforge/solum,devdattakulkarni/test-solum,gilbertpilz/solum,devdattakulkarni/test-solum,ed-/solum,ed-/solum
--- +++ @@ -12,11 +12,14 @@ # License for the specific language governing permissions and limitations # under the License. +from solum.api import auth + # Pecan Application Configurations app = { 'root': 'solum.builder.controllers.root.RootController', 'modules': ['solum.builder'], 'debug': True, + 'hooks': [auth.AuthInformationHook()] } # Custom Configurations must be in Python dictionary format::
51d371918d0ffb5cc96c6faa67fb0a5cd3cf58ae
manage.py
manage.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Management command entry point for working with migrations """ import sys import django from django.conf import settings INSTALLED_APPS = [ "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.sites", "addendum", ] try: import south # noqa except ImportError: pass else: INSTALLED_APPS += ['south'] settings.configure( DEBUG=True, USE_TZ=True, USE_I18N=True, DATABASES={ "default": { "ENGINE": "django.db.backends.sqlite3", } }, MIDDLEWARE_CLASSES=(), # Silence Django 1.7 warnings SITE_ID=1, FIXTURE_DIRS=['tests/fixtures'], INSTALLED_APPS=INSTALLED_APPS, ROOT_URLCONF="tests.urls", ) try: django.setup() except AttributeError: pass if __name__ == '__main__': from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Management command entry point for working with migrations """ import sys import django from django.conf import settings INSTALLED_APPS = [ "django.contrib.auth", "django.contrib.admin", "django.contrib.contenttypes", "django.contrib.sites", "addendum", ] try: import south # noqa except ImportError: pass else: INSTALLED_APPS += ['south'] settings.configure( DEBUG=True, USE_TZ=True, USE_I18N=True, DATABASES={ "default": { "ENGINE": "django.db.backends.sqlite3", } }, MIDDLEWARE_CLASSES=(), # Silence Django 1.7 warnings SITE_ID=1, FIXTURE_DIRS=['tests/fixtures'], INSTALLED_APPS=INSTALLED_APPS, ROOT_URLCONF="tests.urls", ) try: django.setup() except AttributeError: pass if __name__ == '__main__': from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
Add contrib.admin to locally installed apps
Add contrib.admin to locally installed apps
Python
bsd-2-clause
bennylope/django-addendum,bennylope/django-addendum
--- +++ @@ -11,6 +11,7 @@ INSTALLED_APPS = [ "django.contrib.auth", + "django.contrib.admin", "django.contrib.contenttypes", "django.contrib.sites", "addendum",
b66b63d2a9a6919f3e735d46881740d27bcdc8a6
piper/process.py
piper/process.py
import subprocess as sub import logbook class Process(object): """ Helper class for running processes """ def __init__(self, cmd): self.cmd = cmd self.proc = None self.success = None self.log = logbook.Logger(self.__class__.__name__) def setup(self): """ Setup the Popen object used in execution """ self.log.info('Spawning handler for {0}...'.format(self.cmd)) self.popen = sub.Popen( self.cmd.split(), stdout=sub.PIPE, stderr=sub.PIPE, ) def run(self): self.log.info('Executing {0}'.format(self.cmd)) while not self.popen.poll(): # TODO: Gracefully handle stderr as well line = self.popen.stdout.readline() if not line: break self.log.info(line.decode('utf-8').rstrip()) exit = self.popen.wait() self.log.info('Exitcode {0}'.format(exit)) self.success = exit == 0 if not self.success: self.log.error(self.popen.stderr.read())
import subprocess as sub import logbook class Process(object): """ Helper class for running processes """ def __init__(self, cmd): self.cmd = cmd self.popen = None self.success = None self.log = logbook.Logger(self.cmd) def setup(self): """ Setup the Popen object used in execution """ self.log.debug('Spawning process handler') self.popen = sub.Popen( self.cmd.split(), stdout=sub.PIPE, stderr=sub.PIPE, ) def run(self): self.log.debug('Executing') while not self.popen.poll(): # TODO: Gracefully handle stderr as well line = self.popen.stdout.readline() if not line: break self.log.info(line.decode('utf-8').rstrip()) exit = self.popen.wait() self.log.debug('Exitcode {0}'.format(exit)) self.success = exit == 0 if not self.success: self.log.error(self.popen.stderr.read())
Change logging setup for Process()
Change logging setup for Process() Also fix usage of badly named .proc variable.
Python
mit
thiderman/piper
--- +++ @@ -12,9 +12,9 @@ def __init__(self, cmd): self.cmd = cmd - self.proc = None + self.popen = None self.success = None - self.log = logbook.Logger(self.__class__.__name__) + self.log = logbook.Logger(self.cmd) def setup(self): """ @@ -22,7 +22,7 @@ """ - self.log.info('Spawning handler for {0}...'.format(self.cmd)) + self.log.debug('Spawning process handler') self.popen = sub.Popen( self.cmd.split(), @@ -31,7 +31,7 @@ ) def run(self): - self.log.info('Executing {0}'.format(self.cmd)) + self.log.debug('Executing') while not self.popen.poll(): # TODO: Gracefully handle stderr as well @@ -43,7 +43,7 @@ self.log.info(line.decode('utf-8').rstrip()) exit = self.popen.wait() - self.log.info('Exitcode {0}'.format(exit)) + self.log.debug('Exitcode {0}'.format(exit)) self.success = exit == 0 if not self.success:
54a6e1463104b87a51d17f937c286721cf84466a
democracy_club/apps/donations/middleware.py
democracy_club/apps/donations/middleware.py
from django.http import HttpResponseRedirect from .forms import DonationForm from .helpers import GoCardlessHelper class DonationFormMiddleware(object): def get_initial(self): return { 'payment_type': 'subscription', 'amount': 10, } def form_valid(self, request, form): # Add the form info to the session request.session['donation_form'] = form.cleaned_data # Start the GoCardless process gc = GoCardlessHelper(request) # Make a customer object at GC's site first. redirect_url = gc.get_redirect_url() # Redirect to GoCardless return HttpResponseRedirect(redirect_url) def process_request(self, request): form_prefix = "donation_form" key_to_check = "{}-amount".format(form_prefix) if request.method == 'POST' and key_to_check in request.POST: form = DonationForm(data=request.POST, prefix=form_prefix) if form.is_valid(): return self.form_valid(request, form) else: form = DonationForm( initial=self.get_initial(), prefix=form_prefix) request.donation_form = form
from django.http import HttpResponseRedirect from .forms import DonationForm from .helpers import GoCardlessHelper, PAYMENT_UNITS class DonationFormMiddleware(object): def get_initial(self, request): suggested_donation = request.GET.get('suggested_donation', 3) form_initial = { 'payment_type': 'subscription', } if int(suggested_donation) in [x[0] for x in PAYMENT_UNITS]: form_initial['amount'] = suggested_donation else: form_initial['other_amount'] = suggested_donation return form_initial def form_valid(self, request, form): # Add the form info to the session request.session['donation_form'] = form.cleaned_data # Start the GoCardless process gc = GoCardlessHelper(request) # Make a customer object at GC's site first. redirect_url = gc.get_redirect_url() # Redirect to GoCardless return HttpResponseRedirect(redirect_url) def process_request(self, request): form_prefix = "donation_form" key_to_check = "{}-amount".format(form_prefix) if request.method == 'POST' and key_to_check in request.POST: form = DonationForm(data=request.POST, prefix=form_prefix) if form.is_valid(): return self.form_valid(request, form) else: form = DonationForm( initial=self.get_initial(request), prefix=form_prefix ) request.donation_form = form
Allow altering the donation amount via a link and default to £3
Allow altering the donation amount via a link and default to £3
Python
bsd-3-clause
DemocracyClub/Website,DemocracyClub/Website,DemocracyClub/Website,DemocracyClub/Website
--- +++ @@ -1,15 +1,21 @@ from django.http import HttpResponseRedirect from .forms import DonationForm -from .helpers import GoCardlessHelper +from .helpers import GoCardlessHelper, PAYMENT_UNITS class DonationFormMiddleware(object): - def get_initial(self): - return { + def get_initial(self, request): + suggested_donation = request.GET.get('suggested_donation', 3) + form_initial = { 'payment_type': 'subscription', - 'amount': 10, } + if int(suggested_donation) in [x[0] for x in PAYMENT_UNITS]: + form_initial['amount'] = suggested_donation + else: + form_initial['other_amount'] = suggested_donation + + return form_initial def form_valid(self, request, form): # Add the form info to the session @@ -34,5 +40,7 @@ return self.form_valid(request, form) else: form = DonationForm( - initial=self.get_initial(), prefix=form_prefix) + initial=self.get_initial(request), + prefix=form_prefix + ) request.donation_form = form
7fe8df63288d72fba98fee2cf73a16c0a0b8e326
tests/functional/conftest.py
tests/functional/conftest.py
import httpretty import pytest import webtest from via.app import create_app @pytest.fixture def test_app(pyramid_settings): return webtest.TestApp(create_app(None, **pyramid_settings)) @pytest.fixture def checkmate_pass(pyramid_settings): httpretty.register_uri( httpretty.GET, "http://localhost:9099/api/check", status=204, body=None )
import httpretty import pytest import webtest from via.app import create_app @pytest.fixture def test_app(pyramid_settings): return webtest.TestApp(create_app(None, **pyramid_settings)) @pytest.fixture def checkmate_pass(pyramid_settings): httpretty.register_uri( httpretty.GET, "http://localhost:9099/api/check", status=204, body="" )
Work around a `httpretty` bug when returning None as the body
Work around a `httpretty` bug when returning None as the body I'm not sure if this is a bug or not. The docs state this must be a string. So this is as empty as we can get.
Python
bsd-2-clause
hypothesis/via,hypothesis/via,hypothesis/via
--- +++ @@ -13,5 +13,5 @@ @pytest.fixture def checkmate_pass(pyramid_settings): httpretty.register_uri( - httpretty.GET, "http://localhost:9099/api/check", status=204, body=None + httpretty.GET, "http://localhost:9099/api/check", status=204, body="" )
c0c1f964892289dd240de4d6121ebdda6c1753c1
penchy/jvms.py
penchy/jvms.py
class JVM(object): """ Base class for JVMs. Inheriting classes must implement: - ``get_commandline(*args, **options)`` to return a commandline that contains the options and runs the JVM """ def get_commandline(self, *args, **options): """ Return a commandline that can be executed by ``subprocess.Popen``. :param args: positional arguments, will be at the end :param options: options which should be presend in the command line :returns: commandline suitable for ``subprocess.Popen`` :rtype: list """ raise NotImplementedError("get_commandline has to be implemented by actual jvms") class OpenJDK(JVM): #TODO pass class J9(JVM): #TODO pass class Jikes(JVM): #TODO pass class SunClient(JVM): #TODO pass class SunServer(JVM): #TODO pass
class JVM(object): """ This class represents a JVM. """ def __init__(self, path, options=""): """ :param path: path to jvm executable relative to basepath :param options: string of options that will be passed to jvm """ self.basepath = '/' self.path = path # XXX: a passed classpath must be filtered and readded before run self.options = options def configure(self, *args): """ Configure jvm options that allows `args` to run :param *args: :class:`Tool` or :class:`Program` instances that should be run. """ #TODO pass def run(self): """ Run the jvm with the current configuration. """ #TODO pass @property def cmdline(self): #TODO pass class WrappedJVM(JVM): """ This class is an abstract base class for a JVM that is wrapped by another Program. Inheriting classes must expose this attributes: - ``out``: dictionary that maps logical output names to paths of output files - ``exports``: set of logical outputs (valid keys for ``out``) """ def __init__(self): """ Inheriting classes must: - have compatible arguments with JVM.__init__ - call JVM.__init__ """ raise NotImplementedError("must be implemented") def run(self): """ Run with wrapping. """ raise NotImplementedError("must be implemented") class ValgrindJVM(WrappedJVM): """ This class represents a JVM which is called by valgrind. """ #TODO pass
Move to new jvm specification.
Move to new jvm specification. Signed-off-by: Michael Markert <5eb998b7ac86da375651a4cd767b88c9dad25896@googlemail.com>
Python
mit
fhirschmann/penchy,fhirschmann/penchy
--- +++ @@ -1,38 +1,69 @@ class JVM(object): """ - Base class for JVMs. + This class represents a JVM. + """ - Inheriting classes must implement: - - ``get_commandline(*args, **options)`` to return a commandline that - contains the options and runs the JVM + def __init__(self, path, options=""): + """ + :param path: path to jvm executable relative to basepath + :param options: string of options that will be passed to jvm + """ + + self.basepath = '/' + self.path = path + # XXX: a passed classpath must be filtered and readded before run + self.options = options + + def configure(self, *args): + """ + Configure jvm options that allows `args` to run + + :param *args: :class:`Tool` or :class:`Program` instances that should be run. + """ + #TODO + pass + + def run(self): + """ + Run the jvm with the current configuration. + """ + #TODO + pass + + @property + def cmdline(self): + #TODO + pass + +class WrappedJVM(JVM): """ - def get_commandline(self, *args, **options): + This class is an abstract base class for a JVM that is wrapped by another + Program. + + Inheriting classes must expose this attributes: + + - ``out``: dictionary that maps logical output names to paths of output + files + - ``exports``: set of logical outputs (valid keys for ``out``) + """ + def __init__(self): """ - Return a commandline that can be executed by ``subprocess.Popen``. + Inheriting classes must: - :param args: positional arguments, will be at the end - :param options: options which should be presend in the command line - :returns: commandline suitable for ``subprocess.Popen`` - :rtype: list + - have compatible arguments with JVM.__init__ + - call JVM.__init__ """ - raise NotImplementedError("get_commandline has to be implemented by actual jvms") + raise NotImplementedError("must be implemented") -class OpenJDK(JVM): + def run(self): + """ + Run with wrapping. + """ + raise NotImplementedError("must be implemented") + +class ValgrindJVM(WrappedJVM): + """ + This class represents a JVM which is called by valgrind. + """ #TODO pass - -class J9(JVM): - #TODO - pass - -class Jikes(JVM): - #TODO - pass - -class SunClient(JVM): - #TODO - pass - -class SunServer(JVM): - #TODO - pass
3ca9ae145e70a3339028d9de55544da739a86899
cura/CameraAnimation.py
cura/CameraAnimation.py
# Copyright (c) 2015 Ultimaker B.V. # Cura is released under the terms of the AGPLv3 or higher. from PyQt5.QtCore import QVariantAnimation, QEasingCurve from PyQt5.QtGui import QVector3D from UM.Math.Vector import Vector from UM.Logger import Logger class CameraAnimation(QVariantAnimation): def __init__(self, parent = None): super().__init__(parent) self._camera_tool = None self.setDuration(500) self.setEasingCurve(QEasingCurve.InOutQuad) def setCameraTool(self, camera_tool): self._camera_tool = camera_tool def setStart(self, start): Logger.log("d", "Camera start: %s %s %s" % (start.x, start.y, start.z)) vec = QVector3D() #QVector3D(start.x, start.y, start.z) vec.setX(start.x) vec.setY(start.y) vec.setZ(start.z) Logger.log("d", "setStartValue...") self.setStartValue(vec) def setTarget(self, target): Logger.log("d", "Camera end: %s %s %s" % (target.x, target.y, target.z)) self.setEndValue(QVector3D(target.x, target.y, target.z)) def updateCurrentValue(self, value): self._camera_tool.setOrigin(Vector(value.x(), value.y(), value.z()))
# Copyright (c) 2015 Ultimaker B.V. # Cura is released under the terms of the AGPLv3 or higher. from PyQt5.QtCore import QVariantAnimation, QEasingCurve from PyQt5.QtGui import QVector3D from UM.Math.Vector import Vector from UM.Logger import Logger class CameraAnimation(QVariantAnimation): def __init__(self, parent = None): super().__init__(parent) self._camera_tool = None self.setDuration(500) self.setEasingCurve(QEasingCurve.InOutQuad) def setCameraTool(self, camera_tool): self._camera_tool = camera_tool def setStart(self, start): self.setStartValue(QVector3D(start.x, start.y, start.z)) def setTarget(self, target): self.setEndValue(QVector3D(target.x, target.y, target.z)) def updateCurrentValue(self, value): self._camera_tool.setOrigin(Vector(value.x(), value.y(), value.z()))
Undo logging and splitting up QVector3D. CURA-3334
Undo logging and splitting up QVector3D. CURA-3334
Python
agpl-3.0
hmflash/Cura,fieldOfView/Cura,Curahelper/Cura,ynotstartups/Wanhao,hmflash/Cura,Curahelper/Cura,fieldOfView/Cura,ynotstartups/Wanhao
--- +++ @@ -20,16 +20,9 @@ self._camera_tool = camera_tool def setStart(self, start): - Logger.log("d", "Camera start: %s %s %s" % (start.x, start.y, start.z)) - vec = QVector3D() #QVector3D(start.x, start.y, start.z) - vec.setX(start.x) - vec.setY(start.y) - vec.setZ(start.z) - Logger.log("d", "setStartValue...") - self.setStartValue(vec) + self.setStartValue(QVector3D(start.x, start.y, start.z)) def setTarget(self, target): - Logger.log("d", "Camera end: %s %s %s" % (target.x, target.y, target.z)) self.setEndValue(QVector3D(target.x, target.y, target.z)) def updateCurrentValue(self, value):
b870f61d131483dd42b3302057351f2461b2cfc6
tests/test_enrichment_fdr.py
tests/test_enrichment_fdr.py
import os def test(): """Test to find this error below. Traceback (most recent call last): File "../scripts/find_enrichment.py", line 130, in <module> study=study, methods=methods) File "../scripts/../goatools/go_enrichment.py", line 93, in __init__ self.run_study(study) File "../scripts/../goatools/go_enrichment.py", line 129, in run_study p_val_distribution = calc_qval(study_count, study_n, UnboundLocalError: local variable 'study_count' referenced before assignment """ os.system("python {SCR} --alpha=0.05 {STUDY} {POP} {ASSN} --fdr --obo={OBO}".format( SCR="../scripts/find_enrichment.py", OBO="../go-basic.obo", STUDY="data/study_unknown", POP="../data/population", ASSN="../data/association")) if __name__ == '__main__': test()
import os def test(): """Ensure that a study with only unknown GO Terms will run gracefully.""" os.system("python {SCR} --alpha=0.05 {STUDY} {POP} {ASSN} --fdr --obo={OBO}".format( SCR="../scripts/find_enrichment.py", OBO="../go-basic.obo", STUDY="data/study_unknown", POP="../data/population", ASSN="../data/association")) if __name__ == '__main__': test()
Make Test description more elegant.
Make Test description more elegant.
Python
bsd-2-clause
fidelram/goatools,mfiers/goatools,lileiting/goatools,tanghaibao/goatools,mfiers/goatools,tanghaibao/goatools,fidelram/goatools,lileiting/goatools
--- +++ @@ -2,17 +2,7 @@ import os def test(): - """Test to find this error below. - - Traceback (most recent call last): - File "../scripts/find_enrichment.py", line 130, in <module> - study=study, methods=methods) - File "../scripts/../goatools/go_enrichment.py", line 93, in __init__ - self.run_study(study) - File "../scripts/../goatools/go_enrichment.py", line 129, in run_study - p_val_distribution = calc_qval(study_count, study_n, - UnboundLocalError: local variable 'study_count' referenced before assignment - """ + """Ensure that a study with only unknown GO Terms will run gracefully.""" os.system("python {SCR} --alpha=0.05 {STUDY} {POP} {ASSN} --fdr --obo={OBO}".format( SCR="../scripts/find_enrichment.py", OBO="../go-basic.obo",
fda563e9661c0a65256ba6b1a7416a0f4171f18e
sentence_transformers/readers/InputExample.py
sentence_transformers/readers/InputExample.py
from typing import Union, List class InputExample: """ Structure for one input example with texts, the label and a unique id """ def __init__(self, guid: str = '', texts: Union[List[str], List[int]] = [], label: Union[int, float] = None): """ Creates one InputExample with the given texts, guid and label str.strip() is called on both texts. :param guid id for the example :param texts the texts for the example :param label the label for the example """ self.guid = guid self.texts = [text.strip() if isinstance(text, str) else text for text in texts] self.label = label def __str__(self): return "<InputExample> label: {}, texts: {}".format(str(self.label), "; ".join(self.texts))
from typing import Union, List class InputExample: """ Structure for one input example with texts, the label and a unique id """ def __init__(self, guid: str = '', texts: List[str] = None, texts_tokenized: List[List[int]] = None, label: Union[int, float] = None): """ Creates one InputExample with the given texts, guid and label str.strip() is called on both texts. :param guid id for the example :param texts the texts for the example :param texts_tokenized Optional: Texts that are already tokenized. If texts_tokenized is passed, texts must not be passed. :param label the label for the example """ self.guid = guid self.texts = [text.strip() for text in texts] if texts is not None else texts self.texts_tokenized = texts_tokenized self.label = label def __str__(self): return "<InputExample> label: {}, texts: {}".format(str(self.label), "; ".join(self.texts))
Add field for pre-tokenized texts
Add field for pre-tokenized texts
Python
apache-2.0
UKPLab/sentence-transformers
--- +++ @@ -5,7 +5,7 @@ """ Structure for one input example with texts, the label and a unique id """ - def __init__(self, guid: str = '', texts: Union[List[str], List[int]] = [], label: Union[int, float] = None): + def __init__(self, guid: str = '', texts: List[str] = None, texts_tokenized: List[List[int]] = None, label: Union[int, float] = None): """ Creates one InputExample with the given texts, guid and label @@ -15,11 +15,14 @@ id for the example :param texts the texts for the example + :param texts_tokenized + Optional: Texts that are already tokenized. If texts_tokenized is passed, texts must not be passed. :param label the label for the example """ self.guid = guid - self.texts = [text.strip() if isinstance(text, str) else text for text in texts] + self.texts = [text.strip() for text in texts] if texts is not None else texts + self.texts_tokenized = texts_tokenized self.label = label def __str__(self):
08199327c411663a199ebf36379e88a514935399
chdb.py
chdb.py
import sqlite3 DB_FILENAME = 'citationhunt.sqlite3' def init_db(): return sqlite3.connect(DB_FILENAME) def reset_db(): db = init_db() with db: db.execute(''' DROP TABLE categories ''') db.execute(''' DROP TABLE articles ''') db.execute(''' DROP TABLE snippets ''') db.execute(''' DROP TABLE articles_categories ''') db.execute(''' CREATE TABLE categories (id TEXT PRIMARY KEY, title TEXT) ''') db.execute(''' INSERT INTO categories VALUES ("unassigned", "unassigned") ''') db.execute(''' CREATE TABLE articles_categories (article_id TEXT, category_id TEXT, FOREIGN KEY(article_id) REFERENCES articles(page_id) ON DELETE CASCADE, FOREIGN KEY(category_id) REFERENCES categories(id) ON DELETE CASCADE) ''') db.execute(''' CREATE TABLE articles (page_id TEXT PRIMARY KEY, url TEXT, title TEXT) ''') db.execute(''' CREATE TABLE snippets (id TEXT PRIMARY KEY, snippet TEXT, section TEXT, article_id TEXT, FOREIGN KEY(article_id) REFERENCES articles(page_id) ON DELETE CASCADE) ''') return db def create_indices(): db = init_db() db.execute('''CREATE INDEX IF NOT EXISTS snippets_articles ON snippets(article_id);''')
import sqlite3 DB_FILENAME = 'citationhunt.sqlite3' def init_db(): return sqlite3.connect(DB_FILENAME) def reset_db(): db = init_db() with db: db.execute(''' DROP TABLE IF EXISTS categories ''') db.execute(''' DROP TABLE IF EXISTS articles ''') db.execute(''' DROP TABLE IF EXISTS snippets ''') db.execute(''' DROP TABLE IF EXISTS articles_categories ''') db.execute(''' CREATE TABLE categories (id TEXT PRIMARY KEY, title TEXT) ''') db.execute(''' INSERT INTO categories VALUES ("unassigned", "unassigned") ''') db.execute(''' CREATE TABLE articles_categories (article_id TEXT, category_id TEXT, FOREIGN KEY(article_id) REFERENCES articles(page_id) ON DELETE CASCADE, FOREIGN KEY(category_id) REFERENCES categories(id) ON DELETE CASCADE) ''') db.execute(''' CREATE TABLE articles (page_id TEXT PRIMARY KEY, url TEXT, title TEXT) ''') db.execute(''' CREATE TABLE snippets (id TEXT PRIMARY KEY, snippet TEXT, section TEXT, article_id TEXT, FOREIGN KEY(article_id) REFERENCES articles(page_id) ON DELETE CASCADE) ''') return db def create_indices(): db = init_db() db.execute('''CREATE INDEX IF NOT EXISTS snippets_articles ON snippets(article_id);''')
Revert "Remove IF EXISTS from DROP TABLE when resetting the db."
Revert "Remove IF EXISTS from DROP TABLE when resetting the db." This reverts commit a7dce25964cd740b0d0db86b255ede60c913e73d.
Python
mit
jhsoby/citationhunt,Stryn/citationhunt,jhsoby/citationhunt,Stryn/citationhunt,jhsoby/citationhunt,jhsoby/citationhunt,Stryn/citationhunt,Stryn/citationhunt
--- +++ @@ -10,16 +10,16 @@ with db: db.execute(''' - DROP TABLE categories + DROP TABLE IF EXISTS categories ''') db.execute(''' - DROP TABLE articles + DROP TABLE IF EXISTS articles ''') db.execute(''' - DROP TABLE snippets + DROP TABLE IF EXISTS snippets ''') db.execute(''' - DROP TABLE articles_categories + DROP TABLE IF EXISTS articles_categories ''') db.execute(''' CREATE TABLE categories (id TEXT PRIMARY KEY, title TEXT)
3e28adb3b32e1c88e9295c44e79840ebfe67f83f
py/foxgami/db.py
py/foxgami/db.py
import functools from sqlalchemy import create_engine @functools.lru_cache() def engine(): return create_engine('postgresql://foxgami:foxgami@localhost/foxgami') def query(sql, args=()): e = engine() result = e.execute(sql, tuple(args)) if result: return list(result) def query_single(sql, args=()): rows = list(query(sql, args)) if len(rows) >= 1: return rows[0] else: return None
import functools from sqlalchemy import create_engine @functools.lru_cache() def engine(): return create_engine('postgresql://foxgami:foxgami@localhost/foxgami') def query(sql, args=()): e = engine() result = e.execute(sql, tuple(args)) if result.returns_rows: return list(result) def query_single(sql, args=()): rows = list(query(sql, args)) if len(rows) >= 1: return rows[0] else: return None
Use .returns_rows to determine if we should return list type
Use .returns_rows to determine if we should return list type
Python
mit
flubstep/foxgami.com,flubstep/foxgami.com
--- +++ @@ -10,7 +10,7 @@ def query(sql, args=()): e = engine() result = e.execute(sql, tuple(args)) - if result: + if result.returns_rows: return list(result)
b3f3325484426e2f77dc2df092c316ed38584970
test/proper_noun_test.py
test/proper_noun_test.py
from jargonprofiler.util import tag_proper_nouns from jargonprofiler.munroe import munroe_score def test_proper_noun_in_sentance(): assert tag_proper_nouns("My name is Eilis.") == set(["Eilis"]) def test_proper_noun_begins_sentance(): assert tag_proper_nouns("Eilis is a girl") == set(["Eilis"]) def test_munroe_with_proper_nouns(): result = munroe_score("Eilis is a small girl") assert result["score"] == 0.75
from jargonprofiler.util import tag_proper_nouns from jargonprofiler.munroe import munroe_score def test_proper_noun_in_sentance(): assert tag_proper_nouns("My name is Eilis.") == set(["Eilis"]) def test_proper_noun_begins_sentance(): assert tag_proper_nouns("Eilis is a girl") == set(["Eilis"]) def test_munroe_with_proper_nouns(): result = munroe_score("Eilis is a small girl") assert result["score"] == 1.0
Update test now that 'is' is a common word
Update test now that 'is' is a common word
Python
mit
ejh243/MunroeJargonProfiler,ejh243/MunroeJargonProfiler
--- +++ @@ -13,4 +13,4 @@ def test_munroe_with_proper_nouns(): result = munroe_score("Eilis is a small girl") - assert result["score"] == 0.75 + assert result["score"] == 1.0
50d8ad485549159d2186df2b6b01aee21e51cbc2
notebooks/machine_learning/track_meta.py
notebooks/machine_learning/track_meta.py
# See also examples/example_track/example_meta.py for a longer, commented example track = dict( author_username='dansbecker', ) lessons = [ dict(topic='How Models Work'), dict(topic='Explore Your Data') ] notebooks = [ dict( filename='tut1.ipynb', lesson_idx=0, type='tutorial', dataset_sources= ["dansbecker/melbourne-housing-snapshot"], competition_sources=["home-data-for-ml-course"], ), dict( filename='tut2.ipynb', lesson_idx=1, type='tutorial', dataset_sources= ["dansbecker/melbourne-housing-snapshot"], competition_sources=["home-data-for-ml-course"], ), dict( filename='ex2.ipynb', lesson_idx=1, type='exercise', dataset_sources= ["dansbecker/melbourne-housing-snapshot"], competition_sources=["home-data-for-ml-course"], scriptid=1258954 ) ]
# See also examples/example_track/example_meta.py for a longer, commented example track = dict( author_username='dansbecker', ) lessons = [ dict(topic='how models work'), dict(topic='exploring your data'), dict(topic='building your first machine learning model'), ] notebooks = [ dict( filename='tut1.ipynb', lesson_idx=0, type='tutorial', dataset_sources= ["dansbecker/melbourne-housing-snapshot"], competition_sources=["home-data-for-ml-course"], ), dict( filename='tut2.ipynb', lesson_idx=1, type='tutorial', dataset_sources= ["dansbecker/melbourne-housing-snapshot"], competition_sources=["home-data-for-ml-course"], ), dict( filename='ex2.ipynb', lesson_idx=1, type='exercise', dataset_sources= ["dansbecker/melbourne-housing-snapshot"], competition_sources=["home-data-for-ml-course"], scriptid=1258954, ), dict( filename='tut3.ipynb', lesson_idx=2, type='tutorial', dataset_sources= ["dansbecker/melbourne-housing-snapshot"], competition_sources=["home-data-for-ml-course"], ), dict( filename='ex3.ipynb', lesson_idx=2, type='exercise', dataset_sources= ["dansbecker/melbourne-housing-snapshot"], competition_sources=["home-data-for-ml-course"], scriptid=1, ), ]
Add third lesson and reword lesson topics
Add third lesson and reword lesson topics
Python
apache-2.0
Kaggle/learntools,Kaggle/learntools
--- +++ @@ -4,8 +4,9 @@ ) lessons = [ - dict(topic='How Models Work'), - dict(topic='Explore Your Data') + dict(topic='how models work'), + dict(topic='exploring your data'), + dict(topic='building your first machine learning model'), ] notebooks = [ @@ -29,6 +30,21 @@ type='exercise', dataset_sources= ["dansbecker/melbourne-housing-snapshot"], competition_sources=["home-data-for-ml-course"], - scriptid=1258954 - ) + scriptid=1258954, + ), + dict( + filename='tut3.ipynb', + lesson_idx=2, + type='tutorial', + dataset_sources= ["dansbecker/melbourne-housing-snapshot"], + competition_sources=["home-data-for-ml-course"], + ), + dict( + filename='ex3.ipynb', + lesson_idx=2, + type='exercise', + dataset_sources= ["dansbecker/melbourne-housing-snapshot"], + competition_sources=["home-data-for-ml-course"], + scriptid=1, + ), ]
939a3be5b24715aae5fd334e6529ec96e1612def
allauth/socialaccount/providers/reddit/provider.py
allauth/socialaccount/providers/reddit/provider.py
from allauth.socialaccount.providers.base import ProviderAccount from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider class RedditAccount(ProviderAccount): def to_str(self): dflt = super(RedditAccount, self).to_str() name = self.account.extra_data.get("name", dflt) return name class RedditProvider(OAuth2Provider): id = "reddit" name = "Reddit" account_class = RedditAccount def extract_uid(self, data): return data["name"] def extract_common_fields(self, data): return dict(name=data.get("name")) def get_default_scope(self): scope = ["identity"] return scope provider_classes = [RedditProvider]
from allauth.socialaccount.providers.base import ProviderAccount from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider class RedditAccount(ProviderAccount): def to_str(self): dflt = super(RedditAccount, self).to_str() name = self.account.extra_data.get("name", dflt) return name class RedditProvider(OAuth2Provider): id = "reddit" name = "Reddit" account_class = RedditAccount def extract_uid(self, data): return data["name"] def extract_common_fields(self, data): return dict(username=data.get("name")) def get_default_scope(self): scope = ["identity"] return scope provider_classes = [RedditProvider]
Use Reddit name as username
chore(reddit): Use Reddit name as username Using Reddit's screen name as username instead of first name will allow the sign-up to skip one more field.
Python
mit
pennersr/django-allauth,rsalmaso/django-allauth,pennersr/django-allauth,rsalmaso/django-allauth,rsalmaso/django-allauth,pennersr/django-allauth
--- +++ @@ -18,7 +18,7 @@ return data["name"] def extract_common_fields(self, data): - return dict(name=data.get("name")) + return dict(username=data.get("name")) def get_default_scope(self): scope = ["identity"]
d0380db930dbf145108a7ef0330dd19475f7fdee
test_arrange_schedule.py
test_arrange_schedule.py
from arrange_schedule import * def test_read_system_setting(): keys = ['board_py_dir','shutdown','max_db_log','min_db_activity'] system_setting = read_system_setting() for key in keys: assert key in system_setting return system_setting def test_crawler_cwb_img(system_setting): send_msg = {} send_msg['server_dir'] = system_setting['board_py_dir'] send_msg['user_id'] = 1 receive_msg = crawler_cwb_img(send_msg) assert receive_msg['result'] == 'success' if __name__ == "__main__": system_setting = test_read_system_setting() test_crawler_cwb_img(system_setting) print("All test passed")
from arrange_schedule import * def test_read_system_setting(): keys = ['board_py_dir','shutdown','max_db_log','min_db_activity'] system_setting = read_system_setting() for key in keys: assert key in system_setting return system_setting def test_read_arrange_mode(): keys = ['arrange_sn','arrange_mode','condition'] receive_msg = read_arrange_mode() for key in keys: assert key in receive_msg def test_crawler_cwb_img(system_setting): send_msg = {} send_msg['server_dir'] = system_setting['board_py_dir'] send_msg['user_id'] = 1 receive_msg = crawler_cwb_img(send_msg) assert receive_msg['result'] == 'success' if __name__ == "__main__": system_setting = test_read_system_setting() test_read_arrange_mode() test_crawler_cwb_img(system_setting) print("All test passed")
Add test case for read_arrange_mode()
Add test case for read_arrange_mode()
Python
apache-2.0
Billy4195/electronic-blackboard,SWLBot/electronic-blackboard,stvreumi/electronic-blackboard,chenyang14/electronic-blackboard,SWLBot/electronic-blackboard,Billy4195/electronic-blackboard,stvreumi/electronic-blackboard,chenyang14/electronic-blackboard,stvreumi/electronic-blackboard,Billy4195/electronic-blackboard,stvreumi/electronic-blackboard,Billy4195/electronic-blackboard,SWLBot/electronic-blackboard,chenyang14/electronic-blackboard,SWLBot/electronic-blackboard
--- +++ @@ -7,6 +7,12 @@ for key in keys: assert key in system_setting return system_setting + +def test_read_arrange_mode(): + keys = ['arrange_sn','arrange_mode','condition'] + receive_msg = read_arrange_mode() + for key in keys: + assert key in receive_msg def test_crawler_cwb_img(system_setting): send_msg = {} @@ -18,5 +24,6 @@ if __name__ == "__main__": system_setting = test_read_system_setting() + test_read_arrange_mode() test_crawler_cwb_img(system_setting) print("All test passed")
3d46ec43570013bd68135126127c4027e25e3cfa
shapely/geos.py
shapely/geos.py
""" Exports the libgeos_c shared lib, GEOS-specific exceptions, and utilities. """ import atexit from ctypes import CDLL, CFUNCTYPE, c_char_p import os, sys # The GEOS shared lib if os.name == 'nt': dll = 'libgeos_c-1.dll' else: dll = 'libgeos_c.so' lgeos = CDLL(dll) # Exceptions class ReadingError(Exception): pass class DimensionError(Exception): pass class TopologicalError(Exception): pass class PredicateError(Exception): pass # GEOS error handlers, which currently do nothing. def error_handler(fmt, list): pass error_h = CFUNCTYPE(None, c_char_p, c_char_p)(error_handler) def notice_handler(fmt, list): pass notice_h = CFUNCTYPE(None, c_char_p, c_char_p)(notice_handler) # Init geos, and register a cleanup function lgeos.initGEOS(notice_h, error_h) atexit.register(lgeos.finishGEOS)
""" Exports the libgeos_c shared lib, GEOS-specific exceptions, and utilities. """ import atexit from ctypes import CDLL, CFUNCTYPE, c_char_p import sys # The GEOS shared lib if sys.platform == 'win32': dll = 'libgeos_c-1.dll' elif sys.platform == 'darwin': dll = 'libgeos_c.dylib' else: dll = 'libgeos_c.so' lgeos = CDLL(dll) # Exceptions class ReadingError(Exception): pass class DimensionError(Exception): pass class TopologicalError(Exception): pass class PredicateError(Exception): pass # GEOS error handlers, which currently do nothing. def error_handler(fmt, list): pass error_h = CFUNCTYPE(None, c_char_p, c_char_p)(error_handler) def notice_handler(fmt, list): pass notice_h = CFUNCTYPE(None, c_char_p, c_char_p)(notice_handler) # Init geos, and register a cleanup function lgeos.initGEOS(notice_h, error_h) atexit.register(lgeos.finishGEOS)
Add untested support for the darwin platform
Add untested support for the darwin platform git-svn-id: 30e8e193f18ae0331cc1220771e45549f871ece9@762 b426a367-1105-0410-b9ff-cdf4ab011145
Python
bsd-3-clause
abali96/Shapely,jdmcbr/Shapely,abali96/Shapely,mouadino/Shapely,jdmcbr/Shapely,mouadino/Shapely,mindw/shapely,mindw/shapely
--- +++ @@ -4,12 +4,14 @@ import atexit from ctypes import CDLL, CFUNCTYPE, c_char_p -import os, sys +import sys # The GEOS shared lib -if os.name == 'nt': +if sys.platform == 'win32': dll = 'libgeos_c-1.dll' +elif sys.platform == 'darwin': + dll = 'libgeos_c.dylib' else: dll = 'libgeos_c.so' lgeos = CDLL(dll)
4c3e9723f67448e93da65ff10142a98176cebe9b
publishconf.py
publishconf.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = 'https://pappasam.github.io' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = False DISQUS_SITENAME = "pappasam-github-io" GOOGLE_ANALYTICS = "UA-117115805-1"
#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = 'https://softwarejourneyman.com' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = False DISQUS_SITENAME = "pappasam-github-io" GOOGLE_ANALYTICS = "UA-117115805-1"
Change publish site to softwarejourneyman.com
Change publish site to softwarejourneyman.com
Python
mit
pappasam/pappasam.github.io,pappasam/pappasam.github.io
--- +++ @@ -7,7 +7,7 @@ sys.path.append(os.curdir) from pelicanconf import * -SITEURL = 'https://pappasam.github.io' +SITEURL = 'https://softwarejourneyman.com' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml'
e4427016abdc7ef146cd7550f2ac1dace07be442
plinky.py
plinky.py
from flask import Flask app = Flask(__name__) @app.route("/") def hello(): return "Hello World!" if __name__ == "__main__": app.run(debug=True)
from flask import Flask app = Flask(__name__) @app.route("/") def hello(): return "Hello World!" if __name__ == "__main__": app.run()
Remove debug flag from app
Remove debug flag from app
Python
mit
RaspberryPiFoundation/plinky,CodeClub/plinky,codecleaner/plinky,codecleaner/plinky,CodeClub/plinky,martinpeck/plinky,martinpeck/plinky,RaspberryPiFoundation/plinky,RaspberryPiFoundation/plinky
--- +++ @@ -6,4 +6,4 @@ return "Hello World!" if __name__ == "__main__": - app.run(debug=True) + app.run()
5998d66442ac0881309005a7bdbedc4ff91b0ea6
hs_core/management/commands/solr_queries.py
hs_core/management/commands/solr_queries.py
""" This prints the state of a facet query. It is used for debugging the faceting system. """ from django.core.management.base import BaseCommand from haystack.query import SearchQuerySet from hs_core.discovery_parser import ParseSQ class Command(BaseCommand): help = "Print debugging information about logical files." def add_arguments(self, parser): # a list of resource id's: none does nothing. parser.add_argument('queries', nargs='*', type=str) def handle(self, *args, **options): if len(options['queries']) > 0: # an array of resource short_id to check. query = ' '.join(options['queries']) sqs = SearchQuerySet().all() parser = ParseSQ() parsed = parser.parse(query) sqs = sqs.filter(parsed) print("QUERY '{}' PARSED {}".format(query, str(parsed))) for result in list(sqs): stored = result.get_stored_fields() print(" {}: {} {} {} {}".format( unicode(stored['short_id']).encode('ascii', 'replace'), unicode(stored['title']).encode('ascii', 'replace'), unicode(stored['author']).encode('ascii', 'replace'), unicode(stored['created']).encode('ascii', 'replace'), unicode(stored['modified']).encode('ascii', 'replace'))) else: print("no queries to try") query = 'author:"Tarboton, David"' parser = ParseSQ() parsed = parser.parse(query) print("QUERY '{}' PARSED {}".format(query, str(parsed)))
""" This prints the state of a facet query. It is used for debugging the faceting system. """ from django.core.management.base import BaseCommand from haystack.query import SearchQuerySet from hs_core.discovery_parser import ParseSQ class Command(BaseCommand): help = "Print debugging information about logical files." def add_arguments(self, parser): # a list of resource id's: none does nothing. parser.add_argument('queries', nargs='*', type=str) def handle(self, *args, **options): if len(options['queries']) > 0: # an array of resource short_id to check. query = ' '.join(options['queries']) sqs = SearchQuerySet().all() parser = ParseSQ() parsed = parser.parse(query) sqs = sqs.filter(parsed) print("QUERY '{}' PARSED {}".format(query, str(parsed))) for result in list(sqs): stored = result.get_stored_fields() print(" {}: {} {} {} {}".format( unicode(stored['short_id']).encode('ascii', 'replace'), unicode(stored['title']).encode('ascii', 'replace'), unicode(stored['author']).encode('ascii', 'replace'), unicode(stored['created']).encode('ascii', 'replace'), unicode(stored['modified']).encode('ascii', 'replace'))) else: print("no queries to try")
Clean up response to no queries.
Clean up response to no queries.
Python
bsd-3-clause
hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare
--- +++ @@ -35,7 +35,3 @@ else: print("no queries to try") - query = 'author:"Tarboton, David"' - parser = ParseSQ() - parsed = parser.parse(query) - print("QUERY '{}' PARSED {}".format(query, str(parsed)))
67f3694254e08331152cd410dec128c11e965222
daisyproducer/settings.py
daisyproducer/settings.py
from settings_common import * PACKAGE_VERSION = "0.5" DEBUG = TEMPLATE_DEBUG = True DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', 'tmp', 'pipeline') EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', 'tmp') SERVE_STATIC_FILES = True # the following is an idea from https://code.djangoproject.com/wiki/SplitSettings # We have both local settings and common settings. They are used as follows: # - common settings are shared data between normal settings and unit test settings # - local settings are used on productive servers to keep the local # settings such as db passwords, etc out of version control try: from settings_local import * except ImportError: pass
from settings_common import * PACKAGE_VERSION = "0.5" DEBUG = TEMPLATE_DEBUG = True DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp', 'pipeline') EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp') SERVE_STATIC_FILES = True # the following is an idea from https://code.djangoproject.com/wiki/SplitSettings # We have both local settings and common settings. They are used as follows: # - common settings are shared data between normal settings and unit test settings # - local settings are used on productive servers to keep the local # settings such as db passwords, etc out of version control try: from settings_local import * except ImportError: pass
Fix the path to external tools
Fix the path to external tools
Python
agpl-3.0
sbsdev/daisyproducer,sbsdev/daisyproducer,sbsdev/daisyproducer,sbsdev/daisyproducer
--- +++ @@ -4,8 +4,8 @@ DEBUG = TEMPLATE_DEBUG = True -DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', 'tmp', 'pipeline') -EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', 'tmp') +DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp', 'pipeline') +EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp') SERVE_STATIC_FILES = True
9fd54adcbd1d21232306d15dc7c6a786c867e455
src/som/compiler/sourcecode_compiler.py
src/som/compiler/sourcecode_compiler.py
import os def compile_class_from_file(path, filename, system_class, universe): return _SourcecodeCompiler().compile(path, filename, system_class, universe) def compile_class_from_string(stmt, system_class, universe): return _SourcecodeCompiler().compile_class_string(stmt, system_class, universe) class _SourcecodeCompiler(object): def __init__(self): self._parser = None def compile(self, path, filename, system_class, universe): result = system_class fname = path + os.pathsep + filename + ".som" self._parser = Parser(FileReader(fname), universe) result = self._compile(system_class, universe) cname = result.get_name() cnameC = cname.get_string() if filename != cnameC: raise ValueError("File name " + filename + " does not match class name " + cnameC) return result def compile_class_string(self, stream, system_class, universe): self._parser = Parser(StringReader(stream), universe) result = self._compile(system_class, universe) return result def _compile(self, system_class, universe): cgc = ClassGenerationContext(universe) result = system_class self._parser.classdef(cgc) if not system_class: result = cgc.assemble() else: cgc.assemble_system_class(result) return result
import os from StringIO import StringIO def compile_class_from_file(path, filename, system_class, universe): return _SourcecodeCompiler().compile(path, filename, system_class, universe) def compile_class_from_string(stmt, system_class, universe): return _SourcecodeCompiler().compile_class_string(stmt, system_class, universe) class _SourcecodeCompiler(object): def __init__(self): self._parser = None def compile(self, path, filename, system_class, universe): fname = path + os.sep + filename + ".som" with open(fname, "r") as input_file: self._parser = Parser(input_file, universe) result = self._compile(system_class, universe) cname = result.get_name() cnameC = cname.get_string() if filename != cnameC: raise ValueError("File name " + filename + " does not match class name " + cnameC) return result def compile_class_string(self, stream, system_class, universe): self._parser = Parser(StringIO(stream), universe) result = self._compile(system_class, universe) return result def _compile(self, system_class, universe): cgc = ClassGenerationContext(universe) result = system_class self._parser.classdef(cgc) if not system_class: result = cgc.assemble() else: cgc.assemble_system_class(result) return result
Use Python file objects directly as input
Use Python file objects directly as input - fix wrong separator between path and filename Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de>
Python
mit
SOM-st/PySOM,SOM-st/RPySOM,smarr/RTruffleSOM,smarr/PySOM,SOM-st/RTruffleSOM,smarr/PySOM,SOM-st/RPySOM,SOM-st/PySOM,smarr/RTruffleSOM,SOM-st/RTruffleSOM
--- +++ @@ -1,4 +1,5 @@ import os +from StringIO import StringIO def compile_class_from_file(path, filename, system_class, universe): return _SourcecodeCompiler().compile(path, filename, system_class, universe) @@ -12,13 +13,11 @@ self._parser = None def compile(self, path, filename, system_class, universe): - result = system_class + fname = path + os.sep + filename + ".som" - fname = path + os.pathsep + filename + ".som" - - self._parser = Parser(FileReader(fname), universe) - - result = self._compile(system_class, universe) + with open(fname, "r") as input_file: + self._parser = Parser(input_file, universe) + result = self._compile(system_class, universe) cname = result.get_name() cnameC = cname.get_string() @@ -29,7 +28,7 @@ return result def compile_class_string(self, stream, system_class, universe): - self._parser = Parser(StringReader(stream), universe) + self._parser = Parser(StringIO(stream), universe) result = self._compile(system_class, universe) return result
9b10bd93191913aaedaa28fc693620a6c2e6d203
pml/load_csv.py
pml/load_csv.py
import os import csv from pml import lattice, element, device def load(directory, name, control_system): lat = lattice.Lattice(name, control_system) with open(os.path.join(directory, 'elements.csv')) as elements: csv_reader = csv.DictReader(elements) for item in csv_reader: e = element.Element(item['name'], float(item['length']), item['type'], None) e.add_to_family(item['type']) lat.add_element(e) with open(os.path.join(directory, 'devices.csv')) as devices: csv_reader = csv.DictReader(devices) for item in csv_reader: d = device.Device(None, item['get_pv'], item['set_pv']) lat[int(item['id']) - 1].add_device(item['field'], d, None) with open(os.path.join(directory, 'families.csv')) as families: csv_reader = csv.DictReader(families) for item in csv_reader: lat[int(item['id']) - 1].add_to_family(item['family']) return lat
import os import csv from pml import lattice, element, device def load(directory, mode, control_system): lat = lattice.Lattice(mode, control_system) with open(os.path.join(directory, mode, 'elements.csv')) as elements: csv_reader = csv.DictReader(elements) for item in csv_reader: e = element.Element(item['name'], float(item['length']), item['type'], None) e.add_to_family(item['type']) lat.add_element(e) with open(os.path.join(directory, mode, 'devices.csv')) as devices: csv_reader = csv.DictReader(devices) for item in csv_reader: d = device.Device(None, item['get_pv'], item['set_pv']) lat[int(item['id']) - 1].add_device(item['field'], d, None) with open(os.path.join(directory, mode, 'families.csv')) as families: csv_reader = csv.DictReader(families) for item in csv_reader: lat[int(item['id']) - 1].add_to_family(item['family']) return lat
Simplify the way modes are loaded into a lattice
Simplify the way modes are loaded into a lattice
Python
apache-2.0
willrogers/pml,willrogers/pml
--- +++ @@ -3,9 +3,9 @@ from pml import lattice, element, device -def load(directory, name, control_system): - lat = lattice.Lattice(name, control_system) - with open(os.path.join(directory, 'elements.csv')) as elements: +def load(directory, mode, control_system): + lat = lattice.Lattice(mode, control_system) + with open(os.path.join(directory, mode, 'elements.csv')) as elements: csv_reader = csv.DictReader(elements) for item in csv_reader: e = element.Element(item['name'], float(item['length']), @@ -13,13 +13,13 @@ e.add_to_family(item['type']) lat.add_element(e) - with open(os.path.join(directory, 'devices.csv')) as devices: + with open(os.path.join(directory, mode, 'devices.csv')) as devices: csv_reader = csv.DictReader(devices) for item in csv_reader: d = device.Device(None, item['get_pv'], item['set_pv']) lat[int(item['id']) - 1].add_device(item['field'], d, None) - with open(os.path.join(directory, 'families.csv')) as families: + with open(os.path.join(directory, mode, 'families.csv')) as families: csv_reader = csv.DictReader(families) for item in csv_reader: lat[int(item['id']) - 1].add_to_family(item['family'])
1619ba48666be69710cd6bcbffe663cd1f7c1066
troposphere/codeguruprofiler.py
troposphere/codeguruprofiler.py
# Copyright (c) 2020, Mark Peek <mark@peek.org> # All rights reserved. # # See LICENSE file for full license. from . import AWSObject class ProfilingGroup(AWSObject): resource_type = "AWS::CodeGuruProfiler::ProfilingGroup" props = { 'AgentPermissions': (dict, False), 'ProfilingGroupName': (basestring, True), }
# Copyright (c) 2012-2019, Mark Peek <mark@peek.org> # All rights reserved. # # See LICENSE file for full license. # # *** Do not modify - this file is autogenerated *** # Resource specification version: 16.1.0 from . import AWSObject class ProfilingGroup(AWSObject): resource_type = "AWS::CodeGuruProfiler::ProfilingGroup" props = { 'AgentPermissions': (dict, False), 'ComputePlatform': (basestring, False), 'ProfilingGroupName': (basestring, True), }
Add AWS::CodeGuruProfiler::ProfilingGroup.ComputePlatform per 2020-07-09 update
Add AWS::CodeGuruProfiler::ProfilingGroup.ComputePlatform per 2020-07-09 update
Python
bsd-2-clause
cloudtools/troposphere,cloudtools/troposphere
--- +++ @@ -1,7 +1,11 @@ -# Copyright (c) 2020, Mark Peek <mark@peek.org> +# Copyright (c) 2012-2019, Mark Peek <mark@peek.org> # All rights reserved. # # See LICENSE file for full license. +# +# *** Do not modify - this file is autogenerated *** +# Resource specification version: 16.1.0 + from . import AWSObject @@ -11,5 +15,6 @@ props = { 'AgentPermissions': (dict, False), + 'ComputePlatform': (basestring, False), 'ProfilingGroupName': (basestring, True), }
227ae986590bf2d5daa5aef028f5f4cd4c1e8917
tests/xtests/base.py
tests/xtests/base.py
from django.test import TestCase from django.contrib.auth.models import User from django.test.client import RequestFactory class BaseTest(TestCase): def setUp(self): self.factory = RequestFactory() def _create_superuser(self, username): return User.objects.create(username=username, is_superuser=True) def _mocked_request(self, url, user='admin'): request = self.factory.get(url) request.user = isinstance(user, User) and user or self._create_superuser(user) return request
from django.test import TestCase from django.contrib.auth.models import User from django.test.client import RequestFactory class BaseTest(TestCase): def setUp(self): self.factory = RequestFactory() def _create_superuser(self, username): return User.objects.create(username=username, is_superuser=True) def _mocked_request(self, url, user='admin'): request = self.factory.get(url) request.user = isinstance(user, User) and user or self._create_superuser(user) request.session = {} return request
Add session to mocked HttpRequest in tests
Add session to mocked HttpRequest in tests
Python
bsd-3-clause
alexsilva/django-xadmin,jneight/django-xadmin,merlian/django-xadmin,pobear/django-xadmin,cupen/django-xadmin,wbcyclist/django-xadmin,cgcgbcbc/django-xadmin,pobear/django-xadmin,t0nyren/django-xadmin,t0nyren/django-xadmin,marguslaak/django-xadmin,t0nyren/django-xadmin,vincent-fei/django-xadmin,iedparis8/django-xadmin,vincent-fei/django-xadmin,taxido/django-xadmin,zhiqiangYang/django-xadmin,vincent-fei/django-xadmin,tvrcopgg/edm_xadmin,hochanh/django-xadmin,sshwsfc/xadmin,Keleir/django-xadmin,huaishan/django-xadmin,t0nyren/django-xadmin,taxido/django-xadmin,jneight/django-xadmin,wbcyclist/django-xadmin,huaishan/django-xadmin,cgcgbcbc/django-xadmin,sshwsfc/xadmin,AndyHelix/django-xadmin,hochanh/django-xadmin,f1aky/xadmin,cupen/django-xadmin,AndyHelix/django-xadmin,pobear/django-xadmin,huaishan/django-xadmin,alexsilva/django-xadmin,sshwsfc/django-xadmin,AndyHelix/django-xadmin,sshwsfc/xadmin,tvrcopgg/edm_xadmin,marguslaak/django-xadmin,merlian/django-xadmin,cupen/django-xadmin,huaishan/django-xadmin,jneight/django-xadmin,cgcgbcbc/django-xadmin,zhiqiangYang/django-xadmin,vincent-fei/django-xadmin,ly0/xxadmin,Keleir/django-xadmin,sshwsfc/django-xadmin,tvrcopgg/edm_xadmin,wbcyclist/django-xadmin,zhiqiangYang/django-xadmin,marguslaak/django-xadmin,hochanh/django-xadmin,hochanh/django-xadmin,ly0/xxadmin,f1aky/xadmin,iedparis8/django-xadmin,merlian/django-xadmin,alexsilva/django-xadmin,alexsilva/django-xadmin,marguslaak/django-xadmin,sshwsfc/django-xadmin,AndyHelix/django-xadmin,sshwsfc/xadmin,cupen/django-xadmin,merlian/django-xadmin,zhiqiangYang/django-xadmin,f1aky/xadmin,sshwsfc/django-xadmin,Keleir/django-xadmin,pobear/django-xadmin,taxido/django-xadmin,ly0/xxadmin,taxido/django-xadmin,f1aky/xadmin,iedparis8/django-xadmin,tvrcopgg/edm_xadmin,Keleir/django-xadmin,ly0/xxadmin
--- +++ @@ -13,4 +13,5 @@ def _mocked_request(self, url, user='admin'): request = self.factory.get(url) request.user = isinstance(user, User) and user or self._create_superuser(user) + request.session = {} return request
58846603f8a5310bb0e6e1eaa9f9f599c315b041
django_webtest/response.py
django_webtest/response.py
# -*- coding: utf-8 -*- from django.test import Client from django.http import SimpleCookie from webtest import TestResponse from django_webtest.compat import urlparse class DjangoWebtestResponse(TestResponse): """ WebOb's Response quacking more like django's HttpResponse. This is here to make more django's TestCase asserts work, not to provide a generally useful proxy. """ streaming = False @property def status_code(self): return self.status_int @property def _charset(self): return self.charset @property def content(self): return self.body @property def client(self): client = Client() client.cookies = SimpleCookie() for k,v in self.test_app.cookies.items(): client.cookies[k] = v return client def __getitem__(self, item): item = item.lower() if item == 'location': # django's test response returns location as http://testserver/, # WebTest returns it as http://localhost:80/ e_scheme, e_netloc, e_path, e_query, e_fragment = urlparse.urlsplit(self.location) if e_netloc == 'localhost:80': e_netloc = 'testserver' return urlparse.urlunsplit([e_scheme, e_netloc, e_path, e_query, e_fragment]) for header, value in self.headerlist: if header.lower() == item: return value raise KeyError(item)
# -*- coding: utf-8 -*- from django.test import Client from django.http import SimpleCookie from webtest import TestResponse from django_webtest.compat import urlparse class DjangoWebtestResponse(TestResponse): """ WebOb's Response quacking more like django's HttpResponse. This is here to make more django's TestCase asserts work, not to provide a generally useful proxy. """ streaming = False @property def status_code(self): return self.status_int @property def _charset(self): return self.charset @property def content(self): return self.body @property def url(self): return self['location'] @property def client(self): client = Client() client.cookies = SimpleCookie() for k,v in self.test_app.cookies.items(): client.cookies[k] = v return client def __getitem__(self, item): item = item.lower() if item == 'location': # django's test response returns location as http://testserver/, # WebTest returns it as http://localhost:80/ e_scheme, e_netloc, e_path, e_query, e_fragment = urlparse.urlsplit(self.location) if e_netloc == 'localhost:80': e_netloc = 'testserver' return urlparse.urlunsplit([e_scheme, e_netloc, e_path, e_query, e_fragment]) for header, value in self.headerlist: if header.lower() == item: return value raise KeyError(item)
Add url property to DjangoWebtestResponse so assertRedirects works in 1.6.
Add url property to DjangoWebtestResponse so assertRedirects works in 1.6.
Python
mit
kmike/django-webtest,helenst/django-webtest,vaad2/django-webtest,django-webtest/django-webtest,abbottc/django-webtest,kharandziuk/django-webtest,abbottc/django-webtest,MikeAmy/django-webtest,andrewyoung1991/django-webtest,helenst/django-webtest,yrik/django-webtest,andrewyoung1991/django-webtest,andriisoldatenko/django-webtest,larssos/django-webtest,django-webtest/django-webtest,kmike/django-webtest,wbbradley/django-webtest,andriisoldatenko/django-webtest
--- 
+++ 
@@ -26,6 +26,10 @@
         return self.body
 
     @property
+    def url(self):
+        return self['location']
+
+    @property
     def client(self):
         client = Client()
         client.cookies = SimpleCookie()
124487f204c5dedea471bd2c45ad8b929ff7fae0
app/clients/sms/loadtesting.py
app/clients/sms/loadtesting.py
import logging

from flask import current_app

from app.clients.sms.firetext import (
    FiretextClient
)

logger = logging.getLogger(__name__)


class LoadtestingClient(FiretextClient):
    '''
    Loadtest sms client.
    '''

    def init_app(self, config, statsd_client, *args, **kwargs):
        super(FiretextClient, self).__init__(*args, **kwargs)
        self.current_app = current_app
        self.api_key = config.config.get('LOADTESTING_API_KEY')
        self.from_number = config.config.get('LOADTESTING_NUMBER')
        self.name = 'loadtesting'
        self.url = "https://www.firetext.co.uk/api/sendsms/json"
        self.statsd_client = statsd_client
import logging

from flask import current_app

from app.clients.sms.firetext import (
    FiretextClient
)

logger = logging.getLogger(__name__)


class LoadtestingClient(FiretextClient):
    '''
    Loadtest sms client.
    '''

    def init_app(self, config, statsd_client, *args, **kwargs):
        super(FiretextClient, self).__init__(*args, **kwargs)
        self.current_app = current_app
        self.api_key = config.config.get('LOADTESTING_API_KEY')
        self.from_number = config.config.get('FROM_NUMBER')
        self.name = 'loadtesting'
        self.url = "https://www.firetext.co.uk/api/sendsms/json"
        self.statsd_client = statsd_client
Fix from number on Load testing client
Fix from number on Load testing client
Python
mit
alphagov/notifications-api,alphagov/notifications-api
--- 
+++ 
@@ -18,7 +18,7 @@
         super(FiretextClient, self).__init__(*args, **kwargs)
         self.current_app = current_app
         self.api_key = config.config.get('LOADTESTING_API_KEY')
-        self.from_number = config.config.get('LOADTESTING_NUMBER')
+        self.from_number = config.config.get('FROM_NUMBER')
         self.name = 'loadtesting'
         self.url = "https://www.firetext.co.uk/api/sendsms/json"
         self.statsd_client = statsd_client
b394f79132d952be20baf15725715691ace69ced
web/slas-web/web/urls.py
web/slas-web/web/urls.py
"""web URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.8/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include, url from django.contrib import admin urlpatterns = [ url(r'^general/', include('general.urls', namespace='general')), url(r'^apache/', include('apache.urls', namespace='apache')), url(r'^bash/', include('bash.urls', namespace='bash')), url(r'^admin/', include(admin.site.urls)), # index url(r'^$', 'general.views.status', name='index'), url(r'^user/login/$', 'web.views.user_login'), url(r'^user/auth$', 'web.views.user_auth'), url(r'^user/logout/$', 'web.views.user_logout'), url(r'^user/invalid_login/$', 'web.views.user_invalid_login'), ]
"""web URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.8/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include, url from django.contrib import admin urlpatterns = [ url(r'^general/', include('general.urls', namespace='general')), url(r'^apache/', include('apache.urls', namespace='apache')), url(r'^bash/', include('bash.urls', namespace='bash')), url(r'^admin/', include(admin.site.urls)), # index url(r'^$', 'general.views.status', name='index'), url(r'^user/login/$', 'web.views.user_login'), url(r'^user/auth$', 'web.views.user_auth'), url(r'^user/logout/$', 'web.views.user_logout'), url(r'^user/invalid_login/$', 'web.views.user_invalid_login'), ] admin.site.site_header = 'SLAS web module administration tool'
Change web admin page title
Change web admin page title
Python
mit
chyla/slas,chyla/pat-lms,chyla/slas,chyla/pat-lms,chyla/slas,chyla/pat-lms,chyla/slas,chyla/slas,chyla/pat-lms,chyla/pat-lms,chyla/slas,chyla/pat-lms,chyla/slas,chyla/pat-lms
--- 
+++ 
@@ -29,3 +29,5 @@
     url(r'^user/logout/$', 'web.views.user_logout'),
     url(r'^user/invalid_login/$', 'web.views.user_invalid_login'),
 ]
+
+admin.site.site_header = 'SLAS web module administration tool'
e836f3c558085aa0a1275546ac45b8146254ee6b
test/default.py
test/default.py
from mock import MagicMock
import pbclient
class TestDefault(object):

    """Test class for pbs.helpers."""

    error = {"action": "GET",
             "exception_cls": "NotFound",
             "exception_msg": "(NotFound)",
             "status": "failed",
             "status_code": 404,
             "target": "/api/app"}

    config = MagicMock()
    config.server = 'http://server'
    config.api_key = 'apikey'
    config.pbclient = pbclient
    config.project = {'name': 'name',
            'description': 'description',
            'short_name': 'short_name'}

    def tearDown(self):
        self.error['status'] = 'failed'
"""Test module for pbs client.""" from mock import MagicMock import pbclient class TestDefault(object): """Test class for pbs.helpers.""" config = MagicMock() config.server = 'http://server' config.api_key = 'apikey' config.pbclient = pbclient config.project = {'name': 'name', 'description': 'description', 'short_name': 'short_name'} def tearDown(self): """Tear down method.""" self.error['status'] = 'failed' @property def error(self, action='GET', exception_cls='NotFound', exception_msg='(NotFound)', status='failed', status_code=404, target='/api/app'): """Error property.""" return {'action': action, 'exception_cls': exception_cls, 'exception_msg': exception_msg, 'status': status, 'status_code': status_code, 'target': target}
Refactor error as a property.
Refactor error as a property.
Python
agpl-3.0
PyBossa/pbs,PyBossa/pbs,PyBossa/pbs
--- 
+++ 
@@ -1,23 +1,35 @@
+"""Test module for pbs client."""
 from mock import MagicMock
 import pbclient
+
+
 class TestDefault(object):
 
     """Test class for pbs.helpers."""
-
-    error = {"action": "GET",
-             "exception_cls": "NotFound",
-             "exception_msg": "(NotFound)",
-             "status": "failed",
-             "status_code": 404,
-             "target": "/api/app"}
 
     config = MagicMock()
     config.server = 'http://server'
     config.api_key = 'apikey'
     config.pbclient = pbclient
     config.project = {'name': 'name',
-            'description': 'description',
-            'short_name': 'short_name'}
+                      'description': 'description',
+                      'short_name': 'short_name'}
 
     def tearDown(self):
+        """Tear down method."""
         self.error['status'] = 'failed'
+
+    @property
+    def error(self, action='GET',
+              exception_cls='NotFound',
+              exception_msg='(NotFound)',
+              status='failed',
+              status_code=404,
+              target='/api/app'):
+        """Error property."""
+        return {'action': action,
+                'exception_cls': exception_cls,
+                'exception_msg': exception_msg,
+                'status': status,
+                'status_code': status_code,
+                'target': target}
fc561301c3a3aea79043348a01e6a468a5693d3e
tests/test_importable.py
tests/test_importable.py
"""Basic set of tests to ensure entire code base is importable""" import pytest def test_importable(): """Simple smoketest to ensure all isort modules are importable""" import isort import isort._future import isort._future._dataclasses import isort._version import isort.api import isort.comments import isort.compat import isort.exceptions import isort.finders import isort.format import isort.hooks import isort.isort import isort.logo import isort.main import isort.output import isort.parse import isort.profiles import isort.pylama_isort import isort.sections import isort.settings import isort.setuptools_commands import isort.sorting import isort.stdlibs import isort.stdlibs.all import isort.stdlibs.py2 import isort.stdlibs.py3 import isort.stdlibs.py27 import isort.stdlibs.py35 import isort.stdlibs.py36 import isort.stdlibs.py37 import isort.utils import isort.wrap import isort.wrap_modes with pytest.raises(SystemExit): import isort.__main__
"""Basic set of tests to ensure entire code base is importable""" import pytest def test_importable(): """Simple smoketest to ensure all isort modules are importable""" import isort import isort._future import isort._future._dataclasses import isort._version import isort.api import isort.comments import isort.exceptions import isort.finders import isort.format import isort.hooks import isort.isort import isort.logo import isort.main import isort.output import isort.parse import isort.profiles import isort.pylama_isort import isort.sections import isort.settings import isort.setuptools_commands import isort.sorting import isort.stdlibs import isort.stdlibs.all import isort.stdlibs.py2 import isort.stdlibs.py3 import isort.stdlibs.py27 import isort.stdlibs.py35 import isort.stdlibs.py36 import isort.stdlibs.py37 import isort.utils import isort.wrap import isort.wrap_modes with pytest.raises(SystemExit): import isort.__main__
Remove no longer needed import check
Remove no longer needed import check
Python
mit
PyCQA/isort,PyCQA/isort
--- 
+++ 
@@ -10,7 +10,6 @@
     import isort._version
     import isort.api
     import isort.comments
-    import isort.compat
     import isort.exceptions
     import isort.finders
     import isort.format
e3aa12af05003222b295a4cea39a1c05c911024a
main.py
main.py
from connect_four import ConnectFour

def main():
    """
    Play a game!
    """
    connect_four = ConnectFour()
    # start the game
    connect_four.start()

if __name__ == "__main__": # Default "main method" idiom.
    main()
from connect_four import ConnectFour


def main():
    """
    Play a game!
    """
    connect_four = ConnectFour()
    menu_choice = 1
    while menu_choice == 1:
        # start the game
        connect_four.start_new()
        # menu
        print("Menu")
        print("1 - Play again")
        print("2 - Quit")
        menu_choice = int(raw_input("choice : "))


if __name__ == "__main__":
    main()
Add menu to start new game and quit
Add menu to start new game and quit
Python
mit
LouisBarranqueiro/ia-connect-four-game
--- 
+++ 
@@ -1,11 +1,20 @@
 from connect_four import ConnectFour
+
 
 def main():
     """
     Play a game!
     """
     connect_four = ConnectFour()
-    # start the game
-    connect_four.start()
+    menu_choice = 1
+    while menu_choice == 1:
+        # start the game
+        connect_four.start_new()
+        # menu
+        print("Menu")
+        print("1 - Play again")
+        print("2 - Quit")
+        menu_choice = int(raw_input("choice : "))
 
-if __name__ == "__main__": # Default "main method" idiom.
+
+if __name__ == "__main__":
     main()
08c2e9144e605063ac3c6313efe639eb7139ba75
main.py
main.py
# Fox, rewritten in Python for literally no reason at all.
import discord
import asyncio

print("Just a moment, Fox is initializing...")

fox = discord.Client()

@fox.event
async def on_ready():
    print('Fox is ready!')
    print('Fox username: ' + fox.user.name)
    print('Fox user ID: ' + fox.user.id)
    print('------')
# Fox, rewritten in Python for literally no reason at all.

import discord
import asyncio

import plugins
import plugins.core

print("Just a moment, Fox is initializing...")

fox = discord.Client()


@fox.event
async def on_ready():
    print('Fox is ready!')
    print('Fox username: ' + fox.user.name)
    print('Fox user ID: ' + fox.user.id)
    print('------')
Add import statements for plugin system
Add import statements for plugin system

Signed-off-by: Reed <f5cabf8735907151a446812c9875d6c0c712d847@plusreed.com>
Python
mit
plusreed/foxpy
--- 
+++ 
@@ -1,10 +1,15 @@
 # Fox, rewritten in Python for literally no reason at all.
+
 import discord
 import asyncio
 
+import plugins
+import plugins.core
+
 print("Just a moment, Fox is initializing...")
 
 fox = discord.Client()
+
 
 @fox.event
 async def on_ready():
ddc82357cafbf58822f4d98f484fbe4dd860743e
sqlviz.py
sqlviz.py
#! usr/bin/env python3

from docopt import docopt
from matplotlib import pyplot
import re

class Schema:
    """
    Wraps the SQL source code for a schema and provides methods to get information about that schema.
    """

    table_def = re.compile(r"CREATE TABLE|create table")

    def __init__(self, source):
        """
        Creates a new instance of Schema for the specified source code string.
        """
        self.source = source

    def n_tables(self):
        """
        Returns the number of tables defined in the schema
        """
        return len(table_def.findall(source))

    def n_keys(self):
        """
        Returns the number of keys defined in the schema
        """
        pass #TODO: not yet implementend

    def n_datatypes(self):
        """
        Returns the number of each data type in the schema.
        """
        pass #TODO: not yet implementend

    def lengths(self):
        """
        Returns a dictionary mapping each data type in the schema to a list of the lengths of those data types.
        """
        pass #TODO: not yet implementend
#! usr/bin/env python3

from docopt import docopt
from matplotlib import pyplot
import re

class Schema:
    """
    Wraps the SQL source code for a schema and provides methods to get information about that schema.
    """

    table_def = re.compile(r"CREATE TABLE|create table")

    def __init__(self, source):
        """
        Creates a new instance of Schema for the specified source code string.
        """
        self.source = source

    def n_tables(self):
        """
        Returns the number of tables defined in the schema
        """
        return len(Schema.table_def.findall(source))

    def n_keys(self):
        """
        Returns the number of keys defined in the schema
        """
        pass #TODO: not yet implementend

    def n_datatypes(self):
        """
        Returns the number of each data type in the schema.
        """
        pass #TODO: not yet implementend

    def lengths(self):
        """
        Returns a dictionary mapping each data type in the schema to a list of the lengths of those data types.
        """
        pass #TODO: not yet implementend
Fix reference to static var
Fix reference to static var
Python
mit
hawkw/sqlviz
--- 
+++ 
@@ -21,7 +21,7 @@
         """
         Returns the number of tables defined in the schema
         """
-        return len(table_def.findall(source))
+        return len(Schema.table_def.findall(source))
 
     def n_keys(self):
         """
cc08100734df4eea053758a04610d889ced8c476
dataportal/utils/diagnostics.py
dataportal/utils/diagnostics.py
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)
from collections import OrderedDict
import importlib
import sys
import six


def watermark():
    """
    Give the version of each of the dependencies -- useful for bug reports.

    Returns
    -------
    result : dict
        mapping the name of each package to its version string or, if an
        optional dependency is not installed, None
    """
    packages = ['six', 'numpy', 'scipy', 'matplotlib', 'pandas',
                'pims', 'pyyaml', 'metadatastore', 'filestore',
                'channelarchiver', 'bubblegum']
    result = OrderedDict()
    for package_name in packages:
        try:
            package = importlib.import_module(package_name)
        except ImportError:
            result[package_name] = None
        else:
            try:
                version = package.__version__
            except AttributeError as err:
                version = "FAILED TO DETECT: {0}".format(err)
            result[package_name] = version

    # enaml provides its version differently
    try:
        import enaml
    except ImportError:
        result['enaml'] = None
    else:
        from enaml.version import version_info
        result['enaml'] = _make_version_string(version_info)

    # ...as does Python
    version_info = sys.version_info
    result['python'] = _make_version_string(version_info)
    return result


def _make_version_string(version_info):
    version_string = '.'.join(map(str, [version_info[0], version_info[1],
                                        version_info[2]]))
    return version_string
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)
from collections import OrderedDict
import importlib
import sys
import six


def watermark():
    """
    Give the version of each of the dependencies -- useful for bug reports.

    Returns
    -------
    result : dict
        mapping the name of each package to its version string or, if an
        optional dependency is not installed, None
    """
    packages = ['six', 'numpy', 'scipy', 'matplotlib', 'pandas',
                'pims', 'pyyaml', 'metadatastore', 'filestore',
                'channelarchiver', 'bubblegum']
    result = OrderedDict()
    for package_name in packages:
        try:
            package = importlib.import_module(package_name)
            version = package.__version__
        except ImportError:
            result[package_name] = None
        except Exception as err:
            version = "FAILED TO DETECT: {0}".format(err)
            result[package_name] = version

    # enaml provides its version differently
    try:
        import enaml
        from enaml.version import version_info
        version = _make_version_string(version_info)
    except ImportError:
        version = None
    except Exception as err:
        version = "FAILED TO DETECT: {0}".format(err)
    result['enaml'] = version

    # ...as does Python
    version_info = sys.version_info
    result['python'] = _make_version_string(version_info)
    return result


def _make_version_string(version_info):
    version_string = '.'.join(map(str, [version_info[0], version_info[1],
                                        version_info[2]]))
    return version_string
Make watermark more resilient to err on import.
ENN: Make watermark more resilient to err on import.
Python
bsd-3-clause
tacaswell/dataportal,NSLS-II/dataportal,tacaswell/dataportal,danielballan/dataportal,danielballan/datamuxer,danielballan/datamuxer,ericdill/datamuxer,danielballan/dataportal,ericdill/databroker,ericdill/datamuxer,NSLS-II/dataportal,NSLS-II/datamuxer,ericdill/databroker
--- 
+++ 
@@ -23,23 +23,23 @@
     for package_name in packages:
         try:
             package = importlib.import_module(package_name)
+            version = package.__version__
         except ImportError:
             result[package_name] = None
-        else:
-            try:
-                version = package.__version__
-            except AttributeError as err:
-                version = "FAILED TO DETECT: {0}".format(err)
+        except Exception as err:
+            version = "FAILED TO DETECT: {0}".format(err)
             result[package_name] = version
 
     # enaml provides its version differently
     try:
         import enaml
+        from enaml.version import version_info
+        version = _make_version_string(version_info)
     except ImportError:
-        result['enaml'] = None
-    else:
-        from enaml.version import version_info
-        result['enaml'] = _make_version_string(version_info)
+        version = None
+    except Exception as err:
+        version = "FAILED TO DETECT: {0}".format(err)
+    result['enaml'] = version
 
     # ...as does Python
     version_info = sys.version_info
9ea9b0bed617dc8a309c0d2dd90f02ffbc34edbc
client/bin/daemon.py
client/bin/daemon.py
#!/usr/bin/python
import time
import subprocess
from os import path, chdir, getcwd
import requests
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler

class ProjectEventHandler(FileSystemEventHandler):
    def on_any_event(self, event):
        print('Dispatching request.')

        # Find the git root
        # TODO this could be made more efficient with popen
        cwd = getcwd()
        chdir(path.dirname(event.src_path))
        repo_root = subprocess.check_output(['git', 'rev-parse', '--show-toplevel'], universal_newlines=True)
        repo_url = subprocess.check_output(['git', 'remote', 'get-url', 'origin'], universal_newlines=True)
        chdir(cwd)

        payload = { 'action': 'edit', 'repo_url': repo_url }
        r = requests.post('http://localhost:8000', json=payload)

print('realtime.recurse.com client starting up...')

event_handler = ProjectEventHandler()
observer = Observer()
observer.schedule(event_handler, path='.', recursive=True)
observer.start()

print('Listening for filesystem events.')

try:
    while True:
        time.sleep(1)
except KeyboardInterrupt:
    observer.stop()
observer.join()
#!/usr/bin/python
import time
import subprocess
from os import path, chdir, getcwd
import requests
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler

class ProjectEventHandler(FileSystemEventHandler):
    def on_any_event(self, event):
        print('Dispatching request.')

        # Find the git root
        # TODO this could be made more efficient with popen
        cwd = getcwd()
        chdir(path.dirname(event.src_path))
        repo_root = subprocess.check_output(['git', 'rev-parse', '--show-toplevel'], universal_newlines=True)
        repo_url = subprocess.check_output(['git', 'remote', 'get-url', 'origin'], universal_newlines=True)
        chdir(cwd)

        payload = { 'action': 'edit', 'url': repo_url }
        r = requests.post('http://localhost:8000/api/people/aj', json=payload)
        print(r.status_code, r.reason)

print('realtime.recurse.com client starting up...')

event_handler = ProjectEventHandler()
observer = Observer()
observer.schedule(event_handler, path='.', recursive=True)
observer.start()

print('Listening for filesystem events.')

try:
    while True:
        time.sleep(1)
except KeyboardInterrupt:
    observer.stop()
observer.join()
Fix POSTing from the client
Fix POSTing from the client
Python
agpl-3.0
strugee/realtime.recurse.com,strugee/realtime.recurse.com,strugee/realtime.recurse.com
--- 
+++ 
@@ -18,8 +18,9 @@
         repo_url = subprocess.check_output(['git', 'remote', 'get-url', 'origin'], universal_newlines=True)
         chdir(cwd)
 
-        payload = { 'action': 'edit', 'repo_url': repo_url }
-        r = requests.post('http://localhost:8000', json=payload)
+        payload = { 'action': 'edit', 'url': repo_url }
+        r = requests.post('http://localhost:8000/api/people/aj', json=payload)
+        print(r.status_code, r.reason)
 
 print('realtime.recurse.com client starting up...')
c95fdbeb145e5bcef2ded646c2319b58ae9e996d
rpg_base/urls.py
rpg_base/urls.py
from django.conf.urls import include, url
from rpg_base.views import *

urlpatterns = [
    # Examples:
    # url(r'^$', 'django_rpg.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),

    url(r'^campaign/$', campaign.index, name='index'),
    url(r'^campaign/(?P<pk>[0-9]+)/$', campaign.view, name='campaign_view'),
    url(r'^campaign/(?P<pk>[0-9]+)/character/$', character.index, name='character_index'),
    url(r'^campaign/(?P<pk>[0-9]+)/character/(?P<character_pk>[0-9]+)$', character.view, name='character_view'),

    # TODO This is going to get messy super quick.
]
from django.conf.urls import include, url
from rpg_base.views import *

urlpatterns = [
    # Examples:
    # url(r'^$', 'django_rpg.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),

    url(r'^campaign/$', campaign.index, name='campaign_index'),
    url(r'^campaign/(?P<pk>[0-9]+)/$', campaign.view, name='campaign_view'),
    url(r'^campaign/(?P<pk>[0-9]+)/character/$', character.index, name='character_index'),
    url(r'^campaign/(?P<pk>[0-9]+)/character/(?P<character_pk>[0-9]+)$', character.view, name='character_view'),

    # TODO This is going to get messy super quick.
]
Change name for campaign index url
Change name for campaign index url
Python
mit
ncphillips/django_rpg,ncphillips/django_rpg
--- 
+++ 
@@ -6,7 +6,7 @@
     # url(r'^$', 'django_rpg.views.home', name='home'),
     # url(r'^blog/', include('blog.urls')),
 
-    url(r'^campaign/$', campaign.index, name='index'),
+    url(r'^campaign/$', campaign.index, name='campaign_index'),
     url(r'^campaign/(?P<pk>[0-9]+)/$', campaign.view, name='campaign_view'),
     url(r'^campaign/(?P<pk>[0-9]+)/character/$', character.index, name='character_index'),
     url(r'^campaign/(?P<pk>[0-9]+)/character/(?P<character_pk>[0-9]+)$', character.view, name='character_view'),
5203ecdaf839f58e7f00ef74fec9dbecbeb52583
tests/backends/__init__.py
tests/backends/__init__.py
from mopidy.models import Track

class BaseCurrentPlaylistControllerTest(object):
    uris = []
    backend_class = None

    def setUp(self):
        self.backend = self.backend_class()

    def test_add(self):
        playlist = self.backend.current_playlist

        for uri in self.uris:
            playlist.add(uri)
            self.assertEqual(uri, playlist.tracks[-1].uri)

    def test_add_at_position(self):
        playlist = self.backend.current_playlist

        for uri in self.uris:
            playlist.add(uri, 0)
            self.assertEqual(uri, playlist.tracks[0].uri)

        # FIXME test other placements

class BasePlaybackControllerTest(object):
    backend_class = None

    def setUp(self):
        self.backend = self.backend_class()

    def test_play_with_no_current_track(self):
        playback = self.backend.playback

        self.assertEqual(playback.state, playback.STOPPED)

        result = playback.play()

        self.assertEqual(result, False)
        self.assertEqual(playback.state, playback.STOPPED)

    def test_next(self):
        playback = self.backend.playback

        current_song = playback.playlist_position

        playback.next()

        self.assertEqual(playback.playlist_position, current_song+1)
from mopidy.models import Track

class BaseCurrentPlaylistControllerTest(object):
    uris = []
    backend_class = None

    def setUp(self):
        self.backend = self.backend_class()

    def test_uri_set(self):
        self.assert_(self.uris)

    def test_add(self):
        playlist = self.backend.current_playlist

        for uri in self.uris:
            playlist.add(uri)
            self.assertEqual(uri, playlist.tracks[-1].uri)

    def test_add_at_position(self):
        playlist = self.backend.current_playlist

        for uri in self.uris:
            playlist.add(uri, 0)
            self.assertEqual(uri, playlist.tracks[0].uri)

        # FIXME test other placements

class BasePlaybackControllerTest(object):
    backend_class = None

    def setUp(self):
        self.backend = self.backend_class()

    def test_play_with_no_current_track(self):
        playback = self.backend.playback

        self.assertEqual(playback.state, playback.STOPPED)

        result = playback.play()

        self.assertEqual(result, False)
        self.assertEqual(playback.state, playback.STOPPED)

    def test_next(self):
        playback = self.backend.playback

        current_song = playback.playlist_position

        playback.next()

        self.assertEqual(playback.playlist_position, current_song+1)
Add test to check that uris are set
Add test to check that uris are set
Python
apache-2.0
kingosticks/mopidy,abarisain/mopidy,mopidy/mopidy,kingosticks/mopidy,dbrgn/mopidy,SuperStarPL/mopidy,ZenithDK/mopidy,jodal/mopidy,pacificIT/mopidy,diandiankan/mopidy,glogiotatidis/mopidy,pacificIT/mopidy,diandiankan/mopidy,abarisain/mopidy,ZenithDK/mopidy,dbrgn/mopidy,SuperStarPL/mopidy,diandiankan/mopidy,bencevans/mopidy,adamcik/mopidy,priestd09/mopidy,mokieyue/mopidy,adamcik/mopidy,liamw9534/mopidy,jcass77/mopidy,vrs01/mopidy,kingosticks/mopidy,bacontext/mopidy,SuperStarPL/mopidy,mopidy/mopidy,SuperStarPL/mopidy,jcass77/mopidy,tkem/mopidy,hkariti/mopidy,mokieyue/mopidy,priestd09/mopidy,vrs01/mopidy,tkem/mopidy,dbrgn/mopidy,swak/mopidy,hkariti/mopidy,priestd09/mopidy,dbrgn/mopidy,ZenithDK/mopidy,ZenithDK/mopidy,bacontext/mopidy,hkariti/mopidy,vrs01/mopidy,mokieyue/mopidy,woutervanwijk/mopidy,mopidy/mopidy,ali/mopidy,pacificIT/mopidy,rawdlite/mopidy,hkariti/mopidy,jmarsik/mopidy,ali/mopidy,jmarsik/mopidy,glogiotatidis/mopidy,bencevans/mopidy,mokieyue/mopidy,swak/mopidy,swak/mopidy,rawdlite/mopidy,rawdlite/mopidy,vrs01/mopidy,jmarsik/mopidy,adamcik/mopidy,jmarsik/mopidy,tkem/mopidy,diandiankan/mopidy,quartz55/mopidy,woutervanwijk/mopidy,bencevans/mopidy,glogiotatidis/mopidy,rawdlite/mopidy,pacificIT/mopidy,ali/mopidy,bencevans/mopidy,glogiotatidis/mopidy,jcass77/mopidy,quartz55/mopidy,bacontext/mopidy,liamw9534/mopidy,quartz55/mopidy,quartz55/mopidy,bacontext/mopidy,jodal/mopidy,swak/mopidy,tkem/mopidy,jodal/mopidy,ali/mopidy
--- 
+++ 
@@ -6,6 +6,9 @@
 
     def setUp(self):
         self.backend = self.backend_class()
+
+    def test_uri_set(self):
+        self.assert_(self.uris)
 
     def test_add(self):
         playlist = self.backend.current_playlist
462b6878507e3928068745cccc80720e8699dafa
server.py
server.py
from flask import Flask
from SPARQLWrapper import SPARQLWrapper, JSON
from flask import request
from flask.ext.cors import CORS

app = Flask(__name__)
CORS(app)


@app.route('/')
def hello_world():
    auth = request.authorization
    sparql = SPARQLWrapper('https://knowledgestore2.fbk.eu/nwr/dutchhouse/sparql')
    sparql.setQuery("""
    SELECT * WHERE {dbpedia:Barack_Obama rdfs:label ?label . }
    LIMIT 100
    """)
    sparql.setCredentials(auth.username, auth.password)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    r = ['{} {}'.format(k, v) for k, v in results.iteritems()]
    return ' '.join(r)

if __name__ == '__main__':
    app.run(debug=True)
from flask import Flask
from SPARQLWrapper import SPARQLWrapper, JSON
from flask import request, jsonify
from flask.ext.cors import CORS

app = Flask(__name__)
CORS(app)


@app.route('/')
def hello_world():
    auth = request.authorization
    sparql = SPARQLWrapper('https://knowledgestore2.fbk.eu/nwr/dutchhouse/sparql')
    sparql.setQuery("""
    SELECT * WHERE {dbpedia:Barack_Obama rdfs:label ?label . }
    LIMIT 100
    """)
    sparql.setCredentials(auth.username, auth.password)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    return jsonify(**results)

if __name__ == '__main__':
    app.run(debug=True)
Return json object in flask app
Return json object in flask app

Instead of a string, the flask app now returns the json object retrieved from the knowledge store.
Python
apache-2.0
NLeSC/EmbodiedEmotions,NLeSC/EmbodiedEmotions,NLeSC/UncertaintyVisualization,NLeSC/UncertaintyVisualization,NLeSC/EmbodiedEmotions
--- 
+++ 
@@ -1,6 +1,6 @@
 from flask import Flask
 from SPARQLWrapper import SPARQLWrapper, JSON
-from flask import request
+from flask import request, jsonify
 from flask.ext.cors import CORS
 
 app = Flask(__name__)
@@ -16,8 +16,7 @@
     sparql.setCredentials(auth.username, auth.password)
     sparql.setReturnFormat(JSON)
     results = sparql.query().convert()
-    r = ['{} {}'.format(k, v) for k, v in results.iteritems()]
-    return ' '.join(r)
+    return jsonify(**results)
 
 if __name__ == '__main__':
     app.run(debug=True)
c5f75072707dbe9a723ffbff71ab01d0519b6baa
tools/generateDataset.py
tools/generateDataset.py
import numpy as np
import os
import sys
import time
from unrealcv import client


class Dataset(object):

    def __init__(self,folder,nberOfImages):
        self.folder=folder
        self.nberOfImages=nberOfImages
        self.client.connect()

    def scan():
        try:
            p=self.client.request('vget /camera/0/lit')
            a=p.split('/').pop()
            p=self.client.request('vget /camera/0/object_mask '+a)
            print p
        except Exception,e:
            print 'Image not saved: error occured, '+str(e)
import numpy as np
import os
import sys
import time
from unrealcv import client


class Dataset(object):

    def __init__(self,folder,nberOfImages):
        self.folder=folder
        self.nberOfImages=nberOfImages
        self.client.connect()

    def scan():
        try:
            p=self.client.request('vget /camera/0/lit')
            a=p.split('/').pop()
            p=self.client.request('vget /camera/0/object_mask '+a)
            print p
        except Exception,e:
            print 'Image could not be saved not saved: error occured, '+str(e)
Structure of data generator iimproved
:muscle: Structure of data generator iimproved

Structure of dataset generator improved
Python
bsd-2-clause
fkenghagho/RobotVQA
--- 
+++ 
@@ -19,5 +19,5 @@
             p=self.client.request('vget /camera/0/object_mask '+a)
             print p
         except Exception,e:
-            print 'Image not saved: error occured, '+str(e)
+            print 'Image could not be saved not saved: error occured, '+str(e)
 
0f2bc9cc1216dfd1e5a8f2aa8467428dc2be6781
scikits/learn/pyem/misc.py
scikits/learn/pyem/misc.py
# Last Change: Sat Jun 09 07:00 PM 2007 J

#========================================================
# Constants used throughout the module (def args, etc...)
#========================================================
# This is the default dimension for representing confidence ellipses
DEF_VIS_DIM = [0, 1]
DEF_ELL_NP = 100
DEF_LEVEL = 0.39
#=====================================================================
# "magic number", that is number used to control regularization and co
# Change them at your risk !
#=====================================================================
# max deviation allowed when comparing double (this is actually stupid,
# I should actually use a number of decimals)
_MAX_DBL_DEV = 1e-10

# max conditional number allowed
_MAX_COND = 1e8
_MIN_INV_COND = 1/_MAX_COND

# Default alpha for regularization
_DEF_ALPHA = 1e-1

# Default min delta for regularization
_MIN_DBL_DELTA = 1e-5
# Last Change: Sat Jun 09 08:00 PM 2007 J

#========================================================
# Constants used throughout the module (def args, etc...)
#========================================================
# This is the default dimension for representing confidence ellipses
DEF_VIS_DIM = (0, 1)
DEF_ELL_NP = 100
DEF_LEVEL = 0.39
#=====================================================================
# "magic number", that is number used to control regularization and co
# Change them at your risk !
#=====================================================================
# max deviation allowed when comparing double (this is actually stupid,
# I should actually use a number of decimals)
_MAX_DBL_DEV = 1e-10

# max conditional number allowed
_MAX_COND = 1e8
_MIN_INV_COND = 1/_MAX_COND

# Default alpha for regularization
_DEF_ALPHA = 1e-1

# Default min delta for regularization
_MIN_DBL_DELTA = 1e-5
Set def arguments to immutable to avoid nasty side effect.
Set def arguments to immutable to avoid nasty side effect.

From: cdavid <cdavid@cb17146a-f446-4be1-a4f7-bd7c5bb65646>

git-svn-id: a2d1b0e147e530765aaf3e1662d4a98e2f63c719@110 22fbfee3-77ab-4535-9bad-27d1bd3bc7d8
Python
bsd-3-clause
f3r/scikit-learn,cainiaocome/scikit-learn,tmhm/scikit-learn,schets/scikit-learn,chrsrds/scikit-learn,theoryno3/scikit-learn,harshaneelhg/scikit-learn,waterponey/scikit-learn,mayblue9/scikit-learn,larsmans/scikit-learn,florian-f/sklearn,IshankGulati/scikit-learn,etkirsch/scikit-learn,arabenjamin/scikit-learn,IssamLaradji/scikit-learn,terkkila/scikit-learn,fabianp/scikit-learn,rsivapr/scikit-learn,Obus/scikit-learn,xubenben/scikit-learn,ashhher3/scikit-learn,B3AU/waveTree,rahuldhote/scikit-learn,frank-tancf/scikit-learn,billy-inn/scikit-learn,JsNoNo/scikit-learn,zaxtax/scikit-learn,meduz/scikit-learn,treycausey/scikit-learn,stylianos-kampakis/scikit-learn,shusenl/scikit-learn,tomlof/scikit-learn,liberatorqjw/scikit-learn,ElDeveloper/scikit-learn,rahuldhote/scikit-learn,AlexRobson/scikit-learn,sgenoud/scikit-learn,Adai0808/scikit-learn,PatrickOReilly/scikit-learn,0asa/scikit-learn,JosmanPS/scikit-learn,MatthieuBizien/scikit-learn,etkirsch/scikit-learn,hlin117/scikit-learn,nrhine1/scikit-learn,ndingwall/scikit-learn,mrshu/scikit-learn,rishikksh20/scikit-learn,CforED/Machine-Learning,PatrickOReilly/scikit-learn,shangwuhencc/scikit-learn,ogrisel/scikit-learn,ndingwall/scikit-learn,jpautom/scikit-learn,trungnt13/scikit-learn,yonglehou/scikit-learn,sinhrks/scikit-learn,plissonf/scikit-learn,vshtanko/scikit-learn,pkruskal/scikit-learn,krez13/scikit-learn,sanketloke/scikit-learn,nomadcube/scikit-learn,sumspr/scikit-learn,liberatorqjw/scikit-learn,MartinSavc/scikit-learn,tdhopper/scikit-learn,rishikksh20/scikit-learn,Aasmi/scikit-learn,nesterione/scikit-learn,mlyundin/scikit-learn,anntzer/scikit-learn,phdowling/scikit-learn,huzq/scikit-learn,lin-credible/scikit-learn,abhishekkrthakur/scikit-learn,aetilley/scikit-learn,abhishekgahlot/scikit-learn,loli/semisupervisedforests,vibhorag/scikit-learn,bigdataelephants/scikit-learn,schets/scikit-learn,florian-f/sklearn,shikhardb/scikit-learn,ilyes14/scikit-learn,robbymeals/scikit-learn,stylianos-kampakis/scikit-learn,ChanderG/scikit-learn,hitszxp/scikit-learn,joernhees/scikit-learn,ycaihua/scikit-learn,alexeyum/scikit-learn,UNR-AERIAL/scikit-learn,giorgiop/scikit-learn,xavierwu/scikit-learn,smartscheduling/scikit-learn-categorical-tree,DonBeo/scikit-learn,mhue/scikit-learn,vortex-ape/scikit-learn,idlead/scikit-learn,bigdataelephants/scikit-learn,mehdidc/scikit-learn,chrisburr/scikit-learn,BiaDarkia/scikit-learn,vinayak-mehta/scikit-learn,nvoron23/scikit-learn,aewhatley/scikit-learn,nhejazi/scikit-learn,eg-zhang/scikit-learn,carrillo/scikit-learn,dsullivan7/scikit-learn,Fireblend/scikit-learn,AlexRobson/scikit-learn,shyamalschandra/scikit-learn,espg/scikit-learn,harshaneelhg/scikit-learn,AlexandreAbraham/scikit-learn,chrisburr/scikit-learn,btabibian/scikit-learn,altairpearl/scikit-learn,TomDLT/scikit-learn,aabadie/scikit-learn,akionakamura/scikit-learn,arahuja/scikit-learn,cainiaocome/scikit-learn,mblondel/scikit-learn,CforED/Machine-Learning,nesterione/scikit-learn,clemkoa/scikit-learn,DSLituiev/scikit-learn,bnaul/scikit-learn,MechCoder/scikit-learn,bigdataelephants/scikit-learn,cauchycui/scikit-learn,samzhang111/scikit-learn,hlin117/scikit-learn,ilo10/scikit-learn,beepee14/scikit-learn,aewhatley/scikit-learn,vermouthmjl/scikit-learn,jaidevd/scikit-learn,glouppe/scikit-learn,LohithBlaze/scikit-learn,russel1237/scikit-learn,sumspr/scikit-learn,walterreade/scikit-learn,JosmanPS/scikit-learn,rajat1994/scikit-learn,3manuek/scikit-learn,mattgiguere/scikit-learn,mxjl620/scikit-learn,samuel1208/scikit-learn,loli/sklearn-ensembletrees,cybernet14/scikit-learn,Barmaley-
exe/scikit-learn,marcocaccin/scikit-learn,sgenoud/scikit-learn,shangwuhencc/scikit-learn,vigilv/scikit-learn,Srisai85/scikit-learn,xwolf12/scikit-learn,fyffyt/scikit-learn,pypot/scikit-learn,rohanp/scikit-learn,simon-pepin/scikit-learn,jzt5132/scikit-learn,equialgo/scikit-learn,pythonvietnam/scikit-learn,cl4rke/scikit-learn,clemkoa/scikit-learn,pythonvietnam/scikit-learn,larsmans/scikit-learn,jereze/scikit-learn,rohanp/scikit-learn,quheng/scikit-learn,jlegendary/scikit-learn,kagayakidan/scikit-learn,Titan-C/scikit-learn,jaidevd/scikit-learn,espg/scikit-learn,andaag/scikit-learn,vigilv/scikit-learn,mojoboss/scikit-learn,xzh86/scikit-learn,abimannans/scikit-learn,Garrett-R/scikit-learn,zorroblue/scikit-learn,AnasGhrab/scikit-learn,huobaowangxi/scikit-learn,Nyker510/scikit-learn,LohithBlaze/scikit-learn,Barmaley-exe/scikit-learn,altairpearl/scikit-learn,pv/scikit-learn,TomDLT/scikit-learn,AIML/scikit-learn,MatthieuBizien/scikit-learn,ssaeger/scikit-learn,abhishekkrthakur/scikit-learn,walterreade/scikit-learn,ephes/scikit-learn,RomainBrault/scikit-learn,jayflo/scikit-learn,ningchi/scikit-learn,yyjiang/scikit-learn,wanggang3333/scikit-learn,Garrett-R/scikit-learn,luo66/scikit-learn,betatim/scikit-learn,r-mart/scikit-learn,siutanwong/scikit-learn,nelson-liu/scikit-learn,murali-munna/scikit-learn,jm-begon/scikit-learn,yunfeilu/scikit-learn,CVML/scikit-learn,fabioticconi/scikit-learn,jorik041/scikit-learn,lin-credible/scikit-learn,pratapvardhan/scikit-learn,MartinDelzant/scikit-learn,harshaneelhg/scikit-learn,PatrickChrist/scikit-learn,RayMick/scikit-learn,siutanwong/scikit-learn,ElDeveloper/scikit-learn,zorojean/scikit-learn,chrsrds/scikit-learn,YinongLong/scikit-learn,lbishal/scikit-learn,lbishal/scikit-learn,appapantula/scikit-learn,hitszxp/scikit-learn,kagayakidan/scikit-learn,LiaoPan/scikit-learn,f3r/scikit-learn,trankmichael/scikit-learn,macks22/scikit-learn,glennq/scikit-learn,kashif/scikit-learn,themrmax/scikit-learn,Adai0808/scikit-learn,siutanwong/scikit-learn,nrhine1/scikit-learn,robin-lai/scikit-learn,dsullivan7/scikit-learn,lbishal/scikit-learn,wzbozon/scikit-learn,davidgbe/scikit-learn,ankurankan/scikit-learn,shangwuhencc/scikit-learn,jlegendary/scikit-learn,ldirer/scikit-learn,hugobowne/scikit-learn,depet/scikit-learn,trankmichael/scikit-learn,icdishb/scikit-learn,aflaxman/scikit-learn,B3AU/waveTree,jmschrei/scikit-learn,zihua/scikit-learn,evgchz/scikit-learn,victorbergelin/scikit-learn,ogrisel/scikit-learn,q1ang/scikit-learn,DonBeo/scikit-learn,ycaihua/scikit-learn,aflaxman/scikit-learn,ChanderG/scikit-learn,mrshu/scikit-learn,maheshakya/scikit-learn,NunoEdgarGub1/scikit-learn,sergeyf/scikit-learn,sanketloke/scikit-learn,lazywei/scikit-learn,adamgreenhall/scikit-learn,cl4rke/scikit-learn,jakobworldpeace/scikit-learn,khkaminska/scikit-learn,ahoyosid/scikit-learn,mattilyra/scikit-learn,mugizico/scikit-learn,larsmans/scikit-learn,sgenoud/scikit-learn,Jimmy-Morzaria/scikit-learn,xiaoxiamii/scikit-learn,andaag/scikit-learn,saiwing-yeung/scikit-learn,zhenv5/scikit-learn,tomlof/scikit-learn,gotomypc/scikit-learn,PatrickChrist/scikit-learn,glouppe/scikit-learn,Djabbz/scikit-learn,3manuek/scikit-learn,themrmax/scikit-learn,jm-begon/scikit-learn,Barmaley-exe/scikit-learn,hitszxp/scikit-learn,robbymeals/scikit-learn,davidgbe/scikit-learn,ZenDevelopmentSystems/scikit-learn,macks22/scikit-learn,eickenberg/scikit-learn,Garrett-R/scikit-learn,IssamLaradji/scikit-learn,ivannz/scikit-learn,yunfeilu/scikit-learn,depet/scikit-learn,ankurankan/scikit-learn,hrjn/scikit-learn,xzh86/scikit-learn,yonglehou
/scikit-learn,mikebenfield/scikit-learn,jpautom/scikit-learn,ldirer/scikit-learn,0x0all/scikit-learn,treycausey/scikit-learn,PrashntS/scikit-learn,imaculate/scikit-learn,jakobworldpeace/scikit-learn,cdegroc/scikit-learn,DSLituiev/scikit-learn,fbagirov/scikit-learn,liangz0707/scikit-learn,vivekmishra1991/scikit-learn,TomDLT/scikit-learn,tawsifkhan/scikit-learn,yunfeilu/scikit-learn,kaichogami/scikit-learn,iismd17/scikit-learn,sarahgrogan/scikit-learn,hsuantien/scikit-learn,IndraVikas/scikit-learn,MohammedWasim/scikit-learn,RPGOne/scikit-learn,mugizico/scikit-learn,toastedcornflakes/scikit-learn,kmike/scikit-learn,Myasuka/scikit-learn,vivekmishra1991/scikit-learn,fengzhyuan/scikit-learn,lesteve/scikit-learn,HolgerPeters/scikit-learn,mfjb/scikit-learn,tosolveit/scikit-learn,Djabbz/scikit-learn,NelisVerhoef/scikit-learn,maheshakya/scikit-learn,kjung/scikit-learn,cl4rke/scikit-learn,thilbern/scikit-learn,CVML/scikit-learn,ningchi/scikit-learn,ivannz/scikit-learn,andrewnc/scikit-learn,mikebenfield/scikit-learn,waterponey/scikit-learn,manashmndl/scikit-learn,zhenv5/scikit-learn,etkirsch/scikit-learn,pianomania/scikit-learn,hugobowne/scikit-learn,jlegendary/scikit-learn,AlexanderFabisch/scikit-learn,kevin-intel/scikit-learn,roxyboy/scikit-learn,ClimbsRocks/scikit-learn,evgchz/scikit-learn,andaag/scikit-learn,belltailjp/scikit-learn,arabenjamin/scikit-learn,olologin/scikit-learn,pratapvardhan/scikit-learn,loli/sklearn-ensembletrees,quheng/scikit-learn,cybernet14/scikit-learn,aminert/scikit-learn,hsiaoyi0504/scikit-learn,khkaminska/scikit-learn,jlegendary/scikit-learn,sergeyf/scikit-learn,jakirkham/scikit-learn,mikebenfield/scikit-learn,vinayak-mehta/scikit-learn,ndingwall/scikit-learn,ashhher3/scikit-learn,spallavolu/scikit-learn,thientu/scikit-learn,krez13/scikit-learn,russel1237/scikit-learn,murali-munna/scikit-learn,waterponey/scikit-learn,simon-pepin/scikit-learn,jjx02230808/project0223,RayMick/scikit-learn,BiaDarkia/scikit-learn,glemaitre/scikit-learn,loli/semisupervisedforests,NunoEdgarGub1/scikit-learn,andaag/scikit-learn,abhishekgahlot/scikit-learn,trankmichael/scikit-learn,sanketloke/scikit-learn,tosolveit/scikit-learn,RPGOne/scikit-learn,Garrett-R/scikit-learn,YinongLong/scikit-learn,PrashntS/scikit-learn,AlexRobson/scikit-learn,thilbern/scikit-learn,Vimos/scikit-learn,ahoyosid/scikit-learn,mhue/scikit-learn,h2educ/scikit-learn,xyguo/scikit-learn,OshynSong/scikit-learn,Srisai85/scikit-learn,arahuja/scikit-learn,mattgiguere/scikit-learn,samzhang111/scikit-learn,thilbern/scikit-learn,jmschrei/scikit-learn,clemkoa/scikit-learn,devanshdalal/scikit-learn,khkaminska/scikit-learn,robin-lai/scikit-learn,joshloyal/scikit-learn,ZenDevelopmentSystems/scikit-learn,henrykironde/scikit-learn,scikit-learn/scikit-learn,liyu1990/sklearn,theoryno3/scikit-learn,fbagirov/scikit-learn,rahuldhote/scikit-learn,wzbozon/scikit-learn,depet/scikit-learn,scikit-learn/scikit-learn,saiwing-yeung/scikit-learn,cdegroc/scikit-learn,Fireblend/scikit-learn,ahoyosid/scikit-learn,tdhopper/scikit-learn,ilyes14/scikit-learn,anntzer/scikit-learn,ishanic/scikit-learn,jzt5132/scikit-learn,AIML/scikit-learn,loli/sklearn-ensembletrees,rexshihaoren/scikit-learn,vibhorag/scikit-learn,xavierwu/scikit-learn,ycaihua/scikit-learn,davidgbe/scikit-learn,alvarofierroclavero/scikit-learn,procoder317/scikit-learn,raghavrv/scikit-learn,NunoEdgarGub1/scikit-learn,Clyde-fare/scikit-learn,cwu2011/scikit-learn,mxjl620/scikit-learn,plissonf/scikit-learn,bikong2/scikit-learn,shenzebang/scikit-learn,shahankhatch/scikit-learn,nhejazi/scikit-learn,massm
utual/scikit-learn,mugizico/scikit-learn,HolgerPeters/scikit-learn,vivekmishra1991/scikit-learn,harshaneelhg/scikit-learn,fzalkow/scikit-learn,lucidfrontier45/scikit-learn,vibhorag/scikit-learn,jseabold/scikit-learn,ChanderG/scikit-learn,xiaoxiamii/scikit-learn,Lawrence-Liu/scikit-learn,moutai/scikit-learn,pkruskal/scikit-learn,nhejazi/scikit-learn,JosmanPS/scikit-learn,ky822/scikit-learn,jpautom/scikit-learn,IndraVikas/scikit-learn,yonglehou/scikit-learn,hsuantien/scikit-learn,ilo10/scikit-learn,adamgreenhall/scikit-learn,lesteve/scikit-learn,mwv/scikit-learn,kagayakidan/scikit-learn,sgenoud/scikit-learn,themrmax/scikit-learn,eickenberg/scikit-learn,MohammedWasim/scikit-learn,bhargav/scikit-learn,zorojean/scikit-learn,ZENGXH/scikit-learn,JPFrancoia/scikit-learn,mattgiguere/scikit-learn,cainiaocome/scikit-learn,tawsifkhan/scikit-learn,justincassidy/scikit-learn,sarahgrogan/scikit-learn,massmutual/scikit-learn,fabioticconi/scikit-learn,jereze/scikit-learn,PatrickChrist/scikit-learn,joernhees/scikit-learn,mattilyra/scikit-learn,ivannz/scikit-learn,ssaeger/scikit-learn,0x0all/scikit-learn,ominux/scikit-learn,aminert/scikit-learn,kagayakidan/scikit-learn,stylianos-kampakis/scikit-learn,idlead/scikit-learn,ilyes14/scikit-learn,poryfly/scikit-learn,Achuth17/scikit-learn,aminert/scikit-learn,dhruv13J/scikit-learn,hsiaoyi0504/scikit-learn,larsmans/scikit-learn,hsiaoyi0504/scikit-learn,tdhopper/scikit-learn,lazywei/scikit-learn,mayblue9/scikit-learn,MartinDelzant/scikit-learn,fredhusser/scikit-learn,kmike/scikit-learn,Sentient07/scikit-learn,Clyde-fare/scikit-learn,pianomania/scikit-learn,yask123/scikit-learn,Myasuka/scikit-learn,vybstat/scikit-learn,xzh86/scikit-learn,fabianp/scikit-learn,andrewnc/scikit-learn,abhishekkrthakur/scikit-learn,LiaoPan/scikit-learn,fengzhyuan/scikit-learn,ilo10/scikit-learn,Titan-C/scikit-learn,nmayorov/scikit-learn,anurag313/scikit-learn,zhenv5/scikit-learn,shusenl/scikit-learn,sinhrks/scikit-learn,jm-begon/scikit-learn,henridwyer/scikit-learn,beepee14/scikit-learn,lazywei/scikit-learn,bigdataelephants/scikit-learn,Akshay0724/scikit-learn,ldirer/scikit-learn,jm-begon/scikit-learn,trungnt13/scikit-learn,h2educ/scikit-learn,xuewei4d/scikit-learn,DSLituiev/scikit-learn,fredhusser/scikit-learn,hitszxp/scikit-learn,3manuek/scikit-learn,cauchycui/scikit-learn,nrhine1/scikit-learn,glennq/scikit-learn,0x0all/scikit-learn,arjoly/scikit-learn,manashmndl/scikit-learn,IshankGulati/scikit-learn,massmutual/scikit-learn,jorik041/scikit-learn,rvraghav93/scikit-learn,mjgrav2001/scikit-learn,mjudsp/Tsallis,bikong2/scikit-learn,cauchycui/scikit-learn,fredhusser/scikit-learn,ningchi/scikit-learn,nvoron23/scikit-learn,aflaxman/scikit-learn,elkingtonmcb/scikit-learn,mattgiguere/scikit-learn,Lawrence-Liu/scikit-learn,HolgerPeters/scikit-learn,nikitasingh981/scikit-learn,Myasuka/scikit-learn,eg-zhang/scikit-learn,jaidevd/scikit-learn,xiaoxiamii/scikit-learn,vinayak-mehta/scikit-learn,LohithBlaze/scikit-learn,hainm/scikit-learn,MohammedWasim/scikit-learn,AlexandreAbraham/scikit-learn,roxyboy/scikit-learn,amueller/scikit-learn,spallavolu/scikit-learn,AlexRobson/scikit-learn,eg-zhang/scikit-learn,anirudhjayaraman/scikit-learn,Windy-Ground/scikit-learn,jereze/scikit-learn,vybstat/scikit-learn,schets/scikit-learn,bnaul/scikit-learn,victorbergelin/scikit-learn,appapantula/scikit-learn,manhhomienbienthuy/scikit-learn,bthirion/scikit-learn,nhejazi/scikit-learn,anirudhjayaraman/scikit-learn,fbagirov/scikit-learn,vybstat/scikit-learn,tosolveit/scikit-learn,hdmetor/scikit-learn,IshankGulati/scikit-learn
,jblackburne/scikit-learn,loli/sklearn-ensembletrees,vybstat/scikit-learn,shangwuhencc/scikit-learn,BiaDarkia/scikit-learn,jblackburne/scikit-learn,vortex-ape/scikit-learn,shikhardb/scikit-learn,PrashntS/scikit-learn,espg/scikit-learn,anirudhjayaraman/scikit-learn,Vimos/scikit-learn,fzalkow/scikit-learn,zorroblue/scikit-learn,ilo10/scikit-learn,russel1237/scikit-learn,untom/scikit-learn,petosegan/scikit-learn,heli522/scikit-learn,lenovor/scikit-learn,nomadcube/scikit-learn,idlead/scikit-learn,evgchz/scikit-learn,q1ang/scikit-learn,xubenben/scikit-learn,pv/scikit-learn,bikong2/scikit-learn,liangz0707/scikit-learn,mjgrav2001/scikit-learn,hdmetor/scikit-learn,NelisVerhoef/scikit-learn,yanlend/scikit-learn,Aasmi/scikit-learn,ngoix/OCRF,ChanChiChoi/scikit-learn,giorgiop/scikit-learn,ZENGXH/scikit-learn,ankurankan/scikit-learn,loli/sklearn-ensembletrees,gotomypc/scikit-learn,kylerbrown/scikit-learn,AnasGhrab/scikit-learn,lin-credible/scikit-learn,costypetrisor/scikit-learn,shahankhatch/scikit-learn,JsNoNo/scikit-learn,ishanic/scikit-learn,themrmax/scikit-learn,yanlend/scikit-learn,cdegroc/scikit-learn,billy-inn/scikit-learn,aewhatley/scikit-learn,aetilley/scikit-learn,glemaitre/scikit-learn,equialgo/scikit-learn,poryfly/scikit-learn,lbishal/scikit-learn,wlamond/scikit-learn,wazeerzulfikar/scikit-learn,shusenl/scikit-learn,xavierwu/scikit-learn,deepesch/scikit-learn,AnasGhrab/scikit-learn,Sentient07/scikit-learn,ngoix/OCRF,dingocuster/scikit-learn,hrjn/scikit-learn,jayflo/scikit-learn,Jimmy-Morzaria/scikit-learn,jorik041/scikit-learn,arjoly/scikit-learn,pompiduskus/scikit-learn,spallavolu/scikit-learn,djgagne/scikit-learn,wanggang3333/scikit-learn,h2educ/scikit-learn,sergeyf/scikit-learn,justincassidy/scikit-learn,madjelan/scikit-learn,pkruskal/scikit-learn,sonnyhu/scikit-learn,quheng/scikit-learn,frank-tancf/scikit-learn,CforED/Machine-Learning,kylerbrown/scikit-learn,tomlof/scikit-learn,HolgerPeters/scikit-learn,hugobowne/scikit-learn,betatim/scikit-learn,appapantula/scikit-learn,IshankGulati/scikit-learn,aewhatley/scikit-learn,eickenberg/scikit-learn,nesterione/scikit-learn,pianomania/scikit-learn,cwu2011/scikit-learn,zorojean/scikit-learn,PrashntS/scikit-learn,mikebenfield/scikit-learn,xwolf12/scikit-learn,Adai0808/scikit-learn,arabenjamin/scikit-learn,alexsavio/scikit-learn,akionakamura/scikit-learn,idlead/scikit-learn,Obus/scikit-learn,mhdella/scikit-learn,luo66/scikit-learn,tmhm/scikit-learn,qifeigit/scikit-learn,rishikksh20/scikit-learn,rajat1994/scikit-learn,olologin/scikit-learn,sumspr/scikit-learn,mlyundin/scikit-learn,AlexandreAbraham/scikit-learn,theoryno3/scikit-learn,henridwyer/scikit-learn,ishanic/scikit-learn,mxjl620/scikit-learn,Achuth17/scikit-learn,giorgiop/scikit-learn,UNR-AERIAL/scikit-learn,kmike/scikit-learn,tawsifkhan/scikit-learn,nmayorov/scikit-learn,mhue/scikit-learn,siutanwong/scikit-learn,mayblue9/scikit-learn,akionakamura/scikit-learn,RachitKansal/scikit-learn,rahul-c1/scikit-learn,jpautom/scikit-learn,jakirkham/scikit-learn,zaxtax/scikit-learn,voxlol/scikit-learn,belltailjp/scikit-learn,trankmichael/scikit-learn,joshloyal/scikit-learn,untom/scikit-learn,abhishekgahlot/scikit-learn,ssaeger/scikit-learn,ngoix/OCRF,yanlend/scikit-learn,mhdella/scikit-learn,florian-f/sklearn,russel1237/scikit-learn,wlamond/scikit-learn,henrykironde/scikit-learn,aetilley/scikit-learn,vivekmishra1991/scikit-learn,mfjb/scikit-learn,dingocuster/scikit-learn,smartscheduling/scikit-learn-categorical-tree,MechCoder/scikit-learn,jmetzen/scikit-learn,gclenaghan/scikit-learn,Djabbz/scikit-learn,c
lemkoa/scikit-learn,huobaowangxi/scikit-learn,OshynSong/scikit-learn,lenovor/scikit-learn,jorge2703/scikit-learn,poryfly/scikit-learn,beepee14/scikit-learn,mehdidc/scikit-learn,dsquareindia/scikit-learn,hdmetor/scikit-learn,rsivapr/scikit-learn,thientu/scikit-learn,jjx02230808/project0223,massmutual/scikit-learn,kylerbrown/scikit-learn,iismd17/scikit-learn,shikhardb/scikit-learn,xwolf12/scikit-learn,xyguo/scikit-learn,JeanKossaifi/scikit-learn,f3r/scikit-learn,bnaul/scikit-learn,466152112/scikit-learn,abhishekkrthakur/scikit-learn,sarahgrogan/scikit-learn,JsNoNo/scikit-learn,jkarnows/scikit-learn,tosolveit/scikit-learn,bnaul/scikit-learn,alvarofierroclavero/scikit-learn,justincassidy/scikit-learn,procoder317/scikit-learn,moutai/scikit-learn,466152112/scikit-learn,amueller/scikit-learn,rsivapr/scikit-learn,Vimos/scikit-learn,Obus/scikit-learn,luo66/scikit-learn,JeanKossaifi/scikit-learn,kevin-intel/scikit-learn,toastedcornflakes/scikit-learn,MartinSavc/scikit-learn,mrshu/scikit-learn,huzq/scikit-learn,hitszxp/scikit-learn,anurag313/scikit-learn,ningchi/scikit-learn,carrillo/scikit-learn,anntzer/scikit-learn,alexsavio/scikit-learn,sanketloke/scikit-learn,glouppe/scikit-learn,joernhees/scikit-learn,potash/scikit-learn,marcocaccin/scikit-learn,fabianp/scikit-learn,fyffyt/scikit-learn,wlamond/scikit-learn,zuku1985/scikit-learn,fabianp/scikit-learn,IssamLaradji/scikit-learn,herilalaina/scikit-learn,andrewnc/scikit-learn,mayblue9/scikit-learn,Jimmy-Morzaria/scikit-learn,krez13/scikit-learn,rahul-c1/scikit-learn,ishanic/scikit-learn,sgenoud/scikit-learn,cl4rke/scikit-learn,potash/scikit-learn,ilyes14/scikit-learn,MatthieuBizien/scikit-learn,tmhm/scikit-learn,liyu1990/sklearn,nesterione/scikit-learn,tawsifkhan/scikit-learn,anurag313/scikit-learn,shyamalschandra/scikit-learn,sonnyhu/scikit-learn,jkarnows/scikit-learn,aetilley/scikit-learn,mattilyra/scikit-learn,victorbergelin/scikit-learn,xiaoxiamii/scikit-learn,saiwing-yeung/scikit-learn,shikhardb/scikit-learn,depet/scikit-learn,bhargav/scikit-learn,alexeyum/scikit-learn,dingocuster/scikit-learn,ankurankan/scikit-learn,zhenv5/scikit-learn,nelson-liu/scikit-learn,yyjiang/scikit-learn,RayMick/scikit-learn,dhruv13J/scikit-learn,NelisVerhoef/scikit-learn,zihua/scikit-learn,wazeerzulfikar/scikit-learn,JsNoNo/scikit-learn,Clyde-fare/scikit-learn,pkruskal/scikit-learn,rrohan/scikit-learn,f3r/scikit-learn,hlin117/scikit-learn,giorgiop/scikit-learn,ltiao/scikit-learn,MechCoder/scikit-learn,samzhang111/scikit-learn,rexshihaoren/scikit-learn,smartscheduling/scikit-learn-categorical-tree,jseabold/scikit-learn,arjoly/scikit-learn,MatthieuBizien/scikit-learn,jakirkham/scikit-learn,huzq/scikit-learn,kmike/scikit-learn,jblackburne/scikit-learn,akionakamura/scikit-learn,OshynSong/scikit-learn,AlexanderFabisch/scikit-learn,michigraber/scikit-learn,0x0all/scikit-learn,plissonf/scikit-learn,fredhusser/scikit-learn,nvoron23/scikit-learn,raghavrv/scikit-learn,maheshakya/scikit-learn,icdishb/scikit-learn,Akshay0724/scikit-learn,pompiduskus/scikit-learn,jayflo/scikit-learn,xwolf12/scikit-learn,ashhher3/scikit-learn,ndingwall/scikit-learn,fabioticconi/scikit-learn,liangz0707/scikit-learn,pratapvardhan/scikit-learn,chrisburr/scikit-learn,huobaowangxi/scikit-learn,0x0all/scikit-learn,cybernet14/scikit-learn,imaculate/scikit-learn,kashif/scikit-learn,justincassidy/scikit-learn,glennq/scikit-learn,rsivapr/scikit-learn,rishikksh20/scikit-learn,belltailjp/scikit-learn,jjx02230808/project0223,NunoEdgarGub1/scikit-learn,B3AU/waveTree,dsquareindia/scikit-learn,wazeerzulfikar/scikit-
learn,petosegan/scikit-learn,deepesch/scikit-learn,devanshdalal/scikit-learn,mojoboss/scikit-learn,Lawrence-Liu/scikit-learn,arahuja/scikit-learn,schets/scikit-learn,bthirion/scikit-learn,stylianos-kampakis/scikit-learn,ankurankan/scikit-learn,scikit-learn/scikit-learn,dingocuster/scikit-learn,BiaDarkia/scikit-learn,ZENGXH/scikit-learn,vshtanko/scikit-learn,ephes/scikit-learn,dsquareindia/scikit-learn,jorik041/scikit-learn,mhue/scikit-learn,lucidfrontier45/scikit-learn,loli/semisupervisedforests,wanggang3333/scikit-learn,yanlend/scikit-learn,0asa/scikit-learn,ldirer/scikit-learn,xuewei4d/scikit-learn,rajat1994/scikit-learn,wzbozon/scikit-learn,AIML/scikit-learn,xyguo/scikit-learn,mhdella/scikit-learn,RomainBrault/scikit-learn,thientu/scikit-learn,0asa/scikit-learn,tomlof/scikit-learn,liangz0707/scikit-learn,xubenben/scikit-learn,lesteve/scikit-learn,nvoron23/scikit-learn,raghavrv/scikit-learn,pnedunuri/scikit-learn,mjudsp/Tsallis,luo66/scikit-learn,yask123/scikit-learn,dsullivan7/scikit-learn,shenzebang/scikit-learn,waterponey/scikit-learn,walterreade/scikit-learn,petosegan/scikit-learn,yask123/scikit-learn,pnedunuri/scikit-learn,samuel1208/scikit-learn,betatim/scikit-learn,kevin-intel/scikit-learn,icdishb/scikit-learn,alexeyum/scikit-learn,JPFrancoia/scikit-learn,kaichogami/scikit-learn,arahuja/scikit-learn,Sentient07/scikit-learn,ZENGXH/scikit-learn,gclenaghan/scikit-learn,ycaihua/scikit-learn,jaidevd/scikit-learn,adamgreenhall/scikit-learn,jayflo/scikit-learn,krez13/scikit-learn,CVML/scikit-learn,spallavolu/scikit-learn,joernhees/scikit-learn,fyffyt/scikit-learn,Nyker510/scikit-learn,raghavrv/scikit-learn,nomadcube/scikit-learn,jakobworldpeace/scikit-learn,3manuek/scikit-learn,larsmans/scikit-learn,evgchz/scikit-learn,heli522/scikit-learn,pv/scikit-learn,jorge2703/scikit-learn,altairpearl/scikit-learn,potash/scikit-learn,rexshihaoren/scikit-learn,trungnt13/scikit-learn,rohanp/scikit-learn,qifeigit/scikit-learn,costypetrisor/scikit-learn,heli522/scikit-learn,jseabold/scikit-learn,robbymeals/scikit-learn,CVML/scikit-learn,DonBeo/scikit-learn,iismd17/scikit-learn,equialgo/scikit-learn,dsquareindia/scikit-learn,gotomypc/scikit-learn,glemaitre/scikit-learn,AlexanderFabisch/scikit-learn,xuewei4d/scikit-learn,maheshakya/scikit-learn,amueller/scikit-learn,sonnyhu/scikit-learn,simon-pepin/scikit-learn,mfjb/scikit-learn,djgagne/scikit-learn,scikit-learn/scikit-learn,Nyker510/scikit-learn,mwv/scikit-learn,nikitasingh981/scikit-learn,ephes/scikit-learn,mojoboss/scikit-learn,glennq/scikit-learn,mojoboss/scikit-learn,glouppe/scikit-learn,michigraber/scikit-learn,0asa/scikit-learn,michigraber/scikit-learn,pythonvietnam/scikit-learn,r-mart/scikit-learn,cwu2011/scikit-learn,JPFrancoia/scikit-learn,michigraber/scikit-learn,xyguo/scikit-learn,zorroblue/scikit-learn,pratapvardhan/scikit-learn,etkirsch/scikit-learn,ChanderG/scikit-learn,vigilv/scikit-learn,phdowling/scikit-learn,florian-f/sklearn,MartinSavc/scikit-learn,ZenDevelopmentSystems/scikit-learn,gclenaghan/scikit-learn,UNR-AERIAL/scikit-learn,Windy-Ground/scikit-learn,joshloyal/scikit-learn,kmike/scikit-learn,nmayorov/scikit-learn,abhishekgahlot/scikit-learn,herilalaina/scikit-learn,xavierwu/scikit-learn,ngoix/OCRF,eickenberg/scikit-learn,vortex-ape/scikit-learn,jjx02230808/project0223,procoder317/scikit-learn,walterreade/scikit-learn,equialgo/scikit-learn,rrohan/scikit-learn,rsivapr/scikit-learn,jzt5132/scikit-learn,IndraVikas/scikit-learn,xubenben/scikit-learn,chrsrds/scikit-learn,fzalkow/scikit-learn,mehdidc/scikit-learn,pnedunuri/scikit-learn,mx
jl620/scikit-learn,jorge2703/scikit-learn,madjelan/scikit-learn,mlyundin/scikit-learn,wazeerzulfikar/scikit-learn,liyu1990/sklearn,Srisai85/scikit-learn,btabibian/scikit-learn,zaxtax/scikit-learn,petosegan/scikit-learn,phdowling/scikit-learn,qifeigit/scikit-learn,cdegroc/scikit-learn,Barmaley-exe/scikit-learn,belltailjp/scikit-learn,mjudsp/Tsallis,RomainBrault/scikit-learn,lesteve/scikit-learn,r-mart/scikit-learn,ogrisel/scikit-learn,elkingtonmcb/scikit-learn,ClimbsRocks/scikit-learn,ominux/scikit-learn,beepee14/scikit-learn,rahul-c1/scikit-learn,ElDeveloper/scikit-learn,eg-zhang/scikit-learn,shyamalschandra/scikit-learn,fengzhyuan/scikit-learn,zorroblue/scikit-learn,Lawrence-Liu/scikit-learn,lucidfrontier45/scikit-learn,MechCoder/scikit-learn,moutai/scikit-learn,CforED/Machine-Learning,espg/scikit-learn,nelson-liu/scikit-learn,mattilyra/scikit-learn,YinongLong/scikit-learn,abimannans/scikit-learn,h2educ/scikit-learn,anurag313/scikit-learn,marcocaccin/scikit-learn,kjung/scikit-learn,appapantula/scikit-learn,herilalaina/scikit-learn,ivannz/scikit-learn,dsullivan7/scikit-learn,PatrickOReilly/scikit-learn,UNR-AERIAL/scikit-learn,costypetrisor/scikit-learn,Sentient07/scikit-learn,samuel1208/scikit-learn,voxlol/scikit-learn,jmschrei/scikit-learn,AIML/scikit-learn,simon-pepin/scikit-learn,nikitasingh981/scikit-learn,mblondel/scikit-learn,mhdella/scikit-learn,davidgbe/scikit-learn,MartinDelzant/scikit-learn,frank-tancf/scikit-learn,kjung/scikit-learn,jereze/scikit-learn,cwu2011/scikit-learn,maheshakya/scikit-learn,djgagne/scikit-learn,LiaoPan/scikit-learn,arjoly/scikit-learn,bikong2/scikit-learn,qifeigit/scikit-learn,dhruv13J/scikit-learn,wzbozon/scikit-learn,billy-inn/scikit-learn,treycausey/scikit-learn,pypot/scikit-learn,mblondel/scikit-learn,tdhopper/scikit-learn,q1ang/scikit-learn,q1ang/scikit-learn,thientu/scikit-learn,tmhm/scikit-learn,eickenberg/scikit-learn,depet/scikit-learn,jmetzen/scikit-learn,ssaeger/scikit-learn,rajat1994/scikit-learn,ChanChiChoi/scikit-learn,Akshay0724/scikit-learn,rvraghav93/scikit-learn,procoder317/scikit-learn,ChanChiChoi/scikit-learn,mwv/scikit-learn,elkingtonmcb/scikit-learn,abimannans/scikit-learn,henridwyer/scikit-learn,ahoyosid/scikit-learn,mugizico/scikit-learn,olologin/scikit-learn,ClimbsRocks/scikit-learn,carrillo/scikit-learn,mfjb/scikit-learn,icdishb/scikit-learn,joshloyal/scikit-learn,ltiao/scikit-learn,rahuldhote/scikit-learn,abhishekgahlot/scikit-learn,huzq/scikit-learn,Jimmy-Morzaria/scikit-learn,B3AU/waveTree,nikitasingh981/scikit-learn,toastedcornflakes/scikit-learn,robin-lai/scikit-learn,mblondel/scikit-learn,zuku1985/scikit-learn,yask123/scikit-learn,cybernet14/scikit-learn,yyjiang/scikit-learn,herilalaina/scikit-learn,yyjiang/scikit-learn,fengzhyuan/scikit-learn,mehdidc/scikit-learn,macks22/scikit-learn,nmayorov/scikit-learn,vermouthmjl/scikit-learn,Windy-Ground/scikit-learn,chrsrds/scikit-learn,zihua/scikit-learn,mjudsp/Tsallis,adamgreenhall/scikit-learn,samzhang111/scikit-learn,imaculate/scikit-learn,manhhomienbienthuy/scikit-learn,jzt5132/scikit-learn,vigilv/scikit-learn,btabibian/scikit-learn,quheng/scikit-learn,rexshihaoren/scikit-learn,lenovor/scikit-learn,mattilyra/scikit-learn,meduz/scikit-learn,jseabold/scikit-learn,fabioticconi/scikit-learn,hainm/scikit-learn,zaxtax/scikit-learn,vermouthmjl/scikit-learn,samuel1208/scikit-learn,glemaitre/scikit-learn,henridwyer/scikit-learn,toastedcornflakes/scikit-learn,cainiaocome/scikit-learn,aminert/scikit-learn,Achuth17/scikit-learn,ltiao/scikit-learn,zuku1985/scikit-learn,liberatorqjw/scikit-lear
n,shyamalschandra/scikit-learn,jakobworldpeace/scikit-learn,ZenDevelopmentSystems/scikit-learn,hsuantien/scikit-learn,RPGOne/scikit-learn,kaichogami/scikit-learn,trungnt13/scikit-learn,Akshay0724/scikit-learn,costypetrisor/scikit-learn,Garrett-R/scikit-learn,robbymeals/scikit-learn,bthirion/scikit-learn,alexeyum/scikit-learn,PatrickOReilly/scikit-learn,ChanChiChoi/scikit-learn,RPGOne/scikit-learn,carrillo/scikit-learn,sonnyhu/scikit-learn,smartscheduling/scikit-learn-categorical-tree,cauchycui/scikit-learn,manashmndl/scikit-learn,untom/scikit-learn,aflaxman/scikit-learn,Nyker510/scikit-learn,Fireblend/scikit-learn,kashif/scikit-learn,alexsavio/scikit-learn,DonBeo/scikit-learn,ElDeveloper/scikit-learn,pompiduskus/scikit-learn,466152112/scikit-learn,bhargav/scikit-learn,aabadie/scikit-learn,0asa/scikit-learn,jorge2703/scikit-learn,nelson-liu/scikit-learn,plissonf/scikit-learn,JeanKossaifi/scikit-learn,sinhrks/scikit-learn,pompiduskus/scikit-learn,betatim/scikit-learn,mwv/scikit-learn,frank-tancf/scikit-learn,voxlol/scikit-learn,ky822/scikit-learn,ogrisel/scikit-learn,OshynSong/scikit-learn,sinhrks/scikit-learn,Titan-C/scikit-learn,ky822/scikit-learn,Windy-Ground/scikit-learn,shahankhatch/scikit-learn,jkarnows/scikit-learn,JPFrancoia/scikit-learn,kashif/scikit-learn,andrewnc/scikit-learn,kevin-intel/scikit-learn,MartinSavc/scikit-learn,YinongLong/scikit-learn,jmschrei/scikit-learn,ltiao/scikit-learn,aabadie/scikit-learn,aabadie/scikit-learn,marcocaccin/scikit-learn,roxyboy/scikit-learn,liyu1990/sklearn,terkkila/scikit-learn,LiaoPan/scikit-learn,IndraVikas/scikit-learn,hlin117/scikit-learn,pypot/scikit-learn,LohithBlaze/scikit-learn,anirudhjayaraman/scikit-learn,deepesch/scikit-learn,manashmndl/scikit-learn,fyffyt/scikit-learn,florian-f/sklearn,roxyboy/scikit-learn,ominux/scikit-learn,djgagne/scikit-learn,MohammedWasim/scikit-learn,rvraghav93/scikit-learn,huobaowangxi/scikit-learn,pypot/scikit-learn,mjgrav2001/scikit-learn,treycausey/scikit-learn,Achuth17/scikit-learn,ClimbsRocks/scikit-learn,vshtanko/scikit-learn,voxlol/scikit-learn,TomDLT/scikit-learn,mjudsp/Tsallis,gclenaghan/scikit-learn,Fireblend/scikit-learn,meduz/scikit-learn,ngoix/OCRF,jmetzen/scikit-learn,billy-inn/scikit-learn,hsiaoyi0504/scikit-learn,sarahgrogan/scikit-learn,lucidfrontier45/scikit-learn,wlamond/scikit-learn,kjung/scikit-learn,mjgrav2001/scikit-learn,vermouthmjl/scikit-learn,zorojean/scikit-learn,mrshu/scikit-learn,alvarofierroclavero/scikit-learn,phdowling/scikit-learn,meduz/scikit-learn,evgchz/scikit-learn,Aasmi/scikit-learn,Srisai85/scikit-learn,terkkila/scikit-learn,r-mart/scikit-learn,henrykironde/scikit-learn,shusenl/scikit-learn,ky822/scikit-learn,khkaminska/scikit-learn,madjelan/scikit-learn,jakirkham/scikit-learn,pianomania/scikit-learn,manhhomienbienthuy/scikit-learn,vibhorag/scikit-learn,heli522/scikit-learn,hainm/scikit-learn,liberatorqjw/scikit-learn,yunfeilu/scikit-learn,jkarnows/scikit-learn,ngoix/OCRF,arabenjamin/scikit-learn,ycaihua/scikit-learn,elkingtonmcb/scikit-learn,zuku1985/scikit-learn,btabibian/scikit-learn,macks22/scikit-learn,thilbern/scikit-learn,deepesch/scikit-learn,iismd17/scikit-learn,fzalkow/scikit-learn,jmetzen/scikit-learn,ashhher3/scikit-learn,pv/scikit-learn,MartinDelzant/scikit-learn,hsuantien/scikit-learn,poryfly/scikit-learn,robin-lai/scikit-learn,mrshu/scikit-learn,RachitKansal/scikit-learn,shenzebang/scikit-learn,pnedunuri/scikit-learn,fbagirov/scikit-learn,vortex-ape/scikit-learn,DSLituiev/scikit-learn,devanshdalal/scikit-learn,RachitKansal/scikit-learn,altairpearl/scikit-lea
rn,Djabbz/scikit-learn,shenzebang/scikit-learn,imaculate/scikit-learn,PatrickChrist/scikit-learn,chrisburr/scikit-learn,nomadcube/scikit-learn,murali-munna/scikit-learn,madjelan/scikit-learn,gotomypc/scikit-learn,zihua/scikit-learn,theoryno3/scikit-learn,466152112/scikit-learn,victorbergelin/scikit-learn,AlexanderFabisch/scikit-learn,ominux/scikit-learn,IssamLaradji/scikit-learn,hrjn/scikit-learn,Myasuka/scikit-learn,lazywei/scikit-learn,rrohan/scikit-learn,kylerbrown/scikit-learn,lin-credible/scikit-learn,Aasmi/scikit-learn,AnasGhrab/scikit-learn,RomainBrault/scikit-learn,olologin/scikit-learn,sergeyf/scikit-learn,rohanp/scikit-learn,wanggang3333/scikit-learn,murali-munna/scikit-learn,ephes/scikit-learn,xzh86/scikit-learn,AlexandreAbraham/scikit-learn,dhruv13J/scikit-learn,henrykironde/scikit-learn,rrohan/scikit-learn,hugobowne/scikit-learn,Titan-C/scikit-learn,moutai/scikit-learn,NelisVerhoef/scikit-learn,treycausey/scikit-learn,Adai0808/scikit-learn,mlyundin/scikit-learn,Obus/scikit-learn,amueller/scikit-learn,alexsavio/scikit-learn,bhargav/scikit-learn,hainm/scikit-learn,rahul-c1/scikit-learn,lenovor/scikit-learn,potash/scikit-learn,abimannans/scikit-learn,nrhine1/scikit-learn,hrjn/scikit-learn,rvraghav93/scikit-learn,devanshdalal/scikit-learn,vinayak-mehta/scikit-learn,manhhomienbienthuy/scikit-learn,hdmetor/scikit-learn,kaichogami/scikit-learn,sumspr/scikit-learn,JosmanPS/scikit-learn,RachitKansal/scikit-learn,lucidfrontier45/scikit-learn,loli/semisupervisedforests,shahankhatch/scikit-learn,untom/scikit-learn,RayMick/scikit-learn,vshtanko/scikit-learn,anntzer/scikit-learn,saiwing-yeung/scikit-learn,xuewei4d/scikit-learn,Clyde-fare/scikit-learn,Vimos/scikit-learn,B3AU/waveTree,bthirion/scikit-learn,yonglehou/scikit-learn,jblackburne/scikit-learn,alvarofierroclavero/scikit-learn,pythonvietnam/scikit-learn,terkkila/scikit-learn,JeanKossaifi/scikit-learn
--- +++ @@ -1,10 +1,10 @@ -# Last Change: Sat Jun 09 07:00 PM 2007 J +# Last Change: Sat Jun 09 08:00 PM 2007 J #======================================================== # Constants used throughout the module (def args, etc...) #======================================================== # This is the default dimension for representing confidence ellipses -DEF_VIS_DIM = [0, 1] +DEF_VIS_DIM = (0, 1) DEF_ELL_NP = 100 DEF_LEVEL = 0.39 #=====================================================================
726beb71b45c7320b4e2e883f246d389709efe19
run_tracker.py
run_tracker.py
import sys from cloudtracker import main def run_tracker(input_dir): print( " Running the cloud-tracking algorithm... " ) print( " Input dir: \"" + input_dir + "\" \n" ) main.main(input_dir) print( "\n Entrainment analysis completed " ) if __name__ == '__main__': if len(sys.argv) == 1: run_tracker("./data/") elif len(sys.argv) == 2: run_tracker(sys.argv[1]) else: print( " Invalid input " )
import sys, json from cloudtracker import main as tracker_main def run_tracker(input): print( " Running the cloud-tracking algorithm... " ) print( " Input dir: \"" + input + "\" \n" ) # Read .json configuration file with open('model_config.json', 'r') as json_file: config = json.load(json_file) tracker_main.main(input, config) print( "\n Entrainment analysis completed " ) if __name__ == '__main__': if len(sys.argv) == 1: run_tracker("./data/") elif len(sys.argv) == 2: run_tracker(sys.argv[1]) else: print( " Invalid input " )
Read .json file from starter
Read .json file from starter
Python
bsd-2-clause
lorenghoh/loh_tracker
--- +++ @@ -1,11 +1,16 @@ -import sys -from cloudtracker import main +import sys, json + +from cloudtracker import main as tracker_main -def run_tracker(input_dir): +def run_tracker(input): print( " Running the cloud-tracking algorithm... " ) - print( " Input dir: \"" + input_dir + "\" \n" ) + print( " Input dir: \"" + input + "\" \n" ) - main.main(input_dir) + # Read .json configuration file + with open('model_config.json', 'r') as json_file: + config = json.load(json_file) + + tracker_main.main(input, config) print( "\n Entrainment analysis completed " )
116708c5458b68110e75a593a0edaa0298bb5586
cyder/core/fields.py
cyder/core/fields.py
from django.db.models import CharField from django.core.exceptions import ValidationError from cyder.cydhcp.validation import validate_mac class MacAddrField(CharField): """A general purpose MAC address field This field holds a MAC address. clean() removes colons and hyphens from the field value, raising an exception if the value is invalid or empty. Arguments: dhcp_enabled (string): The name of another attribute (possibly a field) in the model that holds a boolean specifying whether to validate this MacAddrField; if not specified, always validate. """ def __init__(self, *args, **kwargs): if 'dhcp_enabled' in kwargs: self.dhcp_enabled = kwargs.pop('dhcp_enabled') else: self.dhcp_enabled = None # always validate for option in ['max_length', 'blank']: if option in kwargs: raise Exception('You cannot specify the {0} option.' .format(option)) kwargs['max_length'] = 12 kwargs['blank'] = True super(MacAddrField, self).__init__(*args, **kwargs) def clean(self, value, model_instance): # [ always validate ] [ DHCP is enabled ] if not self.dhcp_enabled or getattr(model_instance, self.dhcp_enabled): if value == '': raise ValidationError( "This field is required when DHCP is enabled") value = value.lower().replace(':', '') validate_mac(value) value = super(CharField, self).clean(value, model_instance) return value
from django.db.models import CharField, NOT_PROVIDED from django.core.exceptions import ValidationError from south.modelsinspector import add_introspection_rules from cyder.cydhcp.validation import validate_mac class MacAddrField(CharField): """A general purpose MAC address field This field holds a MAC address. clean() removes colons and hyphens from the field value, raising an exception if the value is invalid or empty. Arguments: dhcp_enabled (string): The name of another attribute (possibly a field) in the model that holds a boolean specifying whether to validate this MacAddrField; if not specified, always validate. """ def __init__(self, *args, **kwargs): if 'dhcp_enabled' in kwargs: self.dhcp_enabled = kwargs.pop('dhcp_enabled') else: self.dhcp_enabled = None # always validate kwargs['max_length'] = 12 kwargs['blank'] = True super(MacAddrField, self).__init__(*args, **kwargs) def clean(self, value, model_instance): # [ always validate ] [ DHCP is enabled ] if not self.dhcp_enabled or getattr(model_instance, self.dhcp_enabled): if value == '': raise ValidationError( "This field is required when DHCP is enabled") value = value.lower().replace(':', '') validate_mac(value) value = super(CharField, self).clean(value, model_instance) return value add_introspection_rules([ ( [MacAddrField], # model [], # args {'dhcp_enabled': ('dhcp_enabled', {})}, # kwargs ) ], [r'^cyder\.core\.fields\.MacAddrField'])
Add introspection rule; prevent South weirdness
Add introspection rule; prevent South weirdness
Python
bsd-3-clause
drkitty/cyder,murrown/cyder,OSU-Net/cyder,akeym/cyder,murrown/cyder,murrown/cyder,zeeman/cyder,drkitty/cyder,OSU-Net/cyder,drkitty/cyder,zeeman/cyder,akeym/cyder,OSU-Net/cyder,akeym/cyder,akeym/cyder,OSU-Net/cyder,drkitty/cyder,zeeman/cyder,murrown/cyder,zeeman/cyder
--- +++ @@ -1,5 +1,6 @@ -from django.db.models import CharField +from django.db.models import CharField, NOT_PROVIDED from django.core.exceptions import ValidationError +from south.modelsinspector import add_introspection_rules from cyder.cydhcp.validation import validate_mac @@ -23,11 +24,6 @@ else: self.dhcp_enabled = None # always validate - for option in ['max_length', 'blank']: - if option in kwargs: - raise Exception('You cannot specify the {0} option.' - .format(option)) - kwargs['max_length'] = 12 kwargs['blank'] = True @@ -44,3 +40,12 @@ value = super(CharField, self).clean(value, model_instance) return value + + +add_introspection_rules([ + ( + [MacAddrField], # model + [], # args + {'dhcp_enabled': ('dhcp_enabled', {})}, # kwargs + ) +], [r'^cyder\.core\.fields\.MacAddrField'])
f2312d1546eb3f6de75cc03a2dabb427a2174e17
examples/sequencealignment.py
examples/sequencealignment.py
# Create sequences to be aligned. from alignment.sequence import Sequence a = Sequence("what a beautiful day".split()) b = Sequence("what a disappointingly bad day".split()) print "Sequence A:", a print "Sequence B:", b print # Create a vocabulary and encode the sequences. from alignment.vocabulary import Vocabulary v = Vocabulary() aEncoded = v.encodeSequence(a) bEncoded = v.encodeSequence(b) print "Encoded A:", aEncoded print "Encoded B:", bEncoded print # Create a scoring and align the sequences using global aligner. from alignment.sequencealigner import SimpleScoring, GlobalSequenceAligner scoring = SimpleScoring(2, -1) aligner = GlobalSequenceAligner(scoring, -2) score, encodeds = aligner.align(aEncoded, bEncoded, backtrace=True) # Iterate over optimal alignments and print them. for encoded in encodeds: alignment = v.decodeSequenceAlignment(encoded) print alignment print "Alignment score:", alignment.score print "Percent identity:", alignment.percentIdentity() print
from alignment.sequence import Sequence from alignment.vocabulary import Vocabulary from alignment.sequencealigner import SimpleScoring, GlobalSequenceAligner # Create sequences to be aligned. a = Sequence("what a beautiful day".split()) b = Sequence("what a disappointingly bad day".split()) print "Sequence A:", a print "Sequence B:", b print # Create a vocabulary and encode the sequences. v = Vocabulary() aEncoded = v.encodeSequence(a) bEncoded = v.encodeSequence(b) print "Encoded A:", aEncoded print "Encoded B:", bEncoded print # Create a scoring and align the sequences using global aligner. scoring = SimpleScoring(2, -1) aligner = GlobalSequenceAligner(scoring, -2) score, encodeds = aligner.align(aEncoded, bEncoded, backtrace=True) # Iterate over optimal alignments and print them. for encoded in encodeds: alignment = v.decodeSequenceAlignment(encoded) print alignment print "Alignment score:", alignment.score print "Percent identity:", alignment.percentIdentity() print
Update the sequence alignment example.
Update the sequence alignment example.
Python
bsd-3-clause
eseraygun/python-entities,eseraygun/python-alignment
--- +++ @@ -1,5 +1,9 @@ +from alignment.sequence import Sequence +from alignment.vocabulary import Vocabulary +from alignment.sequencealigner import SimpleScoring, GlobalSequenceAligner + + # Create sequences to be aligned. -from alignment.sequence import Sequence a = Sequence("what a beautiful day".split()) b = Sequence("what a disappointingly bad day".split()) print "Sequence A:", a @@ -7,7 +11,6 @@ print # Create a vocabulary and encode the sequences. -from alignment.vocabulary import Vocabulary v = Vocabulary() aEncoded = v.encodeSequence(a) bEncoded = v.encodeSequence(b) @@ -16,16 +19,14 @@ print # Create a scoring and align the sequences using global aligner. -from alignment.sequencealigner import SimpleScoring, GlobalSequenceAligner scoring = SimpleScoring(2, -1) aligner = GlobalSequenceAligner(scoring, -2) score, encodeds = aligner.align(aEncoded, bEncoded, backtrace=True) # Iterate over optimal alignments and print them. for encoded in encodeds: - alignment = v.decodeSequenceAlignment(encoded) - print alignment - print "Alignment score:", alignment.score - print "Percent identity:", alignment.percentIdentity() - print - + alignment = v.decodeSequenceAlignment(encoded) + print alignment + print "Alignment score:", alignment.score + print "Percent identity:", alignment.percentIdentity() + print
5b9e168b4a855197b07527c468ef6b60c50ec0c7
avalanche/__init__.py
avalanche/__init__.py
from avalanche import benchmarks from avalanche import evaluation from avalanche import logging from avalanche import models from avalanche import training __version__ = "0.1.0a0" _dataset_add = None def _avdataset_radd(self, other, *args, **kwargs): from avalanche.benchmarks.utils import AvalancheDataset global _dataset_add if isinstance(other, AvalancheDataset): return NotImplemented return _dataset_add(self, other, *args, **kwargs) def _avalanche_monkey_patches(): from torch.utils.data.dataset import Dataset global _dataset_add _dataset_add = Dataset.__add__ Dataset.__add__ = _avdataset_radd _avalanche_monkey_patches()
from avalanche import benchmarks from avalanche import evaluation from avalanche import logging from avalanche import models from avalanche import training __version__ = "0.2.0" _dataset_add = None def _avdataset_radd(self, other, *args, **kwargs): from avalanche.benchmarks.utils import AvalancheDataset global _dataset_add if isinstance(other, AvalancheDataset): return NotImplemented return _dataset_add(self, other, *args, **kwargs) def _avalanche_monkey_patches(): from torch.utils.data.dataset import Dataset global _dataset_add _dataset_add = Dataset.__add__ Dataset.__add__ = _avdataset_radd _avalanche_monkey_patches()
Set package version to 0.2.0
Set package version to 0.2.0
Python
mit
ContinualAI/avalanche,ContinualAI/avalanche
--- +++ @@ -5,7 +5,7 @@ from avalanche import training -__version__ = "0.1.0a0" +__version__ = "0.2.0" _dataset_add = None
d584ccea9fe985fa230c937ee2e6a03ef6b99967
audio_pipeline/util/__init__.py
audio_pipeline/util/__init__.py
from . import Exceptions from . import MBInfo from . import Tag from . import Util from . import format from . import Discogs import re # unknown artist input pattern: class Utilities: unknown_artist_pattern = re.compile(r'unknown artist|^\s*$', flags=re.I) @classmethod def know_artist_name(cls, artist): """ Returns false if 'artist' is "unknown artist" or empty :param artist: :return: """ unknown_artist = not (artist or artist.isspace() or cls.unknown_artist_pattern.search(artist)) return unknown_artist
from . import Exceptions from . import MBInfo from . import Tag from . import Util from . import format from . import Discogs import re # unknown artist input pattern: class Utilities: unknown_artist_pattern = re.compile(r'unknown artist|^\s*$', flags=re.I) @classmethod def know_artist_name(cls, artist): """ Returns false if 'artist' is "unknown artist" or empty :param artist: :return: """ unknown_artist = artist is None or not (artist or artist.isspace() or cls.unknown_artist_pattern.search(artist)) return unknown_artist
Check to make sure artist is not None, or evil will occur...
Check to make sure artist is not None, or evil will occur...
Python
mit
hidat/audio_pipeline
--- +++ @@ -18,5 +18,5 @@ :param artist: :return: """ - unknown_artist = not (artist or artist.isspace() or cls.unknown_artist_pattern.search(artist)) + unknown_artist = artist is None or not (artist or artist.isspace() or cls.unknown_artist_pattern.search(artist)) return unknown_artist
19e9080f06aa2264e77b65a9c1ad6d30e6b7da4c
app/aflafrettir/routes.py
app/aflafrettir/routes.py
from flask import render_template from . import aflafrettir from ..models import User, Category, Post @aflafrettir.route('/') def index(): categories = Category.get_all_active() posts = Post.get_all() return render_template('aflafrettir/index.html', categories=categories, posts=posts) @aflafrettir.route('/user/<username>') def user(username): user = User.query.filter_by(username=username).first_or_404() return render_template('aflafrettir/user.html', user=user)
from flask import render_template from . import aflafrettir from ..models import User, Category, Post @aflafrettir.route('/frettir') @aflafrettir.route('/', alias=True) def index(): categories = Category.get_all_active() posts = Post.get_all() return render_template('aflafrettir/index.html', categories=categories, posts=posts) @aflafrettir.route('/frettir/flokkur/<int:cid>') def category(cid): categories = Category.get_all_active() posts = Post.get_by_category(cid) return render_template('aflafrettir/index.html', categories=categories, posts=posts) @aflafrettir.route('/user/<username>') def user(username): user = User.query.filter_by(username=username).first_or_404() return render_template('aflafrettir/user.html', user=user)
Add a route for displaying posts by categories
Add a route for displaying posts by categories
Python
mit
finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is
--- +++ @@ -3,7 +3,8 @@ from . import aflafrettir from ..models import User, Category, Post -@aflafrettir.route('/') +@aflafrettir.route('/frettir') +@aflafrettir.route('/', alias=True) def index(): categories = Category.get_all_active() posts = Post.get_all() @@ -11,6 +12,14 @@ categories=categories, posts=posts) +@aflafrettir.route('/frettir/flokkur/<int:cid>') +def category(cid): + categories = Category.get_all_active() + posts = Post.get_by_category(cid) + return render_template('aflafrettir/index.html', + categories=categories, + posts=posts) + @aflafrettir.route('/user/<username>') def user(username): user = User.query.filter_by(username=username).first_or_404()
1e5d549b6fdf62c1016451f9dfe566c9546b2f38
bcbio/bed/__init__.py
bcbio/bed/__init__.py
import pybedtools as bt import six def concat(bed_files, catted=None): """ recursively concat a set of BED files, returning a sorted bedtools object of the result """ if len(bed_files) == 0: if catted: return catted.sort() else: return catted if not catted: bed_files = list(bed_files) catted = bt.BedTool(bed_files.pop()) else: catted = catted.cat(bed_files.pop(), postmerge=False, force_truncate=False) return concat(bed_files, catted) def merge(bedfiles): """ given a BED file or list of BED files merge them an return a bedtools object """ if isinstance(bedfiles, list): catted = concat(bedfiles) else: catted = concat([bedfiles]) if catted: return concat(bedfiles).sort().merge() else: return catted
import pybedtools as bt import six def concat(bed_files, catted=None): """ recursively concat a set of BED files, returning a sorted bedtools object of the result """ bed_files = [x for x in bed_files if x] if len(bed_files) == 0: if catted: # move to a .bed extension for downstream tools if not already sorted_bed = catted.sort() if not sorted_bed.fn.endswith(".bed"): return sorted_bed.moveto(sorted_bed.fn + ".bed") else: return sorted_bed else: return catted if not catted: bed_files = list(bed_files) catted = bt.BedTool(bed_files.pop()) else: catted = catted.cat(bed_files.pop(), postmerge=False, force_truncate=False) return concat(bed_files, catted) def merge(bedfiles): """ given a BED file or list of BED files merge them an return a bedtools object """ if isinstance(bedfiles, list): catted = concat(bedfiles) else: catted = concat([bedfiles]) if catted: return concat(bedfiles).sort().merge() else: return catted
Move the file to have an extension of .bed.
Move the file to have an extension of .bed. A lot of tools detect what type of file it is by the extension, so this lets us pass on the BedTool.fn as the filename and not break things.
Python
mit
guillermo-carrasco/bcbio-nextgen,lbeltrame/bcbio-nextgen,gifford-lab/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,vladsaveliev/bcbio-nextgen,brainstorm/bcbio-nextgen,mjafin/bcbio-nextgen,lbeltrame/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,brainstorm/bcbio-nextgen,fw1121/bcbio-nextgen,verdurin/bcbio-nextgen,lpantano/bcbio-nextgen,brainstorm/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,a113n/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,chapmanb/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,chapmanb/bcbio-nextgen,biocyberman/bcbio-nextgen,chapmanb/bcbio-nextgen,vladsaveliev/bcbio-nextgen,gifford-lab/bcbio-nextgen,fw1121/bcbio-nextgen,hjanime/bcbio-nextgen,biocyberman/bcbio-nextgen,lpantano/bcbio-nextgen,guillermo-carrasco/bcbio-nextgen,lbeltrame/bcbio-nextgen,guillermo-carrasco/bcbio-nextgen,mjafin/bcbio-nextgen,a113n/bcbio-nextgen,lpantano/bcbio-nextgen,fw1121/bcbio-nextgen,gifford-lab/bcbio-nextgen,verdurin/bcbio-nextgen,biocyberman/bcbio-nextgen,vladsaveliev/bcbio-nextgen,a113n/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,mjafin/bcbio-nextgen,hjanime/bcbio-nextgen,hjanime/bcbio-nextgen,verdurin/bcbio-nextgen
--- +++ @@ -6,9 +6,15 @@ recursively concat a set of BED files, returning a sorted bedtools object of the result """ + bed_files = [x for x in bed_files if x] if len(bed_files) == 0: if catted: - return catted.sort() + # move to a .bed extension for downstream tools if not already + sorted_bed = catted.sort() + if not sorted_bed.fn.endswith(".bed"): + return sorted_bed.moveto(sorted_bed.fn + ".bed") + else: + return sorted_bed else: return catted
8abf65d6b364bd71e8aa32e25d319c77d716a85f
bin/verify_cached_graphs.py
bin/verify_cached_graphs.py
#!/usr/bin/env python import sys from pprint import pprint as pp from cc.payment import flow def verify(): for ignore_balances in (True, False): graph = flow.build_graph(ignore_balances) cached = flow.get_cached_graph(ignore_balances) diff = compare(cached, graph) if diff: pp(diff) return False return True def compare(g1, g2): e1 = set(normalize(g1.edges(data=True))) e2 = set(normalize(g2.edges(data=True))) return e1.symmetric_difference(e2) def normalize(edge_list): return ((src, dest, data['capacity'], data['weight'], data['creditline_id']) for src, dest, data in edge_list) if __name__ == '__main__': if verify(): print 'OK.' sys.exit(0) else: print 'Mismatch.' sys.exit(1)
#!/usr/bin/env python import sys from pprint import pprint as pp from cc.payment import flow def verify(): for ignore_balances in (True, False): cached = flow.get_cached_graph(ignore_balances) if not cached: continue graph = flow.build_graph(ignore_balances) diff = compare(cached, graph) if diff: pp(diff) return False return True def compare(g1, g2): e1 = set(normalize(g1.edges(data=True))) e2 = set(normalize(g2.edges(data=True))) return e1.symmetric_difference(e2) def normalize(edge_list): return ((src, dest, data['capacity'], data['weight'], data['creditline_id']) for src, dest, data in edge_list) if __name__ == '__main__': if verify(): print 'OK.' sys.exit(0) else: print 'Mismatch.' sys.exit(1)
Fix cached graph verifier tool to handle case where no graph is cached ATM.
Fix cached graph verifier tool to handle case where no graph is cached ATM.
Python
agpl-3.0
rfugger/villagescc,rfugger/villagescc,rfugger/villagescc,rfugger/villagescc
--- +++ @@ -7,8 +7,10 @@ def verify(): for ignore_balances in (True, False): + cached = flow.get_cached_graph(ignore_balances) + if not cached: + continue graph = flow.build_graph(ignore_balances) - cached = flow.get_cached_graph(ignore_balances) diff = compare(cached, graph) if diff: pp(diff)
5d63656e9b03aaed2ef9042ff61a86bc4b8ee715
django_rq/decorators.py
django_rq/decorators.py
from django.utils import six from rq.decorators import job as _rq_job from .queues import get_queue def job(func_or_queue, connection=None, *args, **kwargs): """ The same as RQ's job decorator, but it works automatically works out the ``connection`` argument from RQ_QUEUES. And also, it allows simplified ``@job`` syntax to put job into default queue. """ if callable(func_or_queue): func = func_or_queue queue = 'default' else: func = None queue = func_or_queue if isinstance(queue, six.string_types): try: queue = get_queue(queue) if connection is None: connection = queue.connection except KeyError: pass decorator = _rq_job(queue, connection=connection, *args, **kwargs) if func: return decorator(func) return decorator
from rq.decorators import job as _rq_job from .queues import get_queue def job(func_or_queue, connection=None, *args, **kwargs): """ The same as RQ's job decorator, but it works automatically works out the ``connection`` argument from RQ_QUEUES. And also, it allows simplified ``@job`` syntax to put job into default queue. """ if callable(func_or_queue): func = func_or_queue queue = 'default' else: func = None queue = func_or_queue try: from django.utils import six string_type = six.string_types except ImportError: # for django lt v1.5 and python 2 string_type = basestring if isinstance(queue, string_type): try: queue = get_queue(queue) if connection is None: connection = queue.connection except KeyError: pass decorator = _rq_job(queue, connection=connection, *args, **kwargs) if func: return decorator(func) return decorator
Add a fallback for older Django versions that doesn't come with "six"
Add a fallback for older Django versions that doesn't come with "six"
Python
mit
meteozond/django-rq,sbussetti/django-rq,sbussetti/django-rq,ui/django-rq,viaregio/django-rq,1024inc/django-rq,meteozond/django-rq,lechup/django-rq,ui/django-rq,mjec/django-rq,1024inc/django-rq,ryanisnan/django-rq,ryanisnan/django-rq,viaregio/django-rq,mjec/django-rq,lechup/django-rq
--- +++ @@ -1,4 +1,3 @@ -from django.utils import six from rq.decorators import job as _rq_job from .queues import get_queue @@ -19,7 +18,14 @@ func = None queue = func_or_queue - if isinstance(queue, six.string_types): + try: + from django.utils import six + string_type = six.string_types + except ImportError: + # for django lt v1.5 and python 2 + string_type = basestring + + if isinstance(queue, string_type): try: queue = get_queue(queue) if connection is None:
34da1ea604d1aea4fcefae188f259df4bd8119a5
indra/sources/crog/processor.py
indra/sources/crog/processor.py
# -*- coding: utf-8 -*- """Processor for the `Chemical Roles Graph (CRoG) <https://github.com/chemical-roles/chemical-roles>`_. """ from typing import Optional from ..utils import RemoteProcessor __all__ = [ 'CrogProcessor', ] CROG_URL = 'https://raw.githubusercontent.com/chemical-roles/' \ 'chemical-roles/master/docs/_data/crog.indra.json' class CrogProcessor(RemoteProcessor): """A processor for the Chemical Roles Graph. Parameters ---------- url : An optional URL. If none given, defaults to :data:`indra.sources.crog.processor.CROG_URL`. """ def __init__(self, url: Optional[str] = None): super().__init__(url=url or CROG_URL) def extract_statements(self): super().extract_statements() for stmt in self.statements: # We remap the source API to crog to align with the belief model for ev in stmt.evidence: ev.source_api = 'crog' # We also change the name of targets whose names are ECCODEs to # have the EC prefix in their name for agent in stmt.real_agent_list(): if agent.name == agent.db_refs.get('ECCODE'): agent.name = 'EC%s' % agent.name
# -*- coding: utf-8 -*- """Processor for the `Chemical Roles Graph (CRoG) <https://github.com/chemical-roles/chemical-roles>`_. """ from typing import Optional from ..utils import RemoteProcessor __all__ = [ 'CrogProcessor', ] CROG_URL = 'https://raw.githubusercontent.com/chemical-roles/' \ 'chemical-roles/master/docs/_data/crog.indra.json' class CrogProcessor(RemoteProcessor): """A processor for the Chemical Roles Graph. Parameters ---------- url : An optional URL. If none given, defaults to :data:`indra.sources.crog.processor.CROG_URL`. """ def __init__(self, url: Optional[str] = None): super().__init__(url=url or CROG_URL) def extract_statements(self): super().extract_statements() for stmt in self.statements: # We remap the source API to crog to align with the belief model for ev in stmt.evidence: ev.source_api = 'crog' # We also change the name of targets whose names are ECCODEs to # have the EC prefix in their name for agent in stmt.real_agent_list(): if agent.name == agent.db_refs.get('ECCODE'): agent.name = 'EC %s' % agent.name
Add space after EC prefix
Add space after EC prefix Co-authored-by: Charles Tapley Hoyt <71cbf5b94f8862eb69e356b36e0cdaee3e60b67f@gmail.com>
Python
bsd-2-clause
johnbachman/indra,bgyori/indra,sorgerlab/belpy,sorgerlab/belpy,bgyori/indra,sorgerlab/indra,johnbachman/indra,bgyori/indra,sorgerlab/indra,sorgerlab/belpy,sorgerlab/indra,johnbachman/indra
--- +++ @@ -39,4 +39,4 @@ # have the EC prefix in their name for agent in stmt.real_agent_list(): if agent.name == agent.db_refs.get('ECCODE'): - agent.name = 'EC%s' % agent.name + agent.name = 'EC %s' % agent.name
bd1719885b1328c5aca34bc8d78b761e846f4037
tests/query_test/test_decimal_queries.py
tests/query_test/test_decimal_queries.py
#!/usr/bin/env python # Copyright (c) 2012 Cloudera, Inc. All rights reserved. # Targeted tests for decimal type. # import logging import pytest from copy import copy from tests.common.test_vector import * from tests.common.impala_test_suite import * class TestDecimalQueries(ImpalaTestSuite): BATCH_SIZES = [0, 1] @classmethod def get_workload(cls): return 'functional-query' @classmethod def add_test_dimensions(cls): super(TestDecimalQueries, cls).add_test_dimensions() cls.TestMatrix.add_dimension( TestDimension('batch_size', *TestDecimalQueries.BATCH_SIZES)) # On CDH4, hive does not support decimal so we can't run these tests against # the other file formats. Enable them on C5. cls.TestMatrix.add_constraint(lambda v:\ (v.get_value('table_format').file_format == 'text' and v.get_value('table_format').compression_codec == 'none') or v.get_value('table_format').file_format == 'parquet') def test_queries(self, vector): new_vector = copy(vector) new_vector.get_value('exec_option')['batch_size'] = vector.get_value('batch_size') self.run_test_case('QueryTest/decimal', new_vector)
#!/usr/bin/env python # Copyright (c) 2012 Cloudera, Inc. All rights reserved. # Targeted tests for decimal type. # import logging import pytest from copy import copy from tests.common.test_vector import * from tests.common.impala_test_suite import * class TestDecimalQueries(ImpalaTestSuite): BATCH_SIZES = [0, 1] @classmethod def get_workload(cls): return 'functional-query' @classmethod def add_test_dimensions(cls): super(TestDecimalQueries, cls).add_test_dimensions() cls.TestMatrix.add_dimension( TestDimension('batch_size', *TestDecimalQueries.BATCH_SIZES)) # On CDH4, hive does not support decimal so we can't run these tests against # the other file formats. Enable them on C5. cls.TestMatrix.add_constraint(lambda v:\ (v.get_value('table_format').file_format == 'text' and v.get_value('table_format').compression_codec == 'none') or v.get_value('table_format').file_format == 'parquet') def test_queries(self, vector): if os.environ.get('ASAN_OPTIONS') == 'handle_segv=0': pytest.xfail(reason="IMPALA-959: Sum on a decimal column fails ASAN") new_vector = copy(vector) new_vector.get_value('exec_option')['batch_size'] = vector.get_value('batch_size') self.run_test_case('QueryTest/decimal', new_vector)
Fix the ASAN build by xfailing test_decimal when ASAN_OPTIONS is set.
Fix the ASAN build by xfailing test_decimal when ASAN_OPTIONS is set. Adding decimal columns crashes an ASAN built impalad. This change skips the test. Change-Id: Ic94055a3f0d00f89354177de18bc27d2f4cecec2 Reviewed-on: http://gerrit.ent.cloudera.com:8080/2532 Reviewed-by: Ishaan Joshi <d1d1e60202ec9f2503deb1b724986485a125d802@cloudera.com> Tested-by: jenkins Reviewed-on: http://gerrit.ent.cloudera.com:8080/2594
Python
apache-2.0
cchanning/Impala,XiaominZhang/Impala,tempbottle/Impala,cgvarela/Impala,kapilrastogi/Impala,grundprinzip/Impala,scalingdata/Impala,bratatidas9/Impala-1,mapr/impala,ImpalaToGo/ImpalaToGo,cchanning/Impala,lnliuxing/Impala,gerashegalov/Impala,theyaa/Impala,lirui-intel/Impala,bowlofstew/Impala,ImpalaToGo/ImpalaToGo,gerashegalov/Impala,henryr/Impala,ImpalaToGo/ImpalaToGo,tempbottle/Impala,bowlofstew/Impala,henryr/Impala,caseyching/Impala,kapilrastogi/Impala,rdblue/Impala,XiaominZhang/Impala,lnliuxing/Impala,rdblue/Impala,caseyching/Impala,theyaa/Impala,grundprinzip/Impala,grundprinzip/Impala,cloudera/recordservice,cchanning/Impala,cchanning/Impala,theyaa/Impala,XiaominZhang/Impala,gerashegalov/Impala,placrosse/ImpalaToGo,tempbottle/Impala,cloudera/recordservice,cchanning/Impala,mapr/impala,ImpalaToGo/ImpalaToGo,XiaominZhang/Impala,lirui-intel/Impala,cgvarela/Impala,henryr/Impala,ImpalaToGo/ImpalaToGo,rdblue/Impala,lnliuxing/Impala,ibmsoe/ImpalaPPC,caseyching/Impala,lirui-intel/Impala,bowlofstew/Impala,XiaominZhang/Impala,scalingdata/Impala,gerashegalov/Impala,brightchen/Impala,bowlofstew/Impala,placrosse/ImpalaToGo,grundprinzip/Impala,kapilrastogi/Impala,ibmsoe/ImpalaPPC,rdblue/Impala,cgvarela/Impala,bowlofstew/Impala,bratatidas9/Impala-1,theyaa/Impala,XiaominZhang/Impala,lnliuxing/Impala,cchanning/Impala,scalingdata/Impala,bowlofstew/Impala,grundprinzip/Impala,cloudera/recordservice,bowlofstew/Impala,brightchen/Impala,cgvarela/Impala,cgvarela/Impala,caseyching/Impala,henryr/Impala,rdblue/Impala,mapr/impala,gerashegalov/Impala,cgvarela/Impala,placrosse/ImpalaToGo,kapilrastogi/Impala,kapilrastogi/Impala,kapilrastogi/Impala,tempbottle/Impala,mapr/impala,lirui-intel/Impala,brightchen/Impala,lnliuxing/Impala,scalingdata/Impala,ibmsoe/ImpalaPPC,scalingdata/Impala,lirui-intel/Impala,bratatidas9/Impala-1,theyaa/Impala,theyaa/Impala,tempbottle/Impala,placrosse/ImpalaToGo,caseyching/Impala,bratatidas9/Impala-1,brightchen/Impala,tempbottle/Impala,gerashegalov/Impala,kapilrastogi/Impala,bratatidas9/Impala-1,grundprinzip/Impala,mapr/impala,brightchen/Impala,placrosse/ImpalaToGo,gerashegalov/Impala,lnliuxing/Impala,tempbottle/Impala,lnliuxing/Impala,cchanning/Impala,placrosse/ImpalaToGo,rdblue/Impala,henryr/Impala,ibmsoe/ImpalaPPC,XiaominZhang/Impala,ibmsoe/ImpalaPPC,rdblue/Impala,cloudera/recordservice,ibmsoe/ImpalaPPC,lirui-intel/Impala,ibmsoe/ImpalaPPC,scalingdata/Impala,lirui-intel/Impala,henryr/Impala,bratatidas9/Impala-1,brightchen/Impala,cgvarela/Impala,cloudera/recordservice,cloudera/recordservice,cloudera/recordservice,brightchen/Impala,ImpalaToGo/ImpalaToGo,bratatidas9/Impala-1,caseyching/Impala,caseyching/Impala,theyaa/Impala
--- +++ @@ -29,6 +29,8 @@ v.get_value('table_format').file_format == 'parquet') def test_queries(self, vector): + if os.environ.get('ASAN_OPTIONS') == 'handle_segv=0': + pytest.xfail(reason="IMPALA-959: Sum on a decimal column fails ASAN") new_vector = copy(vector) new_vector.get_value('exec_option')['batch_size'] = vector.get_value('batch_size') self.run_test_case('QueryTest/decimal', new_vector)
9611fcd38c8d75b1c101870ae59de3db326c6951
pyfive/tests/test_pyfive.py
pyfive/tests/test_pyfive.py
import numpy as np from numpy.testing import assert_array_equal import pyfive import h5py def test_read_basic_example(): # reading with HDF5 hfile = h5py.File('basic_example.hdf5', 'r') assert hfile['/example'].attrs['foo'] == 99.5 assert hfile['/example'].attrs['bar'] == 42 np.testing.assert_array_equal( hfile['/example'][:], np.arange(100, dtype='int32')) assert hfile['/example'].dtype == np.dtype('int32') assert hfile['/example'].shape == (100, ) hfile.close() # reading with pyfive hfile = pyfive.HDF5File('basic_example.hdf5') assert 'example' in hfile.datasets dset = hfile.datasets['example'] attrs = dset.get_attributes() assert 'bar' in attrs assert 'foo' in attrs assert attrs['bar'] == 42 assert attrs['foo'] == 99.5 data = dset.get_data() assert data.dtype == np.dtype('int32') assert data.shape == (100, ) assert_array_equal(data, np.arange(100, dtype='int32')) hfile.close()
""" Unit tests for pyfive. """ import os import numpy as np from numpy.testing import assert_array_equal import pyfive import h5py DIRNAME = os.path.dirname(__file__) BASIC_HDF5_FILE = os.path.join(DIRNAME, 'basic_example.hdf5') BASIC_NETCDF4_FILE = os.path.join(DIRNAME, 'basic_example.nc') def test_read_basic_example(): # reading with HDF5 hfile = h5py.File(BASIC_HDF5_FILE, 'r') assert hfile['/example'].attrs['foo'] == 99.5 assert hfile['/example'].attrs['bar'] == 42 np.testing.assert_array_equal( hfile['/example'][:], np.arange(100, dtype='int32')) assert hfile['/example'].dtype == np.dtype('int32') assert hfile['/example'].shape == (100, ) hfile.close() # reading with pyfive hfile = pyfive.HDF5File(BASIC_HDF5_FILE) assert 'example' in hfile.datasets dset = hfile.datasets['example'] attrs = dset.get_attributes() assert 'bar' in attrs assert 'foo' in attrs assert attrs['bar'] == 42 assert attrs['foo'] == 99.5 data = dset.get_data() assert data.dtype == np.dtype('int32') assert data.shape == (100, ) assert_array_equal(data, np.arange(100, dtype='int32')) hfile.close()
Make unit tests path aware
Make unit tests path aware
Python
bsd-3-clause
jjhelmus/pyfive
--- +++ @@ -1,3 +1,5 @@ +""" Unit tests for pyfive. """ +import os import numpy as np from numpy.testing import assert_array_equal @@ -5,10 +7,15 @@ import pyfive import h5py +DIRNAME = os.path.dirname(__file__) +BASIC_HDF5_FILE = os.path.join(DIRNAME, 'basic_example.hdf5') +BASIC_NETCDF4_FILE = os.path.join(DIRNAME, 'basic_example.nc') + + def test_read_basic_example(): # reading with HDF5 - hfile = h5py.File('basic_example.hdf5', 'r') + hfile = h5py.File(BASIC_HDF5_FILE, 'r') assert hfile['/example'].attrs['foo'] == 99.5 assert hfile['/example'].attrs['bar'] == 42 np.testing.assert_array_equal( @@ -19,7 +26,7 @@ hfile.close() # reading with pyfive - hfile = pyfive.HDF5File('basic_example.hdf5') + hfile = pyfive.HDF5File(BASIC_HDF5_FILE) assert 'example' in hfile.datasets dset = hfile.datasets['example']
2cc8a541814cc353e7b60767afd2128dce38918a
tests/test_plugins/test_plugin/server.py
tests/test_plugins/test_plugin/server.py
#!/usr/bin/env python # -*- coding: utf-8 -*- ############################################################################### # Copyright Kitware Inc. # # Licensed under the Apache License, Version 2.0 ( the "License" ); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ############################################################################### from girder.api import access from girder.api.describe import Description from girder.api.rest import Resource class CustomAppRoot(object): """ The webroot endpoint simply serves the main index HTML file. """ exposed = True def GET(self): return "hello world" class Other(Resource): def __init__(self): self.resourceName = 'other' self.route('GET', (), self.getResource) @access.public def getResource(self, params): return ['custom REST route'] getResource.description = Description('Get something.') def load(info): info['serverRoot'], info['serverRoot'].girder = CustomAppRoot(), info['serverRoot'] info['serverRoot'].api = info['serverRoot'].girder.api del info['serverRoot'].girder.api info['apiRoot'].other = Other()
#!/usr/bin/env python # -*- coding: utf-8 -*- ############################################################################### # Copyright Kitware Inc. # # Licensed under the Apache License, Version 2.0 ( the "License" ); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ############################################################################### from girder.api import access from girder.api.describe import Description from girder.api.rest import Resource class CustomAppRoot(object): """ The webroot endpoint simply serves the main index HTML file. """ exposed = True def GET(self): return "hello world" class Other(Resource): def __init__(self): self.resourceName = 'other' self.route('GET', (), self.getResource) @access.public def getResource(self, params): return ['custom REST route'] getResource.description = Description('Get something.') def load(info): info['serverRoot'], info['serverRoot'].girder = ( CustomAppRoot(), info['serverRoot']) info['serverRoot'].api = info['serverRoot'].girder.api del info['serverRoot'].girder.api info['apiRoot'].other = Other()
Fix failing python style test
Fix failing python style test
Python
apache-2.0
jbeezley/girder,jcfr/girder,RafaelPalomar/girder,opadron/girder,Kitware/girder,essamjoubori/girder,RafaelPalomar/girder,adsorensen/girder,Xarthisius/girder,adsorensen/girder,data-exp-lab/girder,jcfr/girder,girder/girder,opadron/girder,Xarthisius/girder,data-exp-lab/girder,jcfr/girder,kotfic/girder,manthey/girder,msmolens/girder,salamb/girder,sutartmelson/girder,adsorensen/girder,essamjoubori/girder,data-exp-lab/girder,essamjoubori/girder,chrismattmann/girder,kotfic/girder,opadron/girder,kotfic/girder,Xarthisius/girder,jcfr/girder,data-exp-lab/girder,girder/girder,opadron/girder,girder/girder,manthey/girder,salamb/girder,salamb/girder,adsorensen/girder,kotfic/girder,jbeezley/girder,data-exp-lab/girder,msmolens/girder,msmolens/girder,chrismattmann/girder,essamjoubori/girder,essamjoubori/girder,Kitware/girder,jcfr/girder,Xarthisius/girder,chrismattmann/girder,RafaelPalomar/girder,adsorensen/girder,jbeezley/girder,chrismattmann/girder,sutartmelson/girder,sutartmelson/girder,RafaelPalomar/girder,RafaelPalomar/girder,kotfic/girder,sutartmelson/girder,Xarthisius/girder,Kitware/girder,jbeezley/girder,salamb/girder,manthey/girder,msmolens/girder,chrismattmann/girder,girder/girder,salamb/girder,manthey/girder,sutartmelson/girder,Kitware/girder,opadron/girder,msmolens/girder
--- +++ @@ -45,7 +45,8 @@ def load(info): - info['serverRoot'], info['serverRoot'].girder = CustomAppRoot(), info['serverRoot'] + info['serverRoot'], info['serverRoot'].girder = ( + CustomAppRoot(), info['serverRoot']) info['serverRoot'].api = info['serverRoot'].girder.api del info['serverRoot'].girder.api
db99f77edfb7318ee3b4a443a98c837611054515
utils/fields.py
utils/fields.py
import json from django.contrib.postgres.forms.jsonb import InvalidJSONInput, JSONField class JSONPrettyField(JSONField): def __init__(self, *args, **kwargs): self.__indent = kwargs.pop('indent', 2) super().__init__(*args, **kwargs) def prepare_value(self, value): if isinstance(value, InvalidJSONInput): return value return json.dumps(value, indent=self.__indent, sort_keys=True, ensure_ascii=False)
import json from django.contrib.postgres.forms.jsonb import InvalidJSONInput, JSONField from django.forms import ValidationError class JSONPrettyField(JSONField): def __init__(self, *args, **kwargs): self.__indent = kwargs.pop('indent', 2) self.__dict_only = kwargs.pop('dict_only', False) self.__list_only = kwargs.pop('list_only', False) if self.__dict_only and self.__list_only: raise ValueError('Only one of dict_only or list_only can be True') super().__init__(*args, **kwargs) def prepare_value(self, value): if isinstance(value, InvalidJSONInput): return value return json.dumps(value, indent=self.__indent, sort_keys=True, ensure_ascii=False) def validate(self, value): if self.__dict_only and not isinstance(value, dict): raise ValidationError('{} is not of type dict'.format(value)) if self.__list_only and not isinstance(value, list): raise ValidationError('{} is not of type list'.format(value)) return value
Add list_only and dict_only to JSONPrettyField
Add list_only and dict_only to JSONPrettyField
Python
mit
bulv1ne/django-utils,bulv1ne/django-utils
--- +++ @@ -1,14 +1,26 @@ import json from django.contrib.postgres.forms.jsonb import InvalidJSONInput, JSONField +from django.forms import ValidationError class JSONPrettyField(JSONField): def __init__(self, *args, **kwargs): self.__indent = kwargs.pop('indent', 2) + self.__dict_only = kwargs.pop('dict_only', False) + self.__list_only = kwargs.pop('list_only', False) + if self.__dict_only and self.__list_only: + raise ValueError('Only one of dict_only or list_only can be True') super().__init__(*args, **kwargs) def prepare_value(self, value): if isinstance(value, InvalidJSONInput): return value return json.dumps(value, indent=self.__indent, sort_keys=True, ensure_ascii=False) + + def validate(self, value): + if self.__dict_only and not isinstance(value, dict): + raise ValidationError('{} is not of type dict'.format(value)) + if self.__list_only and not isinstance(value, list): + raise ValidationError('{} is not of type list'.format(value)) + return value
30b6a5364dc22261a4d47aec2e0a77e0c5b8ccd4
wsme/release.py
wsme/release.py
name = "WSME" version = "0.1.0a2" description = "Web Services Made Easy" long_description = """ Web Service Made Easy is a pure-wsgi and modular rewrite of TGWebServices. """ author = "Christophe de Vienne" email = "cdevienne@gmail.com" url = "http://bitbucket.org/cdevienne/wsme" license = "MIT"
name = "WSME" version = "0.1.0a3" description = "Web Services Made Easy" long_description = """ Web Service Made Easy is a pure-wsgi and modular rewrite of TGWebServices. """ author = "Christophe de Vienne" email = "python-wsme@googlegroups.com" url = "http://bitbucket.org/cdevienne/wsme" license = "MIT"
Update the contact mail and version
Update the contact mail and version
Python
mit
stackforge/wsme
--- +++ @@ -1,5 +1,5 @@ name = "WSME" -version = "0.1.0a2" +version = "0.1.0a3" description = "Web Services Made Easy" long_description = """ @@ -7,7 +7,7 @@ """ author = "Christophe de Vienne" -email = "cdevienne@gmail.com" +email = "python-wsme@googlegroups.com" url = "http://bitbucket.org/cdevienne/wsme"
49263d5e43be6ab9a5c3faf2ee6478840526cccb
flatten-array/flatten_array.py
flatten-array/flatten_array.py
def flatten(lst): """Completely flatten an arbitrarily-deep list""" return [*_flatten(lst)] def _flatten(lst): """Generator for flattening arbitrarily-deep lists""" if isinstance(lst, (list, tuple)): for item in lst: if item is None: continue else: yield from _flatten(item) else: yield lst
def flatten(lst): """Completely flatten an arbitrarily-deep list""" return [*_flatten(lst)] def _flatten(lst): """Generator for flattening arbitrarily-deep lists""" for item in lst: if isinstance(item, (list, tuple)): yield from _flatten(item) elif item is not None: yield item
Tidy and simplify generator code
Tidy and simplify generator code
Python
agpl-3.0
CubicComet/exercism-python-solutions
--- +++ @@ -5,11 +5,8 @@ def _flatten(lst): """Generator for flattening arbitrarily-deep lists""" - if isinstance(lst, (list, tuple)): - for item in lst: - if item is None: - continue - else: - yield from _flatten(item) - else: - yield lst + for item in lst: + if isinstance(item, (list, tuple)): + yield from _flatten(item) + elif item is not None: + yield item
614ab31af817fa9775fe2aa904687456656bf6fc
tags/fields.py
tags/fields.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from django.db.models.fields import CharField from django.utils.translation import ugettext_lazy as _ from django.utils.encoding import python_2_unicode_compatible from tags.models import Tag @python_2_unicode_compatible class TagField(CharField): def __init__(self, verbose_name=_('Tags'), max_length=4000, blank=True, null=True, help_text=_('A comma-separated list of tags.'), **kwargs): kwargs['max_length'] = max_length kwargs['blank'] = blank kwargs['null'] = null kwargs['verbose_name'] = verbose_name kwargs['help_text'] = help_text self.max_length = max_length self.blank = blank self.null = null self.verbose_name = verbose_name self.help_text = help_text CharField.__init__(self, **kwargs) def pre_save(self, model_instance, add): str_tags = getattr(model_instance, self.name) if str_tags: tags = set(str_tags.split(',')) for tag in tags: Tag.objects.get_or_create(name=tag) return ','.join(tags) return super(TagField, self).pre_save(model_instance, add) try: from south.modelsinspector import add_introspection_rules add_introspection_rules([], ["^tags\.fields\.TagField"]) except: pass
#!/usr/bin/env python # -*- coding: utf-8 -*- from django.db.models.fields import CharField from django.utils.translation import ugettext_lazy as _ from django.utils.encoding import python_2_unicode_compatible from tags.models import Tag @python_2_unicode_compatible class TagField(CharField): def __init__(self, verbose_name=_('Tags'), max_length=4000, blank=True, null=True, help_text=_('A comma-separated list of tags.'), **kwargs): kwargs['max_length'] = max_length kwargs['blank'] = blank kwargs['null'] = null kwargs['verbose_name'] = verbose_name kwargs['help_text'] = help_text self.max_length = max_length self.blank = blank self.null = null self.verbose_name = verbose_name self.help_text = help_text CharField.__init__(self, **kwargs) def pre_save(self, model_instance, add): str_tags = getattr(model_instance, self.name) if str_tags: tags = set(str_tags.split(',')) for tag in tags: Tag.objects.get_or_create(name=tag) return ','.join(tags) return super(TagField, self).pre_save(model_instance, add) try: from south.modelsinspector import add_introspection_rules add_introspection_rules([], ["^tags\.fields\.TagField"]) except ImportError: pass
Set except import error on add introspection rules south
Set except import error on add introspection rules south
Python
mit
avelino/django-tags
--- +++ @@ -40,6 +40,6 @@ try: from south.modelsinspector import add_introspection_rules add_introspection_rules([], ["^tags\.fields\.TagField"]) -except: +except ImportError: pass
8934730ac2702d2c88d96ed8bb015f7c6e65566b
js2xml/__init__.py
js2xml/__init__.py
import lxml.etree from js2xml.parser import CustomParser as Parser from js2xml.xmlvisitor import XmlVisitor _parser = Parser() _visitor = XmlVisitor() def parse(text, encoding="utf8", debug=False): if encoding not in (None, "utf8"): text = text.decode(encoding) tree = _parser.parse(text if not isinstance(text, unicode) else text.encode("utf8"), debug=debug) xml = _visitor.visit(tree) return xml def pretty_print(tree): return lxml.etree.tostring(tree, pretty_print=True)
import lxml.etree from js2xml.parser import CustomParser as Parser from js2xml.xmlvisitor import XmlVisitor import js2xml.jsonlike as jsonlike _parser = Parser() _visitor = XmlVisitor() def parse(text, encoding="utf8", debug=False): if encoding not in (None, "utf8"): text = text.decode(encoding) tree = _parser.parse(text if not isinstance(text, unicode) else text.encode("utf8"), debug=debug) xml = _visitor.visit(tree) return xml def pretty_print(tree): return lxml.etree.tostring(tree, pretty_print=True)
Allow js2xml.jsonlike... when importing js2xml only
Allow js2xml.jsonlike... when importing js2xml only
Python
mit
redapple/js2xml,redapple/js2xml,redapple/js2xml,redapple/js2xml
--- +++ @@ -1,6 +1,7 @@ import lxml.etree from js2xml.parser import CustomParser as Parser from js2xml.xmlvisitor import XmlVisitor +import js2xml.jsonlike as jsonlike _parser = Parser() _visitor = XmlVisitor()
be03357a9d18a4a6174c075db1fdd786100925aa
lat_lng.py
lat_lng.py
from math import atan, tan, radians def lat_lng(lat, lng): """ Return corrected lat/lng. Lat: -90 to 90 Lng: -180 to 180 """ # lat # if lat > 180: # reduce to value less than 180 # lat = lat - (lat//180)*180 # if lat < -180: # increase to value greater than -180 # lat = lat # if lat > 90.0: # amt_gt_90 = lat - (lat//90)*90 # lat = 90 - amt_gt_90 lng = -2*atan(1/tan((radians(lng)-180)/2)) return lat, lng
from math import atan, tan, radians, degrees def lat_lng(lat, lng): """ Return corrected lat/lng. Lat: -90 to 90 Lng: -180 to 180 """ # lat # if lat > 180: # reduce to value less than 180 # lat = lat - (lat//180)*180 # if lat < -180: # increase to value greater than -180 # lat = lat # if lat > 90.0: # amt_gt_90 = lat - (lat//90)*90 # lat = 90 - amt_gt_90 lng = degrees(-2*atan(1/tan((radians(lng)-180)/2))) return lat, lng
Change output back to degrees.
Change output back to degrees.
Python
mit
bm5w/lat_lng
--- +++ @@ -1,4 +1,4 @@ -from math import atan, tan, radians +from math import atan, tan, radians, degrees def lat_lng(lat, lng): @@ -17,6 +17,6 @@ # if lat > 90.0: # amt_gt_90 = lat - (lat//90)*90 # lat = 90 - amt_gt_90 - lng = -2*atan(1/tan((radians(lng)-180)/2)) + lng = degrees(-2*atan(1/tan((radians(lng)-180)/2))) return lat, lng
f16e8d0bd0765e4d4a8e0f917bf0325a772a1a23
rbm2m/models/record.py
rbm2m/models/record.py
# -*- coding: utf-8 -*- from sqlalchemy import (Column, Integer, String, DateTime, ForeignKey) from sqlalchemy.orm import relationship, backref from .base import Base class Record(Base): __tablename__ = 'records' id = Column(Integer, primary_key=True, autoincrement=False) genre_id = Column(Integer, ForeignKey('genres.id'), nullable=False) import_date = Column(DateTime, nullable=False) artist = Column(String(250), nullable=False) title = Column(String(250), nullable=False) label = Column(String(250), nullable=False) notes = Column(String(500)) grade = Column(String(16), nullable=False) format = Column(String(2), nullable=False) price = Column(Integer) genre = relationship("Genre", backref=backref('records', order_by=id)) class RecordStatus(Base): __tablename__ = 'record_status' record_id = Column(Integer, ForeignKey('records.id'), primary_key=True, autoincrement=False) status = Column(String(50), nullable=False)
# -*- coding: utf-8 -*- from sqlalchemy import (Column, Integer, String, DateTime, ForeignKey) from sqlalchemy.orm import relationship, backref from .base import Base class Record(Base): __tablename__ = 'records' id = Column(Integer, primary_key=True, autoincrement=False) genre_id = Column(Integer, ForeignKey('genres.id'), nullable=False) import_date = Column(DateTime, nullable=False) artist = Column(String(250), nullable=False) title = Column(String(250), nullable=False) label = Column(String(250), nullable=False) notes = Column(String(2500)) grade = Column(String(16), nullable=False) format = Column(String(2), nullable=False) price = Column(Integer) genre = relationship("Genre", backref=backref('records', order_by=id)) class RecordStatus(Base): __tablename__ = 'record_status' record_id = Column(Integer, ForeignKey('records.id'), primary_key=True, autoincrement=False) status = Column(String(50), nullable=False)
Increase description max length to 2500 characters
Increase description max length to 2500 characters
Python
apache-2.0
notapresent/rbm2m,notapresent/rbm2m
--- +++ @@ -14,7 +14,7 @@ artist = Column(String(250), nullable=False) title = Column(String(250), nullable=False) label = Column(String(250), nullable=False) - notes = Column(String(500)) + notes = Column(String(2500)) grade = Column(String(16), nullable=False) format = Column(String(2), nullable=False) price = Column(Integer)
4657ecdf6889684cf83c77f34233d8bd3ba852a2
tests/events/test_models.py
tests/events/test_models.py
# -*- coding: utf-8 -*- import pytest from components.events.models import Event, Performance, Venue from components.events.factories import (EventFactory, PerformanceFactory, VenueFactory) pytestmark = pytest.mark.django_db class TestEvents: def test_factory(self): factory = EventFactory() assert isinstance(factory, Event) assert 'event' in factory.romanized_name class TestPerformances: def test_factory(self): factory = PerformanceFactory() assert isinstance(factory, Performance) assert 'performance' in factory.romanized_name class TestVenues: def test_factory(self): factory = VenueFactory() assert isinstance(factory, Venue) assert 'venue' in factory.romanized_name
# -*- coding: utf-8 -*- import datetime import pytest from components.events.models import Event, Performance, Venue from components.events.factories import (EventFactory, PerformanceFactory, VenueFactory) pytestmark = pytest.mark.django_db class TestEvents: def test_factory(self): factory = EventFactory() assert isinstance(factory, Event) assert 'event' in factory.romanized_name def test_get_absolute_url(self, client): factory = EventFactory() response = client.get(factory.get_absolute_url()) assert response.status_code == 200 class TestPerformances: def test_factory(self): factory = PerformanceFactory() assert isinstance(factory, Performance) assert 'performance' in factory.romanized_name def test_string_representation(self): day = datetime.date.today() performance = PerformanceFactory() assert str(day) in str(performance) start_time = datetime.datetime.now().time() performance_with_start_time = PerformanceFactory(start_time=start_time) assert str(start_time) in str(performance_with_start_time) class TestVenues: def test_factory(self): factory = VenueFactory() assert isinstance(factory, Venue) assert 'venue' in factory.romanized_name def test_get_absolute_url(self, client): factory = VenueFactory() response = client.get(factory.get_absolute_url()) assert response.status_code == 200 def test_string_representation(self): factory = VenueFactory() assert str(factory) == factory.romanized_name
Test string representations and get_absolute_url() calls.
Test string representations and get_absolute_url() calls.
Python
apache-2.0
hello-base/web,hello-base/web,hello-base/web,hello-base/web
--- +++ @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +import datetime import pytest from components.events.models import Event, Performance, Venue @@ -14,6 +15,11 @@ assert isinstance(factory, Event) assert 'event' in factory.romanized_name + def test_get_absolute_url(self, client): + factory = EventFactory() + response = client.get(factory.get_absolute_url()) + assert response.status_code == 200 + class TestPerformances: def test_factory(self): @@ -21,9 +27,27 @@ assert isinstance(factory, Performance) assert 'performance' in factory.romanized_name + def test_string_representation(self): + day = datetime.date.today() + performance = PerformanceFactory() + assert str(day) in str(performance) + + start_time = datetime.datetime.now().time() + performance_with_start_time = PerformanceFactory(start_time=start_time) + assert str(start_time) in str(performance_with_start_time) + class TestVenues: def test_factory(self): factory = VenueFactory() assert isinstance(factory, Venue) assert 'venue' in factory.romanized_name + + def test_get_absolute_url(self, client): + factory = VenueFactory() + response = client.get(factory.get_absolute_url()) + assert response.status_code == 200 + + def test_string_representation(self): + factory = VenueFactory() + assert str(factory) == factory.romanized_name
dfefb21bd170bf253f0d07dba2931de82ed0b1e8
tests/conftest.py
tests/conftest.py
import os.path import pytest def pytest_collection_modifyitems(items): for item in items: module_path = os.path.relpath( item.module.__file__, os.path.commonprefix([__file__, item.module.__file__]), ) module_root_dir = module_path.split(os.sep)[0] if module_root_dir == "functional": item.add_marker(pytest.mark.functional) elif module_root_dir == "unit": item.add_marker(pytest.mark.unit) else: raise RuntimeError( "Unknown test type (filename = {0})".format(module_path) )
import os.path import pytest @pytest.yield_fixture def tmpdir(request, tmpdir): try: yield tmpdir finally: tmpdir.remove(ignore_errors=True) def pytest_collection_modifyitems(items): for item in items: module_path = os.path.relpath( item.module.__file__, os.path.commonprefix([__file__, item.module.__file__]), ) module_root_dir = module_path.split(os.sep)[0] if module_root_dir == "functional": item.add_marker(pytest.mark.functional) elif module_root_dir == "unit": item.add_marker(pytest.mark.unit) else: raise RuntimeError( "Unknown test type (filename = {0})".format(module_path) )
Fix tmpdir fixture to remove all the stuff (normally it keeps the last 3, which is a lot).
Fix tmpdir fixture to remove all the stuff (normally it keeps the last 3, which is a lot).
Python
mit
ionelmc/virtualenv,ionelmc/virtualenv,ionelmc/virtualenv
--- +++ @@ -1,6 +1,14 @@ import os.path import pytest + + +@pytest.yield_fixture +def tmpdir(request, tmpdir): + try: + yield tmpdir + finally: + tmpdir.remove(ignore_errors=True) def pytest_collection_modifyitems(items):
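An illustrative variant of the same override, assuming a pytest version where the plain fixture decorator accepts generator fixtures (pytest.yield_fixture was later deprecated in its favor); the override still requests the built-in tmpdir by name and removes it eagerly on teardown.

import pytest

@pytest.fixture
def tmpdir(tmpdir):
    try:
        yield tmpdir  # hand the built-in temporary directory to the test
    finally:
        tmpdir.remove(ignore_errors=True)  # delete it right away instead of keeping the last few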
34754e91a398e35f0e7d16bbd591c5b4a496536a
src/commons.py
src/commons.py
from contextlib import contextmanager from sympy import Eq, Lambda, Function, Indexed def define(let, be, **kwds): return Eq(let, be, **kwds) @contextmanager def lift_to_Lambda(eq, return_eq=False, lhs_handler=lambda args: []): lhs = eq.lhs args = (lhs.args[1:] if isinstance(lhs, Indexed) else lhs.args if isinstance(lhs, Function) else lhs_handler(lhs)) yield Lambda(args, eq if return_eq else eq.rhs)
from contextlib import contextmanager, redirect_stdout from sympy import Eq, Lambda, Function, Indexed, latex def define(let, be, **kwds): return Eq(let, be, **kwds) @contextmanager def lift_to_Lambda(eq, return_eq=False, lhs_handler=lambda args: []): lhs = eq.lhs args = (lhs.args[1:] if isinstance(lhs, Indexed) else lhs.args if isinstance(lhs, Function) else lhs_handler(lhs)) yield Lambda(args, eq if return_eq else eq.rhs) def save_latex_repr(term, filename): with open(filename, 'w') as f: with redirect_stdout(f): print('.. math::\n\n\t{}'.format(latex(term)))
Add a definition that saves the latex representation of a term to a file by capturing `print` stdout.
Add a definition that saves the latex representation of a term to a file by capturing `print` stdout.
Python
mit
massimo-nocentini/simulation-methods,massimo-nocentini/simulation-methods
--- +++ @@ -1,7 +1,6 @@ +from contextlib import contextmanager, redirect_stdout -from contextlib import contextmanager - -from sympy import Eq, Lambda, Function, Indexed +from sympy import Eq, Lambda, Function, Indexed, latex def define(let, be, **kwds): return Eq(let, be, **kwds) @@ -13,3 +12,9 @@ lhs.args if isinstance(lhs, Function) else lhs_handler(lhs)) yield Lambda(args, eq if return_eq else eq.rhs) + +def save_latex_repr(term, filename): + with open(filename, 'w') as f: + with redirect_stdout(f): + print('.. math::\n\n\t{}'.format(latex(term))) +
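A small self-contained sketch of the print-capturing idiom save_latex_repr relies on (contextlib.redirect_stdout, available since Python 3.4); the in-memory buffer stands in for the output file.

import io
from contextlib import redirect_stdout

buf = io.StringIO()
with redirect_stdout(buf):
    print('.. math::\n\n\tx^2')   # anything printed here goes into buf, not the console
assert buf.getvalue().startswith('.. math::')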
c33e9cbf0f08c4ec93c9aeea899d93ac257b9bea
sysrev/tests.py
sysrev/tests.py
from django.test import TestCase from api import PubMed from sysrev.models import Review class PubmedQueryTestCase(TestCase): def test_query(self): result = PubMed.query("smoking") self.assertGreater(result[u'Count'], 25000, "Expected >25000 results for smoking") def test_paper(self): result = PubMed.read_papers_from_ids([25929677]) self.assertEquals(len(result[0][u'MedlineCitation'][u'Article'][u'AuthorList']), 7, "25929677 should have 7 authors") def test_create_papers_from_ids(self): review = Review.objects.get_or_create(title="Investigating the effects of acupuncture on children with ADHD")[0] result = PubMed.create_papers_from_ids([26502548], review)[0] print result.title self.assertEquals("[A Meta-analysis on Acupuncture Treatment of Attention Deficit/Hyperactivity Disorder].", result.title)
from django.test import TestCase from api import PubMed from sysrev.models import Review class PubmedQueryTestCase(TestCase): def test_query(self): result = PubMed.query("smoking") self.assertGreater(result[u'Count'], 25000, "Expected >25000 results for smoking") def test_paper(self): result = PubMed.read_papers_from_ids([25929677]) self.assertEquals(len(result[0][u'MedlineCitation'][u'Article'][u'AuthorList']), 7, "25929677 should have 7 authors") def test_create_papers_from_ids(self): review = Review.objects.get_or_create(title="Investigating the effects of acupuncture on children with ADHD")[0] result = PubMed.create_papers_from_ids([26502548], review)[0] print result.title self.assertEquals("[A Meta-analysis on Acupuncture Treatment of Attention Deficit/Hyperactivity Disorder].", result.title) def test_adhd_query(self): query = """(adhd OR adhs OR addh) AND (child OR adolescent) AND acupuncture""" result = PubMed.get_ids_from_query(query) self.assertGreater(len(result), 0, "Expected some results for ADHD query")
Add (failing) test for ADHD query. Returns results on site, not through API. Needs investigation
Add (failing) test for ADHD query. Returns results on site, not through API. Needs investigation
Python
mit
iliawnek/SystematicReview,iliawnek/SystematicReview,iliawnek/SystematicReview,iliawnek/SystematicReview
--- +++ @@ -22,3 +22,8 @@ print result.title self.assertEquals("[A Meta-analysis on Acupuncture Treatment of Attention Deficit/Hyperactivity Disorder].", result.title) + + def test_adhd_query(self): + query = """(adhd OR adhs OR addh) AND (child OR adolescent) AND acupuncture""" + result = PubMed.get_ids_from_query(query) + self.assertGreater(len(result), 0, "Expected some results for ADHD query")
8922e6ff0570fc3b073746b01e6ee1d963315448
udger/__init__.py
udger/__init__.py
from .parser import Udger __version__ = '4.0.1' __all__ = ['Udger']
from .parser import Udger __version__ = '4.0.2' __all__ = ['Udger']
Allow MutableMapping for python >= 3.10
Allow MutableMapping for python >= 3.10
Python
mit
udger/udger-python
--- +++ @@ -1,5 +1,5 @@ from .parser import Udger -__version__ = '4.0.1' +__version__ = '4.0.2' __all__ = ['Udger']
f2396815912b1698c4969d86d1f4176122489222
taemin/plugin.py
taemin/plugin.py
""" Base class for all taemin plugin """ class TaeminPlugin(object): helper = {} def __init__(self, taemin): self.taemin = taemin def start(self): pass def stop(self): pass def on_join(self, connection): pass def on_pubmsg(self, msg): pass def on_privmsg(self, msg): pass def on_quit(self, user): pass def on_part(self, connection): pass def privmsg(self, chan, msg): """ Send a message to a chan or an user """ if not isinstance(msg, str): msg = msg.decode("utf-8") if chan in self.taemin.chans: self.taemin.create_pub_message(self.taemin.name, chan, msg) else: self.taemin.create_priv_message(self.taemin.name, chan, msg) self.taemin.connection.privmsg(chan, msg)
""" Base class for all taemin plugin """ import itertools MAX_MSG_LENGTH = 400 class TaeminPlugin(object): helper = {} def __init__(self, taemin): self.taemin = taemin def start(self): pass def stop(self): pass def on_join(self, connection): pass def on_pubmsg(self, msg): pass def on_privmsg(self, msg): pass def on_quit(self, user): pass def on_part(self, connection): pass def privmsg(self, chan, msg): """ Send a message to a chan or an user """ if not msg: return if not isinstance(msg, str): msg = msg.decode("utf-8") for m in ("".join(itertools.takewhile(lambda x: x, a)) for a in itertools.zip_longest(*([iter(msg)] * MAX_MSG_LENGTH))): print(m) if chan in self.taemin.chans: self.taemin.create_pub_message(self.taemin.name, chan, m) else: self.taemin.create_priv_message(self.taemin.name, chan, m) self.taemin.connection.privmsg(chan, m)
Split privmsg if they are too long
Split privmsg if they are too long
Python
mit
ningirsu/taemin,ningirsu/taemin
--- +++ @@ -1,4 +1,8 @@ """ Base class for all taemin plugin """ + +import itertools + +MAX_MSG_LENGTH = 400 class TaeminPlugin(object): helper = {} @@ -30,12 +34,17 @@ def privmsg(self, chan, msg): """ Send a message to a chan or an user """ + if not msg: + return + if not isinstance(msg, str): msg = msg.decode("utf-8") - if chan in self.taemin.chans: - self.taemin.create_pub_message(self.taemin.name, chan, msg) - else: - self.taemin.create_priv_message(self.taemin.name, chan, msg) + for m in ("".join(itertools.takewhile(lambda x: x, a)) for a in itertools.zip_longest(*([iter(msg)] * MAX_MSG_LENGTH))): + print(m) + if chan in self.taemin.chans: + self.taemin.create_pub_message(self.taemin.name, chan, m) + else: + self.taemin.create_priv_message(self.taemin.name, chan, m) - self.taemin.connection.privmsg(chan, msg) + self.taemin.connection.privmsg(chan, m)
6c93bfc862ceb598747531dc5aef4f9445162e68
src/config/api-server/setup.py
src/config/api-server/setup.py
# # Copyright (c) 2013 Juniper Networks, Inc. All rights reserved. # from setuptools import setup setup( name='vnc_cfg_api_server', version='0.1dev', packages=[ 'vnc_cfg_api_server', 'vnc_cfg_api_server.gen', ], package_data={'': ['*.html', '*.css', '*.xml']}, zip_safe=False, long_description="VNC Configuration API Server Implementation", install_requires=[ 'lxml>=2.3.2', 'gevent==0.13.6', 'geventhttpclient==1.0a', 'pycassa>=1.7.2', 'netaddr>=0.7.5', 'bitarray==0.8.0', 'psutil==0.4.1', ], entry_points = { 'console_scripts' : [ 'contrail-api = vnc_cfg_api_server.vnc_cfg_api_server:server_main', ], }, )
# # Copyright (c) 2013 Juniper Networks, Inc. All rights reserved. # from setuptools import setup setup( name='vnc_cfg_api_server', version='0.1dev', packages=[ 'vnc_cfg_api_server', 'vnc_cfg_api_server.gen', ], package_data={'': ['*.html', '*.css', '*.xml']}, zip_safe=False, long_description="VNC Configuration API Server Implementation", install_requires=[ 'lxml>=2.3.2', 'gevent==0.13.6', 'geventhttpclient>=1.0a', 'pycassa>=1.7.2', 'netaddr>=0.7.5', 'bitarray==0.8.0', 'psutil==0.4.1', ], entry_points = { 'console_scripts' : [ 'contrail-api = vnc_cfg_api_server.vnc_cfg_api_server:server_main', ], }, )
Remove strong dependency on geventhttpclient>=1.0a
[geventhttpclient] Remove strong dependency on geventhttpclient>=1.0a We can remove the strong dependency on 1.0a; Deepinder Setia managed this fix in https://bugs.launchpad.net/opencontrail/+bug/1306715 Refs: http://lists.opencontrail.org/pipermail/dev_lists.opencontrail.org/2014-April/000930.html Already merged in Juniper/contrail-third-party#16 and, for packaging, Juniper/contrail-packages#31
Python
apache-2.0
reiaaoyama/contrail-controller,nischalsheth/contrail-controller,rombie/contrail-controller,cloudwatt/contrail-controller,sajuptpm/contrail-controller,tcpcloud/contrail-controller,cloudwatt/contrail-controller,Juniper/contrail-dev-controller,DreamLab/contrail-controller,DreamLab/contrail-controller,srajag/contrail-controller,DreamLab/contrail-controller,eonpatapon/contrail-controller,eonpatapon/contrail-controller,vmahuli/contrail-controller,nischalsheth/contrail-controller,codilime/contrail-controller,codilime/contrail-controller,srajag/contrail-controller,rombie/contrail-controller,vmahuli/contrail-controller,sajuptpm/contrail-controller,nischalsheth/contrail-controller,srajag/contrail-controller,codilime/contrail-controller,facetothefate/contrail-controller,varunarya10/contrail-controller,vpramo/contrail-controller,numansiddique/contrail-controller,sajuptpm/contrail-controller,tcpcloud/contrail-controller,hthompson6/contrail-controller,vpramo/contrail-controller,eonpatapon/contrail-controller,varunarya10/contrail-controller,hthompson6/contrail-controller,vpramo/contrail-controller,nischalsheth/contrail-controller,tcpcloud/contrail-controller,Juniper/contrail-dev-controller,sajuptpm/contrail-controller,hthompson6/contrail-controller,eonpatapon/contrail-controller,eonpatapon/contrail-controller,srajag/contrail-controller,rombie/contrail-controller,sajuptpm/contrail-controller,varunarya10/contrail-controller,rombie/contrail-controller,numansiddique/contrail-controller,reiaaoyama/contrail-controller,DreamLab/contrail-controller,eonpatapon/contrail-controller,rombie/contrail-controller,hthompson6/contrail-controller,tcpcloud/contrail-controller,vmahuli/contrail-controller,varunarya10/contrail-controller,codilime/contrail-controller,sajuptpm/contrail-controller,DreamLab/contrail-controller,vmahuli/contrail-controller,hthompson6/contrail-controller,vpramo/contrail-controller,nischalsheth/contrail-controller,numansiddique/contrail-controller,cloudwatt/contrail-controller,srajag/contrail-controller,reiaaoyama/contrail-controller,tcpcloud/contrail-controller,varunarya10/contrail-controller,Juniper/contrail-dev-controller,nischalsheth/contrail-controller,facetothefate/contrail-controller,facetothefate/contrail-controller,cloudwatt/contrail-controller,vpramo/contrail-controller,numansiddique/contrail-controller,facetothefate/contrail-controller,eonpatapon/contrail-controller,codilime/contrail-controller,codilime/contrail-controller,facetothefate/contrail-controller,rombie/contrail-controller,numansiddique/contrail-controller,Juniper/contrail-dev-controller,nischalsheth/contrail-controller,cloudwatt/contrail-controller,reiaaoyama/contrail-controller,vmahuli/contrail-controller,Juniper/contrail-dev-controller,nischalsheth/contrail-controller,rombie/contrail-controller,tcpcloud/contrail-controller,reiaaoyama/contrail-controller
--- +++ @@ -16,7 +16,7 @@ install_requires=[ 'lxml>=2.3.2', 'gevent==0.13.6', - 'geventhttpclient==1.0a', + 'geventhttpclient>=1.0a', 'pycassa>=1.7.2', 'netaddr>=0.7.5', 'bitarray==0.8.0',
c52a39b8a89e1fc8bfe607d2bfa92970d7ae17ad
evelink/parsing/assets.py
evelink/parsing/assets.py
from evelink import api from evelink import constants def parse_assets(api_result): def handle_rowset(rowset, parent_location): results = [] for row in rowset.findall('row'): item = {'id': int(row.attrib['itemID']), 'item_type_id': int(row.attrib['typeID']), 'location_id': int(row.attrib.get('locationID', parent_location)), 'location_flag': int(row.attrib['flag']), 'quantity': int(row.attrib['quantity']), 'packaged': row.attrib['singleton'] == '0', } contents = row.find('rowset') if contents: item['contents'] = handle_rowset(contents, item['location_id']) results.append(item) return results result_list = handle_rowset(api_result.find('rowset'), None) # For convenience, key the result by top-level location ID. result_dict = {} for item in result_list: location = item['location_id'] result_dict.setdefault(location, {}) result_dict[location]['location_id'] = location result_dict[location].setdefault('contents', []) result_dict[location]['contents'].append(item) return result_dict
from evelink import api from evelink import constants def parse_assets(api_result): def handle_rowset(rowset, parent_location): results = [] for row in rowset.findall('row'): item = {'id': int(row.attrib['itemID']), 'item_type_id': int(row.attrib['typeID']), 'location_id': int(row.attrib.get('locationID', parent_location)), 'location_flag': int(row.attrib['flag']), 'quantity': int(row.attrib['quantity']), 'packaged': row.attrib['singleton'] == '0', } contents = row.find('rowset') if contents is not None: item['contents'] = handle_rowset(contents, item['location_id']) results.append(item) return results result_list = handle_rowset(api_result.find('rowset'), None) # For convenience, key the result by top-level location ID. result_dict = {} for item in result_list: location = item['location_id'] result_dict.setdefault(location, {}) result_dict[location]['location_id'] = location result_dict[location].setdefault('contents', []) result_dict[location]['contents'].append(item) return result_dict
Fix test involving Element object
Fix test involving Element object
Python
mit
zigdon/evelink,FashtimeDotCom/evelink,Morloth1274/EVE-Online-POCO-manager,ayust/evelink,bastianh/evelink
--- +++ @@ -13,7 +13,7 @@ 'packaged': row.attrib['singleton'] == '0', } contents = row.find('rowset') - if contents: + if contents is not None: item['contents'] = handle_rowset(contents, item['location_id']) results.append(item) return results
07999d1f24acbbfde50fe94897054e7c8df7fea1
api/jsonstore.py
api/jsonstore.py
import json import os import tempfile def store(data, directory="/var/www/luke/wikipedia/graphs/"): try: json.loads(data) except ValueError: return "not-json" tf = tempfile.mkstemp(prefix="", dir=directory)[1] with open(tf, "w") as f: f.write(data) return tf if __name__ == "__main__": print(store('{}'))
import json import os import tempfile def store(data, directory="/var/www/luke/wikipedia/graphs/"): try: json.loads(data) except ValueError: return "not-json" tf = tempfile.mkstemp(prefix="", dir=directory)[1] with open(tf, "w") as f: f.write(data) return os.path.split(tf)[1] if __name__ == "__main__": print(store('{}'))
Tweak JSON api return value to be friendlier
Tweak JSON api return value to be friendlier
Python
mit
controversial/wikipedia-map,controversial/wikipedia-map,controversial/wikipedia-map
--- +++ @@ -13,7 +13,7 @@ with open(tf, "w") as f: f.write(data) - return tf + return os.path.split(tf)[1] if __name__ == "__main__": print(store('{}'))
56e3225329d2f7fae37139ec1d6727784718d339
test_portend.py
test_portend.py
import socket import pytest import portend def socket_infos(): """ Generate addr infos for connections to localhost """ host = None port = portend.find_available_local_port() family = socket.AF_UNSPEC socktype = socket.SOCK_STREAM return socket.getaddrinfo(host, port, family, socktype) def id_for_info(info): af, = info[:1] return str(af) def build_addr_infos(): params = list(socket_infos()) ids = list(map(id_for_info, params)) return locals() @pytest.fixture(**build_addr_infos()) def listening_addr(request): af, socktype, proto, canonname, sa = request.param sock = socket.socket(af, socktype, proto) sock.bind(sa) sock.listen(5) try: yield sa finally: sock.close() @pytest.fixture(**build_addr_infos()) def nonlistening_addr(request): af, socktype, proto, canonname, sa = request.param return sa class TestCheckPort: def test_check_port_listening(self, listening_addr): with pytest.raises(IOError): portend._check_port(*listening_addr[:2]) def test_check_port_nonlistening(self, nonlistening_addr): portend._check_port(*nonlistening_addr[:2])
import socket import pytest import portend def socket_infos(): """ Generate addr infos for connections to localhost """ host = None # all available interfaces port = portend.find_available_local_port() family = socket.AF_UNSPEC socktype = socket.SOCK_STREAM return socket.getaddrinfo(host, port, family, socktype) def id_for_info(info): af, = info[:1] return str(af) def build_addr_infos(): params = list(socket_infos()) ids = list(map(id_for_info, params)) return locals() @pytest.fixture(**build_addr_infos()) def listening_addr(request): af, socktype, proto, canonname, sa = request.param sock = socket.socket(af, socktype, proto) sock.bind(sa) sock.listen(5) try: yield sa finally: sock.close() @pytest.fixture(**build_addr_infos()) def nonlistening_addr(request): af, socktype, proto, canonname, sa = request.param return sa class TestCheckPort: def test_check_port_listening(self, listening_addr): with pytest.raises(IOError): portend._check_port(*listening_addr[:2]) def test_check_port_nonlistening(self, nonlistening_addr): portend._check_port(*nonlistening_addr[:2])
Add indication of what None means
Add indication of what None means
Python
mit
jaraco/portend
--- +++ @@ -9,7 +9,7 @@ """ Generate addr infos for connections to localhost """ - host = None + host = None # all available interfaces port = portend.find_available_local_port() family = socket.AF_UNSPEC socktype = socket.SOCK_STREAM
6bb3321c0a2e4221d08f39e46e1d21220361cdc6
shuup_tests/api/conftest.py
shuup_tests/api/conftest.py
# -*- coding: utf-8 -*- # This file is part of Shuup. # # Copyright (c) 2012-2018, Shuup Inc. All rights reserved. # # This source code is licensed under the OSL-3.0 license found in the # LICENSE file in the root directory of this source tree. from django.conf import settings def pytest_runtest_setup(item): settings.INSTALLED_APPS = [app for app in settings.INSTALLED_APPS if "shuup.front" not in app]
# -*- coding: utf-8 -*- # This file is part of Shuup. # # Copyright (c) 2012-2018, Shuup Inc. All rights reserved. # # This source code is licensed under the OSL-3.0 license found in the # LICENSE file in the root directory of this source tree. from django.conf import settings ORIGINAL_SETTINGS = [] def pytest_runtest_setup(item): global ORIGINAL_SETTINGS ORIGINAL_SETTINGS = [item for item in settings.INSTALLED_APPS] settings.INSTALLED_APPS = [app for app in settings.INSTALLED_APPS if "shuup.front" not in app] def pytest_runtest_teardown(item): settings.INSTALLED_APPS = [item for item in ORIGINAL_SETTINGS]
Fix unit test by adding back front apps after API tests
Fix unit test by adding back front apps after API tests
Python
agpl-3.0
shoopio/shoop,shoopio/shoop,shoopio/shoop
--- +++ @@ -8,5 +8,14 @@ from django.conf import settings +ORIGINAL_SETTINGS = [] + + def pytest_runtest_setup(item): + global ORIGINAL_SETTINGS + ORIGINAL_SETTINGS = [item for item in settings.INSTALLED_APPS] settings.INSTALLED_APPS = [app for app in settings.INSTALLED_APPS if "shuup.front" not in app] + + +def pytest_runtest_teardown(item): + settings.INSTALLED_APPS = [item for item in ORIGINAL_SETTINGS]
de4df4feb7f38577bb3db8852610398ecc238870
stella/llvm.py
stella/llvm.py
from llvm import * from llvm.core import * from llvm.ee import * import logging tp_int = Type.int() tp_float = Type.float() def py_type_to_llvm(tp): if tp == int: return tp_int elif tp == float: return tp_float else: raise TypeError("Unknown type " + tp) def get_generic_value(tp, val): if type(val) == int: return GenericValue.int(tp, val) elif type(val) == float: return GenericValue.real(tp, val) def llvm_to_py(tp, val): if tp == int: return val.as_int() elif tp == float: return val.as_real(py_type_to_llvm(tp)) else: raise Exception ("Unknown type {0}".format(tp))
from llvm import * from llvm.core import * from llvm.ee import * import logging tp_int = Type.int(64) tp_float = Type.float() def py_type_to_llvm(tp): if tp == int: return tp_int elif tp == float: return tp_float else: raise TypeError("Unknown type " + tp) def get_generic_value(tp, val): if type(val) == int: return GenericValue.int(tp, val) elif type(val) == float: return GenericValue.real(tp, val) def llvm_to_py(tp, val): if tp == int: return val.as_int_signed() elif tp == float: return val.as_real(py_type_to_llvm(tp)) else: raise Exception ("Unknown type {0}".format(tp))
Change types to get the tests to complete.
Change types to get the tests to complete.
Python
apache-2.0
squisher/stella,squisher/stella,squisher/stella,squisher/stella
--- +++ @@ -4,7 +4,7 @@ import logging -tp_int = Type.int() +tp_int = Type.int(64) tp_float = Type.float() def py_type_to_llvm(tp): if tp == int: @@ -22,7 +22,7 @@ def llvm_to_py(tp, val): if tp == int: - return val.as_int() + return val.as_int_signed() elif tp == float: return val.as_real(py_type_to_llvm(tp)) else:
46ea832db6db8a98c5b9f5a58a37bfed16a27a10
app/actions/peptable/base.py
app/actions/peptable/base.py
from app.dataformats import peptable as peptabledata from app.dataformats import mzidtsv as psmtsvdata def add_peptide(allpeps, psm, scorecol=False, fncol=None, new=False, track_psms=True): peptide = {'score': psm[scorecol], 'line': psm, 'psms': [] } if track_psms: if not new: peptide['psms'] = allpeps[psm[peptabledata.HEADER_PEPTIDE]]['psms'] peptide['psms'].append('{0}_{1}'.format(psm[fncol], psm[psmtsvdata.HEADER_SCANNR])) allpeps[psm[peptabledata.HEADER_PEPTIDE]] = peptide def evaluate_peptide(peptides, psm, key, higherbetter, scorecol, fncol=None, track_psms=True): try: existing_score = peptides[key]['score'] except KeyError: add_peptide(peptides, psm, scorecol, fncol, True, track_psms) else: if higherbetter and psm[scorecol] > existing_score: add_peptide(peptides, psm, scorecol, fncol, track_psms=track_psms) elif not higherbetter and psm[scorecol] < existing_score: add_peptide(peptides, psm, scorecol, fncol, track_psms=track_psms) return peptides
from app.dataformats import mzidtsv as psmtsvdata def add_peptide(allpeps, psm, key, scorecol=False, fncol=None, new=False, track_psms=True): peptide = {'score': psm[scorecol], 'line': psm, 'psms': [] } if track_psms: if not new: peptide['psms'] = allpeps[key]['psms'] peptide['psms'].append('{0}_{1}'.format(psm[fncol], psm[psmtsvdata.HEADER_SCANNR])) allpeps[key] = peptide def evaluate_peptide(peptides, psm, key, higherbetter, scorecol, fncol=None, track_psms=True): try: existing_score = peptides[key]['score'] except KeyError: add_peptide(peptides, psm, key, scorecol, fncol, True, track_psms) else: if higherbetter and psm[scorecol] > existing_score: add_peptide(peptides, psm, key, scorecol, fncol, track_psms=track_psms) elif not higherbetter and psm[scorecol] < existing_score: add_peptide(peptides, psm, key, scorecol, fncol, track_psms=track_psms) return peptides
Use input param key instead of using HEADER field
Use input param key instead of using HEADER field
Python
mit
glormph/msstitch
--- +++ @@ -1,8 +1,7 @@ -from app.dataformats import peptable as peptabledata from app.dataformats import mzidtsv as psmtsvdata -def add_peptide(allpeps, psm, scorecol=False, fncol=None, new=False, +def add_peptide(allpeps, psm, key, scorecol=False, fncol=None, new=False, track_psms=True): peptide = {'score': psm[scorecol], 'line': psm, @@ -10,10 +9,10 @@ } if track_psms: if not new: - peptide['psms'] = allpeps[psm[peptabledata.HEADER_PEPTIDE]]['psms'] + peptide['psms'] = allpeps[key]['psms'] peptide['psms'].append('{0}_{1}'.format(psm[fncol], psm[psmtsvdata.HEADER_SCANNR])) - allpeps[psm[peptabledata.HEADER_PEPTIDE]] = peptide + allpeps[key] = peptide def evaluate_peptide(peptides, psm, key, higherbetter, scorecol, fncol=None, @@ -21,10 +20,12 @@ try: existing_score = peptides[key]['score'] except KeyError: - add_peptide(peptides, psm, scorecol, fncol, True, track_psms) + add_peptide(peptides, psm, key, scorecol, fncol, True, track_psms) else: if higherbetter and psm[scorecol] > existing_score: - add_peptide(peptides, psm, scorecol, fncol, track_psms=track_psms) + add_peptide(peptides, psm, key, scorecol, fncol, + track_psms=track_psms) elif not higherbetter and psm[scorecol] < existing_score: - add_peptide(peptides, psm, scorecol, fncol, track_psms=track_psms) + add_peptide(peptides, psm, key, scorecol, fncol, + track_psms=track_psms) return peptides
02f7edc042b46f091663fc12451aa043106f4f38
correctiv_justizgelder/urls.py
correctiv_justizgelder/urls.py
from functools import wraps from django.conf.urls import patterns, url from django.utils.translation import ugettext_lazy as _ from django.views.decorators.cache import cache_page from .views import OrganisationSearchView, OrganisationDetail CACHE_TIME = 15 * 60 def c(view): @wraps(view) def cache_page_anonymous(request, *args, **kwargs): if request.user.is_authenticated(): return view(request, *args, **kwargs) return cache_page(CACHE_TIME)(view)(request, *args, **kwargs) return cache_page_anonymous urlpatterns = patterns('', url(r'^$', c(OrganisationSearchView.as_view()), name='search'), url(_(r'^recipient/(?P<slug>[^/]+)/$'), c(OrganisationDetail.as_view()), name='organisation_detail'), )
from functools import wraps from django.conf.urls import url from django.utils.translation import ugettext_lazy as _ from django.views.decorators.cache import cache_page from .views import OrganisationSearchView, OrganisationDetail CACHE_TIME = 15 * 60 def c(view): @wraps(view) def cache_page_anonymous(request, *args, **kwargs): if request.user.is_authenticated(): return view(request, *args, **kwargs) return cache_page(CACHE_TIME)(view)(request, *args, **kwargs) return cache_page_anonymous urlpatterns = [ url(r'^$', c(OrganisationSearchView.as_view()), name='search'), url(_(r'^recipient/(?P<slug>[^/]+)/$'), c(OrganisationDetail.as_view()), name='organisation_detail'), ]
Update urlpatterns and remove old patterns pattern
Update urlpatterns and remove old patterns pattern
Python
mit
correctiv/correctiv-justizgelder,correctiv/correctiv-justizgelder
--- +++ @@ -1,6 +1,6 @@ from functools import wraps -from django.conf.urls import patterns, url +from django.conf.urls import url from django.utils.translation import ugettext_lazy as _ from django.views.decorators.cache import cache_page @@ -18,9 +18,9 @@ return cache_page_anonymous -urlpatterns = patterns('', +urlpatterns = [ url(r'^$', c(OrganisationSearchView.as_view()), name='search'), url(_(r'^recipient/(?P<slug>[^/]+)/$'), c(OrganisationDetail.as_view()), name='organisation_detail'), -) +]
db9703ef5cb277e4556d94503c581cbdf46a8419
api/addons/serializers.py
api/addons/serializers.py
from rest_framework import serializers as ser from api.base.serializers import JSONAPISerializer, LinksField from api.base.utils import absolute_reverse class NodeAddonFolderSerializer(JSONAPISerializer): """ Overrides AddonSettingsSerializer to return node-specific fields """ class Meta: type_ = 'node_addon_folders' id = ser.CharField(source='provider', read_only=True) kind = ser.CharField(default='folder', read_only=True) name = ser.CharField(read_only=True) path = ser.CharField(read_only=True) folder_id = ser.CharField(read_only=True) links = LinksField({ 'children': 'get_child_folders' }) def get_child_folders(self, obj): node_id = self.context['request'].parser_context['kwargs']['node_id'] addon_name = self.context['request'].parser_context['kwargs']['provider'] return absolute_reverse( 'nodes:node-addon-folders', kwargs={ 'node_id': node_id, 'provider': addon_name }, query_kwargs={ 'path': obj['path'], 'folder_id': obj['folder_id'] } )
from rest_framework import serializers as ser from api.base.serializers import JSONAPISerializer, LinksField from api.base.utils import absolute_reverse class NodeAddonFolderSerializer(JSONAPISerializer): class Meta: type_ = 'node_addon_folders' id = ser.CharField(source='provider', read_only=True) kind = ser.CharField(default='folder', read_only=True) name = ser.CharField(read_only=True) path = ser.CharField(read_only=True) folder_id = ser.CharField(read_only=True) links = LinksField({ 'children': 'get_child_folders' }) def get_child_folders(self, obj): node_id = self.context['request'].parser_context['kwargs']['node_id'] addon_name = self.context['request'].parser_context['kwargs']['provider'] return absolute_reverse( 'nodes:node-addon-folders', kwargs={ 'node_id': node_id, 'provider': addon_name }, query_kwargs={ 'path': obj['path'], 'folder_id': obj['folder_id'] } )
Remove other docstring of lies
Remove other docstring of lies
Python
apache-2.0
chennan47/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,HalcyonChimera/osf.io,alexschiller/osf.io,saradbowman/osf.io,chrisseto/osf.io,cwisecarver/osf.io,acshi/osf.io,monikagrabowska/osf.io,Johnetordoff/osf.io,Nesiehr/osf.io,mluo613/osf.io,adlius/osf.io,sloria/osf.io,icereval/osf.io,adlius/osf.io,SSJohns/osf.io,laurenrevere/osf.io,chennan47/osf.io,adlius/osf.io,DanielSBrown/osf.io,caneruguz/osf.io,monikagrabowska/osf.io,saradbowman/osf.io,cslzchen/osf.io,caneruguz/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,hmoco/osf.io,samchrisinger/osf.io,DanielSBrown/osf.io,cwisecarver/osf.io,erinspace/osf.io,emetsger/osf.io,binoculars/osf.io,HalcyonChimera/osf.io,caneruguz/osf.io,rdhyee/osf.io,SSJohns/osf.io,hmoco/osf.io,aaxelb/osf.io,mluke93/osf.io,emetsger/osf.io,mfraezz/osf.io,hmoco/osf.io,CenterForOpenScience/osf.io,chennan47/osf.io,wearpants/osf.io,wearpants/osf.io,samchrisinger/osf.io,laurenrevere/osf.io,sloria/osf.io,DanielSBrown/osf.io,Nesiehr/osf.io,cwisecarver/osf.io,chrisseto/osf.io,Johnetordoff/osf.io,crcresearch/osf.io,monikagrabowska/osf.io,amyshi188/osf.io,alexschiller/osf.io,felliott/osf.io,HalcyonChimera/osf.io,icereval/osf.io,felliott/osf.io,brianjgeiger/osf.io,Nesiehr/osf.io,leb2dg/osf.io,baylee-d/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,amyshi188/osf.io,amyshi188/osf.io,mattclark/osf.io,caseyrollins/osf.io,mluo613/osf.io,samchrisinger/osf.io,rdhyee/osf.io,TomBaxter/osf.io,Johnetordoff/osf.io,felliott/osf.io,alexschiller/osf.io,rdhyee/osf.io,acshi/osf.io,acshi/osf.io,mattclark/osf.io,acshi/osf.io,sloria/osf.io,aaxelb/osf.io,caseyrollins/osf.io,leb2dg/osf.io,Nesiehr/osf.io,hmoco/osf.io,crcresearch/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,mattclark/osf.io,mfraezz/osf.io,felliott/osf.io,laurenrevere/osf.io,caneruguz/osf.io,amyshi188/osf.io,baylee-d/osf.io,aaxelb/osf.io,mluo613/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,SSJohns/osf.io,alexschiller/osf.io,baylee-d/osf.io,rdhyee/osf.io,binoculars/osf.io,wearpants/osf.io,caseyrollins/osf.io,TomBaxter/osf.io,leb2dg/osf.io,emetsger/osf.io,icereval/osf.io,Johnetordoff/osf.io,TomBaxter/osf.io,mfraezz/osf.io,adlius/osf.io,crcresearch/osf.io,cslzchen/osf.io,erinspace/osf.io,samchrisinger/osf.io,binoculars/osf.io,chrisseto/osf.io,mluo613/osf.io,mluke93/osf.io,alexschiller/osf.io,monikagrabowska/osf.io,mfraezz/osf.io,leb2dg/osf.io,mluke93/osf.io,acshi/osf.io,wearpants/osf.io,monikagrabowska/osf.io,HalcyonChimera/osf.io,mluke93/osf.io,cwisecarver/osf.io,pattisdr/osf.io,DanielSBrown/osf.io,mluo613/osf.io,erinspace/osf.io,chrisseto/osf.io,SSJohns/osf.io,emetsger/osf.io
--- +++ @@ -3,9 +3,6 @@ from api.base.utils import absolute_reverse class NodeAddonFolderSerializer(JSONAPISerializer): - """ - Overrides AddonSettingsSerializer to return node-specific fields - """ class Meta: type_ = 'node_addon_folders'
f64582d7b254e5b4861a0d06ea40f9e608e3cc30
modules/urlparser/twitter.py
modules/urlparser/twitter.py
import re import urllib2 import traceback try: import simplejson as json except ImportError: import json class Twitter(object): """Checks incoming messages for Twitter urls and calls the Twitter API to retrieve the tweet. TODO: Implement commands for Twitter functionality """ pattern = re.compile("http(s|)://(www\.|)twitter.com/(?:#!/|)[^/]+/status/([0-9]+)") def __init__(self, *args, **kwargs): """Constructor.""" pass def handle(self, match, **kwargs): try: data = self.fetch (match.group(3)) return '\x02%s\x02 tweets "%s"' % (data['user']['name'], ''.join(data['text'].splitlines())) except: print "Problem fetching tweet" print traceback.print_exc() def fetch(self, status_id): """Use Twitter's REST API to fetch a status.""" api_url = 'http://api.twitter.com/1/statuses/show.json?id=%s&include_entities=true' % (status_id) req = urllib2.Request(api_url) response = urllib2.urlopen(req) page = response.read() decoded = json.loads(page) return decoded
import re import urllib2 import traceback try: import simplejson as json except ImportError: import json class Twitter(object): """Checks incoming messages for Twitter urls and calls the Twitter API to retrieve the tweet. TODO: Implement commands for Twitter functionality """ pattern = re.compile("http(?:s|)://(?:mobile\.|)(?:www\.|)twitter.com/(?:#!/|)[^/]+/status/([0-9]+)") def __init__(self, *args, **kwargs): """Constructor.""" pass def handle(self, match, **kwargs): try: data = self.fetch (match.group(1)) return '\x02%s\x02 tweets "%s"' % (data['user']['name'], ''.join(data['text'].splitlines())) except: print "Problem fetching tweet" print traceback.print_exc() def fetch(self, status_id): """Use Twitter's REST API to fetch a status.""" api_url = 'http://api.twitter.com/1/statuses/show.json?id=%s&include_entities=true' % (status_id) req = urllib2.Request(api_url) response = urllib2.urlopen(req) page = response.read() decoded = json.loads(page) return decoded
Change regex, adding some non-capturing groups
Change regex, adding some non-capturing groups
Python
mit
billyvg/piebot
--- +++ @@ -15,7 +15,7 @@ Implement commands for Twitter functionality """ - pattern = re.compile("http(s|)://(www\.|)twitter.com/(?:#!/|)[^/]+/status/([0-9]+)") + pattern = re.compile("http(?:s|)://(?:mobile\.|)(?:www\.|)twitter.com/(?:#!/|)[^/]+/status/([0-9]+)") def __init__(self, *args, **kwargs): """Constructor.""" @@ -23,7 +23,7 @@ def handle(self, match, **kwargs): try: - data = self.fetch (match.group(3)) + data = self.fetch (match.group(1)) return '\x02%s\x02 tweets "%s"' % (data['user']['name'], ''.join(data['text'].splitlines())) except: print "Problem fetching tweet"
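A short check of why the group index moved from 3 to 1 (the URL is made up): (?:...) matches without creating a capture group, so once the scheme and host alternatives stop capturing, the status id becomes the first and only group.

import re

old = re.compile(r"http(s|)://(www\.|)twitter.com/(?:#!/|)[^/]+/status/([0-9]+)")
new = re.compile(r"http(?:s|)://(?:mobile\.|)(?:www\.|)twitter.com/(?:#!/|)[^/]+/status/([0-9]+)")

url = 'https://mobile.twitter.com/someone/status/123456789'
assert old.match(url) is None                    # the old pattern never matched mobile links
assert new.match(url).group(1) == '123456789'    # status id is now group(1), not group(3)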