commit
stringlengths
40
40
old_file
stringlengths
4
150
new_file
stringlengths
4
150
old_contents
stringlengths
0
3.26k
new_contents
stringlengths
1
4.43k
subject
stringlengths
15
501
message
stringlengths
15
4.06k
lang
stringclasses
4 values
license
stringclasses
13 values
repos
stringlengths
5
91.5k
diff
stringlengths
0
4.35k
dea1d0d957cdfc07a561b42125f49d0c1e1c4da6
bingmaps/urlschema/location_by_point_schema.py
bingmaps/urlschema/location_by_point_schema.py
from .location_schema import Location from marshmallow import Schema, fields, post_dump from .location_api_url import LocationUrl class LocationByPointQueryString(Schema): point = fields.Str() includeEntityTypes = fields.Str() includeNeighborhood = fields.Int( default=1 ) include = fields.Str( default='ciso2' ) o = fields.Str() key = fields.Str( required=True, error_messages={'required': 'Please provide a key'} ) class Meta: fields = ('point', 'includeEntityTypes', 'includeNeighborhood', 'include', 'o', 'key') ordered = True @post_dump def build_query_string(self, data): queryValues = [] for key, value in data.items(): if not key == 'point': queryValues.append('{0}={1}'.format(key, value)) queryString = '&'.join(queryValues) return '{0}?{1}'.format(data['point'], queryString) class LocationByPointSchema(Location, Schema): queryParameters = fields.Nested( LocationByPointQueryString ) class Meta: fields = ('version', 'restApi', 'resourcePath', 'queryParameters') ordered = True class LocationByPointUrl(LocationUrl): def __init__(self, data, protocol): schema = LocationByPointSchema() super().__init__(data, protocol, schema) @property def query(self): return self._schema_dict['queryParameters']
from .location_schema import Location from marshmallow import Schema, fields, post_dump from .location_api_url import LocationUrl class LocationByPointQueryString(Schema): point = fields.Str() includeEntityTypes = fields.Str() includeNeighborhood = fields.Int( default=0 ) include = fields.Str( default='ciso2' ) o = fields.Str() key = fields.Str( required=True, error_messages={'required': 'Please provide a key'} ) class Meta: fields = ('point', 'includeEntityTypes', 'includeNeighborhood', 'include', 'o', 'key') ordered = True @post_dump def build_query_string(self, data): queryValues = [] for key, value in data.items(): if not key == 'point': queryValues.append('{0}={1}'.format(key, value)) queryString = '&'.join(queryValues) return '{0}?{1}'.format(data['point'], queryString) class LocationByPointSchema(Location, Schema): queryParameters = fields.Nested( LocationByPointQueryString ) class Meta: fields = ('version', 'restApi', 'resourcePath', 'queryParameters') ordered = True class LocationByPointUrl(LocationUrl): def __init__(self, data, protocol): schema = LocationByPointSchema() super().__init__(data, protocol, schema) @property def query(self): return self._schema_dict['queryParameters']
Change includeNeighborhood default value to 0
Change includeNeighborhood default value to 0
Python
mit
bharadwajyarlagadda/bingmaps
--- +++ @@ -7,7 +7,7 @@ point = fields.Str() includeEntityTypes = fields.Str() includeNeighborhood = fields.Int( - default=1 + default=0 ) include = fields.Str( default='ciso2'
3816063967e03bc7b0cd3b7c95e74291ced04138
tools/hash_funcs.py
tools/hash_funcs.py
""" A collection of utilities to see if new ReST files need to be automatically generated from certain files in the project (examples, datasets). """ def get_hash(f): """ Gets hexadmecimal md5 hash of a string """ import hashlib m = hashlib.md5() m.update(f) return m.hexdigest() def update_hash_dict(filehash, filename): """ Opens the pickled hash dictionary, adds an entry, and dumps it back. """ try: with open(root+'/hash_dict.pickle','r') as f: hash_dict = pickle.load(f) except: hash_dict = {} hash_dict.update({filename : filehash}) with open(os.path.join(file_path,'hash_dict.pickle','w')) as f: pickle.dump(hash_dict, f) def check_hash(rawfile, filename): """ Returns True if hash does not match the previous one. """ try: with open(root+'/hash_dict.pickle','r') as f: hash_dict = pickle.load(f) except: hash_dict = {} try: checkhash = hash_dict[filename] except: checkhash = None filehash = get_hash(rawfile) if filehash == checkhash: return False, None return True, filehash
""" A collection of utilities to see if new ReST files need to be automatically generated from certain files in the project (examples, datasets). """ import os import pickle file_path = os.path.dirname(__file__) def get_hash(f): """ Gets hexadmecimal md5 hash of a string """ import hashlib m = hashlib.md5() m.update(f) return m.hexdigest() def update_hash_dict(filehash, filename): """ Opens the pickled hash dictionary, adds an entry, and dumps it back. """ try: with open(file_path+'/hash_dict.pickle','r') as f: hash_dict = pickle.load(f) except IOError as err: hash_dict = {} hash_dict.update({filename : filehash}) with open(os.path.join(file_path,'hash_dict.pickle'),'w') as f: pickle.dump(hash_dict, f) def check_hash(rawfile, filename): """ Returns True if hash does not match the previous one. """ try: with open(file_path+'/hash_dict.pickle','r') as f: hash_dict = pickle.load(f) except IOError as err: hash_dict = {} try: checkhash = hash_dict[filename] except: checkhash = None filehash = get_hash(rawfile) if filehash == checkhash: return False, None return True, filehash
Fix directory in hash funcs.
ENH: Fix directory in hash funcs.
Python
bsd-3-clause
musically-ut/statsmodels,bavardage/statsmodels,nguyentu1602/statsmodels,kiyoto/statsmodels,detrout/debian-statsmodels,adammenges/statsmodels,kiyoto/statsmodels,adammenges/statsmodels,edhuckle/statsmodels,edhuckle/statsmodels,yarikoptic/pystatsmodels,kiyoto/statsmodels,rgommers/statsmodels,jstoxrocky/statsmodels,waynenilsen/statsmodels,DonBeo/statsmodels,musically-ut/statsmodels,adammenges/statsmodels,wkfwkf/statsmodels,ChadFulton/statsmodels,bavardage/statsmodels,saketkc/statsmodels,josef-pkt/statsmodels,statsmodels/statsmodels,jseabold/statsmodels,statsmodels/statsmodels,bert9bert/statsmodels,bert9bert/statsmodels,cbmoore/statsmodels,astocko/statsmodels,bsipocz/statsmodels,bzero/statsmodels,statsmodels/statsmodels,Averroes/statsmodels,wwf5067/statsmodels,phobson/statsmodels,hainm/statsmodels,rgommers/statsmodels,phobson/statsmodels,wzbozon/statsmodels,YihaoLu/statsmodels,bavardage/statsmodels,wdurhamh/statsmodels,detrout/debian-statsmodels,detrout/debian-statsmodels,gef756/statsmodels,jseabold/statsmodels,phobson/statsmodels,rgommers/statsmodels,bsipocz/statsmodels,saketkc/statsmodels,wkfwkf/statsmodels,jstoxrocky/statsmodels,gef756/statsmodels,wdurhamh/statsmodels,astocko/statsmodels,waynenilsen/statsmodels,huongttlan/statsmodels,statsmodels/statsmodels,hlin117/statsmodels,bert9bert/statsmodels,jseabold/statsmodels,josef-pkt/statsmodels,jseabold/statsmodels,pprett/statsmodels,yarikoptic/pystatsmodels,jseabold/statsmodels,phobson/statsmodels,yl565/statsmodels,huongttlan/statsmodels,wkfwkf/statsmodels,saketkc/statsmodels,hainm/statsmodels,nguyentu1602/statsmodels,bashtage/statsmodels,bavardage/statsmodels,josef-pkt/statsmodels,pprett/statsmodels,rgommers/statsmodels,hlin117/statsmodels,josef-pkt/statsmodels,bzero/statsmodels,wdurhamh/statsmodels,cbmoore/statsmodels,hainm/statsmodels,rgommers/statsmodels,wwf5067/statsmodels,nvoron23/statsmodels,bzero/statsmodels,yl565/statsmodels,musically-ut/statsmodels,cbmoore/statsmodels,gef756/statsmodels,DonBeo/statsmodels,yl565
/statsmodels,YihaoLu/statsmodels,detrout/debian-statsmodels,hainm/statsmodels,gef756/statsmodels,YihaoLu/statsmodels,bashtage/statsmodels,bashtage/statsmodels,alekz112/statsmodels,statsmodels/statsmodels,yarikoptic/pystatsmodels,josef-pkt/statsmodels,hlin117/statsmodels,wdurhamh/statsmodels,Averroes/statsmodels,kiyoto/statsmodels,bashtage/statsmodels,huongttlan/statsmodels,nvoron23/statsmodels,bashtage/statsmodels,ChadFulton/statsmodels,josef-pkt/statsmodels,Averroes/statsmodels,jstoxrocky/statsmodels,wkfwkf/statsmodels,wzbozon/statsmodels,bsipocz/statsmodels,bzero/statsmodels,YihaoLu/statsmodels,astocko/statsmodels,adammenges/statsmodels,bzero/statsmodels,astocko/statsmodels,nvoron23/statsmodels,bavardage/statsmodels,bert9bert/statsmodels,alekz112/statsmodels,bashtage/statsmodels,ChadFulton/statsmodels,waynenilsen/statsmodels,jstoxrocky/statsmodels,cbmoore/statsmodels,wzbozon/statsmodels,hlin117/statsmodels,edhuckle/statsmodels,saketkc/statsmodels,pprett/statsmodels,ChadFulton/statsmodels,Averroes/statsmodels,ChadFulton/statsmodels,kiyoto/statsmodels,alekz112/statsmodels,wwf5067/statsmodels,bert9bert/statsmodels,DonBeo/statsmodels,pprett/statsmodels,phobson/statsmodels,DonBeo/statsmodels,nguyentu1602/statsmodels,wzbozon/statsmodels,wkfwkf/statsmodels,nguyentu1602/statsmodels,edhuckle/statsmodels,waynenilsen/statsmodels,musically-ut/statsmodels,wzbozon/statsmodels,YihaoLu/statsmodels,yl565/statsmodels,cbmoore/statsmodels,statsmodels/statsmodels,gef756/statsmodels,bsipocz/statsmodels,edhuckle/statsmodels,alekz112/statsmodels,saketkc/statsmodels,wwf5067/statsmodels,huongttlan/statsmodels,ChadFulton/statsmodels,nvoron23/statsmodels,nvoron23/statsmodels,wdurhamh/statsmodels,yl565/statsmodels,DonBeo/statsmodels
--- +++ @@ -2,6 +2,10 @@ A collection of utilities to see if new ReST files need to be automatically generated from certain files in the project (examples, datasets). """ +import os +import pickle + +file_path = os.path.dirname(__file__) def get_hash(f): """ @@ -17,12 +21,12 @@ Opens the pickled hash dictionary, adds an entry, and dumps it back. """ try: - with open(root+'/hash_dict.pickle','r') as f: + with open(file_path+'/hash_dict.pickle','r') as f: hash_dict = pickle.load(f) - except: + except IOError as err: hash_dict = {} hash_dict.update({filename : filehash}) - with open(os.path.join(file_path,'hash_dict.pickle','w')) as f: + with open(os.path.join(file_path,'hash_dict.pickle'),'w') as f: pickle.dump(hash_dict, f) def check_hash(rawfile, filename): @@ -30,9 +34,9 @@ Returns True if hash does not match the previous one. """ try: - with open(root+'/hash_dict.pickle','r') as f: + with open(file_path+'/hash_dict.pickle','r') as f: hash_dict = pickle.load(f) - except: + except IOError as err: hash_dict = {} try: checkhash = hash_dict[filename]
10b977303008ee59a5f5c39ccf0156222a5a58c5
test_run.py
test_run.py
# -*- coding: utf-8 -*- """ Created on Wed Sep 23 15:23:58 2015 @author: jensv """ import skin_core_scanner_simple as scss reload(scss) import equil_solver as es reload(es) import newcomb_simple as new reload(new) (lambda_a_mesh, k_a_mesh, stability_maps) = scss.scan_lambda_k_space([0.01, 3.0, 10.], [0.01, 1.5, 10], epsilon=0.11, core_radius_norm=0.9, transition_width_norm=0.033, skin_width_norm=0.034, method='lsoda', max_step=1E-2, nsteps=1000)
# -*- coding: utf-8 -*- """ Created on Wed Sep 23 15:23:58 2015 @author: jensv """ import skin_core_scanner_simple as scss reload(scss) import equil_solver as es reload(es) import newcomb_simple as new reload(new) (lambda_a_mesh, k_a_mesh, stability_maps) = scss.scan_lambda_k_space([0.01, 3.0, 25.], [0.01, 1.5, 25], epsilon=0.11, core_radius_norm=0.9, transition_width_norm=0.033, skin_width_norm=0.034, method='lsoda', max_step=1E-2, nsteps=1000)
Increase lambda-k space to better differentiate bottlenecks from startup cost.
Increase lambda-k space to better differentiate bottlenecks from startup cost.
Python
mit
jensv/fluxtubestability,jensv/fluxtubestability
--- +++ @@ -13,7 +13,7 @@ reload(new) (lambda_a_mesh, k_a_mesh, - stability_maps) = scss.scan_lambda_k_space([0.01, 3.0, 10.], [0.01, 1.5, 10], + stability_maps) = scss.scan_lambda_k_space([0.01, 3.0, 25.], [0.01, 1.5, 25], epsilon=0.11, core_radius_norm=0.9, transition_width_norm=0.033, skin_width_norm=0.034,
d0791ccd79dea2ec30d890ad9060f58d1e8b1c7c
run_tests.py
run_tests.py
import pytest from bs4 import BeautifulSoup as BS pytest.main(['--durations', '10', '--cov-report', 'html']) url = r'htmlcov/index.html' page = open(url) soup = BS(page.read(), features='html5lib') aggregate_total = soup.find_all('tr', {'class': 'total'}) final = None for x in aggregate_total: pct = x.text.replace(' ', '').replace('\n', ' ').split(' ') final = pct[6] with open('test_report.txt', 'w') as report: report.write(final.strip().replace('%', ''))
import pytest from bs4 import BeautifulSoup as BS pytest.main(['--durations', '10', '--cov-report', 'html', '--junit-xml', 'test-reports/results.xml', '--verbose']) url = r'htmlcov/index.html' page = open(url) soup = BS(page.read(), features='html5lib') aggregate_total = soup.find_all('tr', {'class': 'total'}) final = None for x in aggregate_total: pct = x.text.replace(' ', '').replace('\n', ' ').split(' ') final = pct[6] with open('test_report.txt', 'w') as report: report.write(final.strip().replace('%', ''))
Update test file - add flag for reports
Update test file - add flag for reports
Python
mit
misachi/job_match,misachi/job_match,misachi/job_match
--- +++ @@ -1,7 +1,7 @@ import pytest from bs4 import BeautifulSoup as BS -pytest.main(['--durations', '10', '--cov-report', 'html']) +pytest.main(['--durations', '10', '--cov-report', 'html', '--junit-xml', 'test-reports/results.xml', '--verbose']) url = r'htmlcov/index.html' page = open(url) soup = BS(page.read(), features='html5lib')
9e4dc6763fbd0de0f17b4acaa8109a12cdff28d6
orderedmodel/models.py
orderedmodel/models.py
from django.db import models from django.core.exceptions import ValidationError class OrderedModelManager(models.Manager): def swap(self, obj1, obj2): tmp, obj2.order = obj2.order, 0 obj2.save(swapping=True) obj2.order, obj1.order = obj1.order, tmp obj1.save() obj2.save() def max_order(self): try: return self.order_by('-order').values_list('order', flat=True)[0] except IndexError: return 0 class OrderedModel(models.Model): order = models.PositiveIntegerField(blank=True, default=1, db_index=True) objects = OrderedModelManager() class Meta: abstract = True ordering = ['order'] def save(self, swapping=False, *args, **kwargs): if not self.id: self.order = self.max_order() + 1 if self.order == 0 and not swapping: raise ValidationError("Can't set 'order' to 0") super(OrderedModel, self).save(*args, **kwargs) @classmethod def swap(cls, obj1, obj2): cls.objects.swap(obj1, obj2) @classmethod def max_order(cls): return cls.objects.max_order()
from django.db import models from django.core.exceptions import ValidationError class OrderedModelManager(models.Manager): def swap(self, obj1, obj2): tmp, obj2.order = obj2.order, 0 obj2.save(swapping=True) obj2.order, obj1.order = obj1.order, tmp obj1.save() obj2.save() def max_order(self): try: return self.order_by('-order').values_list('order', flat=True)[0] except IndexError: return 0 def fix_ordering(self): """ This method must be executed only if this application is added to existing project. """ for index, item in enumerate(self.only('order'), 1): item.order = index item.save() class OrderedModel(models.Model): order = models.PositiveIntegerField(blank=True, default=1, db_index=True) objects = OrderedModelManager() class Meta: abstract = True ordering = ['order'] def save(self, swapping=False, *args, **kwargs): if not self.id: self.order = self.max_order() + 1 if self.order == 0 and not swapping: raise ValidationError("Can't set 'order' to 0") super(OrderedModel, self).save(*args, **kwargs) @classmethod def swap(cls, obj1, obj2): cls.objects.swap(obj1, obj2) @classmethod def max_order(cls): return cls.objects.max_order()
Add fix_ordering method to OrderedModelManager
Add fix_ordering method to OrderedModelManager
Python
bsd-3-clause
MagicSolutions/django-orderedmodel,MagicSolutions/django-orderedmodel
--- +++ @@ -15,6 +15,15 @@ return self.order_by('-order').values_list('order', flat=True)[0] except IndexError: return 0 + + def fix_ordering(self): + """ + This method must be executed only if this application is + added to existing project. + """ + for index, item in enumerate(self.only('order'), 1): + item.order = index + item.save() class OrderedModel(models.Model):
36065d77de34d0c8a0fc7443f01c2d9c8d63e0c4
konstrukteur/__init__.py
konstrukteur/__init__.py
# # Konstrukteur - Static website generator # Copyright 2013 Sebastian Fastner # """ **Konstrukteur - Static website generator** Konstrukteur is a website generator that uses a template and content files to create static website output. """ __version__ = "0.1.13" __author__ = "Sebastian Fastner <mail@sebastianfastner.de>" def info(): """ Prints information about Jasy to the console. """ import jasy.core.Console as Console print("Jasy %s is a powerful web tooling framework" % __version__) print("Visit %s for details." % Console.colorize("https://github.com/sebastian-software/jasy", "underline")) print() class UserError(Exception): """ Standard Jasy error class raised whenever something happens which the system understands (somehow excepected) """ pass
# # Konstrukteur - Static website generator # Copyright 2013 Sebastian Fastner # """ **Konstrukteur - Static website generator** Konstrukteur is a website generator that uses a template and content files to create static website output. """ __version__ = "0.1.14" __author__ = "Sebastian Fastner <mail@sebastianfastner.de>" def info(): """ Prints information about Jasy to the console. """ import jasy.core.Console as Console print("Jasy %s is a powerful web tooling framework" % __version__) print("Visit %s for details." % Console.colorize("https://github.com/sebastian-software/jasy", "underline")) print() class UserError(Exception): """ Standard Jasy error class raised whenever something happens which the system understands (somehow excepected) """ pass
Change to new version number
Change to new version number
Python
mit
fastner/konstrukteur,fastner/konstrukteur,fastner/konstrukteur
--- +++ @@ -10,7 +10,7 @@ to create static website output. """ -__version__ = "0.1.13" +__version__ = "0.1.14" __author__ = "Sebastian Fastner <mail@sebastianfastner.de>" def info():
0ba57c8b908b5feb58af731c7b1c62a41ae84d8d
familyconnect_registration/testsettings.py
familyconnect_registration/testsettings.py
from familyconnect_registration.settings import * # flake8: noqa # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'TESTSEKRET' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True TEMPLATE_DEBUG = True CELERY_EAGER_PROPAGATES_EXCEPTIONS = True CELERY_ALWAYS_EAGER = True BROKER_BACKEND = 'memory' CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
from familyconnect_registration.settings import * # flake8: noqa # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'TESTSEKRET' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True TEMPLATE_DEBUG = True CELERY_EAGER_PROPAGATES_EXCEPTIONS = True CELERY_ALWAYS_EAGER = True BROKER_BACKEND = 'memory' CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend' PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.MD5PasswordHasher', )
Test run speedup by changing password hasher
Test run speedup by changing password hasher
Python
bsd-3-clause
praekelt/familyconnect-registration,praekelt/familyconnect-registration
--- +++ @@ -12,3 +12,7 @@ CELERY_ALWAYS_EAGER = True BROKER_BACKEND = 'memory' CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend' + +PASSWORD_HASHERS = ( + 'django.contrib.auth.hashers.MD5PasswordHasher', +)
5978eedb3147bc0f124335d9e408d6c4895de3a7
__init__.py
__init__.py
import os import sys import marshal if sys.hexversion < 0x03030000: raise ImportError('python >= 3.3 required') if sys.implementation.cache_tag is None: raise ImportError('python implementation does not use bytecode') PY_TAG = sys.implementation.cache_tag PY_VERSION = sys.hexversion BUNDLE_DIR = os.path.join(__path__[0], 'bundle') BUNDLE_FILE = os.path.join(BUNDLE_DIR, PY_TAG + '.dgbundle') if not os.path.exists(BUNDLE_FILE): raise ImportError('python implementation {!r} not supported'.format(PY_TAG)) with open(BUNDLE_FILE, 'rb') as _fd: for _c in marshal.load(_fd): eval(_c) del _c del _fd
import os import sys import marshal if not hasattr(sys, 'implementation'): raise ImportError('python >= 3.3 required') if sys.implementation.cache_tag is None: raise ImportError('python implementation does not use bytecode') PY_TAG = sys.implementation.cache_tag PY_VERSION = sys.hexversion BUNDLE_DIR = os.path.join(__path__[0], 'bundle') BUNDLE_FILE = os.path.join(BUNDLE_DIR, PY_TAG + '.dgbundle') if not os.path.exists(BUNDLE_FILE): raise ImportError('python implementation {!r} not supported'.format(PY_TAG)) with open(BUNDLE_FILE, 'rb') as _fd: for _c in marshal.load(_fd): eval(_c) del _c del _fd
Use a different way of ensuring 3.3+.
Use a different way of ensuring 3.3+.
Python
mit
pyos/dg
--- +++ @@ -3,7 +3,7 @@ import marshal -if sys.hexversion < 0x03030000: +if not hasattr(sys, 'implementation'): raise ImportError('python >= 3.3 required') if sys.implementation.cache_tag is None:
4b7065426447fb27322b81b283616c9242af41b9
python_hospital_info_sys/com/pyhis/gui/main.py
python_hospital_info_sys/com/pyhis/gui/main.py
''' Created on Jan 15, 2017 @author: Marlon_2 ''' import tkinter as tk # import from tkinter import ttk # impork ttk from tkinter win = tk.Tk(); # create instance #add a title win.title("Python Hospital Information System"); #add a label #ttk.Label(win, text="Welcome to Python Hospital Information System").grid(column=0,row=0); aLabel = ttk.Label(win, text="Welcome to Python Hospital Information System"); aLabel.grid(column=0,row=0); def clickMe(): action.configure(text="** I have been clicked **") aLabel.configure(foreground='red',background='yellow') #Adding a button action = ttk.Button(win, text="Click Me!", command=clickMe) action.grid(column=0, row=1); #button click event callback function #win.resizable(200, 100); # disable resizing of the GUI win.mainloop(); # start GUI
''' Created on Jan 15, 2017 @author: Marlon_2 ''' import tkinter as tk # import from tkinter import ttk # impork ttk from tkinter win = tk.Tk(); # create instance #add a title win.title("Python Hospital Information System"); #add a label #ttk.Label(win, text="Welcome to Python Hospital Information System").grid(column=0,row=0); aLabel = ttk.Label(win, text="Welcome to Python Hospital Information System"); aLabel.grid(column=0,row=0); def clickMe(): action.configure(text="** I have been clicked **") aLabel.configure(foreground='red',background='yellow') # action = ttk.Button(win, command=clickMeReset) # action.grid(column=0, row=1); def clickMeReset(): action.configure(text="** Click Me! **") aLabel.configure(foreground='black',background='white') # action = ttk.Button(win, command=clickMe) # action.grid(column=0, row=1); #Adding a button action = ttk.Button(win, text="Click Me!", command=clickMe) action.grid(column=0, row=1); #button click event callback function #win.resizable(200, 100); # disable resizing of the GUI win.mainloop(); # start GUI
Stop experimenting on this project for the moment
Stop experimenting on this project for the moment
Python
mit
martianworm17/py_his
--- +++ @@ -20,6 +20,14 @@ def clickMe(): action.configure(text="** I have been clicked **") aLabel.configure(foreground='red',background='yellow') +# action = ttk.Button(win, command=clickMeReset) +# action.grid(column=0, row=1); + +def clickMeReset(): + action.configure(text="** Click Me! **") + aLabel.configure(foreground='black',background='white') +# action = ttk.Button(win, command=clickMe) +# action.grid(column=0, row=1); #Adding a button action = ttk.Button(win, text="Click Me!", command=clickMe)
7b15a9b510bce6a3866c0d3d7cd78c0c477cb69d
transformations/pig_latin/transformation.py
transformations/pig_latin/transformation.py
import piglatin from interfaces.SentenceOperation import SentenceOperation from tasks.TaskTypes import TaskType class PigLatin(SentenceOperation): tasks = [ TaskType.TEXT_CLASSIFICATION, TaskType.TEXT_TO_TEXT_GENERATION, TaskType.TEXT_TAGGING, ] languages = ["en"] def __init__(self, seed=0, max_outputs=1): super().__init__(seed, max_outputs=max_outputs) def generate(self, sentence: str): output_sentence = piglatin.translate(sentence.lower()) piglatin_sentence = [] for word in output_sentence.split(): piglatin_sentence.append(word.replace('-', '')) piglatin_sentence = ' '.join(piglatin_sentence) return [piglatin_sentence]
import piglatin import random from interfaces.SentenceOperation import SentenceOperation from tasks.TaskTypes import TaskType class PigLatin(SentenceOperation): tasks = [ TaskType.TEXT_CLASSIFICATION, TaskType.TEXT_TO_TEXT_GENERATION, TaskType.TEXT_TAGGING, ] languages = ["en"] def __init__(self, seed=0, max_outputs=1, replace_prob=1.0): super().__init__(seed, max_outputs=max_outputs) self.replace_prob = replace_prob def generate(self, sentence: str): piglatin_sentences = [] for _ in range(self.max_outputs): piglatin_sentence = [] for word in sentence.lower().split(): if random.random() < self.replace_prob: new_word = piglatin.translate(word) else: new_word = word piglatin_sentence.append(new_word.replace('-', '')) piglatin_sentence = ' '.join(piglatin_sentence) piglatin_sentences.append(piglatin_sentence) return piglatin_sentences
Add per-word replace probability, max outputs.
Add per-word replace probability, max outputs.
Python
mit
GEM-benchmark/NL-Augmenter
--- +++ @@ -1,4 +1,5 @@ import piglatin +import random from interfaces.SentenceOperation import SentenceOperation from tasks.TaskTypes import TaskType @@ -11,13 +12,20 @@ ] languages = ["en"] - def __init__(self, seed=0, max_outputs=1): + def __init__(self, seed=0, max_outputs=1, replace_prob=1.0): super().__init__(seed, max_outputs=max_outputs) + self.replace_prob = replace_prob def generate(self, sentence: str): - output_sentence = piglatin.translate(sentence.lower()) - piglatin_sentence = [] - for word in output_sentence.split(): - piglatin_sentence.append(word.replace('-', '')) - piglatin_sentence = ' '.join(piglatin_sentence) - return [piglatin_sentence] + piglatin_sentences = [] + for _ in range(self.max_outputs): + piglatin_sentence = [] + for word in sentence.lower().split(): + if random.random() < self.replace_prob: + new_word = piglatin.translate(word) + else: + new_word = word + piglatin_sentence.append(new_word.replace('-', '')) + piglatin_sentence = ' '.join(piglatin_sentence) + piglatin_sentences.append(piglatin_sentence) + return piglatin_sentences
0f71f39a8634927b532c3f5b258720761f1d9c5c
mentorup/users/models.py
mentorup/users/models.py
# -*- coding: utf-8 -*- # Import chosenforms for pretty search forms from chosen import forms as chosenforms # Import the AbstractUser model from django.contrib.auth.models import AbstractUser # Import the basic Django ORM models and forms library from django.db import models from django import forms # Import tags for searching from taggit.models import Tag from taggit.models import TagBase from taggit.managers import TaggableManager from django.utils.translation import ugettext_lazy as _ # Create seperate classes for each tag type that will be a foreign key reference from User class TeachSkills(models.Model): skills = TaggableManager() class LearnSkills(models.Model): skills = TaggableManager() # Subclass AbstractUser class User(AbstractUser): def __unicode__(self): return self.username teach = models.ForeignKey(TeachSkills, null=True) learn = models.ForeignKey(LearnSkills, null=True) short_bio = models.TextField() location = models.CharField(max_length=50)
# -*- coding: utf-8 -*- # Import chosenforms for pretty search forms from chosen import forms as chosenforms # Import the AbstractUser model from django.contrib.auth.models import AbstractUser # Import the basic Django ORM models and forms library from django.db import models from django import forms # Import tags for searching from taggit.models import Tag from taggit.models import TagBase from taggit.managers import TaggableManager from django.utils.translation import ugettext_lazy as _ # Create seperate classes for each tag type that will be a foreign key reference from User class TeachSkills(models.Model): skills = TaggableManager() class LearnSkills(models.Model): skills = TaggableManager() class UserManager(models.Manager): def create(self, name): new_user = Food() new_user.name = name new_user.teach = TeachSkills() new_user.teach.save() new_user.learn = LearnSkills() new_user.learn.save() new_user.save() return new_user # Subclass AbstractUser class User(AbstractUser): def __unicode__(self): return self.username objects = UserManager() teach = models.ForeignKey(TeachSkills, null=True) learn = models.ForeignKey(LearnSkills, null=True) short_bio = models.TextField() location = models.CharField(max_length=50)
Create UserManager to ensure ForeignKey relation is saved and associated with User upon creation
Create UserManager to ensure ForeignKey relation is saved and associated with User upon creation
Python
bsd-3-clause
briandant/mentor_up,briandant/mentor_up,briandant/mentor_up,briandant/mentor_up
--- +++ @@ -22,12 +22,24 @@ class LearnSkills(models.Model): skills = TaggableManager() +class UserManager(models.Manager): + def create(self, name): + new_user = Food() + new_user.name = name + new_user.teach = TeachSkills() + new_user.teach.save() + new_user.learn = LearnSkills() + new_user.learn.save() + new_user.save() + return new_user + # Subclass AbstractUser class User(AbstractUser): def __unicode__(self): return self.username - + + objects = UserManager() teach = models.ForeignKey(TeachSkills, null=True) learn = models.ForeignKey(LearnSkills, null=True) short_bio = models.TextField()
23f95f0319c929006c89efdf0d113370a1a003b4
moa/factory_registers.py
moa/factory_registers.py
from kivy.factory import Factory r = Factory.register r('MoaStage', module='moa.stage.base') r('StageRender', module='moa.stage.base') r('Delay', module='moa.stage.delay') r('TreeRender', module='moa.render.treerender') r('TreeRenderExt', module='moa.render.treerender') r('StageTreeNode', module='moa.render.treerender') r('StageSimpleDisplay', module='moa.render.stage_simple')
from kivy.factory import Factory r = Factory.register r('MoaStage', module='moa.stage') r('Delay', module='moa.stage.delay') r('GateStage', module='moa.stage.gate') r('StageRender', module='moa.stage.base') r('TreeRender', module='moa.render.treerender') r('TreeRenderExt', module='moa.render.treerender') r('StageTreeNode', module='moa.render.treerender') r('StageSimpleDisplay', module='moa.render.stage_simple')
Update factory registers with stages.
Update factory registers with stages.
Python
mit
matham/moa
--- +++ @@ -1,9 +1,11 @@ from kivy.factory import Factory r = Factory.register -r('MoaStage', module='moa.stage.base') +r('MoaStage', module='moa.stage') +r('Delay', module='moa.stage.delay') +r('GateStage', module='moa.stage.gate') + r('StageRender', module='moa.stage.base') -r('Delay', module='moa.stage.delay') r('TreeRender', module='moa.render.treerender') r('TreeRenderExt', module='moa.render.treerender') r('StageTreeNode', module='moa.render.treerender')
67f5b1796d2595a5b3fa8449ca7badaf27510ded
test_passwd_change.py
test_passwd_change.py
#!/usr/bin/env python3 from passwd_change import passwd_change, shadow_change, mails_delete from unittest import TestCase, TestLoader, TextTestRunner import os import subprocess class PasswdChange_Test(TestCase): def setUp(self): """ Preconditions """ subprocess.call(['mkdir', 'test']) subprocess.call(['touch', 'test/rvv', 'test/max', 'test/mail']) #TODO create passwd test file #TODO create shadow test file #TODO create keys.txt file def tearDown(self): if os.path.exists('test/rvv'): raise Exception('test/rvv must not exist') if not (os.path.exists('test/max') and os.path.exists('test/bdv') and os.path.exists('test/mail')): raise Exception('File max, bdv or mail must exist!') subprocess.call(['rm', '-r', 'test/']) def test_passwd_change(self): shadow_change(*passwd_change()) mails_delete(maildir_path='test') def test_passwd_change_2(self): shadow_change(*passwd_change()) mails_delete(maildir_path='test/') suite = TestLoader().loadTestsFromTestCase(PasswdChange_Test) TextTestRunner(verbosity=2).run(suite)
#!/usr/bin/env python3 from passwd_change import passwd_change, shadow_change, mails_delete from unittest import TestCase, TestLoader, TextTestRunner import os import subprocess class PasswdChange_Test(TestCase): def setUp(self): """ Preconditions """ subprocess.call(['mkdir', 'test']) subprocess.call(['touch', 'test/rvv', 'test/max', 'test/bdv' ,'test/mail']) #TODO create passwd test file #TODO create shadow test file #TODO create keys.txt file def tearDown(self): try: if os.path.exists('test/rvv'): raise Exception('test/rvv must not exist') if not (os.path.exists('test/max') and os.path.exists('test/bdv') and os.path.exists('test/mail')): raise Exception('File max, bdv or mail must exist!') except: subprocess.call(['rm', '-r', 'test/']) raise def test_passwd_change(self): shadow_change(*passwd_change()) mails_delete(maildir_path='test') def test_passwd_change_2(self): shadow_change(*passwd_change()) mails_delete(maildir_path='test/') suite = TestLoader().loadTestsFromTestCase(PasswdChange_Test) TextTestRunner(verbosity=2).run(suite)
Add MIT LICENSE. Fix test dir removing issue - re-raise exception after delete this dir.
Add MIT LICENSE. Fix test dir removing issue - re-raise exception after delete this dir.
Python
mit
maxsocl/oldmailer
--- +++ @@ -14,19 +14,22 @@ """ subprocess.call(['mkdir', 'test']) subprocess.call(['touch', 'test/rvv', 'test/max', - 'test/mail']) + 'test/bdv' ,'test/mail']) #TODO create passwd test file #TODO create shadow test file #TODO create keys.txt file def tearDown(self): - if os.path.exists('test/rvv'): - raise Exception('test/rvv must not exist') - if not (os.path.exists('test/max') and - os.path.exists('test/bdv') and - os.path.exists('test/mail')): - raise Exception('File max, bdv or mail must exist!') - subprocess.call(['rm', '-r', 'test/']) + try: + if os.path.exists('test/rvv'): + raise Exception('test/rvv must not exist') + if not (os.path.exists('test/max') and + os.path.exists('test/bdv') and + os.path.exists('test/mail')): + raise Exception('File max, bdv or mail must exist!') + except: + subprocess.call(['rm', '-r', 'test/']) + raise def test_passwd_change(self): shadow_change(*passwd_change())
b1bf5dfa91f1f7b84512f72d6e5e18c2109f3239
addic7ed/__init__.py
addic7ed/__init__.py
from termcolor import colored from .parser import Addic7edParser from .file_crawler import FileCrawler from .logger import init_logger from .config import Config def addic7ed(): try: init_logger() Config.load() main() except (EOFError, KeyboardInterrupt, SystemExit): print(colored("\nBye!", "yellow")) exit(0) def main(): crawler = FileCrawler() parser = Addic7edParser() for filename, ep in crawler.episodes.items(): subs = parser.parse(**ep.infos) print(ep) if not subs: print(colored("No subtitles for %s" % filename, "red"), end="\n\n") continue for i, sub in enumerate(subs): print("[%s] %s" % (colored(i, "yellow"), sub)) if Config.dry_run: print() continue else: version = input('Download number? ') if not version: print(colored("Nothing to do!", "yellow"), end="\n\n") continue try: if Config.rename != "sub": filename = subs[int(version)].download() if filename and Config.rename == "video": print(ep.rename(filename), end="\n\n") else: filename = subs[int(version)].download("%s.srt" % ep.filename) print(colored("Downloaded %s subtitle file" % filename, "green")) except Exception as e: print(colored(e, "red"), end="\n\n")
from termcolor import colored from .parser import Addic7edParser from .file_crawler import FileCrawler from .logger import init_logger from .config import Config def addic7ed(): try: init_logger() Config.load() main() except (EOFError, KeyboardInterrupt, SystemExit): print(colored("\nBye!", "yellow")) exit(0) def main(): crawler = FileCrawler() parser = Addic7edParser() for filename, ep in crawler.episodes.items(): subs = parser.parse(**ep.infos) print(ep) if not subs: print(colored("No subtitles for %s" % filename, "red"), end="\n\n") continue for i, sub in enumerate(subs): print("[%s] %s" % (colored(i, "yellow"), sub)) if Config.dry_run: print() continue else: version = input('Download number? ') if not version: print(colored("Nothing to do!", "yellow"), end="\n\n") continue try: if Config.rename != "sub": filename = subs[int(version)].download() if filename and Config.rename == "video": print(ep.rename(filename)) else: filename = subs[int(version)].download("%s.srt" % ep.filename) print(colored("Downloaded %s subtitle file" % filename, "green"), end="\n\n") except Exception as e: print(colored(e, "red"), end="\n\n")
Fix newline output of downloaded srt
Fix newline output of downloaded srt
Python
mit
Jesus-21/addic7ed
--- +++ @@ -46,12 +46,12 @@ if Config.rename != "sub": filename = subs[int(version)].download() if filename and Config.rename == "video": - print(ep.rename(filename), end="\n\n") + print(ep.rename(filename)) else: filename = subs[int(version)].download("%s.srt" % ep.filename) print(colored("Downloaded %s subtitle file" % - filename, "green")) + filename, "green"), end="\n\n") except Exception as e: print(colored(e, "red"), end="\n\n")
7b9ee45c0791d8368a0bb8af52652d3fcd482c79
qubesadmin/__init__.py
qubesadmin/__init__.py
# -*- encoding: utf8 -*- # # The Qubes OS Project, http://www.qubes-os.org # # Copyright (C) 2017 Marek Marczykowski-Górecki # <marmarek@invisiblethingslab.com> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation; either version 2.1 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License along # with this program; if not, see <http://www.gnu.org/licenses/>. '''Qubes OS management client.''' import os import qubesadmin.config import qubesadmin.base import qubesadmin.app DEFAULT = qubesadmin.base.DEFAULT if os.path.exists(qubesadmin.config.QUBESD_SOCKET): Qubes = qubesadmin.app.QubesLocal else: Qubes = qubesadmin.app.QubesRemote
# -*- encoding: utf8 -*- # # The Qubes OS Project, http://www.qubes-os.org # # Copyright (C) 2017 Marek Marczykowski-Górecki # <marmarek@invisiblethingslab.com> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation; either version 2.1 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License along # with this program; if not, see <http://www.gnu.org/licenses/>. '''Qubes OS management client.''' import os import qubesadmin.config import qubesadmin.base import qubesadmin.app DEFAULT = qubesadmin.base.DEFAULT if os.path.exists('/etc/qubes-release'): Qubes = qubesadmin.app.QubesLocal else: Qubes = qubesadmin.app.QubesRemote
Choose QubesLocal or QubesRemote based on /etc/qubes-release presence
Choose QubesLocal or QubesRemote based on /etc/qubes-release presence Do not check for qubesd socket (at module import time), because if not running at this precise time, it will lead to wrong choice. And a weird error message in consequence (looking for qrexec-client-vm in dom0). Fixes QubesOS/qubes-issues#2917
Python
lgpl-2.1
marmarek/qubes-core-mgmt-client,marmarek/qubes-core-mgmt-client,marmarek/qubes-core-mgmt-client
--- +++ @@ -28,7 +28,7 @@ DEFAULT = qubesadmin.base.DEFAULT -if os.path.exists(qubesadmin.config.QUBESD_SOCKET): +if os.path.exists('/etc/qubes-release'): Qubes = qubesadmin.app.QubesLocal else: Qubes = qubesadmin.app.QubesRemote
9daac0977933238929eda5e05c635e3a626cbe21
tests/test_example.py
tests/test_example.py
import unittest import object_storage_tensorflow as obj_tf class TestStringMethods(unittest.TestCase): def test_upper(self): self.assertEqual('foo'.upper(), 'FOO') def test_isupper(self): self.assertTrue('FOO'.isupper()) self.assertFalse('Foo'.isupper()) def test_split(self): s = 'hello world' self.assertEqual(s.split(), ['hello', 'world']) # check that s.split fails when the separator is not a string with self.assertRaises(TypeError): s.split(2) class TestS3Connection(unittest.TestCase): def test_buckets(self): conn = obj_tf.s3.getConnection() names = [] for bucket in conn.buckets.all(): names.append(bucket.name) self.assertTrue(len(names) > 0) if __name__ == '__main__': unittest.main()
import os import unittest import object_storage_tensorflow as obj_tf class TestStringMethods(unittest.TestCase): def test_upper(self): self.assertEqual('foo'.upper(), 'FOO') def test_isupper(self): self.assertTrue('FOO'.isupper()) self.assertFalse('Foo'.isupper()) def test_split(self): s = 'hello world' self.assertEqual(s.split(), ['hello', 'world']) # check that s.split fails when the separator is not a string with self.assertRaises(TypeError): s.split(2) class TestS3Connection(unittest.TestCase): @unittest.skipUnless(os.environ.get("TRAVIS_PULL_REQUEST") == 'false', "S3 tests will fail for Pull Requests due to lack of secrets.") def test_buckets(self): conn = obj_tf.s3.getConnection() names = [] for bucket in conn.buckets.all(): names.append(bucket.name) self.assertTrue(len(names) > 0) if __name__ == '__main__': unittest.main()
Add intelligent skip for missing secret info
Add intelligent skip for missing secret info
Python
apache-2.0
marshallford/ndsu-ibm-capstone,marshallford/ndsu-ibm-capstone
--- +++ @@ -1,4 +1,6 @@ +import os import unittest + import object_storage_tensorflow as obj_tf @@ -21,6 +23,8 @@ class TestS3Connection(unittest.TestCase): + @unittest.skipUnless(os.environ.get("TRAVIS_PULL_REQUEST") == 'false', + "S3 tests will fail for Pull Requests due to lack of secrets.") def test_buckets(self): conn = obj_tf.s3.getConnection() names = []
cb03101afdd337f2840d3a439f4452c1083e09ff
utils/strings.py
utils/strings.py
# coding=utf-8 import string from numbers import Number __author__ = 'Gareth Coles' FILENAME_SAFE_CHARS = ( "/\\-_.()#" + string.digits + string.letters + string.whitespace ) class EmptyStringFormatter(string.Formatter): """ EmptyStringFormatter - The same as the normal string formatter, except this one replaces missing tokens with empty strings. Use this just like you would a normal formatter. For example: >>> formatter = EmptyStringFormatter() >>> formatter.format("... {RED} {YELLOW} ...", RED="red") '... red ...' """ def get_value(self, key, args, kwargs): try: # if hasattr(key, "__mod__"): if isinstance(key, Number): return args[key] else: return kwargs[key] except (KeyError, IndexError): return "" def to_filename(_string): return filter( lambda x: x in FILENAME_SAFE_CHARS, _string )
# coding=utf-8 import string from numbers import Number __author__ = 'Gareth Coles' FILENAME_SAFE_CHARS = ( "/\\-_.()#:" + string.digits + string.letters + string.whitespace ) class EmptyStringFormatter(string.Formatter): """ EmptyStringFormatter - The same as the normal string formatter, except this one replaces missing tokens with empty strings. Use this just like you would a normal formatter. For example: >>> formatter = EmptyStringFormatter() >>> formatter.format("... {RED} {YELLOW} ...", RED="red") '... red ...' """ def get_value(self, key, args, kwargs): try: # if hasattr(key, "__mod__"): if isinstance(key, Number): return args[key] else: return kwargs[key] except (KeyError, IndexError): return "" def to_filename(_string): return filter( lambda x: x in FILENAME_SAFE_CHARS, _string )
Allow colons in filenames for now
[Utils] Allow colons in filenames for now
Python
artistic-2.0
UltrosBot/Ultros,UltrosBot/Ultros
--- +++ @@ -5,7 +5,7 @@ __author__ = 'Gareth Coles' FILENAME_SAFE_CHARS = ( - "/\\-_.()#" + + "/\\-_.()#:" + string.digits + string.letters + string.whitespace
e689a09c7c6d20a7e6bbc5b81b864d1bdd406295
src/setup.py
src/setup.py
#! /usr/bin/python import os import setuptools import sys # FIXME explain why this is here sys.path.insert(0, os.path.join( os.path.dirname(__file__), "lib", )) import opensub setuptools.setup( author="Bence Romsics", author_email="rubasov+opensub@gmail.com", classifiers=[ "Environment :: Console", "Intended Audience :: End Users/Desktop", "License :: OSI Approved :: BSD License", "Natural Language :: English", "Operating System :: POSIX", "Programming Language :: Python :: 2.7", "Topic :: Multimedia :: Video", "Topic :: Utilities", ], description="CLI utilities for opensubtitles.org.", install_requires=[ "docopt", "six", ], name="opensub", package_dir={ "": "lib", }, packages=[ "opensub", ], scripts=[ "bin/opensub-get", "bin/opensub-hash", ], tests_require=[ "nose", ], url="http://github.com/rubasov/...", # FIXME version=opensub.__version__, zip_safe=False, )
#! /usr/bin/python import os import setuptools import sys # FIXME explain why this is here sys.path.insert(0, os.path.join( os.path.dirname(__file__), "lib", )) import opensub setuptools.setup( author="Bence Romsics", author_email="rubasov+opensub@gmail.com", classifiers=[ "Environment :: Console", "Intended Audience :: End Users/Desktop", "License :: OSI Approved :: BSD License", "Natural Language :: English", "Operating System :: POSIX", "Programming Language :: Python :: 2.7", "Topic :: Multimedia :: Video", "Topic :: Utilities", ], description="CLI utilities for opensubtitles.org.", install_requires=[ "docopt", "six", ], name="opensub", package_dir={ "": "lib", }, packages=[ "opensub", ], scripts=[ "bin/opensub-get", "bin/opensub-hash", ], tests_require=[ "nose", ], url="https://github.com/rubasov/opensub-utils", version=opensub.__version__, zip_safe=False, )
Set package URL to where it'll be uploaded.
Set package URL to where it'll be uploaded.
Python
bsd-2-clause
rubasov/opensub-utils,rubasov/opensub-utils
--- +++ @@ -46,7 +46,7 @@ tests_require=[ "nose", ], - url="http://github.com/rubasov/...", # FIXME + url="https://github.com/rubasov/opensub-utils", version=opensub.__version__, zip_safe=False, )
1a3ffe00bfdf8c61b4ff190beb2ee6a4e9db1412
behave_django/environment.py
behave_django/environment.py
from django.core.management import call_command from django.shortcuts import resolve_url from behave_django.testcase import BehaveDjangoTestCase def before_scenario(context, scenario): # This is probably a hacky method of setting up the test case # outside of a test runner. Suggestions are welcome. :) context.test = BehaveDjangoTestCase() context.test.setUpClass() context.test() # Load fixtures if getattr(context, 'fixtures', None): call_command('loaddata', *context.fixtures, verbosity=0) context.base_url = context.test.live_server_url def get_url(to=None, *args, **kwargs): """ URL helper attached to context with built-in reverse resolution as a handy shortcut. Takes an absolute path, a view name, or a model instance as an argument (as django.shortcuts.resolve_url). Examples:: context.get_url() context.get_url('/absolute/url/here') context.get_url('view-name') context.get_url('view-name', 'with args', and='kwargs') context.get_url(model_instance) """ return context.base_url + ( resolve_url(to, *args, **kwargs) if to else '') context.get_url = get_url def after_scenario(context, scenario): context.test.tearDownClass() del context.test
from django.core.management import call_command try: from django.shortcuts import resolve_url except ImportError: import warnings warnings.warn("URL path supported only in get_url() with Django < 1.5") resolve_url = lambda to, *args, **kwargs: to from behave_django.testcase import BehaveDjangoTestCase def before_scenario(context, scenario): # This is probably a hacky method of setting up the test case # outside of a test runner. Suggestions are welcome. :) context.test = BehaveDjangoTestCase() context.test.setUpClass() context.test() # Load fixtures if getattr(context, 'fixtures', None): call_command('loaddata', *context.fixtures, verbosity=0) context.base_url = context.test.live_server_url def get_url(to=None, *args, **kwargs): """ URL helper attached to context with built-in reverse resolution as a handy shortcut. Takes an absolute path, a view name, or a model instance as an argument (as django.shortcuts.resolve_url). Examples:: context.get_url() context.get_url('/absolute/url/here') context.get_url('view-name') context.get_url('view-name', 'with args', and='kwargs') context.get_url(model_instance) """ return context.base_url + ( resolve_url(to, *args, **kwargs) if to else '') context.get_url = get_url def after_scenario(context, scenario): context.test.tearDownClass() del context.test
Support Django < 1.5 with a simplified version of `get_url()`
Support Django < 1.5 with a simplified version of `get_url()`
Python
mit
nikolas/behave-django,nikolas/behave-django,behave/behave-django,bittner/behave-django,bittner/behave-django,behave/behave-django
--- +++ @@ -1,5 +1,10 @@ from django.core.management import call_command -from django.shortcuts import resolve_url +try: + from django.shortcuts import resolve_url +except ImportError: + import warnings + warnings.warn("URL path supported only in get_url() with Django < 1.5") + resolve_url = lambda to, *args, **kwargs: to from behave_django.testcase import BehaveDjangoTestCase
005c6ceae1b80f5092e78231242b01af2ba64fed
tests/integration/api/conftest.py
tests/integration/api/conftest.py
""" :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. API-specific fixtures """ import pytest from tests.base import create_admin_app from tests.conftest import CONFIG_PATH_DATA_KEY from .helpers import assemble_authorization_header API_TOKEN = 'just-say-PLEASE!' @pytest.fixture(scope='session') def app(admin_app, data_path): config_overrides = { 'API_TOKEN': API_TOKEN, CONFIG_PATH_DATA_KEY: data_path, 'SERVER_NAME': 'api.acmecon.test', } app = create_admin_app(config_overrides) with app.app_context(): yield app @pytest.fixture(scope='session') def api_client(app): """Provide a test HTTP client against the API.""" return app.test_client() @pytest.fixture(scope='session') def api_client_authz_header(): """Provide a test HTTP client against the API.""" return assemble_authorization_header(API_TOKEN)
""" :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. API-specific fixtures """ import pytest from tests.base import create_admin_app from tests.conftest import CONFIG_PATH_DATA_KEY from .helpers import assemble_authorization_header API_TOKEN = 'just-say-PLEASE!' @pytest.fixture(scope='session') # `admin_app` fixture is required because it sets up the database. def app(admin_app, make_admin_app): config_overrides = { 'API_TOKEN': API_TOKEN, 'SERVER_NAME': 'api.acmecon.test', } app = make_admin_app(**config_overrides) with app.app_context(): yield app @pytest.fixture(scope='session') def api_client(app): """Provide a test HTTP client against the API.""" return app.test_client() @pytest.fixture(scope='session') def api_client_authz_header(): """Provide a test HTTP client against the API.""" return assemble_authorization_header(API_TOKEN)
Use `make_admin_app`, document why `admin_app` is still needed
Use `make_admin_app`, document why `admin_app` is still needed
Python
bsd-3-clause
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
--- +++ @@ -17,13 +17,13 @@ @pytest.fixture(scope='session') -def app(admin_app, data_path): +# `admin_app` fixture is required because it sets up the database. +def app(admin_app, make_admin_app): config_overrides = { 'API_TOKEN': API_TOKEN, - CONFIG_PATH_DATA_KEY: data_path, 'SERVER_NAME': 'api.acmecon.test', } - app = create_admin_app(config_overrides) + app = make_admin_app(**config_overrides) with app.app_context(): yield app
8c4cdc174b502610943507d4e7ffee96ad9d611a
us_ignite/snippets/tests/models_tests.py
us_ignite/snippets/tests/models_tests.py
from nose.tools import eq_, ok_ from django.test import TestCase from us_ignite.snippets.models import Snippet class TestSnippetModel(TestCase): def tearDown(self): Snippet.objects.all().delete() def get_instance(self): data = { 'name': 'Gigabit snippets', 'slug': 'featured', 'url': 'http://us-ignite.org/', } return Snippet.objects.create(**data) def test_instance_is_created_successfully(self): instance = self.get_instance() eq_(instance.name, 'Gigabit snippets') eq_(instance.status, Snippet.DRAFT) eq_(instance.url, 'http://us-ignite.org/') eq_(instance.url_text, '') eq_(instance.body, '') eq_(instance.image, 'ad.png') eq_(instance.is_featured, False) ok_(instance.created) ok_(instance.modified) eq_(instance.slug, 'featured') ok_(instance.id)
from nose.tools import eq_, ok_ from django.test import TestCase from us_ignite.snippets.models import Snippet class TestSnippetModel(TestCase): def tearDown(self): Snippet.objects.all().delete() def get_instance(self): data = { 'name': 'Gigabit snippets', 'slug': 'featured', 'url': 'http://us-ignite.org/', } return Snippet.objects.create(**data) def test_instance_is_created_successfully(self): instance = self.get_instance() eq_(instance.name, 'Gigabit snippets') eq_(instance.status, Snippet.DRAFT) eq_(instance.url, 'http://us-ignite.org/') eq_(instance.url_text, '') eq_(instance.body, '') eq_(instance.image, '') eq_(instance.is_featured, False) ok_(instance.created) ok_(instance.modified) eq_(instance.slug, 'featured') ok_(instance.id)
Fix Snippet failing test, ``image`` field is blank.
Fix Snippet failing test, ``image`` field is blank.
Python
bsd-3-clause
us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite
--- +++ @@ -25,7 +25,7 @@ eq_(instance.url, 'http://us-ignite.org/') eq_(instance.url_text, '') eq_(instance.body, '') - eq_(instance.image, 'ad.png') + eq_(instance.image, '') eq_(instance.is_featured, False) ok_(instance.created) ok_(instance.modified)
7b14e846f08f69601372266ed82f91ba5bd306f6
devito/core/__init__.py
devito/core/__init__.py
""" The ``core`` Devito backend is simply a "shadow" of the ``base`` backend, common to all other backends. The ``core`` backend (and therefore the ``base`` backend as well) are used to run Devito on standard CPU architectures. """ from devito.dle import (BasicRewriter, AdvancedRewriter, AdvancedRewriterSafeMath, SpeculativeRewriter, init_dle) from devito.parameters import Parameters, add_sub_configuration core_configuration = Parameters('core') core_configuration.add('autotuning', 'basic', ['none', 'basic', 'aggressive']) env_vars_mapper = { 'DEVITO_AUTOTUNING': 'autotuning', } add_sub_configuration(core_configuration, env_vars_mapper) # Initialize the DLE modes = {'basic': BasicRewriter, 'advanced': AdvancedRewriter, 'advanced-safemath': AdvancedRewriterSafeMath, 'speculative': SpeculativeRewriter} init_dle(modes) # The following used by backends.backendSelector from devito.function import (Constant, Function, TimeFunction, SparseFunction, # noqa SparseTimeFunction) from devito.grid import Grid # noqa from devito.core.operator import Operator # noqa from devito.types import CacheManager # noqa
""" The ``core`` Devito backend is simply a "shadow" of the ``base`` backend, common to all other backends. The ``core`` backend (and therefore the ``base`` backend as well) are used to run Devito on standard CPU architectures. """ from devito.dle import (BasicRewriter, AdvancedRewriter, AdvancedRewriterSafeMath, SpeculativeRewriter, init_dle) from devito.parameters import Parameters, add_sub_configuration core_configuration = Parameters('core') core_configuration.add('autotuning', 'basic', ['off', 'basic', 'aggressive']) env_vars_mapper = { 'DEVITO_AUTOTUNING': 'autotuning', } add_sub_configuration(core_configuration, env_vars_mapper) # Initialize the DLE modes = {'basic': BasicRewriter, 'advanced': AdvancedRewriter, 'advanced-safemath': AdvancedRewriterSafeMath, 'speculative': SpeculativeRewriter} init_dle(modes) # The following used by backends.backendSelector from devito.function import (Constant, Function, TimeFunction, SparseFunction, # noqa SparseTimeFunction) from devito.grid import Grid # noqa from devito.core.operator import Operator # noqa from devito.types import CacheManager # noqa
Change autotuning 'none' to 'off'
core: Change autotuning 'none' to 'off'
Python
mit
opesci/devito,opesci/devito
--- +++ @@ -9,7 +9,7 @@ from devito.parameters import Parameters, add_sub_configuration core_configuration = Parameters('core') -core_configuration.add('autotuning', 'basic', ['none', 'basic', 'aggressive']) +core_configuration.add('autotuning', 'basic', ['off', 'basic', 'aggressive']) env_vars_mapper = { 'DEVITO_AUTOTUNING': 'autotuning',
91f9ea76a1a48cf9e191b4f97818c105428bbbd6
util/test_graph.py
util/test_graph.py
import urllib2 token = 'test_token' channel = 'test_channel' graphtype = 'test' url = 'http://{}/ocpgraph/{}/{}/{}/'.format('localhost:8000', token, channel, graphtype) try: req = urllib2.Request(url) resposne = urllib2.urlopen(req) except Exception, e: raise
# Copyright 2014 Open Connectome Project (http://openconnecto.me) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import urllib2 token = 'test_token' channel = 'test_channel' graphtype = 'test' url = 'http://{}/ocpgraph/{}/{}/{}/'.format('localhost:8000', token, channel, graphtype) try: req = urllib2.Request(url) resposne = urllib2.urlopen(req) except Exception, e: raise
Test file for Graph code
[util] Test file for Graph code
Python
apache-2.0
neurodata/ndstore,openconnectome/open-connectome,openconnectome/open-connectome,openconnectome/open-connectome,openconnectome/open-connectome,openconnectome/open-connectome,openconnectome/open-connectome,neurodata/ndstore,neurodata/ndstore,neurodata/ndstore
--- +++ @@ -1,3 +1,17 @@ +# Copyright 2014 Open Connectome Project (http://openconnecto.me) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import urllib2 token = 'test_token'
c4818066de3b428678fb03c70dcaa028227e0c00
scripts/launch_app.py
scripts/launch_app.py
#! /usr/bin/env python3 """Launch the flask-forecaster application.""" import logging import sys logging.basicConfig( datefmt='%Y/%m/%d %H.%M.%S', format='%(levelname)s:%(name)s:%(message)s', level=logging.INFO, stream=sys.stdout, ) logger = logging.getLogger('launch_app') if __name__ == '__main__': from flask_forecaster import app host, port = app.config['SERVER_NAME'].split(':') logger.info('starting app on %s, %s', host, port) app.run(host=host, port=int(port))
#! /usr/bin/env python3 """Launch the flask-forecaster application.""" import logging import sys logging.basicConfig( datefmt='%Y/%m/%d %H.%M.%S', format='%(levelname)s:%(name)s:%(message)s', level=logging.DEBUG, stream=sys.stdout, ) logger = logging.getLogger('launch_app') if __name__ == '__main__': from flask_forecaster import app host, port = app.config['SERVER_NAME'].split(':') logger.info('starting app on %s, %s', host, port) app.run(host=host, port=int(port))
Set global logging level to DEBUG
Set global logging level to DEBUG
Python
isc
textbook/flask-forecaster,textbook/flask-forecaster
--- +++ @@ -8,7 +8,7 @@ logging.basicConfig( datefmt='%Y/%m/%d %H.%M.%S', format='%(levelname)s:%(name)s:%(message)s', - level=logging.INFO, + level=logging.DEBUG, stream=sys.stdout, )
e5b42db249dd94a0d7652881a8bba8ed78772d3e
examples/turnAndMove.py
examples/turnAndMove.py
import slither, pygame snakey = slither.Sprite() snakey.setCostumeByName("costume0") snakey.goto(0, 0) slither.slitherStage.setColor(40, 222, 40) slither.setup() # Begin slither def handlequit(): print("Quitting...") return True slither.registerCallback(pygame.QUIT, handlequit) # This uses the direct call form @slither.registerCallback(pygame.MOUSEBUTTONUP) # This uses the decorator form def handlemouseup(event): print("Mouseup:", event.pos, event.button) def run_a_frame(): snakey.xpos += 1 snakey.ypos += 1 snakey.direction += 1 slither.runMainLoop(run_a_frame)
import slither, pygame snakey = slither.Sprite() snakey.setCostumeByName("costume0") snakey.goto(0, 0) slither.setup() # Begin slither def handlequit(): print("Quitting...") return True slither.registerCallback(pygame.QUIT, handlequit) # This uses the direct call form @slither.registerCallback(pygame.MOUSEBUTTONUP) # This uses the decorator form def handlemouseup(event): print("Mouseup:", event.pos, event.button) def run_a_frame(): snakey.xpos += 1 snakey.ypos += 1 snakey.direction += 1 slither.runMainLoop(run_a_frame)
Fix small test problem\nBTW rotation works now, thanks @BookOwl
Fix small test problem\nBTW rotation works now, thanks @BookOwl
Python
mit
PySlither/Slither,PySlither/Slither
--- +++ @@ -4,8 +4,6 @@ snakey.setCostumeByName("costume0") snakey.goto(0, 0) - -slither.slitherStage.setColor(40, 222, 40) slither.setup() # Begin slither
3ae63e055146ecb45b6943e661808b0546b42273
tests/test_playsong/test_query.py
tests/test_playsong/test_query.py
#!/usr/bin/env python # coding=utf-8 from __future__ import print_function, unicode_literals import nose.tools as nose from tests.utils import run_filter def test_ignore_case(): """should ignore case when querying songs""" results = run_filter('playsong', 'mr Blue SKY') nose.assert_equal(results[0]['title'], 'Mr. Blue Sky') def test_partial(): """should match partial queries""" results = run_filter('playsong', 'blue sky') nose.assert_equal(results[0]['title'], 'Mr. Blue Sky')
#!/usr/bin/env python # coding=utf-8 from __future__ import print_function, unicode_literals import nose.tools as nose from tests.utils import run_filter def test_ignore_case(): """should ignore case when querying songs""" results = run_filter('playsong', 'mr Blue SKY') nose.assert_equal(results[0]['title'], 'Mr. Blue Sky') def test_partial(): """should match partial queries when querying songs""" results = run_filter('playsong', 'blue sky') nose.assert_equal(results[0]['title'], 'Mr. Blue Sky')
Add extra description to partial match playsong test
Add extra description to partial match playsong test
Python
mit
caleb531/play-song,caleb531/play-song
--- +++ @@ -15,6 +15,6 @@ def test_partial(): - """should match partial queries""" + """should match partial queries when querying songs""" results = run_filter('playsong', 'blue sky') nose.assert_equal(results[0]['title'], 'Mr. Blue Sky')
d0f092afc9534d25b5ebf81ff329ad296e30952e
numpy/distutils/setup.py
numpy/distutils/setup.py
#!/usr/bin/env python from numpy.distutils.core import setup from numpy.distutils.misc_util import Configuration def configuration(parent_package='',top_path=None): config = Configuration('distutils',parent_package,top_path) config.add_subpackage('command') config.add_subpackage('fcompiler') config.add_data_dir('tests') config.make_config_py() return config.todict() if __name__ == '__main__': setup(**configuration(top_path=''))
#!/usr/bin/env python from numpy.distutils.core import setup from numpy.distutils.misc_util import Configuration def configuration(parent_package='',top_path=None): config = Configuration('distutils',parent_package,top_path) config.add_subpackage('command') config.add_subpackage('fcompiler') config.add_data_dir('tests') config.add_data_files('site.cfg') config.make_config_py() return config.todict() if __name__ == '__main__': setup(**configuration(top_path=''))
Add site.cfg to datafiles installed for numpy.distutils.
Add site.cfg to datafiles installed for numpy.distutils. git-svn-id: 77a43f9646713b91fea7788fad5dfbf67e151ece@2011 94b884b6-d6fd-0310-90d3-974f1d3f35e1
Python
bsd-3-clause
teoliphant/numpy-refactor,chadnetzer/numpy-gaurdro,teoliphant/numpy-refactor,jasonmccampbell/numpy-refactor-sprint,teoliphant/numpy-refactor,Ademan/NumPy-GSoC,illume/numpy3k,chadnetzer/numpy-gaurdro,jasonmccampbell/numpy-refactor-sprint,efiring/numpy-work,illume/numpy3k,illume/numpy3k,jasonmccampbell/numpy-refactor-sprint,chadnetzer/numpy-gaurdro,teoliphant/numpy-refactor,teoliphant/numpy-refactor,efiring/numpy-work,efiring/numpy-work,Ademan/NumPy-GSoC,efiring/numpy-work,chadnetzer/numpy-gaurdro,jasonmccampbell/numpy-refactor-sprint,Ademan/NumPy-GSoC,Ademan/NumPy-GSoC,illume/numpy3k
--- +++ @@ -7,6 +7,7 @@ config.add_subpackage('command') config.add_subpackage('fcompiler') config.add_data_dir('tests') + config.add_data_files('site.cfg') config.make_config_py() return config.todict()
a2572d38eeaa7c004142a194b18fd6fdfff99f9a
test/test_translate.py
test/test_translate.py
from Bio import SeqIO import logging import unittest from select_taxa import select_genomes_by_ids import translate class Test(unittest.TestCase): def setUp(self): self.longMessage = True logging.root.setLevel(logging.DEBUG) def test_translate_genomes(self): # Select genomes genomes = select_genomes_by_ids(['13305.1']).values() # Call translate dnafiles, aafiles = translate.translate_genomes(genomes) # Verify expected output first_header = '13305.1|NC_008253.1|YP_667942.1|None|thr' first = next(SeqIO.parse(dnafiles[0], 'fasta')) self.assertEqual(first_header, first.id) first = next(SeqIO.parse(aafiles[0], 'fasta')) self.assertEqual(first_header, first.id)
from Bio import SeqIO import logging import unittest from select_taxa import select_genomes_by_ids import translate class Test(unittest.TestCase): def setUp(self): self.longMessage = True logging.root.setLevel(logging.DEBUG) def test_translate_genomes(self): # Select genomes genomes = select_genomes_by_ids(['13305.1']).values() # Call translate dnafiles, aafiles = translate.translate_genomes(genomes) # Verify expected output first_header = '13305.1|NC_008253.1|YP_667942.1|None|thr' first = next(SeqIO.parse(dnafiles[0], 'fasta')) self.assertEqual(first_header, first.id) first = next(SeqIO.parse(aafiles[0], 'fasta')) self.assertEqual(first_header, first.id) # Verify no header appears twice headers = [record.id for record in SeqIO.parse(aafiles[0], 'fasta')] self.assertEqual(len(headers), len(set(headers))) def test_translate_93125_2(self): # Select genomes genomes = select_genomes_by_ids(['93125.2']).values() # Call translate aafiles = translate.translate_genomes(genomes)[1] # Verify no header appears twice headers = [record.id for record in SeqIO.parse(aafiles[0], 'fasta')] self.assertEqual(len(headers), len(set(headers)))
Verify no header appears twice when translating 93125.2
Verify no header appears twice when translating 93125.2
Python
mit
ODoSE/odose.nl
--- +++ @@ -25,3 +25,18 @@ self.assertEqual(first_header, first.id) first = next(SeqIO.parse(aafiles[0], 'fasta')) self.assertEqual(first_header, first.id) + + # Verify no header appears twice + headers = [record.id for record in SeqIO.parse(aafiles[0], 'fasta')] + self.assertEqual(len(headers), len(set(headers))) + + def test_translate_93125_2(self): + # Select genomes + genomes = select_genomes_by_ids(['93125.2']).values() + + # Call translate + aafiles = translate.translate_genomes(genomes)[1] + + # Verify no header appears twice + headers = [record.id for record in SeqIO.parse(aafiles[0], 'fasta')] + self.assertEqual(len(headers), len(set(headers)))
9cadf855a4506e29009a910206c6ce213279aafe
tests/test_configuration.py
tests/test_configuration.py
# -*- coding: utf-8 -*- """ test_configuration ~~~~~~~~~~~~~~~~~~ Basic configuration tests """ import base64 import pytest from utils import authenticate, logout @pytest.mark.settings( logout_url='/custom_logout', login_url='/custom_login', post_login_view='/post_login', post_logout_view='/post_logout', default_http_auth_realm='Custom Realm') def test_view_configuration(client): response = client.get('/custom_login') assert b"<h1>Login</h1>" in response.data response = authenticate(client, endpoint='/custom_login', follow_redirects=True) assert b'Post Login' in response.data response = logout(client, endpoint='/custom_logout', follow_redirects=True) assert b'Post Logout' in response.data response = client.get('/http', headers={ 'Authorization': 'Basic %s' % base64.b64encode(b"joe@lp.com:bogus") }) assert b'<h1>Unauthorized</h1>' in response.data assert 'WWW-Authenticate' in response.headers assert 'Basic realm="Custom Realm"' == response.headers['WWW-Authenticate'] @pytest.mark.settings(login_user_template='custom_security/login_user.html') def test_template_configuration(client): response = client.get('/login') assert b'CUSTOM LOGIN USER' in response.data
# -*- coding: utf-8 -*- """ test_configuration ~~~~~~~~~~~~~~~~~~ Basic configuration tests """ import base64 import pytest from utils import authenticate, logout @pytest.mark.settings( logout_url='/custom_logout', login_url='/custom_login', post_login_view='/post_login', post_logout_view='/post_logout', default_http_auth_realm='Custom Realm') def test_view_configuration(client): response = client.get('/custom_login') assert b"<h1>Login</h1>" in response.data response = authenticate(client, endpoint='/custom_login') assert b'location' in response.headers assert response.headers['Location'] == 'http://localhost/post_login' response = logout(client, endpoint='/custom_logout') assert b'location' in response.headers assert response.headers['Location'] == 'http://localhost/post_logout' response = client.get('/http', headers={ 'Authorization': 'Basic %s' % base64.b64encode(b"joe@lp.com:bogus") }) assert b'<h1>Unauthorized</h1>' in response.data assert 'WWW-Authenticate' in response.headers assert 'Basic realm="Custom Realm"' == response.headers['WWW-Authenticate'] @pytest.mark.settings(login_user_template='custom_security/login_user.html') def test_template_configuration(client): response = client.get('/login') assert b'CUSTOM LOGIN USER' in response.data
Adjust POST_LOGIN_VIEW and POST_LOGOUT_VIEW test
Adjust POST_LOGIN_VIEW and POST_LOGOUT_VIEW test
Python
mit
tatataufik/flask-security,quokkaproject/flask-security,wjt/flask-security,mik3cap/private-flask-security,dlakata/flask-security,jonafato/flask-security,nfvs/flask-security,themylogin/flask-security,CodeSolid/flask-security,GregoryVigoTorres/flask-security,inveniosoftware/flask-security-fork,fuhrysteve/flask-security,Samael500/flask-security,inveniosoftware/flask-security-fork,jonafato/flask-security,felix1m/flask-security,mik3cap/private-flask-security,simright/flask-security,covertgeek/flask-security,LeonhardPrintz/flask-security-fork,wjt/flask-security,mattupstate/flask-security,asmodehn/flask-security,simright/flask-security,pawl/flask-security,dlakata/flask-security,dommert/flask-security,CodeSolid/flask-security,LeonhardPrintz/flask-security-fork,guoqiao/flask-security,redpandalabs/flask-security,x5a/flask-security,fmerges/flask-security,LeonhardPrintz/flask-security-fork,dommert/flask-security,Samael500/flask-security,redpandalabs/flask-security,pawl/flask-security,x5a/flask-security,a-pertsev/flask-security,quokkaproject/flask-security,tatataufik/flask-security,yingbo/flask-security,mafrosis/flask-security,inveniosoftware/flask-security-fork,guoqiao/flask-security,felix1m/flask-security,fmerges/flask-security,themylogin/flask-security,yingbo/flask-security,reustle/flask-security,reustle/flask-security,nfvs/flask-security,mafrosis/flask-security,a-pertsev/flask-security,mattupstate/flask-security,asmodehn/flask-security,fuhrysteve/flask-security,GregoryVigoTorres/flask-security,covertgeek/flask-security
--- +++ @@ -23,11 +23,13 @@ response = client.get('/custom_login') assert b"<h1>Login</h1>" in response.data - response = authenticate(client, endpoint='/custom_login', follow_redirects=True) - assert b'Post Login' in response.data + response = authenticate(client, endpoint='/custom_login') + assert b'location' in response.headers + assert response.headers['Location'] == 'http://localhost/post_login' - response = logout(client, endpoint='/custom_logout', follow_redirects=True) - assert b'Post Logout' in response.data + response = logout(client, endpoint='/custom_logout') + assert b'location' in response.headers + assert response.headers['Location'] == 'http://localhost/post_logout' response = client.get('/http', headers={ 'Authorization': 'Basic %s' % base64.b64encode(b"joe@lp.com:bogus")
52bfbea4e2cb17268349b61c7f00b9253755e74d
example/books/models.py
example/books/models.py
from __future__ import unicode_literals from django.core.urlresolvers import reverse from django.db import models import generic_scaffold class Book(models.Model): title = models.CharField(max_length=128) author = models.CharField(max_length=128) category = models.CharField(max_length=32) def get_absolute_url(self): return reverse(self.detail_url_name, args=[self.id])
from __future__ import unicode_literals try: from django.core.urlresolvers import reverse except ModuleNotFoundError: from django.urls import reverse from django.db import models import generic_scaffold class Book(models.Model): title = models.CharField(max_length=128) author = models.CharField(max_length=128) category = models.CharField(max_length=32) def get_absolute_url(self): return reverse(self.detail_url_name, args=[self.id]) def __str__(self): return '{0} {1} {2}'.format(self.title, self.author, self.category)
Add support for django 2 to example project
Add support for django 2 to example project
Python
mit
spapas/django-generic-scaffold,spapas/django-generic-scaffold
--- +++ @@ -1,6 +1,10 @@ from __future__ import unicode_literals -from django.core.urlresolvers import reverse +try: + from django.core.urlresolvers import reverse +except ModuleNotFoundError: + from django.urls import reverse + from django.db import models import generic_scaffold @@ -11,3 +15,6 @@ def get_absolute_url(self): return reverse(self.detail_url_name, args=[self.id]) + + def __str__(self): + return '{0} {1} {2}'.format(self.title, self.author, self.category)
50d9c1494c5f14ccc7cb7fa32979e11e19ee1eb8
utils/etc.py
utils/etc.py
def reverse_insort(seq, val, lo=0, hi=None): if hi is None: hi = len(seq) while lo < hi: mid = (lo + hi) // 2 if val > seq[mid]: hi = mid else: lo = mid + 1 seq.insert(lo, val) def default_channel(member): return next((channel for channel in member.guild.text_channels if channel.permissions_for(member).read_messages), None)
def reverse_insort(seq, val, lo=0, hi=None): if hi is None: hi = len(seq) while lo < hi: mid = (lo + hi) // 2 if val > seq[mid]: hi = mid else: lo = mid + 1 seq.insert(lo, val) def default_channel(member): return next((channel for channel in member.guild.text_channels if channel.permissions_for(member).send_messages), None)
Change default channel to send_messages
Change default channel to send_messages
Python
mit
BeatButton/beattie,BeatButton/beattie-bot
--- +++ @@ -12,4 +12,4 @@ def default_channel(member): return next((channel for channel in member.guild.text_channels - if channel.permissions_for(member).read_messages), None) + if channel.permissions_for(member).send_messages), None)
97811dc9b81d84ae1c074be00ebea1dac8c7f2fc
signac/gui/__init__.py
signac/gui/__init__.py
# Copyright (c) 2016 The Regents of the University of Michigan # All rights reserved. # This software is licensed under the BSD 3-Clause License. """Graphical User Interface (GUI) for configuration and database inspection. The GUI is a leight-weight interface which makes the configuration of the signac framework and data inspection more straight-forward.""" import logging logger = logging.getLogger(__name__) try: import PySide # noqa import pymongo # noqa except ImportError as error: logger.debug("{}. The signac gui is not available.".format(error)) def main(): """Start signac-gui. The gui requires PySide and pymongo.""" raise ImportError(msg) else: from .gui import main __all__ = ['main']
# Copyright (c) 2016 The Regents of the University of Michigan # All rights reserved. # This software is licensed under the BSD 3-Clause License. """Graphical User Interface (GUI) for configuration and database inspection. The GUI is a leight-weight interface which makes the configuration of the signac framework and data inspection more straight-forward.""" import logging logger = logging.getLogger(__name__) try: import PySide # noqa import pymongo # noqa except Exception as error: msg = 'The signac gui is not available, because of an error: "{}".' logger.debug(msg.format(error)) def main(): """Start signac-gui. The gui requires PySide and pymongo.""" import PySide # noqa import pymongo # noqa else: from .gui import main __all__ = ['main']
Make package more robust against PySide import errors.
Make package more robust against PySide import errors. A bad PySide install may lead to exceptions other than ImportErrors on import. In this case any attempt to import the package will fail, even when the GUI is not even used. Any error occuring during the attempted import of the PySide package will now be logged to the debug stream, but otherwise ignored. Another attempt on import the PySide package will be made when the user actually starts the GUI.
Python
bsd-3-clause
csadorf/signac,csadorf/signac
--- +++ @@ -12,14 +12,16 @@ try: import PySide # noqa import pymongo # noqa -except ImportError as error: - logger.debug("{}. The signac gui is not available.".format(error)) +except Exception as error: + msg = 'The signac gui is not available, because of an error: "{}".' + logger.debug(msg.format(error)) def main(): """Start signac-gui. The gui requires PySide and pymongo.""" - raise ImportError(msg) + import PySide # noqa + import pymongo # noqa else: from .gui import main
e9c83d59fbb5b341e2126039109e306875db0490
syweb/__init__.py
syweb/__init__.py
import os with open(os.path.join(os.path.dirname(__file__), "webclient/VERSION")) as f: __version__ = f.read().strip()
import os def installed_location(): return __file__ with open(os.path.join(os.path.dirname(installed_location()), "webclient/VERSION")) as f: __version__ = f.read().strip()
Add an 'installed_location()' function so syweb can report its own location
Add an 'installed_location()' function so syweb can report its own location
Python
apache-2.0
williamboman/matrix-angular-sdk,williamboman/matrix-angular-sdk,matrix-org/matrix-angular-sdk,williamboman/matrix-angular-sdk,matrix-org/matrix-angular-sdk,matrix-org/matrix-angular-sdk
--- +++ @@ -1,4 +1,7 @@ import os -with open(os.path.join(os.path.dirname(__file__), "webclient/VERSION")) as f: +def installed_location(): + return __file__ + +with open(os.path.join(os.path.dirname(installed_location()), "webclient/VERSION")) as f: __version__ = f.read().strip()
0a779f17e19f18c8f7e734e7e61367712fe9e52a
examples/worker_rush.py
examples/worker_rush.py
import sc2 from sc2 import run_game, maps, Race, Difficulty from sc2.player import Bot, Computer class WorkerRushBot(sc2.BotAI): async def on_step(self, iteration): if iteration == 0: for worker in self.workers: await self.do(worker.attack(self.enemy_start_locations[0])) def main(): run_game(maps.get("Abyssal Reef LE"), [ Bot(Race.Zerg, WorkerRushBot()), Computer(Race.Protoss, Difficulty.Medium) ], realtime=True) if __name__ == '__main__': main()
from sc2 import run_game, maps, Race, Difficulty, BotAI from sc2.player import Bot, Computer class WorkerRushBot(BotAI): def __init__(self): super().__init__() self.actions = [] async def on_step(self, iteration): self.actions = [] if iteration == 0: target = self.enemy_start_locations[0] for worker in self.workers: self.actions.append(worker.attack(target)) await self.do_actions(self.actions) def main(): run_game(maps.get("Abyssal Reef LE"), [ Bot(Race.Zerg, WorkerRushBot()), Computer(Race.Protoss, Difficulty.Medium) ], realtime=True) if __name__ == '__main__': main()
Use do_actions() instead of do() in WorkerRushBot
Use do_actions() instead of do() in WorkerRushBot
Python
mit
Dentosal/python-sc2
--- +++ @@ -1,12 +1,21 @@ -import sc2 -from sc2 import run_game, maps, Race, Difficulty +from sc2 import run_game, maps, Race, Difficulty, BotAI from sc2.player import Bot, Computer -class WorkerRushBot(sc2.BotAI): +class WorkerRushBot(BotAI): + def __init__(self): + super().__init__() + self.actions = [] + async def on_step(self, iteration): + self.actions = [] + if iteration == 0: + target = self.enemy_start_locations[0] + for worker in self.workers: - await self.do(worker.attack(self.enemy_start_locations[0])) + self.actions.append(worker.attack(target)) + + await self.do_actions(self.actions) def main(): run_game(maps.get("Abyssal Reef LE"), [
258df4932fe937c0baf45d30de88c194f7f7718a
conftest.py
conftest.py
import numba import numpy import pkg_resources import pytest # The first version of numpy that broke backwards compat and improved printing. # # We set the printing format to legacy to maintain our doctests' compatibility # with both newer and older versions. # # See: https://docs.scipy.org/doc/numpy/release.html#many-changes-to-array-printing-disableable-with-the-new-legacy-printing-mode # NUMPY_PRINT_ALTERING_VERSION = pkg_resources.parse_version('1.14.0') @pytest.fixture(autouse=True) def add_preconfigured_np(doctest_namespace): """ Fixture executed for every doctest. Injects pre-configured numpy into each test's namespace. Note that even with this, doctests might fail due to the lack of full compatibility when using ``numpy.set_printoptions(legacy='1.13')``. Some of the whitespace issues can be fixed by ``NORMALIZE_WHITESPACE`` doctest option, which is currently set in ``pytest.ini``. See: https://github.com/numpy/numpy/issues/10383 """ current_version = pkg_resources.parse_version(numpy.__version__) if current_version >= NUMPY_PRINT_ALTERING_VERSION: numpy.set_printoptions(legacy='1.13') doctest_namespace['np'] = numpy def pytest_report_header(config): return 'Testing fastats using: NumPy {}, numba {}'.format( numpy.__version__, numba.__version__ )
import numba import numpy import pkg_resources import pytest import scipy # The first version of numpy that broke backwards compat and improved printing. # # We set the printing format to legacy to maintain our doctests' compatibility # with both newer and older versions. # # See: https://docs.scipy.org/doc/numpy/release.html#many-changes-to-array-printing-disableable-with-the-new-legacy-printing-mode # NUMPY_PRINT_ALTERING_VERSION = pkg_resources.parse_version('1.14.0') @pytest.fixture(autouse=True) def add_preconfigured_np(doctest_namespace): """ Fixture executed for every doctest. Injects pre-configured numpy into each test's namespace. Note that even with this, doctests might fail due to the lack of full compatibility when using ``numpy.set_printoptions(legacy='1.13')``. Some of the whitespace issues can be fixed by ``NORMALIZE_WHITESPACE`` doctest option, which is currently set in ``pytest.ini``. See: https://github.com/numpy/numpy/issues/10383 """ current_version = pkg_resources.parse_version(numpy.__version__) if current_version >= NUMPY_PRINT_ALTERING_VERSION: numpy.set_printoptions(legacy='1.13') doctest_namespace['np'] = numpy def pytest_report_header(config): return 'Testing fastats using: Numba {}, NumPy {}, SciPy {}'.format( numba.__version__, numpy.__version__, scipy.__version__, )
Add SciPy version to pytest header
Add SciPy version to pytest header
Python
mit
dwillmer/fastats,fastats/fastats
--- +++ @@ -3,6 +3,7 @@ import numpy import pkg_resources import pytest +import scipy # The first version of numpy that broke backwards compat and improved printing. @@ -39,6 +40,6 @@ def pytest_report_header(config): - return 'Testing fastats using: NumPy {}, numba {}'.format( - numpy.__version__, numba.__version__ + return 'Testing fastats using: Numba {}, NumPy {}, SciPy {}'.format( + numba.__version__, numpy.__version__, scipy.__version__, )
50dea10e4b0dfac459a2e4229cfe2ccbe3500b11
poradnia/config/local.py
poradnia/config/local.py
# -*- coding: utf-8 -*- ''' Local Configurations - Runs in Debug mode - Uses console backend for emails - Use Django Debug Toolbar ''' from configurations import values from .common import Common class Local(Common): # DEBUG DEBUG = values.BooleanValue(True) TEMPLATE_DEBUG = DEBUG # END DEBUG # INSTALLED_APPS INSTALLED_APPS = Common.INSTALLED_APPS # END INSTALLED_APPS # Mail settings EMAIL_HOST = "localhost" EMAIL_PORT = 1025 EMAIL_BACKEND = values.Value('django.core.mail.backends.console.EmailBackend') # End mail settings # django-debug-toolbar MIDDLEWARE_CLASSES = Common.MIDDLEWARE_CLASSES + ('debug_toolbar.middleware.DebugToolbarMiddleware',) INSTALLED_APPS += ('debug_toolbar', 'django_extensions', 'autofixture',) INTERNAL_IPS = ('127.0.0.1',) DEBUG_TOOLBAR_CONFIG = { 'DISABLE_PANELS': [ 'debug_toolbar.panels.redirects.RedirectsPanel', ], 'SHOW_TEMPLATE_CONTEXT': True, } # end django-debug-toolbar # Your local stuff: Below this line define 3rd party libary settings
# -*- coding: utf-8 -*- ''' Local Configurations - Runs in Debug mode - Uses console backend for emails - Use Django Debug Toolbar ''' from configurations import values from .common import Common class Local(Common): # DEBUG DEBUG = values.BooleanValue(True) TEMPLATE_DEBUG = DEBUG # END DEBUG # INSTALLED_APPS INSTALLED_APPS = Common.INSTALLED_APPS # END INSTALLED_APPS # Mail settings EMAIL_HOST = "localhost" EMAIL_PORT = 1025 EMAIL_BACKEND = values.Value('django.core.mail.backends.console.EmailBackend') # End mail settings # django-debug-toolbar MIDDLEWARE_CLASSES = Common.MIDDLEWARE_CLASSES + ('debug_toolbar.middleware.DebugToolbarMiddleware',) INSTALLED_APPS += ('debug_toolbar', 'django_extensions', 'autofixture',) INTERNAL_IPS = ('127.0.0.1', '10.0.2.2', ) DEBUG_TOOLBAR_CONFIG = { 'DISABLE_PANELS': [ 'debug_toolbar.panels.redirects.RedirectsPanel', ], 'SHOW_TEMPLATE_CONTEXT': True, } # end django-debug-toolbar # Your local stuff: Below this line define 3rd party libary settings
Add Virtualbox's/Vagrant's IP to Internal IP
Add Virtualbox's/Vagrant's IP to Internal IP
Python
mit
watchdogpolska/poradnia,watchdogpolska/poradnia.siecobywatelska.pl,rwakulszowa/poradnia,rwakulszowa/poradnia,watchdogpolska/poradnia,watchdogpolska/poradnia.siecobywatelska.pl,rwakulszowa/poradnia,watchdogpolska/poradnia,watchdogpolska/poradnia.siecobywatelska.pl,rwakulszowa/poradnia,watchdogpolska/poradnia
--- +++ @@ -31,7 +31,7 @@ MIDDLEWARE_CLASSES = Common.MIDDLEWARE_CLASSES + ('debug_toolbar.middleware.DebugToolbarMiddleware',) INSTALLED_APPS += ('debug_toolbar', 'django_extensions', 'autofixture',) - INTERNAL_IPS = ('127.0.0.1',) + INTERNAL_IPS = ('127.0.0.1', '10.0.2.2', ) DEBUG_TOOLBAR_CONFIG = { 'DISABLE_PANELS': [
ccb021e4f672b02d63236207573cc5f7746012e2
apps/uploads/management/commands/process_uploads.py
apps/uploads/management/commands/process_uploads.py
import logging LOGGER = logging.getLogger('apps.uploads') from django.core.management.base import BaseCommand, CommandError from apps.uploads.models import DropboxUploadFile, ManualUploadFile class Command(BaseCommand): help = """Regular run of new dropbox links: manage.py process_uploads """ def handle(self, **options): for cls in [DropboxUploadFile, ManualUploadFile]: for d_file in cls.objects.filter(retrieval_start__isnull=True): print " + Downloading: %s" % d_file.url d_file.download_now() if d_file.retrieval_error: print " ! Error downloading"
"""Download from urls any uploads from outside sources""" import logging from django.utils.timezone import now from django.core.management.base import BaseCommand, CommandError from apps.uploads.models import DropboxUploadFile, ManualUploadFile, ResumableUploadFile LOGGER = logging.getLogger('apps.uploads') class Command(BaseCommand): """Run command for uploads""" help = __doc__ + """: manage.py process_uploads """ @staticmethod def handle(**_): """Handle script call""" for cls in (DropboxUploadFile, ManualUploadFile): for d_file in cls.objects.filter(retrieval_start__isnull=True): print " + Downloading: %s" % d_file.url d_file.download_now() if d_file.retrieval_error: print " ! Error downloading" count = ResumableUploadFile.objects.filter(retrieval_start__isnull=True).update( retrieval_error='Retry resumable upload, can not happen in server.', retrieval_start=now(), ) if count: print(" * Resumable uploads marked as impossible: {}".format(count))
Mark resuable uploads as broken if they are
Mark resuable uploads as broken if they are
Python
agpl-3.0
IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site
--- +++ @@ -1,21 +1,31 @@ +"""Download from urls any uploads from outside sources""" +import logging -import logging +from django.utils.timezone import now +from django.core.management.base import BaseCommand, CommandError +from apps.uploads.models import DropboxUploadFile, ManualUploadFile, ResumableUploadFile + LOGGER = logging.getLogger('apps.uploads') -from django.core.management.base import BaseCommand, CommandError -from apps.uploads.models import DropboxUploadFile, ManualUploadFile - class Command(BaseCommand): - help = """Regular run of new dropbox links: + """Run command for uploads""" + help = __doc__ + """: manage.py process_uploads """ - def handle(self, **options): - for cls in [DropboxUploadFile, ManualUploadFile]: + @staticmethod + def handle(**_): + """Handle script call""" + for cls in (DropboxUploadFile, ManualUploadFile): for d_file in cls.objects.filter(retrieval_start__isnull=True): print " + Downloading: %s" % d_file.url d_file.download_now() if d_file.retrieval_error: print " ! Error downloading" - + count = ResumableUploadFile.objects.filter(retrieval_start__isnull=True).update( + retrieval_error='Retry resumable upload, can not happen in server.', + retrieval_start=now(), + ) + if count: + print(" * Resumable uploads marked as impossible: {}".format(count))
21ab4cb4bb50acd7598b09ebceec20c7302061da
scikits/learn/datasets/tests/test_20news.py
scikits/learn/datasets/tests/test_20news.py
"""Test the 20news downloader, if the data is available.""" import numpy as np from nose.tools import assert_equal from nose.tools import assert_true from nose.plugins.skip import SkipTest from scikits.learn import datasets def test_20news(): try: data = datasets.fetch_20newsgroups(subset='all', download_if_missing=False, shuffle=False) except IOError: raise SkipTest("Download 20 newsgroups to run this test") # Extract a reduced dataset data2cats = datasets.fetch_20newsgroups(subset='all', categories=data.target_names[-1:-3:-1], shuffle=False) # Check that the ordering of the target_names is the same # as the ordering in the full dataset assert_equal(data2cats.target_names, data.target_names[-2:]) # Assert that we have only 0 and 1 as labels assert_equal(np.unique(data2cats.target).tolist(), [0, 1]) # Check that the first entry of the reduced dataset corresponds to # the first entry of the corresponding category in the full dataset entry1 = data2cats.data[0] category = data2cats.target_names[data2cats.target[0]] label = data.target_names.index(category) entry2 = data.data[np.where(data.target == label)[0][0]] assert_equal(entry1, entry2) # check that the filenames are available too assert_true(data.filenames[0].endswith( "20news_home/20news-bydate-test/talk.politics.mideast/76560"))
"""Test the 20news downloader, if the data is available.""" import numpy as np from nose.tools import assert_equal from nose.plugins.skip import SkipTest from scikits.learn import datasets def test_20news(): try: data = datasets.fetch_20newsgroups(subset='all', download_if_missing=False, shuffle=False) except IOError: raise SkipTest("Download 20 newsgroups to run this test") # Extract a reduced dataset data2cats = datasets.fetch_20newsgroups(subset='all', categories=data.target_names[-1:-3:-1], shuffle=False) # Check that the ordering of the target_names is the same # as the ordering in the full dataset assert_equal(data2cats.target_names, data.target_names[-2:]) # Assert that we have only 0 and 1 as labels assert_equal(np.unique(data2cats.target).tolist(), [0, 1]) # Check that the first entry of the reduced dataset corresponds to # the first entry of the corresponding category in the full dataset entry1 = data2cats.data[0] category = data2cats.target_names[data2cats.target[0]] label = data.target_names.index(category) entry2 = data.data[np.where(data.target == label)[0][0]] assert_equal(entry1, entry2)
Fix a bug introduced in rebasing
BUG: Fix a bug introduced in rebasing
Python
bsd-3-clause
krez13/scikit-learn,ChanderG/scikit-learn,466152112/scikit-learn,ChanChiChoi/scikit-learn,belltailjp/scikit-learn,tdhopper/scikit-learn,krez13/scikit-learn,yyjiang/scikit-learn,schets/scikit-learn,TomDLT/scikit-learn,xubenben/scikit-learn,MohammedWasim/scikit-learn,0x0all/scikit-learn,mblondel/scikit-learn,aewhatley/scikit-learn,spallavolu/scikit-learn,abimannans/scikit-learn,yask123/scikit-learn,henrykironde/scikit-learn,sergeyf/scikit-learn,moutai/scikit-learn,amueller/scikit-learn,ankurankan/scikit-learn,kmike/scikit-learn,marcocaccin/scikit-learn,bhargav/scikit-learn,vshtanko/scikit-learn,ahoyosid/scikit-learn,lbishal/scikit-learn,mattilyra/scikit-learn,anntzer/scikit-learn,jseabold/scikit-learn,AnasGhrab/scikit-learn,glouppe/scikit-learn,pypot/scikit-learn,pompiduskus/scikit-learn,olologin/scikit-learn,aetilley/scikit-learn,mojoboss/scikit-learn,btabibian/scikit-learn,PrashntS/scikit-learn,thilbern/scikit-learn,macks22/scikit-learn,ishanic/scikit-learn,PatrickOReilly/scikit-learn,toastedcornflakes/scikit-learn,vshtanko/scikit-learn,ngoix/OCRF,kagayakidan/scikit-learn,russel1237/scikit-learn,mfjb/scikit-learn,sergeyf/scikit-learn,voxlol/scikit-learn,liberatorqjw/scikit-learn,ogrisel/scikit-learn,shenzebang/scikit-learn,shikhardb/scikit-learn,shahankhatch/scikit-learn,UNR-AERIAL/scikit-learn,heli522/scikit-learn,sumspr/scikit-learn,tdhopper/scikit-learn,hlin117/scikit-learn,frank-tancf/scikit-learn,mugizico/scikit-learn,petosegan/scikit-learn,wlamond/scikit-learn,stylianos-kampakis/scikit-learn,pythonvietnam/scikit-learn,B3AU/waveTree,joshloyal/scikit-learn,manashmndl/scikit-learn,hrjn/scikit-learn,thientu/scikit-learn,zihua/scikit-learn,aflaxman/scikit-learn,huobaowangxi/scikit-learn,dhruv13J/scikit-learn,nrhine1/scikit-learn,btabibian/scikit-learn,spallavolu/scikit-learn,glemaitre/scikit-learn,trankmichael/scikit-learn,zhenv5/scikit-learn,nomadcube/scikit-learn,xzh86/scikit-learn,henridwyer/scikit-learn,altairpearl/scikit-learn,LiaoPan/scikit-learn,thilbern/sci
kit-learn,DSLituiev/scikit-learn,kjung/scikit-learn,lucidfrontier45/scikit-learn,etkirsch/scikit-learn,h2educ/scikit-learn,rvraghav93/scikit-learn,yunfeilu/scikit-learn,clemkoa/scikit-learn,manhhomienbienthuy/scikit-learn,Srisai85/scikit-learn,AlexRobson/scikit-learn,yask123/scikit-learn,AIML/scikit-learn,ndingwall/scikit-learn,jmetzen/scikit-learn,r-mart/scikit-learn,shahankhatch/scikit-learn,shyamalschandra/scikit-learn,ilyes14/scikit-learn,toastedcornflakes/scikit-learn,vigilv/scikit-learn,poryfly/scikit-learn,mojoboss/scikit-learn,fbagirov/scikit-learn,lbishal/scikit-learn,IndraVikas/scikit-learn,arahuja/scikit-learn,anurag313/scikit-learn,mblondel/scikit-learn,tawsifkhan/scikit-learn,chrisburr/scikit-learn,tosolveit/scikit-learn,larsmans/scikit-learn,jorik041/scikit-learn,Vimos/scikit-learn,jorge2703/scikit-learn,moutai/scikit-learn,alexeyum/scikit-learn,toastedcornflakes/scikit-learn,schets/scikit-learn,nhejazi/scikit-learn,billy-inn/scikit-learn,evgchz/scikit-learn,samuel1208/scikit-learn,jmschrei/scikit-learn,procoder317/scikit-learn,manhhomienbienthuy/scikit-learn,huobaowangxi/scikit-learn,icdishb/scikit-learn,bnaul/scikit-learn,voxlol/scikit-learn,depet/scikit-learn,anirudhjayaraman/scikit-learn,samzhang111/scikit-learn,rahuldhote/scikit-learn,Nyker510/scikit-learn,liangz0707/scikit-learn,iismd17/scikit-learn,lucidfrontier45/scikit-learn,lenovor/scikit-learn,heli522/scikit-learn,frank-tancf/scikit-learn,aewhatley/scikit-learn,ishanic/scikit-learn,robbymeals/scikit-learn,JsNoNo/scikit-learn,jaidevd/scikit-learn,waterponey/scikit-learn,PatrickOReilly/scikit-learn,voxlol/scikit-learn,yanlend/scikit-learn,kagayakidan/scikit-learn,wzbozon/scikit-learn,jlegendary/scikit-learn,stylianos-kampakis/scikit-learn,abimannans/scikit-learn,Akshay0724/scikit-learn,sumspr/scikit-learn,loli/semisupervisedforests,IshankGulati/scikit-learn,ElDeveloper/scikit-learn,jorik041/scikit-learn,mehdidc/scikit-learn,JPFrancoia/scikit-learn,scikit-learn/scikit-learn,vshtanko/scikit-lear
n,trungnt13/scikit-learn,mjgrav2001/scikit-learn,jayflo/scikit-learn,Nyker510/scikit-learn,q1ang/scikit-learn,Djabbz/scikit-learn,zorojean/scikit-learn,tomlof/scikit-learn,icdishb/scikit-learn,jlegendary/scikit-learn,cainiaocome/scikit-learn,eickenberg/scikit-learn,andrewnc/scikit-learn,RPGOne/scikit-learn,shikhardb/scikit-learn,wlamond/scikit-learn,sanketloke/scikit-learn,marcocaccin/scikit-learn,JosmanPS/scikit-learn,jmetzen/scikit-learn,UNR-AERIAL/scikit-learn,jkarnows/scikit-learn,mwv/scikit-learn,iismd17/scikit-learn,nvoron23/scikit-learn,jblackburne/scikit-learn,xubenben/scikit-learn,gclenaghan/scikit-learn,andrewnc/scikit-learn,shahankhatch/scikit-learn,sgenoud/scikit-learn,liberatorqjw/scikit-learn,wanggang3333/scikit-learn,jm-begon/scikit-learn,ephes/scikit-learn,alexsavio/scikit-learn,nesterione/scikit-learn,scikit-learn/scikit-learn,0x0all/scikit-learn,rexshihaoren/scikit-learn,kagayakidan/scikit-learn,jorge2703/scikit-learn,Obus/scikit-learn,eg-zhang/scikit-learn,shyamalschandra/scikit-learn,samuel1208/scikit-learn,luo66/scikit-learn,jblackburne/scikit-learn,murali-munna/scikit-learn,rohanp/scikit-learn,JosmanPS/scikit-learn,pypot/scikit-learn,alvarofierroclavero/scikit-learn,nesterione/scikit-learn,plissonf/scikit-learn,jpautom/scikit-learn,tmhm/scikit-learn,zihua/scikit-learn,thientu/scikit-learn,zuku1985/scikit-learn,ilo10/scikit-learn,shahankhatch/scikit-learn,ClimbsRocks/scikit-learn,mjudsp/Tsallis,f3r/scikit-learn,mhdella/scikit-learn,RachitKansal/scikit-learn,DSLituiev/scikit-learn,arabenjamin/scikit-learn,DonBeo/scikit-learn,gclenaghan/scikit-learn,stylianos-kampakis/scikit-learn,PrashntS/scikit-learn,samuel1208/scikit-learn,rahuldhote/scikit-learn,0x0all/scikit-learn,zuku1985/scikit-learn,simon-pepin/scikit-learn,ycaihua/scikit-learn,sgenoud/scikit-learn,mikebenfield/scikit-learn,JPFrancoia/scikit-learn,kylerbrown/scikit-learn,Garrett-R/scikit-learn,amueller/scikit-learn,chrisburr/scikit-learn,loli/semisupervisedforests,glemaitre/scikit-learn,mo
joboss/scikit-learn,kashif/scikit-learn,hainm/scikit-learn,jlegendary/scikit-learn,sonnyhu/scikit-learn,Achuth17/scikit-learn,xuewei4d/scikit-learn,qifeigit/scikit-learn,akionakamura/scikit-learn,eickenberg/scikit-learn,wazeerzulfikar/scikit-learn,zorroblue/scikit-learn,h2educ/scikit-learn,mhdella/scikit-learn,Vimos/scikit-learn,billy-inn/scikit-learn,JeanKossaifi/scikit-learn,abhishekgahlot/scikit-learn,maheshakya/scikit-learn,mayblue9/scikit-learn,ycaihua/scikit-learn,yyjiang/scikit-learn,xyguo/scikit-learn,fabianp/scikit-learn,vermouthmjl/scikit-learn,arabenjamin/scikit-learn,dingocuster/scikit-learn,abhishekkrthakur/scikit-learn,phdowling/scikit-learn,ZENGXH/scikit-learn,manashmndl/scikit-learn,maheshakya/scikit-learn,dhruv13J/scikit-learn,depet/scikit-learn,hsiaoyi0504/scikit-learn,yunfeilu/scikit-learn,cwu2011/scikit-learn,LiaoPan/scikit-learn,dsquareindia/scikit-learn,betatim/scikit-learn,pratapvardhan/scikit-learn,akionakamura/scikit-learn,bhargav/scikit-learn,NelisVerhoef/scikit-learn,harshaneelhg/scikit-learn,jjx02230808/project0223,ky822/scikit-learn,jjx02230808/project0223,ZENGXH/scikit-learn,stylianos-kampakis/scikit-learn,moutai/scikit-learn,kaichogami/scikit-learn,marcocaccin/scikit-learn,ycaihua/scikit-learn,altairpearl/scikit-learn,mlyundin/scikit-learn,joernhees/scikit-learn,terkkila/scikit-learn,LohithBlaze/scikit-learn,PatrickChrist/scikit-learn,joernhees/scikit-learn,kagayakidan/scikit-learn,shangwuhencc/scikit-learn,wazeerzulfikar/scikit-learn,pianomania/scikit-learn,Garrett-R/scikit-learn,mikebenfield/scikit-learn,fengzhyuan/scikit-learn,herilalaina/scikit-learn,jblackburne/scikit-learn,akionakamura/scikit-learn,treycausey/scikit-learn,arahuja/scikit-learn,petosegan/scikit-learn,ChanderG/scikit-learn,mrshu/scikit-learn,thilbern/scikit-learn,Lawrence-Liu/scikit-learn,dingocuster/scikit-learn,pianomania/scikit-learn,xyguo/scikit-learn,robbymeals/scikit-learn,PrashntS/scikit-learn,imaculate/scikit-learn,alvarofierroclavero/scikit-learn,huzq/sciki
t-learn,3manuek/scikit-learn,mattgiguere/scikit-learn,Lawrence-Liu/scikit-learn,rrohan/scikit-learn,cauchycui/scikit-learn,jakirkham/scikit-learn,h2educ/scikit-learn,Achuth17/scikit-learn,evgchz/scikit-learn,russel1237/scikit-learn,CforED/Machine-Learning,procoder317/scikit-learn,dsquareindia/scikit-learn,OshynSong/scikit-learn,victorbergelin/scikit-learn,ChanderG/scikit-learn,heli522/scikit-learn,mattilyra/scikit-learn,mwv/scikit-learn,Garrett-R/scikit-learn,zorroblue/scikit-learn,rsivapr/scikit-learn,rvraghav93/scikit-learn,mayblue9/scikit-learn,ZENGXH/scikit-learn,IssamLaradji/scikit-learn,waterponey/scikit-learn,wzbozon/scikit-learn,scikit-learn/scikit-learn,tmhm/scikit-learn,deepesch/scikit-learn,mhue/scikit-learn,theoryno3/scikit-learn,jaidevd/scikit-learn,robbymeals/scikit-learn,akionakamura/scikit-learn,themrmax/scikit-learn,RPGOne/scikit-learn,Aasmi/scikit-learn,ilo10/scikit-learn,pratapvardhan/scikit-learn,TomDLT/scikit-learn,espg/scikit-learn,terkkila/scikit-learn,cainiaocome/scikit-learn,alexsavio/scikit-learn,anntzer/scikit-learn,rexshihaoren/scikit-learn,vybstat/scikit-learn,h2educ/scikit-learn,xwolf12/scikit-learn,idlead/scikit-learn,lazywei/scikit-learn,ephes/scikit-learn,nmayorov/scikit-learn,roxyboy/scikit-learn,JsNoNo/scikit-learn,hsiaoyi0504/scikit-learn,LohithBlaze/scikit-learn,rohanp/scikit-learn,plissonf/scikit-learn,xwolf12/scikit-learn,khkaminska/scikit-learn,jkarnows/scikit-learn,Adai0808/scikit-learn,jm-begon/scikit-learn,kevin-intel/scikit-learn,IshankGulati/scikit-learn,glennq/scikit-learn,giorgiop/scikit-learn,michigraber/scikit-learn,rrohan/scikit-learn,Djabbz/scikit-learn,lbishal/scikit-learn,arjoly/scikit-learn,jayflo/scikit-learn,kylerbrown/scikit-learn,sergeyf/scikit-learn,rahuldhote/scikit-learn,rrohan/scikit-learn,wanggang3333/scikit-learn,mehdidc/scikit-learn,florian-f/sklearn,ogrisel/scikit-learn,PatrickOReilly/scikit-learn,hitszxp/scikit-learn,macks22/scikit-learn,PatrickChrist/scikit-learn,phdowling/scikit-learn,mattgiguere/s
cikit-learn,huzq/scikit-learn,IndraVikas/scikit-learn,vshtanko/scikit-learn,lin-credible/scikit-learn,nmayorov/scikit-learn,elkingtonmcb/scikit-learn,thilbern/scikit-learn,PatrickOReilly/scikit-learn,yunfeilu/scikit-learn,RomainBrault/scikit-learn,nmayorov/scikit-learn,ilo10/scikit-learn,equialgo/scikit-learn,ashhher3/scikit-learn,Sentient07/scikit-learn,gclenaghan/scikit-learn,fyffyt/scikit-learn,anirudhjayaraman/scikit-learn,PatrickChrist/scikit-learn,clemkoa/scikit-learn,jm-begon/scikit-learn,lesteve/scikit-learn,tosolveit/scikit-learn,untom/scikit-learn,DonBeo/scikit-learn,lesteve/scikit-learn,plissonf/scikit-learn,NelisVerhoef/scikit-learn,thientu/scikit-learn,vinayak-mehta/scikit-learn,vortex-ape/scikit-learn,glouppe/scikit-learn,simon-pepin/scikit-learn,jmschrei/scikit-learn,hainm/scikit-learn,AlexRobson/scikit-learn,qifeigit/scikit-learn,nesterione/scikit-learn,aminert/scikit-learn,alexsavio/scikit-learn,macks22/scikit-learn,arjoly/scikit-learn,zorojean/scikit-learn,cauchycui/scikit-learn,manhhomienbienthuy/scikit-learn,OshynSong/scikit-learn,mattgiguere/scikit-learn,Aasmi/scikit-learn,djgagne/scikit-learn,Jimmy-Morzaria/scikit-learn,ogrisel/scikit-learn,kashif/scikit-learn,vybstat/scikit-learn,fyffyt/scikit-learn,IndraVikas/scikit-learn,jm-begon/scikit-learn,f3r/scikit-learn,kjung/scikit-learn,YinongLong/scikit-learn,jpautom/scikit-learn,djgagne/scikit-learn,rahul-c1/scikit-learn,dingocuster/scikit-learn,pianomania/scikit-learn,aabadie/scikit-learn,jakobworldpeace/scikit-learn,mrshu/scikit-learn,tmhm/scikit-learn,AnasGhrab/scikit-learn,Obus/scikit-learn,appapantula/scikit-learn,anirudhjayaraman/scikit-learn,lazywei/scikit-learn,abhishekgahlot/scikit-learn,icdishb/scikit-learn,mjgrav2001/scikit-learn,PrashntS/scikit-learn,moutai/scikit-learn,sarahgrogan/scikit-learn,xuewei4d/scikit-learn,massmutual/scikit-learn,hsuantien/scikit-learn,vibhorag/scikit-learn,pratapvardhan/scikit-learn,vigilv/scikit-learn,chrsrds/scikit-learn,fredhusser/scikit-learn,ilyes14/scik
it-learn,ChanderG/scikit-learn,0asa/scikit-learn,vybstat/scikit-learn,murali-munna/scikit-learn,cwu2011/scikit-learn,chrsrds/scikit-learn,devanshdalal/scikit-learn,anirudhjayaraman/scikit-learn,ky822/scikit-learn,RayMick/scikit-learn,tomlof/scikit-learn,etkirsch/scikit-learn,r-mart/scikit-learn,potash/scikit-learn,Clyde-fare/scikit-learn,manashmndl/scikit-learn,equialgo/scikit-learn,maheshakya/scikit-learn,saiwing-yeung/scikit-learn,MechCoder/scikit-learn,HolgerPeters/scikit-learn,JPFrancoia/scikit-learn,cainiaocome/scikit-learn,vinayak-mehta/scikit-learn,evgchz/scikit-learn,rohanp/scikit-learn,NunoEdgarGub1/scikit-learn,russel1237/scikit-learn,Akshay0724/scikit-learn,cl4rke/scikit-learn,vortex-ape/scikit-learn,yask123/scikit-learn,betatim/scikit-learn,xiaoxiamii/scikit-learn,theoryno3/scikit-learn,espg/scikit-learn,MohammedWasim/scikit-learn,ZenDevelopmentSystems/scikit-learn,Fireblend/scikit-learn,Srisai85/scikit-learn,RayMick/scikit-learn,terkkila/scikit-learn,lesteve/scikit-learn,sarahgrogan/scikit-learn,0asa/scikit-learn,elkingtonmcb/scikit-learn,samzhang111/scikit-learn,appapantula/scikit-learn,vinayak-mehta/scikit-learn,hainm/scikit-learn,sgenoud/scikit-learn,aflaxman/scikit-learn,mehdidc/scikit-learn,ankurankan/scikit-learn,luo66/scikit-learn,rsivapr/scikit-learn,themrmax/scikit-learn,zaxtax/scikit-learn,vigilv/scikit-learn,466152112/scikit-learn,abimannans/scikit-learn,Akshay0724/scikit-learn,shusenl/scikit-learn,sergeyf/scikit-learn,roxyboy/scikit-learn,jaidevd/scikit-learn,CVML/scikit-learn,belltailjp/scikit-learn,fbagirov/scikit-learn,cl4rke/scikit-learn,fzalkow/scikit-learn,NelisVerhoef/scikit-learn,ishanic/scikit-learn,trankmichael/scikit-learn,hdmetor/scikit-learn,massmutual/scikit-learn,LohithBlaze/scikit-learn,etkirsch/scikit-learn,rsivapr/scikit-learn,fredhusser/scikit-learn,rajat1994/scikit-learn,pypot/scikit-learn,gclenaghan/scikit-learn,f3r/scikit-learn,pythonvietnam/scikit-learn,hdmetor/scikit-learn,abhishekgahlot/scikit-learn,tawsifkhan/scikit
-learn,fredhusser/scikit-learn,ishanic/scikit-learn,harshaneelhg/scikit-learn,wazeerzulfikar/scikit-learn,trankmichael/scikit-learn,alexsavio/scikit-learn,Fireblend/scikit-learn,treycausey/scikit-learn,idlead/scikit-learn,NelisVerhoef/scikit-learn,AlexanderFabisch/scikit-learn,jzt5132/scikit-learn,nrhine1/scikit-learn,mhue/scikit-learn,anurag313/scikit-learn,victorbergelin/scikit-learn,zhenv5/scikit-learn,eg-zhang/scikit-learn,xyguo/scikit-learn,walterreade/scikit-learn,cybernet14/scikit-learn,pv/scikit-learn,Sentient07/scikit-learn,smartscheduling/scikit-learn-categorical-tree,trungnt13/scikit-learn,JeanKossaifi/scikit-learn,DSLituiev/scikit-learn,HolgerPeters/scikit-learn,yonglehou/scikit-learn,adamgreenhall/scikit-learn,nvoron23/scikit-learn,andaag/scikit-learn,larsmans/scikit-learn,hlin117/scikit-learn,qifeigit/scikit-learn,huobaowangxi/scikit-learn,larsmans/scikit-learn,sonnyhu/scikit-learn,ankurankan/scikit-learn,elkingtonmcb/scikit-learn,Clyde-fare/scikit-learn,3manuek/scikit-learn,AlexRobson/scikit-learn,eickenberg/scikit-learn,arahuja/scikit-learn,gotomypc/scikit-learn,murali-munna/scikit-learn,abhishekgahlot/scikit-learn,cwu2011/scikit-learn,lesteve/scikit-learn,ClimbsRocks/scikit-learn,fengzhyuan/scikit-learn,nelson-liu/scikit-learn,jorik041/scikit-learn,Srisai85/scikit-learn,ChanChiChoi/scikit-learn,siutanwong/scikit-learn,0asa/scikit-learn,MartinSavc/scikit-learn,beepee14/scikit-learn,glennq/scikit-learn,shusenl/scikit-learn,tawsifkhan/scikit-learn,equialgo/scikit-learn,NunoEdgarGub1/scikit-learn,macks22/scikit-learn,mwv/scikit-learn,madjelan/scikit-learn,Aasmi/scikit-learn,ominux/scikit-learn,ahoyosid/scikit-learn,andaag/scikit-learn,shenzebang/scikit-learn,yonglehou/scikit-learn,xubenben/scikit-learn,fengzhyuan/scikit-learn,treycausey/scikit-learn,glouppe/scikit-learn,jereze/scikit-learn,sonnyhu/scikit-learn,arjoly/scikit-learn,glennq/scikit-learn,untom/scikit-learn,AlexandreAbraham/scikit-learn,phdowling/scikit-learn,elkingtonmcb/scikit-learn,giorgio
p/scikit-learn,jseabold/scikit-learn,yonglehou/scikit-learn,ningchi/scikit-learn,idlead/scikit-learn,justincassidy/scikit-learn,marcocaccin/scikit-learn,0x0all/scikit-learn,tawsifkhan/scikit-learn,ivannz/scikit-learn,frank-tancf/scikit-learn,pompiduskus/scikit-learn,shenzebang/scikit-learn,ChanChiChoi/scikit-learn,robin-lai/scikit-learn,mlyundin/scikit-learn,ankurankan/scikit-learn,roxyboy/scikit-learn,ssaeger/scikit-learn,fbagirov/scikit-learn,shusenl/scikit-learn,murali-munna/scikit-learn,vortex-ape/scikit-learn,herilalaina/scikit-learn,appapantula/scikit-learn,xzh86/scikit-learn,imaculate/scikit-learn,vinayak-mehta/scikit-learn,mrshu/scikit-learn,IshankGulati/scikit-learn,yonglehou/scikit-learn,khkaminska/scikit-learn,theoryno3/scikit-learn,mjudsp/Tsallis,pkruskal/scikit-learn,schets/scikit-learn,lin-credible/scikit-learn,henridwyer/scikit-learn,mojoboss/scikit-learn,devanshdalal/scikit-learn,harshaneelhg/scikit-learn,pkruskal/scikit-learn,OshynSong/scikit-learn,chrsrds/scikit-learn,bhargav/scikit-learn,0asa/scikit-learn,mxjl620/scikit-learn,Barmaley-exe/scikit-learn,hsuantien/scikit-learn,abimannans/scikit-learn,MartinDelzant/scikit-learn,ltiao/scikit-learn,huobaowangxi/scikit-learn,costypetrisor/scikit-learn,appapantula/scikit-learn,kmike/scikit-learn,JosmanPS/scikit-learn,cdegroc/scikit-learn,chrisburr/scikit-learn,tmhm/scikit-learn,mayblue9/scikit-learn,ldirer/scikit-learn,RachitKansal/scikit-learn,yanlend/scikit-learn,clemkoa/scikit-learn,altairpearl/scikit-learn,MohammedWasim/scikit-learn,liangz0707/scikit-learn,Titan-C/scikit-learn,AlexandreAbraham/scikit-learn,altairpearl/scikit-learn,rahul-c1/scikit-learn,loli/sklearn-ensembletrees,MartinSavc/scikit-learn,jorge2703/scikit-learn,jaidevd/scikit-learn,MatthieuBizien/scikit-learn,joshloyal/scikit-learn,samzhang111/scikit-learn,aetilley/scikit-learn,CforED/Machine-Learning,r-mart/scikit-learn,imaculate/scikit-learn,manashmndl/scikit-learn,lin-credible/scikit-learn,nmayorov/scikit-learn,trankmichael/scikit-lea
rn,anurag313/scikit-learn,nrhine1/scikit-learn,mattilyra/scikit-learn,ashhher3/scikit-learn,Windy-Ground/scikit-learn,mblondel/scikit-learn,rexshihaoren/scikit-learn,q1ang/scikit-learn,dhruv13J/scikit-learn,depet/scikit-learn,nomadcube/scikit-learn,alexeyum/scikit-learn,raghavrv/scikit-learn,waterponey/scikit-learn,sumspr/scikit-learn,jakobworldpeace/scikit-learn,MechCoder/scikit-learn,shangwuhencc/scikit-learn,ElDeveloper/scikit-learn,deepesch/scikit-learn,imaculate/scikit-learn,ngoix/OCRF,PatrickChrist/scikit-learn,mehdidc/scikit-learn,samzhang111/scikit-learn,ZenDevelopmentSystems/scikit-learn,nhejazi/scikit-learn,waterponey/scikit-learn,wzbozon/scikit-learn,jakirkham/scikit-learn,kaichogami/scikit-learn,bthirion/scikit-learn,olologin/scikit-learn,clemkoa/scikit-learn,hrjn/scikit-learn,jjx02230808/project0223,jpautom/scikit-learn,spallavolu/scikit-learn,justincassidy/scikit-learn,bikong2/scikit-learn,schets/scikit-learn,ilyes14/scikit-learn,mattilyra/scikit-learn,petosegan/scikit-learn,dsullivan7/scikit-learn,fbagirov/scikit-learn,xyguo/scikit-learn,madjelan/scikit-learn,florian-f/sklearn,xzh86/scikit-learn,xzh86/scikit-learn,sinhrks/scikit-learn,hlin117/scikit-learn,vivekmishra1991/scikit-learn,JeanKossaifi/scikit-learn,cdegroc/scikit-learn,belltailjp/scikit-learn,rajat1994/scikit-learn,TomDLT/scikit-learn,dsullivan7/scikit-learn,adamgreenhall/scikit-learn,potash/scikit-learn,olologin/scikit-learn,IssamLaradji/scikit-learn,yyjiang/scikit-learn,victorbergelin/scikit-learn,ningchi/scikit-learn,lin-credible/scikit-learn,IssamLaradji/scikit-learn,liyu1990/sklearn,pv/scikit-learn,wanggang3333/scikit-learn,aminert/scikit-learn,dsquareindia/scikit-learn,sgenoud/scikit-learn,jkarnows/scikit-learn,MatthieuBizien/scikit-learn,zuku1985/scikit-learn,wazeerzulfikar/scikit-learn,glouppe/scikit-learn,andrewnc/scikit-learn,LohithBlaze/scikit-learn,JosmanPS/scikit-learn,mhue/scikit-learn,Myasuka/scikit-learn,jjx02230808/project0223,iismd17/scikit-learn,yanlend/scikit-learn,fabia
np/scikit-learn,davidgbe/scikit-learn,Nyker510/scikit-learn,madjelan/scikit-learn,zaxtax/scikit-learn,ngoix/OCRF,vibhorag/scikit-learn,untom/scikit-learn,jmetzen/scikit-learn,kashif/scikit-learn,mlyundin/scikit-learn,Obus/scikit-learn,loli/sklearn-ensembletrees,pnedunuri/scikit-learn,loli/sklearn-ensembletrees,Myasuka/scikit-learn,fabioticconi/scikit-learn,btabibian/scikit-learn,andrewnc/scikit-learn,ssaeger/scikit-learn,ephes/scikit-learn,kaichogami/scikit-learn,roxyboy/scikit-learn,devanshdalal/scikit-learn,ldirer/scikit-learn,robin-lai/scikit-learn,BiaDarkia/scikit-learn,ngoix/OCRF,raghavrv/scikit-learn,bigdataelephants/scikit-learn,rrohan/scikit-learn,hdmetor/scikit-learn,MartinDelzant/scikit-learn,ningchi/scikit-learn,michigraber/scikit-learn,yyjiang/scikit-learn,quheng/scikit-learn,lazywei/scikit-learn,pnedunuri/scikit-learn,mjudsp/Tsallis,davidgbe/scikit-learn,wanggang3333/scikit-learn,IndraVikas/scikit-learn,simon-pepin/scikit-learn,Myasuka/scikit-learn,ldirer/scikit-learn,vybstat/scikit-learn,gotomypc/scikit-learn,poryfly/scikit-learn,hlin117/scikit-learn,aetilley/scikit-learn,poryfly/scikit-learn,rvraghav93/scikit-learn,mlyundin/scikit-learn,davidgbe/scikit-learn,CforED/Machine-Learning,dsullivan7/scikit-learn,DonBeo/scikit-learn,henridwyer/scikit-learn,glennq/scikit-learn,trungnt13/scikit-learn,henrykironde/scikit-learn,rishikksh20/scikit-learn,Vimos/scikit-learn,RachitKansal/scikit-learn,Achuth17/scikit-learn,CforED/Machine-Learning,massmutual/scikit-learn,glemaitre/scikit-learn,q1ang/scikit-learn,RayMick/scikit-learn,saiwing-yeung/scikit-learn,jseabold/scikit-learn,rsivapr/scikit-learn,simon-pepin/scikit-learn,xubenben/scikit-learn,MartinDelzant/scikit-learn,sinhrks/scikit-learn,zhenv5/scikit-learn,jblackburne/scikit-learn,kashif/scikit-learn,nomadcube/scikit-learn,carrillo/scikit-learn,ominux/scikit-learn,TomDLT/scikit-learn,eickenberg/scikit-learn,ivannz/scikit-learn,nrhine1/scikit-learn,YinongLong/scikit-learn,nesterione/scikit-learn,liangz0707/sciki
t-learn,jorge2703/scikit-learn,mxjl620/scikit-learn,deepesch/scikit-learn,IssamLaradji/scikit-learn,hsuantien/scikit-learn,pianomania/scikit-learn,depet/scikit-learn,ClimbsRocks/scikit-learn,ashhher3/scikit-learn,vivekmishra1991/scikit-learn,treycausey/scikit-learn,nhejazi/scikit-learn,Fireblend/scikit-learn,Titan-C/scikit-learn,aabadie/scikit-learn,eg-zhang/scikit-learn,hitszxp/scikit-learn,hsuantien/scikit-learn,siutanwong/scikit-learn,JsNoNo/scikit-learn,equialgo/scikit-learn,andaag/scikit-learn,Windy-Ground/scikit-learn,lazywei/scikit-learn,bnaul/scikit-learn,cybernet14/scikit-learn,CVML/scikit-learn,aflaxman/scikit-learn,glemaitre/scikit-learn,icdishb/scikit-learn,chrsrds/scikit-learn,pv/scikit-learn,Fireblend/scikit-learn,zaxtax/scikit-learn,kmike/scikit-learn,466152112/scikit-learn,mjgrav2001/scikit-learn,ashhher3/scikit-learn,AnasGhrab/scikit-learn,rohanp/scikit-learn,costypetrisor/scikit-learn,wlamond/scikit-learn,meduz/scikit-learn,kylerbrown/scikit-learn,zorojean/scikit-learn,amueller/scikit-learn,bhargav/scikit-learn,fabianp/scikit-learn,bthirion/scikit-learn,tosolveit/scikit-learn,ZenDevelopmentSystems/scikit-learn,DSLituiev/scikit-learn,mugizico/scikit-learn,poryfly/scikit-learn,rishikksh20/scikit-learn,ssaeger/scikit-learn,sarahgrogan/scikit-learn,aflaxman/scikit-learn,LiaoPan/scikit-learn,dingocuster/scikit-learn,mxjl620/scikit-learn,lucidfrontier45/scikit-learn,Lawrence-Liu/scikit-learn,luo66/scikit-learn,mjgrav2001/scikit-learn,Clyde-fare/scikit-learn,carrillo/scikit-learn,mrshu/scikit-learn,rexshihaoren/scikit-learn,sumspr/scikit-learn,vermouthmjl/scikit-learn,rishikksh20/scikit-learn,MartinSavc/scikit-learn,RomainBrault/scikit-learn,hugobowne/scikit-learn,AIML/scikit-learn,xavierwu/scikit-learn,mfjb/scikit-learn,liyu1990/sklearn,wlamond/scikit-learn,anntzer/scikit-learn,Djabbz/scikit-learn,cl4rke/scikit-learn,AlexanderFabisch/scikit-learn,jzt5132/scikit-learn,djgagne/scikit-learn,tdhopper/scikit-learn,lbishal/scikit-learn,mjudsp/Tsallis,mattilyra
/scikit-learn,cauchycui/scikit-learn,espg/scikit-learn,spallavolu/scikit-learn,mfjb/scikit-learn,etkirsch/scikit-learn,tomlof/scikit-learn,sarahgrogan/scikit-learn,aetilley/scikit-learn,vigilv/scikit-learn,ahoyosid/scikit-learn,JsNoNo/scikit-learn,jkarnows/scikit-learn,HolgerPeters/scikit-learn,AIML/scikit-learn,ephes/scikit-learn,kjung/scikit-learn,robin-lai/scikit-learn,MechCoder/scikit-learn,hsiaoyi0504/scikit-learn,liangz0707/scikit-learn,fredhusser/scikit-learn,jakirkham/scikit-learn,procoder317/scikit-learn,alvarofierroclavero/scikit-learn,BiaDarkia/scikit-learn,fzalkow/scikit-learn,dsullivan7/scikit-learn,fzalkow/scikit-learn,idlead/scikit-learn,xavierwu/scikit-learn,nelson-liu/scikit-learn,pratapvardhan/scikit-learn,hitszxp/scikit-learn,mikebenfield/scikit-learn,justincassidy/scikit-learn,walterreade/scikit-learn,B3AU/waveTree,billy-inn/scikit-learn,shyamalschandra/scikit-learn,hugobowne/scikit-learn,mwv/scikit-learn,vermouthmjl/scikit-learn,tomlof/scikit-learn,carrillo/scikit-learn,Nyker510/scikit-learn,cainiaocome/scikit-learn,NunoEdgarGub1/scikit-learn,Garrett-R/scikit-learn,tdhopper/scikit-learn,shikhardb/scikit-learn,ZenDevelopmentSystems/scikit-learn,ndingwall/scikit-learn,rahuldhote/scikit-learn,jayflo/scikit-learn,Jimmy-Morzaria/scikit-learn,f3r/scikit-learn,nhejazi/scikit-learn,procoder317/scikit-learn,giorgiop/scikit-learn,Myasuka/scikit-learn,pkruskal/scikit-learn,RayMick/scikit-learn,RPGOne/scikit-learn,nelson-liu/scikit-learn,cwu2011/scikit-learn,0x0all/scikit-learn,voxlol/scikit-learn,pompiduskus/scikit-learn,huzq/scikit-learn,DonBeo/scikit-learn,ominux/scikit-learn,herilalaina/scikit-learn,ycaihua/scikit-learn,pypot/scikit-learn,hrjn/scikit-learn,xiaoxiamii/scikit-learn,Obus/scikit-learn,kmike/scikit-learn,wzbozon/scikit-learn,AlexandreAbraham/scikit-learn,smartscheduling/scikit-learn-categorical-tree,loli/sklearn-ensembletrees,lenovor/scikit-learn,xuewei4d/scikit-learn,khkaminska/scikit-learn,lenovor/scikit-learn,saiwing-yeung/scikit-learn,ja
kirkham/scikit-learn,alexeyum/scikit-learn,potash/scikit-learn,AlexanderFabisch/scikit-learn,quheng/scikit-learn,jmetzen/scikit-learn,yanlend/scikit-learn,sanketloke/scikit-learn,jzt5132/scikit-learn,quheng/scikit-learn,fyffyt/scikit-learn,beepee14/scikit-learn,jmschrei/scikit-learn,gotomypc/scikit-learn,mayblue9/scikit-learn,frank-tancf/scikit-learn,florian-f/sklearn,pompiduskus/scikit-learn,ky822/scikit-learn,shangwuhencc/scikit-learn,pkruskal/scikit-learn,ahoyosid/scikit-learn,rsivapr/scikit-learn,maheshakya/scikit-learn,mrshu/scikit-learn,Adai0808/scikit-learn,joshloyal/scikit-learn,shangwuhencc/scikit-learn,mxjl620/scikit-learn,zorojean/scikit-learn,liberatorqjw/scikit-learn,jmschrei/scikit-learn,kylerbrown/scikit-learn,UNR-AERIAL/scikit-learn,vortex-ape/scikit-learn,quheng/scikit-learn,LiaoPan/scikit-learn,bigdataelephants/scikit-learn,mattgiguere/scikit-learn,fabianp/scikit-learn,Adai0808/scikit-learn,Lawrence-Liu/scikit-learn,aminert/scikit-learn,meduz/scikit-learn,ElDeveloper/scikit-learn,zihua/scikit-learn,trungnt13/scikit-learn,ky822/scikit-learn,fyffyt/scikit-learn,mhdella/scikit-learn,mikebenfield/scikit-learn,3manuek/scikit-learn,rajat1994/scikit-learn,bigdataelephants/scikit-learn,bikong2/scikit-learn,amueller/scikit-learn,zhenv5/scikit-learn,pnedunuri/scikit-learn,ltiao/scikit-learn,tosolveit/scikit-learn,rishikksh20/scikit-learn,MatthieuBizien/scikit-learn,Akshay0724/scikit-learn,michigraber/scikit-learn,bnaul/scikit-learn,aabadie/scikit-learn,zaxtax/scikit-learn,dsquareindia/scikit-learn,JPFrancoia/scikit-learn,siutanwong/scikit-learn,ltiao/scikit-learn,scikit-learn/scikit-learn,arabenjamin/scikit-learn,costypetrisor/scikit-learn,xuewei4d/scikit-learn,henrykironde/scikit-learn,BiaDarkia/scikit-learn,bikong2/scikit-learn,walterreade/scikit-learn,bthirion/scikit-learn,loli/semisupervisedforests,UNR-AERIAL/scikit-learn,CVML/scikit-learn,pv/scikit-learn,loli/sklearn-ensembletrees,costypetrisor/scikit-learn,AnasGhrab/scikit-learn,billy-inn/scikit-learn,
hrjn/scikit-learn,hitszxp/scikit-learn,phdowling/scikit-learn,CVML/scikit-learn,larsmans/scikit-learn,sinhrks/scikit-learn,zorroblue/scikit-learn,justincassidy/scikit-learn,HolgerPeters/scikit-learn,zuku1985/scikit-learn,xavierwu/scikit-learn,Garrett-R/scikit-learn,henrykironde/scikit-learn,Titan-C/scikit-learn,ivannz/scikit-learn,mhdella/scikit-learn,B3AU/waveTree,nikitasingh981/scikit-learn,fabioticconi/scikit-learn,RachitKansal/scikit-learn,Barmaley-exe/scikit-learn,jzt5132/scikit-learn,Sentient07/scikit-learn,shenzebang/scikit-learn,ChanChiChoi/scikit-learn,jorik041/scikit-learn,toastedcornflakes/scikit-learn,qifeigit/scikit-learn,michigraber/scikit-learn,betatim/scikit-learn,eickenberg/scikit-learn,nikitasingh981/scikit-learn,MatthieuBizien/scikit-learn,ankurankan/scikit-learn,jereze/scikit-learn,loli/semisupervisedforests,shikhardb/scikit-learn,sonnyhu/scikit-learn,IshankGulati/scikit-learn,djgagne/scikit-learn,siutanwong/scikit-learn,yask123/scikit-learn,davidgbe/scikit-learn,zorroblue/scikit-learn,RPGOne/scikit-learn,cdegroc/scikit-learn,madjelan/scikit-learn,mjudsp/Tsallis,hsiaoyi0504/scikit-learn,rvraghav93/scikit-learn,cybernet14/scikit-learn,ivannz/scikit-learn,krez13/scikit-learn,adamgreenhall/scikit-learn,sinhrks/scikit-learn,meduz/scikit-learn,walterreade/scikit-learn,aewhatley/scikit-learn,AlexandreAbraham/scikit-learn,Vimos/scikit-learn,xwolf12/scikit-learn,vivekmishra1991/scikit-learn,AIML/scikit-learn,fengzhyuan/scikit-learn,joernhees/scikit-learn,mblondel/scikit-learn,shyamalschandra/scikit-learn,shusenl/scikit-learn,liyu1990/sklearn,0asa/scikit-learn,anurag313/scikit-learn,sgenoud/scikit-learn,ElDeveloper/scikit-learn,chrisburr/scikit-learn,YinongLong/scikit-learn,eg-zhang/scikit-learn,mhue/scikit-learn,466152112/scikit-learn,zihua/scikit-learn,arjoly/scikit-learn,raghavrv/scikit-learn,RomainBrault/scikit-learn,alvarofierroclavero/scikit-learn,henridwyer/scikit-learn,aewhatley/scikit-learn,cybernet14/scikit-learn,AlexanderFabisch/scikit-learn,om
inux/scikit-learn,Jimmy-Morzaria/scikit-learn,giorgiop/scikit-learn,rajat1994/scikit-learn,kevin-intel/scikit-learn,betatim/scikit-learn,ndingwall/scikit-learn,ClimbsRocks/scikit-learn,sanketloke/scikit-learn,krez13/scikit-learn,jayflo/scikit-learn,jpautom/scikit-learn,smartscheduling/scikit-learn-categorical-tree,bikong2/scikit-learn,aabadie/scikit-learn,devanshdalal/scikit-learn,arabenjamin/scikit-learn,khkaminska/scikit-learn,rahul-c1/scikit-learn,abhishekkrthakur/scikit-learn,kevin-intel/scikit-learn,Titan-C/scikit-learn,ycaihua/scikit-learn,aminert/scikit-learn,treycausey/scikit-learn,lenovor/scikit-learn,joernhees/scikit-learn,hugobowne/scikit-learn,herilalaina/scikit-learn,Clyde-fare/scikit-learn,carrillo/scikit-learn,fabioticconi/scikit-learn,rahul-c1/scikit-learn,JeanKossaifi/scikit-learn,nikitasingh981/scikit-learn,alexeyum/scikit-learn,heli522/scikit-learn,huzq/scikit-learn,evgchz/scikit-learn,beepee14/scikit-learn,ilyes14/scikit-learn,petosegan/scikit-learn,jereze/scikit-learn,Srisai85/scikit-learn,Windy-Ground/scikit-learn,ltiao/scikit-learn,vermouthmjl/scikit-learn,bigdataelephants/scikit-learn,nelson-liu/scikit-learn,mfjb/scikit-learn,Windy-Ground/scikit-learn,evgchz/scikit-learn,xiaoxiamii/scikit-learn,r-mart/scikit-learn,ningchi/scikit-learn,depet/scikit-learn,smartscheduling/scikit-learn-categorical-tree,luo66/scikit-learn,pythonvietnam/scikit-learn,B3AU/waveTree,belltailjp/scikit-learn,terkkila/scikit-learn,hdmetor/scikit-learn,AlexRobson/scikit-learn,nvoron23/scikit-learn,olologin/scikit-learn,deepesch/scikit-learn,lucidfrontier45/scikit-learn,manhhomienbienthuy/scikit-learn,bthirion/scikit-learn,plissonf/scikit-learn,ngoix/OCRF,Adai0808/scikit-learn,ldirer/scikit-learn,massmutual/scikit-learn,raghavrv/scikit-learn,ngoix/OCRF,beepee14/scikit-learn,q1ang/scikit-learn,victorbergelin/scikit-learn,yunfeilu/scikit-learn,MartinDelzant/scikit-learn,iismd17/scikit-learn,MartinSavc/scikit-learn,liberatorqjw/scikit-learn,jereze/scikit-learn,cauchycui/sciki
t-learn,xwolf12/scikit-learn,kevin-intel/scikit-learn,jlegendary/scikit-learn,vivekmishra1991/scikit-learn,andaag/scikit-learn,jakobworldpeace/scikit-learn,nikitasingh981/scikit-learn,BiaDarkia/scikit-learn,potash/scikit-learn,lucidfrontier45/scikit-learn,espg/scikit-learn,mugizico/scikit-learn,dhruv13J/scikit-learn,harshaneelhg/scikit-learn,bnaul/scikit-learn,MechCoder/scikit-learn,ndingwall/scikit-learn,ssaeger/scikit-learn,samuel1208/scikit-learn,nomadcube/scikit-learn,Aasmi/scikit-learn,arahuja/scikit-learn,adamgreenhall/scikit-learn,hugobowne/scikit-learn,RomainBrault/scikit-learn,thientu/scikit-learn,fabioticconi/scikit-learn,vibhorag/scikit-learn,anntzer/scikit-learn,joshloyal/scikit-learn,ZENGXH/scikit-learn,B3AU/waveTree,Achuth17/scikit-learn,abhishekkrthakur/scikit-learn,saiwing-yeung/scikit-learn,fzalkow/scikit-learn,Sentient07/scikit-learn,cdegroc/scikit-learn,Djabbz/scikit-learn,3manuek/scikit-learn,kmike/scikit-learn,untom/scikit-learn,ogrisel/scikit-learn,xavierwu/scikit-learn,larsmans/scikit-learn,robbymeals/scikit-learn,jseabold/scikit-learn,jakobworldpeace/scikit-learn,robin-lai/scikit-learn,pythonvietnam/scikit-learn,liyu1990/sklearn,abhishekgahlot/scikit-learn,kjung/scikit-learn,pnedunuri/scikit-learn,hainm/scikit-learn,MohammedWasim/scikit-learn,themrmax/scikit-learn,YinongLong/scikit-learn,nvoron23/scikit-learn,Jimmy-Morzaria/scikit-learn,NunoEdgarGub1/scikit-learn,theoryno3/scikit-learn,maheshakya/scikit-learn,Barmaley-exe/scikit-learn,themrmax/scikit-learn,florian-f/sklearn,kaichogami/scikit-learn,ilo10/scikit-learn,abhishekkrthakur/scikit-learn,btabibian/scikit-learn,russel1237/scikit-learn,mugizico/scikit-learn,gotomypc/scikit-learn,xiaoxiamii/scikit-learn,sanketloke/scikit-learn,meduz/scikit-learn,hitszxp/scikit-learn,vibhorag/scikit-learn,OshynSong/scikit-learn,florian-f/sklearn,Barmaley-exe/scikit-learn,cl4rke/scikit-learn
--- +++ @@ -1,7 +1,6 @@ """Test the 20news downloader, if the data is available.""" import numpy as np from nose.tools import assert_equal -from nose.tools import assert_true from nose.plugins.skip import SkipTest from scikits.learn import datasets @@ -33,6 +32,3 @@ entry2 = data.data[np.where(data.target == label)[0][0]] assert_equal(entry1, entry2) - # check that the filenames are available too - assert_true(data.filenames[0].endswith( - "20news_home/20news-bydate-test/talk.politics.mideast/76560"))
de7abaa3e1de7b7de1c10daa43b621daaee628fd
roundware/rw/fields.py
roundware/rw/fields.py
from django.forms import forms
from south.modelsinspector import add_introspection_rules
from validatedfile.fields import ValidatedFileField


class RWValidatedFileField(ValidatedFileField):
    """
    Same as FileField, but you can specify:
        * content_types - list containing allowed content_types.
          Example: ['application/pdf', 'image/jpeg']
    """

    def __init__(self, content_types=None, **kwargs):
        # Only override the class-level default when the caller supplies
        # an explicit whitelist of MIME types.
        if content_types:
            self.content_types = content_types
        super(RWValidatedFileField, self).__init__(**kwargs)

    def clean(self, *args, **kwargs):
        """Validate the uploaded file and reject anything flagged by ClamAV.

        Returns the cleaned file data from ValidatedFileField.clean.

        Raises:
            forms.ValidationError: if pyclamav reports the file as infected.
        """
        # ValidatedFileField.clean will check the MIME type from the
        # http headers and by peeking in the file.
        data = super(RWValidatedFileField, self).clean(*args, **kwargs)
        file = data.file

        # Import lazily so merely importing this module does not load the
        # ClamAV bindings; they are only needed when a file is actually
        # validated (e.g. avoids the import cost/failure for consumers
        # that never run field validation).
        import pyclamav

        # Next scan with pyclamav.
        tmpfile = file.file.name
        has_virus, virus_name = pyclamav.scanfile(tmpfile)
        if has_virus:
            fn = file.name
            # NOTE: space added before 'malware' -- the previous adjacent
            # string literals concatenated to "bemalware".
            raise forms.ValidationError(
                'The file %s you uploaded appears to contain a virus or be '
                'malware (%s).' % (fn, virus_name)
            )
        return data


# Raw string so the regex backslashes are literal and not treated as
# (deprecated) string escape sequences.
add_introspection_rules([], [r"^roundware\.rw\.fields\.RWValidatedFileField"])
from django.forms import forms from south.modelsinspector import add_introspection_rules from validatedfile.fields import ValidatedFileField class RWValidatedFileField(ValidatedFileField): """ Same as FileField, but you can specify: * content_types - list containing allowed content_types. Example: ['application/pdf', 'image/jpeg'] """ def __init__(self, content_types=None, **kwargs): if content_types: self.content_types = content_types super(RWValidatedFileField, self).__init__(**kwargs) def clean(self, *args, **kwargs): # ValidatedFileField.clean will check the MIME type from the # http headers and by peeking in the file data = super(RWValidatedFileField, self).clean(*args, **kwargs) file = data.file # next scan with pyclamav tmpfile = file.file.name import pyclamav has_virus, virus_name = pyclamav.scanfile(tmpfile) if has_virus: fn = file.name raise forms.ValidationError( 'The file %s you uploaded appears to contain a virus or be' 'malware (%s).' % (fn, virus_name) ) return data add_introspection_rules([], ["^roundware\.rw\.fields\.RWValidatedFileField"])
Move pyclamav import inside of clean method on RWValidatedFileField so that it doesn't get imported by streamscript or unless as needed for field validation
Move pyclamav import inside of clean method on RWValidatedFileField so that it doesn't get imported by streamscript or unless as needed for field validation
Python
agpl-3.0
IMAmuseum/roundware-server,Karlamon/roundware-server,IMAmuseum/roundware-server,jslootbeek/roundware-server,IMAmuseum/roundware-server,eosrei/roundware-server,IMAmuseum/roundware-server,eosrei/roundware-server,eosrei/roundware-server,Karlamon/roundware-server,probabble/roundware-server,yangjackascd/roundware-server,Karlamon/roundware-server,probabble/roundware-server,yangjackascd/roundware-server,probabble/roundware-server,jslootbeek/roundware-server,Karlamon/roundware-server,yangjackascd/roundware-server,eosrei/roundware-server,probabble/roundware-server,jslootbeek/roundware-server,yangjackascd/roundware-server,jslootbeek/roundware-server
--- +++ @@ -1,7 +1,6 @@ from django.forms import forms from south.modelsinspector import add_introspection_rules from validatedfile.fields import ValidatedFileField -import pyclamav class RWValidatedFileField(ValidatedFileField): @@ -25,6 +24,7 @@ # next scan with pyclamav tmpfile = file.file.name + import pyclamav has_virus, virus_name = pyclamav.scanfile(tmpfile) if has_virus: fn = file.name
2c45c405887e415744ea0b447936848b9b6fd355
makerbot_driver/Preprocessors/Preprocessor.py
makerbot_driver/Preprocessors/Preprocessor.py
""" An interface that all future preprocessors should inherit from """ import os import re from errors import * from .. import Gcode class Preprocessor(object): def __init__(self): pass def process_file(self, input_path, output_path): pass def inputs_are_gcode(self, input_path, output_path): for path in (input_path, output_path): name, ext = os.path.splitext(path) if ext != '.gcode': raise NotGCodeFileError def _remove_variables(self, input_line): variable_regex = "#[^ ^\n^\r]*" m = re.search(variable_regex, input_line) while m is not None: input_line = input_line.replace(m.group(), '0') m = re.search(variable_regex, input_line) return input_line
""" An interface that all future preprocessors should inherit from """ import os import re from errors import * from .. import Gcode class Preprocessor(object): def __init__(self): pass def process_file(self, input_path, output_path): pass def inputs_are_gcode(self, input_path, output_path): pass def _remove_variables(self, input_line): variable_regex = "#[^ ^\n^\r]*" m = re.search(variable_regex, input_line) while m is not None: input_line = input_line.replace(m.group(), '0') m = re.search(variable_regex, input_line) return input_line
Disable check for .gcode file extension when preprocessing gcode.
Disable check for .gcode file extension when preprocessing gcode.
Python
agpl-3.0
makerbot/s3g,makerbot/s3g,makerbot/s3g,makerbot/s3g,Jnesselr/s3g,Jnesselr/s3g
--- +++ @@ -9,7 +9,6 @@ from .. import Gcode class Preprocessor(object): - def __init__(self): pass @@ -17,10 +16,7 @@ pass def inputs_are_gcode(self, input_path, output_path): - for path in (input_path, output_path): - name, ext = os.path.splitext(path) - if ext != '.gcode': - raise NotGCodeFileError + pass def _remove_variables(self, input_line): variable_regex = "#[^ ^\n^\r]*"
84e20f231c6a9f8d6f5c76b3e2853f3860173fe0
yarn_api_client/__init__.py
yarn_api_client/__init__.py
# -*- coding: utf-8 -*- __version__ = '1.0.2' __all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager', 'ResourceManager'] from .application_master import ApplicationMaster from .history_server import HistoryServer from .node_manager import NodeManager from .resource_manager import ResourceManager
# -*- coding: utf-8 -*- __version__ = '2.0.0.dev0' __all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager', 'ResourceManager'] from .application_master import ApplicationMaster from .history_server import HistoryServer from .node_manager import NodeManager from .resource_manager import ResourceManager
Prepare for next development iteration
Prepare for next development iteration
Python
bsd-3-clause
toidi/hadoop-yarn-api-python-client
--- +++ @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -__version__ = '1.0.2' +__version__ = '2.0.0.dev0' __all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager', 'ResourceManager'] from .application_master import ApplicationMaster
7201a7f6c87efa74165ca22c4a2db9ce292bae62
baseline.py
baseline.py
#/usr/bin/python """ Baseline example that needs to be beaten """ import numpy as np import matplotlib.pyplot as plt x, y, yerr = np.loadtxt("data/data.txt", unpack=True) A = np.vstack((np.ones_like(x), x)).T C = np.diag(yerr * yerr) cov = np.linalg.inv(np.dot(A.T, np.linalg.solve(C, A))) b_ls, m_ls = np.dot(cov, np.dot(A.T, np.linalg.solve(C, y))) fig, ax = plt.subplots() ax.errorbar(x, y, yerr=yerr, c="k", fmt="o") x_range = np.array([min(x), max(x)]) ax.plot(x_range, m_ls * x_range + b_ls, c="#666666", lw=2, zorder=-100) ax.set_xlabel("x") ax.set_ylabel("y") fig.savefig("assets/result.png") print m_ls, b_ls
#/usr/bin/python """ Baseline example that needs to be beaten """ import os import numpy as np import matplotlib.pyplot as plt x, y, yerr = np.loadtxt("data/data.txt", unpack=True) A = np.vstack((np.ones_like(x), x)).T C = np.diag(yerr * yerr) cov = np.linalg.inv(np.dot(A.T, np.linalg.solve(C, A))) b_ls, m_ls = np.dot(cov, np.dot(A.T, np.linalg.solve(C, y))) fig, ax = plt.subplots() ax.errorbar(x, y, yerr=yerr, c="k", fmt="o") x_range = np.array([min(x), max(x)]) ax.plot(x_range, m_ls * x_range + b_ls, c="#666666", lw=2, zorder=-100) ax.set_xlabel("x") ax.set_ylabel("y") fig.savefig("assets/result.png") print("Results of m, b: ({0:.4f} {1:.4f})".format(m_ls, b_ls)) # Let's store result parameters in environment variables, and we will deal # with more complex values (e.g., uncertainties, etc) later os.environ["RESULT_M"] = "{0:.5f}".format(m_ls) os.environ["RESULT_B"] = "{0:.5f}".format(b_ls)
Add RESULT_M and RESULT_B to environment varaible
Add RESULT_M and RESULT_B to environment varaible [ci skip]
Python
mit
arfon/dottravis,arfon/dottravis
--- +++ @@ -2,6 +2,7 @@ """ Baseline example that needs to be beaten """ +import os import numpy as np import matplotlib.pyplot as plt @@ -20,4 +21,9 @@ ax.set_ylabel("y") fig.savefig("assets/result.png") -print m_ls, b_ls +print("Results of m, b: ({0:.4f} {1:.4f})".format(m_ls, b_ls)) + +# Let's store result parameters in environment variables, and we will deal +# with more complex values (e.g., uncertainties, etc) later +os.environ["RESULT_M"] = "{0:.5f}".format(m_ls) +os.environ["RESULT_B"] = "{0:.5f}".format(b_ls)
dde82212ddf255ffb15b2b083352d7cf5b4b5b34
tutorials/urls.py
tutorials/urls.py
from django.conf.urls import include, url from tutorials import views urlpatterns = [ url(r'^$', views.ListTutorials.as_view()), url(r'add/', views.NewTutorial.as_view(), name='add_tutorial'), url(r'(?P<tutorial_id>[\w\-]+)/edit/', views.EditTutorials.as_view(), name='edit_tutorial'), # This must be last, otherwise it will match anything url(r'^(?P<tutorial_id>[\w\-]+)/$', views.TutorialDetail.as_view(), name='detail_tutorial'), ]
from django.conf.urls import include, url from tutorials import views urlpatterns = [ url(r'^$', views.ListTutorials.as_view(), name='list_tutorials'), url(r'add/', views.CreateNewTutorial.as_view(), name='add_tutorial'), url(r'(?P<tutorial_id>[\w\-]+)/edit/', views.EditTutorials.as_view(), name='edit_tutorial'), url(r'(?P<tutorial_id>[\w\-]+)/delete/', views.DeleteTutorial.as_view(), name='delete_tutorial'), # This must be last, otherwise it will match anything url(r'^(?P<tutorial_id>[\w\-]+)/$', views.TutorialDetail.as_view(), name='detail_tutorial'), ]
Add url name to ListView, New url for delete view, Refactor ViewClass name for NewTutorials to CreateNewTutorials
Add url name to ListView, New url for delete view, Refactor ViewClass name for NewTutorials to CreateNewTutorials
Python
agpl-3.0
openego/oeplatform,openego/oeplatform,openego/oeplatform,openego/oeplatform
--- +++ @@ -3,9 +3,10 @@ from tutorials import views urlpatterns = [ - url(r'^$', views.ListTutorials.as_view()), - url(r'add/', views.NewTutorial.as_view(), name='add_tutorial'), + url(r'^$', views.ListTutorials.as_view(), name='list_tutorials'), + url(r'add/', views.CreateNewTutorial.as_view(), name='add_tutorial'), url(r'(?P<tutorial_id>[\w\-]+)/edit/', views.EditTutorials.as_view(), name='edit_tutorial'), + url(r'(?P<tutorial_id>[\w\-]+)/delete/', views.DeleteTutorial.as_view(), name='delete_tutorial'), # This must be last, otherwise it will match anything url(r'^(?P<tutorial_id>[\w\-]+)/$', views.TutorialDetail.as_view(), name='detail_tutorial'),
bc2b8d04398f9df9985452b2b8a016208cf216cd
salesforce/__init__.py
salesforce/__init__.py
# django-salesforce # # by Phil Christensen # (c) 2012-2013 Freelancers Union (http://www.freelancersunion.org) # See LICENSE.md for details # """ A database backend for the Django ORM. Allows access to all Salesforce objects accessible via the SOQL API. """ import logging import warnings import django DJANGO_18_PLUS = django.VERSION[:2] >= (1, 8) DJANGO_184_PLUS = django.VERSION[:3] >= (1, 8, 4) DJANGO_19_PLUS = django.VERSION[:3] >= (1, 9) if not django.VERSION[:2] >= (1, 7): raise ImportError("Django 1.7 or higher is required for django-salesforce.") if django.VERSION[:2] >= (1, 8): warnings.warn("Some methods working with Django 1.7 can be unimplemented for Django 1.8 and 1.9. " "See the django-salesforce README, the first pragraph") log = logging.getLogger(__name__)
# django-salesforce # # by Phil Christensen # (c) 2012-2013 Freelancers Union (http://www.freelancersunion.org) # See LICENSE.md for details # """ A database backend for the Django ORM. Allows access to all Salesforce objects accessible via the SOQL API. """ import logging import warnings import django DJANGO_18_PLUS = django.VERSION[:2] >= (1, 8) DJANGO_184_PLUS = django.VERSION[:3] >= (1, 8, 4) DJANGO_19_PLUS = django.VERSION[:3] >= (1, 9) if not django.VERSION[:2] >= (1, 7): raise ImportError("Django 1.7 or higher is required for django-salesforce.") log = logging.getLogger(__name__)
Remove Django 1.8/1.9 warnings; much better supported now.
Remove Django 1.8/1.9 warnings; much better supported now.
Python
mit
chromakey/django-salesforce,django-salesforce/django-salesforce,chromakey/django-salesforce,django-salesforce/django-salesforce,hynekcer/django-salesforce,chromakey/django-salesforce,hynekcer/django-salesforce,django-salesforce/django-salesforce,hynekcer/django-salesforce
--- +++ @@ -19,8 +19,5 @@ DJANGO_19_PLUS = django.VERSION[:3] >= (1, 9) if not django.VERSION[:2] >= (1, 7): raise ImportError("Django 1.7 or higher is required for django-salesforce.") -if django.VERSION[:2] >= (1, 8): - warnings.warn("Some methods working with Django 1.7 can be unimplemented for Django 1.8 and 1.9. " - "See the django-salesforce README, the first pragraph") log = logging.getLogger(__name__)
c5158020475e62d7e8b86a613a02c0a659038f88
formish/tests/testish/testish/lib/xformish.py
formish/tests/testish/testish/lib/xformish.py
""" General purpose formish extensions. """ from formish import validation, widgets, Form class DateParts(widgets.DateParts): def __init__(self, **k): k['day_first'] = k.pop('l10n').is_day_first() super(DateParts, self).__init__(**k) class ApproximateDateParts(widgets.DateParts): _template = 'ApproximateDateParts' def pre_render(self, schema_type, data): if data is None: return {'year': [''], 'month': [''], 'day': ['']} parts = [i for i in data.split('-')] parts.extend(['']*(3-len(parts))) return {'year': [parts[0]], 'month': [parts[1]], 'day': [parts[2]]} def convert(self, schema_type, data): # Collect all the parts from the request. parts = (data['year'][0].strip(), data['month'][0], data['day'][0]) if not parts[0] and (parts[1] or parts[2]): raise validation.FieldValidationError("Invalid date") elif not parts[1] and parts[2]: raise validation.FieldValidationError("Invalid date") # Discard the unspecified parts parts = [p for p in parts if p] # Ensure they're all integers (don't record the result, we don't care). try: [int(p) for p in parts] except ValueError: raise validation.FieldValidationError("Invalid date") return '-'.join(parts)
""" General purpose formish extensions. """ from formish import validation, widgets, Form from convertish.convert import ConvertError class DateParts(widgets.DateParts): def __init__(self, **k): k['day_first'] = k.pop('l10n').is_day_first() super(DateParts, self).__init__(**k) class ApproximateDateParts(widgets.DateParts): _template = 'ApproximateDateParts' def pre_render(self, schema_type, data): if data is None: return {'year': [''], 'month': [''], 'day': ['']} parts = [i for i in data.split('-')] parts.extend(['']*(3-len(parts))) return {'year': [parts[0]], 'month': [parts[1]], 'day': [parts[2]]} def convert(self, schema_type, data): # Collect all the parts from the request. parts = (data['year'][0].strip(), data['month'][0], data['day'][0]) if not parts[0] and (parts[1] or parts[2]): raise ConvertError("Invalid date") elif not parts[1] and parts[2]: raise ConvertError("Invalid date") # Discard the unspecified parts parts = [p for p in parts if p] # Ensure they're all integers (don't record the result, we don't care). try: [int(p) for p in parts] except ValueError: raise ConvertError("Invalid date") return '-'.join(parts)
Fix custom widget to raise correct exception type.
Fix custom widget to raise correct exception type.
Python
bsd-3-clause
ish/formish,ish/formish,ish/formish
--- +++ @@ -3,6 +3,7 @@ """ from formish import validation, widgets, Form +from convertish.convert import ConvertError class DateParts(widgets.DateParts): @@ -27,15 +28,15 @@ # Collect all the parts from the request. parts = (data['year'][0].strip(), data['month'][0], data['day'][0]) if not parts[0] and (parts[1] or parts[2]): - raise validation.FieldValidationError("Invalid date") + raise ConvertError("Invalid date") elif not parts[1] and parts[2]: - raise validation.FieldValidationError("Invalid date") + raise ConvertError("Invalid date") # Discard the unspecified parts parts = [p for p in parts if p] # Ensure they're all integers (don't record the result, we don't care). try: [int(p) for p in parts] except ValueError: - raise validation.FieldValidationError("Invalid date") + raise ConvertError("Invalid date") return '-'.join(parts)
8ffaeda7d9be151e20aef9c06518574c7c7a6727
utils/__init__.py
utils/__init__.py
import time def time_func(f): def wrap(*args, **kwargs): time1 = time.time() ret = f(*args, **kwargs) time2 = time.time() print '%s function took %0.3f ms' % (f.func_name, (time2-time1)*1000.0) return ret return wrap
Add decorator for timing functions
Add decorator for timing functions
Python
agpl-3.0
kansanmuisti/kamu,kansanmuisti/kamu,kansanmuisti/kamu,kansanmuisti/kamu,kansanmuisti/kamu
--- +++ @@ -0,0 +1,10 @@ +import time + +def time_func(f): + def wrap(*args, **kwargs): + time1 = time.time() + ret = f(*args, **kwargs) + time2 = time.time() + print '%s function took %0.3f ms' % (f.func_name, (time2-time1)*1000.0) + return ret + return wrap
750dc7d4eddf691117cebf815e163a4d10af39cb
src/TulsiGenerator/Scripts/bazel_options.py
src/TulsiGenerator/Scripts/bazel_options.py
# Copyright 2017 The Tulsi Authors. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the 'License'); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an 'AS IS' BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Logic to translate Xcode options to Bazel options.""" class BazelOptions(object): """Converts Xcode features into Bazel command line flags.""" def __init__(self, xcode_env): """Creates a new BazelOptions object. Args: xcode_env: A dictionary of Xcode environment variables. Returns: A BazelOptions instance. """ self.xcode_env = xcode_env def bazel_feature_flags(self): """Returns a list of bazel flags for the current Xcode env configuration.""" flags = [] if self.xcode_env.get('ENABLE_ADDRESS_SANITIZER') == 'YES': flags.extend([ '--features=asan', ]) return flags
# Copyright 2017 The Tulsi Authors. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the 'License'); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an 'AS IS' BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Logic to translate Xcode options to Bazel options.""" class BazelOptions(object): """Converts Xcode features into Bazel command line flags.""" def __init__(self, xcode_env): """Creates a new BazelOptions object. Args: xcode_env: A dictionary of Xcode environment variables. Returns: A BazelOptions instance. """ self.xcode_env = xcode_env def bazel_feature_flags(self): """Returns a list of bazel flags for the current Xcode env configuration.""" flags = [] if self.xcode_env.get('ENABLE_ADDRESS_SANITIZER') == 'YES': flags.append('--features=asan') if self.xcode_env.get('ENABLE_THREAD_SANITIZER') == 'YES': flags.append('--features=tsan') if self.xcode_env.get('ENABLE_UNDEFINED_BEHAVIOR_SANITIZER') == 'YES': flags.append('--features=ubsan') return flags
Support enabling tsan and ubsan from Xcode UI
Support enabling tsan and ubsan from Xcode UI Xcode won't let you enable ubsan from the UI as it requires a 'Compile Sources' phase with (Objective-)C(++) sources, but if you manually edit the scheme and enable it or equivalently add the phase with a dummy file, enable it from the UI, and then remove the phase, ubsan will work. PiperOrigin-RevId: 196831918
Python
apache-2.0
pinterest/tulsi,bazelbuild/tulsi,bazelbuild/tulsi,bazelbuild/tulsi,bazelbuild/tulsi,pinterest/tulsi,bazelbuild/tulsi,bazelbuild/tulsi,pinterest/tulsi,pinterest/tulsi,pinterest/tulsi,pinterest/tulsi
--- +++ @@ -33,8 +33,10 @@ """Returns a list of bazel flags for the current Xcode env configuration.""" flags = [] if self.xcode_env.get('ENABLE_ADDRESS_SANITIZER') == 'YES': - flags.extend([ - '--features=asan', - ]) + flags.append('--features=asan') + if self.xcode_env.get('ENABLE_THREAD_SANITIZER') == 'YES': + flags.append('--features=tsan') + if self.xcode_env.get('ENABLE_UNDEFINED_BEHAVIOR_SANITIZER') == 'YES': + flags.append('--features=ubsan') return flags
1e90db8de39bd8c4b1a4d58148b991af8b5c32dd
storage/models/fighter.py
storage/models/fighter.py
from storage.models.base import * class Fighter(Base): __tablename__ = 'fighters' id = Column(Integer, primary_key=True) ref = Column(String(STR_SIZE), unique=True, nullable=False) name = Column(String(STR_SIZE), nullable=False) country = Column(String(STR_SIZE)) city = Column(String(STR_SIZE)) birthday = Column(Date) height = Column(Integer) # centimeters weight = Column(Integer) # kg reach = Column(Integer) # centimeters specialization = Column(String) fights = relationship( "Fight", primaryjoin="or_(Fighter.id == Fight.fighter1_id, Fighter.id == Fight.fighter2_id)")
from storage.models.base import * class Fighter(Base): __tablename__ = 'fighters' id = Column(Integer, primary_key=True) ref = Column(String(STR_SIZE), unique=True, nullable=False) name = Column(String(STR_SIZE), nullable=False) country = Column(String(STR_SIZE)) city = Column(String(STR_SIZE)) birthday = Column(Date) height = Column(Integer) # centimeters weight = Column(Integer) # kg reach = Column(Integer) # centimeters specialization = Column(String(STR_SIZE)) fights = relationship( "Fight", primaryjoin="or_(Fighter.id == Fight.fighter1_id, Fighter.id == Fight.fighter2_id)")
Add restriction for specialization string in db
Add restriction for specialization string in db
Python
apache-2.0
Some1Nebo/ufcpy
--- +++ @@ -13,7 +13,7 @@ height = Column(Integer) # centimeters weight = Column(Integer) # kg reach = Column(Integer) # centimeters - specialization = Column(String) + specialization = Column(String(STR_SIZE)) fights = relationship( "Fight",
146463512e17a6bae0dfc0e8f3aa8d99200a5e9c
transfers/examples/pre-transfer/archivesspace_ids.py
transfers/examples/pre-transfer/archivesspace_ids.py
#!/usr/bin/env python from __future__ import print_function import csv import errno import os import sys def main(transfer_path): """ Generate archivesspaceids.csv with reference IDs based on filenames. """ as_ids = [] for dirpath, _, filenames in os.walk(transfer_path): for filename in filenames: identifier = os.path.splitext(filename)[0] relative_path = os.path.join(dirpath, filename).replace(transfer_path, '') if not identifier or not relative_path: continue as_ids.append([relative_path, identifier]) print(as_ids) # Write out CSV try: os.mkdir(os.path.join(transfer_path, 'metadata')) except OSError as e: if e.errno == errno.EEXIST: pass # Already exists else: raise with open(os.path.join(transfer_path, 'metadata', 'archivesspaceids.csv'), 'w') as f: writer = csv.writer(f) writer.writerows(as_ids) if __name__ == '__main__': transfer_path = sys.argv[1] sys.exit(main(transfer_path))
#!/usr/bin/env python from __future__ import print_function import csv import errno import os import sys def main(transfer_path): """ Generate archivesspaceids.csv with reference IDs based on filenames. """ archivesspaceids_path = os.path.join(transfer_path, 'metadata', 'archivesspaceids.csv') if os.path.exists(archivesspaceids_path): print(archivesspaceids_path, 'already exists, exiting') return as_ids = [] for dirpath, _, filenames in os.walk(transfer_path): for filename in filenames: identifier = os.path.splitext(filename)[0] relative_path = os.path.join(dirpath, filename).replace(transfer_path, '') if not identifier or not relative_path: continue as_ids.append([relative_path, identifier]) print(as_ids) # Write out CSV try: os.mkdir(os.path.join(transfer_path, 'metadata')) except OSError as e: if e.errno == errno.EEXIST: pass # Already exists else: raise with open(archivesspaceids_path, 'w') as f: writer = csv.writer(f) writer.writerows(as_ids) if __name__ == '__main__': transfer_path = sys.argv[1] sys.exit(main(transfer_path))
Automate transfers: archivesspace example checks if output file already exists
Automate transfers: archivesspace example checks if output file already exists Check if archivesspaceids.csv already exists (presumably user provided). Do not generate one automatically in that case.
Python
agpl-3.0
artefactual/automation-tools,artefactual/automation-tools
--- +++ @@ -11,6 +11,11 @@ """ Generate archivesspaceids.csv with reference IDs based on filenames. """ + archivesspaceids_path = os.path.join(transfer_path, 'metadata', 'archivesspaceids.csv') + if os.path.exists(archivesspaceids_path): + print(archivesspaceids_path, 'already exists, exiting') + return + as_ids = [] for dirpath, _, filenames in os.walk(transfer_path): @@ -30,7 +35,7 @@ pass # Already exists else: raise - with open(os.path.join(transfer_path, 'metadata', 'archivesspaceids.csv'), 'w') as f: + with open(archivesspaceids_path, 'w') as f: writer = csv.writer(f) writer.writerows(as_ids)
1c19d7fb5914554b470a6d067902a9c61882ff4a
packs/softlayer/actions/destroy_instance.py
packs/softlayer/actions/destroy_instance.py
from lib.softlayer import SoftlayerBaseAction class SoftlayerDeleteInstance(SoftlayerBaseAction): def run(self, name): driver = self._get_driver() # go from name to Node Object node = [n for n in driver.list_nodes() if n.extra['hostname'] == name][0] # destroy the node self.logger.info('Destroying node...') node = driver.destroy_node(node) self.logger.info('Node successfully destroyed: {}'.format(node)) return
from lib.softlayer import SoftlayerBaseAction class SoftlayerDeleteInstance(SoftlayerBaseAction): def run(self, name): driver = self._get_driver() # go from name to Node Object try: node = [n for n in driver.list_nodes() if n.extra['hostname'] == name][0] except IndexError: raise Exception("Node with name {} not found in Softlayer".format(name)) # destroy the node self.logger.info('Destroying node...') node = driver.destroy_node(node) self.logger.info('Node successfully destroyed: {}'.format(node)) return
Return a sane error if there is no Nodes with that name instead of IndexError
Return a sane error if there is no Nodes with that name instead of IndexError
Python
apache-2.0
tonybaloney/st2contrib,StackStorm/st2contrib,tonybaloney/st2contrib,pearsontechnology/st2contrib,meirwah/st2contrib,psychopenguin/st2contrib,pidah/st2contrib,digideskio/st2contrib,digideskio/st2contrib,psychopenguin/st2contrib,meirwah/st2contrib,lmEshoo/st2contrib,pinterb/st2contrib,tonybaloney/st2contrib,pidah/st2contrib,lmEshoo/st2contrib,armab/st2contrib,pidah/st2contrib,dennybaa/st2contrib,StackStorm/st2contrib,dennybaa/st2contrib,pearsontechnology/st2contrib,StackStorm/st2contrib,armab/st2contrib,armab/st2contrib,pearsontechnology/st2contrib,pinterb/st2contrib,pearsontechnology/st2contrib
--- +++ @@ -5,7 +5,10 @@ def run(self, name): driver = self._get_driver() # go from name to Node Object - node = [n for n in driver.list_nodes() if n.extra['hostname'] == name][0] + try: + node = [n for n in driver.list_nodes() if n.extra['hostname'] == name][0] + except IndexError: + raise Exception("Node with name {} not found in Softlayer".format(name)) # destroy the node self.logger.info('Destroying node...') node = driver.destroy_node(node)
0219907b3351fea2467ad961fef750481b62e205
dask_ndmeasure/_test_utils.py
dask_ndmeasure/_test_utils.py
# -*- coding: utf-8 -*-
# -*- coding: utf-8 -*- from __future__ import absolute_import import dask.array.utils def _assert_eq_nan(a, b, **kwargs): a = a.copy() b = b.copy() a_nan = (a != a) b_nan = (b != b) a[a_nan] = 0 b[b_nan] = 0 dask.array.utils.assert_eq(a_nan, b_nan, **kwargs) dask.array.utils.assert_eq(a, b, **kwargs)
Add _assert_eq_nan to compare arrays that have NaN
Add _assert_eq_nan to compare arrays that have NaN As comparisons with NaN are false even if both values are NaN, using the `assert_eq` does not work correctly in this case. To fix it, we add this shim function around `assert_eq`. First we verify that they have the same NaN values using a duck type friendly strategy (comparing the arrays to themselves) and then comparing the masks of each array to the other. Second we zero out all the NaN values and compare the resulting arrays, which should now work as normal. This works just as well whether the arrays are from NumPy, Dask, or some other library given they support these basic functions.
Python
bsd-3-clause
dask-image/dask-ndmeasure
--- +++ @@ -1 +1,19 @@ # -*- coding: utf-8 -*- + +from __future__ import absolute_import + +import dask.array.utils + + +def _assert_eq_nan(a, b, **kwargs): + a = a.copy() + b = b.copy() + + a_nan = (a != a) + b_nan = (b != b) + + a[a_nan] = 0 + b[b_nan] = 0 + + dask.array.utils.assert_eq(a_nan, b_nan, **kwargs) + dask.array.utils.assert_eq(a, b, **kwargs)
1c494f21cde384b611998d237baa430384dcefbc
Challenges/chall_22.py
Challenges/chall_22.py
#!/usr/local/bin/python3 # Python Challenge - 22 # http://www.pythonchallenge.com/pc/hex/copper.html # http://www.pythonchallenge.com/pc/hex/white.gif # Username: butter; Password: fly # Keyword: ''' Uses Anaconda environment with Pillow for image processing - Python 3.7, numpy, and Pillow (PIL) - Run `source activate imgPIL`, `python chall_22.py` ''' from PIL import Image import numpy as np def main(): ''' Hint: emulate (picture of joystick) <!-- or maybe white.gif would be more bright --> http://www.pythonchallenge.com/pc/hex/white.gif shows a 200x200 black square, download has 133 pages in preview (frames?) ''' img_path = './joystick_chall_22/white.gif' white = Image.open(img_path) hist = white.histogram() print(len(hist)) return 0 if __name__ == '__main__': main()
#!/usr/local/bin/python3 # Python Challenge - 22 # http://www.pythonchallenge.com/pc/hex/copper.html # http://www.pythonchallenge.com/pc/hex/white.gif # Username: butter; Password: fly # Keyword: ''' Uses Anaconda environment with Pillow for image processing - Python 3.7, numpy, and Pillow (PIL) - Run `source activate imgPIL`, `python chall_22.py` ''' from PIL import Image, ImageDraw def main(): ''' Hint: emulate (picture of joystick) <!-- or maybe white.gif would be more bright --> http://www.pythonchallenge.com/pc/hex/white.gif shows a 200x200 black square, download has 133 pages in preview (frames?) ''' img_path = './joystick_chall_22/white.gif' with Image.open(img_path) as gif: hist = gif.histogram() # 1 pixel in hist bin 8 (0-255) print(hist.index(1)) data = list(gif.getdata()) print(data.index(8)) # 20100 return 0 if __name__ == '__main__': main()
Refactor image open to with block
Refactor image open to with block
Python
mit
HKuz/PythonChallenge
--- +++ @@ -11,8 +11,7 @@ - Run `source activate imgPIL`, `python chall_22.py` ''' -from PIL import Image -import numpy as np +from PIL import Image, ImageDraw def main(): @@ -23,10 +22,12 @@ square, download has 133 pages in preview (frames?) ''' img_path = './joystick_chall_22/white.gif' - white = Image.open(img_path) - hist = white.histogram() - print(len(hist)) + with Image.open(img_path) as gif: + hist = gif.histogram() # 1 pixel in hist bin 8 (0-255) + print(hist.index(1)) + data = list(gif.getdata()) + print(data.index(8)) # 20100 return 0
26934dae71cb006baf0dcf77ddec4938b8c4fdbd
pinry/settings/docker.py
pinry/settings/docker.py
import logging from .base import * # SECURITY WARNING: keep the secret key used in production secret! if 'SECRET_KEY' not in os.environ: logging.warning( "No SECRET_KEY given in environ, please have a check" ) SECRET_KEY = os.environ.get('SECRET_KEY', "PLEASE_REPLACE_ME") # SECURITY WARNING: don't run with debug turned on in production! DEBUG = False # SECURITY WARNING: use your actual domain name in production! ALLOWED_HOSTS = ['*'] # Database # https://docs.djangoproject.com/en/1.10/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'postgres', 'USER': 'postgres', 'HOST': 'db', 'PORT': 5432, } } USE_X_FORWARDED_HOST = True REST_FRAMEWORK['DEFAULT_RENDERER_CLASSES'] = [ 'rest_framework.renderers.JSONRenderer', ] try: from .local_settings import * except ImportError: pass
import logging from .base import * # SECURITY WARNING: keep the secret key used in production secret! if 'SECRET_KEY' not in os.environ: logging.warning( "No SECRET_KEY given in environ, please have a check." "If you have a local_settings file, please ignore this warning." ) SECRET_KEY = os.environ.get('SECRET_KEY', "PLEASE_REPLACE_ME") # SECURITY WARNING: don't run with debug turned on in production! DEBUG = False # SECURITY WARNING: use your actual domain name in production! ALLOWED_HOSTS = ['*'] # Database # https://docs.djangoproject.com/en/1.10/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'postgres', 'USER': 'postgres', 'HOST': 'db', 'PORT': 5432, } } USE_X_FORWARDED_HOST = True REST_FRAMEWORK['DEFAULT_RENDERER_CLASSES'] = [ 'rest_framework.renderers.JSONRenderer', ] try: from .local_settings import * except ImportError: pass
Add ignore info for secret-key env-test
Doc: Add ignore info for secret-key env-test
Python
bsd-2-clause
lapo-luchini/pinry,pinry/pinry,pinry/pinry,lapo-luchini/pinry,lapo-luchini/pinry,pinry/pinry,lapo-luchini/pinry,pinry/pinry
--- +++ @@ -6,7 +6,8 @@ # SECURITY WARNING: keep the secret key used in production secret! if 'SECRET_KEY' not in os.environ: logging.warning( - "No SECRET_KEY given in environ, please have a check" + "No SECRET_KEY given in environ, please have a check." + "If you have a local_settings file, please ignore this warning." ) SECRET_KEY = os.environ.get('SECRET_KEY', "PLEASE_REPLACE_ME")
24207f3681b0e546937a1d59ecffa1aa6630b825
todolist.py
todolist.py
# -*- coding: utf-8 -*- from app import create_app, db app = create_app('development') @app.cli.command() def test(): """Run the unit tests.""" import unittest tests = unittest.TestLoader().discover('tests') unittest.TextTestRunner(verbosity=2).run(tests) @app.cli.command() def fill_db(): """Fill database with random data. By default 10 users, 40 todolists and 160 todos. WARNING: will delete existing data. For testing purposes only. """ from utils.fake_generator import FakeGenerator FakeGenerator().start() # side effect: deletes existing data
# -*- coding: utf-8 -*- from app import create_app, db app = create_app('development') @app.cli.command() def test(): """Runs the unit tests.""" import unittest tests = unittest.TestLoader().discover('tests') unittest.TextTestRunner(verbosity=2).run(tests) @app.cli.command() def fill_db(): """Fills database with random data. By default 10 users, 40 todolists and 160 todos. WARNING: will delete existing data. For testing purposes only. """ from utils.fake_generator import FakeGenerator FakeGenerator().start() # side effect: deletes existing data
Fix grammar in doc comments
Fix grammar in doc comments
Python
mit
rtzll/flask-todolist,rtzll/flask-todolist,0xfoo/flask-todolist,polyfunc/flask-todolist,0xfoo/flask-todolist,polyfunc/flask-todolist,0xfoo/flask-todolist,rtzll/flask-todolist,polyfunc/flask-todolist,rtzll/flask-todolist
--- +++ @@ -7,7 +7,7 @@ @app.cli.command() def test(): - """Run the unit tests.""" + """Runs the unit tests.""" import unittest tests = unittest.TestLoader().discover('tests') unittest.TextTestRunner(verbosity=2).run(tests) @@ -15,7 +15,7 @@ @app.cli.command() def fill_db(): - """Fill database with random data. + """Fills database with random data. By default 10 users, 40 todolists and 160 todos. WARNING: will delete existing data. For testing purposes only. """
cc839453b88b4cd5f2b4f7f4c007405eabb02679
release.py
release.py
CLASSIFIERS = """\ Development Status :: 3 - Alpha Intended Audience :: Science/Research Intended Audience :: Developers License :: OSI Approved Programming Language :: Python Topic :: Software Development Topic :: Scientific/Engineering Operating System :: Microsoft :: Windows Operating System :: POSIX Operating System :: Unix Operating System :: MacOS """ NAME = 'numscons' VERSION = '0.3.3dev' DESCRIPTION = 'Enable to use scons within distutils to build extensions' CLASSIFIERS = filter(None, CLASSIFIERS.split('\n')) AUTHOR = 'David Cournapeau' AUTHOR_EMAIL = 'david@ar.media.kyoto-u.ac.jp' PACKAGES = ['numscons', 'numscons.core', 'numscons.checkers', 'numscons.tools', 'numscons.numdist', 'numscons.checkers.tests', 'numscons.checkers.fortran'] PACKAGE_DATA = {'numscons.core' : ['compiler.cfg', 'fcompiler.cfg'], 'numscons.checkers' : ['perflib.cfg']} DATA_DIR = ['numscons/scons-local', 'tests', 'doc']
CLASSIFIERS = """\ Development Status :: 3 - Alpha Intended Audience :: Science/Research Intended Audience :: Developers License :: OSI Approved Programming Language :: Python Topic :: Software Development Topic :: Scientific/Engineering Operating System :: Microsoft :: Windows Operating System :: POSIX Operating System :: Unix Operating System :: MacOS """ NAME = 'numscons' VERSION = '0.3.3' DESCRIPTION = 'Enable to use scons within distutils to build extensions' CLASSIFIERS = filter(None, CLASSIFIERS.split('\n')) AUTHOR = 'David Cournapeau' AUTHOR_EMAIL = 'david@ar.media.kyoto-u.ac.jp' PACKAGES = ['numscons', 'numscons.core', 'numscons.checkers', 'numscons.tools', 'numscons.numdist', 'numscons.checkers.tests', 'numscons.checkers.fortran'] PACKAGE_DATA = {'numscons.core' : ['compiler.cfg', 'fcompiler.cfg'], 'numscons.checkers' : ['perflib.cfg']} DATA_DIR = ['numscons/scons-local', 'tests', 'doc']
Update distutils version to 0.3.3
Update distutils version to 0.3.3
Python
bsd-3-clause
cournape/numscons,cournape/numscons,cournape/numscons
--- +++ @@ -13,7 +13,7 @@ """ NAME = 'numscons' -VERSION = '0.3.3dev' +VERSION = '0.3.3' DESCRIPTION = 'Enable to use scons within distutils to build extensions' CLASSIFIERS = filter(None, CLASSIFIERS.split('\n')) AUTHOR = 'David Cournapeau'
f7a8f66047e2277cd95b553cd7aadfa24fbaad95
scuole/stats/models.py
scuole/stats/models.py
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from django.db import models from django.utils.encoding import python_2_unicode_compatible @python_2_unicode_compatible class SchoolYear(models.Model): name = models.CharField(max_length=9) def __str__(self): return self.name @python_2_unicode_compatible class StatsBase(models.Model): """ An abstract model representing stats commonly tracked across all entities in TEA data. Meant to be the base used by other apps for establishing their stats models. Example: class CampusStats(StatsBase): ... """ # Student counts all_students_count = models.IntegerField('Number of students') asian_count = models.IntegerField('Number of Asian students') hispanic_count = models.IntegerField('Number of Hispanic students') pacific_islander_count = models.IntegerField( 'Number of Pacific Islander students') white_count = models.IntegerField('Number of White students') class Meta: abstract = True
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from django.db import models from django.utils.encoding import python_2_unicode_compatible @python_2_unicode_compatible class SchoolYear(models.Model): name = models.CharField(max_length=9) def __str__(self): return self.name @python_2_unicode_compatible class StatsBase(models.Model): """ An abstract model representing stats commonly tracked across all entities in TEA data. Meant to be the base used by other apps for establishing their stats models. Example: class CampusStats(StatsBase): ... """ # Student counts all_students_count = models.IntegerField('Number of students') african_american_count = models.IntegerField( 'Number of African American students') asian_count = models.IntegerField('Number of Asian students') hispanic_count = models.IntegerField('Number of Hispanic students') pacific_islander_count = models.IntegerField( 'Number of Pacific Islander students') two_or_more_races_count = models.IntegerField( 'Number of Two or More Races students') white_count = models.IntegerField('Number of White students') class Meta: abstract = True
Add two more fields to StatsBase
Add two more fields to StatsBase
Python
mit
texastribune/scuole,texastribune/scuole,texastribune/scuole,texastribune/scuole
--- +++ @@ -29,10 +29,14 @@ # Student counts all_students_count = models.IntegerField('Number of students') + african_american_count = models.IntegerField( + 'Number of African American students') asian_count = models.IntegerField('Number of Asian students') hispanic_count = models.IntegerField('Number of Hispanic students') pacific_islander_count = models.IntegerField( 'Number of Pacific Islander students') + two_or_more_races_count = models.IntegerField( + 'Number of Two or More Races students') white_count = models.IntegerField('Number of White students') class Meta:
06f045d51b24ee834f7bbb572ccce304431fc602
merlin/engine/battle.py
merlin/engine/battle.py
class Prepare(object): """ Prepare the champions for the battle! Usage: hero = Prepare(name="Aragorn", base_attack=100, base_hp=100) or like this: aragorn = {"name": "Aragorn", "base_attack": 100, "base_hp": 100} hero = Prepare(**aragorn) """ def __init__(self, name, base_attack, base_hp): self.name = name self.base_attack = base_attack self.base_hp = base_hp def attack(self, foe): if not isinstance(foe, Prepare): raise TypeError('foe should be a Prepare object') if foe.base_hp <= 0: raise Exception('foe is already dead! Stop hit him!') foe.base_hp = foe.base_hp - self.base_attack if foe.base_hp <= 0: print 'foe is dead.' return foe.base_hp
class Prepare(object): """ Prepare the champions for the battle! Usage: hero = Prepare(name="Aragorn", base_attack=100, base_hp=100) or like this: aragorn = {"name": "Aragorn", "base_attack": 100, "base_hp": 100} hero = Prepare(**aragorn) """ def __init__(self, name, base_attack, base_hp): self.name = name self.base_attack = base_attack self.base_hp = base_hp @property def status(self): return self.__dict__ def attack(self, foe): if not isinstance(foe, Prepare): raise TypeError('foe should be a Prepare object') if foe.base_hp <= 0: raise Exception('foe is already dead! Stop hit him!') foe.base_hp = foe.base_hp - self.base_attack if foe.base_hp <= 0: print 'foe is dead.' return foe.base_hp
Add property status in Prepare
Add property status in Prepare
Python
mit
lerrua/merlin-engine
--- +++ @@ -15,6 +15,10 @@ self.base_attack = base_attack self.base_hp = base_hp + @property + def status(self): + return self.__dict__ + def attack(self, foe): if not isinstance(foe, Prepare): raise TypeError('foe should be a Prepare object')
542bd2696f75ad58cf8b0015024b3011af14851c
config.py
config.py
import os # Grabs the folder where the script runs. basedir = os.path.abspath(os.path.dirname(__file__)) # Enable debug mode. DEBUG = True # Secret key for session management. You can generate random strings here: # http://clsc.net/tools-old/random-string-generator.php SECRET_KEY = 'my precious' # Connect to the database SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'database.db')
import os # Grabs the folder where the script runs. basedir = os.path.abspath(os.path.dirname(__file__)) # Enable debug mode. DEBUG = True # Secret key for session management. You can generate random strings here: # http://clsc.net/tools-old/random-string-generator.php SECRET_KEY = '-%\4~3(_6*' # Connect to the database SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'database.db')
Include randomized string for secret key
Include randomized string for secret key
Python
apache-2.0
AntiPiracy/webapp,AntiPiracy/webapp,tcyrus-hackathon/scurvy-webapp,tcyrus-hackathon/scurvy-webapp,AntiPiracy/webapp,tcyrus-hackathon/scurvy-webapp
--- +++ @@ -8,7 +8,7 @@ # Secret key for session management. You can generate random strings here: # http://clsc.net/tools-old/random-string-generator.php -SECRET_KEY = 'my precious' +SECRET_KEY = '-%\4~3(_6*' # Connect to the database SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'database.db')
c201fc0feef5f7eeede327d6239fc3082ae24180
server/worker/queue.py
server/worker/queue.py
"""Process queues.""" from datetime import datetime from server.extensions import db from server.models import QueueEntry def finished_entries(): """Process finished entries.""" queue_entries = db.session.query(QueueEntry) \ .filter(QueueEntry.finishes_at <= datetime.now()) \ .all() for entry in queue_entries: if entry.module: entry.module.level = entry.level db.session.add(entry.module) entry.module.pod.update_resources() elif entry.research: entry.research.level += 1 entry.research.researched = True db.session.add(entry.research) queue = entry.queue db.session.delete(entry) queue = queue.next_entry() db.session.commit()
"""Process queues.""" from datetime import datetime from server.extensions import db from server.models import QueueEntry def finished_entries(): """Process finished entries.""" queue_entries = db.session.query(QueueEntry) \ .filter(QueueEntry.finishes_at <= datetime.now()) \ .all() for entry in queue_entries: if entry.module: entry.module.level = entry.level db.session.add(entry.module) entry.module.pod.update_resources() elif entry.research: entry.research.level = entry.level entry.research.researched = True db.session.add(entry.research) queue = entry.queue db.session.delete(entry) queue = queue.next_entry() db.session.commit()
Set research level in ticker
Set research level in ticker
Python
mit
Nukesor/spacesurvival,Nukesor/spacesurvival,Nukesor/spacesurvival,Nukesor/spacesurvival
--- +++ @@ -18,7 +18,7 @@ entry.module.pod.update_resources() elif entry.research: - entry.research.level += 1 + entry.research.level = entry.level entry.research.researched = True db.session.add(entry.research)
eb169af3b56ef44d50a6f4596debf0c0c9efa532
config.py
config.py
import os ROOT_DIR = os.path.dirname(os.path.realpath(__file__)) class BaseConfig(object): DEBUG = False TESTING = False FREEZER_REMOVE_EXTRA_FILES = True FREEZER_DESTINATION = os.path.join(ROOT_DIR, 'html') FREEZER_RELATIVE_URLS = True class ProductionConfig(BaseConfig): GOOGLE_API_KEY = os.path.join(ROOT_DIR, 'client_id.json') GOOGLE_SHEET_ID = '1Z1XF9nabneWBDbFwaovI_n9YcazeNQq4hon1wsIxrus' GOOGLE_SHEET_TAB = 'Data' class DevelopmentConfig(BaseConfig): DATA_FILE = os.path.join(ROOT_DIR, 'WaterTesting', 'data.csv') DEBUG = True TESTING = True class TestingConfig(BaseConfig): DEBUG = False TESTING = True
import os ROOT_DIR = os.path.dirname(os.path.realpath(__file__)) class BaseConfig(object): DEBUG = False TESTING = False FREEZER_REMOVE_EXTRA_FILES = True FREEZER_DESTINATION = os.path.join(ROOT_DIR, 'html') FREEZER_RELATIVE_URLS = True class ProductionConfig(BaseConfig): GOOGLE_API_KEY = os.path.join(ROOT_DIR, 'client_id.json') GOOGLE_SHEET_ID = '1Z1XF9nabneWBDbFwaovI_n9YcazeNQq4hon1wsIxrus' GOOGLE_SHEET_TAB = 'Data' class DevelopmentConfig(BaseConfig): DATA_FILE = os.path.join(ROOT_DIR, 'data.csv') DEBUG = True TESTING = True class TestingConfig(BaseConfig): DEBUG = False TESTING = True
Change location of development data file.
Change location of development data file.
Python
mit
JamesRiverHomeBrewers/WaterTesting,JamesRiverHomeBrewers/WaterTesting,JamesRiverHomeBrewers/WaterTesting,JamesRiverHomeBrewers/WaterTesting
--- +++ @@ -16,7 +16,7 @@ class DevelopmentConfig(BaseConfig): - DATA_FILE = os.path.join(ROOT_DIR, 'WaterTesting', 'data.csv') + DATA_FILE = os.path.join(ROOT_DIR, 'data.csv') DEBUG = True TESTING = True
738d080512f36939ce4a23f3d3db0b378550564a
tests/test_build_chess.py
tests/test_build_chess.py
# -*- coding: utf-8 -*- from app.chess.chess import Chess import unittest class TestBuildChess(unittest.TestCase): """ `TestBuildChess()` class is unit-testing the class Chess(). """ # /////////////////////////////////////////////////// def setUp(self): params = [4, 4] pieces = {'King': 2, 'Queen': 1, 'Bishop': 0, 'Rook': 0, 'Knight': 0} params.append(pieces) self.chess = Chess(params) # /////////////////////////////////////////////////// def test_solve(self): """Tests validity of solution""" self.assertEqual(self.chess.pieces_types == ['K', 'K', 'Q'], True) self.assertEqual(self.chess.number_pieces == 3, True) # self.assertEqual(self.chess.solutions == 1, True) if __name__ == '__main__': unittest.main()
# -*- coding: utf-8 -*- from app.chess.chess import Chess import unittest class TestBuildChess(unittest.TestCase): """ `TestBuildChess()` class is unit-testing the class Chess(). """ # /////////////////////////////////////////////////// def setUp(self): params = [4, 4] pieces = {'King': 2, 'Queen': 1, 'Bishop': 0, 'Rook': 0, 'Knight': 0} params.append(pieces) self.chess = Chess(params) # /////////////////////////////////////////////////// def test_solve(self): """Tests validity of solution""" self.assertEqual(self.chess.pieces_types == ['K', 'K', 'Q'], True) self.assertEqual(self.chess.number_pieces == 3, True) # self.assertEqual(self.chess.solutions == 1, True) def test_solution_only_kings(self): params = [5, 5] pieces = {'King': 2, 'Queen': 0, 'Bishop': 0, 'Rook': 0, 'Knight': 0} params.append(pieces) self.chess = Chess(params) self.chess.run_game() self.assertEqual(self.chess.solutions == 228, True) if __name__ == '__main__': unittest.main()
Add a TDD funct to test the solution (only kings)
Add a TDD funct to test the solution (only kings)
Python
mit
aymguesmi/ChessChallenge
--- +++ @@ -24,7 +24,15 @@ self.assertEqual(self.chess.pieces_types == ['K', 'K', 'Q'], True) self.assertEqual(self.chess.number_pieces == 3, True) # self.assertEqual(self.chess.solutions == 1, True) - + + def test_solution_only_kings(self): + params = [5, 5] + pieces = {'King': 2, 'Queen': 0, 'Bishop': 0, 'Rook': 0, 'Knight': 0} + params.append(pieces) + self.chess = Chess(params) + self.chess.run_game() + self.assertEqual(self.chess.solutions == 228, True) + if __name__ == '__main__': unittest.main()
64feb1fe3eafc7fae4b9894f5e240d4c05eebb78
lava_server/__init__.py
lava_server/__init__.py
# Copyright (C) 2010, 2011 Linaro Limited # # Author: Zygmunt Krynicki <zygmunt.krynicki@linaro.org> # # This file is part of LAVA Server. # # LAVA Server is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License version 3 # as published by the Free Software Foundation # # LAVA Server is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with LAVA Server. If not, see <http://www.gnu.org/licenses/>. __version__ = (0, 3, 0, "beta", 2)
# Copyright (C) 2010, 2011 Linaro Limited # # Author: Zygmunt Krynicki <zygmunt.krynicki@linaro.org> # # This file is part of LAVA Server. # # LAVA Server is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License version 3 # as published by the Free Software Foundation # # LAVA Server is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with LAVA Server. If not, see <http://www.gnu.org/licenses/>. __version__ = (0, 3, 0, "beta", 3)
Bump version to beta 3
Bump version to beta 3
Python
agpl-3.0
OSSystems/lava-server,Linaro/lava-server,Linaro/lava-server,OSSystems/lava-server,OSSystems/lava-server,Linaro/lava-server,Linaro/lava-server
--- +++ @@ -17,4 +17,4 @@ # along with LAVA Server. If not, see <http://www.gnu.org/licenses/>. -__version__ = (0, 3, 0, "beta", 2) +__version__ = (0, 3, 0, "beta", 3)
7a7cd83b5d49961e8d0cdd851caf11b7110c2779
app.py
app.py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- from flask import Flask, request, json from flask.ext.cors import CORS import database import rsser # Update data before application is allowed to start database.update_database() app = Flask(__name__) CORS(app) @app.route('/speakercast/speakers') def speakers(): speakers = [{'name': name, 'talks': count} for count, name in database.get_all_speaker_and_counts()] return json.dumps(speakers) @app.route('/speakercast/generate', methods=['POST', 'OPTIONS']) def generate(): data = json.loads(request.data) speakers = data['speakers'] id_ = database.generate_id(speakers) return id_ @app.route('/speakercast/feed/<id>') def feed(id): speakers = database.get_speakers(id) if speakers is None: # TODO: Send some error return "ERROR" talks = database.get_talks(speakers) return rsser.create_rss_feed(talks=talks, speakers=list(speakers)) if __name__ == "__main__": app.run(debug=True)
#!/usr/bin/env python3 # -*- coding: utf-8 -*- from flask import Flask, request, json from flask.ext.cors import CORS import database import rsser # Update data before application is allowed to start database.update_database() app = Flask(__name__) CORS(app) @app.route('/speakercast/speakers') def speakers(): speakers = [{'name': name, 'talks': count} for count, name in database.get_all_speaker_and_counts()] return json.dumps(speakers) @app.route('/speakercast/generate', methods=['POST', 'OPTIONS']) def generate(): if request.method == 'OPTIONS': return "" data = json.loads(request.data) speakers = data['speakers'] id_ = database.generate_id(speakers) print("Generating id ({}) for {}".format(id_, speakers)) return id_ @app.route('/speakercast/feed/<id>') def feed(id): speakers = database.get_speakers(id) if speakers is None: # TODO: Send some error return "ERROR" talks = database.get_talks(speakers) return rsser.create_rss_feed(talks=talks, speakers=list(speakers)) if __name__ == "__main__": app.run(debug=True)
Handle OPTION requests for generate
Handle OPTION requests for generate Just return nothing. Chrome seems to be happy with that response.
Python
bsd-3-clause
philipbl/talk_feed,philipbl/SpeakerCast
--- +++ @@ -22,10 +22,14 @@ @app.route('/speakercast/generate', methods=['POST', 'OPTIONS']) def generate(): + if request.method == 'OPTIONS': + return "" + data = json.loads(request.data) speakers = data['speakers'] id_ = database.generate_id(speakers) + print("Generating id ({}) for {}".format(id_, speakers)) return id_
0817bdcd26de6627ee36edcee2072727f3174d4c
cfsite/settings/prod.py
cfsite/settings/prod.py
"""Development settings and globals.""" from common import * import dj_database_url ########## DEBUG CONFIGURATION DEBUG = True TEMPLATE_DEBUG = DEBUG ########## END DEBUG CONFIGURATION # Parse database configuration from $DATABASE_URL DATABASES = { 'default': { 'ENGINE':'django.db.backends.postgresql_psycopg2', 'NAME': '', 'USER': '', 'PASSWORD': '', 'HOST': '', 'PORT': '', } } DATABASES['default'] = dj_database_url.config() # Honor the 'X-Forwarded-Proto' header for request.is_secure() SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') # Allow all host headers ALLOWED_HOSTS = ['*']
"""Development settings and globals.""" from common import * import dj_database_url ########## DEBUG CONFIGURATION DEBUG = False TEMPLATE_DEBUG = DEBUG ########## END DEBUG CONFIGURATION # Parse database configuration from $DATABASE_URL DATABASES = { 'default': { 'ENGINE':'django.db.backends.postgresql_psycopg2', 'NAME': '', 'USER': '', 'PASSWORD': '', 'HOST': '', 'PORT': '', } } DATABASES['default'] = dj_database_url.config() # Honor the 'X-Forwarded-Proto' header for request.is_secure() SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') # Allow all host headers ALLOWED_HOSTS = ['*']
Revert to debug=false for deployment
Revert to debug=false for deployment
Python
mit
susanctu/Crazyfish-Public,susanctu/Crazyfish-Public
--- +++ @@ -5,7 +5,7 @@ import dj_database_url ########## DEBUG CONFIGURATION -DEBUG = True +DEBUG = False TEMPLATE_DEBUG = DEBUG ########## END DEBUG CONFIGURATION
baca7b88893f175a222d7130ef1889893ed6b970
iterm2_tools/images.py
iterm2_tools/images.py
""" Functions for displaying images inline in iTerm2. See https://iterm2.com/images.html. """ from __future__ import print_function, division, absolute_import import sys import os import base64 IMAGE_CODE = '\033]1337;File={file};inline={inline};size={size}:{base64_img}\a' def display_image_bytes(b, filename=None, inline=1): """ Display the image given by the bytes b in the terminal. If filename=None the filename defaults to "Unnamed file". """ data = { 'file': base64.b64encode((filename or 'Unnamed file').encode('utf-8')).decode('ascii'), 'inline': inline, 'size': len(b), 'base64_img': base64.b64encode(b).decode('ascii'), } return (IMAGE_CODE.format(**data)) def display_image_file(fn): """ Display an image in the terminal. A newline is not printed. """ with open(os.path.realpath(os.path.expanduser(fn)), 'rb') as f: sys.stdout.write(display_image_bytes(f.read(), filename=fn)) # Backwards compatibility image_bytes = display_image_bytes
""" Functions for displaying images inline in iTerm2. See https://iterm2.com/images.html. """ from __future__ import print_function, division, absolute_import import sys import os import base64 IMAGE_CODE = '\033]1337;File={file};inline={inline};size={size}:{base64_img}\a' def display_image_bytes(b, filename=None, inline=1): """ Display the image given by the bytes b in the terminal. If filename=None the filename defaults to "Unnamed file". """ data = { 'file': base64.b64encode((filename or 'Unnamed file').encode('utf-8')).decode('ascii'), 'inline': inline, 'size': len(b), 'base64_img': base64.b64encode(b).decode('ascii'), } return (IMAGE_CODE.format(**data)) # Backwards compatibility def image_bytes(b, filename=None, inline=1): """ **DEPRECATED**: Use display_image_bytes. """ return display_image_file(b, filename=filename, inline=inline) def display_image_file(fn): """ Display an image in the terminal. A newline is not printed. """ with open(os.path.realpath(os.path.expanduser(fn)), 'rb') as f: sys.stdout.write(display_image_bytes(f.read(), filename=fn))
Add a deprecation message to the docstring of image_bytes()
Add a deprecation message to the docstring of image_bytes()
Python
mit
asmeurer/iterm2-tools
--- +++ @@ -26,6 +26,13 @@ } return (IMAGE_CODE.format(**data)) +# Backwards compatibility +def image_bytes(b, filename=None, inline=1): + """ + **DEPRECATED**: Use display_image_bytes. + """ + return display_image_file(b, filename=filename, inline=inline) + def display_image_file(fn): """ Display an image in the terminal. @@ -34,6 +41,3 @@ """ with open(os.path.realpath(os.path.expanduser(fn)), 'rb') as f: sys.stdout.write(display_image_bytes(f.read(), filename=fn)) - -# Backwards compatibility -image_bytes = display_image_bytes
a2f1cdc05e63b7b68c16f3fd1e5203608888b059
traits/util/deprecated.py
traits/util/deprecated.py
""" A decorator for marking methods/functions as deprecated. """ # Standard library imports. import logging # We only warn about each function or method once! _cache = {} def deprecated(message): """ A factory for decorators for marking methods/functions as deprecated. """ def decorator(fn): """ A decorator for marking methods/functions as deprecated. """ def wrapper(*args, **kw): """ The method/function wrapper. """ global _cache module_name = fn.__module__ function_name = fn.__name__ if (module_name, function_name) not in _cache: logging.getLogger(module_name).warning( 'DEPRECATED: %s.%s, %s' % ( module_name, function_name, message ) ) _cache[(module_name, function_name)] = True return fn(*args, **kw) wrapper.__doc__ = fn.__doc__ wrapper.__name__ = fn.__name__ return wrapper return decorator #### EOF ######################################################################
# Test the 'trait_set', 'trait_get' interface to # the HasTraits class. # # Copyright (c) 2014, Enthought, Inc. # All rights reserved. # # This software is provided without warranty under the terms of the BSD # License included in /LICENSE.txt and may be redistributed only under the # conditions described in the aforementioned license. The license is also # available online at http://www.enthought.com/licenses/BSD.txt # # Thanks for using Enthought open source! """ A decorator for marking methods/functions as deprecated. """ # Standard library imports. import functools import warnings def deprecated(message): """ A factory for decorators for marking methods/functions as deprecated. """ def decorator(fn): """ A decorator for marking methods/functions as deprecated. """ @functools.wraps(fn) def wrapper(*args, **kw): """ The method/function wrapper. """ warnings.warn(message, DeprecationWarning, stacklevel=2) return fn(*args, **kw) return wrapper return decorator
Simplify deprecation machinery: don't cache previous messages, and use warnings instead of logging.
Simplify deprecation machinery: don't cache previous messages, and use warnings instead of logging.
Python
bsd-3-clause
burnpanck/traits,burnpanck/traits
--- +++ @@ -1,45 +1,37 @@ +# Test the 'trait_set', 'trait_get' interface to +# the HasTraits class. +# +# Copyright (c) 2014, Enthought, Inc. +# All rights reserved. +# +# This software is provided without warranty under the terms of the BSD +# License included in /LICENSE.txt and may be redistributed only under the +# conditions described in the aforementioned license. The license is also +# available online at http://www.enthought.com/licenses/BSD.txt +# +# Thanks for using Enthought open source! + """ A decorator for marking methods/functions as deprecated. """ - # Standard library imports. -import logging - -# We only warn about each function or method once! -_cache = {} +import functools +import warnings def deprecated(message): """ A factory for decorators for marking methods/functions as deprecated. """ - def decorator(fn): """ A decorator for marking methods/functions as deprecated. """ + @functools.wraps(fn) def wrapper(*args, **kw): """ The method/function wrapper. """ - global _cache - - module_name = fn.__module__ - function_name = fn.__name__ - - if (module_name, function_name) not in _cache: - logging.getLogger(module_name).warning( - 'DEPRECATED: %s.%s, %s' % ( - module_name, function_name, message - ) - ) - - _cache[(module_name, function_name)] = True - + warnings.warn(message, DeprecationWarning, stacklevel=2) return fn(*args, **kw) - - wrapper.__doc__ = fn.__doc__ - wrapper.__name__ = fn.__name__ return wrapper return decorator - -#### EOF ######################################################################
3900c8173bff6c3b1175ff9d6cffec1b98db7c74
address_book/address_book.py
address_book/address_book.py
__all__ = ['AddressBook'] class AddressBook(object): def __init__(self): self.persons = [] def add_person(self, person): self.persons.append(person)
from person import Person __all__ = ['AddressBook'] class AddressBook(object): def __init__(self): self.persons = [] def add_person(self, person): self.persons.append(person) def __contains__(self, item): if isinstance(item, Person): return item in self.persons return False
Add ability to check is the Person in AddressBook or not
Add ability to check is the Person in AddressBook or not
Python
mit
dizpers/python-address-book-assignment
--- +++ @@ -1,3 +1,5 @@ +from person import Person + __all__ = ['AddressBook'] @@ -8,3 +10,8 @@ def add_person(self, person): self.persons.append(person) + + def __contains__(self, item): + if isinstance(item, Person): + return item in self.persons + return False
d08e8144b90d3fe89fd449d31bdb655d62f3a749
serfclient/connection.py
serfclient/connection.py
import socket import sys class SerfConnectionError(Exception): pass class SerfConnection(object): """ Manages RPC communication to and from a Serf agent. """ def __init__(self, host='localhost', port=7373): self.host, self.port = host, port self._socket = None def __repr__(self): return "%(class)s<host=%(host)s,port=%(port)s>" % { 'class': self.__class__.__name__, 'host': self.host, 'port': self.port, } def handshake(self): """ Sets up the connection with the Serf agent and does the initial handshake. """ if self._socket: return try: self._socket = socket.create_connection((self.host, self.port)) return True except socket.error: e = sys.exc_info()[1] raise SerfConnectionError(self._error_message(e)) def _error_message(self, exception): return "Error %s connecting %s:%s. %s." % \ (exception.args[0], self.host, self.port, exception.args[1])
import socket import sys class SerfConnectionError(Exception): pass class SerfConnection(object): """ Manages RPC communication to and from a Serf agent. """ def __init__(self, host='localhost', port=7373): self.host, self.port = host, port self._socket = None def __repr__(self): return "%(class)s<host=%(host)s,port=%(port)s>" % { 'class': self.__class__.__name__, 'host': self.host, 'port': self.port, } def handshake(self): """ Sets up the connection with the Serf agent and does the initial handshake. """ if self._socket: return True else: self._socket = self._connect() return True def _connect(self): try: return socket.create_connection((self.host, self.port), 3) except socket.error: e = sys.exc_info()[1] raise SerfConnectionError(self._error_message(e)) def _error_message(self, exception): return "Error %s connecting %s:%s. %s." % \ (exception.args[0], self.host, self.port, exception.args[1])
Move all 'connect' logic into a private method
Move all 'connect' logic into a private method
Python
mit
charleswhchan/serfclient-py,KushalP/serfclient-py
--- +++ @@ -28,10 +28,14 @@ initial handshake. """ if self._socket: - return + return True + else: + self._socket = self._connect() + return True + + def _connect(self): try: - self._socket = socket.create_connection((self.host, self.port)) - return True + return socket.create_connection((self.host, self.port), 3) except socket.error: e = sys.exc_info()[1] raise SerfConnectionError(self._error_message(e))
14c41706d6437247bbe69e0e574c03863fbe5bda
api/v2/views/maintenance_record.py
api/v2/views/maintenance_record.py
from rest_framework.serializers import ValidationError from core.models import MaintenanceRecord from api.permissions import CanEditOrReadOnly from api.v2.serializers.details import MaintenanceRecordSerializer from api.v2.views.base import AuthOptionalViewSet class MaintenanceRecordViewSet(AuthOptionalViewSet): """ API endpoint that allows records to be viewed or edited. """ http_method_names = ['get', 'post', 'put', 'patch', 'head', 'options', 'trace'] queryset = MaintenanceRecord.objects.order_by('-start_date') permission_classes = (CanEditOrReadOnly,) serializer_class = MaintenanceRecordSerializer
import django_filters from rest_framework import filters from rest_framework.serializers import ValidationError from core.models import AtmosphereUser, MaintenanceRecord from core.query import only_current from api.permissions import CanEditOrReadOnly from api.v2.serializers.details import MaintenanceRecordSerializer from api.v2.views.base import AuthOptionalViewSet class MaintenanceRecordFilterBackend(filters.BaseFilterBackend): """ Filter MaintenanceRecords using the request_user and 'query_params' """ def filter_queryset(self, request, queryset, view): request_params = request.query_params active = request_params.get('active') if isinstance(active, basestring) and active.lower() == 'true'\ or isinstance(active, bool) and active: queryset = MaintenanceRecord.active() return queryset class MaintenanceRecordViewSet(AuthOptionalViewSet): """ API endpoint that allows records to be viewed or edited. """ http_method_names = ['get', 'post', 'put', 'patch', 'head', 'options', 'trace'] queryset = MaintenanceRecord.objects.order_by('-start_date') permission_classes = (CanEditOrReadOnly,) serializer_class = MaintenanceRecordSerializer filter_backends = (filters.DjangoFilterBackend, filters.SearchFilter, MaintenanceRecordFilterBackend)
Add '?active=' filter for Maintenance Record
[ATMO-1200] Add '?active=' filter for Maintenance Record
Python
apache-2.0
CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend
--- +++ @@ -1,11 +1,27 @@ +import django_filters + +from rest_framework import filters from rest_framework.serializers import ValidationError -from core.models import MaintenanceRecord +from core.models import AtmosphereUser, MaintenanceRecord +from core.query import only_current from api.permissions import CanEditOrReadOnly from api.v2.serializers.details import MaintenanceRecordSerializer from api.v2.views.base import AuthOptionalViewSet + +class MaintenanceRecordFilterBackend(filters.BaseFilterBackend): + """ + Filter MaintenanceRecords using the request_user and 'query_params' + """ + def filter_queryset(self, request, queryset, view): + request_params = request.query_params + active = request_params.get('active') + if isinstance(active, basestring) and active.lower() == 'true'\ + or isinstance(active, bool) and active: + queryset = MaintenanceRecord.active() + return queryset class MaintenanceRecordViewSet(AuthOptionalViewSet): @@ -16,3 +32,4 @@ queryset = MaintenanceRecord.objects.order_by('-start_date') permission_classes = (CanEditOrReadOnly,) serializer_class = MaintenanceRecordSerializer + filter_backends = (filters.DjangoFilterBackend, filters.SearchFilter, MaintenanceRecordFilterBackend)
611c34eee4b5aa263669f1b7321b97fab9a98b5e
dask/distributed/tests/test_ipython_utils.py
dask/distributed/tests/test_ipython_utils.py
from dask.distributed import dask_client_from_ipclient def test_dask_client_from_ipclient(): from IPython.parallel import Client c = Client() dc = dask_client_from_ipclient(c) assert 2 == dc.get({'a': 1, 'b': (lambda x: x + 1, 'a')}, 'b') dc.close(close_workers=True, close_scheduler=True)
from dask.distributed import dask_client_from_ipclient import numpy as np from numpy.testing import assert_array_almost_equal import dask.array as da def test_dask_client_from_ipclient(): from IPython.parallel import Client c = Client() dask_client = dask_client_from_ipclient(c) # data a = np.arange(100).reshape(10, 10) d = da.from_array(a, ((5, 5), (5, 5))) # test array.mean expected = a.mean(axis=0) d1 = d.mean(axis=0) result = d1.compute(get=dask_client.get) assert_array_almost_equal(result, expected) # test ghosting d2 = da.ghost.ghost(d, depth=1, boundary='reflect') d3 = da.ghost.trim_internal(d2, {0: 1, 1: 1}) result1 = d3.compute(get=dask_client.get) assert_array_almost_equal(result1, a) # close the workers dask_client.close(close_workers=True, close_scheduler=True)
Remove lambda test. Add dask array tests.
Remove lambda test. Add dask array tests.
Python
bsd-3-clause
PhE/dask,clarkfitzg/dask,jayhetee/dask,simudream/dask,mikegraham/dask,vikhyat/dask,PhE/dask,wiso/dask,jcrist/dask,esc/dask,mraspaud/dask,esc/dask,marianotepper/dask,vikhyat/dask,pombredanne/dask,simudream/dask,freeman-lab/dask,cpcloud/dask,blaze/dask,marianotepper/dask,jcrist/dask,hainm/dask,ContinuumIO/dask,blaze/dask,wiso/dask,mraspaud/dask,chrisbarber/dask,cowlicks/dask,gameduell/dask,freeman-lab/dask,jayhetee/dask,jakirkham/dask,jakirkham/dask,mrocklin/dask,mrocklin/dask,dask/dask,pombredanne/dask,hainm/dask,clarkfitzg/dask,ContinuumIO/dask,ssanderson/dask,dask/dask,ssanderson/dask
--- +++ @@ -1,8 +1,29 @@ from dask.distributed import dask_client_from_ipclient +import numpy as np +from numpy.testing import assert_array_almost_equal +import dask.array as da + def test_dask_client_from_ipclient(): from IPython.parallel import Client c = Client() - dc = dask_client_from_ipclient(c) - assert 2 == dc.get({'a': 1, 'b': (lambda x: x + 1, 'a')}, 'b') - dc.close(close_workers=True, close_scheduler=True) + dask_client = dask_client_from_ipclient(c) + + # data + a = np.arange(100).reshape(10, 10) + d = da.from_array(a, ((5, 5), (5, 5))) + + # test array.mean + expected = a.mean(axis=0) + d1 = d.mean(axis=0) + result = d1.compute(get=dask_client.get) + assert_array_almost_equal(result, expected) + + # test ghosting + d2 = da.ghost.ghost(d, depth=1, boundary='reflect') + d3 = da.ghost.trim_internal(d2, {0: 1, 1: 1}) + result1 = d3.compute(get=dask_client.get) + assert_array_almost_equal(result1, a) + + # close the workers + dask_client.close(close_workers=True, close_scheduler=True)
6fbf3edb489059f93cee6684bf5046386f538391
src/attendance/wsgi.py
src/attendance/wsgi.py
""" WSGI config for openservices project. It exposes the WSGI callable as a module-level variable named ``application``. """ import os from django.core.wsgi import get_wsgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "attendance.settings") application = get_wsgi_application()
""" WSGI config for openservices project. It exposes the WSGI callable as a module-level variable named ``application``. """ import os import sys sys.path.append(os.path.abspath(os.path.join(__file__, '../..'))) from django.core.wsgi import get_wsgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "attendance.settings") application = get_wsgi_application()
Append source path in WSGI mode.
Append source path in WSGI mode.
Python
bsd-2-clause
OpenServicesEU/python-attendance
--- +++ @@ -5,6 +5,9 @@ """ import os +import sys + +sys.path.append(os.path.abspath(os.path.join(__file__, '../..'))) from django.core.wsgi import get_wsgi_application
1629d6d369bce079c33986aa62a12a1ad3a8a47d
test/test_grequest.py
test/test_grequest.py
from mpi4py import MPI import mpiunittest as unittest class GReqCtx(object): source = 1 tag = 7 completed = False free_called = False def query(self, status): status.Set_source(self.source) status.Set_tag(self.tag) def free(self): self.free_called = True def cancel(self, completed): if completed is not self.completed: #raise AssertionError() raise MPI.Exception(MPI.ERR_PENDING) class TestGrequest(unittest.TestCase): def testAll(self): ctx = GReqCtx() greq = MPI.Grequest.Start(ctx.query, ctx.free, ctx.cancel) self.assertFalse(greq.Test()) self.assertFalse(ctx.free_called) greq.Cancel() greq.Complete() ctx.completed = True greq.Cancel() status = MPI.Status() self.assertTrue(greq.Test(status)) self.assertEqual(status.Get_source(), ctx.source) self.assertEqual(status.Get_tag(), ctx.tag) greq.Wait() self.assertTrue(ctx.free_called) if MPI.Get_version() < (2, 0): del GReqCtx del TestGrequest if __name__ == '__main__': unittest.main()
from mpi4py import MPI import mpiunittest as unittest class GReqCtx(object): source = 1 tag = 7 completed = False free_called = False def query(self, status): status.Set_source(self.source) status.Set_tag(self.tag) def free(self): self.free_called = True def cancel(self, completed): if completed is not self.completed: raise MPI.Exception(MPI.ERR_PENDING) class TestGrequest(unittest.TestCase): def testAll(self): ctx = GReqCtx() greq = MPI.Grequest.Start(ctx.query, ctx.free, ctx.cancel) self.assertFalse(greq.Test()) self.assertFalse(ctx.free_called) greq.Cancel() greq.Complete() ctx.completed = True greq.Cancel() status = MPI.Status() self.assertTrue(greq.Test(status)) self.assertEqual(status.Get_source(), ctx.source) self.assertEqual(status.Get_tag(), ctx.tag) greq.Wait() self.assertTrue(ctx.free_called) if MPI.Get_version() < (2, 0): del GReqCtx del TestGrequest if __name__ == '__main__': unittest.main()
Remove commented-out line in testcase
Remove commented-out line in testcase
Python
bsd-2-clause
pressel/mpi4py,mpi4py/mpi4py,mpi4py/mpi4py,mpi4py/mpi4py,pressel/mpi4py,pressel/mpi4py,pressel/mpi4py
--- +++ @@ -14,7 +14,6 @@ self.free_called = True def cancel(self, completed): if completed is not self.completed: - #raise AssertionError() raise MPI.Exception(MPI.ERR_PENDING) class TestGrequest(unittest.TestCase):
1c22ac91789c2e7230f1c97e9d4def0dfcf13638
app/drivers/mslookup/base.py
app/drivers/mslookup/base.py
from app.lookups import base as lookups from app.drivers.base import BaseDriver class LookupDriver(BaseDriver): def __init__(self, **kwargs): super().__init__(**kwargs) self.lookupfn = kwargs.get('lookup', None) self.initialize_lookup() def initialize_lookup(self): if self.lookupfn is not None: self.lookup = lookups.get_lookup(self.lookupfn, self.lookuptype) else: # FIXME MUST be a set or mzml lookup? here is place to assert # correct lookuptype! self.lookupfn = 'msstitcher_lookup.sqlite' self.lookup = lookups.create_new_lookup(self.lookupfn) self.lookup.add_tables() def run(self): self.create_lookup()
import os from app.lookups import base as lookups from app.drivers.base import BaseDriver class LookupDriver(BaseDriver): def __init__(self, **kwargs): super().__init__(**kwargs) self.lookupfn = kwargs.get('lookup', None) self.initialize_lookup() def initialize_lookup(self): if self.lookupfn is not None: self.lookup = lookups.get_lookup(self.lookupfn, self.lookuptype) else: # FIXME MUST be a set or mzml lookup? here is place to assert # correct lookuptype! self.lookupfn = os.path.join(self.outdir, 'msstitcher_lookup.sqlite') self.lookup = lookups.create_new_lookup(self.lookupfn) self.lookup.add_tables() def run(self): self.create_lookup()
Add out directory to path to new sqlite lookup
Add out directory to path to new sqlite lookup
Python
mit
glormph/msstitch
--- +++ @@ -1,3 +1,5 @@ +import os + from app.lookups import base as lookups from app.drivers.base import BaseDriver @@ -14,7 +16,8 @@ else: # FIXME MUST be a set or mzml lookup? here is place to assert # correct lookuptype! - self.lookupfn = 'msstitcher_lookup.sqlite' + self.lookupfn = os.path.join(self.outdir, + 'msstitcher_lookup.sqlite') self.lookup = lookups.create_new_lookup(self.lookupfn) self.lookup.add_tables()
8c2a52ce4eb47e89450677d0beed9c3d45b417e0
tests/test_default.py
tests/test_default.py
import testinfra.utils.ansible_runner testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( '.molecule/ansible_inventory').get_hosts('all') def test_hosts_file(File): f = File('/etc/hosts') assert f.exists assert f.user == 'root' assert f.group == 'root'
import testinfra.utils.ansible_runner testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( '.molecule/ansible_inventory').get_hosts('all') def test_service_running_and_enabled(Service): collectd = Service("collectd") collectd.is_running collectd.is_enabled
Write a sensible (if post hoc) test.
Write a sensible (if post hoc) test. I had a hard time getting this test to fail so I could prove it works, but it's a simple test and its main purpose is to provide an example for later tests, so I'm calling it Good Enough.
Python
mit
idi-ops/ansible-collectd
--- +++ @@ -4,9 +4,7 @@ '.molecule/ansible_inventory').get_hosts('all') -def test_hosts_file(File): - f = File('/etc/hosts') - - assert f.exists - assert f.user == 'root' - assert f.group == 'root' +def test_service_running_and_enabled(Service): + collectd = Service("collectd") + collectd.is_running + collectd.is_enabled
a72b20a7c614c86a196585a6703b218613f6d74b
modules/githubsearch.py
modules/githubsearch.py
import requests import simplejson as json class GithubSearch(object): def __init__(self): self.api_url = "https://api.github.com/search/code?q=" self.repo = "OpenTreeOfLife/treenexus" def search(self,term): search_url = "%s+repo:%s" % (self.api_url, self.repo) r = requests.get(search_url) if r.ok: search_json = json.loads(r.text or r.content)
import requests import simplejson as json class GithubSearch(object): def __init__(self): self.api_url = "https://api.github.com/search/code?q=" self.repo = "OpenTreeOfLife/treenexus" def search(self,term): search_url = "%s%s+repo:%s" % (self.api_url, term, self.repo) print "Requesting %s" % search_url r = requests.get(search_url) if r.ok: search_json = json.loads(r.text or r.content) return search_json
Add a simple search controller which wraps around the Github code search API
Add a simple search controller which wraps around the Github code search API
Python
bsd-2-clause
OpenTreeOfLife/phylesystem-api,OpenTreeOfLife/phylesystem-api,OpenTreeOfLife/phylesystem-api
--- +++ @@ -6,7 +6,9 @@ self.api_url = "https://api.github.com/search/code?q=" self.repo = "OpenTreeOfLife/treenexus" def search(self,term): - search_url = "%s+repo:%s" % (self.api_url, self.repo) + search_url = "%s%s+repo:%s" % (self.api_url, term, self.repo) + print "Requesting %s" % search_url r = requests.get(search_url) if r.ok: search_json = json.loads(r.text or r.content) + return search_json
519cba447e9fed9eb40d5328376442e75936fd4a
encbox.py
encbox.py
#!/usr/bin/python #lets import the dropbox module import dropbox #taking the app key and secret key=raw_input('Enter your app key : ') secret=raw_input('Enter your app secret : ') #initializing the flow flow = dropbox.client.DropboxOAuth2FlowNoRedirect(key,secret) #we are ready to start the connection, so we can generate our token authorize_url = flow.start() print '1. Go to: ' + authorize_url print '2. Click "Allow" (you might have to log in first)' print '3. Copy the authorization code.' code = raw_input("Enter the authorization code here: ").strip() #then we copy/paste the token from the website # This will fail if the user enters an invalid authorization code access_token,user_id= flow.finish(code) client = dropbox.client.DropboxClient(access_token) print 'The account has been linked successfully'
Set the connection to Dropbox API
Set the connection to Dropbox API
Python
mit
Aris-Breezy/encbox
--- +++ @@ -0,0 +1,25 @@ +#!/usr/bin/python +#lets import the dropbox module +import dropbox + +#taking the app key and secret +key=raw_input('Enter your app key : ') +secret=raw_input('Enter your app secret : ') +#initializing the flow +flow = dropbox.client.DropboxOAuth2FlowNoRedirect(key,secret) + +#we are ready to start the connection, so we can generate our token +authorize_url = flow.start() + +print '1. Go to: ' + authorize_url +print '2. Click "Allow" (you might have to log in first)' +print '3. Copy the authorization code.' + +code = raw_input("Enter the authorization code here: ").strip() + +#then we copy/paste the token from the website +# This will fail if the user enters an invalid authorization code + +access_token,user_id= flow.finish(code) +client = dropbox.client.DropboxClient(access_token) +print 'The account has been linked successfully'
7ad9930afd6cfd70e8fdf48dc4b2ecadba6426ea
abcpy/distributions.py
abcpy/distributions.py
# -*- coding: utf-8 -*- import scipy.stats as ss import numpy.random as npr from functools import partial from . import core def npr_op(distribution, size, input): prng = npr.RandomState(0) prng.set_state(input['random_state']) distribution = getattr(prng, distribution) size = (input['n'], *size) data = distribution(*input['data'], size=size) return core.to_output(input, data=data, random_state=prng.get_state()) class NumpyRV(core.RandomStateMixin, core.Operation): """ Examples -------- NumpyRV('tau', 'normal', 5, size=(2,3)) """ def __init__(self, name, distribution, *params, size=(1,)): if not isinstance(size, tuple): size = (size,) op = partial(npr_op, distribution, size) super(NumpyRV, self).__init__(name, op, *params) class Prior(NumpyRV): pass class Model(core.ObservedMixin, NumpyRV): def __init__(self, *args, observed=None, size=None, **kwargs): if observed is None: raise ValueError('Observed cannot be None') if size is None: size = observed.shape super(Model, self).__init__(*args, observed=observed, size=size, **kwargs)
# -*- coding: utf-8 -*- import scipy.stats as ss import numpy.random as npr from functools import partial from . import core def npr_op(distribution, size, input): prng = npr.RandomState(0) prng.set_state(input['random_state']) distribution = getattr(prng, distribution) size = (input['n'],)+tuple(size) data = distribution(*input['data'], size=size) return core.to_output(input, data=data, random_state=prng.get_state()) class NumpyRV(core.RandomStateMixin, core.Operation): """ Examples -------- NumpyRV('tau', 'normal', 5, size=(2,3)) """ def __init__(self, name, distribution, *params, size=(1,)): if not isinstance(size, tuple): size = (size,) op = partial(npr_op, distribution, size) super(NumpyRV, self).__init__(name, op, *params) class Prior(NumpyRV): pass class Model(core.ObservedMixin, NumpyRV): def __init__(self, *args, observed=None, size=None, **kwargs): if observed is None: raise ValueError('Observed cannot be None') if size is None: size = observed.shape super(Model, self).__init__(*args, observed=observed, size=size, **kwargs)
Change tuple concatenation to be py3 compatible
Change tuple concatenation to be py3 compatible
Python
mit
akangasr/elfi
--- +++ @@ -10,7 +10,7 @@ prng = npr.RandomState(0) prng.set_state(input['random_state']) distribution = getattr(prng, distribution) - size = (input['n'], *size) + size = (input['n'],)+tuple(size) data = distribution(*input['data'], size=size) return core.to_output(input, data=data, random_state=prng.get_state())
41ad0f842320161c45079f7a75d64df6c8716e5d
london_commute_alert.py
london_commute_alert.py
import datetime import os import requests def update(): requests.packages.urllib3.disable_warnings() resp = requests.get('http://api.tfl.gov.uk/Line/Mode/tube/Status').json() return {el['id']: el['lineStatuses'][0]['statusSeverityDescription'] for el in resp} def email(lines): with open('curl_raw_command.sh') as f: raw_command = f.read() if lines: subject = 'Tube delays for commute' body = ', '.join(': '.join([line.capitalize(), s]) for line, s in status.items()) else: subject = 'Good service for commute' body = 'Good service on all lines' # We must have this running on PythonAnywhere - Monday to Sunday. # Ignore Saturday and Sunday if datetime.date.today().isoweekday() in range(1, 6): os.system(raw_command.format(subject=subject, body=body)) def main(): commute_lines = ['metropolitan', 'jubilee', 'central'] status = update() delays = {c: status[c] for c in commute_lines if status[c] != 'Good Service'} email(delays) if __name__ == '__main__': main()
import datetime import os import requests def update(): requests.packages.urllib3.disable_warnings() resp = requests.get('http://api.tfl.gov.uk/Line/Mode/tube/Status').json() return {el['id']: el['lineStatuses'][0]['statusSeverityDescription'] for el in resp} def email(lines): with open('curl_raw_command.sh') as f: raw_command = f.read() if lines: subject = 'Tube delays for commute' body = ', '.join(': '.join([line.capitalize(), s]) for line, s in status.items()) else: subject = 'Good service for commute' body = 'Good service on all lines' os.system(raw_command.format(subject=subject, body=body)) def main(): commute_lines = ['metropolitan', 'jubilee', 'central'] status = update() delays = {c: status[c] for c in commute_lines if status[c] != 'Good Service'} email(delays) if __name__ == '__main__': main()
Move from python anywhere to webfaction
Move from python anywhere to webfaction
Python
mit
noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit
--- +++ @@ -21,10 +21,7 @@ subject = 'Good service for commute' body = 'Good service on all lines' - # We must have this running on PythonAnywhere - Monday to Sunday. - # Ignore Saturday and Sunday - if datetime.date.today().isoweekday() in range(1, 6): - os.system(raw_command.format(subject=subject, body=body)) + os.system(raw_command.format(subject=subject, body=body)) def main():
3eb57619a4e8a669cf879b67d96377ccb21de204
babel_util/scripts/wos_to_pajek.py
babel_util/scripts/wos_to_pajek.py
#!/usr/bin/env python3 from parsers.wos import WOSStream from util.PajekFactory import PajekFactory from util.misc import open_file, Checkpoint if __name__ == "__main__": import argparse parser = argparse.ArgumentParser(description="Creates Pajek (.net) files from WOS XML") parser.add_argument('outfile') parser.add_argument('infile', nargs='+') args = parser.parse_args() chk = Checkpoint() nodes = args.outfile + ".nodes" edges = args.outfile + ".edges" nodes_f = open_file(nodes, "w") edges_f = open_file(edges, "w") parsed = 1 total_files = len(args.infile) pjk = PajekFactory(node_stream=nodes_f, edge_stream=edges_f) for file in args.infile: print(chk.checkpoint("Parsing {}: {}/{}: {:.2%}".format(file, parsed, total_files, parsed/float(total_files)))) f = open_file(file) parser = WOSStream(f) for entry in parser.parse(): if "citations" in entry: for citation in entry["citations"]: pjk.add_edge(entry["id"], citation) print(chk.checkpoint(" Done: "+str(pjk))) parsed += 1 with open_file(args.outfile, "w") as f: f.writelines(pjk.make_header()) chk.end("Done")
#!/usr/bin/env python3 from parsers.wos import WOSStream from util.PajekFactory import PajekFactory from util.misc import open_file, Checkpoint if __name__ == "__main__": import argparse parser = argparse.ArgumentParser(description="Creates Pajek (.net) files from WOS XML") parser.add_argument('outfile') parser.add_argument('--wos-only', action="store_true", help="Only include nodes/edges in WOS") parser.add_argument('infile', nargs='+') args = parser.parse_args() chk = Checkpoint() nodes = args.outfile + ".nodes" edges = args.outfile + ".edges" nodes_f = open_file(nodes, "w") edges_f = open_file(edges, "w") parsed = 1 total_files = len(args.infile) pjk = PajekFactory(node_stream=nodes_f, edge_stream=edges_f) for file in args.infile: print(chk.checkpoint("Parsing {}: {}/{}: {:.2%}".format(file, parsed, total_files, parsed/float(total_files)))) f = open_file(file) parser = WOSStream(f, args.wos_only) for entry in parser.parse(): if "citations" in entry: for citation in entry["citations"]: pjk.add_edge(entry["id"], citation) print(chk.checkpoint(" Done: "+str(pjk))) parsed += 1 with open_file(args.outfile, "w") as f: f.writelines(pjk.make_header()) chk.end("Done")
Add wos-only option to script
Add wos-only option to script
Python
agpl-3.0
jevinw/rec_utilities,jevinw/rec_utilities
--- +++ @@ -7,6 +7,7 @@ import argparse parser = argparse.ArgumentParser(description="Creates Pajek (.net) files from WOS XML") parser.add_argument('outfile') + parser.add_argument('--wos-only', action="store_true", help="Only include nodes/edges in WOS") parser.add_argument('infile', nargs='+') args = parser.parse_args() @@ -24,7 +25,7 @@ for file in args.infile: print(chk.checkpoint("Parsing {}: {}/{}: {:.2%}".format(file, parsed, total_files, parsed/float(total_files)))) f = open_file(file) - parser = WOSStream(f) + parser = WOSStream(f, args.wos_only) for entry in parser.parse(): if "citations" in entry: for citation in entry["citations"]:
557e634a3b68c13b1a19151ec3b96f456e17d347
penelophant/database.py
penelophant/database.py
""" Database Module """ from flask_sqlalchemy import SQLAlchemy db = SQLAlchemy()
""" Database Module """ from flask_sqlalchemy import SQLAlchemy from penelophant import app db = SQLAlchemy(app)
Attach app to SQLAlchemy properly
Attach app to SQLAlchemy properly
Python
apache-2.0
kevinoconnor7/penelophant,kevinoconnor7/penelophant
--- +++ @@ -1,4 +1,5 @@ """ Database Module """ from flask_sqlalchemy import SQLAlchemy -db = SQLAlchemy() +from penelophant import app +db = SQLAlchemy(app)
32c676e727845c62e8958514d3c61ea56569a77b
tests/test_settings.py
tests/test_settings.py
DEBUG = True TEMPLATE_DEBUG = DEBUG SECRET_KEY = 'fake_secret' ROOT_URLCONF = 'tests.test_urls' DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'irrelevant.db' } } INSTALLED_APPS = ( 'djproxy', ) STATIC_ROOT = '' STATIC_URL = '/' APPEND_SLASH = False
DEBUG = True TEMPLATE_DEBUG = DEBUG SECRET_KEY = 'fake_secret' ROOT_URLCONF = 'tests.test_urls' DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'irrelevant.db' } } MIDDLEWARE_CLASSES = [] INSTALLED_APPS = ( 'djproxy', ) STATIC_ROOT = '' STATIC_URL = '/' APPEND_SLASH = False
Add empty middleware setting to quiet warning
Add empty middleware setting to quiet warning Before this change, starting a dev server would give this warning: ``` Performing system checks... System check identified some issues: WARNINGS: ?: (1_7.W001) MIDDLEWARE_CLASSES is not set. HINT: Django 1.7 changed the global defaults for the MIDDLEWARE_CLASSES. django.contrib.sessions.middleware.SessionMiddleware, django.contrib.auth.m iddleware.AuthenticationMiddleware, and django.contrib.messages.middleware.MessageMiddleware were removed from the defaults. If your project needs these mid dleware then you should configure this setting. ```
Python
mit
thomasw/djproxy
--- +++ @@ -12,6 +12,8 @@ } } +MIDDLEWARE_CLASSES = [] + INSTALLED_APPS = ( 'djproxy', )
f76a2070d91d60a261e8f6120a01075491eb785f
conftest.py
conftest.py
# -*- coding: utf-8 -*- pytest_plugins = [ u'ckan.tests.pytest_ckan.ckan_setup', u'ckan.tests.pytest_ckan.fixtures', ]
# -*- coding: utf-8 -*- pytest_plugins = [ ]
Remove pytest plugins from archiver
Remove pytest plugins from archiver
Python
mit
ckan/ckanext-archiver,ckan/ckanext-archiver,ckan/ckanext-archiver
--- +++ @@ -1,6 +1,4 @@ # -*- coding: utf-8 -*- pytest_plugins = [ - u'ckan.tests.pytest_ckan.ckan_setup', - u'ckan.tests.pytest_ckan.fixtures', ]
67d08aff211ae1edbae202819f39be7c34812137
hggithub.py
hggithub.py
# Mimic the hggit extension. try: from hggit import * hggit_reposetup = reposetup except ImportError: # Allow this module to be imported without # hg-git installed, eg for setup.py pass __version__ = "0.1.0" def reposetup(ui, repo, **kwargs): """ Automatically adds Bitbucket->GitHub mirror paths to the repo. Also creates a `master` bookmark for the `default` branch. """ hggit_reposetup(ui, repo, **kwargs) bb = "ssh://hg@bitbucket.org/" for pathname, path in ui.configitems("paths"): if path.startswith(bb): user, project = path.replace(bb, "").rstrip("/").split("/") for k, v in ui.configitems("github"): if k == "username": user = v gh_path = "git+ssh://git@github.com/%s/%s.git" % (user, project) if pathname == "default": if "master" not in repo._bookmarks: from mercurial.commands import bookmark bookmark(ui, repo, mark="master", rev="default") gh_pathname = "github" else: gh_pathname = "github-" + pathname ui.setconfig("paths", gh_pathname, gh_path)
# Mimic the hggit extension. try: from hggit import * hggit_reposetup = reposetup except ImportError: # Allow this module to be imported without # hg-git installed, eg for setup.py pass __version__ = "0.1.0" def reposetup(ui, repo, **kwargs): """ Automatically adds Bitbucket->GitHub mirror paths to the repo. Also creates a `master` bookmark for the `default` branch. """ hggit_reposetup(ui, repo, **kwargs) bb = "ssh://hg@bitbucket.org/" for pathname, path in ui.configitems("paths"): if path.startswith(bb): user, project = path.replace(bb, "").split("/", 1) # Strip slash and everything after it, # such as mq patch queue path. project = project.split("/")[0] for k, v in ui.configitems("github"): if k == "username": user = v gh_path = "git+ssh://git@github.com/%s/%s.git" % (user, project) if pathname == "default": if "master" not in repo._bookmarks: from mercurial.commands import bookmark bookmark(ui, repo, mark="master", rev="default") gh_pathname = "github" else: gh_pathname = "github-" + pathname ui.setconfig("paths", gh_pathname, gh_path)
Allow for extra slashes in project paths, such as mq patch queues.
Allow for extra slashes in project paths, such as mq patch queues.
Python
bsd-2-clause
stephenmcd/hg-github
--- +++ @@ -21,7 +21,10 @@ bb = "ssh://hg@bitbucket.org/" for pathname, path in ui.configitems("paths"): if path.startswith(bb): - user, project = path.replace(bb, "").rstrip("/").split("/") + user, project = path.replace(bb, "").split("/", 1) + # Strip slash and everything after it, + # such as mq patch queue path. + project = project.split("/")[0] for k, v in ui.configitems("github"): if k == "username": user = v
0d3740cef051ed08a307dc2b42fe022ce2f1ba28
bot/utils/attributeobject.py
bot/utils/attributeobject.py
class AttributeObject: def __init__(self, *excluded_keys): self._excluded_keys = excluded_keys def __getattr__(self, item): return self._getattr(item) def __setattr__(self, key, value): if key == "_excluded_keys" or key in self._excluded_keys: super().__setattr__(key, value) else: self._setattr(key, value) def _getattr(self, item): pass def _setattr(self, key, value): pass class DictionaryObject(AttributeObject): def __init__(self): super().__init__("_dictionary") self._dictionary = {} def _getattr(self, item): return self._dictionary.get(item) def _setattr(self, key, value): self._dictionary[key] = value def _copy(self): new = DictionaryObject() new._dictionary = self._dictionary.copy() return new
class AttributeObject: def __init__(self, *excluded_keys): self._excluded_keys = excluded_keys def __getattr__(self, item): return self._getattr(item) def __setattr__(self, key, value): if key == "_excluded_keys" or key in self._excluded_keys: super().__setattr__(key, value) else: self._setattr(key, value) def _getattr(self, item): pass def _setattr(self, key, value): pass class DictionaryObject(AttributeObject): def __init__(self, initial_items={}): super().__init__("_dictionary") self._dictionary = dict(initial_items) def _getattr(self, item): return self._dictionary.get(item) def _setattr(self, key, value): self._dictionary[key] = value def _copy(self): return DictionaryObject(self._dictionary)
Allow to specify initial items on DictionaryObject constructor
Allow to specify initial items on DictionaryObject constructor
Python
agpl-3.0
alvarogzp/telegram-bot,alvarogzp/telegram-bot
--- +++ @@ -19,9 +19,9 @@ class DictionaryObject(AttributeObject): - def __init__(self): + def __init__(self, initial_items={}): super().__init__("_dictionary") - self._dictionary = {} + self._dictionary = dict(initial_items) def _getattr(self, item): return self._dictionary.get(item) @@ -30,6 +30,4 @@ self._dictionary[key] = value def _copy(self): - new = DictionaryObject() - new._dictionary = self._dictionary.copy() - return new + return DictionaryObject(self._dictionary)
7bbec0e5306766741b22341a100db046d76b82a8
apps/books/models.py
apps/books/models.py
from django.db import models from apps.categories.models import Category from apps.users.models import UserProfile from apps.reviews.models import Review class Book(models.Model): title = models.CharField(max_length=255) slug = models.SlugField(max_length=500) author = models.CharField(max_length=255) pages = models.IntegerField() publish_date = models.DateTimeField() description = models.TextField(blank=True, default='') categories = models.ManyToManyField(Category, db_table='category_book', related_name='books') favourites = models.ManyToManyField(UserProfile, db_table='favourites', related_name='liked_books') class Meta: db_table = 'book' def get_rating(self): reviews = Review.objects.filter(book=self) rating = 0 for review in reviews: rating += review.rating return round(rating / Review.MAX_STARS) class UserProfileBook(models.Model): READING = 1 READ = 2 READING_STATUS = ( (READING, 'Reading'), (READ, 'Read'), ) user_profile = models.ForeignKey(UserProfile) book = models.ForeignKey(Book) status = models.IntegerField(choices=READING_STATUS, default=READING) class Meta: db_table = 'user_book'
from django.db import models from apps.categories.models import Category from apps.users.models import UserProfile from apps.reviews.models import Review class Book(models.Model): title = models.CharField(max_length=255) slug = models.SlugField(max_length=500) author = models.CharField(max_length=255) pages = models.IntegerField() publish_date = models.DateTimeField() description = models.TextField(blank=True, default='') categories = models.ManyToManyField(Category, db_table='category_book', related_name='books') favourites = models.ManyToManyField(UserProfile, db_table='favourites', related_name='liked_books') class Meta: db_table = 'book' def get_rating(self): reviews = Review.objects.filter(book=self) total = reviews.count() rating = 0 for review in reviews: rating += review.rating return round(rating / total) class UserProfileBook(models.Model): READING = 1 READ = 2 READING_STATUS = ( (READING, 'Reading'), (READ, 'Read'), ) user_profile = models.ForeignKey(UserProfile) book = models.ForeignKey(Book) status = models.IntegerField(choices=READING_STATUS, default=READING) class Meta: db_table = 'user_book'
Fix get_rating in Book model
Fix get_rating in Book model
Python
mit
vuonghv/brs,vuonghv/brs,vuonghv/brs,vuonghv/brs
--- +++ @@ -20,10 +20,11 @@ def get_rating(self): reviews = Review.objects.filter(book=self) + total = reviews.count() rating = 0 for review in reviews: rating += review.rating - return round(rating / Review.MAX_STARS) + return round(rating / total) class UserProfileBook(models.Model):
12dbfcbdf35f8d846f39eee4898d032aa6729ab9
hack/boilerplate/boilerplate_test.py
hack/boilerplate/boilerplate_test.py
#!/usr/bin/env python # Copyright 2016 The Kubernetes Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import boilerplate import unittest import StringIO import os import sys class TestBoilerplate(unittest.TestCase): """ Note: run this test from the hack/boilerplate directory. $ python -m unittest boilerplate_test """ def test_boilerplate(self): os.chdir("test/") class Args(object): def __init__(self): self.filenames = [] self.rootdir = "." self.boilerplate_dir = "../" self.verbose = True # capture stdout old_stdout = sys.stdout sys.stdout = StringIO.StringIO() boilerplate.args = Args() ret = boilerplate.main() output = sorted(sys.stdout.getvalue().split()) sys.stdout = old_stdout self.assertEquals( output, ['././fail.go', '././fail.py'])
#!/usr/bin/env python # Copyright 2016 The Kubernetes Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import boilerplate import unittest from io import StringIO import os import sys class TestBoilerplate(unittest.TestCase): """ Note: run this test from the hack/boilerplate directory. $ python -m unittest boilerplate_test """ def test_boilerplate(self): os.chdir("test/") class Args(object): def __init__(self): self.filenames = [] self.rootdir = "." self.boilerplate_dir = "../" self.verbose = True # capture stdout old_stdout = sys.stdout sys.stdout = StringIO.StringIO() boilerplate.args = Args() ret = boilerplate.main() output = sorted(sys.stdout.getvalue().split()) sys.stdout = old_stdout self.assertEquals( output, ['././fail.go', '././fail.py'])
Fix up failing boilerplate test
Fix up failing boilerplate test
Python
apache-2.0
cblecker/kubernetes,pmorie/kubernetes,mfojtik/kubernetes,feiskyer/kubernetes,frodenas/kubernetes,Lion-Wei/kubernetes,fanzhangio/kubernetes,chestack/kubernetes,rnaveiras/kubernetes,andrewsykim/kubernetes,intelsdi-x/kubernetes,GulajavaMinistudio/kubernetes,micahhausler/kubernetes,humblec/kubernetes,brendandburns/kubernetes,jagosan/kubernetes,idvoretskyi/kubernetes,ingvagabund/kubernetes,njuicsgz/kubernetes-1,ii/kubernetes,Lion-Wei/kubernetes,liggitt/kubernetes,liggitt/kubernetes,BenTheElder/kubernetes,yarntime/kubernetes,GulajavaMinistudio/kubernetes,ping035627/kubernetes,tallclair/kubernetes,dlorenc/kubernetes,njuicsgz/kubernetes-1,cmluciano/kubernetes,carlory/kubernetes,carlory/kubernetes,warmchang/kubernetes,du2016/kubernetes,mYmNeo/kubernetes,rafax/kubernetes,klaus1982/kubernetes,nak3/kubernetes,saad-ali/kubernetes,ii/kubernetes,yujuhong/kubernetes,verb/kubernetes,dereknex/kubernetes,ii/kubernetes,rnaveiras/kubernetes,thockin/kubernetes,bizhao/kubernetes,mboersma/kubernetes,k82/kubernetes,ravisantoshgudimetla/kubernetes,wanghaoran1988/kubernetes,andrewsykim/kubernetes,rrati/kubernetes,weiwei04/kubernetes,wenlxie/kubernetes,chrislovecnm/kubernetes,bparees/kubernetes,jennybuckley/kubernetes,olivierlemasle/kubernetes,stevesloka/kubernetes,PiotrProkop/kubernetes,mboersma/kubernetes,gnufied/kubernetes,roberthbailey/kubernetes,pweil-/kubernetes,PiotrProkop/kubernetes,x13n/kubernetes,shyamjvs/kubernetes,dixudx/kubernetes,kevensen/kubernetes,tallclair/kubernetes,coolsvap/kubernetes,lichuqiang/kubernetes,Lion-Wei/kubernetes,andrewrynhard/kubernetes,fanzhangio/kubernetes,mml/kubernetes,mml/kubernetes,chrislovecnm/kubernetes,dims/kubernetes,pweil-/kubernetes,Acidburn0zzz/kubernetes,cmluciano/kubernetes,wenlxie/kubernetes,ixdy/kubernetes,mahak/kubernetes,jdef/kubernetes,maciaszczykm/kubernetes,BenTheElder/kubernetes,johscheuer/kubernetes,k82cn/kubernetes,jfrazelle/kubernetes,linux-on-ibm-z/kubernetes,xlgao-zju/kubernetes,ravilr/kubernetes,jagosan/kubernetes,fejta/kubernetes,d
ereknex/kubernetes,chrislovecnm/kubernetes,tallclair/kubernetes,krmayankk/kubernetes,Hui-Zhi/kubernetes,li-ang/kubernetes,tomerf/kubernetes,zhouhaibing089/kubernetes,micahhausler/kubernetes,PiotrProkop/kubernetes,yarntime/kubernetes,dims/kubernetes,wanghaoran1988/kubernetes,sanjeevm0/kubernetes,chrislovecnm/kubernetes,mahak/kubernetes,ping035627/kubernetes,dixudx/kubernetes,nckturner/kubernetes,mkumatag/kubernetes,andrewrynhard/kubernetes,zetaab/kubernetes,gnufied/kubernetes,xychu/kubernetes,andrewsykim/kubernetes,enj/kubernetes,dims/kubernetes,k82/kubernetes,derekwaynecarr/kubernetes,pweil-/kubernetes,mYmNeo/kubernetes,cadmuxe/kubernetes,erwinvaneyk/kubernetes,jagosan/kubernetes,mikedanese/kubernetes,deads2k/kubernetes,Clarifai/kubernetes,maciaszczykm/kubernetes,DiamantiCom/kubernetes,jingxu97/kubernetes,brendandburns/kubernetes,mengqiy/kubernetes,nak3/kubernetes,du2016/kubernetes,jingxu97/kubernetes,matthyx/kubernetes,cofyc/kubernetes,spzala/kubernetes,andrewrynhard/kubernetes,klaus1982/kubernetes,tomerf/kubernetes,intelsdi-x/kubernetes,u2takey/kubernetes,thockin/kubernetes,kow3ns/kubernetes,tallclair/kubernetes,johscheuer/kubernetes,mml/kubernetes,iameli/kubernetes,maciaszczykm/kubernetes,tcnghia/kubernetes,idvoretskyi/kubernetes,jennybuckley/kubernetes,openshift/kubernetes,MHBauer/kubernetes,cofyc/kubernetes,micahhausler/kubernetes,kitt1987/kubernetes,xychu/kubernetes,lojies/kubernetes,ii/kubernetes,nak3/kubernetes,kpgriffith/kubernetes,intelsdi-x/kubernetes,humblec/kubernetes,LalatenduMohanty/kubernetes,JacobTanenbaum/kubernetes,kitt1987/kubernetes,Acidburn0zzz/kubernetes,ConnorDoyle/kubernetes,stevesloka/kubernetes,xychu/kubernetes,pweil-/kubernetes,MHBauer/kubernetes,stevesloka/kubernetes,mengqiy/kubernetes,linux-on-ibm-z/kubernetes,du2016/kubernetes,ixdy/kubernetes,ingvagabund/kubernetes,wanghaoran1988/kubernetes,pwittrock/kubernetes,verb/kubernetes,weiwei04/kubernetes,cadmuxe/kubernetes,weiwei04/kubernetes,matthyx/kubernetes,sdminonne/kubernetes,rrati/kuber
netes,johscheuer/kubernetes,jessfraz/kubernetes,dereknex/kubernetes,dixudx/kubernetes,zhangmingld/kubernetes,monopole/kubernetes,LalatenduMohanty/kubernetes,wongma7/kubernetes,PI-Victor/kubernetes,joelsmith/kubernetes,jackfrancis/kubernetes,iterion/kubernetes,dereknex/kubernetes,klaus1982/kubernetes,Lion-Wei/kubernetes,k82/kubernetes,joelsmith/kubernetes,yarntime/kubernetes,tomerf/kubernetes,luxas/kubernetes,jackfrancis/kubernetes,pmorie/kubernetes,zhouhaibing089/kubernetes,ravisantoshgudimetla/kubernetes,k82cn/kubernetes,zhouhaibing089/kubernetes,tallclair/kubernetes,zetaab/kubernetes,roberthbailey/kubernetes,lojies/kubernetes,JacobTanenbaum/kubernetes,mosoft521/kubernetes,kevensen/kubernetes,spzala/kubernetes,nak3/kubernetes,tomzhang/kubernetes,joelsmith/kubernetes,iterion/kubernetes,tcnghia/kubernetes,x13n/kubernetes,mboersma/kubernetes,humblec/kubernetes,mkumatag/kubernetes,johscheuer/kubernetes,cmluciano/kubernetes,fgimenez/kubernetes,lojies/kubernetes,mengqiy/kubernetes,rrati/kubernetes,mkumatag/kubernetes,ravisantoshgudimetla/kubernetes,ii/kubernetes,MHBauer/kubernetes,carlory/kubernetes,DiamantiCom/kubernetes,weiwei04/kubernetes,warmchang/kubernetes,wanghaoran1988/kubernetes,iameli/kubernetes,nikhita/kubernetes,cblecker/kubernetes,chrislovecnm/kubernetes,hex108/kubernetes,kubernetes/kubernetes,fejta/kubernetes,Hui-Zhi/kubernetes,alejandroEsc/kubernetes,humblec/kubernetes,cantbewong/kubernetes,jagosan/kubernetes,carlory/kubernetes,aledbf/kubernetes,aveshagarwal/kubernetes,dereknex/kubernetes,saad-ali/kubernetes,alejandroEsc/kubernetes,erwinvaneyk/kubernetes,mfojtik/kubernetes,dlorenc/kubernetes,DiamantiCom/kubernetes,iterion/kubernetes,iameli/kubernetes,sanjeevm0/kubernetes,rafax/kubernetes,feiskyer/kubernetes,wanghaoran1988/kubernetes,yujuhong/kubernetes,li-ang/kubernetes,derekwaynecarr/kubernetes,verult/kubernetes,chestack/kubernetes,soltysh/kubernetes,kpgriffith/kubernetes,Acidburn0zzz/kubernetes,k82cn/kubernetes,krmayankk/kubernetes,wenlxie/kubernetes,ver
ult/kubernetes,chestack/kubernetes,rnaveiras/kubernetes,chestack/kubernetes,micahhausler/kubernetes,mikedanese/kubernetes,jsafrane/kubernetes,liggitt/kubernetes,ravilr/kubernetes,x13n/kubernetes,nckturner/kubernetes,maciaszczykm/kubernetes,davidz627/kubernetes,cantbewong/kubernetes,MikeSpreitzer/kubernetes,aledbf/kubernetes,dlorenc/kubernetes,enj/kubernetes,kpgriffith/kubernetes,sdminonne/kubernetes,sanjeevm0/kubernetes,chrislovecnm/kubernetes,mboersma/kubernetes,iterion/kubernetes,andrewsykim/kubernetes,mfojtik/kubernetes,humblec/kubernetes,frodenas/kubernetes,mYmNeo/kubernetes,rnaveiras/kubernetes,tomzhang/kubernetes,cmluciano/kubernetes,bizhao/kubernetes,kubernetes/kubernetes,mahak/kubernetes,aveshagarwal/kubernetes,enj/kubernetes,olivierlemasle/kubernetes,wongma7/kubernetes,roberthbailey/kubernetes,mikebrow/kubernetes,fanzhangio/kubernetes,JacobTanenbaum/kubernetes,spzala/kubernetes,LalatenduMohanty/kubernetes,cblecker/kubernetes,alejandroEsc/kubernetes,PI-Victor/kubernetes,kevin-wangzefeng/kubernetes,huangjiuyuan/kubernetes,tcnghia/kubernetes,jagosan/kubernetes,yujuhong/kubernetes,njuicsgz/kubernetes-1,alejandroEsc/kubernetes,wenlxie/kubernetes,soltysh/kubernetes,brendandburns/kubernetes,cadmuxe/kubernetes,kow3ns/kubernetes,mboersma/kubernetes,shyamjvs/kubernetes,Lion-Wei/kubernetes,lichuqiang/kubernetes,brendandburns/kubernetes,sethpollack/kubernetes,micahhausler/kubernetes,verult/kubernetes,cofyc/kubernetes,Clarifai/kubernetes,zetaab/kubernetes,cblecker/kubernetes,liggitt/kubernetes,ravilr/kubernetes,kpgriffith/kubernetes,lojies/kubernetes,wongma7/kubernetes,tengqm/kubernetes,x13n/kubernetes,mboersma/kubernetes,Hui-Zhi/kubernetes,ravilr/kubernetes,zetaab/kubernetes,mml/kubernetes,andrewrynhard/kubernetes,davidz627/kubernetes,LalatenduMohanty/kubernetes,fanzhangio/kubernetes,xychu/kubernetes,gnufied/kubernetes,jennybuckley/kubernetes,kevin-wangzefeng/kubernetes,stevesloka/kubernetes,verb/kubernetes,carlory/kubernetes,Hui-Zhi/kubernetes,andrewsykim/kubernetes,j
ackfrancis/kubernetes,tpepper/kubernetes,joelsmith/kubernetes,tcnghia/kubernetes,kevensen/kubernetes,fabriziopandini/kubernetes,pmorie/kubernetes,cmluciano/kubernetes,pwittrock/kubernetes,openshift/kubernetes,maciaszczykm/kubernetes,jdef/kubernetes,cofyc/kubernetes,pires/kubernetes,hex108/kubernetes,iterion/kubernetes,pires/kubernetes,enj/kubernetes,jdef/kubernetes,cantbewong/kubernetes,u2takey/kubernetes,fgimenez/kubernetes,rrati/kubernetes,kevin-wangzefeng/kubernetes,wanghaoran1988/kubernetes,deads2k/kubernetes,tomerf/kubernetes,BenTheElder/kubernetes,davidz627/kubernetes,intelsdi-x/kubernetes,sethpollack/kubernetes,zhangmingld/kubernetes,lichuqiang/kubernetes,shyamjvs/kubernetes,bparees/kubernetes,rafax/kubernetes,gnufied/kubernetes,u2takey/kubernetes,idvoretskyi/kubernetes,k82cn/kubernetes,frodenas/kubernetes,hex108/kubernetes,krmayankk/kubernetes,shyamjvs/kubernetes,erwinvaneyk/kubernetes,nikhita/kubernetes,cblecker/kubernetes,pmorie/kubernetes,pwittrock/kubernetes,tengqm/kubernetes,krmayankk/kubernetes,saad-ali/kubernetes,mikebrow/kubernetes,mYmNeo/kubernetes,bparees/kubernetes,GulajavaMinistudio/kubernetes,saad-ali/kubernetes,mboersma/kubernetes,PiotrProkop/kubernetes,lichuqiang/kubernetes,kitt1987/kubernetes,bparees/kubernetes,nikhita/kubernetes,tengqm/kubernetes,feiskyer/kubernetes,derekwaynecarr/kubernetes,ravisantoshgudimetla/kubernetes,hex108/kubernetes,feiskyer/kubernetes,khenidak/kubernetes,BenTheElder/kubernetes,li-ang/kubernetes,tcnghia/kubernetes,cofyc/kubernetes,soltysh/kubernetes,khenidak/kubernetes,sethpollack/kubernetes,brendandburns/kubernetes,tomerf/kubernetes,mfojtik/kubernetes,njuicsgz/kubernetes-1,huangjiuyuan/kubernetes,mfojtik/kubernetes,jsafrane/kubernetes,LalatenduMohanty/kubernetes,sdminonne/kubernetes,cadmuxe/kubernetes,csrwng/kubernetes,joelsmith/kubernetes,fabriziopandini/kubernetes,cantbewong/kubernetes,huangjiuyuan/kubernetes,mml/kubernetes,tomzhang/kubernetes,mfojtik/kubernetes,zetaab/kubernetes,fanzhangio/kubernetes,soltysh/kube
rnetes,ping035627/kubernetes,kevin-wangzefeng/kubernetes,johscheuer/kubernetes,sethpollack/kubernetes,njuicsgz/kubernetes-1,fgimenez/kubernetes,monopole/kubernetes,warmchang/kubernetes,MikeSpreitzer/kubernetes,chestack/kubernetes,mYmNeo/kubernetes,intelsdi-x/kubernetes,GulajavaMinistudio/kubernetes,fejta/kubernetes,iterion/kubernetes,davidz627/kubernetes,mikebrow/kubernetes,verb/kubernetes,dlorenc/kubernetes,davidz627/kubernetes,sdminonne/kubernetes,derekwaynecarr/kubernetes,jdef/kubernetes,aledbf/kubernetes,thockin/kubernetes,MikeSpreitzer/kubernetes,huangjiuyuan/kubernetes,idvoretskyi/kubernetes,pires/kubernetes,pwittrock/kubernetes,ixdy/kubernetes,frodenas/kubernetes,MHBauer/kubernetes,soltysh/kubernetes,u2takey/kubernetes,bizhao/kubernetes,dixudx/kubernetes,verb/kubernetes,intelsdi-x/kubernetes,Acidburn0zzz/kubernetes,jackfrancis/kubernetes,tallclair/kubernetes,Lion-Wei/kubernetes,deads2k/kubernetes,kow3ns/kubernetes,fejta/kubernetes,zhouhaibing089/kubernetes,olivierlemasle/kubernetes,wenlxie/kubernetes,fgimenez/kubernetes,kow3ns/kubernetes,thockin/kubernetes,kpgriffith/kubernetes,sdminonne/kubernetes,coolsvap/kubernetes,tpepper/kubernetes,pwittrock/kubernetes,zetaab/kubernetes,jfrazelle/kubernetes,hex108/kubernetes,jackfrancis/kubernetes,erwinvaneyk/kubernetes,LalatenduMohanty/kubernetes,coolsvap/kubernetes,mikebrow/kubernetes,dims/kubernetes,xlgao-zju/kubernetes,k82/kubernetes,zhangmingld/kubernetes,wongma7/kubernetes,tpepper/kubernetes,nikhita/kubernetes,u2takey/kubernetes,feiskyer/kubernetes,mahak/kubernetes,saad-ali/kubernetes,JacobTanenbaum/kubernetes,derekwaynecarr/kubernetes,DiamantiCom/kubernetes,iameli/kubernetes,cofyc/kubernetes,jfrazelle/kubernetes,MHBauer/kubernetes,rafax/kubernetes,mikedanese/kubernetes,deads2k/kubernetes,MikeSpreitzer/kubernetes,tcnghia/kubernetes,luxas/kubernetes,tpepper/kubernetes,du2016/kubernetes,sanjeevm0/kubernetes,yarntime/kubernetes,kevensen/kubernetes,ixdy/kubernetes,jingxu97/kubernetes,spzala/kubernetes,bizhao/kubernetes
,monopole/kubernetes,njuicsgz/kubernetes-1,zhangmingld/kubernetes,bizhao/kubernetes,jennybuckley/kubernetes,deads2k/kubernetes,bizhao/kubernetes,u2takey/kubernetes,linux-on-ibm-z/kubernetes,cantbewong/kubernetes,dlorenc/kubernetes,fabriziopandini/kubernetes,hex108/kubernetes,coolsvap/kubernetes,JacobTanenbaum/kubernetes,x13n/kubernetes,coolsvap/kubernetes,klaus1982/kubernetes,wongma7/kubernetes,jessfraz/kubernetes,stevesloka/kubernetes,yarntime/kubernetes,nckturner/kubernetes,linux-on-ibm-z/kubernetes,jdef/kubernetes,tpepper/kubernetes,erwinvaneyk/kubernetes,lichuqiang/kubernetes,frodenas/kubernetes,k82/kubernetes,roberthbailey/kubernetes,linux-on-ibm-z/kubernetes,brendandburns/kubernetes,andrewrynhard/kubernetes,kevensen/kubernetes,Clarifai/kubernetes,mikedanese/kubernetes,gnufied/kubernetes,matthyx/kubernetes,GulajavaMinistudio/kubernetes,openshift/kubernetes,aveshagarwal/kubernetes,wenlxie/kubernetes,Clarifai/kubernetes,Clarifai/kubernetes,verult/kubernetes,ingvagabund/kubernetes,khenidak/kubernetes,ConnorDoyle/kubernetes,dlorenc/kubernetes,PI-Victor/kubernetes,jfrazelle/kubernetes,BenTheElder/kubernetes,pmorie/kubernetes,ravisantoshgudimetla/kubernetes,kitt1987/kubernetes,kitt1987/kubernetes,nikhita/kubernetes,davidz627/kubernetes,nckturner/kubernetes,kubernetes/kubernetes,aledbf/kubernetes,jennybuckley/kubernetes,mikedanese/kubernetes,pwittrock/kubernetes,PI-Victor/kubernetes,mosoft521/kubernetes,Acidburn0zzz/kubernetes,tpepper/kubernetes,olivierlemasle/kubernetes,tomzhang/kubernetes,JacobTanenbaum/kubernetes,andrewsykim/kubernetes,huangjiuyuan/kubernetes,aveshagarwal/kubernetes,yujuhong/kubernetes,luxas/kubernetes,cantbewong/kubernetes,saad-ali/kubernetes,mengqiy/kubernetes,maciaszczykm/kubernetes,fabriziopandini/kubernetes,shyamjvs/kubernetes,mengqiy/kubernetes,ConnorDoyle/kubernetes,thockin/kubernetes,fejta/kubernetes,ping035627/kubernetes,MikeSpreitzer/kubernetes,tomzhang/kubernetes,mahak/kubernetes,ConnorDoyle/kubernetes,nak3/kubernetes,verult/kubernetes,p
ires/kubernetes,kow3ns/kubernetes,sanjeevm0/kubernetes,xlgao-zju/kubernetes,ravilr/kubernetes,njuicsgz/kubernetes-1,MHBauer/kubernetes,zhangmingld/kubernetes,tengqm/kubernetes,yarntime/kubernetes,warmchang/kubernetes,fabriziopandini/kubernetes,pires/kubernetes,mosoft521/kubernetes,lichuqiang/kubernetes,tomzhang/kubernetes,x13n/kubernetes,frodenas/kubernetes,ConnorDoyle/kubernetes,luxas/kubernetes,jessfraz/kubernetes,ixdy/kubernetes,jsafrane/kubernetes,fanzhangio/kubernetes,huangjiuyuan/kubernetes,tomerf/kubernetes,khenidak/kubernetes,spzala/kubernetes,jennybuckley/kubernetes,luxas/kubernetes,krmayankk/kubernetes,kow3ns/kubernetes,khenidak/kubernetes,aveshagarwal/kubernetes,fejta/kubernetes,li-ang/kubernetes,weiwei04/kubernetes,weiwei04/kubernetes,jdef/kubernetes,k82cn/kubernetes,openshift/kubernetes,erwinvaneyk/kubernetes,ingvagabund/kubernetes,dims/kubernetes,kitt1987/kubernetes,jingxu97/kubernetes,fgimenez/kubernetes,cadmuxe/kubernetes,aledbf/kubernetes,aveshagarwal/kubernetes,pweil-/kubernetes,mboersma/kubernetes,cmluciano/kubernetes,rrati/kubernetes,derekwaynecarr/kubernetes,pires/kubernetes,kubernetes/kubernetes,sethpollack/kubernetes,rnaveiras/kubernetes,li-ang/kubernetes,ping035627/kubernetes,jessfraz/kubernetes,alejandroEsc/kubernetes,kevin-wangzefeng/kubernetes,luxas/kubernetes,dereknex/kubernetes,xlgao-zju/kubernetes,zhouhaibing089/kubernetes,stevesloka/kubernetes,openshift/kubernetes,xychu/kubernetes,kubernetes/kubernetes,johscheuer/kubernetes,pmorie/kubernetes,jsafrane/kubernetes,mosoft521/kubernetes,mml/kubernetes,csrwng/kubernetes,Hui-Zhi/kubernetes,idvoretskyi/kubernetes,cadmuxe/kubernetes,matthyx/kubernetes,khenidak/kubernetes,joelsmith/kubernetes,ixdy/kubernetes,sanjeevm0/kubernetes,krmayankk/kubernetes,ingvagabund/kubernetes,yujuhong/kubernetes,dixudx/kubernetes,liggitt/kubernetes,Acidburn0zzz/kubernetes,jsafrane/kubernetes,verult/kubernetes,PI-Victor/kubernetes,csrwng/kubernetes,enj/kubernetes,idvoretskyi/kubernetes,DiamantiCom/kubernetes,njuicsgz
/kubernetes-1,matthyx/kubernetes,mosoft521/kubernetes,monopole/kubernetes,andrewrynhard/kubernetes,tengqm/kubernetes,kpgriffith/kubernetes,tomzhang/kubernetes,fgimenez/kubernetes,yujuhong/kubernetes,jessfraz/kubernetes,PiotrProkop/kubernetes,csrwng/kubernetes,monopole/kubernetes,ingvagabund/kubernetes,xlgao-zju/kubernetes,nak3/kubernetes,pweil-/kubernetes,DiamantiCom/kubernetes,tomzhang/kubernetes,jfrazelle/kubernetes,tomzhang/kubernetes,ravisantoshgudimetla/kubernetes,mkumatag/kubernetes,coolsvap/kubernetes,warmchang/kubernetes,Hui-Zhi/kubernetes,nckturner/kubernetes,lojies/kubernetes,mikebrow/kubernetes,csrwng/kubernetes,Clarifai/kubernetes,mYmNeo/kubernetes,ConnorDoyle/kubernetes,xlgao-zju/kubernetes,xychu/kubernetes,bparees/kubernetes,du2016/kubernetes,mkumatag/kubernetes,jingxu97/kubernetes,olivierlemasle/kubernetes,roberthbailey/kubernetes,iameli/kubernetes,zhangmingld/kubernetes,PI-Victor/kubernetes
--- +++ @@ -16,7 +16,7 @@ import boilerplate import unittest -import StringIO +from io import StringIO import os import sys
d6b3c47169082eeee6f1f01458b8791de2573849
kolibri/plugins/management/kolibri_plugin.py
kolibri/plugins/management/kolibri_plugin.py
from __future__ import absolute_import, print_function, unicode_literals from kolibri.plugins.base import KolibriFrontEndPluginBase class ManagementModule(KolibriFrontEndPluginBase): """ The Management module. """ entry_file = "assets/src/management.js" base_url = "management" template = "management/management.html" def nav_items(self): return ( { "url": "foo/bar", "text": "Management Foo!" }, ) def user_nav_items(self): return ( { "url": "learners", "text": "Learner Management" }, ) PLUGINS = ( ManagementModule, )
from __future__ import absolute_import, print_function, unicode_literals from kolibri.core.webpack import hooks as webpack_hooks from kolibri.plugins.base import KolibriPluginBase class ManagementPlugin(KolibriPluginBase): """ Required boilerplate so that the module is recognized as a plugin """ pass class ManagementAsset(webpack_hooks.WebpackBundleHook): unique_slug = "management_module" src_file = "kolibri/plugins/management/assets/src/management.js" static_dir = "kolibri/plugins/management/static" class ManagementInclusionHook(webpack_hooks.FrontEndBaseSyncHook): bundle_class = ManagementAsset
Use new plugin classes for management
Use new plugin classes for management
Python
mit
66eli77/kolibri,learningequality/kolibri,indirectlylit/kolibri,lyw07/kolibri,jtamiace/kolibri,learningequality/kolibri,aronasorman/kolibri,jamalex/kolibri,christianmemije/kolibri,rtibbles/kolibri,benjaoming/kolibri,jtamiace/kolibri,jayoshih/kolibri,MingDai/kolibri,DXCanas/kolibri,jamalex/kolibri,rtibbles/kolibri,mrpau/kolibri,mrpau/kolibri,aronasorman/kolibri,whitzhu/kolibri,66eli77/kolibri,lyw07/kolibri,MCGallaspy/kolibri,jtamiace/kolibri,mrpau/kolibri,jamalex/kolibri,benjaoming/kolibri,DXCanas/kolibri,jtamiace/kolibri,christianmemije/kolibri,christianmemije/kolibri,jonboiser/kolibri,DXCanas/kolibri,learningequality/kolibri,lyw07/kolibri,ralphiee22/kolibri,indirectlylit/kolibri,MingDai/kolibri,mrpau/kolibri,rtibbles/kolibri,jonboiser/kolibri,ralphiee22/kolibri,ralphiee22/kolibri,lyw07/kolibri,MCGallaspy/kolibri,whitzhu/kolibri,MingDai/kolibri,MingDai/kolibri,MCGallaspy/kolibri,indirectlylit/kolibri,indirectlylit/kolibri,jayoshih/kolibri,jonboiser/kolibri,66eli77/kolibri,whitzhu/kolibri,jonboiser/kolibri,christianmemije/kolibri,learningequality/kolibri,jayoshih/kolibri,rtibbles/kolibri,benjaoming/kolibri,66eli77/kolibri,aronasorman/kolibri,jamalex/kolibri,ralphiee22/kolibri,aronasorman/kolibri,DXCanas/kolibri,benjaoming/kolibri,whitzhu/kolibri,jayoshih/kolibri
--- +++ @@ -1,36 +1,19 @@ - from __future__ import absolute_import, print_function, unicode_literals -from kolibri.plugins.base import KolibriFrontEndPluginBase +from kolibri.core.webpack import hooks as webpack_hooks +from kolibri.plugins.base import KolibriPluginBase -class ManagementModule(KolibriFrontEndPluginBase): - """ - The Management module. - """ - entry_file = "assets/src/management.js" - - base_url = "management" - - template = "management/management.html" - - def nav_items(self): - return ( - { - "url": "foo/bar", - "text": "Management Foo!" - }, - ) - - def user_nav_items(self): - return ( - { - "url": "learners", - "text": "Learner Management" - }, - ) +class ManagementPlugin(KolibriPluginBase): + """ Required boilerplate so that the module is recognized as a plugin """ + pass -PLUGINS = ( - ManagementModule, -) +class ManagementAsset(webpack_hooks.WebpackBundleHook): + unique_slug = "management_module" + src_file = "kolibri/plugins/management/assets/src/management.js" + static_dir = "kolibri/plugins/management/static" + + +class ManagementInclusionHook(webpack_hooks.FrontEndBaseSyncHook): + bundle_class = ManagementAsset
7b73d73b7b61830b955f7ec686570c7371bb16d1
comics/crawler/utils/lxmlparser.py
comics/crawler/utils/lxmlparser.py
#encoding: utf-8 from lxml.html import parse, fromstring class LxmlParser(object): def __init__(self, url=None, string=None): if url: self.root = parse(url).getroot() self.root.make_links_absolute(url) elif string: self.root = fromstring(string) def text(self, selector): return self.select(selector).text_content() def src(self, selector): return self.select(selector).get('src') def alt(self, selector): return self.select(selector).get('alt') def title(self, selector): return self.select(selector).get('title') def remove(self, selector): for element in self.root.cssselect(selector): element.drop_tree() def select(self, selector): elements = self.root.cssselect(selector) if len(elements) == 0: raise DoesNotExist('Noting matched the selector: %s' % selector) elif len(elements) > 1: raise MultipleElementsReturned('Selector matched %d elements: %s' % (len(elements), selector)) return elements[0] class DoesNotExist(Exception): pass class MultipleElementsReturned(Exception): pass
#encoding: utf-8 from lxml.html import parse, fromstring class LxmlParser(object): def __init__(self, url=None, string=None): if url is not None: self.root = parse(url).getroot() self.root.make_links_absolute(url) elif string is not None: self.root = fromstring(string) else: raise LxmlParserException() def text(self, selector): return self.select(selector).text_content() def src(self, selector): return self.select(selector).get('src') def alt(self, selector): return self.select(selector).get('alt') def title(self, selector): return self.select(selector).get('title') def remove(self, selector): for element in self.root.cssselect(selector): element.drop_tree() def select(self, selector): elements = self.root.cssselect(selector) if len(elements) == 0: raise DoesNotExist('Noting matched the selector: %s' % selector) elif len(elements) > 1: raise MultipleElementsReturned('Selector matched %d elements: %s' % (len(elements), selector)) return elements[0] class LxmlParserException(Exception): pass class DoesNotExist(LxmlParserException): pass class MultipleElementsReturned(LxmlParserException): pass
Update exception handling in LxmlParser
Update exception handling in LxmlParser Signed-off-by: Stein Magnus Jodal <e14d2e665cf0bcfd7f54daa10a36c228abaf843a@jodal.no>
Python
agpl-3.0
datagutten/comics,jodal/comics,datagutten/comics,klette/comics,jodal/comics,jodal/comics,klette/comics,datagutten/comics,jodal/comics,datagutten/comics,klette/comics
--- +++ @@ -4,11 +4,13 @@ class LxmlParser(object): def __init__(self, url=None, string=None): - if url: + if url is not None: self.root = parse(url).getroot() self.root.make_links_absolute(url) - elif string: + elif string is not None: self.root = fromstring(string) + else: + raise LxmlParserException() def text(self, selector): return self.select(selector).text_content() @@ -37,8 +39,11 @@ return elements[0] -class DoesNotExist(Exception): +class LxmlParserException(Exception): pass -class MultipleElementsReturned(Exception): +class DoesNotExist(LxmlParserException): pass + +class MultipleElementsReturned(LxmlParserException): + pass
62586dc0e4e9ca8d0fee6c72e296c74875f3a65c
api/swd6/api/app.py
api/swd6/api/app.py
import logging import os import flask import flask_cors from sqlalchemy_jsonapi import flaskext as flask_jsonapi from swd6 import config from swd6.db.models import db CONF = config.CONF DEFAULT_CONF_PATH = '/opt/swd6/api/api.conf' app = None def start(): # pylint: disable=global-statement global app app = flask.Flask(__name__) app.config['DEBUG'] = True app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True app.config['SQLALCHEMY_DATABASE_URI'] = CONF.db.uri app.config['SERVER_NAME'] = CONF.api.host app.logger.setLevel(logging.DEBUG) flask_cors.CORS(app, origins=CONF.api.cors_hosts) logging.getLogger('flask_cors').level = logging.DEBUG db.init_app(app) flask_jsonapi.FlaskJSONAPI(app, db, options={'dasherize': False, 'include_fk_columns': True}) return app logging.basicConfig(level=logging.DEBUG) if os.path.exists(DEFAULT_CONF_PATH): config_files = [DEFAULT_CONF_PATH] else: config_files = [] config.load([], default_config_files=config_files) start()
import logging import os import flask import flask_cors from sqlalchemy_jsonapi import flaskext as flask_jsonapi from swd6 import config from swd6.db.models import db CONF = config.CONF DEFAULT_CONF_PATH = '/opt/swd6/api/api.conf' app = None def start(): # pylint: disable=global-statement global app app = flask.Flask(__name__) app.config['DEBUG'] = True app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True app.config['SQLALCHEMY_DATABASE_URI'] = CONF.db.uri app.config['SERVER_NAME'] = CONF.api.host app.logger.setLevel(logging.DEBUG) flask_cors.CORS(app, origins=CONF.api.cors_hosts, supports_credentials=True) logging.getLogger('flask_cors').level = logging.DEBUG db.init_app(app) flask_jsonapi.FlaskJSONAPI(app, db, options={'dasherize': False, 'include_fk_columns': True}) return app logging.basicConfig(level=logging.DEBUG) if os.path.exists(DEFAULT_CONF_PATH): config_files = [DEFAULT_CONF_PATH] else: config_files = [] config.load([], default_config_files=config_files) start()
Fix CORS to allow for credentials
Fix CORS to allow for credentials Something changed in the client code requiring this setting.
Python
apache-2.0
jimbobhickville/swd6,jimbobhickville/swd6,jimbobhickville/swd6
--- +++ @@ -24,7 +24,7 @@ app.logger.setLevel(logging.DEBUG) - flask_cors.CORS(app, origins=CONF.api.cors_hosts) + flask_cors.CORS(app, origins=CONF.api.cors_hosts, supports_credentials=True) logging.getLogger('flask_cors').level = logging.DEBUG
072d5bf150ff3f8d743a84c636929e7a326bf8ea
src/python/tensorflow_cloud/tuner/constants.py
src/python/tensorflow_cloud/tuner/constants.py
# Lint as: python3 # Copyright 2020 Google LLC. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Constants definitions for tuner sub module.""" # API definition of Cloud AI Platform Optimizer service OPTIMIZER_API_DOCUMENT_FILE = "api/ml_public_google_rest_v1.json" # By default, the Tuner worker(s) always requests one trial at a time because # we would parallelize the tuning loop themselves as opposed to getting multiple # trial suggestions in one tuning loop. SUGGESTION_COUNT_PER_REQUEST = 1 # Number of tries to retry getting study if it was already created NUM_TRIES_FOR_STUDIES = 3
# Lint as: python3 # Copyright 2020 Google LLC. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Constants definitions for tuner sub module.""" import os # API definition of Cloud AI Platform Optimizer service OPTIMIZER_API_DOCUMENT_FILE = os.path.join( os.path.dirname(os.path.abspath(__file__)), "api/ml_public_google_rest_v1.json") # By default, the Tuner worker(s) always requests one trial at a time because # we would parallelize the tuning loop themselves as opposed to getting multiple # trial suggestions in one tuning loop. SUGGESTION_COUNT_PER_REQUEST = 1 # Number of tries to retry getting study if it was already created NUM_TRIES_FOR_STUDIES = 3
Fix path to API doc
Fix path to API doc
Python
apache-2.0
tensorflow/cloud,tensorflow/cloud
--- +++ @@ -14,8 +14,12 @@ # limitations under the License. """Constants definitions for tuner sub module.""" +import os + # API definition of Cloud AI Platform Optimizer service -OPTIMIZER_API_DOCUMENT_FILE = "api/ml_public_google_rest_v1.json" +OPTIMIZER_API_DOCUMENT_FILE = os.path.join( + os.path.dirname(os.path.abspath(__file__)), + "api/ml_public_google_rest_v1.json") # By default, the Tuner worker(s) always requests one trial at a time because # we would parallelize the tuning loop themselves as opposed to getting multiple
5a6ff9a69a2d769f6ac363f20afb89a23dd2290d
homeassistant/components/device_tracker/mqtt.py
homeassistant/components/device_tracker/mqtt.py
""" homeassistant.components.device_tracker.mqtt ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ MQTT platform for the device tracker. device_tracker: platform: mqtt qos: 1 devices: paulus_oneplus: /location/paulus annetherese_n4: /location/annetherese """ import logging from homeassistant import util import homeassistant.components.mqtt as mqtt DEPENDENCIES = ['mqtt'] CONF_QOS = 'qos' CONF_DEVICES = 'devices' DEFAULT_QOS = 0 _LOGGER = logging.getLogger(__name__) def setup_scanner(hass, config, see): """ Set up a MQTT tracker. """ devices = config.get(CONF_DEVICES) qos = util.convert(config.get(CONF_QOS), int, DEFAULT_QOS) if not isinstance(devices, dict): _LOGGER.error('Expected %s to be a dict, found %s', CONF_DEVICES, devices) return False dev_id_lookup = {} def device_tracker_message_received(topic, payload, qos): """ MQTT message received. """ see(dev_id=dev_id_lookup[topic], location_name=payload) for dev_id, topic in devices.items(): dev_id_lookup[topic] = dev_id mqtt.subscribe(hass, topic, device_tracker_message_received, qos) return True
""" homeassistant.components.device_tracker.mqtt ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ MQTT platform for the device tracker. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/device_tracker.mqtt.html """ import logging from homeassistant import util import homeassistant.components.mqtt as mqtt DEPENDENCIES = ['mqtt'] CONF_QOS = 'qos' CONF_DEVICES = 'devices' DEFAULT_QOS = 0 _LOGGER = logging.getLogger(__name__) def setup_scanner(hass, config, see): """ Set up a MQTT tracker. """ devices = config.get(CONF_DEVICES) qos = util.convert(config.get(CONF_QOS), int, DEFAULT_QOS) if not isinstance(devices, dict): _LOGGER.error('Expected %s to be a dict, found %s', CONF_DEVICES, devices) return False dev_id_lookup = {} def device_tracker_message_received(topic, payload, qos): """ MQTT message received. """ see(dev_id=dev_id_lookup[topic], location_name=payload) for dev_id, topic in devices.items(): dev_id_lookup[topic] = dev_id mqtt.subscribe(hass, topic, device_tracker_message_received, qos) return True
Move configuration details to docs
Move configuration details to docs
Python
mit
emilhetty/home-assistant,mikaelboman/home-assistant,alexmogavero/home-assistant,devdelay/home-assistant,nevercast/home-assistant,shaftoe/home-assistant,srcLurker/home-assistant,tboyce021/home-assistant,instantchow/home-assistant,DavidLP/home-assistant,Julian/home-assistant,jnewland/home-assistant,florianholzapfel/home-assistant,open-homeautomation/home-assistant,balloob/home-assistant,sander76/home-assistant,devdelay/home-assistant,jaharkes/home-assistant,jamespcole/home-assistant,sffjunkie/home-assistant,Duoxilian/home-assistant,robbiet480/home-assistant,xifle/home-assistant,kennedyshead/home-assistant,philipbl/home-assistant,Julian/home-assistant,sfam/home-assistant,jnewland/home-assistant,robjohnson189/home-assistant,MartinHjelmare/home-assistant,aoakeson/home-assistant,deisi/home-assistant,coteyr/home-assistant,MartinHjelmare/home-assistant,Zac-HD/home-assistant,Smart-Torvy/torvy-home-assistant,mikaelboman/home-assistant,hmronline/home-assistant,sffjunkie/home-assistant,deisi/home-assistant,open-homeautomation/home-assistant,ewandor/home-assistant,varunr047/homefile,ct-23/home-assistant,deisi/home-assistant,balloob/home-assistant,deisi/home-assistant,shaftoe/home-assistant,badele/home-assistant,LinuxChristian/home-assistant,LinuxChristian/home-assistant,xifle/home-assistant,robjohnson189/home-assistant,justyns/home-assistant,miniconfig/home-assistant,philipbl/home-assistant,philipbl/home-assistant,fbradyirl/home-assistant,pschmitt/home-assistant,Zac-HD/home-assistant,mKeRix/home-assistant,GenericStudent/home-assistant,auduny/home-assistant,ma314smith/home-assistant,dmeulen/home-assistant,oandrew/home-assistant,kyvinh/home-assistant,mezz64/home-assistant,morphis/home-assistant,hexxter/home-assistant,keerts/home-assistant,HydrelioxGitHub/home-assistant,dmeulen/home-assistant,leoc/home-assistant,mezz64/home-assistant,Cinntax/home-assistant,ewandor/home-assistant,tchellomello/home-assistant,jamespcole/home-assistant,jaharkes/home-assistant,nugget/home-assistant,sold
ag/home-assistant,jaharkes/home-assistant,w1ll1am23/home-assistant,tinloaf/home-assistant,fbradyirl/home-assistant,HydrelioxGitHub/home-assistant,mikaelboman/home-assistant,leoc/home-assistant,adrienbrault/home-assistant,Smart-Torvy/torvy-home-assistant,kyvinh/home-assistant,hmronline/home-assistant,Zyell/home-assistant,oandrew/home-assistant,nkgilley/home-assistant,qedi-r/home-assistant,titilambert/home-assistant,rohitranjan1991/home-assistant,ma314smith/home-assistant,ma314smith/home-assistant,Theb-1/home-assistant,Cinntax/home-assistant,philipbl/home-assistant,bdfoster/blumate,happyleavesaoc/home-assistant,oandrew/home-assistant,emilhetty/home-assistant,Duoxilian/home-assistant,rohitranjan1991/home-assistant,sffjunkie/home-assistant,stefan-jonasson/home-assistant,DavidLP/home-assistant,luxus/home-assistant,miniconfig/home-assistant,stefan-jonasson/home-assistant,betrisey/home-assistant,shaftoe/home-assistant,pottzer/home-assistant,srcLurker/home-assistant,open-homeautomation/home-assistant,srcLurker/home-assistant,morphis/home-assistant,aequitas/home-assistant,varunr047/homefile,home-assistant/home-assistant,miniconfig/home-assistant,miniconfig/home-assistant,ct-23/home-assistant,jawilson/home-assistant,home-assistant/home-assistant,stefan-jonasson/home-assistant,PetePriority/home-assistant,persandstrom/home-assistant,lukas-hetzenecker/home-assistant,Smart-Torvy/torvy-home-assistant,coteyr/home-assistant,florianholzapfel/home-assistant,tboyce1/home-assistant,balloob/home-assistant,aoakeson/home-assistant,varunr047/homefile,molobrakos/home-assistant,jaharkes/home-assistant,sffjunkie/home-assistant,coteyr/home-assistant,qedi-r/home-assistant,tboyce1/home-assistant,Duoxilian/home-assistant,nugget/home-assistant,MungoRae/home-assistant,FreekingDean/home-assistant,keerts/home-assistant,kyvinh/home-assistant,Duoxilian/home-assistant,hexxter/home-assistant,Julian/home-assistant,mKeRix/home-assistant,bdfoster/blumate,robbiet480/home-assistant,pottzer/home-assistant,Mungo
Rae/home-assistant,florianholzapfel/home-assistant,betrisey/home-assistant,lukas-hetzenecker/home-assistant,instantchow/home-assistant,toddeye/home-assistant,happyleavesaoc/home-assistant,ct-23/home-assistant,stefan-jonasson/home-assistant,Teagan42/home-assistant,caiuspb/home-assistant,happyleavesaoc/home-assistant,emilhetty/home-assistant,tboyce1/home-assistant,luxus/home-assistant,emilhetty/home-assistant,varunr047/homefile,joopert/home-assistant,auduny/home-assistant,turbokongen/home-assistant,persandstrom/home-assistant,devdelay/home-assistant,badele/home-assistant,hexxter/home-assistant,aoakeson/home-assistant,oandrew/home-assistant,xifle/home-assistant,robjohnson189/home-assistant,persandstrom/home-assistant,PetePriority/home-assistant,nnic/home-assistant,sdague/home-assistant,deisi/home-assistant,florianholzapfel/home-assistant,MungoRae/home-assistant,MungoRae/home-assistant,happyleavesaoc/home-assistant,Danielhiversen/home-assistant,ma314smith/home-assistant,robjohnson189/home-assistant,emilhetty/home-assistant,mikaelboman/home-assistant,adrienbrault/home-assistant,nnic/home-assistant,aronsky/home-assistant,kyvinh/home-assistant,JshWright/home-assistant,hmronline/home-assistant,nevercast/home-assistant,betrisey/home-assistant,dmeulen/home-assistant,molobrakos/home-assistant,sfam/home-assistant,mKeRix/home-assistant,justyns/home-assistant,keerts/home-assistant,PetePriority/home-assistant,JshWright/home-assistant,jawilson/home-assistant,Zyell/home-assistant,varunr047/homefile,ewandor/home-assistant,keerts/home-assistant,turbokongen/home-assistant,pottzer/home-assistant,Theb-1/home-assistant,Teagan42/home-assistant,w1ll1am23/home-assistant,nkgilley/home-assistant,devdelay/home-assistant,auduny/home-assistant,DavidLP/home-assistant,jnewland/home-assistant,ct-23/home-assistant,shaftoe/home-assistant,srcLurker/home-assistant,eagleamon/home-assistant,sfam/home-assistant,GenericStudent/home-assistant,sffjunkie/home-assistant,tinloaf/home-assistant,nugget/home-assist
ant,eagleamon/home-assistant,Danielhiversen/home-assistant,tchellomello/home-assistant,alexmogavero/home-assistant,mKeRix/home-assistant,ct-23/home-assistant,Zac-HD/home-assistant,bdfoster/blumate,joopert/home-assistant,morphis/home-assistant,tinloaf/home-assistant,MungoRae/home-assistant,jabesq/home-assistant,badele/home-assistant,leppa/home-assistant,LinuxChristian/home-assistant,leoc/home-assistant,partofthething/home-assistant,toddeye/home-assistant,xifle/home-assistant,tboyce1/home-assistant,HydrelioxGitHub/home-assistant,partofthething/home-assistant,jabesq/home-assistant,eagleamon/home-assistant,JshWright/home-assistant,Theb-1/home-assistant,aequitas/home-assistant,molobrakos/home-assistant,leppa/home-assistant,fbradyirl/home-assistant,dmeulen/home-assistant,alexmogavero/home-assistant,hmronline/home-assistant,eagleamon/home-assistant,pschmitt/home-assistant,luxus/home-assistant,caiuspb/home-assistant,Smart-Torvy/torvy-home-assistant,postlund/home-assistant,LinuxChristian/home-assistant,nevercast/home-assistant,nnic/home-assistant,LinuxChristian/home-assistant,sdague/home-assistant,tboyce021/home-assistant,morphis/home-assistant,open-homeautomation/home-assistant,jabesq/home-assistant,titilambert/home-assistant,instantchow/home-assistant,Zac-HD/home-assistant,postlund/home-assistant,mikaelboman/home-assistant,bdfoster/blumate,sander76/home-assistant,betrisey/home-assistant,JshWright/home-assistant,FreekingDean/home-assistant,alexmogavero/home-assistant,aronsky/home-assistant,aequitas/home-assistant,bdfoster/blumate,leoc/home-assistant,MartinHjelmare/home-assistant,Julian/home-assistant,jamespcole/home-assistant,hexxter/home-assistant,caiuspb/home-assistant,rohitranjan1991/home-assistant,Zyell/home-assistant,hmronline/home-assistant,kennedyshead/home-assistant,soldag/home-assistant,justyns/home-assistant
--- +++ @@ -1,15 +1,10 @@ """ homeassistant.components.device_tracker.mqtt ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - MQTT platform for the device tracker. -device_tracker: - platform: mqtt - qos: 1 - devices: - paulus_oneplus: /location/paulus - annetherese_n4: /location/annetherese +For more details about this platform, please refer to the documentation at +https://home-assistant.io/components/device_tracker.mqtt.html """ import logging from homeassistant import util
f9b9023549adf4ee9923ac8ed4b6a0fc0b6a89a5
core/management/commands/delete_old_sessions.py
core/management/commands/delete_old_sessions.py
from datetime import datetime from django.core.management.base import BaseCommand from django.contrib.sessions.models import Session """ >>> def clean(count): ... for idx, s in enumerate(Session.objects.filter(expire_date__lt=now)[:count+1]): ... s.delete() ... if str(idx).endswith('000'): print idx ... print "{0} records left".format(Session.objects.filter(expire_date__lt=now).count()) ... """ class Command(NoArgsCommand): args = '<count count ...>' help = "Delete old sessions" def handle(self, *args, **options): old_sessions = Session.objects.filter(expire_date__lt=datetime.now()) self.stdout.write("Deleting {0} expired sessions".format( old_sessions.count() ) ) for index, session in enumerate(old_sessions): session.delete() if str(idx).endswith('000'): self.stdout.write("{0} records deleted".format(index) self.stdout.write("{0} expired sessions remaining".format( Session.objects.filter(expire_date__lt=datetime.now()) ) )
from datetime import datetime from django.core.management.base import BaseCommand from django.contrib.sessions.models import Session """ >>> def clean(count): ... for idx, s in enumerate(Session.objects.filter(expire_date__lt=now)[:count+1]): ... s.delete() ... if str(idx).endswith('000'): print idx ... print "{0} records left".format(Session.objects.filter(expire_date__lt=now).count()) ... """ class Command(NoArgsCommand): args = '<count count ...>' help = "Delete old sessions" def handle(self, *args, **options): old_sessions = Session.objects.filter(expire_date__lt=datetime.now()) self.stdout.write("Deleting {0} expired sessions".format( old_sessions.count() ) ) for index, session in enumerate(old_sessions): session.delete() if str(idx).endswith('000'): self.stdout.write("{0} records deleted".format(index)) self.stdout.write("{0} expired sessions remaining".format( Session.objects.filter(expire_date__lt=datetime.now()) ) )
Add delete old sessions command
Add delete old sessions command
Python
mit
QLGu/djangopackages,QLGu/djangopackages,pydanny/djangopackages,pydanny/djangopackages,QLGu/djangopackages,pydanny/djangopackages,nanuxbe/djangopackages,nanuxbe/djangopackages,nanuxbe/djangopackages
--- +++ @@ -28,7 +28,7 @@ for index, session in enumerate(old_sessions): session.delete() if str(idx).endswith('000'): - self.stdout.write("{0} records deleted".format(index) + self.stdout.write("{0} records deleted".format(index)) self.stdout.write("{0} expired sessions remaining".format( Session.objects.filter(expire_date__lt=datetime.now())
6a07b94f9c84741fcc399f9dee3945d0339b19e0
download.py
download.py
import youtube_dl, os from multiprocessing.pool import ThreadPool from youtube_dl.utils import DownloadError from datetime import datetime from uuid import uuid4 class Download: link = "" done = False error = False started = None uuid = "" total = 0 finished = 0 title = "" def __init__(self, link): self.link = link self.started = datetime.now() self.uuid = str(uuid4()) def download(self): curr_path = os.path.dirname(os.path.abspath(__file__)) output_path = curr_path + "/downloads/" + self.uuid + "/%(title)s-%(id)s.%(ext)s" try: youtube_dl._real_main(["--yes-playlist", "-R", "10", "-x", "--audio-format", "mp3", "--output", output_path, "--restrict-filenames", "-v", self.link]) except DownloadError: self.error = True finally: self.done = True def start(self): pool = ThreadPool() pool.apply_async(self.download)
import youtube_dl, os from multiprocessing.pool import ThreadPool from youtube_dl.utils import DownloadError from datetime import datetime from uuid import uuid4 class Download: link = "" done = False error = False started = None uuid = "" total = 0 finished = 0 title = "" def __init__(self, link): self.link = link self.started = datetime.now() self.uuid = str(uuid4()) def download(self): curr_path = os.path.dirname(os.path.abspath(__file__)) output_path = curr_path + "/downloads/" + self.uuid + "/%(title)s-%(id)s.%(ext)s" try: youtube_dl._real_main(["--yes-playlist", "-R", "10", "-x", "--audio-format", "mp3", "--output", output_path, "--restrict-filenames", "-v", self.link]) except DownloadError: self.error = True finally: self.done = True def get_files(self): file_path = os.path.dirname(os.path.abspath(__file__)) + "/downloads/" + self.uuid return [f for f in os.listdir(file_path) if os.isfile(os.join(file_path, f))] def start(self): pool = ThreadPool() pool.apply_async(self.download)
Add function to get files for playlist
Add function to get files for playlist
Python
mit
pielambr/PLDownload,pielambr/PLDownload
--- +++ @@ -32,6 +32,10 @@ finally: self.done = True + def get_files(self): + file_path = os.path.dirname(os.path.abspath(__file__)) + "/downloads/" + self.uuid + return [f for f in os.listdir(file_path) if os.isfile(os.join(file_path, f))] + def start(self): pool = ThreadPool() pool.apply_async(self.download)
181c80532d54f2cccf092f8785be0604fda3b99d
derrida/__init__.py
derrida/__init__.py
__version_info__ = (1, 3, 0, 'dev') # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],)) # context processor to add version to the template environment def context_extras(request): return { # software version 'SW_VERSION': __version__ }
__version_info__ = (1, 2, 3, None) # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],)) # context processor to add version to the template environment def context_extras(request): return { # software version 'SW_VERSION': __version__ }
Set version to 1.2.3 release
Set version to 1.2.3 release
Python
apache-2.0
Princeton-CDH/derrida-django,Princeton-CDH/derrida-django,Princeton-CDH/derrida-django,Princeton-CDH/derrida-django
--- +++ @@ -1,4 +1,4 @@ -__version_info__ = (1, 3, 0, 'dev') +__version_info__ = (1, 2, 3, None) # Dot-connect all but the last. Last is dash-connected if not None.
4eeec96f3c79b9584278639293631ab787132f67
custom/ewsghana/reminders/third_soh_reminder.py
custom/ewsghana/reminders/third_soh_reminder.py
from corehq.apps.locations.models import SQLLocation from corehq.apps.users.models import CommCareUser from custom.ewsghana.reminders.second_soh_reminder import SecondSOHReminder class ThirdSOHReminder(SecondSOHReminder): def get_users_messages(self): for sql_location in SQLLocation.objects.filter(domain=self.domain, location_type__administrative=False): in_charges = sql_location.facilityincharge_set.all() message, kwargs = self.get_message_for_location(sql_location.couch_location) for in_charge in in_charges: user = CommCareUser.get_by_user_id(in_charge.user_id, self.domain) if not user.get_verified_number(): continue kwargs['name'] = user.name if message: yield user.get_verified_number(), message % kwargs
from corehq.apps.locations.dbaccessors import get_web_users_by_location from corehq.apps.locations.models import SQLLocation from corehq.apps.reminders.util import get_preferred_phone_number_for_recipient from corehq.apps.users.models import CommCareUser from custom.ewsghana.reminders.second_soh_reminder import SecondSOHReminder from custom.ewsghana.utils import send_sms, has_notifications_enabled from dimagi.utils.couch.database import iter_docs class ThirdSOHReminder(SecondSOHReminder): def get_users_messages(self): for sql_location in SQLLocation.objects.filter(domain=self.domain, location_type__administrative=False): in_charges = map(CommCareUser.wrap, iter_docs( CommCareUser.get_db(), [in_charge.user_id for in_charge in sql_location.facilityincharge_set.all()] )) web_users = [ web_user for web_user in get_web_users_by_location(self.domain, sql_location.location_id) if has_notifications_enabled(self.domain, web_user) ] message, kwargs = self.get_message_for_location(sql_location.couch_location) for user in web_users + in_charges: phone_number = get_preferred_phone_number_for_recipient(user) if not phone_number: continue kwargs['name'] = user.full_name if message: yield user, phone_number, message % kwargs def send(self): for user, phone_number, message in self.get_users_messages(): send_sms(self.domain, user, phone_number, message)
Send third soh also to web users
Send third soh also to web users
Python
bsd-3-clause
qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
--- +++ @@ -1,20 +1,36 @@ +from corehq.apps.locations.dbaccessors import get_web_users_by_location from corehq.apps.locations.models import SQLLocation +from corehq.apps.reminders.util import get_preferred_phone_number_for_recipient from corehq.apps.users.models import CommCareUser from custom.ewsghana.reminders.second_soh_reminder import SecondSOHReminder +from custom.ewsghana.utils import send_sms, has_notifications_enabled +from dimagi.utils.couch.database import iter_docs class ThirdSOHReminder(SecondSOHReminder): def get_users_messages(self): for sql_location in SQLLocation.objects.filter(domain=self.domain, location_type__administrative=False): - in_charges = sql_location.facilityincharge_set.all() + in_charges = map(CommCareUser.wrap, iter_docs( + CommCareUser.get_db(), + [in_charge.user_id for in_charge in sql_location.facilityincharge_set.all()] + )) + web_users = [ + web_user + for web_user in get_web_users_by_location(self.domain, sql_location.location_id) + if has_notifications_enabled(self.domain, web_user) + ] message, kwargs = self.get_message_for_location(sql_location.couch_location) - for in_charge in in_charges: - user = CommCareUser.get_by_user_id(in_charge.user_id, self.domain) - if not user.get_verified_number(): + for user in web_users + in_charges: + phone_number = get_preferred_phone_number_for_recipient(user) + if not phone_number: continue - kwargs['name'] = user.name + kwargs['name'] = user.full_name if message: - yield user.get_verified_number(), message % kwargs + yield user, phone_number, message % kwargs + + def send(self): + for user, phone_number, message in self.get_users_messages(): + send_sms(self.domain, user, phone_number, message)
859cd49b628bb430a721ba89883c3a0efbbbdbbc
tensorflow/python/autograph/core/config.py
tensorflow/python/autograph/core/config.py
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Global configuration.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.python.autograph.core import config_lib Action = config_lib.Action Convert = config_lib.Convert DoNotConvert = config_lib.DoNotConvert # This list is evaluated in order and stops at the first rule that tests True # for a definitely_convert of definitely_bypass call. CONVERSION_RULES = ( Convert('tensorflow.python.data.ops'), DoNotConvert('tensorflow'), # TODO(b/133417201): Remove. DoNotConvert('tensorflow_probability'), # TODO(b/130313089): Remove. DoNotConvert('numpy'), DoNotConvert('threading'), )
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Global configuration.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.python.autograph.core import config_lib Action = config_lib.Action Convert = config_lib.Convert DoNotConvert = config_lib.DoNotConvert # This list is evaluated in order and stops at the first rule that tests True # for a definitely_convert of definitely_bypass call. CONVERSION_RULES = ( DoNotConvert('tensorflow'), # TODO(b/133417201): Remove. DoNotConvert('tensorflow_probability'), # TODO(b/130313089): Remove. DoNotConvert('numpy'), DoNotConvert('threading'), )
Fix breakage: conversion of tf.data was allowed too soon and broke the autograph notebook.
Fix breakage: conversion of tf.data was allowed too soon and broke the autograph notebook. PiperOrigin-RevId: 250764059
Python
apache-2.0
renyi533/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,annarev/tensorflow,ppwwyyxx/tensorflow,DavidNorman/tensorflow,xzturn/tensorflow,arborh/tensorflow,gunan/tensorflow,frreiss/tensorflow-fred,arborh/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,frreiss/tensorflow-fred,gunan/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,annarev/tensorflow,adit-chandra/tensorflow,freedomtan/tensorflow,sarvex/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow-fred,jhseu/tensorflow,aldian/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow,Intel-Corporation/tensorflow,freedomtan/tensorflow,gunan/tensorflow,arborh/tensorflow,jhseu/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gunan/tensorflow,adit-chandra/tensorflow,DavidNorman/tensorflow,Intel-Corporation/tensorflow,chemelnucfin/tensorflow,annarev/tensorflow,sarvex/tensorflow,DavidNorman/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,aldian/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,petewarden/tensorflow,tensorflow/tensorflow-pywrap_saved_model,alsrgv/tensorflow,renyi533/tensorflow,alsrgv/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,arborh/tensorflow,arborh/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow,Intel-tensorflow/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow,freedomtan/tensorflow,alsrgv/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,paolodedios/tensorflow,renyi533/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow,ppwwyyxx/tensorflow,tensorflow/tensorflow,chemelnucfin/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,jhseu/tensorflow,davidzchen/tensorflow,jhseu/tensorflow,petewarden/ten
sorflow,tensorflow/tensorflow-pywrap_tf_optimizer,aam-at/tensorflow,ghchinoy/tensorflow,karllessard/tensorflow,ppwwyyxx/tensorflow,aam-at/tensorflow,aam-at/tensorflow,aam-at/tensorflow,xzturn/tensorflow,tensorflow/tensorflow-pywrap_saved_model,chemelnucfin/tensorflow,davidzchen/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,freedomtan/tensorflow,gunan/tensorflow,tensorflow/tensorflow-pywrap_saved_model,jhseu/tensorflow,ghchinoy/tensorflow,chemelnucfin/tensorflow,alsrgv/tensorflow,freedomtan/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow-pywrap_saved_model,alsrgv/tensorflow,sarvex/tensorflow,freedomtan/tensorflow,petewarden/tensorflow,aldian/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,xzturn/tensorflow,adit-chandra/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,xzturn/tensorflow,arborh/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,aam-at/tensorflow,aam-at/tensorflow,tensorflow/tensorflow,ghchinoy/tensorflow,petewarden/tensorflow,DavidNorman/tensorflow,adit-chandra/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,ghchinoy/tensorflow,adit-chandra/tensorflow,ghchinoy/tensorflow,gautam1858/tensorflow,adit-chandra/tensorflow,frreiss/tensorflow-fred,Intel-tensorflow/tensorflow,chemelnucfin/tensorflow,karllessard/tensorflow,ghchinoy/tensorflow,tensorflow/tensorflow-pywrap_saved_model,ghchinoy/tensorflow,annarev/tensorflow,alsrgv/tensorflow,aldian/tensorflow,petewarden/tensorflow,chemelnucfin/tensorflow,aldian/tensorflow,ppwwyyxx/tensorflow,DavidNorman/tensorflow,frreiss/tensorflow-fred,cxxgtxy/tensorflow,cxxgtxy/tensorflow,sarvex/tensorflow,cxxgtxy/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,ppwwyyxx/tensorflow,alsrgv/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,yongtang/tensorflow,renyi533/tensorflow,ghchinoy/tensorflow,petewarden/tensorflow,paolodedios/tensorflow,annarev/tensorflow,r
enyi533/tensorflow,gunan/tensorflow,chemelnucfin/tensorflow,ppwwyyxx/tensorflow,frreiss/tensorflow-fred,petewarden/tensorflow,petewarden/tensorflow,jhseu/tensorflow,karllessard/tensorflow,gunan/tensorflow,ppwwyyxx/tensorflow,jhseu/tensorflow,Intel-tensorflow/tensorflow,ghchinoy/tensorflow,karllessard/tensorflow,DavidNorman/tensorflow,arborh/tensorflow,alsrgv/tensorflow,ghchinoy/tensorflow,cxxgtxy/tensorflow,frreiss/tensorflow-fred,renyi533/tensorflow,annarev/tensorflow,petewarden/tensorflow,ppwwyyxx/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,xzturn/tensorflow,aam-at/tensorflow,petewarden/tensorflow,ghchinoy/tensorflow,annarev/tensorflow,adit-chandra/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,ppwwyyxx/tensorflow,freedomtan/tensorflow,freedomtan/tensorflow,freedomtan/tensorflow,arborh/tensorflow,jhseu/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,xzturn/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,paolodedios/tensorflow,renyi533/tensorflow,karllessard/tensorflow,ghchinoy/tensorflow,adit-chandra/tensorflow,DavidNorman/tensorflow,jhseu/tensorflow,xzturn/tensorflow,yongtang/tensorflow,arborh/tensorflow,davidzchen/tensorflow,Intel-tensorflow/tensorflow,freedomtan/tensorflow,gautam1858/tensorflow,sarvex/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,davidzchen/tensorflow,yongtang/tensorflow,chemelnucfin/tensorflow,xzturn/tensorflow,chemelnucfin/tensorflow,yongtang/tensorflow,renyi533/tensorflow,Intel-tensorflow/tensorflow,gunan/tensorflow,davidzchen/tensorflow,frreiss/tensorflow-fred,arborh/tensorflow,Intel-Corporation/tensorflow,yongtang/tensorflow,DavidNorman/tensorflow,davidzchen/tensorflow,xzturn/tensorflow,arborh/tensorflow,cxxgtxy/tensorflow,aam-at/tensorflow,Intel-tensorflow/tensorflow,ppwwyyxx/tensorflow,aldian/tensorflow,petewarden/tensorflow,davidzchen/tensorflow,chemelnucfin/tensorflow,tensorf
low/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,adit-chandra/tensorflow,annarev/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gunan/tensorflow,renyi533/tensorflow,gunan/tensorflow,sarvex/tensorflow,jhseu/tensorflow,annarev/tensorflow,annarev/tensorflow,yongtang/tensorflow,aam-at/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,xzturn/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,alsrgv/tensorflow,jhseu/tensorflow,cxxgtxy/tensorflow,freedomtan/tensorflow,DavidNorman/tensorflow,xzturn/tensorflow,xzturn/tensorflow,gautam1858/tensorflow,renyi533/tensorflow,gunan/tensorflow,paolodedios/tensorflow,sarvex/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,aldian/tensorflow,alsrgv/tensorflow,adit-chandra/tensorflow,Intel-Corporation/tensorflow,alsrgv/tensorflow,DavidNorman/tensorflow,aam-at/tensorflow,arborh/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,chemelnucfin/tensorflow,DavidNorman/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,ppwwyyxx/tensorflow,aldian/tensorflow,chemelnucfin/tensorflow,paolodedios/tensorflow,freedomtan/tensorflow,annarev/tensorflow,adit-chandra/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,jhseu/tensorflow,frreiss/tensorflow-fred,DavidNorman/tensorflow,ppwwyyxx/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,aam-at/tensorflow,renyi533/tensorflow,cxxgtxy/tensorflow,renyi533/tensorflow,aam-at/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,adit-chandra/tensorflow,Intel-Corporation/tensorflow,cxxgtxy/tensorflow,petewarden/tensorflow,alsrgv/tensorflow,gunan/tensorflow,sarvex/tensorflow
--- +++ @@ -28,8 +28,6 @@ # This list is evaluated in order and stops at the first rule that tests True # for a definitely_convert of definitely_bypass call. CONVERSION_RULES = ( - Convert('tensorflow.python.data.ops'), - DoNotConvert('tensorflow'), # TODO(b/133417201): Remove.
172372000f121b31daa0965dca3bf28976b6cba9
aiodocker/exceptions.py
aiodocker/exceptions.py
class DockerError(Exception): def __init__(self, status, data, *args): super().__init__(*args) self.status = status self.message = data['message'] def __repr__(self): return 'DockerError({self.status}, {self.message!r})'.format(self=self) def __str__(self): return 'DockerError({self.status}, {self.message!r})'.format(self=self) class DockerContainerError(DockerError): def __init__(self, status, data, container_id, *args): super().__init__(status, data, *args) self.container_id = container_id def __repr__(self): return 'DockerContainerError({self.status}, {self.message!r}, {self.container_id!r})'.format(self=self) def __str__(self): return 'DockerContainerError({self.status}, {self.message!r}, {self.container_id!r})'.format(self=self)
class DockerError(Exception): def __init__(self, status, data, *args): super().__init__(*args) self.status = status self.message = data['message'] def __repr__(self): return 'DockerError({self.status}, {self.message!r})'.format(self=self) def __str__(self): return 'DockerError({self.status}, {self.message!r})'.format(self=self) class DockerContainerError(DockerError): def __init__(self, status, data, container_id, *args): super().__init__(status, data, *args) self.container_id = container_id def __repr__(self): return ('DockerContainerError(' '{self.status}, {self.message!r}, ' '{self.container_id!r})').format(self=self) def __str__(self): return ('DockerContainerError(' '{self.status}, {self.message!r}, ' '{self.container_id!r})').format(self=self)
Fix flake8 error (too long line)
Fix flake8 error (too long line)
Python
mit
barrachri/aiodocker,gaopeiliang/aiodocker,paultag/aiodocker,barrachri/aiodocker,gaopeiliang/aiodocker,barrachri/aiodocker,gaopeiliang/aiodocker
--- +++ @@ -19,7 +19,11 @@ self.container_id = container_id def __repr__(self): - return 'DockerContainerError({self.status}, {self.message!r}, {self.container_id!r})'.format(self=self) + return ('DockerContainerError(' + '{self.status}, {self.message!r}, ' + '{self.container_id!r})').format(self=self) def __str__(self): - return 'DockerContainerError({self.status}, {self.message!r}, {self.container_id!r})'.format(self=self) + return ('DockerContainerError(' + '{self.status}, {self.message!r}, ' + '{self.container_id!r})').format(self=self)
540273ac75880925934e69275c9da1de61fbd699
PyBingWallpaper.py
PyBingWallpaper.py
#! /usr/bin/python3 import win32gui from urllib.request import urlopen, urlretrieve from xml.dom import minidom from PIL import Image import os #Variables: saveDir = 'C:\BingWallPaper\\' i = 0 while i<1: try: usock = urlopen('http://www.bing.com/HPImageArchive.aspx?format=xml&idx=0&n=1&mkt=zh-CN') except: i = 0 else: i = 1 xmldoc = minidom.parse(usock) num = 1 #Parsing the XML File for element in xmldoc.getElementsByTagName('url'): url = 'http://www.bing.com' + element.firstChild.nodeValue #Get Current Date as fileName for the downloaded Picture picPath = saveDir + 'bingwallpaper' + '%d'%num + '.jpg' urlretrieve(url, picPath) #Convert Image picData = Image.open(picPath) picData.save(picPath.replace('jpg','bmp')) picPath = picPath.replace('jpg','bmp') num = num+1 #Set Wallpaper: win32gui.SystemParametersInfo(0x0014, picPath, 1+2)
#! /usr/bin/python3 import win32gui from urllib.request import urlopen, urlretrieve from xml.dom import minidom from PIL import Image import os if __name__=="__main__": #Variables: saveDir = "C:\\BingWallPaper\\" if (not os.path.exists(saveDir)): os.mkdir(saveDir) i = 0 while i<1: try: usock = urlopen('http://www.bing.com/HPImageArchive.aspx?format=xml&idx=0&n=1&mkt=zh-CN') except: i = 0 else: i = 1 xmldoc = minidom.parse(usock) num = 1 #Parsing the XML File for element in xmldoc.getElementsByTagName('url'): url = 'http://www.bing.com' + element.firstChild.nodeValue #Get Current Date as fileName for the downloaded Picture picPath = saveDir + 'bingwallpaper' + '%d'%num + '.jpg' urlretrieve(url, picPath) #Convert Image picData = Image.open(picPath) picData.save(picPath.replace('jpg','bmp')) picPath = picPath.replace('jpg','bmp') num = num+1 #Set Wallpaper: win32gui.SystemParametersInfo(0x0014, picPath, 1+2)
Create directory in case not exist
Create directory in case not exist
Python
mit
adamadanandy/PyBingWallpaper
--- +++ @@ -6,30 +6,35 @@ from PIL import Image import os - -#Variables: -saveDir = 'C:\BingWallPaper\\' -i = 0 -while i<1: - try: - usock = urlopen('http://www.bing.com/HPImageArchive.aspx?format=xml&idx=0&n=1&mkt=zh-CN') - except: - i = 0 - else: - i = 1 -xmldoc = minidom.parse(usock) -num = 1 -#Parsing the XML File -for element in xmldoc.getElementsByTagName('url'): - url = 'http://www.bing.com' + element.firstChild.nodeValue - - #Get Current Date as fileName for the downloaded Picture - picPath = saveDir + 'bingwallpaper' + '%d'%num + '.jpg' - urlretrieve(url, picPath) - #Convert Image - picData = Image.open(picPath) - picData.save(picPath.replace('jpg','bmp')) - picPath = picPath.replace('jpg','bmp') - num = num+1 -#Set Wallpaper: -win32gui.SystemParametersInfo(0x0014, picPath, 1+2) +if __name__=="__main__": + #Variables: + saveDir = "C:\\BingWallPaper\\" + + if (not os.path.exists(saveDir)): + os.mkdir(saveDir) + + i = 0 + while i<1: + try: + usock = urlopen('http://www.bing.com/HPImageArchive.aspx?format=xml&idx=0&n=1&mkt=zh-CN') + except: + i = 0 + else: + i = 1 + xmldoc = minidom.parse(usock) + num = 1 + + #Parsing the XML File + for element in xmldoc.getElementsByTagName('url'): + url = 'http://www.bing.com' + element.firstChild.nodeValue + + #Get Current Date as fileName for the downloaded Picture + picPath = saveDir + 'bingwallpaper' + '%d'%num + '.jpg' + urlretrieve(url, picPath) + #Convert Image + picData = Image.open(picPath) + picData.save(picPath.replace('jpg','bmp')) + picPath = picPath.replace('jpg','bmp') + num = num+1 + #Set Wallpaper: + win32gui.SystemParametersInfo(0x0014, picPath, 1+2)
e752a0ab47da9d9b34b5ce6f5cd40ac98977ec6e
symUtil.py
symUtil.py
import os import re def mkdir_p(path): if not os.path.exists(path): os.makedirs(path) def GetSymbolFileName(libName): # Guess the name of the .sym file on disk if libName[-4:] == ".pdb": return re.sub(r"\.[^\.]+$", ".sym", libName) return libName + ".sym"
import os def mkdir_p(path): if not os.path.exists(path): os.makedirs(path) def GetSymbolFileName(libName): # Guess the name of the .sym file on disk if libName[-4:] == ".pdb": return libName[:-4] + ".sym" return libName + ".sym"
Refactor out the re. It's not necessary to regex the replacement of an explicitly checked string in an explicit location. This should be simpler.
Refactor out the re. It's not necessary to regex the replacement of an explicitly checked string in an explicit location. This should be simpler.
Python
mpl-2.0
bytesized/Snappy-Symbolication-Server
--- +++ @@ -1,5 +1,4 @@ import os -import re def mkdir_p(path): if not os.path.exists(path): @@ -8,6 +7,6 @@ def GetSymbolFileName(libName): # Guess the name of the .sym file on disk if libName[-4:] == ".pdb": - return re.sub(r"\.[^\.]+$", ".sym", libName) + return libName[:-4] + ".sym" return libName + ".sym"
2b1cd9a58aa51ef53996dc1897a7a0e50f29d7ca
isitopenaccess/plugins/bmc.py
isitopenaccess/plugins/bmc.py
import requests from copy import deepcopy from datetime import datetime from isitopenaccess.plugins import string_matcher def page_license(record): """ To respond to the provider identifier: http://www.biomedcentral.com This should determine the licence conditions of the BMC article and populate the record['bibjson']['license'] (note the US spelling) field. """ # licensing statements to look for on this publisher's pages # take the form of {statement: meaning} # where meaning['type'] identifies the license (see licenses.py) # and meaning['version'] identifies the license version (if available) lic_statements = [ {"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (<a href='http://creativecommons.org/licenses/by/2.0'>http://creativecommons.org/licenses/by/2.0</a>), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.": {'type': 'cc-by', 'version':'2.0', 'open_access': True, 'BY': True, 'NC': False, 'SA': False, 'ND': False} } ] string_matcher.simple_extract(lic_statements, record)
import requests from copy import deepcopy from datetime import datetime from isitopenaccess.plugins import string_matcher def page_license(record): """ To respond to the provider identifier: http://www.biomedcentral.com This should determine the licence conditions of the BMC article and populate the record['bibjson']['license'] (note the US spelling) field. """ # licensing statements to look for on this publisher's pages # take the form of {statement: meaning} # where meaning['type'] identifies the license (see licenses.py) # and meaning['version'] identifies the license version (if available) lic_statements = [ {"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (<a href='http://creativecommons.org/licenses/by/2.0'>http://creativecommons.org/licenses/by/2.0</a>), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.": {'type': 'cc-by', 'version':'2.0', 'open_access': True, 'BY': True, 'NC': False, 'SA': False, 'ND': False, # also declare some properties which override info about this license in the licenses list (see licenses module) 'url': 'http://creativecommons.org/licenses/by/2.0'} } ] string_matcher.simple_extract(lic_statements, record)
ADD MISSING FILE TO PREV COMMIT "modify BMC plugin: overwrite URL for CC-BY license. We have a MORE specific URL (from the license statement on the BMC pages) than the Open Definition one"
ADD MISSING FILE TO PREV COMMIT "modify BMC plugin: overwrite URL for CC-BY license. We have a MORE specific URL (from the license statement on the BMC pages) than the Open Definition one"
Python
bsd-3-clause
CottageLabs/OpenArticleGauge,CottageLabs/OpenArticleGauge,CottageLabs/OpenArticleGauge
--- +++ @@ -18,7 +18,9 @@ # and meaning['version'] identifies the license version (if available) lic_statements = [ {"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (<a href='http://creativecommons.org/licenses/by/2.0'>http://creativecommons.org/licenses/by/2.0</a>), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.": - {'type': 'cc-by', 'version':'2.0', 'open_access': True, 'BY': True, 'NC': False, 'SA': False, 'ND': False} + {'type': 'cc-by', 'version':'2.0', 'open_access': True, 'BY': True, 'NC': False, 'SA': False, 'ND': False, + # also declare some properties which override info about this license in the licenses list (see licenses module) + 'url': 'http://creativecommons.org/licenses/by/2.0'} } ]