commit
stringlengths
40
40
old_file
stringlengths
4
150
new_file
stringlengths
4
150
old_contents
stringlengths
0
3.26k
new_contents
stringlengths
1
4.43k
subject
stringlengths
15
501
message
stringlengths
15
4.06k
lang
stringclasses
4 values
license
stringclasses
13 values
repos
stringlengths
5
91.5k
diff
stringlengths
0
4.35k
837df000bc86d6bd9c0101f13e3ef18d2873750b
furious/_pkg_meta.py
furious/_pkg_meta.py
version_info = (1, 3, 0) version = '.'.join(map(str, version_info))
version_info = (1, 4, 0) version = '.'.join(map(str, version_info))
Update the version to 1.4.0 for the latest release
Update the version to 1.4.0 for the latest release
Python
apache-2.0
andreleblanc-wf/furious,andreleblanc-wf/furious,Workiva/furious,mattsanders-wf/furious,mattsanders-wf/furious,beaulyddon-wf/furious,beaulyddon-wf/furious,Workiva/furious
--- +++ @@ -1,2 +1,2 @@ -version_info = (1, 3, 0) +version_info = (1, 4, 0) version = '.'.join(map(str, version_info))
3ba6ad452d747135134ec23d62d0b327708e8ef5
python/setup.py
python/setup.py
from setuptools import setup # This is the atca_rapid_response_api Python library. # Jamie Stevens 2017 # ATCA Senior Systems Scientist # Jamie.Stevens@csiro.au setup(name='atca_rapid_response_api', version='1.0', description='ATCA Rapid Response Mode API', url='https://github.com/ste616/atca-rapid-response-api', author='Jamie Stevens', author_email='Jamie.Stevens@csiro.au', license='MIT', packages=[ 'atca_rapid_response_api' ], install_requires=[ 'requests' ], zip_safe=False) # Changelog:
from setuptools import setup # This is the atca_rapid_response_api Python library. # Jamie Stevens 2017 # ATCA Senior Systems Scientist # Jamie.Stevens@csiro.au setup(name='atca_rapid_response_api', version='1.1', description='ATCA Rapid Response Mode API', url='https://github.com/ste616/atca-rapid-response-api', author='Jamie Stevens', author_email='Jamie.Stevens@csiro.au', license='MIT', packages=[ 'atca_rapid_response_api' ], install_requires=[ 'requests' ], zip_safe=False) # Changelog: # 2017-04-04: Prevented some SSL checks, because namoi's SSL is still a little # flaky.
Update the library version to 1.1 for the recent change.
Update the library version to 1.1 for the recent change.
Python
mit
ste616/atca-rapid-response-api
--- +++ @@ -6,7 +6,7 @@ # Jamie.Stevens@csiro.au setup(name='atca_rapid_response_api', - version='1.0', + version='1.1', description='ATCA Rapid Response Mode API', url='https://github.com/ste616/atca-rapid-response-api', author='Jamie Stevens', @@ -19,3 +19,5 @@ zip_safe=False) # Changelog: +# 2017-04-04: Prevented some SSL checks, because namoi's SSL is still a little +# flaky.
8a9a2a860772ec71c517c4ae4dd93aa4ab2ea342
git_gutter_events.py
git_gutter_events.py
import sublime import sublime_plugin import view_collection class GitGutterEvents(sublime_plugin.EventListener): def on_new(self, view): view_collection.ViewCollection.add(view) def on_load(self, view): view_collection.ViewCollection.add(view) def on_modified(self, view): view_collection.ViewCollection.add(view) def on_clone(self, view): view_collection.ViewCollection.add(view)
import sublime import sublime_plugin import view_collection class GitGutterEvents(sublime_plugin.EventListener): def on_load(self, view): view_collection.ViewCollection.add(view) def on_modified(self, view): if view.settings().get('git_gutter_live_mode', True): view_collection.ViewCollection.add(view) def on_clone(self, view): view_collection.ViewCollection.add(view) def on_post_save(self, view): view_collection.ViewCollection.add(view)
Add settings to turn off live mode
Add settings to turn off live mode
Python
mit
michaelhogg/GitGutter,akpersad/GitGutter,bradsokol/VcsGutter,biodamasceno/GitGutter,robfrawley/sublime-git-gutter,natecavanaugh/GitGutter,natecavanaugh/GitGutter,biodamasceno/GitGutter,michaelhogg/GitGutter,robfrawley/sublime-git-gutter,tushortz/GitGutter,akpersad/GitGutter,ariofrio/VcsGutter,tushortz/GitGutter,natecavanaugh/GitGutter,akpersad/GitGutter,robfrawley/sublime-git-gutter,biodamasceno/GitGutter,natecavanaugh/GitGutter,ariofrio/VcsGutter,akpersad/GitGutter,robfrawley/sublime-git-gutter,michaelhogg/GitGutter,tushortz/GitGutter,bradsokol/VcsGutter,michaelhogg/GitGutter,tushortz/GitGutter,jisaacks/GitGutter,biodamasceno/GitGutter
--- +++ @@ -3,14 +3,16 @@ import view_collection class GitGutterEvents(sublime_plugin.EventListener): - def on_new(self, view): - view_collection.ViewCollection.add(view) def on_load(self, view): view_collection.ViewCollection.add(view) def on_modified(self, view): - view_collection.ViewCollection.add(view) + if view.settings().get('git_gutter_live_mode', True): + view_collection.ViewCollection.add(view) def on_clone(self, view): view_collection.ViewCollection.add(view) + + def on_post_save(self, view): + view_collection.ViewCollection.add(view)
fb87194d6409149e71d6ee52c620fe04f8ca482f
scrapi/processing/osf/collision.py
scrapi/processing/osf/collision.py
from __future__ import unicode_literals import requests from scrapi import settings from scrapi.processing.osf.hashing import REPORT_HASH_FUNCTIONS from scrapi.processing.osf.hashing import RESOURCE_HASH_FUNCTIONS def detect_collisions(hashlist, additional=''): uuids = 'uuid:{}'.format(','.join(hashlist)) url = '{}?q={}{}'.format(settings.OSF_APP_URL, uuids, additional) ret = requests.get(url, auth=settings.OSF_AUTH, verify=settings.VERIFY_SSL).json() if ret['total'] > 0: return ret['results'][0]['guid'] return None def generate_hash_list(normalized, hashes): hashlist = [] for hashfunc in hashes: hashlist.append(hashfunc(normalized)) return hashlist def generate_resource_hash_list(normalized): return generate_hash_list(normalized.attributes, RESOURCE_HASH_FUNCTIONS) def generate_report_hash_list(normalized): return generate_hash_list(normalized.attributes, REPORT_HASH_FUNCTIONS)
from __future__ import unicode_literals import json import requests from scrapi import settings from scrapi.processing.osf.hashing import REPORT_HASH_FUNCTIONS from scrapi.processing.osf.hashing import RESOURCE_HASH_FUNCTIONS def detect_collisions(hashlist, is_resource=False): if is_resource: _filter = { 'terms': { 'uuid': hashlist } } else: _filter = { 'and': [ { 'missing': { 'field': 'pid', 'existence': True, 'null_value': True } }, { 'terms': { 'uuid': hashlist } } ] } query = { 'query': { 'filtered': { 'filter': _filter } } } kwargs = { 'auth': settings.OSF_AUTH, 'verify': settings.VERIFY_SSL, 'data': json.dumps(query), 'headers': { 'Content-Type': 'application/json' } } ret = requests.post(settings.OSF_APP_URL, **kwargs).json() if ret['total'] > 0: return ret['results'][0]['attached']['nid'] return None def generate_hash_list(normalized, hashes): hashlist = [] for hashfunc in hashes: hashlist.append(hashfunc(normalized)) return hashlist def generate_resource_hash_list(normalized): return generate_hash_list(normalized.attributes, RESOURCE_HASH_FUNCTIONS) def generate_report_hash_list(normalized): return generate_hash_list(normalized.attributes, REPORT_HASH_FUNCTIONS)
Update to the latest osf API
Update to the latest osf API
Python
apache-2.0
felliott/scrapi,erinspace/scrapi,CenterForOpenScience/scrapi,jeffreyliu3230/scrapi,erinspace/scrapi,mehanig/scrapi,ostwald/scrapi,CenterForOpenScience/scrapi,icereval/scrapi,fabianvf/scrapi,mehanig/scrapi,alexgarciac/scrapi,felliott/scrapi,fabianvf/scrapi
--- +++ @@ -1,4 +1,6 @@ from __future__ import unicode_literals + +import json import requests @@ -7,14 +9,51 @@ from scrapi.processing.osf.hashing import RESOURCE_HASH_FUNCTIONS -def detect_collisions(hashlist, additional=''): - uuids = 'uuid:{}'.format(','.join(hashlist)) - url = '{}?q={}{}'.format(settings.OSF_APP_URL, uuids, additional) +def detect_collisions(hashlist, is_resource=False): + if is_resource: + _filter = { + 'terms': { + 'uuid': hashlist + } + } + else: + _filter = { + 'and': [ + { + 'missing': { + 'field': 'pid', + 'existence': True, + 'null_value': True + } + }, + { + 'terms': { + 'uuid': hashlist + } + } + ] + } - ret = requests.get(url, auth=settings.OSF_AUTH, verify=settings.VERIFY_SSL).json() + query = { + 'query': { + 'filtered': { + 'filter': _filter + } + } + } + kwargs = { + 'auth': settings.OSF_AUTH, + 'verify': settings.VERIFY_SSL, + 'data': json.dumps(query), + 'headers': { + 'Content-Type': 'application/json' + } + } + + ret = requests.post(settings.OSF_APP_URL, **kwargs).json() if ret['total'] > 0: - return ret['results'][0]['guid'] + return ret['results'][0]['attached']['nid'] return None
a39c288e9beb506d62de66d72fc95750e54d833b
social_core/backends/universe.py
social_core/backends/universe.py
from .oauth import BaseOAuth2 class UniverseOAuth2(BaseOAuth2): """Universe Ticketing OAuth2 authentication backend""" name = 'universe' AUTHORIZATION_URL = 'https://www.universe.com/oauth/authorize' ACCESS_TOKEN_URL = 'https://www.universe.com/oauth/token' BASE_API_URL = 'https://www.universe.com/api' USER_INFO_URL = BASE_API_URL + '/v2/current_user' ACCESS_TOKEN_METHOD = 'POST' STATE_PARAMETER = True REDIRECT_STATE = True EXTRA_DATA = [ ('id', 'id'), ('slug', 'slug'), ('created_at', 'created_at'), ('updated_at', 'updated_at'), ] def get_user_id(self, details, response): return response['current_user'][self.ID_KEY] def get_user_details(self, response): """Return user details from a Universe account""" # Start with the user data as it was returned user_details = response['current_user'] user_details["username"] = user_details["email"] return user_details def user_data(self, access_token, *args, **kwargs): """Loads user data from service""" return self.get_json(self.USER_INFO_URL, headers={'Authorization': f'Bearer {access_token}'})
from .oauth import BaseOAuth2 class UniverseOAuth2(BaseOAuth2): """Universe Ticketing OAuth2 authentication backend""" name = 'universe' AUTHORIZATION_URL = 'https://www.universe.com/oauth/authorize' ACCESS_TOKEN_URL = 'https://www.universe.com/oauth/token' BASE_API_URL = 'https://www.universe.com/api' USER_INFO_URL = BASE_API_URL + '/v2/current_user' ACCESS_TOKEN_METHOD = 'POST' STATE_PARAMETER = True REDIRECT_STATE = True EXTRA_DATA = [ ('id', 'id'), ('slug', 'slug'), ('created_at', 'created_at'), ('updated_at', 'updated_at'), ] def get_user_id(self, details, response): return response['current_user'][self.ID_KEY] def get_user_details(self, response): """Return user details from a Universe account""" # Start with the user data as it was returned user_details = response['current_user'] user_details["username"] = user_details["email"] return user_details def user_data(self, access_token, *args, **kwargs): """Loads user data from service""" return self.get_json(self.USER_INFO_URL, headers={'Authorization': 'Bearer {}'.format(access_token)})
Remove f-string for pre-Python 3.6 support.
Remove f-string for pre-Python 3.6 support.
Python
bsd-3-clause
python-social-auth/social-core,python-social-auth/social-core
--- +++ @@ -30,4 +30,4 @@ def user_data(self, access_token, *args, **kwargs): """Loads user data from service""" - return self.get_json(self.USER_INFO_URL, headers={'Authorization': f'Bearer {access_token}'}) + return self.get_json(self.USER_INFO_URL, headers={'Authorization': 'Bearer {}'.format(access_token)})
ef38b68aa288bb43bc33a860ac995836f892854e
usr/examples/99-Tests/unittests.py
usr/examples/99-Tests/unittests.py
# OpenMV Unit Tests. # import os, sensor, gc TEST_DIR = "unittest" TEMP_DIR = "unittest/temp" DATA_DIR = "unittest/data" SCRIPT_DIR = "unittest/script" if not (TEST_DIR in os.listdir("")): raise Exception('Unittest dir not found!') print("") test_failed = False def print_result(test, passed): s = "Unittest (%s)"%(test) padding = "."*(60-len(s)) print(s + padding + ("PASSED" if passed == True else "FAILED")) for module in sorted(os.listdir(SCRIPT_DIR)): mod_path = "/".join((SCRIPT_DIR, module)) for test in sorted(os.listdir(mod_path)): if test.endswith(".py"): test_passed = True test_path = "/".join((mod_path, test)) try: gc.collect() exec(open(test_path).read()) if unittest(DATA_DIR, TEMP_DIR) == False: raise Exception() except Exception as e: test_failed = True test_passed = False print_result(test, test_passed) if test_failed: print("\nSome tests have FAILED!!!\n\n") else: print("\nAll tests PASSED.\n\n")
# OpenMV Unit Tests. # import os, sensor, gc TEST_DIR = "unittest" TEMP_DIR = "unittest/temp" DATA_DIR = "unittest/data" SCRIPT_DIR = "unittest/script" if not (TEST_DIR in os.listdir("")): raise Exception('Unittest dir not found!') print("") test_failed = False def print_result(test, passed): s = "Unittest (%s)"%(test) padding = "."*(60-len(s)) print(s + padding + ("PASSED" if passed == True else "FAILED")) for module in sorted(os.listdir(SCRIPT_DIR)): mod_path = "/".join((SCRIPT_DIR, module)) for test in sorted(os.listdir(mod_path)): if test.endswith(".py"): test_passed = True test_path = "/".join((mod_path, test)) try: exec(open(test_path).read()) gc.collect() if unittest(DATA_DIR, TEMP_DIR) == False: raise Exception() except Exception as e: test_failed = True test_passed = False print_result(test, test_passed) if test_failed: print("\nSome tests have FAILED!!!\n\n") else: print("\nAll tests PASSED.\n\n")
Move GC collect after loading unit test function.
Move GC collect after loading unit test function.
Python
mit
kwagyeman/openmv,openmv/openmv,openmv/openmv,kwagyeman/openmv,openmv/openmv,iabdalkader/openmv,openmv/openmv,kwagyeman/openmv,iabdalkader/openmv,kwagyeman/openmv,iabdalkader/openmv,iabdalkader/openmv
--- +++ @@ -26,8 +26,8 @@ test_passed = True test_path = "/".join((mod_path, test)) try: + exec(open(test_path).read()) gc.collect() - exec(open(test_path).read()) if unittest(DATA_DIR, TEMP_DIR) == False: raise Exception() except Exception as e:
2522f5e1a132597e56d1e8e8559b3e16c15be47f
app/twitter/views.py
app/twitter/views.py
from flask import Blueprint, request, render_template from ..load import processing_results, api import string import tweepy twitter_mod = Blueprint('twitter', __name__, template_folder='templates', static_folder='static') ascii_chars = set(string.printable) ascii_chars.remove(' ') ascii_chars.add('...') def takeout_non_ascii(s): return list(filter(lambda x: x not in ascii_chars, s)) @twitter_mod.route('/twitter', methods=['GET', 'POST']) def twitter(): if request.method == 'POST': text = [] for tweet in tweepy.Cursor(api.search, request.form['topic'], lang='hi').items(100): temp = ''.join(takeout_non_ascii(tweet.text)) if not len(temp) in range(3): text.append(temp) data, emotion_sents, score, line_sentiment, text, length = processing_results(text) return render_template('projects/twitter.html', data=[data, emotion_sents, score, zip(text, line_sentiment), length]) else: return render_template('projects/twitter.html')
from flask import Blueprint, request, render_template from ..load import processing_results, api import string import tweepy twitter_mod = Blueprint('twitter', __name__, template_folder='templates', static_folder='static') ascii_chars = set(string.printable) ascii_chars.remove(' ') ascii_chars.add('...') def takeout_non_ascii(s): return list(filter(lambda x: x not in ascii_chars, s)) @twitter_mod.route('/twitter', methods=['GET', 'POST']) def twitter(): if request.method == 'POST': text = [] for tweet in tweepy.Cursor(api.search, request.form['topic'], lang='hi').items(50): temp = ''.join(takeout_non_ascii(tweet.text)) if not len(temp) in range(3): text.append(temp) data, emotion_sents, score, line_sentiment, text, length = processing_results(text) return render_template('projects/twitter.html', data=[data, emotion_sents, score, zip(text, line_sentiment), length]) else: return render_template('projects/twitter.html')
Reduce number of tweets to reduce CPU compuation time of the unoptimised models
Reduce number of tweets to reduce CPU compuation time of the unoptimised models
Python
mit
griimick/feature-mlsite,griimick/feature-mlsite,griimick/feature-mlsite
--- +++ @@ -20,7 +20,7 @@ if request.method == 'POST': text = [] - for tweet in tweepy.Cursor(api.search, request.form['topic'], lang='hi').items(100): + for tweet in tweepy.Cursor(api.search, request.form['topic'], lang='hi').items(50): temp = ''.join(takeout_non_ascii(tweet.text)) if not len(temp) in range(3): text.append(temp)
8aff9605a91f1041a1040c2cfc000dbc588c0503
biobox_cli/util.py
biobox_cli/util.py
import sys, yaml, os.path def select_module(module, name): """ Select and return a biobox module """ mod_name = ".".join(["biobox_cli", module, name]) try: __import__(mod_name) except ImportError: err_exit('unknown_command', {'command_type': str.replace(module, '_', ' '), 'command': name}) return sys.modules[mod_name] def parse_docopt(doc, argv, is_main_module): from docopt import docopt from version import __version__ return docopt(doc, argv = argv, version = __version__, options_first = is_main_module) def err_message(msg_key, locals_): path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', 'assets', 'error_messages.yml') with open(path, 'r') as f: errors = yaml.load(f.read()) return errors[msg_key].format(**locals_) def err_exit(msg_key, locals_): sys.stderr.write(err_message(msg_key, locals_)) exit(1)
import sys, yaml, os.path def select_module(module, name): """ Select and return a biobox module """ mod_name = ".".join(["biobox_cli", module, name]) try: __import__(mod_name) except ImportError: err_exit('unknown_command', {'command_type': str.replace(module, '_', ' '), 'command': name}) return sys.modules[mod_name] def parse_docopt(doc, argv, is_main_module): from docopt import docopt from version import __version__ return docopt(doc, argv = argv, version = __version__, options_first = is_main_module) def err_message(msg_key, locals_): from pkg_resources import resource_string errors = yaml.load(resource_string(__name__, os.path.join('..', 'assets', 'error_messages.yml'))) return errors[msg_key].format(**locals_) def err_exit(msg_key, locals_): sys.stderr.write(err_message(msg_key, locals_)) exit(1)
Use resource_string for error messages
Use resource_string for error messages Signed-off-by: Michael Barton <1d6e1cf70ec6f9ab28d3ea4b27a49a77654d370e@michaelbarton.me.uk>
Python
mit
pbelmann/command-line-interface,pbelmann/command-line-interface,michaelbarton/command-line-interface,bioboxes/command-line-interface,fungs/bbx-cli,fungs/bbx-cli,bioboxes/command-line-interface,michaelbarton/command-line-interface
--- +++ @@ -21,9 +21,8 @@ options_first = is_main_module) def err_message(msg_key, locals_): - path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', 'assets', 'error_messages.yml') - with open(path, 'r') as f: - errors = yaml.load(f.read()) + from pkg_resources import resource_string + errors = yaml.load(resource_string(__name__, os.path.join('..', 'assets', 'error_messages.yml'))) return errors[msg_key].format(**locals_) def err_exit(msg_key, locals_):
22758183910a479f62f9fb42bb5acdc09deff448
qual/calendars/historical.py
qual/calendars/historical.py
from datetime import date from base import Calendar from date import InvalidDate from main import JulianCalendar class JulianToGregorianCalendar(Calendar): def date(self, year, month, day): gregorian_date = date(year, month, day) if gregorian_date < self.first_gregorian_day: julian_date = JulianCalendar().date(year, month, day) if julian_date > self.first_gregorian_day: raise InvalidDate("This is a 'missing day' when the calendars changed.") self.bless(julian_date) return julian_date return self.from_date(gregorian_date) def bless(self, date): date.calendar = self.__class__ class EnglishHistoricalCalendar(JulianToGregorianCalendar): first_gregorian_day = date(1752, 9, 13)
from datetime import date from base import Calendar from date import InvalidDate from main import JulianCalendar class JulianToGregorianCalendar(Calendar): def date(self, year, month, day): gregorian_date = date(year, month, day) if gregorian_date < self.first_gregorian_day: julian_date = JulianCalendar().date(year, month, day) if not julian_date < self.first_gregorian_day: raise InvalidDate("This is a 'missing day' when the calendars changed.") self.bless(julian_date) return julian_date return self.from_date(gregorian_date) def bless(self, date): date.calendar = self.__class__ class EnglishHistoricalCalendar(JulianToGregorianCalendar): first_gregorian_day = date(1752, 9, 13)
Change comparison to be non-strict.
Change comparison to be non-strict.
Python
apache-2.0
jwg4/calexicon,jwg4/qual
--- +++ @@ -9,7 +9,7 @@ gregorian_date = date(year, month, day) if gregorian_date < self.first_gregorian_day: julian_date = JulianCalendar().date(year, month, day) - if julian_date > self.first_gregorian_day: + if not julian_date < self.first_gregorian_day: raise InvalidDate("This is a 'missing day' when the calendars changed.") self.bless(julian_date) return julian_date
e09414c101d49c70c6da4fcdabf3e985fb92f468
passenger_wsgi.py
passenger_wsgi.py
import os import subprocess import sys try: from flask import Flask import flask_login from flask_restless import APIManager from flask_sqlalchemy import SQLAlchemy import requests except ImportError: INTERP = "venv/bin/python" if os.path.relpath(sys.executable, os.getcwd()) != INTERP: try: os.execl(INTERP, INTERP, *sys.argv) except OSError: sys.exit("Could not find virtual environment. Run `:~$ ./setup.sh`") else: sys.exit("Could not find requirements. Are they all included in requirements.txt? Run `:~$ ./setup.sh`") application = Flask(__name__) @application.route("/") def index(): return "Hello, world!" @application.route("/update") def update(): subprocess.call(['git', 'fetch', 'origin']) subprocess.call(['git', 'pull']) subprocess.call(['mkdir', 'tmp']) subprocess.call(['touch', 'tmp/restart.txt']) @application.route("/big_update") def bigUpdate(): subprocess.call(['./setup.sh']) if __name__ == "__main__": application.run()
import os import subprocess import sys try: from flask import Flask import flask_login from flask_restless import APIManager from flask_sqlalchemy import SQLAlchemy import requests except ImportError: INTERP = "venv/bin/python" if os.path.relpath(sys.executable, os.getcwd()) != INTERP: try: os.execl(INTERP, INTERP, *sys.argv) except OSError: sys.exit("Could not find virtual environment. Run `:~$ ./setup.sh`") else: sys.exit("Could not find requirements. Are they all included in requirements.txt? Run `:~$ ./setup.sh`") application = Flask(__name__) @application.route("/") def index(): return "Hello, world!" @application.route("/update") def update(): subprocess.call(['git', 'fetch', 'origin']) subprocess.call(['git', 'pull']) try: subprocess.check_call(['mkdir', 'tmp']) except subprocess.CalledProcessError, e: pass subprocess.call(['touch', 'tmp/restart.txt']) return "Please restart." @application.route("/big_update") def bigUpdate(): subprocess.call(['./setup.sh']) if __name__ == "__main__": application.run()
Improve error catching logic for update
Improve error catching logic for update
Python
mit
GregBrimble/boilerplate-web-service,GregBrimble/boilerplate-web-service
--- +++ @@ -32,8 +32,12 @@ def update(): subprocess.call(['git', 'fetch', 'origin']) subprocess.call(['git', 'pull']) - subprocess.call(['mkdir', 'tmp']) + try: + subprocess.check_call(['mkdir', 'tmp']) + except subprocess.CalledProcessError, e: + pass subprocess.call(['touch', 'tmp/restart.txt']) + return "Please restart." @application.route("/big_update")
12a61da411134d2fc02e91d41b6687de8763a374
modules/pipetruncate.py
modules/pipetruncate.py
# pipetruncate.py # from pipe2py import util def pipe_truncate(context, _INPUT, conf, **kwargs): """This operator truncates the number of items in a feed. Keyword arguments: context -- pipeline context _INPUT -- source generator kwargs -- terminal, if the truncation value is wired in conf: count -- length of the truncated feed, if specified literally Yields (_OUTPUT): truncated list of source items """ count = conf['count'] limit = int(util.get_value(count, None, **kwargs)) for i in xrange(0, limit): yield _INPUT.next()
# pipetruncate.py # from pipe2py import util def pipe_truncate(context, _INPUT, conf, **kwargs): """This operator truncates the number of items in a feed. Keyword arguments: context -- pipeline context _INPUT -- source generator kwargs -- terminal, if the truncation value is wired in conf: count -- length of the truncated feed, if specified literally Yields (_OUTPUT): truncated list of source items """ count = conf['count'] limit = int(util.get_value(count, None, **kwargs)) i = 0 for item in _INPUT: if i >= limit: break yield item i += 1
Fix for taking feed from a split output
Fix for taking feed from a split output
Python
mit
nerevu/riko,nerevu/riko
--- +++ @@ -19,6 +19,9 @@ count = conf['count'] limit = int(util.get_value(count, None, **kwargs)) - for i in xrange(0, limit): - yield _INPUT.next() - + i = 0 + for item in _INPUT: + if i >= limit: + break + yield item + i += 1
cad0f1ebeeaac1af296930bc98cf892395293112
grako/rendering.py
grako/rendering.py
# -*- coding: utf-8 -*- """ The Renderer class provides the infrastructure for generating template-based code. It's used by the .grammars module for parser generation. """ from __future__ import print_function, division, absolute_import, unicode_literals import itertools from .util import trim def render(item, **fields): """ Render the given item """ if item is None: return '' elif isinstance(item, Renderer): return item.render(**fields) elif isinstance(item, list): return ''.join(render(e) for e in item) else: return str(item) class Renderer(object): template = '' _counter = itertools.count() def __init__(self, template=None): if template is not None: self.template = template def counter(self): return next(self._counter) def render_fields(self, fields): pass def render(self, template=None, **kwargs): fields = ({k:v for k, v in vars(self).items() if not k.startswith('_')}) override = self.render_fields(fields) if template is None: if override is not None: template = override else: template = self.template fields.update(kwargs) fields = {k:render(v) for k, v in fields.items()} try: return trim(template).format(**fields) except KeyError as e: raise KeyError(str(e), type(self))
# -*- coding: utf-8 -*- """ The Renderer class provides the infrastructure for generating template-based code. It's used by the .grammars module for parser generation. """ from __future__ import print_function, division, absolute_import, unicode_literals import itertools from .util import trim def render(item, **fields): """ Render the given item """ if item is None: return '' elif isinstance(item, Renderer): return item.render(**fields) elif isinstance(item, list): return ''.join(render(e) for e in item) else: return str(item) class Renderer(object): template = '' _counter = itertools.count() def __init__(self, template=None): if template is not None: self.template = template def counter(self): return next(self._counter) def render_fields(self, fields): pass def render(self, template=None, **fields): fields = ({k:v for k, v in vars(self).items() if not k.startswith('_')}) override = self.render_fields(fields) if template is None: if override is not None: template = override else: template = self.template fields.update(fields) fields = {k:render(v) for k, v in fields.items()} try: return trim(template).format(**fields) except KeyError as e: raise KeyError(str(e), type(self))
Use 'fields' instead of 'kwargs' to document intent.
Use 'fields' instead of 'kwargs' to document intent.
Python
bsd-2-clause
vmuriart/grako,frnknglrt/grako
--- +++ @@ -34,7 +34,7 @@ def render_fields(self, fields): pass - def render(self, template=None, **kwargs): + def render(self, template=None, **fields): fields = ({k:v for k, v in vars(self).items() if not k.startswith('_')}) override = self.render_fields(fields) @@ -44,7 +44,7 @@ else: template = self.template - fields.update(kwargs) + fields.update(fields) fields = {k:render(v) for k, v in fields.items()} try: return trim(template).format(**fields)
c617083fa413a0d45ff26c96751210901dfad7cf
cab/urls/search.py
cab/urls/search.py
from django.conf.urls import url from haystack.views import SearchView, search_view_factory from ..forms import AdvancedSearchForm search_view = search_view_factory(view_class=SearchView, template='search/advanced_search.html', form_class=AdvancedSearchForm) urlpatterns = [ url(r'^$', 'haystack.views.basic_search', name='cab_search'), url(r'^autocomplete/$', 'cab.views.snippets.autocomplete', name='snippet_autocomplete'), url(r'^advanced/$', search_view, name='cab_search_advanced'), ]
from django.conf.urls import url from haystack.views import SearchView, basic_search, search_view_factory from ..forms import AdvancedSearchForm from ..views.snippets import autocomplete search_view = search_view_factory(view_class=SearchView, template='search/advanced_search.html', form_class=AdvancedSearchForm) urlpatterns = [ url(r'^$', basic_search, name='cab_search'), url(r'^autocomplete/$', autocomplete, name='snippet_autocomplete'), url(r'^advanced/$', search_view, name='cab_search_advanced'), ]
Remove string views in urlpatterns
Remove string views in urlpatterns
Python
bsd-3-clause
django/djangosnippets.org,django/djangosnippets.org,django/djangosnippets.org,django-de/djangosnippets.org,django-de/djangosnippets.org,django/djangosnippets.org,django-de/djangosnippets.org,django-de/djangosnippets.org,django/djangosnippets.org
--- +++ @@ -1,20 +1,15 @@ from django.conf.urls import url -from haystack.views import SearchView, search_view_factory +from haystack.views import SearchView, basic_search, search_view_factory from ..forms import AdvancedSearchForm +from ..views.snippets import autocomplete search_view = search_view_factory(view_class=SearchView, template='search/advanced_search.html', form_class=AdvancedSearchForm) urlpatterns = [ - url(r'^$', - 'haystack.views.basic_search', - name='cab_search'), - url(r'^autocomplete/$', - 'cab.views.snippets.autocomplete', - name='snippet_autocomplete'), - url(r'^advanced/$', - search_view, - name='cab_search_advanced'), + url(r'^$', basic_search, name='cab_search'), + url(r'^autocomplete/$', autocomplete, name='snippet_autocomplete'), + url(r'^advanced/$', search_view, name='cab_search_advanced'), ]
ce939b6f03260a57268a8371a2e05e531b36bce2
hoomd/typeparam.py
hoomd/typeparam.py
from hoomd.parameterdicts import AttachedTypeParameterDict class TypeParameter: def __init__(self, name, type_kind, param_dict): self.name = name self.type_kind = type_kind self.param_dict = param_dict def __getitem__(self, key): return self.param_dict[key] def __setitem__(self, key, value): self.param_dict[key] = value @property def default(self): return self.param_dict.default @default.setter def default(self, value): self.param_dict.default = value def attach(self, cpp_obj, sim): self.param_dict = AttachedTypeParameterDict(cpp_obj, self.name, self.type_kind, self.param_dict, sim) return self def detach(self): self.param_dict = self.param_dict.to_dettached() return self def to_dict(self): return self.param_dict.to_dict() def keys(self): yield from self.param_dict.keys() @property def state(self): state = self.to_dict() if self.param_dict._len_keys > 1: state = {str(key): value for key, value in state.items()} state['__default'] = self.default return state
from hoomd.parameterdicts import AttachedTypeParameterDict class TypeParameter: def __init__(self, name, type_kind, param_dict): self.name = name self.type_kind = type_kind self.param_dict = param_dict def __getattr__(self, attr): try: return getattr(self.param_dict, attr) except AttributeError: raise AttributeError("'{}' object has no attribute " "'{}'".format(type(self), attr)) def __getitem__(self, key): return self.param_dict[key] def __setitem__(self, key, value): self.param_dict[key] = value @property def default(self): return self.param_dict.default @default.setter def default(self, value): self.param_dict.default = value def attach(self, cpp_obj, sim): self.param_dict = AttachedTypeParameterDict(cpp_obj, self.name, self.type_kind, self.param_dict, sim) return self def detach(self): self.param_dict = self.param_dict.to_dettached() return self def to_dict(self): return self.param_dict.to_dict() def keys(self): yield from self.param_dict.keys() @property def state(self): state = self.to_dict() if self.param_dict._len_keys > 1: state = {str(key): value for key, value in state.items()} state['__default'] = self.default return state
Allow TypeParameters to 'grap' attr from param_dict
Allow TypeParameters to 'grap' attr from param_dict
Python
bsd-3-clause
joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue
--- +++ @@ -6,6 +6,13 @@ self.name = name self.type_kind = type_kind self.param_dict = param_dict + + def __getattr__(self, attr): + try: + return getattr(self.param_dict, attr) + except AttributeError: + raise AttributeError("'{}' object has no attribute " + "'{}'".format(type(self), attr)) def __getitem__(self, key): return self.param_dict[key]
6d848c4b86913d71b986ef032348a8fa8720cfc7
src/idea/utility/state_helper.py
src/idea/utility/state_helper.py
from idea.models import State def get_first_state(): """ Get the first state for an idea. """ return State.objects.get(previous__isnull=True)
from idea.models import State def get_first_state(): """ Get the first state for an idea. """ #return State.objects.get(previous__isnull=True) # previous__isnull breaks functionality if someone creates a new state # without a previous state set. since we know the initial state # is id=1 per fixtures/state.json, use that instead. return State.objects.get(id=1)
Fix add_idea when multiple States have no previous
Fix add_idea when multiple States have no previous
Python
cc0-1.0
CapeSepias/idea-box,m3brown/idea-box,geomapdev/idea-box,cmc333333/idea-box,18F/idea-box,CapeSepias/idea-box,18F/idea-box,geomapdev/idea-box,18F/idea-box,cmc333333/idea-box,CapeSepias/idea-box,m3brown/idea-box,cmc333333/idea-box,geomapdev/idea-box
--- +++ @@ -2,4 +2,8 @@ def get_first_state(): """ Get the first state for an idea. """ - return State.objects.get(previous__isnull=True) + #return State.objects.get(previous__isnull=True) + # previous__isnull breaks functionality if someone creates a new state + # without a previous state set. since we know the initial state + # is id=1 per fixtures/state.json, use that instead. + return State.objects.get(id=1)
a8d46d56890948b36726432fbcaa3bc038a0095f
txircd/modules/extra/stripcolors.py
txircd/modules/extra/stripcolors.py
from twisted.plugin import IPlugin from txircd.module_interface import IMode, IModuleData, Mode, ModuleData from txircd.utils import ModeType, stripFormatting from zope.interface import implements class StripColors(ModuleData, Mode): implements(IPlugin, IModuleData, IMode) name = "StripColors" affectedActions = { "commandmodify-PRIVMSG": 10, "commandmodify-NOTICE": 10 } def channelModes(self): return [ ("S", ModeType.NoParam, self) ] def actions(self): return [ ("modeactioncheck-channel-S-commandmodify-PRIVMSG", 10, self.channelHasMode), ("modeactioncheck-channel-S-commandmodify-NOTICE", 10, self.channelHasMode) ] def channelHasMode(self, channel, user, data): if "S" in channel.modes: return "" return None def apply(self, actionName, channel, param, user, data): if channel in data["targetchans"] not self.ircd.runActionUntilValue("checkexemptchanops", "stripcolor", channel, user): message = data["targetchans"][channel] data["targetchans"][channel] = stripFormatting(message) stripColors = StripColors()
from twisted.plugin import IPlugin from txircd.module_interface import IMode, IModuleData, Mode, ModuleData from txircd.utils import ModeType, stripFormatting from zope.interface import implements class StripColors(ModuleData, Mode): implements(IPlugin, IModuleData, IMode) name = "StripColors" affectedActions = { "commandmodify-PRIVMSG": 10, "commandmodify-NOTICE": 10 } def channelModes(self): return [ ("S", ModeType.NoParam, self) ] def actions(self): return [ ("modeactioncheck-channel-S-commandmodify-PRIVMSG", 10, self.channelHasMode), ("modeactioncheck-channel-S-commandmodify-NOTICE", 10, self.channelHasMode) ] def channelHasMode(self, channel, user, data): if "S" in channel.modes: return "" return None def apply(self, actionName, channel, param, user, data): if channel in data["targetchans"] and not self.ircd.runActionUntilValue("checkexemptchanops", "stripcolor", channel, user): message = data["targetchans"][channel] data["targetchans"][channel] = stripFormatting(message) stripColors = StripColors()
Fix missing comparison operator in StripColors
Fix missing comparison operator in StripColors
Python
bsd-3-clause
ElementalAlchemist/txircd,Heufneutje/txircd
--- +++ @@ -25,7 +25,7 @@ return None def apply(self, actionName, channel, param, user, data): - if channel in data["targetchans"] not self.ircd.runActionUntilValue("checkexemptchanops", "stripcolor", channel, user): + if channel in data["targetchans"] and not self.ircd.runActionUntilValue("checkexemptchanops", "stripcolor", channel, user): message = data["targetchans"][channel] data["targetchans"][channel] = stripFormatting(message)
efc792de5225f3a21fab6d3f299fea07c63f6d0d
incident/models.py
incident/models.py
from django.db import models from django.utils.translation import ugettext_lazy as _ class Issue(models.Model): title = models.CharField(_('Title'), max_length=150) description = models.TextField(_('Description')) contract = models.ForeignKey('structure.Contract') assigned_team = models.ForeignKey('structure.Team') assigned_user = models.ForeignKey('structure.User', null=True)
from django.db import models from django.utils.translation import ugettext_lazy as _ class Issue(models.Model): title = models.CharField(_('Title'), max_length=150) description = models.TextField(_('Description')) contract = models.ForeignKey('structure.Contract') assigned_team = models.ForeignKey('structure.Team') assigned_user = models.ForeignKey('structure.User', null=True) created_at = models.DateTimeField(auto_now_add=True)
Add created_at to Issue model
Add created_at to Issue model
Python
bsd-3-clause
RocknRoot/LIIT
--- +++ @@ -7,3 +7,4 @@ contract = models.ForeignKey('structure.Contract') assigned_team = models.ForeignKey('structure.Team') assigned_user = models.ForeignKey('structure.User', null=True) + created_at = models.DateTimeField(auto_now_add=True)
355a3a34b9a264734c1f5f2ec365a5873f000b77
open_skin_as_project.py
open_skin_as_project.py
import os import subprocess import sublime import sublime_plugin from .path.skin_path_provider import get_cached_skin_path class RainmeterOpenSkinAsProjectCommand(sublime_plugin.ApplicationCommand): def run(self): skins_path = get_cached_skin_path() skins = os.listdir(skins_path) sublime.active_window().show_quick_panel(skins, self.on_skin_selected, 0, 0, None) def on_skin_selected(self, selected_skin_id): skins_path = get_cached_skin_path() skins = os.listdir(skins_path) selected_skin = skins[selected_skin_id] selected_skin_path = os.path.join(skins_path, selected_skin) # to open a folder in new window, just create a new process with the folder as argument st_path = sublime.executable_path() subprocess.Popen([ st_path, selected_skin_path ])
import os import subprocess import sublime import sublime_plugin from .path.skin_path_provider import get_cached_skin_path class RainmeterOpenSkinAsProjectCommand(sublime_plugin.ApplicationCommand): def run(self): skins_path = get_cached_skin_path() skins = os.listdir(skins_path) sublime.active_window().show_quick_panel(skins, self.on_skin_selected, 0, 0, None) def on_skin_selected(self, selected_skin_id): if selected_skin_id == -1: return skins_path = get_cached_skin_path() skins = os.listdir(skins_path) selected_skin = skins[selected_skin_id] selected_skin_path = os.path.join(skins_path, selected_skin) # to open a folder in new window, just create a new process with the folder as argument st_path = sublime.executable_path() subprocess.Popen([ st_path, selected_skin_path ])
Handle in case user cancels open skin as project command
Handle in case user cancels open skin as project command
Python
mit
thatsIch/sublime-rainmeter
--- +++ @@ -16,6 +16,9 @@ sublime.active_window().show_quick_panel(skins, self.on_skin_selected, 0, 0, None) def on_skin_selected(self, selected_skin_id): + if selected_skin_id == -1: + return + skins_path = get_cached_skin_path() skins = os.listdir(skins_path) selected_skin = skins[selected_skin_id]
fe217ed9436754d6f9cf01a0052091d095a6e99c
web.py
web.py
from flask import Flask, jsonify, render_template import test, stats, os app = Flask(__name__) cache = {} @app.route('/') def hello_world(): return 'Hello World!' @app.route('/r/<string:subreddit>') def episodes(subreddit): seasonsAndEpisodes = _getEpisodes(subreddit) return render_template('index.html', result=seasonsAndEpisodes, subreddit=subreddit) @app.route('/api/r/<string:subreddit>', methods=['GET']) def get_episodes(subreddit): seasonsAndEpisodes = _getEpisodes(subreddit) return jsonify([season.serialize() for season in seasonsAndEpisodes]) def _getEpisodes(subreddit): if subreddit in cache: return cache[subreddit] episodes = test.getData(subreddit) seasonsAndEpisodes = stats.extractSeasonsAndEpisodes(episodes) cache[subreddit] = seasonsAndEpisodes return seasonsAndEpisodes if __name__ == '__main__': port = int(os.environ.get('PORT', 33507)) app.run(debug=True, host='0.0.0.0', port=port)
from flask import Flask, jsonify, render_template import test, stats, os app = Flask(__name__) cache = {} @app.route('/') def hello_world(): return 'Hello World!' @app.route('/r/<string:subreddit>') def episodes(subreddit): seasonsAndEpisodes = _getEpisodes(subreddit) return render_template('index.html', result=seasonsAndEpisodes, subreddit=subreddit) @app.route('/api/r/<string:subreddit>', methods=['GET']) def get_episodes(subreddit): seasonsAndEpisodes = _getEpisodes(subreddit) seasons = [season.serialize() for season in seasonsAndEpisodes] result = {"seasons": seasons, "subreddit": subreddit} return jsonify(result) def _getEpisodes(subreddit): if subreddit in cache: return cache[subreddit] episodes = test.getData(subreddit) seasonsAndEpisodes = stats.extractSeasonsAndEpisodes(episodes) cache[subreddit] = seasonsAndEpisodes return seasonsAndEpisodes if __name__ == '__main__': port = int(os.environ.get('PORT', 33507)) app.run(debug=True, host='0.0.0.0', port=port)
Put seasons and subreddit into a response object for the api
Put seasons and subreddit into a response object for the api
Python
apache-2.0
chasedog/EpisodeDiscussions,chasedog/EpisodeDiscussions
--- +++ @@ -16,7 +16,9 @@ @app.route('/api/r/<string:subreddit>', methods=['GET']) def get_episodes(subreddit): seasonsAndEpisodes = _getEpisodes(subreddit) - return jsonify([season.serialize() for season in seasonsAndEpisodes]) + seasons = [season.serialize() for season in seasonsAndEpisodes] + result = {"seasons": seasons, "subreddit": subreddit} + return jsonify(result) def _getEpisodes(subreddit): if subreddit in cache:
a18e6aa3647779de3963e6781afaeea3732d100e
similarities.py
similarities.py
#!/usr/bin/env python import argparse import sys from gensim.models.word2vec import Word2Vec import csv from signal import signal, SIGINT signal(SIGINT, lambda signum, frame: sys.exit(1)) parser = argparse.ArgumentParser() parser.add_argument('--sim', type=float, default=.3) parser.add_argument('w2v', type=argparse.FileType('rb')) args = parser.parse_args() w2v = Word2Vec.load_word2vec_format(args.w2v, binary=True, unicode_errors='ignore') w2v.init_sims(replace=True) print('Using %d word2vec dimensions from "%s".' % (w2v.layer1_size, sys.argv[1]), file=sys.stderr) reader = csv.reader(sys.stdin, delimiter='\t', quoting=csv.QUOTE_NONE) for row in reader: word1, word2 = row[0], row[1] try: similarity = w2v.similarity(word1, word2) if similarity < 0: similarity = args.sim except KeyError: similarity = args.sim print('%s\t%s\t%f' % (word1, word2, similarity))
#!/usr/bin/env python import argparse import sys import csv from gensim.models import KeyedVectors from signal import signal, SIGINT signal(SIGINT, lambda signum, frame: sys.exit(1)) parser = argparse.ArgumentParser() parser.add_argument('--sim', type=float, default=.3) parser.add_argument('w2v', type=argparse.FileType('rb')) args = parser.parse_args() w2v = KeyedVectors.load_word2vec_format(args.w2v, binary=True, unicode_errors='ignore') w2v.init_sims(replace=True) print('Using %d word2vec dimensions from "%s".' % (w2v.vector_size, sys.argv[1]), file=sys.stderr) reader = csv.reader(sys.stdin, delimiter='\t', quoting=csv.QUOTE_NONE) for row in reader: word1, word2 = row[0], row[1] try: similarity = w2v.similarity(word1, word2) if similarity < 0: similarity = args.sim except KeyError: similarity = args.sim print('%s\t%s\t%f' % (word1, word2, similarity))
Update the Gensim API usage
Update the Gensim API usage
Python
mit
dustalov/watset,dustalov/watset
--- +++ @@ -2,8 +2,8 @@ import argparse import sys -from gensim.models.word2vec import Word2Vec import csv +from gensim.models import KeyedVectors from signal import signal, SIGINT signal(SIGINT, lambda signum, frame: sys.exit(1)) @@ -13,9 +13,9 @@ parser.add_argument('w2v', type=argparse.FileType('rb')) args = parser.parse_args() -w2v = Word2Vec.load_word2vec_format(args.w2v, binary=True, unicode_errors='ignore') +w2v = KeyedVectors.load_word2vec_format(args.w2v, binary=True, unicode_errors='ignore') w2v.init_sims(replace=True) -print('Using %d word2vec dimensions from "%s".' % (w2v.layer1_size, sys.argv[1]), file=sys.stderr) +print('Using %d word2vec dimensions from "%s".' % (w2v.vector_size, sys.argv[1]), file=sys.stderr) reader = csv.reader(sys.stdin, delimiter='\t', quoting=csv.QUOTE_NONE)
935e16c55ac9dd59b1025c5b124ca0d15eaa02f7
run-lala.py
run-lala.py
#!/usr/bin/python2 import lala import ConfigParser import sys def main(): """Main method""" config = ConfigParser.SafeConfigParser() config.read("config") lalaconfig = config._sections["lala"] if "-d" in sys.argv: debug = True else: debug = False if "nickserv_password" in lalaconfig: nickserv_password = lalaconfig["nickserv_password"] else: nick nickserv_password = lalaconfig["nickserv_password"] if "nickserv_password"\ in lalaconfig else None plugins = lalaconfig["plugins"].split(",") bot = lala.Bot( server=lalaconfig["server"], admin=lalaconfig["admin"], port=int(lalaconfig["port"]), nick=lalaconfig["nick"], channel=lalaconfig["channel"], debug=debug, plugins=plugins, nickserv = nickserv_password ) #try: bot.mainloop() #except RuntimeError, e: #print e if __name__ == '__main__': main()
#!/usr/bin/python2 import lala import ConfigParser import sys def main(): """Main method""" config = ConfigParser.SafeConfigParser() config.read("config") lalaconfig = config._sections["lala"] if "-d" in sys.argv: debug = True else: debug = False nickserv_password = lalaconfig["nickserv_password"] if "nickserv_password"\ in lalaconfig else None plugins = lalaconfig["plugins"].split(",") bot = lala.Bot( server=lalaconfig["server"], admin=lalaconfig["admin"], port=int(lalaconfig["port"]), nick=lalaconfig["nick"], channel=lalaconfig["channel"], debug=debug, plugins=plugins, nickserv = nickserv_password ) #try: bot.mainloop() #except RuntimeError, e: #print e if __name__ == '__main__': main()
Remove that ... awkward... code
Remove that ... awkward... code
Python
mit
mineo/lala,mineo/lala
--- +++ @@ -12,10 +12,6 @@ debug = True else: debug = False - if "nickserv_password" in lalaconfig: - nickserv_password = lalaconfig["nickserv_password"] - else: - nick nickserv_password = lalaconfig["nickserv_password"] if "nickserv_password"\ in lalaconfig else None plugins = lalaconfig["plugins"].split(",")
795b661962915221ddab186b66523896316b2a79
jobs/jobs/items.py
jobs/jobs/items.py
# -*- coding: utf-8 -*- # Define here the models for your scraped items # # See documentation in: # http://doc.scrapy.org/en/latest/topics/items.html import scrapy def single_item_serializer(value): # values are nested inside a list: (u'Viltu vaxa me\xf0 Alvogen?',) # so need to return just the fist value when serializing return value[0] class JobsItem(scrapy.Item): title = scrapy.Field(serializer=single_item_serializer) company = scrapy.Field(serializer=single_item_serializer) url = scrapy.Field(serializer=single_item_serializer) posted = scrapy.Field(serializer=single_item_serializer) deadline = scrapy.Field(serializer=single_item_serializer) views = scrapy.Field(serializer=int)
# -*- coding: utf-8 -*- # Define here the models for your scraped items # # See documentation in: # http://doc.scrapy.org/en/latest/topics/items.html import scrapy def single_item_serializer(value): # values are sometimes nested inside a list: (u'Viltu vaxa me\xf0 Alvogen?',) # so need to return just the fist value when serializing if isinstance(value, (list, tuple)): return value[0] return value class JobsItem(scrapy.Item): title = scrapy.Field(serializer=single_item_serializer) company = scrapy.Field(serializer=single_item_serializer) url = scrapy.Field(serializer=single_item_serializer) posted = scrapy.Field(serializer=single_item_serializer) deadline = scrapy.Field(serializer=single_item_serializer) views = scrapy.Field(serializer=int)
Fix rendering of alfred.is data to json file
Fix rendering of alfred.is data to json file
Python
apache-2.0
multiplechoice/workplace
--- +++ @@ -9,9 +9,11 @@ def single_item_serializer(value): - # values are nested inside a list: (u'Viltu vaxa me\xf0 Alvogen?',) + # values are sometimes nested inside a list: (u'Viltu vaxa me\xf0 Alvogen?',) # so need to return just the fist value when serializing - return value[0] + if isinstance(value, (list, tuple)): + return value[0] + return value class JobsItem(scrapy.Item):
06ee9c73fed7a9b8488d800859f65c28ad63eb57
angr/extern_obj.py
angr/extern_obj.py
from cle.absobj import AbsObj class AngrExternObject(AbsObj): def __init__(self, alloc_size=0x1000): super(AngrExternObject, self).__init__('##angr_externs##') self._next_addr = 0 self._lookup_table = {} self._alloc_size = alloc_size self.memory = 'please never look at this' def get_max_addr(self): return self._alloc_size def get_min_addr(self): return 0 def get_pseudo_addr(self, ident): if ident not in self._lookup_table: self._lookup_table[ident] = self._next_addr self._next_addr += 16 return self._lookup_table[ident] + self.rebase_addr
from cle.absobj import AbsObj class AngrExternObject(AbsObj): def __init__(self, alloc_size=0x1000): super(AngrExternObject, self).__init__('##angr_externs##') self._next_addr = 0 self._lookup_table = {} self._alloc_size = alloc_size self.memory = 'please never look at this' def get_max_addr(self): return self._alloc_size + self.rebase_addr def get_min_addr(self): return self.rebase_addr def get_pseudo_addr(self, ident): if ident not in self._lookup_table: self._lookup_table[ident] = self._next_addr self._next_addr += 16 return self._lookup_table[ident] + self.rebase_addr
Make the extrn object use the convention for rebased addresses
Make the extrn object use the convention for rebased addresses
Python
bsd-2-clause
angr/angr,xurantju/angr,zhuyue1314/angr,haylesr/angr,axt/angr,tyb0807/angr,terry2012/angr,avain/angr,xurantju/angr,lowks/angr,iamahuman/angr,iamahuman/angr,tyb0807/angr,iamahuman/angr,angr/angr,haylesr/angr,avain/angr,terry2012/angr,chubbymaggie/angr,cureHsu/angr,f-prettyland/angr,tyb0807/angr,schieb/angr,GuardianRG/angr,axt/angr,schieb/angr,chubbymaggie/angr,f-prettyland/angr,fjferrer/angr,zhuyue1314/angr,schieb/angr,mingderwang/angr,mingderwang/angr,chubbymaggie/angr,fjferrer/angr,f-prettyland/angr,lowks/angr,cureHsu/angr,angr/angr,axt/angr,GuardianRG/angr
--- +++ @@ -9,10 +9,10 @@ self.memory = 'please never look at this' def get_max_addr(self): - return self._alloc_size + return self._alloc_size + self.rebase_addr def get_min_addr(self): - return 0 + return self.rebase_addr def get_pseudo_addr(self, ident): if ident not in self._lookup_table:
20c78cc7c4e0658be01df0ba92838b605bcafdc4
pyreststore/pyreststore/__init__.py
pyreststore/pyreststore/__init__.py
# -*- coding: utf-8; mode: Python; -*- from __future__ import unicode_literals __title__ = 'Python implementation of a REST based storage' # Please use PEP 0440 version strings. # https://www.python.org/dev/peps/pep-0440/ __version__ = '0.9.0' __author__ = 'Peter Dahl Vestergaard' __license__ = 'BSD 3-Clause' __copyright__ = 'Copyright 2015 Peter Dahl Vestergaard' # Version synonym VERSION = __version__
# -*- coding: utf-8; mode: Python; -*- from __future__ import unicode_literals __title__ = 'Python implementation of a REST based storage' # Please use PEP 0440 version strings. # https://www.python.org/dev/peps/pep-0440/ __version__ = '0.9.1.dev1' __author__ = 'Peter Dahl Vestergaard' __license__ = 'BSD 3-Clause' __copyright__ = 'Copyright 2015 Peter Dahl Vestergaard' # Version synonym VERSION = __version__
Bump version to development version
Bump version to development version
Python
bsd-3-clause
peterdv/pyreststore
--- +++ @@ -5,7 +5,7 @@ # Please use PEP 0440 version strings. # https://www.python.org/dev/peps/pep-0440/ -__version__ = '0.9.0' +__version__ = '0.9.1.dev1' __author__ = 'Peter Dahl Vestergaard' __license__ = 'BSD 3-Clause' __copyright__ = 'Copyright 2015 Peter Dahl Vestergaard'
726ebd81a592361d3e2f599d009be6d1f5a81b2c
project/utils/logger.py
project/utils/logger.py
# -*- coding: utf-8 -*- import datetime import logging import os import hashlib from utils.settings_handler import settings def set_up_logging(): """ Logger for tenhou communication and AI output """ logs_directory = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'logs') if not os.path.exists(logs_directory): os.mkdir(logs_directory) # we shouldn't be afraid about collision # also, we need it to distinguish different bots logs (if they were run in the same time) name_hash = hashlib.sha1(settings.USER_ID.encode('utf-8')).hexdigest()[:5] logger = logging.getLogger('tenhou') logger.setLevel(logging.DEBUG) ch = logging.StreamHandler() ch.setLevel(logging.DEBUG) file_name = '{}_{}.log'.format(name_hash, datetime.datetime.now().strftime('%Y-%m-%d %H_%M_%S')) fh = logging.FileHandler(os.path.join(logs_directory, file_name)) fh.setLevel(logging.DEBUG) formatter = logging.Formatter('%(asctime)s %(levelname)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S') ch.setFormatter(formatter) fh.setFormatter(formatter) logger.addHandler(ch) logger.addHandler(fh) logger = logging.getLogger('ai') logger.setLevel(logging.DEBUG) logger.addHandler(ch) logger.addHandler(fh)
# -*- coding: utf-8 -*- import datetime import logging import os import hashlib from utils.settings_handler import settings def set_up_logging(): """ Logger for tenhou communication and AI output """ logs_directory = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'logs') if not os.path.exists(logs_directory): os.mkdir(logs_directory) # we shouldn't be afraid about collision # also, we need it to distinguish different bots logs (if they were run in the same time) name_hash = hashlib.sha1(settings.USER_ID.encode('utf-8')).hexdigest()[:5] logger = logging.getLogger('tenhou') logger.setLevel(logging.DEBUG) ch = logging.StreamHandler() ch.setLevel(logging.DEBUG) file_name = '{}_{}.log'.format(name_hash, datetime.datetime.now().strftime('%Y-%m-%d_%H_%M_%S')) fh = logging.FileHandler(os.path.join(logs_directory, file_name)) fh.setLevel(logging.DEBUG) formatter = logging.Formatter('%(asctime)s %(levelname)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S') ch.setFormatter(formatter) fh.setFormatter(formatter) logger.addHandler(ch) logger.addHandler(fh) logger = logging.getLogger('ai') logger.setLevel(logging.DEBUG) logger.addHandler(ch) logger.addHandler(fh)
Remove " " from log name
Remove " " from log name
Python
mit
MahjongRepository/tenhou-python-bot,MahjongRepository/tenhou-python-bot
--- +++ @@ -26,7 +26,7 @@ ch = logging.StreamHandler() ch.setLevel(logging.DEBUG) - file_name = '{}_{}.log'.format(name_hash, datetime.datetime.now().strftime('%Y-%m-%d %H_%M_%S')) + file_name = '{}_{}.log'.format(name_hash, datetime.datetime.now().strftime('%Y-%m-%d_%H_%M_%S')) fh = logging.FileHandler(os.path.join(logs_directory, file_name)) fh.setLevel(logging.DEBUG)
a339ee67b9a2f1effcedf836f26657f628a842ee
employees/admin.py
employees/admin.py
from django.contrib import admin from .models import Employee, Role class RoleAdmin(admin.ModelAdmin): list_display = ("name",) class EmployeeAdmin(admin.ModelAdmin): list_display = ("username", "first_name", "last_name", "email", 'level', 'score',) fieldsets = ( (None, {'fields': ('username', 'email', 'password')}), ('Personal info', {'fields': ('first_name', 'last_name', 'role', 'skype_id', 'avatar', 'categories')}), ('Personal score', {'fields': ('last_month_score', 'current_month_score', 'level', 'score')}), ('Permissions', {'fields': ('groups', 'user_permissions', 'is_superuser', 'is_staff', 'is_active',)}), ('History', {'fields': ('date_joined', 'last_login')}) ) admin.site.register(Employee, EmployeeAdmin) admin.site.register(Role, RoleAdmin)
from django import forms from django.contrib import admin from .models import Employee, Role class UserCreationForm(forms.ModelForm): class Meta: model = Employee fields = ('username', 'password',) def save(self, commit=True): user = super(UserCreationForm, self).save(commit=False) user.set_password(self.cleaned_data['password']) if commit: user.save() return user class RoleAdmin(admin.ModelAdmin): list_display = ("name",) class EmployeeAdmin(admin.ModelAdmin): form = UserCreationForm list_display = ("username", "first_name", "last_name", "email", 'level', 'score',) fieldsets = ( (None, {'fields': ('username', 'email', 'password')}), ('Personal info', {'fields': ('first_name', 'last_name', 'role', 'skype_id', 'avatar', 'categories')}), ('Personal score', {'fields': ('last_month_score', 'current_month_score', 'level', 'score')}), ('Permissions', {'fields': ('groups', 'user_permissions', 'is_superuser', 'is_staff', 'is_active',)}), ('History', {'fields': ('date_joined', 'last_login')}) ) admin.site.register(Employee, EmployeeAdmin) admin.site.register(Role, RoleAdmin)
Fix employee form error about password hashing
Fix employee form error about password hashing
Python
apache-2.0
belatrix/BackendAllStars
--- +++ @@ -1,12 +1,26 @@ +from django import forms from django.contrib import admin from .models import Employee, Role + +class UserCreationForm(forms.ModelForm): + class Meta: + model = Employee + fields = ('username', 'password',) + + def save(self, commit=True): + user = super(UserCreationForm, self).save(commit=False) + user.set_password(self.cleaned_data['password']) + if commit: + user.save() + return user class RoleAdmin(admin.ModelAdmin): list_display = ("name",) class EmployeeAdmin(admin.ModelAdmin): + form = UserCreationForm list_display = ("username", "first_name", "last_name", "email", 'level', 'score',) fieldsets = ( (None, {'fields': ('username', 'email', 'password')}),
ebf3f4a73aaab3aa2ea3e760bb44cfcbd7ca8d7f
src/main.py
src/main.py
#!/usr/bin/python #coding=utf8 from Tkinter import * import sys from ui import Gui if __name__ == '__main__': Root = Tk() App = Gui(Root) App.pack(expand='yes',fill='both') Root.geometry('320x240+10+10') Root.title('Mid!Magic') Root.mainloop()
#!/usr/bin/python #coding=utf8 from Tkinter import * import sys from ui import Gui if __name__ == '__main__': Root = Tk() App = Gui(Root) App.pack(expand='yes',fill='both') w, h = root.winfo_screenwidth(), root.winfo_screenheight() Root.overrideredirect(1) Root.geometry("%dx%d+0+0" % (w, h)) Root.focus_set() # <-- move focus to this widget Root.bind("<Escape>", lambda e: e.widget.quit()) Root.title('Mid!Magic') Root.mainloop()
Add support for full screen.
Add support for full screen.
Python
apache-2.0
doino-gretchenliev/Mid-Magic,doino-gretchenliev/Mid-Magic
--- +++ @@ -9,6 +9,10 @@ Root = Tk() App = Gui(Root) App.pack(expand='yes',fill='both') - Root.geometry('320x240+10+10') + w, h = root.winfo_screenwidth(), root.winfo_screenheight() + Root.overrideredirect(1) + Root.geometry("%dx%d+0+0" % (w, h)) + Root.focus_set() # <-- move focus to this widget + Root.bind("<Escape>", lambda e: e.widget.quit()) Root.title('Mid!Magic') Root.mainloop()
d1d53bf2c2e719f2ff90d4260bb2521d0ee4af01
main.py
main.py
import argparse from config import app_config as cfg from libraries.database_init import DataBase from libraries.tweetimporter import TweetImporter from libraries.twitterclient import TwitterClient # Twitter API configuration consumer_key = cfg.twitter["consumer_key"] consumer_secret = cfg.twitter["consumer_secret"] access_token = cfg.twitter["access_token"] access_token_secret = cfg.twitter["access_token_secret"] # Command line options parser = argparse.ArgumentParser(description='provide additional information to run the bot detection') parser.add_argument('specified_user', action="store", help='load tweets from the specified user') parser.add_argument('--followers', action="store_true", help="load tweets from user's followers") args = parser.parse_args() # Start database = DataBase(cfg.database["name"], cfg.database["tweet_table"]) client = TwitterClient(consumer_key, consumer_secret, access_token, access_token_secret) importer = TweetImporter(client, database) database.create_table() importer.fromUser(args.specified_user, 20) if args.followers: importer.fromFollowers(args.specified_user, 20)
import argparse from config import app_config as cfg from libraries.database_init import DataBase from libraries.tweetimporter import TweetImporter from libraries.twitterclient import TwitterClient # Twitter API configuration consumer_key = cfg.twitter["consumer_key"] consumer_secret = cfg.twitter["consumer_secret"] access_token = cfg.twitter["access_token"] access_token_secret = cfg.twitter["access_token_secret"] # Command line options parser = argparse.ArgumentParser(description='provide additional information to run the bot detection') parser.add_argument('specified_user', action="store", help='load tweets from the specified user') parser.add_argument('--followers', action="store_true", help="load tweets from user's followers") parser.add_argument('--create-db', action="store_true", help='create or drop the database if it already exists') args = parser.parse_args() # Start database = DataBase(cfg.database["name"], cfg.database["tweet_table"]) client = TwitterClient(consumer_key, consumer_secret, access_token, access_token_secret) importer = TweetImporter(client, database) if args.create_db: database.create_table() importer.fromUser(args.specified_user, 20) if args.followers: importer.fromFollowers(args.specified_user, 20)
Add option to create/drop the database
Add option to create/drop the database
Python
mit
franckbrignoli/twitter-bot-detection
--- +++ @@ -17,6 +17,7 @@ parser = argparse.ArgumentParser(description='provide additional information to run the bot detection') parser.add_argument('specified_user', action="store", help='load tweets from the specified user') parser.add_argument('--followers', action="store_true", help="load tweets from user's followers") +parser.add_argument('--create-db', action="store_true", help='create or drop the database if it already exists') args = parser.parse_args() @@ -25,7 +26,8 @@ client = TwitterClient(consumer_key, consumer_secret, access_token, access_token_secret) importer = TweetImporter(client, database) -database.create_table() +if args.create_db: + database.create_table() importer.fromUser(args.specified_user, 20) if args.followers:
2b0bcbb7ce82171965b22cf657439d6263fa9d91
geojson_scraper.py
geojson_scraper.py
import json import os import urllib.request from retry import retry from urllib.error import HTTPError from common import store_history, truncate, summarise # hack to override sqlite database filename # see: https://help.morph.io/t/using-python-3-with-morph-scraperwiki-fork/148 os.environ['SCRAPERWIKI_DATABASE_NAME'] = 'sqlite:///data.sqlite' import scraperwiki @retry(HTTPError, tries=2, delay=30) def scrape(url, council_id, encoding, table): with urllib.request.urlopen(url) as response: # clear any existing data truncate(table) # load json data_str = response.read() data = json.loads(data_str.decode(encoding)) print("found %i %s" % (len(data['features']), table)) for feature in data['features']: # assemble record record = { 'pk': feature['id'], 'council_id': council_id, 'geometry': json.dumps(feature), } for field in feature['properties']: if field != 'bbox': record[field] = feature['properties'][field] # save to db scraperwiki.sqlite.save( unique_keys=['pk'], data=record, table_name=table) scraperwiki.sqlite.commit_transactions() # print summary summarise(table) store_history(data_str, table)
import json import os import urllib.request from retry import retry from urllib.error import HTTPError from common import store_history, truncate, summarise # hack to override sqlite database filename # see: https://help.morph.io/t/using-python-3-with-morph-scraperwiki-fork/148 os.environ['SCRAPERWIKI_DATABASE_NAME'] = 'sqlite:///data.sqlite' import scraperwiki @retry(HTTPError, tries=2, delay=30) def scrape(url, council_id, encoding, table, key=None): with urllib.request.urlopen(url) as response: # clear any existing data truncate(table) # load json data_str = response.read() data = json.loads(data_str.decode(encoding)) print("found %i %s" % (len(data['features']), table)) for feature in data['features']: # assemble record record = { 'council_id': council_id, 'geometry': json.dumps(feature), } if key is None: record['pk'] = feature['id'] else: record['pk'] = feature['properties'][key] for field in feature['properties']: if field != 'bbox': record[field] = feature['properties'][field] # save to db scraperwiki.sqlite.save( unique_keys=['pk'], data=record, table_name=table) scraperwiki.sqlite.commit_transactions() # print summary summarise(table) store_history(data_str, table)
Add key param to geojson scraper
Add key param to geojson scraper Sometimes we encounter a geojson file with no 'id' attribute This allows us to specify a property to use as a key instead
Python
mit
wdiv-scrapers/dc-base-scrapers
--- +++ @@ -12,7 +12,7 @@ @retry(HTTPError, tries=2, delay=30) -def scrape(url, council_id, encoding, table): +def scrape(url, council_id, encoding, table, key=None): with urllib.request.urlopen(url) as response: @@ -28,10 +28,13 @@ # assemble record record = { - 'pk': feature['id'], 'council_id': council_id, 'geometry': json.dumps(feature), } + if key is None: + record['pk'] = feature['id'] + else: + record['pk'] = feature['properties'][key] for field in feature['properties']: if field != 'bbox': record[field] = feature['properties'][field]
78807533031cf46acb5e73b695c0e492cf2c3e20
gnsq/httpclient.py
gnsq/httpclient.py
# -*- coding: utf-8 -*- from __future__ import absolute_import import urllib3 try: import simplejson as json except ImportError: import json # pyflakes.ignore from .errors import NSQHttpError class HTTPClient(object): base_url = None __http = None @property def http(self): if self.__http is None: self.__http = urllib3.connection_from_url(url=self.base_url) return self.__http def http_request(self, method, url, **kwargs): response = self.http.request_encode_url(method, url, **kwargs) if 'application/json' in response.getheader('content-type', ''): return self._http_check_json(response) return self._http_check(response) def _http_check(self, response): if response.status != 200: raise NSQHttpError('http error <%s>' % response.status) return response.data def _http_check_json(self, response): try: data = json.loads(response.data) except ValueError: return self._http_check(response) if response.status != 200: status_txt = data.get('status_txt', 'http error') raise NSQHttpError('%s <%s>' % (status_txt, response.status)) return data['data'] def http_get(self, url, **kwargs): return self.http_request('GET', url, **kwargs) def http_post(self, url, **kwargs): return self.http_request('POST', url, **kwargs)
# -*- coding: utf-8 -*- from __future__ import absolute_import import urllib3 try: import simplejson as json except ImportError: import json # pyflakes.ignore from .decorators import cached_property from .errors import NSQHttpError class HTTPClient(object): @cached_property def http(self): return urllib3.connection_from_url(url=self.base_url) def http_request(self, method, url, **kwargs): response = self.http.request_encode_url(method, url, **kwargs) if 'application/json' in response.getheader('content-type', ''): return self._http_check_json(response) return self._http_check(response) def _http_check(self, response): if response.status != 200: raise NSQHttpError('http error <%s>' % response.status) return response.data def _http_check_json(self, response): try: data = json.loads(response.data) except ValueError: return self._http_check(response) if response.status != 200: status_txt = data.get('status_txt', 'http error') raise NSQHttpError('%s <%s>' % (status_txt, response.status)) return data['data'] def http_get(self, url, **kwargs): return self.http_request('GET', url, **kwargs) def http_post(self, url, **kwargs): return self.http_request('POST', url, **kwargs)
Use cached property decorator connection pool.
Use cached property decorator connection pool.
Python
bsd-3-clause
hiringsolved/gnsq,wtolson/gnsq,wtolson/gnsq
--- +++ @@ -8,18 +8,14 @@ except ImportError: import json # pyflakes.ignore +from .decorators import cached_property from .errors import NSQHttpError class HTTPClient(object): - base_url = None - __http = None - - @property + @cached_property def http(self): - if self.__http is None: - self.__http = urllib3.connection_from_url(url=self.base_url) - return self.__http + return urllib3.connection_from_url(url=self.base_url) def http_request(self, method, url, **kwargs): response = self.http.request_encode_url(method, url, **kwargs)
f8f1dc94bc7d48bbe9a36c7b1ffa8117ba458cba
lobster/sandbox.py
lobster/sandbox.py
from itertools import imap import os import re import sys import tarfile def dontpack(fn): return '/.' in fn or '/CVS/' in fn def package(indir, outfile): try: tarball = tarfile.open(outfile, 'w:bz2') tarball.dereference = True rtname = os.path.split(os.path.normpath(indir))[1] # package bin, etc subdirs = ['bin', 'lib', 'module', 'python'] for (path, dirs, files) in os.walk(indir): if 'data' not in dirs: continue rtpath = os.path.join(os.path.relpath(path, indir), 'data') subdirs.append(rtpath) for subdir in subdirs: inname = os.path.join(indir, subdir) if not os.path.isdir(inname): continue outname = os.path.join(rtname, subdir) print "packing", subdir tarball.add(inname, outname, exclude=dontpack) tarball.close() except: raise
from itertools import imap import os import re import sys import tarfile def dontpack(fn): res = ('/.' in fn and not '/.SCRAM' in fn) or '/CVS/' in fn if res: return True print fn return False def package(indir, outfile): try: tarball = tarfile.open(outfile, 'w:bz2') tarball.dereference = True rtname = os.path.split(os.path.normpath(indir))[1] # package bin, etc subdirs = ['.SCRAM', 'bin', 'config', 'lib', 'module', 'python'] for (path, dirs, files) in os.walk(indir): if 'data' not in dirs: continue rtpath = os.path.join(os.path.relpath(path, indir), 'data') subdirs.append(rtpath) for subdir in subdirs: inname = os.path.join(indir, subdir) if not os.path.isdir(inname): continue outname = os.path.join(rtname, subdir) print "packing", subdir tarball.add(inname, outname, exclude=dontpack) tarball.close() except: raise
Add the bits that cmsenv/cmsRun actually need.
Add the bits that cmsenv/cmsRun actually need.
Python
mit
matz-e/lobster,matz-e/lobster,matz-e/lobster
--- +++ @@ -5,7 +5,11 @@ import tarfile def dontpack(fn): - return '/.' in fn or '/CVS/' in fn + res = ('/.' in fn and not '/.SCRAM' in fn) or '/CVS/' in fn + if res: + return True + print fn + return False def package(indir, outfile): try: @@ -15,7 +19,7 @@ rtname = os.path.split(os.path.normpath(indir))[1] # package bin, etc - subdirs = ['bin', 'lib', 'module', 'python'] + subdirs = ['.SCRAM', 'bin', 'config', 'lib', 'module', 'python'] for (path, dirs, files) in os.walk(indir): if 'data' not in dirs:
b5d3b84d4d2554882632fa9b10f44bd6ffb94b40
recipyCommon/libraryversions.py
recipyCommon/libraryversions.py
import sys import pkg_resources import warnings import numbers import six def get_version(modulename): "Return a string containing the module name and the library version." version = '?' # Get the root module name (in case we have something like `recipy.open` # or `matplotlib.pyplot`) modulename = modulename.split('.')[0] if modulename in sys.modules: ws = pkg_resources.working_set package = ws.find(pkg_resources.Requirement(modulename)) version = package.version else: warnings.warn('requesting version of a module that has not been ' 'imported ({})'.format(modulename)) # If we get some kind of crazy object (ie. not a string or a number) # then ignore it if not isinstance(version, (six.string_types, numbers.Number)): version = '?' return '{} v{}'.format(modulename, version)
import sys import pkg_resources import warnings import numbers import six def get_version(modulename): "Return a string containing the module name and the library version." version = '?' # Get the root module name (in case we have something like `recipy.open` # or `matplotlib.pyplot`) modulename = modulename.split('.')[0] if modulename in sys.modules: version = _get_version_from_pkg_resources(modulename) if version == '?': version = _get_version_from_module(modulename) else: warnings.warn('requesting version of a module that has not been ' 'imported ({})'.format(modulename)) # If we get some kind of crazy object (ie. not a string or a number) # then ignore it if not isinstance(version, (six.string_types, numbers.Number)): version = '?' return '{} v{}'.format(modulename, version) def _get_version_from_pkg_resources(modulename): ws = pkg_resources.working_set package = ws.find(pkg_resources.Requirement(modulename)) try: version = package.version except AttributeError: version = '?' return version def _get_version_from_module(modulename): version = '?' mod = sys.modules[modulename] try: version = mod.__version__ except (AttributeError, TypeError, KeyError): pass try: version = mod.version except (AttributeError, TypeError, KeyError): pass try: version = mod.version.version except (AttributeError, TypeError, KeyError): pass try: version = mod.VERSION except (AttributeError, TypeError, KeyError): pass try: version = mod.version() except (AttributeError, TypeError, KeyError): pass return version
Add fallback method for finding library versions
Add fallback method for finding library versions For library 'iris' the version number cannot be found in the pkg_resources working set. It is unclear why. The version number can be found using iris.__version__, so re-added the old version finding code as a fallback.
Python
apache-2.0
recipy/recipy,recipy/recipy
--- +++ @@ -15,9 +15,9 @@ modulename = modulename.split('.')[0] if modulename in sys.modules: - ws = pkg_resources.working_set - package = ws.find(pkg_resources.Requirement(modulename)) - version = package.version + version = _get_version_from_pkg_resources(modulename) + if version == '?': + version = _get_version_from_module(modulename) else: warnings.warn('requesting version of a module that has not been ' 'imported ({})'.format(modulename)) @@ -28,3 +28,40 @@ version = '?' return '{} v{}'.format(modulename, version) + + +def _get_version_from_pkg_resources(modulename): + ws = pkg_resources.working_set + package = ws.find(pkg_resources.Requirement(modulename)) + try: + version = package.version + except AttributeError: + version = '?' + return version + + +def _get_version_from_module(modulename): + version = '?' + mod = sys.modules[modulename] + try: + version = mod.__version__ + except (AttributeError, TypeError, KeyError): + pass + try: + version = mod.version + except (AttributeError, TypeError, KeyError): + pass + try: + version = mod.version.version + except (AttributeError, TypeError, KeyError): + pass + try: + version = mod.VERSION + except (AttributeError, TypeError, KeyError): + pass + try: + version = mod.version() + except (AttributeError, TypeError, KeyError): + pass + + return version
85d12c7ce38c4349a98ccd09266bc3f08f511daa
tests/test_message_body.py
tests/test_message_body.py
from tddcommitmessage.messagebody import MessageBody def test_message_wrapped_in_quotes(): msg = MessageBody('forty-two') assert str(msg) == '"forty-two"' def test_message_with_double_quotes_is_wrapped_with_single(): msg = MessageBody('But what are "Birds"?') assert str(msg) == r"""'But what are "Birds"?'"""
from tddcommitmessage.messagebody import MessageBody def test_message_is_wrapped_in_quotes(): msg = MessageBody('forty-two') assert str(msg) == '"forty-two"' def test_message_with_double_quote_is_wrapped_with_single(): msg = MessageBody('But what are "Birds"?') assert str(msg) == r"""'But what are "Birds"?'"""
REFACTOR Improve test name's grammar and accuracy.
REFACTOR Improve test name's grammar and accuracy.
Python
mit
matatk/tdd-bdd-commit,matatk/tdd-bdd-commit
--- +++ @@ -1,9 +1,9 @@ from tddcommitmessage.messagebody import MessageBody -def test_message_wrapped_in_quotes(): +def test_message_is_wrapped_in_quotes(): msg = MessageBody('forty-two') assert str(msg) == '"forty-two"' -def test_message_with_double_quotes_is_wrapped_with_single(): +def test_message_with_double_quote_is_wrapped_with_single(): msg = MessageBody('But what are "Birds"?') assert str(msg) == r"""'But what are "Birds"?'"""
7c0c349656e6f02be0f3f0044f5d225f3688be08
bong/parse_args.py
bong/parse_args.py
from .settings import BongSettings, DEFAULT_MESSAGE from .metadata import VERSION, SUMMARY import argparse PARSER = argparse.ArgumentParser(description=SUMMARY) PARSER.add_argument('-V', '--version', action='version', version=VERSION, help='Show version') PARSER.add_argument('-s', '--short-break', action='store_const', const=5, dest='minutes', default=25, help='Time for a Pomodoro system short break') PARSER.add_argument('-l', '--long-break', action='store_const', const=15, dest='minutes', help='Time for a Pomodoro system long break') PARSER.add_argument('-p', '--pomodoro', action='store_const', const=25, dest='minutes', help='Time for a Pomodoro system single Pomodoro') PARSER.add_argument('-t', '--time', action='store', type=int, dest='minutes', help='Timer length, in minutes') PARSER.add_argument('-m', '--message', default=DEFAULT_MESSAGE, help='Message to display in the notifier') def parse_args(args): settings = PARSER.parse_args(args) return BongSettings(time=60*settings.minutes, message=settings.message)
from .settings import BongSettings, DEFAULT_MESSAGE from .metadata import VERSION, SUMMARY import argparse PARSER = argparse.ArgumentParser(description=SUMMARY) PARSER.add_argument('-V', '--version', action='version', version='%(prog)s {}'.format(VERSION), help='show version') PARSER.add_argument('-s', '--short-break', action='store_const', const=5, dest='minutes', default=25, help='time for a Pomodoro system short break') PARSER.add_argument('-l', '--long-break', action='store_const', const=15, dest='minutes', help='time for a Pomodoro system long break') PARSER.add_argument('-p', '--pomodoro', action='store_const', const=25, dest='minutes', help='time for a Pomodoro system single Pomodoro') PARSER.add_argument('-t', '--time', action='store', type=int, dest='minutes', help='timer length, in minutes') PARSER.add_argument('-m', '--message', default=DEFAULT_MESSAGE, help='message to display in the notifier') def parse_args(args): settings = PARSER.parse_args(args) return BongSettings(time=60*settings.minutes, message=settings.message)
Clean up the argument parsing
Clean up the argument parsing
Python
mit
prophile/bong
--- +++ @@ -3,21 +3,22 @@ import argparse PARSER = argparse.ArgumentParser(description=SUMMARY) -PARSER.add_argument('-V', '--version', action='version', version=VERSION, - help='Show version') +PARSER.add_argument('-V', '--version', action='version', + version='%(prog)s {}'.format(VERSION), + help='show version') PARSER.add_argument('-s', '--short-break', action='store_const', const=5, dest='minutes', default=25, - help='Time for a Pomodoro system short break') + help='time for a Pomodoro system short break') PARSER.add_argument('-l', '--long-break', action='store_const', const=15, dest='minutes', - help='Time for a Pomodoro system long break') + help='time for a Pomodoro system long break') PARSER.add_argument('-p', '--pomodoro', action='store_const', const=25, dest='minutes', - help='Time for a Pomodoro system single Pomodoro') + help='time for a Pomodoro system single Pomodoro') PARSER.add_argument('-t', '--time', action='store', type=int, dest='minutes', - help='Timer length, in minutes') + help='timer length, in minutes') PARSER.add_argument('-m', '--message', default=DEFAULT_MESSAGE, - help='Message to display in the notifier') + help='message to display in the notifier') def parse_args(args):
8caef330d6e69468833aa95f218390cc1e9088a5
scripts/gen/__init__.py
scripts/gen/__init__.py
import sys if sys.version_info[0] != 2 or not (5 <= sys.version_info[1] <= 7): sys.stderr.write('error: unsupported Python version (2.5-2.7 required)\n') sys.exit(1)
Add build script Python version check
Add build script Python version check
Python
bsd-2-clause
depp/sglib,depp/sglib
--- +++ @@ -0,0 +1,4 @@ +import sys +if sys.version_info[0] != 2 or not (5 <= sys.version_info[1] <= 7): + sys.stderr.write('error: unsupported Python version (2.5-2.7 required)\n') + sys.exit(1)
31f1c65d6505bc443fdb1d6ccd4849b175788f04
gargbot_3000/config.py
gargbot_3000/config.py
#! /usr/bin/env python3.6 # coding: utf-8 import os import datetime as dt from pathlib import Path import pytz from dotenv import load_dotenv env_path = Path('.') / '.env' load_dotenv(dotenv_path=env_path) slack_verification_token = os.environ["slack_verification_token"] slack_bot_user_token = os.environ["slack_bot_user_token"] bot_id = os.environ["bot_id"] bot_name = os.environ["bot_name"] home = Path(os.getenv("home_folder", os.getcwd())) db_host = os.environ["db_host"] db_user = os.environ["db_user"] db_passwd = os.environ["db_passwd"] db_name = os.environ["db_name"] dropbox_token = os.environ["dropbox_token"] tz = pytz.timezone(os.environ["tz"]) app_id = os.environ["app_id"] test_channel = os.environ["test_channel"] main_channel = os.environ["main_channel"] countdown_message = os.environ["countdown_message"] ongoing_message = os.environ["ongoing_message"] finished_message = os.environ["finished_message"] countdown_date = dt.datetime.fromtimestamp(int(os.environ["countdown_date"]), tz=tz) countdown_args = os.environ["countdown_args"].split(", ")
#! /usr/bin/env python3.6 # coding: utf-8 import os import datetime as dt from pathlib import Path import pytz from dotenv import load_dotenv load_dotenv() slack_verification_token = os.environ["slack_verification_token"] slack_bot_user_token = os.environ["slack_bot_user_token"] bot_id = os.environ["bot_id"] bot_name = os.environ["bot_name"] home = Path(os.getenv("home_folder", os.getcwd())) print(home) db_host = os.environ["db_host"] db_user = os.environ["db_user"] db_passwd = os.environ["db_passwd"] db_name = os.environ["db_name"] dropbox_token = os.environ["dropbox_token"] tz = pytz.timezone(os.environ["tz"]) app_id = os.environ["app_id"] test_channel = os.environ["test_channel"] main_channel = os.environ["main_channel"] countdown_message = os.environ["countdown_message"] ongoing_message = os.environ["ongoing_message"] finished_message = os.environ["finished_message"] countdown_date = dt.datetime.fromtimestamp(int(os.environ["countdown_date"]), tz=tz) countdown_args = os.environ["countdown_args"].split(", ")
Remove explicit path from load_dotenv call
Remove explicit path from load_dotenv call
Python
mit
eirki/gargbot_3000,eirki/gargbot_3000,eirki/gargbot_3000,eirki/gargbot_3000
--- +++ @@ -8,8 +8,7 @@ import pytz from dotenv import load_dotenv -env_path = Path('.') / '.env' -load_dotenv(dotenv_path=env_path) +load_dotenv() slack_verification_token = os.environ["slack_verification_token"] slack_bot_user_token = os.environ["slack_bot_user_token"] @@ -17,6 +16,7 @@ bot_name = os.environ["bot_name"] home = Path(os.getenv("home_folder", os.getcwd())) +print(home) db_host = os.environ["db_host"] db_user = os.environ["db_user"]
da3b2feab201782b3468e5e39232c366e0a3ebc0
kolibri/core/discovery/serializers.py
kolibri/core/discovery/serializers.py
from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals from rest_framework import serializers from rest_framework.serializers import ValidationError from .models import NetworkLocation from .utils.network import errors from .utils.network.client import NetworkClient class NetworkLocationSerializer(serializers.ModelSerializer): class Meta: model = NetworkLocation fields = ('id', 'available', 'base_url', 'device_name', 'instance_id', 'added', 'last_accessed', 'operating_system', 'application', 'kolibri_version') def validate_base_url(self, value): try: client = NetworkClient(address=value) except errors.NetworkError as e: raise ValidationError("Error with address {} ({})".format(value, e.__class__.__name__), code=e.code) return client.base_url
from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals from rest_framework import serializers from rest_framework.serializers import ValidationError from .models import NetworkLocation from .utils.network import errors from .utils.network.client import NetworkClient class NetworkLocationSerializer(serializers.ModelSerializer): class Meta: model = NetworkLocation fields = ('id', 'available', 'base_url', 'device_name', 'instance_id', 'added', 'last_accessed', 'operating_system', 'application', 'kolibri_version') read_only_fields = ('available', 'instance_id', 'added', 'last_accessed', 'operating_system', 'application', 'kolibri_version') def validate_base_url(self, value): try: client = NetworkClient(address=value) except errors.NetworkError as e: raise ValidationError("Error with address {} ({})".format(value, e.__class__.__name__), code=e.code) return client.base_url
Make some of the NetworkLocationSerializer fields read-only
Make some of the NetworkLocationSerializer fields read-only
Python
mit
DXCanas/kolibri,lyw07/kolibri,DXCanas/kolibri,lyw07/kolibri,indirectlylit/kolibri,lyw07/kolibri,learningequality/kolibri,lyw07/kolibri,mrpau/kolibri,DXCanas/kolibri,mrpau/kolibri,indirectlylit/kolibri,DXCanas/kolibri,learningequality/kolibri,indirectlylit/kolibri,mrpau/kolibri,learningequality/kolibri,mrpau/kolibri,learningequality/kolibri,indirectlylit/kolibri
--- +++ @@ -15,6 +15,7 @@ class Meta: model = NetworkLocation fields = ('id', 'available', 'base_url', 'device_name', 'instance_id', 'added', 'last_accessed', 'operating_system', 'application', 'kolibri_version') + read_only_fields = ('available', 'instance_id', 'added', 'last_accessed', 'operating_system', 'application', 'kolibri_version') def validate_base_url(self, value): try:
906f35428e36f2ece2fdb1c7fb150b979cf0457b
pyxform/tests_v1/test_whitespace.py
pyxform/tests_v1/test_whitespace.py
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase class WhitespaceTest(PyxformTestCase): def test_over_trim(self): self.assertPyxformXform( name='issue96', md=""" | survey | | | | | | type | label | name | | | text | Ignored | var | | | note | ${var} text | label | """, xml__contains=[ '<label><output value=" /issue96/var "/> text </label>', ]) def empty_label_squashing(self): self.assertPyxformXform( name='empty_label', debug=True, ss_structure={'survey': [ { 'type':'note', 'label':'', 'name':'label' } ] }, xml__contains=[ '<label></label>', ])
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase class WhitespaceTest(PyxformTestCase): def test_over_trim(self): self.assertPyxformXform( name='issue96', md=""" | survey | | | | | | type | label | name | | | text | Ignored | var | | | note | ${var} text | label | """, xml__contains=[ '<label><output value=" /issue96/var "/> text </label>', ]) def test_empty_label_squashing(self): self.assertPyxformXform( name='empty_label', debug=True, ss_structure={'survey': [ { 'type':'note', 'label':'', 'name':'label' } ] }, xml__contains=[ '<label></label>', ])
Rename test so it actually runs
Rename test so it actually runs
Python
bsd-2-clause
XLSForm/pyxform,XLSForm/pyxform
--- +++ @@ -14,7 +14,7 @@ '<label><output value=" /issue96/var "/> text </label>', ]) - def empty_label_squashing(self): + def test_empty_label_squashing(self): self.assertPyxformXform( name='empty_label', debug=True,
9c9127446a9c362910ea093d5a317ad0fc23e3d7
opencivicdata/admin/division.py
opencivicdata/admin/division.py
from django.contrib import admin from opencivicdata.models import division as models @admin.register(models.Division) class DivisionAdmin(admin.ModelAdmin): list_display = ('display_name', 'id') search_fields = list_display
from django.contrib import admin from opencivicdata.models import division as models @admin.register(models.Division) class DivisionAdmin(admin.ModelAdmin): list_display = ('name', 'id') search_fields = list_display
Update to reflect change of display_name --> name
Update to reflect change of display_name --> name
Python
bsd-3-clause
opencivicdata/python-opencivicdata-django,opencivicdata/python-opencivicdata-django,rshorey/python-opencivicdata-django,opencivicdata/python-opencivicdata,influence-usa/python-opencivicdata-django,rshorey/python-opencivicdata-django,mileswwatkins/python-opencivicdata-django,mileswwatkins/python-opencivicdata-django,opencivicdata/python-opencivicdata,influence-usa/python-opencivicdata-django,opencivicdata/python-opencivicdata-divisions,opencivicdata/python-opencivicdata-django
--- +++ @@ -4,6 +4,6 @@ @admin.register(models.Division) class DivisionAdmin(admin.ModelAdmin): - list_display = ('display_name', 'id') + list_display = ('name', 'id') search_fields = list_display
6891981cd32a9dbf71346f95256f8447726672df
packages/pixman.py
packages/pixman.py
CairoGraphicsPackage ('pixman', '0.30.0')
class PixmanPackage (CairoGraphicsPackage): def __init__ (self): CairoGraphicsPackage.__init__ (self, 'pixman', '0.30.0') #This package would like to be built with fat binaries if Package.profile.m64 == True: self.fat_build = True def arch_build (self, arch): if arch == 'darwin-fat': #multi-arch build pass self.local_ld_flags = ['-arch i386' , '-arch x86_64'] self.local_gcc_flags = ['-arch i386' , '-arch x86_64', '-Os'] self.local_configure_flags = ['--disable-dependency-tracking'] Package.arch_build (self, arch) PixmanPackage ()
Enable fat binaries on pitman package
Enable fat binaries on pitman package
Python
mit
mono/bockbuild,BansheeMediaPlayer/bockbuild,BansheeMediaPlayer/bockbuild,BansheeMediaPlayer/bockbuild,mono/bockbuild
--- +++ @@ -1 +1,17 @@ -CairoGraphicsPackage ('pixman', '0.30.0') +class PixmanPackage (CairoGraphicsPackage): + def __init__ (self): + CairoGraphicsPackage.__init__ (self, 'pixman', '0.30.0') + + #This package would like to be built with fat binaries + if Package.profile.m64 == True: + self.fat_build = True + + def arch_build (self, arch): + if arch == 'darwin-fat': #multi-arch build pass + self.local_ld_flags = ['-arch i386' , '-arch x86_64'] + self.local_gcc_flags = ['-arch i386' , '-arch x86_64', '-Os'] + self.local_configure_flags = ['--disable-dependency-tracking'] + + Package.arch_build (self, arch) + +PixmanPackage ()
e81a54a5df2aaafae230084fe3b4d59c5b4f61cc
parallel/runner.py
parallel/runner.py
import Queue from parallel import worker class Runner(object): def __init__(self, num_workers=4): self.in_queue = Queue.Queue() self.out_queue = Queue.Queue() self.num_workers = num_workers self.workers = None self._start_workers() def _start_workers(self): self.workers = [worker.Worker(self.in_queue, self.out_queue) for i in range(self.num_workers)] def add_task(self, task, *args, **kwargs): self.in_queue.put((task, args, kwargs)) def results(self): self.in_queue.join() return self.out_queue.queue
import Queue from parallel import config from parallel import worker class Runner(object): def __init__(self, num_workers=config.NUM_WORKERS): self.in_queue = Queue.Queue() self.out_queue = Queue.Queue() self.num_workers = num_workers self.workers = None self._start_workers() def _start_workers(self): self.workers = [worker.Worker(self.in_queue, self.out_queue) for i in range(self.num_workers)] def add_task(self, task, *args, **kwargs): self.in_queue.put((task, args, kwargs)) def results(self): self.in_queue.join() return self.out_queue.queue
Use default number of workers from config
Use default number of workers from config config does a better job at figuring out the optimal number of workers, so it will be used instead.
Python
mit
andersonvom/mparallel
--- +++ @@ -1,10 +1,11 @@ import Queue +from parallel import config from parallel import worker class Runner(object): - def __init__(self, num_workers=4): + def __init__(self, num_workers=config.NUM_WORKERS): self.in_queue = Queue.Queue() self.out_queue = Queue.Queue() self.num_workers = num_workers
d5cd8d679b08aae6d91cad003fdd71660a85c39b
opps/images/forms.py
opps/images/forms.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from django import forms from .models import Image from .widgets import MultipleUpload, CropExample from redactor.widgets import RedactorEditor class ImageModelForm(forms.ModelForm): image = forms.FileField(required=True, widget=MultipleUpload()) crop_example = forms.CharField(required=False, widget=CropExample()) crop_x1 = forms.CharField(required=False, widget=forms.HiddenInput()) crop_x2 = forms.CharField(required=False, widget=forms.HiddenInput()) crop_y1 = forms.CharField(required=False, widget=forms.HiddenInput()) crop_y2 = forms.CharField(required=False, widget=forms.HiddenInput()) class Meta: model = Image widgets = {'description': RedactorEditor()} def more_image(self): more_image = self.files.getlist('image')[:] if len(more_image) >= 2: del more_image[-1] return more_image return []
#!/usr/bin/env python # -*- coding: utf-8 -*- from django import forms from .models import Image from .widgets import MultipleUpload, CropExample from redactor.widgets import RedactorEditor class ImageModelForm(forms.ModelForm): archive = forms.FileField(required=True, widget=MultipleUpload()) crop_example = forms.CharField(required=False, widget=CropExample()) crop_x1 = forms.CharField(required=False, widget=forms.HiddenInput()) crop_x2 = forms.CharField(required=False, widget=forms.HiddenInput()) crop_y1 = forms.CharField(required=False, widget=forms.HiddenInput()) crop_y2 = forms.CharField(required=False, widget=forms.HiddenInput()) class Meta: model = Image widgets = {'description': RedactorEditor()} def more_image(self): more_image = self.files.getlist('image')[:] if len(more_image) >= 2: del more_image[-1] return more_image return []
Change form field image to archive
Change form field image to archive
Python
mit
williamroot/opps,opps/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,opps/opps,opps/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,jeanmask/opps,opps/opps,jeanmask/opps
--- +++ @@ -9,7 +9,7 @@ class ImageModelForm(forms.ModelForm): - image = forms.FileField(required=True, widget=MultipleUpload()) + archive = forms.FileField(required=True, widget=MultipleUpload()) crop_example = forms.CharField(required=False, widget=CropExample()) crop_x1 = forms.CharField(required=False, widget=forms.HiddenInput()) crop_x2 = forms.CharField(required=False, widget=forms.HiddenInput())
ddaf1a2b6744c9012546d6258b0378ab1b96d658
zerver/lib/i18n.py
zerver/lib/i18n.py
# -*- coding: utf-8 -*- from django.conf import settings from django.utils import translation from django.utils.translation import ugettext as _ from six import text_type from typing import Any import os import ujson def with_language(string, language): # type: (text_type, text_type) -> text_type old_language = translation.get_language() translation.activate(language) result = _(string) translation.activate(old_language) return result def get_language_list(): # type: () -> List[Dict[str, Any]] path = os.path.join(settings.STATIC_ROOT, 'locale', 'language_options.json') with open(path, 'r') as reader: languages = ujson.load(reader) lang_list = [] for lang_info in languages['languages']: name = lang_info['name'] lang_info['name'] = with_language(name, lang_info['code']) if 'percent_translated' not in lang_info: lang_info['percent_translated'] = 'N/A' lang_list.append(lang_info) return sorted(lang_list, key=lambda i: i['name']) def get_available_language_codes(): # type: () -> List[text_type] language_list = get_language_list() codes = [language['code'] for language in language_list] return codes
# -*- coding: utf-8 -*- from __future__ import absolute_import from django.conf import settings from django.utils import translation from django.utils.translation import ugettext as _ from six import text_type from typing import Any import os import ujson def with_language(string, language): # type: (text_type, text_type) -> text_type old_language = translation.get_language() translation.activate(language) result = _(string) translation.activate(old_language) return result def get_language_list(): # type: () -> List[Dict[str, Any]] path = os.path.join(settings.STATIC_ROOT, 'locale', 'language_options.json') with open(path, 'r') as reader: languages = ujson.load(reader) lang_list = [] for lang_info in languages['languages']: name = lang_info['name'] lang_info['name'] = with_language(name, lang_info['code']) lang_list.append(lang_info) return sorted(lang_list, key=lambda i: i['name']) def get_available_language_codes(): # type: () -> List[text_type] language_list = get_language_list() codes = [language['code'] for language in language_list] return codes
Return unformatted list from get_language_list.
Return unformatted list from get_language_list.
Python
apache-2.0
grave-w-grave/zulip,amanharitsh123/zulip,kou/zulip,vikas-parashar/zulip,JPJPJPOPOP/zulip,ahmadassaf/zulip,andersk/zulip,vikas-parashar/zulip,andersk/zulip,hackerkid/zulip,kou/zulip,eeshangarg/zulip,ahmadassaf/zulip,dhcrzf/zulip,showell/zulip,punchagan/zulip,vabs22/zulip,peguin40/zulip,JPJPJPOPOP/zulip,shubhamdhama/zulip,joyhchen/zulip,arpith/zulip,amanharitsh123/zulip,dattatreya303/zulip,mahim97/zulip,rishig/zulip,ahmadassaf/zulip,umkay/zulip,andersk/zulip,jainayush975/zulip,verma-varsha/zulip,joyhchen/zulip,jphilipsen05/zulip,christi3k/zulip,sup95/zulip,blaze225/zulip,umkay/zulip,joyhchen/zulip,paxapy/zulip,showell/zulip,j831/zulip,synicalsyntax/zulip,cosmicAsymmetry/zulip,arpith/zulip,jainayush975/zulip,susansls/zulip,rht/zulip,showell/zulip,KingxBanana/zulip,AZtheAsian/zulip,mahim97/zulip,jphilipsen05/zulip,SmartPeople/zulip,blaze225/zulip,hackerkid/zulip,brockwhittaker/zulip,Juanvulcano/zulip,samatdav/zulip,isht3/zulip,punchagan/zulip,PhilSk/zulip,joyhchen/zulip,jrowan/zulip,vikas-parashar/zulip,susansls/zulip,punchagan/zulip,dawran6/zulip,Jianchun1/zulip,hackerkid/zulip,calvinleenyc/zulip,brockwhittaker/zulip,dattatreya303/zulip,Juanvulcano/zulip,andersk/zulip,susansls/zulip,shubhamdhama/zulip,jainayush975/zulip,zacps/zulip,Juanvulcano/zulip,showell/zulip,jrowan/zulip,rishig/zulip,sonali0901/zulip,sup95/zulip,niftynei/zulip,jainayush975/zulip,Juanvulcano/zulip,verma-varsha/zulip,jackrzhang/zulip,KingxBanana/zulip,jackrzhang/zulip,vikas-parashar/zulip,umkay/zulip,j831/zulip,susansls/zulip,vaidap/zulip,rishig/zulip,peguin40/zulip,vikas-parashar/zulip,zulip/zulip,calvinleenyc/zulip,ryanbackman/zulip,showell/zulip,TigorC/zulip,AZtheAsian/zulip,brainwane/zulip,calvinleenyc/zulip,blaze225/zulip,cosmicAsymmetry/zulip,dawran6/zulip,synicalsyntax/zulip,Diptanshu8/zulip,samatdav/zulip,arpith/zulip,joyhchen/zulip,dawran6/zulip,Galexrt/zulip,paxapy/zulip,sup95/zulip,christi3k/zulip,jphilipsen05/zulip,jainayush975/zulip,punchagan/zulip,ahmadassaf/zulip,souravbadami/zulip,Ti
gorC/zulip,Jianchun1/zulip,isht3/zulip,AZtheAsian/zulip,sonali0901/zulip,arpith/zulip,JPJPJPOPOP/zulip,souravbadami/zulip,shubhamdhama/zulip,verma-varsha/zulip,peguin40/zulip,grave-w-grave/zulip,niftynei/zulip,JPJPJPOPOP/zulip,vabs22/zulip,jackrzhang/zulip,isht3/zulip,sup95/zulip,jrowan/zulip,jackrzhang/zulip,paxapy/zulip,paxapy/zulip,Diptanshu8/zulip,kou/zulip,souravbadami/zulip,jphilipsen05/zulip,amanharitsh123/zulip,AZtheAsian/zulip,dattatreya303/zulip,samatdav/zulip,eeshangarg/zulip,rht/zulip,SmartPeople/zulip,hackerkid/zulip,sharmaeklavya2/zulip,brockwhittaker/zulip,krtkmj/zulip,cosmicAsymmetry/zulip,jrowan/zulip,tommyip/zulip,cosmicAsymmetry/zulip,christi3k/zulip,dhcrzf/zulip,dhcrzf/zulip,tommyip/zulip,vaidap/zulip,sharmaeklavya2/zulip,andersk/zulip,Diptanshu8/zulip,blaze225/zulip,krtkmj/zulip,ryanbackman/zulip,shubhamdhama/zulip,blaze225/zulip,rht/zulip,rishig/zulip,timabbott/zulip,verma-varsha/zulip,grave-w-grave/zulip,umkay/zulip,aakash-cr7/zulip,jackrzhang/zulip,sharmaeklavya2/zulip,cosmicAsymmetry/zulip,blaze225/zulip,dhcrzf/zulip,SmartPeople/zulip,zulip/zulip,dhcrzf/zulip,krtkmj/zulip,brainwane/zulip,souravbadami/zulip,eeshangarg/zulip,punchagan/zulip,krtkmj/zulip,calvinleenyc/zulip,Jianchun1/zulip,aakash-cr7/zulip,brockwhittaker/zulip,jackrzhang/zulip,TigorC/zulip,niftynei/zulip,zulip/zulip,umkay/zulip,dawran6/zulip,punchagan/zulip,j831/zulip,synicalsyntax/zulip,amyliu345/zulip,christi3k/zulip,j831/zulip,brockwhittaker/zulip,peguin40/zulip,kou/zulip,PhilSk/zulip,peguin40/zulip,amyliu345/zulip,brainwane/zulip,timabbott/zulip,isht3/zulip,souravbadami/zulip,brainwane/zulip,amyliu345/zulip,Galexrt/zulip,ryanbackman/zulip,rht/zulip,grave-w-grave/zulip,Jianchun1/zulip,reyha/zulip,rishig/zulip,PhilSk/zulip,synicalsyntax/zulip,samatdav/zulip,JPJPJPOPOP/zulip,Jianchun1/zulip,sonali0901/zulip,ahmadassaf/zulip,eeshangarg/zulip,SmartPeople/zulip,susansls/zulip,amyliu345/zulip,zulip/zulip,reyha/zulip,rishig/zulip,synicalsyntax/zulip,jphilipsen05/zulip,mohsenSy/zulip
,jainayush975/zulip,timabbott/zulip,mohsenSy/zulip,brainwane/zulip,tommyip/zulip,KingxBanana/zulip,zacps/zulip,Galexrt/zulip,joyhchen/zulip,mahim97/zulip,KingxBanana/zulip,synicalsyntax/zulip,zulip/zulip,mahim97/zulip,krtkmj/zulip,reyha/zulip,AZtheAsian/zulip,aakash-cr7/zulip,jrowan/zulip,calvinleenyc/zulip,tommyip/zulip,samatdav/zulip,rht/zulip,zulip/zulip,christi3k/zulip,dawran6/zulip,isht3/zulip,vabs22/zulip,souravbadami/zulip,Diptanshu8/zulip,TigorC/zulip,sonali0901/zulip,showell/zulip,eeshangarg/zulip,amanharitsh123/zulip,amanharitsh123/zulip,mahim97/zulip,dhcrzf/zulip,andersk/zulip,mohsenSy/zulip,niftynei/zulip,cosmicAsymmetry/zulip,mohsenSy/zulip,verma-varsha/zulip,ahmadassaf/zulip,timabbott/zulip,hackerkid/zulip,PhilSk/zulip,zacps/zulip,hackerkid/zulip,rht/zulip,Galexrt/zulip,tommyip/zulip,jackrzhang/zulip,brainwane/zulip,rishig/zulip,kou/zulip,krtkmj/zulip,Galexrt/zulip,dattatreya303/zulip,timabbott/zulip,ryanbackman/zulip,sup95/zulip,j831/zulip,zulip/zulip,kou/zulip,TigorC/zulip,timabbott/zulip,j831/zulip,amanharitsh123/zulip,KingxBanana/zulip,timabbott/zulip,vabs22/zulip,shubhamdhama/zulip,shubhamdhama/zulip,vaidap/zulip,AZtheAsian/zulip,verma-varsha/zulip,dattatreya303/zulip,mahim97/zulip,grave-w-grave/zulip,tommyip/zulip,zacps/zulip,SmartPeople/zulip,JPJPJPOPOP/zulip,mohsenSy/zulip,sonali0901/zulip,eeshangarg/zulip,brainwane/zulip,reyha/zulip,Jianchun1/zulip,sonali0901/zulip,eeshangarg/zulip,sharmaeklavya2/zulip,vaidap/zulip,peguin40/zulip,KingxBanana/zulip,andersk/zulip,dawran6/zulip,brockwhittaker/zulip,umkay/zulip,calvinleenyc/zulip,Juanvulcano/zulip,reyha/zulip,ahmadassaf/zulip,grave-w-grave/zulip,sharmaeklavya2/zulip,synicalsyntax/zulip,arpith/zulip,samatdav/zulip,Diptanshu8/zulip,punchagan/zulip,zacps/zulip,vabs22/zulip,ryanbackman/zulip,mohsenSy/zulip,dhcrzf/zulip,niftynei/zulip,shubhamdhama/zulip,amyliu345/zulip,jphilipsen05/zulip,vikas-parashar/zulip,SmartPeople/zulip,sup95/zulip,susansls/zulip,TigorC/zulip,kou/zulip,showell/zulip,arpith/zulip,
PhilSk/zulip,tommyip/zulip,christi3k/zulip,jrowan/zulip,niftynei/zulip,paxapy/zulip,paxapy/zulip,Galexrt/zulip,hackerkid/zulip,zacps/zulip,krtkmj/zulip,vabs22/zulip,reyha/zulip,Juanvulcano/zulip,aakash-cr7/zulip,vaidap/zulip,Galexrt/zulip,sharmaeklavya2/zulip,vaidap/zulip,PhilSk/zulip,aakash-cr7/zulip,aakash-cr7/zulip,dattatreya303/zulip,umkay/zulip,ryanbackman/zulip,amyliu345/zulip,rht/zulip,isht3/zulip,Diptanshu8/zulip
--- +++ @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*- +from __future__ import absolute_import + from django.conf import settings from django.utils import translation from django.utils.translation import ugettext as _ @@ -26,8 +28,6 @@ for lang_info in languages['languages']: name = lang_info['name'] lang_info['name'] = with_language(name, lang_info['code']) - if 'percent_translated' not in lang_info: - lang_info['percent_translated'] = 'N/A' lang_list.append(lang_info) return sorted(lang_list, key=lambda i: i['name']) @@ -37,3 +37,4 @@ language_list = get_language_list() codes = [language['code'] for language in language_list] return codes +
308f1cd155e2db37fe5ff03e158e3d9fc32d6885
lehrex/__init__.py
lehrex/__init__.py
# -*- coding: utf-8 -*- """Python package to support the research during the annual Lehrexkursion at Universität Hamburg. """ from os.path import dirname, join from . import csv from . import math from . import plots from . import utils __version__ = open(join(dirname(__file__), 'VERSION')).read().strip() __all__ = [ 'csv', 'math', 'plots', 'utils', ]
# -*- coding: utf-8 -*- """Python package to support the research during the annual Lehrexkursion at Universität Hamburg. """ from os.path import dirname, join from . import csv from . import math from . import plots from . import utils __version__ = open(join(dirname(__file__), 'VERSION')).read().strip() __all__ = [ 'csv', 'math', 'plots', 'utils', ] class Test: def foo(self): return f"{self}" Test()
Introduce Python 3.5 syntax error.
Introduce Python 3.5 syntax error.
Python
mit
lkluft/lehrex
--- +++ @@ -18,3 +18,11 @@ 'plots', 'utils', ] + + +class Test: + def foo(self): + return f"{self}" + + +Test()
b2657fd84c0d8fd4e1188a649bb2595651b83adb
kazoo/handlers/util.py
kazoo/handlers/util.py
"""Handler utilities for getting non-monkey patched std lib stuff Allows one to get an unpatched thread module, with a thread decorator that uses the unpatching OS thread. """ try: from gevent import monkey [start_new_thread] = monkey.get_original('thread', ['start_new_thread']) except ImportError: from thread import start_new_thread def thread(func): """Thread decorator Takes a function and spawns it as a daemon thread using the real OS thread regardless of monkey patching. """ start_new_thread(func, ())
"""Handler utilities for getting non-monkey patched std lib stuff Allows one to get an unpatched thread module, with a thread decorator that uses the unpatching OS thread. """ from __future__ import absolute_import try: from gevent._threading import start_new_thread except ImportError: from thread import start_new_thread def thread(func): """Thread decorator Takes a function and spawns it as a daemon thread using the real OS thread regardless of monkey patching. """ start_new_thread(func, ())
Make sure we use proper gevent with absolute import, pull the start_new_thread directly out.
Make sure we use proper gevent with absolute import, pull the start_new_thread directly out.
Python
apache-2.0
harlowja/kazoo,pombredanne/kazoo,rockerbox/kazoo,AlexanderplUs/kazoo,harlowja/kazoo,pombredanne/kazoo,rgs1/kazoo,Asana/kazoo,jacksontj/kazoo,kormat/kazoo,rackerlabs/kazoo,bsanders/kazoo,tempbottle/kazoo,python-zk/kazoo,rockerbox/kazoo,jacksontj/kazoo,rackerlabs/kazoo,python-zk/kazoo,rgs1/kazoo,tempbottle/kazoo,AlexanderplUs/kazoo,max0d41/kazoo,kormat/kazoo,max0d41/kazoo,bsanders/kazoo
--- +++ @@ -4,9 +4,10 @@ decorator that uses the unpatching OS thread. """ +from __future__ import absolute_import + try: - from gevent import monkey - [start_new_thread] = monkey.get_original('thread', ['start_new_thread']) + from gevent._threading import start_new_thread except ImportError: from thread import start_new_thread
08f36c5ff17c2bdfb98bdbcaeaaaaf30a1b71b93
raxcli/__init__.py
raxcli/__init__.py
# Copyright 2012 Rackspace # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version__ = '0.0.1'
# Copyright 2013 Rackspace # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version__ = '0.0.1'
Update year in the license header.
Update year in the license header.
Python
apache-2.0
racker/python-raxcli
--- +++ @@ -1,4 +1,4 @@ -# Copyright 2012 Rackspace +# Copyright 2013 Rackspace # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with
534770cf0cc25c7aa0350570bc7a39d6239c7e05
src/data/extractor/parsehtml.py
src/data/extractor/parsehtml.py
from urllib import urlopen from bs4 import BeautifulSoup from selenium import webdriver def getBeautifulSoupObject(link): html = urlopen(link) return BeautifulSoup(html) def getDynamicContent(link): driver = webdriver.PhantomJS() driver.get(link) link = "https://www.dnainfo.com/chicago/2017-chicago-murders" getDynamicContent(link)
import requests from bs4 import BeautifulSoup from selenium import webdriver import logging logging.basicConfig(filename='scrape.log',level=20) logging.info('Initialized logger') def getBeautifulSoupObject(link): html = requests.get(link) return BeautifulSoup(html) def getDynamicContent(link): try: driver = webdriver.PhantomJS() logging.info("Webdriver initiated {0}".format(driver)) except Exception as exp: logging.fatal("Webdriver exception: {exception}".format(exception=exp)) raise exp driver.get(link) #REMOVELATER: test if we are able to get the title print driver.title link = "https://www.dnainfo.com/chicago/2017-chicago-murders" getDynamicContent(link)
Add logging, requests and handle exceptions
Add logging, requests and handle exceptions
Python
mit
craftbase/chicago-murders
--- +++ @@ -1,15 +1,25 @@ -from urllib import urlopen +import requests from bs4 import BeautifulSoup from selenium import webdriver +import logging +logging.basicConfig(filename='scrape.log',level=20) +logging.info('Initialized logger') + def getBeautifulSoupObject(link): - html = urlopen(link) + html = requests.get(link) return BeautifulSoup(html) def getDynamicContent(link): - driver = webdriver.PhantomJS() + try: + driver = webdriver.PhantomJS() + logging.info("Webdriver initiated {0}".format(driver)) + except Exception as exp: + logging.fatal("Webdriver exception: {exception}".format(exception=exp)) + raise exp driver.get(link) - + #REMOVELATER: test if we are able to get the title + print driver.title link = "https://www.dnainfo.com/chicago/2017-chicago-murders"
55378f71e8553eaad606433ae9e871983e99bd26
pastalog/pastalog/__init__.py
pastalog/pastalog/__init__.py
''' The pastalog Log class, which simply sends a POST request to a the server. ''' import requests class Log(object): def __init__(self, url, model_name): self.url = url self.model_name = model_name def post(self, series_name, value, step): payload = {"modelName": self.model_name, "pointType": series_name, "pointValue": value, "globalStep": step} r = requests.post(self.url, json=payload) return r.raise_for_status()
''' The pastalog Log class, which simply sends a POST request to a the server. ''' import requests import os class Log(object): def __init__(self, url, model_name): self.url = os.path.join(url, 'data') self.model_name = model_name def post(self, series_name, value, step): payload = {"modelName": self.model_name, "pointType": series_name, "pointValue": value, "globalStep": step} r = requests.post(self.url, json=payload) return r.raise_for_status()
Update to post to correct endpoint
Update to post to correct endpoint
Python
mit
rewonc/pastalog,rewonc/pastalog,rewonc/pastalog
--- +++ @@ -2,11 +2,12 @@ The pastalog Log class, which simply sends a POST request to a the server. ''' import requests +import os class Log(object): def __init__(self, url, model_name): - self.url = url + self.url = os.path.join(url, 'data') self.model_name = model_name def post(self, series_name, value, step):
8e3d77675d65740776fc8f0fc93bb311cf62c632
books/models.py
books/models.py
from django.db import models from django.utils.translation import ugettext as _ from egielda import settings class BookType(models.Model): publisher = models.CharField(max_length=150) title = models.CharField(max_length=150) price = models.IntegerField() def price_string(self): return "%(price).2f%(currency)s" % {'price': (self.price / 100), 'currency': getattr(settings, 'CURRENCY', 'USD')} def __str__(self): return _( "%(publisher)s %(title)s, Edition %(edition)d %(publication_year)d" % { 'publisher': self.publisher, 'title': self.title, 'edition': self.edition, 'publication_year': self.publication_year})
from django.db import models from django.utils.translation import ugettext as _ from egielda import settings class BookType(models.Model): publisher = models.CharField(max_length=150) title = models.CharField(max_length=150) price = models.IntegerField() def price_string(self): return "%(price).2f%(currency)s" % {'price': (self.price / 100), 'currency': getattr(settings, 'CURRENCY', 'USD')} def __str__(self): return _( "%(publisher)s %(title)s" % { 'publisher': self.publisher, 'title': self.title})
Fix __str__ method for BookType Model
Fix __str__ method for BookType Model
Python
agpl-3.0
m4tx/egielda,m4tx/egielda,m4tx/egielda
--- +++ @@ -15,7 +15,6 @@ def __str__(self): return _( - "%(publisher)s %(title)s, Edition %(edition)d %(publication_year)d" % { + "%(publisher)s %(title)s" % { 'publisher': self.publisher, - 'title': self.title, 'edition': self.edition, - 'publication_year': self.publication_year}) + 'title': self.title})
580b5c48fc42f09459bcf63c0e2e239a550adb41
calexicon/calendars/tests/calendar_testing.py
calexicon/calendars/tests/calendar_testing.py
import sys if sys.hexversion < 0x02070000: import unittest2 as unittest else: import unittest from hypothesis import given, example from hypothesis.extra.datetime import datetimes from calexicon.calendars import ProlepticGregorianCalendar, JulianCalendar from calexicon.dates import DateWithCalendar, InvalidDate class CalendarTest(unittest.TestCase): calendar = None def check_valid_date(self, year, month, day): d = self.calendar.date(year, month, day) self.assertIsNotNone(d) self.assertEqual(d.calendar, self.calendar.__class__) def check_invalid_date(self, year, month, day): self.assertRaises(InvalidDate, lambda : self.calendar.date(year, month, day)) @given(datetimes(timezones=[])) def test_date_strings(self, dt): if self.calendar is None: return d = dt.date() dc = self.calendar.from_date(d) self.assertIsNotNone(dc.__str__()) @given(datetimes(timezones=[])) def test_date_strings(self, dt): if self.calendar is None: return d = dt.date() dc = self.calendar.from_date(d) self.assertIsNotNone(dc.native_representation()) def display_string_comparison(self, year, month, day, expected): d = self.calendar.date(year, month, day) self.assertEqual(d.__str__(), expected)
import sys if sys.hexversion < 0x02070000: import unittest2 as unittest else: import unittest from hypothesis import given, example from hypothesis.extra.datetime import datetimes from calexicon.calendars import ProlepticGregorianCalendar, JulianCalendar from calexicon.dates import DateWithCalendar, InvalidDate class CalendarTest(unittest.TestCase): calendar = None def check_valid_date(self, year, month, day): d = self.calendar.date(year, month, day) self.assertIsNotNone(d) self.assertEqual(d.calendar, self.calendar.__class__) def check_invalid_date(self, year, month, day): self.assertRaises(InvalidDate, lambda : self.calendar.date(year, month, day)) @given(datetimes(timezones=[])) def test_date_strings(self, dt): if self.calendar is None: return d = dt.date() dc = self.calendar.from_date(d) self.assertIsNotNone(dc.__str__()) @given(datetimes(timezones=[])) def test_native_representation(self, dt): if self.calendar is None: return d = dt.date() dc = self.calendar.from_date(d) self.assertIsNotNone(dc.native_representation()) def display_string_comparison(self, year, month, day, expected): d = self.calendar.date(year, month, day) self.assertEqual(d.__str__(), expected)
Change name of test to avoid collision
Change name of test to avoid collision This was spotted by coveralls.
Python
apache-2.0
jwg4/calexicon,jwg4/qual
--- +++ @@ -31,7 +31,7 @@ self.assertIsNotNone(dc.__str__()) @given(datetimes(timezones=[])) - def test_date_strings(self, dt): + def test_native_representation(self, dt): if self.calendar is None: return d = dt.date()
520a23caae3bd4e1db60021025b5dc3f573b0873
plugins/mediawiki.py
plugins/mediawiki.py
import ConfigParser import datetime from wikitools import wiki from wikitools import category from plugin import Plugin class MediaWiki(Plugin): def __init__(self, config=None): if config: try: self.site = wiki.Wiki(config.get('MediaWiki', 'wikiapiurl')) self.site.login(config.get('MediaWiki', 'user'), config.get('MediaWiki', 'password')) except ConfigParser.NoSectionError: print "MediaWiki Error: Please configure the [MediaWiki] section in your config.ini" except ConfigParser.NoOptionError: print "MediaWiki Error: Mediawiki Url or login credentials are not configured in your config.ini" super(MediaWiki, self).__init__() def wikiupdate(self, title, url): now = datetime.datetime.now() date = now.strftime("%Y-%m-%d %H:%M") cat = category.Category(self.site, "Linklist") for article in cat.getAllMembersGen(namespaces=[0]): print article.edit(appendtext="\n* {title} - {url} ({date}) \n".format(title=title, url=url, date=date))
import ConfigParser import datetime from wikitools import wiki from wikitools import category from plugin import Plugin class MediaWiki(Plugin): def __init__(self, config=None): if config: try: self.site = wiki.Wiki(config.get('MediaWiki', 'wikiapiurl')) self.site.login(config.get('MediaWiki', 'user'), config.get('MediaWiki', 'password')) except ConfigParser.NoSectionError: print "MediaWiki Error: Please configure the [MediaWiki] section in your config.ini" except ConfigParser.NoOptionError: print "MediaWiki Error: Mediawiki Url or login credentials are not configured in your config.ini" super(MediaWiki, self).__init__() def wikiupdate(self, title, url): now = datetime.datetime.now() date = now.strftime("%Y-%m-%d %H:%M") cat = category.Category(self.site, "Linklist") for article in cat.getAllMembersGen(namespaces=[0]): print article.edit(appendtext="\n* {title} - {url} ({date}) \n".format(title=title, url=url, date=date), bot=True)
Mark wiki edits as bot edit
Mark wiki edits as bot edit
Python
mit
k4cg/Rezeptionistin
--- +++ @@ -23,4 +23,4 @@ date = now.strftime("%Y-%m-%d %H:%M") cat = category.Category(self.site, "Linklist") for article in cat.getAllMembersGen(namespaces=[0]): - print article.edit(appendtext="\n* {title} - {url} ({date}) \n".format(title=title, url=url, date=date)) + print article.edit(appendtext="\n* {title} - {url} ({date}) \n".format(title=title, url=url, date=date), bot=True)
6a9165a55d3238a40e368a348c6d7a7a7d133f34
dockci/api/base.py
dockci/api/base.py
""" Base classes and data for building the API """ from flask_restful import reqparse, Resource from .util import clean_attrs, set_attrs from dockci.server import DB AUTH_FORM_LOCATIONS = ('form', 'headers', 'json') class BaseDetailResource(Resource): """ Base resource for details API endpoints """ def handle_write(self, model, parser): # pylint:disable=no-self-use """ Parse request args, set attrs on the model, and commit """ args = parser.parse_args(strict=True) args = clean_attrs(args) set_attrs(model, args) DB.session.add(model) DB.session.commit() return model class BaseRequestParser(reqparse.RequestParser): """ Request parser that should be used for all DockCI API endpoints. Adds ``username``, ``password``, and ``api_key`` fields for login """ def __init__(self, *args, **kwargs): super(BaseRequestParser, self).__init__(*args, **kwargs) self.add_argument('username', location=AUTH_FORM_LOCATIONS) self.add_argument('password', location=AUTH_FORM_LOCATIONS) self.add_argument('api_key', location=('args',) + AUTH_FORM_LOCATIONS)
""" Base classes and data for building the API """ from flask_restful import reqparse, Resource from .util import clean_attrs, set_attrs from dockci.server import DB AUTH_FORM_LOCATIONS = ('form', 'headers', 'json') class BaseDetailResource(Resource): """ Base resource for details API endpoints """ # pylint:disable=no-self-use def handle_write(self, model, parser=None, data=None): """ Parse request args, set attrs on the model, and commit """ assert parser is not None or data is not None, ( "Must give either parser, or data") if data is None: args = parser.parse_args(strict=True) args = clean_attrs(args) else: args = data set_attrs(model, args) DB.session.add(model) DB.session.commit() return model class BaseRequestParser(reqparse.RequestParser): """ Request parser that should be used for all DockCI API endpoints. Adds ``username``, ``password``, and ``api_key`` fields for login """ def __init__(self, *args, **kwargs): super(BaseRequestParser, self).__init__(*args, **kwargs) self.add_argument('username', location=AUTH_FORM_LOCATIONS) self.add_argument('password', location=AUTH_FORM_LOCATIONS) self.add_argument('api_key', location=('args',) + AUTH_FORM_LOCATIONS)
Allow handle_write to accept data
Allow handle_write to accept data
Python
isc
RickyCook/DockCI,RickyCook/DockCI,sprucedev/DockCI,sprucedev/DockCI,sprucedev/DockCI-Agent,sprucedev/DockCI,RickyCook/DockCI,RickyCook/DockCI,sprucedev/DockCI-Agent,sprucedev/DockCI
--- +++ @@ -10,10 +10,18 @@ class BaseDetailResource(Resource): """ Base resource for details API endpoints """ - def handle_write(self, model, parser): # pylint:disable=no-self-use + # pylint:disable=no-self-use + def handle_write(self, model, parser=None, data=None): """ Parse request args, set attrs on the model, and commit """ - args = parser.parse_args(strict=True) - args = clean_attrs(args) + assert parser is not None or data is not None, ( + "Must give either parser, or data") + + if data is None: + args = parser.parse_args(strict=True) + args = clean_attrs(args) + else: + args = data + set_attrs(model, args) DB.session.add(model) DB.session.commit()
4ae0fccace6a3b2b640fd58c03fbd07341578acc
gen-android-icons.py
gen-android-icons.py
__author__ = 'Maksim Dmitriev' import argparse import os if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument('-s', '--source', help='the icon to be resized', required=True) parser.add_argument('-d', '--dest', help='the directory where resized icons are saved') parser.add_argument('-f', '--outfile', help='the output file names') args = parser.parse_args() source_image = args.source dest_dir = args.dest if dest_dir is None: os.makedirs(os.path.dirname(os.path.realpath(source_image)) + os.sep + 'out', exist_ok=True)
__author__ = 'Maksim Dmitriev' import argparse import os if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument('-s', '--source', help='the icon to be resized', required=True) parser.add_argument('-d', '--dest', help='the directory where resized icons are saved') parser.add_argument('-f', '--outfile', help='the output file names') args = parser.parse_args() source_image = args.source dest_dir = args.dest if dest_dir is None: dest_dir = os.path.join(os.path.dirname(os.path.realpath(source_image)), 'out') os.makedirs(dest_dir, exist_ok=True) dpi_dirs = ['drawable-mdpi', 'drawable-hdpi', 'drawable-xhdpi', 'drawable-xxhdpi'] for dpi_dir in dpi_dirs: os.makedirs(os.path.join(dest_dir, dpi_dir), exist_ok=True)
Use os.path.join instead of +
Use os.path.join instead of +
Python
bsd-3-clause
MaksimDmitriev/Python-Scripts
--- +++ @@ -13,5 +13,10 @@ source_image = args.source dest_dir = args.dest if dest_dir is None: - os.makedirs(os.path.dirname(os.path.realpath(source_image)) + os.sep + 'out', exist_ok=True) + dest_dir = os.path.join(os.path.dirname(os.path.realpath(source_image)), 'out') + os.makedirs(dest_dir, exist_ok=True) + dpi_dirs = ['drawable-mdpi', 'drawable-hdpi', 'drawable-xhdpi', 'drawable-xxhdpi'] + for dpi_dir in dpi_dirs: + os.makedirs(os.path.join(dest_dir, dpi_dir), exist_ok=True) +
77f22bd587ba2f6b1360bd5f8174e007b6028f9f
genes/debconf/set.py
genes/debconf/set.py
import os from subprocess import Popen, PIPE from genes.lib.traits import if_any from genes.ubuntu.traits import is_ubuntu from genes.debian.traits import is_debian # TODO: stop using sudo or ensure it exists # TODO: specify user to run as # TODO: utilize functools partial to handle some of the above functionality class Config: SET_SELECTIONS = ['debconf-set-selections'] ENV = os.environ.copy() ENV['DEBIAN_FRONTEND'] = "noninteractive" @if_any(is_debian(), is_ubuntu()) def set_selections(*selections): if selections: debconf = Popen(Config.SET_SELECTIONS, env=Config.ENV, stdin=PIPE) debconf.communicate(input=" ".join(selections)) # FIXME: capture errors above, report them else: # FIXME: add error pass
import os from subprocess import Popen, PIPE from genes.lib.traits import if_any from genes.ubuntu.traits import is_ubuntu from genes.debian.traits import is_debian # TODO: stop using sudo or ensure it exists # TODO: specify user to run as # TODO: utilize functools partial to handle some of the above functionality class Config: SET_SELECTIONS = ['debconf-set-selections'] ENV = os.environ.copy() ENV['DEBIAN_FRONTEND'] = "noninteractive" @if_any(is_debian, is_ubuntu) def set_selections(*selections): if selections: debconf = Popen(Config.SET_SELECTIONS, env=Config.ENV, stdin=PIPE) debconf.communicate(input=" ".join(selections)) # FIXME: capture errors above, report them else: # FIXME: add error pass
Change bools to functions in if_any
Change bools to functions in if_any
Python
mit
hatchery/genepool,hatchery/Genepool2
--- +++ @@ -14,7 +14,7 @@ ENV['DEBIAN_FRONTEND'] = "noninteractive" -@if_any(is_debian(), is_ubuntu()) +@if_any(is_debian, is_ubuntu) def set_selections(*selections): if selections: debconf = Popen(Config.SET_SELECTIONS, env=Config.ENV, stdin=PIPE)
25e5070a575de1ae7e20d6ede71297ab424cea87
bluegreen-example/app.py
bluegreen-example/app.py
import os from flask import Flask app = Flask(__name__) @app.route("/") def hello(): return "Hello 0-downtime %s World!" % os.environ.get('BLUEGREEN', 'bland')
import os from flask import Flask, send_from_directory app = Flask(__name__) @app.route("/") def hello(): return "Hello 0-downtime %s World!" % os.environ.get('BLUEGREEN', 'bland') @app.route("/parrots/<path:path>") def parrot(path): return send_from_directory(os.path.join('parrots', 'parrots'), path)
Add a route to send parrot images
Add a route to send parrot images
Python
mit
dbravender/gitric
--- +++ @@ -1,9 +1,14 @@ import os -from flask import Flask +from flask import Flask, send_from_directory app = Flask(__name__) @app.route("/") def hello(): return "Hello 0-downtime %s World!" % os.environ.get('BLUEGREEN', 'bland') + + +@app.route("/parrots/<path:path>") +def parrot(path): + return send_from_directory(os.path.join('parrots', 'parrots'), path)
09b6f3e5cde6aeff69ea15fdc15c7db300ce3272
python/web_socket.py
python/web_socket.py
#!/bin/python try: import urllib.request as urlrequest except ImportError: import urllib as urlrequest import json class RESTfulApi: """ Generic REST API call """ def __init__(self): """ Constructor """ pass def request(self, url): """ Web request :param: url: The url link :return JSON object """ req = urlrequest.Request(url, None, headers={ "Content-Type": "application/x-www-form-urlencoded", "Accept": "*/*", "User-Agent": "curl/7.24.0 (x86_64-apple-darwin12.0)"}) res = urlrequest.urlopen(req) res = json.loads(res.read().decode('utf8')) return res
#!/bin/python try: import urllib.request as urlrequest except ImportError: import urllib as urlrequest import json class RESTfulApi: """ Generic REST API call """ def __init__(self): """ Constructor """ pass def request(self, url): """ Web request :param: url: The url link :return JSON object """ res = urlrequest.urlopen(url) res = json.loads(res.read().decode('utf8')) return res
Support python 2 and 3 compatability
Support python 2 and 3 compatability
Python
apache-2.0
Aurora-Team/BitcoinExchangeFH
--- +++ @@ -24,11 +24,7 @@ :param: url: The url link :return JSON object """ - req = urlrequest.Request(url, None, headers={ - "Content-Type": "application/x-www-form-urlencoded", - "Accept": "*/*", - "User-Agent": "curl/7.24.0 (x86_64-apple-darwin12.0)"}) - res = urlrequest.urlopen(req) + res = urlrequest.urlopen(url) res = json.loads(res.read().decode('utf8')) return res
97a1c4979f8c46833b4ac89f6920138551ed2ee6
pyuvdata/__init__.py
pyuvdata/__init__.py
# -*- mode: python; coding: utf-8 -*- # Copyright (c) 2018 Radio Astronomy Software Group # Licensed under the 2-clause BSD License """init file for pyuvdata. """ from __future__ import absolute_import, division, print_function from .uvdata import * from .telescopes import * from .uvcal import * from .uvbeam import * from .utils import * from . import version __version__ = version.version
# -*- mode: python; coding: utf-8 -*- # Copyright (c) 2018 Radio Astronomy Software Group # Licensed under the 2-clause BSD License """init file for pyuvdata. """ from __future__ import absolute_import, division, print_function from .uvdata import * from .telescopes import * from .uvcal import * from .uvbeam import * from .utils import * # consider removing this import from . import version __version__ = version.version
Add comment about utils import
Add comment about utils import
Python
bsd-2-clause
HERA-Team/pyuvdata,HERA-Team/pyuvdata,HERA-Team/pyuvdata,HERA-Team/pyuvdata
--- +++ @@ -11,7 +11,7 @@ from .telescopes import * from .uvcal import * from .uvbeam import * -from .utils import * +from .utils import * # consider removing this import from . import version __version__ = version.version
5f7622824ac997228f83a894b451211fac4838ed
core/data/DataTransformer.py
core/data/DataTransformer.py
""" DataTransformer :Authors: Berend Klein Haneveld """ from vtk import vtkImageReslice class DataTransformer(object): """DataTransformer is a class that can transform a given dataset""" def __init__(self): super(DataTransformer, self).__init__() def TransformImageData(self, imageData, transform): """ :type imageData: vtkImageData :type transform: vtkTransform """ reslicer = vtkImageReslice() reslicer.SetInterpolationModeToCubic() # reslicer.SetAutoCropOutput(1) # Not sure if this is what we want reslicer.SetInputData(imageData) reslicer.SetResliceTransform(transform.GetInverse()) reslicer.Update() return reslicer.GetOutput()
""" DataTransformer :Authors: Berend Klein Haneveld """ from vtk import vtkImageReslice class DataTransformer(object): """DataTransformer is a class that can transform a given dataset""" def __init__(self): super(DataTransformer, self).__init__() def TransformImageData(self, imageData, transform): """ :type imageData: vtkImageData :type transform: vtkTransform """ reslicer = vtkImageReslice() reslicer.SetInterpolationModeToCubic() range = imageData.GetScalarRange() reslicer.SetBackgroundLevel(range[0]) # reslicer.SetAutoCropOutput(1) # Not sure if this is what we want reslicer.SetInputData(imageData) reslicer.SetResliceTransform(transform.GetInverse()) reslicer.Update() return reslicer.GetOutput()
Set the background color of the reslicer to the lowest value in the image data.
Set the background color of the reslicer to the lowest value in the image data.
Python
mit
berendkleinhaneveld/Registrationshop,berendkleinhaneveld/Registrationshop
--- +++ @@ -20,6 +20,8 @@ """ reslicer = vtkImageReslice() reslicer.SetInterpolationModeToCubic() + range = imageData.GetScalarRange() + reslicer.SetBackgroundLevel(range[0]) # reslicer.SetAutoCropOutput(1) # Not sure if this is what we want reslicer.SetInputData(imageData)
32b00e79b9204477e94adc47fae0e3a0838dc328
reviewboard/hostingsvcs/versionone.py
reviewboard/hostingsvcs/versionone.py
from django import forms from django.utils.translation import ugettext_lazy as _ from reviewboard.hostingsvcs.forms import HostingServiceForm from reviewboard.hostingsvcs.service import HostingService class VersionOneForm(HostingServiceForm): versionone_url = forms.CharField( label=_('VersionOne URL'), max_length=64, required=True, widget=forms.TextInput(attrs={'size': '60'})) class VersionOne(HostingService): name = 'versionone' form = VersionOneForm bug_tracker_field = '%(versionone_url)s/assetdetail.v1?Number=%%s' supports_bug_trackers = True
from django import forms from django.utils.translation import ugettext_lazy as _ from reviewboard.hostingsvcs.forms import HostingServiceForm from reviewboard.hostingsvcs.service import HostingService class VersionOneForm(HostingServiceForm): versionone_url = forms.CharField( label=_('VersionOne URL'), max_length=64, required=True, widget=forms.TextInput(attrs={'size': '60'})) class VersionOne(HostingService): name = 'VersionOne' form = VersionOneForm bug_tracker_field = '%(versionone_url)s/assetdetail.v1?Number=%%s' supports_bug_trackers = True
Clean up the name and spacing in the VersionOne code.
Clean up the name and spacing in the VersionOne code. The VersionOne code had some spacing and trailing whitespace problems, and also listed the service as "versionone" and not "VersionOne".
Python
mit
1tush/reviewboard,custode/reviewboard,1tush/reviewboard,1tush/reviewboard,chipx86/reviewboard,chipx86/reviewboard,KnowNo/reviewboard,KnowNo/reviewboard,brennie/reviewboard,davidt/reviewboard,reviewboard/reviewboard,sgallagher/reviewboard,beol/reviewboard,sgallagher/reviewboard,davidt/reviewboard,beol/reviewboard,bkochendorfer/reviewboard,brennie/reviewboard,reviewboard/reviewboard,1tush/reviewboard,1tush/reviewboard,brennie/reviewboard,reviewboard/reviewboard,sgallagher/reviewboard,custode/reviewboard,1tush/reviewboard,KnowNo/reviewboard,reviewboard/reviewboard,sgallagher/reviewboard,bkochendorfer/reviewboard,brennie/reviewboard,beol/reviewboard,custode/reviewboard,chipx86/reviewboard,bkochendorfer/reviewboard,custode/reviewboard,davidt/reviewboard,chipx86/reviewboard,1tush/reviewboard,bkochendorfer/reviewboard,davidt/reviewboard,beol/reviewboard,1tush/reviewboard,KnowNo/reviewboard,1tush/reviewboard
--- +++ @@ -10,10 +10,11 @@ label=_('VersionOne URL'), max_length=64, required=True, - widget=forms.TextInput(attrs={'size': '60'})) + widget=forms.TextInput(attrs={'size': '60'})) + class VersionOne(HostingService): - name = 'versionone' + name = 'VersionOne' form = VersionOneForm bug_tracker_field = '%(versionone_url)s/assetdetail.v1?Number=%%s' - supports_bug_trackers = True + supports_bug_trackers = True
73a9889f0e43d2b1dc94e2235a94cb888e0eda89
zeus/utils/sentry.py
zeus/utils/sentry.py
from functools import wraps from sentry_sdk import Hub def span(op, desc_or_func=None): def inner(func): @wraps(func) def wrapped(*args, **kwargs): if callable(desc_or_func): description = desc_or_func(*args, **kwargs) else: description = desc_or_func with Hub.current.start_span(op=op, description=description): return func(*args, **kwargs) return wrapped return inner
import asyncio from contextlib import contextmanager from functools import wraps from sentry_sdk import Hub # https://stackoverflow.com/questions/44169998/how-to-create-a-python-decorator-that-can-wrap-either-coroutine-or-function def span(op, desc_or_func=None): def inner(func): @contextmanager def wrap_with_span(args, kwargs): if callable(desc_or_func): description = desc_or_func(*args, **kwargs) else: description = desc_or_func with Hub.current.start_span(op=op, description=description): yield @wraps(func) def wrapper(*args, **kwargs): if not asyncio.iscoroutinefunction(func): with wrap_with_span(args, kwargs): return func(*args, **kwargs) else: async def tmp(): with wrap_with_span(args, kwargs): return await func(*args, **kwargs) return tmp() return wrapper return inner
Fix span decorator to work with asyncio
Fix span decorator to work with asyncio
Python
apache-2.0
getsentry/zeus,getsentry/zeus,getsentry/zeus,getsentry/zeus
--- +++ @@ -1,18 +1,35 @@ +import asyncio + +from contextlib import contextmanager from functools import wraps from sentry_sdk import Hub +# https://stackoverflow.com/questions/44169998/how-to-create-a-python-decorator-that-can-wrap-either-coroutine-or-function def span(op, desc_or_func=None): def inner(func): - @wraps(func) - def wrapped(*args, **kwargs): + @contextmanager + def wrap_with_span(args, kwargs): if callable(desc_or_func): description = desc_or_func(*args, **kwargs) else: description = desc_or_func with Hub.current.start_span(op=op, description=description): - return func(*args, **kwargs) + yield - return wrapped + @wraps(func) + def wrapper(*args, **kwargs): + if not asyncio.iscoroutinefunction(func): + with wrap_with_span(args, kwargs): + return func(*args, **kwargs) + else: + + async def tmp(): + with wrap_with_span(args, kwargs): + return await func(*args, **kwargs) + + return tmp() + + return wrapper return inner
3304b539e0f4105b3ad2603b0676b25d7c96b606
getconf/oldconf.py
getconf/oldconf.py
from oslo_config import cfg from oslo_config import generator as gn __all__ = ['get_conf'] def get_conf(conf_file=None, config_file=None): conf_file = '/opt/stack/barbican/etc/oslo-config-generator/barbican.conf' config_file = '/etc/barbican/barbican.conf' conf = cfg.ConfigOpts() gn.register_cli_opts(conf) oslo_args = ['--config-file', conf_file] conf(oslo_args) groups = gn._get_groups(gn._list_opts(conf.namespace)) # Make new CONF new_conf = cfg.ConfigOpts() for k, v in groups.items(): group = cfg.OptGroup(k) namespaces = v.get('namespaces', []) list_opts = [] for namespace in namespaces: list_opts.extend(namespace[1]) new_conf.register_group(group) new_conf.register_opts(list_opts, group=group) nova_args = ['--config-file', config_file] new_conf(nova_args) return new_conf if __name__ == '__main__': get_conf()
from oslo_config import cfg from oslo_config import generator as gn __all__ = ['get_conf'] def get_conf(conf_file=None, config_file=None): conf_file = '/opt/stack/barbican/etc/oslo-config-generator/barbican.conf' config_file = '/etc/barbican/barbican.conf' conf = cfg.ConfigOpts() gn.register_cli_opts(conf) oslo_args = ['--config-file', conf_file] conf(oslo_args) groups = gn._get_groups(gn._list_opts(conf.namespace)) # Make new CONF new_conf = cfg.ConfigOpts() for k, v in groups.items(): group = cfg.OptGroup(k) namespaces = v.get('namespaces', []) list_opts = [] for namespace in namespaces: list_opts.extend(namespace[1]) new_conf.register_group(group) if k == 'DEFAULT': new_conf.register_opts(list_opts) new_conf.register_opts(list_opts, group=group) nova_args = ['--config-file', config_file] new_conf(nova_args) return new_conf def get_ne_default(conf=None): ne_dict = [] if isinstance(conf, cfg.ConfigOpts): for name, group in conf._groups.items(): for option, opt in group._opts.items(): if conf[name][option] != opt['opt'].default: ne_dict.append((name, option)) return ne_dict if __name__ == '__main__': conf = get_conf() get_ne_default(conf)
Add get not equal default options
Add get not equal default options
Python
apache-2.0
NguyenHoaiNam/Jump-Over-Release
--- +++ @@ -23,6 +23,8 @@ for namespace in namespaces: list_opts.extend(namespace[1]) new_conf.register_group(group) + if k == 'DEFAULT': + new_conf.register_opts(list_opts) new_conf.register_opts(list_opts, group=group) nova_args = ['--config-file', config_file] @@ -30,5 +32,16 @@ return new_conf +def get_ne_default(conf=None): + ne_dict = [] + if isinstance(conf, cfg.ConfigOpts): + for name, group in conf._groups.items(): + for option, opt in group._opts.items(): + if conf[name][option] != opt['opt'].default: + ne_dict.append((name, option)) + return ne_dict + + if __name__ == '__main__': - get_conf() + conf = get_conf() + get_ne_default(conf)
4f5d0e6d16835a10a36cc57fc7e18cee91da5e2b
main.py
main.py
from flask import Flask app = Flask(__name__) app.config['DEBUG'] = True # Note: We don't need to call run() since our application is embedded within # the App Engine WSGI application server. @app.route('/') def hello(): """Return a friendly HTTP greeting.""" return 'Hello World! 3' @app.errorhandler(404) def page_not_found(e): """Return a custom 404 error.""" return 'Sorry, nothing at this URL.', 404
from flask import Flask app = Flask(__name__) app.config['DEBUG'] = True # Note: We don't need to call run() since our application is embedded within # the App Engine WSGI application server. @app.route('/') #def hello(): # """Return a friendly HTTP greeting.""" def index(): return """ Upload Image <br> <form method="POST" action="/upload" enctype="multipart/form-data"> <input type="file" name="file"> <input type="submit"> </form> """ @app.route('/upload', methods=['POST']) def upload(): """Process the uploaded file and upload it to Google Cloud Storage.""" uploaded_file = request.files.get('file') if not uploaded_file: return 'No file uploaded.', 400 # Create a Cloud Storage client. gcs = storage.Client() # Get the bucket that the file will be uploaded to. bucket = gcs.get_bucket(CLOUD_STORAGE_BUCKET) # Create a new blob and upload the file's content. blob = bucket.blob(uploaded_file.filename) blob.upload_from_string( uploaded_file.read(), content_type=uploaded_file.content_type ) # The public URL can be used to directly access the uploaded file via HTTP. return blob.public_url @app.errorhandler(500) def server_error(e): logging.exception('An error occurred during a request.') return """ An internal error occurred: <pre>{}</pre> See logs for full stacktrace. """.format(e), 500 @app.errorhandler(404) def page_not_found(e): """Return a custom 404 error.""" return 'Sorry, nothing at this URL.', 404
Insert Upload function & save Google storage.
Insert Upload function & save Google storage.
Python
apache-2.0
DongjinChoi/tizen_easy_widget,DongjinChoi/tizen_easy_widget
--- +++ @@ -7,10 +7,49 @@ @app.route('/') -def hello(): - """Return a friendly HTTP greeting.""" - return 'Hello World! 3' +#def hello(): +# """Return a friendly HTTP greeting.""" +def index(): + return """ + Upload Image <br> +<form method="POST" action="/upload" enctype="multipart/form-data"> + <input type="file" name="file"> + <input type="submit"> +</form> +""" +@app.route('/upload', methods=['POST']) +def upload(): + """Process the uploaded file and upload it to Google Cloud Storage.""" + uploaded_file = request.files.get('file') + + if not uploaded_file: + return 'No file uploaded.', 400 + + # Create a Cloud Storage client. + gcs = storage.Client() + + # Get the bucket that the file will be uploaded to. + bucket = gcs.get_bucket(CLOUD_STORAGE_BUCKET) + + # Create a new blob and upload the file's content. + blob = bucket.blob(uploaded_file.filename) + + blob.upload_from_string( + uploaded_file.read(), + content_type=uploaded_file.content_type + ) + + # The public URL can be used to directly access the uploaded file via HTTP. + return blob.public_url + +@app.errorhandler(500) +def server_error(e): + logging.exception('An error occurred during a request.') + return """ + An internal error occurred: <pre>{}</pre> + See logs for full stacktrace. + """.format(e), 500 @app.errorhandler(404) def page_not_found(e):
031bce223eac9eda1f856a204a07149c8e9549fd
hoomd/update/__init__.py
hoomd/update/__init__.py
from hoomd.update.box_resize import BoxResize # TODO remove when no longer necessary class _updater: pass __all__ = [BoxResize]
from hoomd.update.box_resize import BoxResize # TODO remove when no longer necessary class _updater: pass __all__ = ['BoxResize']
Fix typo in hoomd.update.__all__ quote class name
Fix typo in hoomd.update.__all__ quote class name
Python
bsd-3-clause
joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue
--- +++ @@ -6,4 +6,4 @@ pass -__all__ = [BoxResize] +__all__ = ['BoxResize']
f5c19e5814763235f8abebba2239f64135dc3188
python/shapes2json.py
python/shapes2json.py
import re import numpy as np infile = "shapes.txt" filt = re.compile(r'^"?([^"]*)"?$') converter = lambda x: filt.match(x.strip()).group(1) data = np.recfromcsv(infile, delimiter=',') shapes = np.array(map(int, [converter(x) for x in data["shape_id"]])) lats = np.array(map(float, [converter(x) for x in data["shape_pt_lat"]])) lons = np.array(map(float, [converter(x) for x in data["shape_pt_lon"]])) idx = np.where(shapes == 10003553) print "{%s:" % (10003553), for la,lon in zip(lats[idx], lons[idx]): print "[%f,%f]," % (la,lon), print "}" #import matplotlib.pyplot as plt #idx = np.where(shapes == 10003553) #plt.plot(lats[idx], lons[idx]) #plt.show()
import re, sys import numpy as np infile = sys.argv[1] filt = re.compile(r'^"?([^"]*)"?$') converter = lambda x: filt.match(x.strip()).group(1) data = np.recfromcsv(infile, delimiter=',') shapes = np.array(map(int, [converter(x) for x in data["shape_id"]])) lats = np.array(map(float, [converter(x) for x in data["shape_pt_lat"]])) lons = np.array(map(float, [converter(x) for x in data["shape_pt_lon"]])) idx = np.where(shapes == 41545540)[0] if 0: print '{"%s":' % (41545540), for la,lon in zip(lats[idx], lons[idx]): print "[%f,%f]," % (la,lon), print "}" else: print "[" for i in idx: print "{" print ' name: "pt%d",' % (i) print ' lng: %f,' % (lons[i]) print ' lat: %f,' % (lats[i]) print ' }, ', print "]"
Print out an array of points
Print out an array of points
Python
mit
acbecker/kcmetrod3
--- +++ @@ -1,18 +1,24 @@ -import re +import re, sys import numpy as np -infile = "shapes.txt" +infile = sys.argv[1] filt = re.compile(r'^"?([^"]*)"?$') converter = lambda x: filt.match(x.strip()).group(1) data = np.recfromcsv(infile, delimiter=',') shapes = np.array(map(int, [converter(x) for x in data["shape_id"]])) lats = np.array(map(float, [converter(x) for x in data["shape_pt_lat"]])) lons = np.array(map(float, [converter(x) for x in data["shape_pt_lon"]])) -idx = np.where(shapes == 10003553) -print "{%s:" % (10003553), -for la,lon in zip(lats[idx], lons[idx]): print "[%f,%f]," % (la,lon), -print "}" +idx = np.where(shapes == 41545540)[0] -#import matplotlib.pyplot as plt -#idx = np.where(shapes == 10003553) -#plt.plot(lats[idx], lons[idx]) -#plt.show() +if 0: + print '{"%s":' % (41545540), + for la,lon in zip(lats[idx], lons[idx]): print "[%f,%f]," % (la,lon), + print "}" +else: + print "[" + for i in idx: + print "{" + print ' name: "pt%d",' % (i) + print ' lng: %f,' % (lons[i]) + print ' lat: %f,' % (lats[i]) + print ' }, ', + print "]"
e2924f23c48a6e39b7c6e24ac19f73bd10181167
say/say.py
say/say.py
# File: say.py # Purpose: Write a program that will take a number from 0 to 999,999,999,999 and spell out that number in English. # Programmer: Amal Shehu # Course: Exercism # Date: Wednesday 8th September 2016, 10:00 PM
# File: say.py # Purpose: 0 to 999,999,999,999 and spell out that number in English. # Programmer: Amal Shehu # Course: Exercism # Date: Wednesday 8th September 2016, 10:00 PM # #### Step 1 def say(num): num_dict = {0: 'zero', 1: 'one', 2: 'two', 3: 'three', 4: 'four', 5: 'five', 6: 'six', 7: 'seven', 8: 'eight', 9: 'nine', 10: 'ten', 11: 'eleven', 12: 'twelve', 13: 'thirteen', 14: 'fourteen', 15: 'fifteen', 16: 'sixteen', 17: 'seventeen', 18: 'eighteen', 19: 'ninteen', 20: 'twenty', 30: 'thirty', 40: 'forty', 50: 'fifty', 60: 'sixty', 70: 'seventy', 80: 'eighty', 90: 'ninty'} if (num < 20): return num_dict[num] if (num < 100): if num % 10 == 0: return num_dict[num] else: return num_dict[num // 10 * 10] + '-' + num_dict[num % 10]
Complete step 1 from problem Readme
Complete step 1 from problem Readme
Python
mit
amalshehu/exercism-python
--- +++ @@ -1,5 +1,26 @@ # File: say.py -# Purpose: Write a program that will take a number from 0 to 999,999,999,999 and spell out that number in English. +# Purpose: 0 to 999,999,999,999 and spell out that number in English. # Programmer: Amal Shehu # Course: Exercism # Date: Wednesday 8th September 2016, 10:00 PM + +# #### Step 1 + + +def say(num): + num_dict = {0: 'zero', 1: 'one', 2: 'two', 3: 'three', 4: 'four', + 5: 'five', 6: 'six', 7: 'seven', 8: 'eight', 9: 'nine', 10: 'ten', + 11: 'eleven', 12: 'twelve', 13: 'thirteen', 14: 'fourteen', + 15: 'fifteen', 16: 'sixteen', 17: 'seventeen', 18: 'eighteen', + 19: 'ninteen', 20: 'twenty', 30: 'thirty', 40: 'forty', + 50: 'fifty', 60: 'sixty', 70: 'seventy', 80: 'eighty', + 90: 'ninty'} + + if (num < 20): + return num_dict[num] + + if (num < 100): + if num % 10 == 0: + return num_dict[num] + else: + return num_dict[num // 10 * 10] + '-' + num_dict[num % 10]
80dcfaa7216ed0be88eb275316d135a1089e4dbe
simplesqlite/_error.py
simplesqlite/_error.py
# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ import sqlite3 class DatabaseError(sqlite3.DatabaseError): """ Exception raised for errors that are related to the database. .. seealso:: - `sqlite3.DatabaseError <https://docs.python.org/3/library/sqlite3.html#sqlite3.DatabaseError>`__ """ class NullDatabaseConnectionError(Exception): """ Exception raised when executing an operation of :py:class:`~simplesqlite.SimpleSQLite` instance without connection to a SQLite database file. """ class TableNotFoundError(Exception): """ Exception raised when accessed the table that not exists in the database. """ class AttributeNotFoundError(Exception): """ Exception raised when accessed the attribute that not exists in the table. """ class InvalidTableNameError(ValueError): """ Exception raised when used invalid table name for SQLite. """ class InvalidAttributeNameError(ValueError): """ Exception raised when used invalid attribute name for SQLite. """ class SqlSyntaxError(Exception): """ Exception raised when a SQLite query syntax is invalid. """ class OperationalError(sqlite3.OperationalError): """ Exception raised when failed to execute a query. """ @property def message(self): return self.__message def __init__(self, *args, **kwargs): self.__message = kwargs.pop("message", None) super(OperationalError, self).__init__(*args, **kwargs)
# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ import sqlite3 from tabledata import InvalidTableNameError class DatabaseError(sqlite3.DatabaseError): """ Exception raised for errors that are related to the database. .. seealso:: - `sqlite3.DatabaseError <https://docs.python.org/3/library/sqlite3.html#sqlite3.DatabaseError>`__ """ class NullDatabaseConnectionError(Exception): """ Exception raised when executing an operation of :py:class:`~simplesqlite.SimpleSQLite` instance without connection to a SQLite database file. """ class TableNotFoundError(Exception): """ Exception raised when accessed the table that not exists in the database. """ class AttributeNotFoundError(Exception): """ Exception raised when accessed the attribute that not exists in the table. """ class InvalidAttributeNameError(ValueError): """ Exception raised when used invalid attribute name for SQLite. """ class SqlSyntaxError(Exception): """ Exception raised when a SQLite query syntax is invalid. """ class OperationalError(sqlite3.OperationalError): """ Exception raised when failed to execute a query. """ @property def message(self): return self.__message def __init__(self, *args, **kwargs): self.__message = kwargs.pop("message", None) super(OperationalError, self).__init__(*args, **kwargs)
Remove a duplicate error class definition
Remove a duplicate error class definition
Python
mit
thombashi/SimpleSQLite,thombashi/SimpleSQLite
--- +++ @@ -5,6 +5,8 @@ """ import sqlite3 + +from tabledata import InvalidTableNameError class DatabaseError(sqlite3.DatabaseError): @@ -36,12 +38,6 @@ """ -class InvalidTableNameError(ValueError): - """ - Exception raised when used invalid table name for SQLite. - """ - - class InvalidAttributeNameError(ValueError): """ Exception raised when used invalid attribute name for SQLite.
79ef50191123c6e8a2e5e47efb8b15239a7acc5c
readux/__init__.py
readux/__init__.py
from django.conf import settings __version_info__ = (1, 2, 0, 'dev') # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],)) # context processor to add version to the template environment def context_extras(request): return { # software version 'SW_VERSION': __version__, # Alternate names for social-auth backends, # to be used for display and font-awesome icon (lowercased) # If not entered here, backend name will be used as-is for # icon and title-cased for display (i.e., twitter / Twitter). 'backend_names': { 'github': 'GitHub', 'google-oauth2': 'Google', }, 'ANNOTATOR_STORE_URI': settings.ANNOTATOR_STORE_URI }
from django.conf import settings __version_info__ = (1, 2, 0, 'dev') # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],)) # context processor to add version to the template environment def context_extras(request): return { # software version 'SW_VERSION': __version__, # Alternate names for social-auth backends, # to be used for display and font-awesome icon (lowercased) # If not entered here, backend name will be used as-is for # icon and title-cased for display (i.e., twitter / Twitter). 'backend_names': { 'github': 'GitHub', 'google-oauth2': 'Google', }, }
Drop configured annotator store uri
Drop configured annotator store uri
Python
apache-2.0
emory-libraries/readux,emory-libraries/readux,emory-libraries/readux
--- +++ @@ -22,6 +22,5 @@ 'github': 'GitHub', 'google-oauth2': 'Google', }, - 'ANNOTATOR_STORE_URI': settings.ANNOTATOR_STORE_URI }
45f33dcf98b7b20fbedf3e05ca5c575ce3cbcbb3
scripts/generate-invoice.py
scripts/generate-invoice.py
#!/usr/bin/env python3 import argparse import yaml import jinja2 import weasyprint parser = argparse.ArgumentParser() parser.add_argument('--data', help='path to data directory', required=True) parser.add_argument('--number', help='Invoice number', type=int, required=True) args = parser.parse_args() data_directory = str(args.data) invoice_number = str(args.number) supplier_file = open(data_directory + 'data/supplier.yaml') supplier_data = yaml.safe_load(supplier_file.read()) supplier_file.close() invoice_file = open(data_directory + 'data/invoices/' + invoice_number + '.yaml') invoice_data = yaml.safe_load(invoice_file.read()) invoice_file.close() # TODO: Validation # TODO: Sum of invoice items equals total # TODO: Invoice number matches filename client_file = open(data_directory + 'data/clients/' + invoice_data['client'] + '.yaml') client_data = yaml.safe_load(client_file.read()) client_file.close() template_environment = jinja2.Environment(loader = jinja2.FileSystemLoader('../templates/')) template = template_environment.get_template('invoice.html') html_data = template.render(supplier = supplier_data, invoice = invoice_data, client = client_data) weasyprint.HTML(string = html_data).write_pdf(data_directory + 'output/invoices/' + invoice_number + '.pdf')
#!/usr/bin/env python3 import argparse import yaml import jinja2 import weasyprint parser = argparse.ArgumentParser() parser.add_argument('--data', help='path to data directory', required=True) parser.add_argument('--number', help='Invoice number', type=int, required=True) args = parser.parse_args() data_directory = str(args.data) invoice_number = str(args.number) supplier_file = open(data_directory + 'data/supplier.yaml') supplier_data = yaml.safe_load(supplier_file.read()) supplier_file.close() invoice_file = open(data_directory + 'data/invoices/' + invoice_number + '.yaml') invoice_data = yaml.safe_load(invoice_file.read()) invoice_file.close() client_file = open(data_directory + 'data/clients/' + invoice_data['client'] + '.yaml') client_data = yaml.safe_load(client_file.read()) client_file.close() template_environment = jinja2.Environment(loader = jinja2.FileSystemLoader('../templates/')) template = template_environment.get_template('invoice.html') html_data = template.render(supplier = supplier_data, invoice = invoice_data, client = client_data) weasyprint.HTML(string = html_data).write_pdf(data_directory + 'output/invoices/' + invoice_number + '.pdf')
Move TODO items to GitHub issues
Move TODO items to GitHub issues
Python
mit
pwaring/125-accounts,pwaring/125-accounts
--- +++ @@ -21,10 +21,6 @@ invoice_data = yaml.safe_load(invoice_file.read()) invoice_file.close() -# TODO: Validation -# TODO: Sum of invoice items equals total -# TODO: Invoice number matches filename - client_file = open(data_directory + 'data/clients/' + invoice_data['client'] + '.yaml') client_data = yaml.safe_load(client_file.read()) client_file.close()
ad72c990051865d6c40cc0a0b3dbd1b514c5aa17
plumbium/analysis_file.py
plumbium/analysis_file.py
from checksum import file_sha1sum class AnalysisFile(object): def __init__(self, filename): self.filename = filename def checksum(self): return file_sha1sum(self.filename)
from utils import file_sha1sum class AnalysisFile(object): def __init__(self, filename): self.filename = filename def checksum(self): return file_sha1sum(self.filename)
Fix defunct import of checksum module
Fix defunct import of checksum module
Python
mit
jstutters/Plumbium
--- +++ @@ -1,4 +1,4 @@ -from checksum import file_sha1sum +from utils import file_sha1sum class AnalysisFile(object):
016c9e715350aa4a87a24cdb9d7e7ba0340589aa
login_token/models.py
login_token/models.py
import random import re from django.contrib.auth.models import User from django.db import models from instances.models import InstanceMixin NUMBER_OF_TOKEN_WORDS = 3 def generate_token(): def useful_word(w): # FIXME: should try to exclude offensive words if len(w) < 4: return False if re.search('^[a-z]*$', w): return True words = [] with open('/usr/share/dict/words') as fp: for line in fp: word = line.strip() if useful_word(word): words.append(word) return " ".join(random.choice(words) for i in range(NUMBER_OF_TOKEN_WORDS)) class LoginToken(InstanceMixin, models.Model): '''Represents a readable login token for mobile devices To enable logging in to a SayIt instance as a particular user, we ask the user to type in a three word phrase; this model records tokens that allow login for a particular instance by a particular user.''' user = models.ForeignKey(User) token = models.TextField(max_length=255)
import random import re from django.contrib.auth.models import User from django.db import models from instances.models import InstanceMixin NUMBER_OF_TOKEN_WORDS = 3 def generate_token(): def useful_word(w): # FIXME: should try to exclude offensive words if len(w) < 4: return False if re.search('^[a-z]*$', w): return True words = [] with open('/usr/share/dict/words') as fp: for line in fp: word = line.strip() if useful_word(word): words.append(word) return " ".join(random.choice(words) for i in range(NUMBER_OF_TOKEN_WORDS)) class LoginToken(InstanceMixin, models.Model): '''Represents a readable login token for mobile devices To enable logging in to a SayIt instance as a particular user, we ask the user to type in a three word phrase; this model records tokens that allow login for a particular instance by a particular user.''' user = models.ForeignKey(User) token = models.TextField(max_length=255, default=generate_token)
Make generate_token the default token value
Make generate_token the default token value
Python
agpl-3.0
opencorato/sayit,opencorato/sayit,opencorato/sayit,opencorato/sayit
--- +++ @@ -34,4 +34,5 @@ user.''' user = models.ForeignKey(User) - token = models.TextField(max_length=255) + token = models.TextField(max_length=255, + default=generate_token)
68c048f625aa0158f0117a737d94000da193df40
astral/models/event.py
astral/models/event.py
import Queue from elixir import Field, Unicode, Entity from astral.models.base import BaseEntityMixin import logging log = logging.getLogger(__name__) EVENT_QUEUE = Queue.Queue() class Event(BaseEntityMixin, Entity): message = Field(Unicode(96)) def __init__(self, *args, **kwargs): super(Event, self).__init__(*args, **kwargs) EVENT_QUEUE.put(self) def __repr__(self): return u'<Event %s>' % self.message
import Queue from elixir import Field, Unicode, Entity from astral.models.base import BaseEntityMixin import logging log = logging.getLogger(__name__) EVENT_QUEUE = Queue.Queue() class Event(BaseEntityMixin, Entity): message = Field(Unicode(96)) def __init__(self, *args, **kwargs): kwargs['message'] = unicode(kwargs['message']) super(Event, self).__init__(*args, **kwargs) EVENT_QUEUE.put(self) def __repr__(self): return u'<Event %s>' % self.message
Convert JSON to unicode before saving to clear up a SQLAlchemy warning.
Convert JSON to unicode before saving to clear up a SQLAlchemy warning.
Python
mit
peplin/astral
--- +++ @@ -14,6 +14,7 @@ message = Field(Unicode(96)) def __init__(self, *args, **kwargs): + kwargs['message'] = unicode(kwargs['message']) super(Event, self).__init__(*args, **kwargs) EVENT_QUEUE.put(self)
117c5f19df731dd56901023e4ed29db5741617b2
tests/TestModel.py
tests/TestModel.py
#!/usr/bin/env python import unittest from PythonTemplate import Model class TestModel(unittest.TestCase): def setUp(self, use_auth=False): # TODO pass def test_do_thing(self): # do something Model.doThing() # evaluate something # TODO
#!/usr/bin/env python import unittest from PythonTemplate import Model class TestModel(unittest.TestCase): def setUp(self): # TODO pass def test_do_thing(self): # do something Model.doThing() # evaluate something # TODO
Remove hold over from copy n paste
Remove hold over from copy n paste
Python
apache-2.0
joshvillbrandt/PythonTemplate
--- +++ @@ -5,7 +5,7 @@ class TestModel(unittest.TestCase): - def setUp(self, use_auth=False): + def setUp(self): # TODO pass
f6c2f222db0f529d3f5906d5de7a7541e835ea77
litecord/api/guilds.py
litecord/api/guilds.py
''' guilds.py - All handlers under /guilds/* ''' import json import logging from ..utils import _err, _json, strip_user_data log = logging.getLogger(__name__) class GuildsEndpoint: def __init__(self, server): self.server = server async def h_post_guilds(self, request): pass
''' guilds.py - All handlers under /guilds/* ''' import json import logging from ..utils import _err, _json, strip_user_data log = logging.getLogger(__name__) class GuildsEndpoint: def __init__(self, server): self.server = server async def h_post_guilds(self, request): pass async def h_guilds(self, request): ''' GuildsEndpoint.h_guilds Handle `GET /guilds/{guild_id}` ''' _error = await self.server.check_request(request) _error_json = json.loads(_error.text) if _error_json['code'] == 0: return _error guild_id = request.match_info['guild_id'] guild = self.server.guild_man.get_guild(guild_id) if guild is None: return _err('404: Not Found') return _json(guild.as_json) async def h_get_guild_channels(self, request): ''' GuildsEndpoint.h_get_guild_channels `GET /guilds/{guild_id}/channels` ''' _error = await self.server.check_request(request) _error_json = json.loads(_error.text) if _error_json['code'] == 0: return _error guild_id = request.match_info['guild_id'] return _json('Not Implemented')
Add some dummy routes in GuildsEndpoint
Add some dummy routes in GuildsEndpoint
Python
mit
nullpixel/litecord,nullpixel/litecord
--- +++ @@ -14,3 +14,37 @@ async def h_post_guilds(self, request): pass + + async def h_guilds(self, request): + ''' + GuildsEndpoint.h_guilds + + Handle `GET /guilds/{guild_id}` + ''' + _error = await self.server.check_request(request) + _error_json = json.loads(_error.text) + if _error_json['code'] == 0: + return _error + + guild_id = request.match_info['guild_id'] + + guild = self.server.guild_man.get_guild(guild_id) + if guild is None: + return _err('404: Not Found') + + return _json(guild.as_json) + + async def h_get_guild_channels(self, request): + ''' + GuildsEndpoint.h_get_guild_channels + + `GET /guilds/{guild_id}/channels` + ''' + _error = await self.server.check_request(request) + _error_json = json.loads(_error.text) + if _error_json['code'] == 0: + return _error + + guild_id = request.match_info['guild_id'] + + return _json('Not Implemented')
e4e4db6c08612ba5b08224040d191eb83d27bf0d
scripts/prob_bedpe_to_bedgraph.py
scripts/prob_bedpe_to_bedgraph.py
#!/usr/bin/env python import sys import numpy as np from optparse import OptionParser parser = OptionParser() parser.add_option("-b", "--bedpe_file", dest="bedpe_file", help="BEDPE file") parser.add_option("-n", "--name", default="LUMPY BedGraph", dest="name", help="Name") (options, args) = parser.parse_args() if not options.bedpe_file: parser.error('BEDPE file not given') f = open(options.bedpe_file,'r') print 'track type=bedGraph name="' + options.name + '"' for l in f: A = l.rstrip().split('\t') L=[float(x) for x in A[11].split()] R=[float(x) for x in A[12].split()] l_chr = A[0] l_start = int(A[1]) r_chr = A[3] r_start = int(A[4]) c = 0 for p in L: print '\t'.join( [l_chr, str(l_start + c), str(l_start + c + 1), str(p)]) c+=1 c = 0 for p in R: print '\t'.join( [r_chr, str(r_start + c), str(r_start + c + 1), str(p)]) c+=1 f.close()
#!/usr/bin/env python import sys import numpy as np from optparse import OptionParser parser = OptionParser() parser.add_option("-b", "--bedpe_file", dest="bedpe_file", help="BEDPE file") parser.add_option("-n", "--name", default="LUMPY BedGraph", dest="name", help="Name") (options, args) = parser.parse_args() if not options.bedpe_file: parser.error('BEDPE file not given') f = open(options.bedpe_file,'r') print 'track type=bedGraph name="' + options.name + '"' for l in f: A = l.rstrip().split('\t') L=[float(x) for x in A[15][2:].split(',')] R=[float(x) for x in A[16][2:].split(',')] l_chr = A[0] l_start = int(A[1]) r_chr = A[3] r_start = int(A[4]) c = 0 for p in L: print '\t'.join( [l_chr, str(l_start + c), str(l_start + c + 1), str(p)]) c+=1 c = 0 for p in R: print '\t'.join( [r_chr, str(r_start + c), str(r_start + c + 1), str(p)]) c+=1 f.close()
Move fields for new output
Move fields for new output
Python
mit
arq5x/lumpy-sv,glebkuznetsov/lumpy-sv,hall-lab/lumpy-sv,cc2qe/lumpy-sv,hall-lab/lumpy-sv,glebkuznetsov/lumpy-sv,arq5x/lumpy-sv,glebkuznetsov/lumpy-sv,arq5x/lumpy-sv,cc2qe/lumpy-sv,hall-lab/lumpy-sv,cc2qe/lumpy-sv,glebkuznetsov/lumpy-sv,glebkuznetsov/lumpy-sv,hall-lab/lumpy-sv,arq5x/lumpy-sv,cc2qe/lumpy-sv,arq5x/lumpy-sv,hall-lab/lumpy-sv,cc2qe/lumpy-sv
--- +++ @@ -29,8 +29,8 @@ for l in f: A = l.rstrip().split('\t') - L=[float(x) for x in A[11].split()] - R=[float(x) for x in A[12].split()] + L=[float(x) for x in A[15][2:].split(',')] + R=[float(x) for x in A[16][2:].split(',')] l_chr = A[0] l_start = int(A[1])
8de10efd931e397af9c6b4c405a58af27940608a
lwr/lwr_client/util.py
lwr/lwr_client/util.py
from threading import Lock, Event class TransferEventManager(object): def __init__(self): self.events = dict() self.events_lock = Lock() def acquire_event(self, path, force_clear=False): with self.events_lock: if path in self.events: event_holder = self.events[path] else: event_holder = EventHolder(Event(), path, self) self.events[path] = event_holder if force_clear: event_holder.event.clear() return event_holder def free_event(self, path): with self.events_lock: del self.events[path] class EventHolder(object): def __init__(self, event, path, condition_manager): self.event = event self.path = path self.condition_manager = condition_manager def release(self): self.event.set() def __del__(self): self.condition_manager.free_event(self.path)
from threading import Lock, Event from weakref import WeakValueDictionary class TransferEventManager(object): def __init__(self): self.events = WeakValueDictionary(dict()) self.events_lock = Lock() def acquire_event(self, path, force_clear=False): with self.events_lock: if path in self.events: event_holder = self.events[path] else: event_holder = EventHolder(Event(), path, self) self.events[path] = event_holder if force_clear: event_holder.event.clear() return event_holder class EventHolder(object): def __init__(self, event, path, condition_manager): self.event = event self.path = path self.condition_manager = condition_manager def release(self): self.event.set()
Fix logic related to GC of Event references using weakref.WeakValueDictionary.
Fix logic related to GC of Event references using weakref.WeakValueDictionary.
Python
apache-2.0
galaxyproject/pulsar,galaxyproject/pulsar,jmchilton/pulsar,jmchilton/lwr,ssorgatem/pulsar,natefoo/pulsar,ssorgatem/pulsar,natefoo/pulsar,jmchilton/lwr,jmchilton/pulsar
--- +++ @@ -1,10 +1,11 @@ from threading import Lock, Event +from weakref import WeakValueDictionary class TransferEventManager(object): def __init__(self): - self.events = dict() + self.events = WeakValueDictionary(dict()) self.events_lock = Lock() def acquire_event(self, path, force_clear=False): @@ -18,10 +19,6 @@ event_holder.event.clear() return event_holder - def free_event(self, path): - with self.events_lock: - del self.events[path] - class EventHolder(object): @@ -32,6 +29,3 @@ def release(self): self.event.set() - - def __del__(self): - self.condition_manager.free_event(self.path)
90a8a55b4607e88e7e73f849250b4230185a18fd
integration-test/843-normalize-underscore.py
integration-test/843-normalize-underscore.py
# http://www.openstreetmap.org/way/219071307 assert_has_feature( 16, 10478, 25338, 'roads', { 'id': 219071307, 'kind': 'minor_road', 'service': 'drive_through' }) # http://www.openstreetmap.org/way/258020271 assert_has_feature( 16, 11077, 25458, 'roads', { 'id': 258020271, 'kind': 'aerialway', 'kind_detail': 't_bar' }) # http://www.openstreetmap.org/way/256717307 assert_has_feature( 16, 18763, 24784, 'roads', { 'id': 256717307, 'kind': 'aerialway', 'kind_detail': 'j_bar' }) # http://www.openstreetmap.org/way/258132198 assert_has_feature( 16, 10910, 25120, 'roads', { 'id': 258132198, 'kind': 'aerialway', 'kind_detail': type(None) })
# http://www.openstreetmap.org/way/219071307 assert_has_feature( 16, 10478, 25338, 'roads', { 'id': 219071307, 'kind': 'minor_road', 'service': 'drive_through' }) # http://www.openstreetmap.org/way/258020271 assert_has_feature( 16, 11077, 25458, 'roads', { 'id': 258020271, 'kind': 'aerialway', 'kind_detail': 't_bar' }) # http://www.openstreetmap.org/way/256717307 assert_has_feature( 16, 18763, 24784, 'roads', { 'id': 256717307, 'kind': 'aerialway', 'kind_detail': 'j_bar' }) # http://www.openstreetmap.org/way/232074914 assert_has_feature( 16, 13304, 24998, 'roads', { 'id': 232074914, 'kind': 'aerialway', 'kind_detail': type(None) })
Change test case item, as the previous one had been updated and fixed.
Change test case item, as the previous one had been updated and fixed.
Python
mit
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
--- +++ @@ -13,7 +13,7 @@ 16, 18763, 24784, 'roads', { 'id': 256717307, 'kind': 'aerialway', 'kind_detail': 'j_bar' }) -# http://www.openstreetmap.org/way/258132198 +# http://www.openstreetmap.org/way/232074914 assert_has_feature( - 16, 10910, 25120, 'roads', - { 'id': 258132198, 'kind': 'aerialway', 'kind_detail': type(None) }) + 16, 13304, 24998, 'roads', + { 'id': 232074914, 'kind': 'aerialway', 'kind_detail': type(None) })
e11d201c415eb86137194d93ac1fca7c20123ae9
runtests.py
runtests.py
#!/usr/bin/env python import sys from django.conf import settings if not settings.configured: settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', } }, INSTALLED_APPS=( 'constants', ), SITE_ID=1, SECRET_KEY='this-is-just-for-tests-so-not-that-secret', ) from django.test.utils import get_runner def runtests(): TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True, failfast=False) failures = test_runner.run_tests(['constants', ]) sys.exit(failures) if __name__ == '__main__': runtests()
#!/usr/bin/env python import sys from django.conf import settings if not settings.configured: settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', } }, INSTALLED_APPS=( 'constants', ), SITE_ID=1, SECRET_KEY='this-is-just-for-tests-so-not-that-secret', ) # For Django 1.7. try: import django django.setup() except AttributeError: pass from django.test.utils import get_runner def runtests(): TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True, failfast=False) failures = test_runner.run_tests(['constants', ]) sys.exit(failures) if __name__ == '__main__': runtests()
Add explicit setup for Django 1.7
Add explicit setup for Django 1.7
Python
bsd-3-clause
caktus/django-dry-choices
--- +++ @@ -20,6 +20,14 @@ ) +# For Django 1.7. +try: + import django + django.setup() +except AttributeError: + pass + + from django.test.utils import get_runner
d1ee3578cd948c28e72bda3fbd06193be5033196
ractivate/utils/client.py
ractivate/utils/client.py
# # Copyright (c) 2010 rPath, Inc. # # This program is distributed under the terms of the Common Public License, # version 1.0. A copy of this license should have been distributed with this # source file in a file called LICENSE. If it is not present, the license # is always available at http://www.rpath.com/permanent/licenses/CPL-1.0. # # This program is distributed in the hope that it will be useful, but # without any warranty; without even the implied warranty of merchantability # or fitness for a particular purpose. See the Common Public License for # full details. # import urllib import urllib2 import urlparse class Client(object): def __init__(self, url): self.url = url def request(self, data=None): print data return self.response = urllib.urlopen(self.url, data) self.responseBody = self.response.read() return self.responseBody class ActivationClient(Client): def activate(self, data): print data # return None return data return self.request(data)
# # Copyright (c) 2010 rPath, Inc. # # This program is distributed under the terms of the Common Public License, # version 1.0. A copy of this license should have been distributed with this # source file in a file called LICENSE. If it is not present, the license # is always available at http://www.rpath.com/permanent/licenses/CPL-1.0. # # This program is distributed in the hope that it will be useful, but # without any warranty; without even the implied warranty of merchantability # or fitness for a particular purpose. See the Common Public License for # full details. # import logging import urllib import urllib2 import urlparse logger = logging.getLogger('activation') class Client(object): def __init__(self, url): self.url = url def request(self, data=None): logger.debug("POSTing XML data:\n%s" % data) self.response = urllib.urlopen(self.url, data) self.responseBody = self.response.read() if self.response.code == self.SUCCESS_CODE: return True else: return False class ActivationClient(Client): SUCCESS_CODE = 201 def activate(self, data): activated = self.request(data) if not activated: logger.error("Failed activation with %s." % self.url) logger.error("Response code: %s" % self.response.code) logger.error("Response: %s" % self.responseBody) return activated
Make ActivationClient look for correct return code
Make ActivationClient look for correct return code
Python
apache-2.0
sassoftware/rpath-tools,sassoftware/rpath-tools
--- +++ @@ -12,25 +12,36 @@ # full details. # - +import logging import urllib import urllib2 import urlparse + +logger = logging.getLogger('activation') class Client(object): def __init__(self, url): self.url = url def request(self, data=None): - print data - return + logger.debug("POSTing XML data:\n%s" % data) self.response = urllib.urlopen(self.url, data) self.responseBody = self.response.read() - return self.responseBody + if self.response.code == self.SUCCESS_CODE: + return True + else: + return False class ActivationClient(Client): + + SUCCESS_CODE = 201 + def activate(self, data): - print data - # return None - return data - return self.request(data) + activated = self.request(data) + + if not activated: + logger.error("Failed activation with %s." % self.url) + logger.error("Response code: %s" % self.response.code) + logger.error("Response: %s" % self.responseBody) + + return activated
669b9c07ebca1d80778da950fc145c8cf0c6df65
skdata/__init__.py
skdata/__init__.py
try: # during the installation process this can raise an exception. from .data import SkData except: pass __author__ = 'Ivan Ogasawara' __email__ = 'ivan.ogasawara@gmail.com' __version__ = '0.1.2'
try: # during the installation process this can raise an exception. from .data import SkData except: pass __author__ = 'Ivan Ogasawara' __email__ = 'ivan.ogasawara@gmail.com' __version__ = '0.1.3'
Change pkg version to 0.1.3
Change pkg version to 0.1.3
Python
mit
xmnlab/skdata,OpenDataScienceLab/skdata
--- +++ @@ -6,5 +6,5 @@ __author__ = 'Ivan Ogasawara' __email__ = 'ivan.ogasawara@gmail.com' -__version__ = '0.1.2' +__version__ = '0.1.3'
3622a0354bc57d32c8b2e714c6b5f8533fa8f6d7
parsl/tests/configs/local_threads_globus.py
parsl/tests/configs/local_threads_globus.py
from parsl.config import Config from parsl.data_provider.scheme import GlobusScheme from parsl.executors.threads import ThreadPoolExecutor from parsl.tests.utils import get_rundir # If you are a developer running tests, make sure to update parsl/tests/configs/user_opts.py # If you are a user copying-and-pasting this as an example, make sure to either # 1) create a local `user_opts.py`, or # 2) delete the user_opts import below and replace all appearances of `user_opts` with the literal value # (i.e., user_opts['swan']['username'] -> 'your_username') from .user_opts import user_opts config = Config( executors=[ ThreadPoolExecutor( label='local_threads_globus', storage_access=[GlobusScheme( endpoint_uuid=user_opts['globus']['endpoint'], endpoint_path=user_opts['globus']['path'] )[, working_dir=user_opts['globus']['path'] ) ], run_dir=get_rundir() )
from parsl.config import Config from parsl.data_provider.scheme import GlobusScheme from parsl.executors.threads import ThreadPoolExecutor from parsl.tests.utils import get_rundir # If you are a developer running tests, make sure to update parsl/tests/configs/user_opts.py # If you are a user copying-and-pasting this as an example, make sure to either # 1) create a local `user_opts.py`, or # 2) delete the user_opts import below and replace all appearances of `user_opts` with the literal value # (i.e., user_opts['swan']['username'] -> 'your_username') from .user_opts import user_opts config = Config( executors=[ ThreadPoolExecutor( label='local_threads_globus', storage_access=[GlobusScheme( endpoint_uuid=user_opts['globus']['endpoint'], endpoint_path=user_opts['globus']['path'] )], working_dir=user_opts['globus']['path'] ) ], run_dir=get_rundir() )
Fix storage_access in the test config
Fix storage_access in the test config
Python
apache-2.0
swift-lang/swift-e-lab,Parsl/parsl,Parsl/parsl,Parsl/parsl,swift-lang/swift-e-lab,Parsl/parsl
--- +++ @@ -17,7 +17,7 @@ storage_access=[GlobusScheme( endpoint_uuid=user_opts['globus']['endpoint'], endpoint_path=user_opts['globus']['path'] - )[, + )], working_dir=user_opts['globus']['path'] ) ],
1dc4546aa95d3adfff910430afe922bf137bf595
test/testlib/schema.py
test/testlib/schema.py
import testbase schema = None __all__ = 'Table', 'Column', table_options = {} def Table(*args, **kw): """A schema.Table wrapper/hook for dialect-specific tweaks.""" global schema if schema is None: from sqlalchemy import schema test_opts = dict([(k,kw.pop(k)) for k in kw.keys() if k.startswith('test_')]) kw.update(table_options) if testbase.db.name == 'mysql': if 'mysql_engine' not in kw and 'mysql_type' not in kw: if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts: kw['mysql_engine'] = 'InnoDB' return schema.Table(*args, **kw) def Column(*args, **kw): """A schema.Column wrapper/hook for dialect-specific tweaks.""" global schema if schema is None: from sqlalchemy import schema # TODO: a Column that creates a Sequence automatically for PK columns, # which would help Oracle tests return schema.Column(*args, **kw)
import testbase from testlib import testing schema = None __all__ = 'Table', 'Column', table_options = {} def Table(*args, **kw): """A schema.Table wrapper/hook for dialect-specific tweaks.""" global schema if schema is None: from sqlalchemy import schema test_opts = dict([(k,kw.pop(k)) for k in kw.keys() if k.startswith('test_')]) kw.update(table_options) if testbase.db.name == 'mysql': if 'mysql_engine' not in kw and 'mysql_type' not in kw: if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts: kw['mysql_engine'] = 'InnoDB' # Apply some default cascading rules for self-referential foreign keys. # MySQL InnoDB has some issues around seleting self-refs too. if testing.against('firebird'): table_name = args[0] unpack = (testing.config.db.dialect. identifier_preparer.unformat_identifiers) # Only going after ForeignKeys in Columns. May need to # expand to ForeignKeyConstraint too. fks = [fk for col in args if isinstance(col, schema.Column) for fk in col.args if isinstance(fk, schema.ForeignKey)] for fk in fks: # root around in raw spec ref = fk._colspec if isinstance(ref, schema.Column): name = ref.table.name else: name = unpack(ref)[-2] print name, table_name if name == table_name: if fk.ondelete is None: fk.ondelete = 'CASCADE' if fk.onupdate is None: fk.onupdate = 'CASCADE' return schema.Table(*args, **kw) def Column(*args, **kw): """A schema.Column wrapper/hook for dialect-specific tweaks.""" global schema if schema is None: from sqlalchemy import schema # TODO: a Column that creates a Sequence automatically for PK columns, # which would help Oracle tests return schema.Column(*args, **kw)
Apply default cascade rules for firebird self-ref ForeignKeys.
Apply default cascade rules for firebird self-ref ForeignKeys.
Python
mit
graingert/sqlalchemy,alex/sqlalchemy,WinterNis/sqlalchemy,elelianghh/sqlalchemy,bdupharm/sqlalchemy,sandan/sqlalchemy,monetate/sqlalchemy,olemis/sqlalchemy,wujuguang/sqlalchemy,ThiefMaster/sqlalchemy,brianv0/sqlalchemy,j5int/sqlalchemy,wfxiang08/sqlalchemy,wujuguang/sqlalchemy,sqlalchemy/sqlalchemy,ThiefMaster/sqlalchemy,Akrog/sqlalchemy,hsum/sqlalchemy,Cito/sqlalchemy,monetate/sqlalchemy,hsum/sqlalchemy,epa/sqlalchemy,halfcrazy/sqlalchemy,wfxiang08/sqlalchemy,brianv0/sqlalchemy,276361270/sqlalchemy,EvaSDK/sqlalchemy,pdufour/sqlalchemy,dstufft/sqlalchemy,alex/sqlalchemy,sandan/sqlalchemy,davidjb/sqlalchemy,epa/sqlalchemy,olemis/sqlalchemy,WinterNis/sqlalchemy,davidjb/sqlalchemy,276361270/sqlalchemy,itkovian/sqlalchemy,inspirehep/sqlalchemy,davidfraser/sqlalchemy,bdupharm/sqlalchemy,robin900/sqlalchemy,bootandy/sqlalchemy,Cito/sqlalchemy,davidfraser/sqlalchemy,Akrog/sqlalchemy,halfcrazy/sqlalchemy,inspirehep/sqlalchemy,j5int/sqlalchemy,robin900/sqlalchemy,elelianghh/sqlalchemy,EvaSDK/sqlalchemy,bootandy/sqlalchemy,dstufft/sqlalchemy,graingert/sqlalchemy,pdufour/sqlalchemy,Cito/sqlalchemy,zzzeek/sqlalchemy,itkovian/sqlalchemy,alex/sqlalchemy
--- +++ @@ -1,4 +1,5 @@ import testbase +from testlib import testing schema = None __all__ = 'Table', 'Column', @@ -11,7 +12,7 @@ global schema if schema is None: from sqlalchemy import schema - + test_opts = dict([(k,kw.pop(k)) for k in kw.keys() if k.startswith('test_')]) @@ -21,6 +22,33 @@ if 'mysql_engine' not in kw and 'mysql_type' not in kw: if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts: kw['mysql_engine'] = 'InnoDB' + + # Apply some default cascading rules for self-referential foreign keys. + # MySQL InnoDB has some issues around seleting self-refs too. + if testing.against('firebird'): + table_name = args[0] + unpack = (testing.config.db.dialect. + identifier_preparer.unformat_identifiers) + + # Only going after ForeignKeys in Columns. May need to + # expand to ForeignKeyConstraint too. + fks = [fk + for col in args if isinstance(col, schema.Column) + for fk in col.args if isinstance(fk, schema.ForeignKey)] + + for fk in fks: + # root around in raw spec + ref = fk._colspec + if isinstance(ref, schema.Column): + name = ref.table.name + else: + name = unpack(ref)[-2] + print name, table_name + if name == table_name: + if fk.ondelete is None: + fk.ondelete = 'CASCADE' + if fk.onupdate is None: + fk.onupdate = 'CASCADE' return schema.Table(*args, **kw)
c69da19e8316b1e1c6f55d3571917e85ba4fbf8d
src/member/admin.py
src/member/admin.py
from django.contrib import admin from member.models import (Member, Client, Contact, Address, Referencing, Route, Client_avoid_component, Client_avoid_ingredient, Option, Client_option, Restriction) admin.site.register(Member) admin.site.register(Client) admin.site.register(Route) admin.site.register(Contact) admin.site.register(Address) admin.site.register(Referencing) admin.site.register(Client_avoid_component) admin.site.register(Client_avoid_ingredient) admin.site.register(Option) admin.site.register(Client_option) admin.site.register(Restriction)
from django.contrib import admin from member.models import (Member, Client, Contact, Address, Referencing, Route, Client_avoid_component, Client_avoid_ingredient, Option, Client_option, Restriction) from meal.models import Ingredient class IngredientsToAvoidInline(admin.TabularInline): model = Client.ingredients_to_avoid.through class ClientAdmin(admin.ModelAdmin): inlines = [ IngredientsToAvoidInline ] admin.site.register(Member) admin.site.register(Client, ClientAdmin) admin.site.register(Route) admin.site.register(Contact) admin.site.register(Address) admin.site.register(Referencing) admin.site.register(Client_avoid_component) admin.site.register(Option) admin.site.register(Client_option) admin.site.register(Restriction)
Add ingredients to avoid as a TabularInline to Client
Add ingredients to avoid as a TabularInline to Client Do not edit the ingredients to avoid directly, edit them through a tabular form from the client form
Python
agpl-3.0
savoirfairelinux/santropol-feast,madmath/sous-chef,savoirfairelinux/santropol-feast,savoirfairelinux/sous-chef,savoirfairelinux/sous-chef,savoirfairelinux/sous-chef,savoirfairelinux/santropol-feast,madmath/sous-chef,madmath/sous-chef
--- +++ @@ -4,14 +4,27 @@ Client_avoid_ingredient, Option, Client_option, Restriction) +from meal.models import Ingredient + + +class IngredientsToAvoidInline(admin.TabularInline): + model = Client.ingredients_to_avoid.through + + +class ClientAdmin(admin.ModelAdmin): + + inlines = [ + IngredientsToAvoidInline + ] + + admin.site.register(Member) -admin.site.register(Client) +admin.site.register(Client, ClientAdmin) admin.site.register(Route) admin.site.register(Contact) admin.site.register(Address) admin.site.register(Referencing) admin.site.register(Client_avoid_component) -admin.site.register(Client_avoid_ingredient) admin.site.register(Option) admin.site.register(Client_option) admin.site.register(Restriction)
83c12d598221aac8e7173fb7d78083bc1c5ab64b
tests/test_commands.py
tests/test_commands.py
import unittest from cobe.commands import LearnIrcLogCommand class testIrcLogParsing(unittest.TestCase): def setUp(self): self.command = LearnIrcLogCommand() def testNonPubmsg(self): msg = "this is some non-pubmsg text found in a log" cmd = self.command self.assertEqual(None, cmd._parse_irc_message(msg)) def testNormalPubmsg(self): msg = "12:00 <foo> bar baz" cmd = self.command self.assertEqual("bar baz", cmd._parse_irc_message(msg)) def testKibotQuotePubmsg(self): msg = "12:00 <foo> \"bar baz\" --user, 01-oct-09" cmd = self.command self.assertEqual("bar baz", cmd._parse_irc_message(msg)) if __name__ == '__main__': unittest.main()
import unittest from cobe.commands import LearnIrcLogCommand class testIrcLogParsing(unittest.TestCase): def setUp(self): self.command = LearnIrcLogCommand() def testNonPubmsg(self): msg = "this is some non-pubmsg text found in a log" cmd = self.command self.assertEqual(None, cmd._parse_irc_message(msg)) def testNormalPubmsg(self): msg = "12:00 <foo> bar baz" cmd = self.command self.assertEqual("bar baz", cmd._parse_irc_message(msg)) def testKibotQuotePubmsg(self): msg = "12:00 <foo> \"bar baz\" --user, 01-oct-09" cmd = self.command self.assertEqual("bar baz", cmd._parse_irc_message(msg)) def testIgnoredNickPubmsg(self): msg = "12:00 <foo> bar baz" cmd = self.command self.assertEqual(None, cmd._parse_irc_message(msg, ["foo"])) if __name__ == '__main__': unittest.main()
Add a unit test for ignored nicks in _parse_irc_message
Add a unit test for ignored nicks in _parse_irc_message
Python
mit
pteichman/cobe,wodim/cobe-ng,wodim/cobe-ng,DarkMio/cobe,tiagochiavericosta/cobe,meska/cobe,LeMagnesium/cobe,LeMagnesium/cobe,DarkMio/cobe,meska/cobe,tiagochiavericosta/cobe,pteichman/cobe
--- +++ @@ -24,5 +24,11 @@ self.assertEqual("bar baz", cmd._parse_irc_message(msg)) + def testIgnoredNickPubmsg(self): + msg = "12:00 <foo> bar baz" + cmd = self.command + + self.assertEqual(None, cmd._parse_irc_message(msg, ["foo"])) + if __name__ == '__main__': unittest.main()
3760005cec3174b14d3f0ee20327e1b8de9ce800
ffflash/lib/container.py
ffflash/lib/container.py
from os import path from ffflash import RELEASE, log, now, timeout from ffflash.lib.clock import epoch_repr from ffflash.lib.data import merge_dicts from ffflash.lib.files import read_json_file, write_json_file class Container: def __init__(self, spec, filename): self._spec = spec self._location = path.abspath(filename) self.data = read_json_file(self._location, fallback={}) self._info() def _info(self, info={}): self.data['_info'] = self.data.get('_info', {}) self.data['_info']['generator'] = RELEASE self.data['_info']['access'] = self.data['_info'].get('access', {}) if not self.data['_info']['access'].get('first', False): self.data['_info']['access']['first'] = now self.data['_info']['access']['last'] = now self.data['_info']['access']['overall'] = epoch_repr( abs(now - self.data['_info']['access']['first']), ms=True ) self.data['_info']['access']['scrub'] = timeout if info: self.data['_info'] = merge_dicts(self.data['_info'], info) def save(self, info={}): self._info(info) if write_json_file(self._location, self.data): log.info('{} saved {}'.format(self._spec, self._location))
from os import path

from ffflash import RELEASE, log, now, timeout
from ffflash.lib.clock import epoch_repr
from ffflash.lib.data import Element
from ffflash.lib.files import read_json_file, write_json_file


class Container:
    """JSON-backed store of two Elements: bookkeeping metadata (``_info``)
    and a payload keyed by this container's spec name."""

    def __init__(self, spec, filename):
        self._spec = spec
        self._location = path.abspath(filename)
        loaded = read_json_file(self._location, fallback={})

        # Split the stored document into its metadata and payload parts.
        self.info = Element(loaded.get('_info', {}))
        self.data = Element(loaded.get(self._spec, {}))

    def _info(self):
        """Refresh the access/bookkeeping metadata prior to a save."""
        self.info.generator = RELEASE

        # First access timestamp is written once, then kept.
        if not self.info.access.first:
            self.info.access.first = now
        self.info.access.last = now
        self.info.access.overall = epoch_repr(
            abs(now - self.info.access.first), ms=True
        )
        self.info.access.scrub = timeout

    def save(self):
        """Write metadata and payload back to the JSON file on disk."""
        self._info()
        document = {
            '_info': self.info,
            self._spec: self.data
        }
        if write_json_file(self._location, document):
            log.info('{} saved {}'.format(self._spec, self._location))
Rewrite Container to dump/load two Elements (data,_info) into json file
Rewrite Container to dump/load two Elements (data,_info) into json file
Python
bsd-3-clause
spookey/ffflash,spookey/ffflash
--- +++ @@ -2,7 +2,7 @@ from ffflash import RELEASE, log, now, timeout from ffflash.lib.clock import epoch_repr -from ffflash.lib.data import merge_dicts +from ffflash.lib.data import Element from ffflash.lib.files import read_json_file, write_json_file @@ -10,28 +10,28 @@ def __init__(self, spec, filename): self._spec = spec self._location = path.abspath(filename) - self.data = read_json_file(self._location, fallback={}) + content = read_json_file(self._location, fallback={}) + + self.info = Element(content.get('_info', {})) + self.data = Element(content.get(self._spec, {})) + + def _info(self): + self.info.generator = RELEASE + + if not self.info.access.first: + self.info.access.first = now + self.info.access.last = now + self.info.access.overall = epoch_repr( + abs(now - self.info.access.first), ms=True + ) + self.info.access.scrub = timeout + + def save(self): self._info() - - def _info(self, info={}): - self.data['_info'] = self.data.get('_info', {}) - self.data['_info']['generator'] = RELEASE - - self.data['_info']['access'] = self.data['_info'].get('access', {}) - if not self.data['_info']['access'].get('first', False): - self.data['_info']['access']['first'] = now - self.data['_info']['access']['last'] = now - self.data['_info']['access']['overall'] = epoch_repr( - abs(now - self.data['_info']['access']['first']), - ms=True - ) - self.data['_info']['access']['scrub'] = timeout - - if info: - self.data['_info'] = merge_dicts(self.data['_info'], info) - - def save(self, info={}): - self._info(info) - if write_json_file(self._location, self.data): + content = { + '_info': self.info, + self._spec: self.data + } + if write_json_file(self._location, content): log.info('{} saved {}'.format(self._spec, self._location))
a594c5154ca35590f2b793b89fc12e77d6b01537
accounts/management/commands/clean_expired.py
accounts/management/commands/clean_expired.py
# coding=utf-8 from django.core.management.base import BaseCommand from registration.models import RegistrationProfile class Command(BaseCommand): help = 'Cleanup expired registrations' OPT_SIMULATE = 'dry-run' def add_arguments(self, parser): parser.add_argument(''.join(['--', self.OPT_SIMULATE]), action='store_true', dest=self.OPT_SIMULATE, default=False, help='Only print registrations that would be deleted') def handle(self, *args, **options): self.stdout.write('Deleting expired user registrations') dry_run = True if self.OPT_SIMULATE in options and options[ self.OPT_SIMULATE] else False if dry_run: user_count, reg_profile_count = 0, 0 for profile in RegistrationProfile.objects.select_related( 'user').exclude(user__is_active=True): if profile.activation_key_expired(): user_count += 1 reg_profile_count += 1 print "Would delete {} User and {} RegistrationProfile objects".format( user_count, reg_profile_count) else: RegistrationProfile.objects.delete_expired_users()
# coding=utf-8 from django.core.management.base import BaseCommand from registration.models import RegistrationProfile class Command(BaseCommand): help = 'Cleanup expired registrations' OPT_SIMULATE = 'dry-run' def add_arguments(self, parser): parser.add_argument(''.join(['--', self.OPT_SIMULATE]), action='store_true', dest=self.OPT_SIMULATE, default=False, help='Only print registrations that would be deleted') def handle(self, *args, **options): dry_run = True if self.OPT_SIMULATE in options and options[ self.OPT_SIMULATE] else False if dry_run: user_count, reg_profile_count = 0, 0 for profile in RegistrationProfile.objects.select_related( 'user').exclude(user__is_active=True): if profile.activation_key_expired(): user_count += 1 reg_profile_count += 1 print "Would delete {} User and {} RegistrationProfile objects".format( user_count, reg_profile_count) else: RegistrationProfile.objects.delete_expired_users()
Make cleanup command less verbose
Make cleanup command less verbose
Python
agpl-3.0
volunteer-planner/volunteer_planner,volunteer-planner/volunteer_planner,coders4help/volunteer_planner,pitpalme/volunteer_planner,klinger/volunteer_planner,volunteer-planner/volunteer_planner,christophmeissner/volunteer_planner,volunteer-planner/volunteer_planner,klinger/volunteer_planner,klinger/volunteer_planner,pitpalme/volunteer_planner,alper/volunteer_planner,christophmeissner/volunteer_planner,klinger/volunteer_planner,christophmeissner/volunteer_planner,alper/volunteer_planner,christophmeissner/volunteer_planner,alper/volunteer_planner,pitpalme/volunteer_planner,coders4help/volunteer_planner,coders4help/volunteer_planner,coders4help/volunteer_planner,pitpalme/volunteer_planner
--- +++ @@ -17,7 +17,6 @@ help='Only print registrations that would be deleted') def handle(self, *args, **options): - self.stdout.write('Deleting expired user registrations') dry_run = True if self.OPT_SIMULATE in options and options[ self.OPT_SIMULATE] else False if dry_run:
1636757f52a553c99fb40059f4461e97485d2199
fits/make_fit_feedmes.py
fits/make_fit_feedmes.py
#!/usr/bin/env python from glob import glob import os import re def make_feedmes(): # Used to convert all the fit*.galfit files to fit*.diff ids = glob('*/') for id in ids: os.chdir(id) feedmes = glob('fit*diff') # output starting models for f in feedmes: template = r'fit(.*)(\d)(n|m){0,1}([ugrizYJHK]{0,1})([abcde]{0,1})' matchobj = re.match(template, f) if matchobj.group(1) != 'A' or matchobj.group(5) != '': cmd = matchobj.expand('patch -o \g<0>.galfit ../A\g<2>/' 'fitA\g<2>\g<4>.galfit \g<0>.diff') os.system(cmd) os.chdir('..') if __name__ =='__main__': make_feedmes()
#!/usr/bin/env python

from glob import glob
import os
import re


def make_feedmes():
    """Regenerate fit*.galfit feedme files by patching the band-A base
    fits with the stored fit*.diff files.

    Walks every subdirectory, matching diff names of the form
    fit<label><digit>[n|m][band][variant]; the band-A base fits without a
    variant suffix are the patch sources and are skipped.
    """
    # Regex is loop-invariant: compile-once semantics by hoisting it here.
    # Raw string so the \g group references survive as literal text.
    template = r'.*fit(.*)(\d)(n|m){0,1}([ugrizYJHK]{0,1})([abcde]{0,1})'
    for subdir in glob('*/'):
        feedmes = glob(subdir + 'fit*diff')
        # output starting models
        for f in feedmes:
            matchobj = re.match(template, f)
            if matchobj is None:
                # Unexpected filename layout; skip rather than crash on
                # matchobj.group() of a failed match.
                continue
            if matchobj.group(1) != 'A' or matchobj.group(5) != '':
                cmd = matchobj.expand(r'patch -o \g<0>.galfit A\g<2>/'
                                      r'fitA\g<2>\g<4>.galfit \g<0>.diff')
                os.system(cmd)


if __name__ == '__main__':
    make_feedmes()
Fix to work with new patch
Fix to work with new patch
Python
mit
MegaMorph/galfitm-illustrations,MegaMorph/galfitm-illustrations
--- +++ @@ -8,17 +8,15 @@ # Used to convert all the fit*.galfit files to fit*.diff ids = glob('*/') for id in ids: - os.chdir(id) - feedmes = glob('fit*diff') + feedmes = glob(id+'fit*diff') # output starting models for f in feedmes: - template = r'fit(.*)(\d)(n|m){0,1}([ugrizYJHK]{0,1})([abcde]{0,1})' + template = r'.*fit(.*)(\d)(n|m){0,1}([ugrizYJHK]{0,1})([abcde]{0,1})' matchobj = re.match(template, f) if matchobj.group(1) != 'A' or matchobj.group(5) != '': - cmd = matchobj.expand('patch -o \g<0>.galfit ../A\g<2>/' + cmd = matchobj.expand('patch -o \g<0>.galfit A\g<2>/' 'fitA\g<2>\g<4>.galfit \g<0>.diff') os.system(cmd) - os.chdir('..') if __name__ =='__main__': make_feedmes()
07f409bb6b8d008cf473aeb33fd0833dccfba402
mm1_main.py
mm1_main.py
#!/usr/bin/env python # encoding: utf-8 import mm1 import sim ### Params # Mean interarrival rate of customers per second; # hence, 0.05 <=> 3 people/minute interarrival_rate = 0.05 # Mean service rate by the teller per second; # hence, 0.1 <=> 6 people/minute service_rate = 0.1 ### Initialize # Create new simulation engine se = sim.SimulationEngine() # Seed default PRNG se.prng.seed = 100 # Create MM1 specific event handler event_handler = mm1.MM1EventHandler() event_handler.interarrival_rate = interarrival_rate event_handler.service_rate = service_rate ### Simulate # Schedule finishing event; simulate for 24h se.stop(60*60*24) # Start simulating se.start()
#!/usr/bin/env python
# encoding: utf-8

import argparse
import mm1
import sim
import time

### Parse command line arguments
arg_parser = argparse.ArgumentParser(description="M/M/1 queue simulation -- Main script")
arg_parser.add_argument('sim_duration', metavar='simulation_duration',
                        type=int, help='simulation duration in seconds')
arg_parser.add_argument('--seed', dest='seed', default=int(round(time.time())),
                        type=int, help='seed for the PRNG (default: current system timestamp)')
cli_args = arg_parser.parse_args()

### Params
# Mean interarrival rate of customers per second;
# hence, 0.05 <=> 3 people/minute
interarrival_rate = 0.05
# Mean service rate by the teller per second;
# hence, 0.1 <=> 6 people/minute
service_rate = 0.1

### Initialize
# Create a new simulation engine and seed its default PRNG
engine = sim.SimulationEngine()
engine.prng.seed = cli_args.seed

# Create the M/M/1-specific event handler
handler = mm1.MM1EventHandler()
handler.interarrival_rate = interarrival_rate
handler.service_rate = service_rate

### Simulate
# Schedule the finishing event, then run
engine.stop(cli_args.sim_duration)
engine.start()
Add command line arguments to main script.
Add command line arguments to main script.
Python
mit
kubkon/des-in-python
--- +++ @@ -1,9 +1,21 @@ #!/usr/bin/env python # encoding: utf-8 +import argparse import mm1 import sim +import time + +### Parse command line arguments +parser = argparse.ArgumentParser(description="M/M/1 queue simulation -- Main script") +parser.add_argument('sim_duration', metavar='simulation_duration', + type=int, help='simulation duration in seconds') +parser.add_argument('--seed', dest='seed', default=int(round(time.time())), + type=int, help='seed for the PRNG (default: current system timestamp)') +args = parser.parse_args() +sim_duration = args.sim_duration +seed = args.seed ### Params # Mean interarrival rate of customers per second; @@ -17,14 +29,14 @@ # Create new simulation engine se = sim.SimulationEngine() # Seed default PRNG -se.prng.seed = 100 +se.prng.seed = seed # Create MM1 specific event handler event_handler = mm1.MM1EventHandler() event_handler.interarrival_rate = interarrival_rate event_handler.service_rate = service_rate ### Simulate -# Schedule finishing event; simulate for 24h -se.stop(60*60*24) +# Schedule finishing event +se.stop(sim_duration) # Start simulating se.start()
471828d39ff256961bf48323feb43438901a4762
orges/plugins/base.py
orges/plugins/base.py
"""This module provides an abstract base class for invocation plugins""" from abc import abstractmethod, ABCMeta class BasePlugin(object): """ Abstract base class for invocation plugins. Plugin developers can either derive their objects directly from this class or from :class:`orges.plugins.dummy.DummyInvocationPlugin` to only override methods selectively. """ __metaclass__ = ABCMeta @abstractmethod def before_invoke(self, invocation): """ Called right before the invoker calls the objective function :param invocation: Information about the current (and past) invocations :type invocation: :class:`orges.invoker.pluggable.Invocation` """ pass @abstractmethod def on_invoke(self, invocation): """ Called after the invoker called the objective function Since objective functions are usually called asyncronously `invocation` will not contain any results yet. :param invocation: Information about the current (and past) invocations :type invocation: :class:`orges.invoker.pluggable.Invocation` """ pass @abstractmethod def on_result(self, invocation): """ Called when the invocation of the objective function was successful :param invocation: Information about the current (and past) invocations :type invocation: :class:`orges.invoker.pluggable.Invocation` """ pass @abstractmethod def on_error(self, invocation): """ Called when the invocation of the objective function was not successful Since the invocation was not successful `invocation` will not contain any result. :param invocation: Information about the current (and past) invocations :type invocation: :class:`orges.invoker.pluggable.Invocation` """ pass
"""This module provides an abstract base class for invocation plugins""" from abc import abstractmethod, ABCMeta class BasePlugin(object): """ Abstract base class for invocation plugins. Plugin developers can either derive their objects directly from this class or from :class:`orges.plugins.dummy.DummyPlugin` to only override methods selectively. """ __metaclass__ = ABCMeta @abstractmethod def before_invoke(self, invocation): """ Called right before the invoker calls the objective function :param invocation: Information about the current (and past) invocations :type invocation: :class:`orges.invoker.pluggable.Invocation` """ pass @abstractmethod def on_invoke(self, invocation): """ Called after the invoker called the objective function Since objective functions are usually called asyncronously `invocation` will not contain any results yet. :param invocation: Information about the current (and past) invocations :type invocation: :class:`orges.invoker.pluggable.Invocation` """ pass @abstractmethod def on_result(self, invocation): """ Called when the invocation of the objective function was successful :param invocation: Information about the current (and past) invocations :type invocation: :class:`orges.invoker.pluggable.Invocation` """ pass @abstractmethod def on_error(self, invocation): """ Called when the invocation of the objective function was not successful Since the invocation was not successful `invocation` will not contain any result. :param invocation: Information about the current (and past) invocations :type invocation: :class:`orges.invoker.pluggable.Invocation` """ pass
Fix broken reference in documentation
Fix broken reference in documentation
Python
bsd-3-clause
cigroup-ol/metaopt,cigroup-ol/metaopt,cigroup-ol/metaopt
--- +++ @@ -9,7 +9,7 @@ Abstract base class for invocation plugins. Plugin developers can either derive their objects directly from this class - or from :class:`orges.plugins.dummy.DummyInvocationPlugin` to only override + or from :class:`orges.plugins.dummy.DummyPlugin` to only override methods selectively. """
35409711d9976ed44e25e314486f3f703b18c068
packages/cardpay-subgraph-extraction/export.py
packages/cardpay-subgraph-extraction/export.py
from subgraph_extractor.cli import extract_from_config import click from cloudpathlib import AnyPath @click.command() @click.option( "--subgraph-config-folder", help="The folder containing the subgraph config files", default='config', ) @click.option( "--database-string", default="postgresql://graph-node:let-me-in@localhost:5432/graph-node", help="The database string for connections, defaults to a local graph-node", ) @click.option( "--output-location", default="data", help="The base output location, whether local or cloud", ) def export(subgraph_config_folder, database_string, output_location): for file_name in AnyPath(subgraph_config_folder).glob('*.yaml'): extract_from_config( file_name, database_string, output_location ) if __name__ == "__main__": export()
from subgraph_extractor.cli import extract_from_config
import click
from cloudpathlib import AnyPath
import os

# Option defaults are resolved once at import time, environment variables
# taking precedence over the built-in fallbacks.
_DEFAULT_DATABASE_STRING = os.environ.get(
    "SE_DATABASE_STRING",
    "postgresql://graph-node:let-me-in@localhost:5432/graph-node",
)
_DEFAULT_OUTPUT_LOCATION = os.environ.get("SE_OUTPUT_LOCATION", "data")


@click.command()
@click.option(
    "--subgraph-config-folder",
    help="The folder containing the subgraph config files",
    default="config",
)
@click.option(
    "--database-string",
    default=_DEFAULT_DATABASE_STRING,
    help="The database string for connections. Defaults to SE_DATABASE_STRING if set, otherwise a local graph-node",
)
@click.option(
    "--output-location",
    default=_DEFAULT_OUTPUT_LOCATION,
    help="The base output location, whether local or cloud. Defaults to SE_OUTPUT_LOCATION if set, otherwise a folder called data",
)
def export(subgraph_config_folder, database_string, output_location):
    # Deliberately no docstring: click would surface it as --help text.
    # Run an extraction for every subgraph config found in the folder.
    for config_path in AnyPath(subgraph_config_folder).glob("*.yaml"):
        extract_from_config(config_path, database_string, output_location)


if __name__ == "__main__":
    export()
Support environment variables for the extraction
Support environment variables for the extraction
Python
mit
cardstack/cardstack,cardstack/cardstack,cardstack/cardstack,cardstack/cardstack,cardstack/cardstack,cardstack/cardstack
--- +++ @@ -1,30 +1,32 @@ from subgraph_extractor.cli import extract_from_config import click from cloudpathlib import AnyPath +import os + @click.command() @click.option( "--subgraph-config-folder", help="The folder containing the subgraph config files", - default='config', + default="config", ) @click.option( "--database-string", - default="postgresql://graph-node:let-me-in@localhost:5432/graph-node", - help="The database string for connections, defaults to a local graph-node", + default=os.environ.get( + "SE_DATABASE_STRING", + "postgresql://graph-node:let-me-in@localhost:5432/graph-node", + ), + help="The database string for connections. Defaults to SE_DATABASE_STRING if set, otherwise a local graph-node", ) @click.option( "--output-location", - default="data", - help="The base output location, whether local or cloud", + default=os.environ.get("SE_OUTPUT_LOCATION", "data"), + help="The base output location, whether local or cloud. Defaults to SE_OUTPUT_LOCATION if set, otherwise a folder called data", ) def export(subgraph_config_folder, database_string, output_location): - for file_name in AnyPath(subgraph_config_folder).glob('*.yaml'): - extract_from_config( - file_name, - database_string, - output_location - ) + for file_name in AnyPath(subgraph_config_folder).glob("*.yaml"): + extract_from_config(file_name, database_string, output_location) + if __name__ == "__main__": export()
b60cfdb2b338a4f87b4ac6ba7dd03c9c1d751b37
scrapi/processing/base.py
scrapi/processing/base.py
class BaseProcessor(object): NAME = None def process_raw(self, raw_doc, **kwargs): pass # pragma: no cover def process_normalized(self, raw_doc, normalized, **kwargs): pass # pragma: no cover
import six
import json
from abc import abstractproperty, abstractmethod

from requests.structures import CaseInsensitiveDict


class BaseProcessor(object):
    # Interface for document processors; backends override these hooks.
    NAME = None

    def process_raw(self, raw_doc, **kwargs):
        pass  # pragma: no cover

    def process_normalized(self, raw_doc, normalized, **kwargs):
        pass  # pragma: no cover


class BaseHarvesterResponse(object):
    """A parody of requests.response but stored in a database for caching
    Should reflect all methods of a response object
    Contains an additional field time_made, self-explanatory
    """

    class DoesNotExist(Exception):
        # Raised by backends when no cached response matches a lookup.
        pass

    # --- abstract record fields, supplied by the storage backend ---

    @abstractproperty
    def method(self):
        raise NotImplementedError

    @abstractproperty
    def url(self):
        raise NotImplementedError

    @abstractproperty
    def ok(self):
        raise NotImplementedError

    @abstractproperty
    def content(self):
        raise NotImplementedError

    @abstractproperty
    def encoding(self):
        raise NotImplementedError

    @abstractproperty
    def headers_str(self):
        raise NotImplementedError

    @abstractproperty
    def status_code(self):
        raise NotImplementedError

    @abstractproperty
    def time_made(self):
        raise NotImplementedError

    # --- abstract persistence operations ---

    @classmethod
    @abstractmethod
    def get(self, url=None, method=None):
        raise NotImplementedError

    @abstractmethod
    def save(self):
        raise NotImplementedError

    @abstractmethod
    def update(self, **kwargs):
        raise NotImplementedError

    # --- concrete helpers shared by every backend ---

    def json(self):
        """Deserialize the cached body as JSON."""
        try:
            body = self.content.decode('utf-8')
        except AttributeError:
            # Content is already text (no .decode), e.g. a str on Python 3.
            body = self.content
        return json.loads(body)

    @property
    def headers(self):
        # headers_str holds the headers serialized as a JSON object.
        parsed = json.loads(self.headers_str)
        return CaseInsensitiveDict(parsed)

    @property
    def text(self):
        return six.u(self.content)
Add definition of abstract harvester model
Add definition of abstract harvester model
Python
apache-2.0
erinspace/scrapi,felliott/scrapi,fabianvf/scrapi,CenterForOpenScience/scrapi,mehanig/scrapi,erinspace/scrapi,felliott/scrapi,CenterForOpenScience/scrapi,fabianvf/scrapi,mehanig/scrapi
--- +++ @@ -1,3 +1,10 @@ +import six +import json +from abc import abstractproperty, abstractmethod + +from requests.structures import CaseInsensitiveDict + + class BaseProcessor(object): NAME = None @@ -6,3 +13,73 @@ def process_normalized(self, raw_doc, normalized, **kwargs): pass # pragma: no cover + + +class BaseHarvesterResponse(object): + """A parody of requests.response but stored in a database for caching + Should reflect all methods of a response object + Contains an additional field time_made, self-explanatory + """ + + class DoesNotExist(Exception): + pass + + @abstractproperty + def method(self): + raise NotImplementedError + + @abstractproperty + def url(self): + raise NotImplementedError + + @abstractproperty + def ok(self): + raise NotImplementedError + + @abstractproperty + def content(self): + raise NotImplementedError + + @abstractproperty + def encoding(self): + raise NotImplementedError + + @abstractproperty + def headers_str(self): + raise NotImplementedError + + @abstractproperty + def status_code(self): + raise NotImplementedError + + @abstractproperty + def time_made(self): + raise NotImplementedError + + @classmethod + @abstractmethod + def get(self, url=None, method=None): + raise NotImplementedError + + @abstractmethod + def save(self): + raise NotImplementedError + + @abstractmethod + def update(self, **kwargs): + raise NotImplementedError + + def json(self): + try: + content = self.content.decode('utf-8') + except AttributeError: # python 3eeeee! + content = self.content + return json.loads(content) + + @property + def headers(self): + return CaseInsensitiveDict(json.loads(self.headers_str)) + + @property + def text(self): + return six.u(self.content)
a8997dad913fbc5a12def6a31931efab16efd285
{{cookiecutter.project_slug}}/search/views.py
{{cookiecutter.project_slug}}/search/views.py
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator from django.shortcuts import render from wagtail.core.models import Page from wagtail.search.models import Query def search(request): search_query = request.GET.get('query', None) page = request.GET.get('page', 1) # Search if search_query: search_results = Page.objects.live().search(search_query) query = Query.get(search_query) # Record hit query.add_hit() else: search_results = Page.objects.none() # Pagination paginator = Paginator(search_results, 10) try: search_results = paginator.page(page) except PageNotAnInteger: search_results = paginator.page(1) except EmptyPage: search_results = paginator.page(paginator.num_pages) return render(request, 'search/search.html', { 'search_query': search_query, 'search_results': search_results, })
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.shortcuts import render

from wagtail.core.models import Page
from wagtail.search.models import Query


def search(request):
    """Render paginated full-text search results for the ``?q=`` query."""
    search_query = request.GET.get('q', None)
    page_number = request.GET.get('page', 1)

    if search_query:
        # Search live pages only, and record the hit against the query.
        search_results = Page.objects.live().search(search_query)
        Query.get(search_query).add_hit()
    else:
        # No query given: render an empty result set.
        search_results = Page.objects.none()

    # Ten results per page; fall back to the first / last page on bad input.
    paginator = Paginator(search_results, 10)
    try:
        search_results = paginator.page(page_number)
    except PageNotAnInteger:
        search_results = paginator.page(1)
    except EmptyPage:
        search_results = paginator.page(paginator.num_pages)

    context = {
        'search_query': search_query,
        'search_results': search_results,
    }
    return render(request, 'search/search.html', context)
Make search parameter match template parameter name
Make search parameter match template parameter name
Python
mit
chrisdev/wagtail-cookiecutter-foundation,chrisdev/wagtail-cookiecutter-foundation,ilendl2/wagtail-cookiecutter-foundation,ilendl2/wagtail-cookiecutter-foundation,ilendl2/wagtail-cookiecutter-foundation,chrisdev/wagtail-cookiecutter-foundation
--- +++ @@ -6,7 +6,7 @@ def search(request): - search_query = request.GET.get('query', None) + search_query = request.GET.get('q', None) page = request.GET.get('page', 1) # Search
721e2ab81f78de5956c37b654a0cddf99679a20f
django/applications/catmaid/middleware.py
django/applications/catmaid/middleware.py
import json from django.http import HttpResponse from django.contrib.auth.models import User from django.conf import settings from traceback import format_exc class AnonymousAuthenticationMiddleware(object): """ This middleware class tests whether the current user is the anonymous user. If so, it replaces the request.user object with Guardian's anonymous user and monkey patchs it to behave like Django's anonymou user. """ def process_request(self, request): if request.user.is_anonymous() and settings.ANONYMOUS_USER_ID: request.user = User.objects.get(id=settings.ANONYMOUS_USER_ID) request.user.is_anonymous = lambda: True request.user.is_authenticated = lambda: False return None class AjaxExceptionMiddleware(object): def process_exception(self, request, exception): response = { 'error': str(exception), 'detail': format_exc(), } if settings.DEBUG: import sys, traceback (exc_type, exc_info, tb) = sys.exc_info() response['type'] = exc_type.__name__ response['info'] = str(exc_info) response['traceback'] = ''.join(traceback.format_tb(tb)) return HttpResponse(json.dumps(response))
import json from django.http import HttpResponse from django.contrib.auth.models import User from django.conf import settings from traceback import format_exc class AnonymousAuthenticationMiddleware(object): """ This middleware class tests whether the current user is the anonymous user. If so, it replaces the request.user object with Guardian's anonymous user and monkey patchs it to behave like Django's anonymou user. """ def process_request(self, request): if request.user.is_anonymous() and settings.ANONYMOUS_USER_ID: request.user = User.objects.get(id=settings.ANONYMOUS_USER_ID) request.user.is_anonymous = lambda: False request.user.is_authenticated = lambda: False return None class AjaxExceptionMiddleware(object): def process_exception(self, request, exception): response = { 'error': str(exception), 'detail': format_exc(), } if settings.DEBUG: import sys, traceback (exc_type, exc_info, tb) = sys.exc_info() response['type'] = exc_type.__name__ response['info'] = str(exc_info) response['traceback'] = ''.join(traceback.format_tb(tb)) return HttpResponse(json.dumps(response))
Make anonymous user distinguishable from Django's AnonymousUser
Make anonymous user distinguishable from Django's AnonymousUser Django uses a special object---the AnonymousUser---for requests that are not bound to a logged in user. Since we use guardian we replace this object with an actual user object (with the ID set in ANONYMOUS_USER_ID, usually -1) in a middleware class. So far this user had its is_anonymous() function set to return False. This however imposes a problem if one wants to have permissions for this users (e.g. testing them with user.has_perm()). As it turns out, permissions are not checked if is_anonymous() returns True. The Django documentation says this method is only used to distinguish real user objects from the AnonymousUser object. Our anonymous user is a real user and therefore this commit changes this function of the anonymous user to return False.
Python
agpl-3.0
htem/CATMAID,htem/CATMAID,htem/CATMAID,fzadow/CATMAID,fzadow/CATMAID,htem/CATMAID,fzadow/CATMAID,fzadow/CATMAID
--- +++ @@ -14,7 +14,7 @@ def process_request(self, request): if request.user.is_anonymous() and settings.ANONYMOUS_USER_ID: request.user = User.objects.get(id=settings.ANONYMOUS_USER_ID) - request.user.is_anonymous = lambda: True + request.user.is_anonymous = lambda: False request.user.is_authenticated = lambda: False return None
61e6c21e32a481be9b2c61d71b0faef3fe731ae6
tests/rules_tests/RulesTest.py
tests/rules_tests/RulesTest.py
#!/usr/bin/env python """ :Author Patrik Valkovic :Created 15.08.2017 15:31 :Licence GNUv3 Part of grammpy """ from unittest import main, TestCase from grammpy import * from .grammar import * class RulesTest(TestCase): def test_oneRules(self): class Tmp1(Rule): rules = [([NFirst], [NSecond, 0]), ([NThird], [0, 1]), ([NSecond], [NSecond, 'a'])] grammar.add_rule(Tmp1) if __name__ == '__main__': main()
#!/usr/bin/env python """ :Author Patrik Valkovic :Created 15.08.2017 15:31 :Licence GNUv3 Part of grammpy """ from unittest import main, TestCase from grammpy import * from .grammar import * class RulesTest(TestCase): def setUp(self): self.g = Grammar(terminals=[0, 1, 2, 'a', 'b', 'c', TFirst, TSecond, TThird, TInstFirst, TInstSecond, TInstThird], nonterminals=[NFirst, NSecond, NThird, NFourth, NFifth]) def test_oneInRules(self): class Tmp1(Rule): rules = [([NFirst], [NSecond, 0]), ([NThird], [0, 1]), ([NSecond], [NSecond, 'a'])] self.g.add_rule(Tmp1) self.assertEqual(self.g.rules_count(), 3) self.assertTrue(self.g.have_rule(Tmp1)) def test_oneInRulesIteration(self): class Tmp1(Rule): rules = [([NFirst], [NSecond, 0]), ([NThird], [0, 1]), ([NSecond], [NSecond, 'a'])] self.g.add_rule(Tmp1) for rule in self.g.rule(): self.assertIn(rule.rule, Tmp1.rules) def test_hashes(self): class Tmp1(Rule): rules = [([NFirst], [NSecond, 0]), ([NThird], [0, 1]), ([NSecond], [NSecond, 'a'])] class Tmp2(Rule): rules = [([NFirst], [NSecond, 0]), ([NThird], [0, 1]), ([NSecond], [NSecond, 'a'])] hash1 = hash(Tmp1) hash2 = hash(Tmp2) self.assertEqual(hash1, hash2) if __name__ == '__main__': main()
Add test that check hash values of rule
Add test that check hash values of rule
Python
mit
PatrikValkovic/grammpy
--- +++ @@ -13,13 +13,43 @@ class RulesTest(TestCase): - def test_oneRules(self): + def setUp(self): + self.g = Grammar(terminals=[0, 1, 2, + 'a', 'b', 'c', + TFirst, TSecond, TThird, + TInstFirst, TInstSecond, TInstThird], + nonterminals=[NFirst, NSecond, NThird, NFourth, NFifth]) + + def test_oneInRules(self): class Tmp1(Rule): rules = [([NFirst], [NSecond, 0]), ([NThird], [0, 1]), ([NSecond], [NSecond, 'a'])] - grammar.add_rule(Tmp1) + self.g.add_rule(Tmp1) + self.assertEqual(self.g.rules_count(), 3) + self.assertTrue(self.g.have_rule(Tmp1)) + def test_oneInRulesIteration(self): + class Tmp1(Rule): + rules = [([NFirst], [NSecond, 0]), + ([NThird], [0, 1]), + ([NSecond], [NSecond, 'a'])] + self.g.add_rule(Tmp1) + for rule in self.g.rule(): + self.assertIn(rule.rule, Tmp1.rules) + + def test_hashes(self): + class Tmp1(Rule): + rules = [([NFirst], [NSecond, 0]), + ([NThird], [0, 1]), + ([NSecond], [NSecond, 'a'])] + class Tmp2(Rule): + rules = [([NFirst], [NSecond, 0]), + ([NThird], [0, 1]), + ([NSecond], [NSecond, 'a'])] + hash1 = hash(Tmp1) + hash2 = hash(Tmp2) + self.assertEqual(hash1, hash2) if __name__ == '__main__': main()
e87fc9172b9d56fe9d64cfda2fb36a3f71e69e70
tests/test_utils.py
tests/test_utils.py
# -*- coding: utf-8; -*- import re from jenkins_autojobs import main def test_filter_jobs(): class Job: def __init__(self, name): self.name = name class jenkins: pass names = ['feature-one', 'feature-two', 'release-one', 'release-two'] jenkins.jobs = [Job(i) for i in names] filter_jobs = lambda **kw: {i.name for i in main.filter_jobs(jenkins, **kw)} #------------------------------------------------------------------------- assert filter_jobs() == {'feature-one', 'feature-two', 'release-one', 'release-two'} res = filter_jobs(by_name_regex=[re.compile('feature-')]) assert res == {'feature-one', 'feature-two'} res = filter_jobs(by_name_regex=[re.compile('.*one'), re.compile('.*two')]) assert res == {'feature-one', 'feature-two', 'release-one', 'release-two'} #------------------------------------------------------------------------- view_jobs = { 'v1': [Job('scratch-one'), Job('scratch-two')], 'v2': [Job('release-one'), Job('maintenance-three')] } jenkins.view_jobs = lambda x: view_jobs[x] res = filter_jobs(by_views=['v1']) assert res == {'scratch-one', 'scratch-two'} res = filter_jobs(by_views=['v1', 'v2']) assert res == {'scratch-one', 'scratch-two', 'release-one', 'maintenance-three'}
# -*- coding: utf-8; -*- import re from jenkins_autojobs import main def test_filter_jobs(): class Job: def __init__(self, name): self.name = name class jenkins: @staticmethod def view_jobs(x): return { 'v1': [Job('scratch-one'), Job('scratch-two')], 'v2': [Job('release-one'), Job('maintenance-three')] }[x] names = ['feature-one', 'feature-two', 'release-one', 'release-two'] jenkins.jobs = [Job(i) for i in names] filter_jobs = lambda **kw: {i.name for i in main.filter_jobs(jenkins, **kw)} #------------------------------------------------------------------------- assert filter_jobs() == {'feature-one', 'feature-two', 'release-one', 'release-two'} res = filter_jobs(by_name_regex=[re.compile('feature-')]) assert res == {'feature-one', 'feature-two'} res = filter_jobs(by_name_regex=[re.compile('.*one'), re.compile('.*two')]) assert res == {'feature-one', 'feature-two', 'release-one', 'release-two'} #------------------------------------------------------------------------- res = filter_jobs(by_views=['v1']) assert res == {'scratch-one', 'scratch-two'} res = filter_jobs(by_views=['v1', 'v2']) assert res == {'scratch-one', 'scratch-two', 'release-one', 'maintenance-three'}
Fix tests on Python 2
Fix tests on Python 2
Python
bsd-3-clause
gvalkov/jenkins-autojobs,gvalkov/jenkins-autojobs
--- +++ @@ -11,7 +11,12 @@ self.name = name class jenkins: - pass + @staticmethod + def view_jobs(x): + return { + 'v1': [Job('scratch-one'), Job('scratch-two')], + 'v2': [Job('release-one'), Job('maintenance-three')] + }[x] names = ['feature-one', 'feature-two', 'release-one', 'release-two'] jenkins.jobs = [Job(i) for i in names] @@ -27,12 +32,6 @@ assert res == {'feature-one', 'feature-two', 'release-one', 'release-two'} #------------------------------------------------------------------------- - view_jobs = { - 'v1': [Job('scratch-one'), Job('scratch-two')], - 'v2': [Job('release-one'), Job('maintenance-three')] - } - jenkins.view_jobs = lambda x: view_jobs[x] - res = filter_jobs(by_views=['v1']) assert res == {'scratch-one', 'scratch-two'}
1963012ba4628f1f66d495e777275243dc7248e4
.CI/trigger_conda-forge.github.io.py
.CI/trigger_conda-forge.github.io.py
""" Trigger the conda-forge.github.io Travis job to restart. """ import os import requests import six import conda_smithy.ci_register def rebuild_travis(repo_slug): headers = conda_smithy.ci_register.travis_headers() # If we don't specify the API version, we get a 404. # Also fix the accepted content type. headers["Accept"] = "application/json" headers["Travis-API-Version"] = "3" # Trigger a build on `master`. encoded_slug = six.moves.urllib.parse.quote(repo_slug, safe='') url = 'https://api.travis-ci.org/repo/{}/requests'.format(encoded_slug) response = requests.post( url, json={"request": {"branch": "master"}}, headers=headers ) if response.status_code != 201: response.raise_for_status() if __name__ == '__main__': rebuild_travis('conda-forge/conda-forge.github.io')
""" Trigger the conda-forge.github.io Travis job to restart. """ import os import requests import six import conda_smithy.ci_register def rebuild_travis(repo_slug): headers = conda_smithy.ci_register.travis_headers() # If we don't specify the API version, we get a 404. # Also fix the accepted content type. headers["Accept"] = "application/json" headers["Travis-API-Version"] = "3" # Trigger a build on `master`. encoded_slug = six.moves.urllib.parse.quote(repo_slug, safe='') url = 'https://api.travis-ci.org/repo/{}/requests'.format(encoded_slug) response = requests.post( url, json={ "request": { "branch": "master", "message": "Triggering build from staged-recipes", } }, headers=headers ) if response.status_code != 201: response.raise_for_status() if __name__ == '__main__': rebuild_travis('conda-forge/conda-forge.github.io')
Add message to webpage repo trigger
Add message to webpage repo trigger Should fix triggering builds on the webpage repo even when the most recent commit message skip the CI build. Also should make it easier to identify builds started by this trigger. [ci skip] [skip ci]
Python
bsd-3-clause
jakirkham/staged-recipes,Cashalow/staged-recipes,dschreij/staged-recipes,scopatz/staged-recipes,stuertz/staged-recipes,conda-forge/staged-recipes,guillochon/staged-recipes,hadim/staged-recipes,SylvainCorlay/staged-recipes,mcs07/staged-recipes,scopatz/staged-recipes,sodre/staged-recipes,pmlandwehr/staged-recipes,sannykr/staged-recipes,shadowwalkersb/staged-recipes,sodre/staged-recipes,larray-project/staged-recipes,sodre/staged-recipes,patricksnape/staged-recipes,birdsarah/staged-recipes,NOAA-ORR-ERD/staged-recipes,pmlandwehr/staged-recipes,jochym/staged-recipes,chohner/staged-recipes,ceholden/staged-recipes,rmcgibbo/staged-recipes,hadim/staged-recipes,goanpeca/staged-recipes,barkls/staged-recipes,Cashalow/staged-recipes,kwilcox/staged-recipes,sannykr/staged-recipes,jjhelmus/staged-recipes,glemaitre/staged-recipes,isuruf/staged-recipes,mariusvniekerk/staged-recipes,larray-project/staged-recipes,jakirkham/staged-recipes,mariusvniekerk/staged-recipes,jochym/staged-recipes,chrisburr/staged-recipes,Juanlu001/staged-recipes,patricksnape/staged-recipes,petrushy/staged-recipes,rvalieris/staged-recipes,ReimarBauer/staged-recipes,synapticarbors/staged-recipes,rvalieris/staged-recipes,glemaitre/staged-recipes,guillochon/staged-recipes,petrushy/staged-recipes,Juanlu001/staged-recipes,asmeurer/staged-recipes,isuruf/staged-recipes,birdsarah/staged-recipes,conda-forge/staged-recipes,kwilcox/staged-recipes,barkls/staged-recipes,chrisburr/staged-recipes,jjhelmus/staged-recipes,mcs07/staged-recipes,basnijholt/staged-recipes,asmeurer/staged-recipes,ReimarBauer/staged-recipes,basnijholt/staged-recipes,cpaulik/staged-recipes,goanpeca/staged-recipes,johanneskoester/staged-recipes,shadowwalkersb/staged-recipes,synapticarbors/staged-recipes,SylvainCorlay/staged-recipes,igortg/staged-recipes,stuertz/staged-recipes,cpaulik/staged-recipes,NOAA-ORR-ERD/staged-recipes,ocefpaf/staged-recipes,johanneskoester/staged-recipes,rmcgibbo/staged-recipes,igortg/staged-recipes,ocefpaf/staged-recipes,chohne
r/staged-recipes,dschreij/staged-recipes,ceholden/staged-recipes
--- +++ @@ -24,7 +24,12 @@ url = 'https://api.travis-ci.org/repo/{}/requests'.format(encoded_slug) response = requests.post( url, - json={"request": {"branch": "master"}}, + json={ + "request": { + "branch": "master", + "message": "Triggering build from staged-recipes", + } + }, headers=headers ) if response.status_code != 201:
86e9e5a8da58b2902f5848353df9b05151bd08fa
turbustat/tests/test_cramer.py
turbustat/tests/test_cramer.py
# Licensed under an MIT open source license - see LICENSE ''' Test functions for Cramer ''' from unittest import TestCase import numpy as np import numpy.testing as npt from ..statistics import Cramer_Distance from ._testing_data import \ dataset1, dataset2, computed_data, computed_distances class testCramer(TestCase): def setUp(self): self.dataset1 = dataset1 self.dataset2 = dataset2 def test_cramer(self): self.tester = Cramer_Distance(dataset1["cube"][0], dataset2["cube"][0]) self.tester.distance_metric() assert np.allclose(self.tester.data_matrix1, computed_data["cramer_val"]) npt.assert_almost_equal(self.tester.distance, computed_distances['cramer_distance'])
# Licensed under an MIT open source license - see LICENSE ''' Test functions for Cramer ''' from unittest import TestCase import numpy as np import numpy.testing as npt from ..statistics import Cramer_Distance from ._testing_data import \ dataset1, dataset2, computed_data, computed_distances class testCramer(TestCase): def setUp(self): self.dataset1 = dataset1 self.dataset2 = dataset2 def test_cramer(self): self.tester = Cramer_Distance(dataset1["cube"][0], dataset2["cube"][0]) self.tester.distance_metric() assert np.allclose(self.tester.data_matrix1, computed_data["cramer_val"]) npt.assert_almost_equal(self.tester.distance, computed_distances['cramer_distance']) def test_cramer_spatial_diff(self): small_data = dataset1["cube"][0][:, :26, :26] self.tester2 = Cramer_Distance(small_data, dataset2["cube"][0]) self.tester3 = Cramer_Distance(dataset2["cube"][0], small_data) npt.assert_almost_equal(self.tester2.distance, self.tester3.distance)
Add test for Cramer with different spatial sizes
Add test for Cramer with different spatial sizes
Python
mit
Astroua/TurbuStat,e-koch/TurbuStat
--- +++ @@ -28,3 +28,13 @@ computed_data["cramer_val"]) npt.assert_almost_equal(self.tester.distance, computed_distances['cramer_distance']) + + def test_cramer_spatial_diff(self): + + small_data = dataset1["cube"][0][:, :26, :26] + + self.tester2 = Cramer_Distance(small_data, dataset2["cube"][0]) + self.tester3 = Cramer_Distance(dataset2["cube"][0], small_data) + + npt.assert_almost_equal(self.tester2.distance, self.tester3.distance) +
05240d24f6184b015422e2e1996fb90d7f6d7654
twstock/cli/best_four_point.py
twstock/cli/best_four_point.py
import twstock def main(argv): print('四大買賣點判斷 Best Four Point') print('------------------------------') if len(argv) > 1: sids = argv[1:] for sid in sids: bfp = twstock.BestFourPoint(twstock.Stock(sid)) bfp = bfp.best_four_point() print('%s: ' % (sid), end='') if bfp: if bfp[0]: print('Buy ', bfp[1]) else: print('Sell ', bfp[1]) else: print("Don't touch")
import twstock def run(argv): print('四大買賣點判斷 Best Four Point') print('------------------------------') for sid in argv: bfp = twstock.BestFourPoint(twstock.Stock(sid)) bfp = bfp.best_four_point() print('%s: ' % (sid), end='') if bfp: if bfp[0]: print('Buy ', bfp[1]) else: print('Sell ', bfp[1]) else: print("Don't touch")
Fix best four point cli
Fix best four point cli
Python
mit
mlouielu/twstock,TCCinTaiwan/twstock
--- +++ @@ -1,19 +1,17 @@ import twstock -def main(argv): +def run(argv): print('四大買賣點判斷 Best Four Point') print('------------------------------') - if len(argv) > 1: - sids = argv[1:] - for sid in sids: - bfp = twstock.BestFourPoint(twstock.Stock(sid)) - bfp = bfp.best_four_point() - print('%s: ' % (sid), end='') - if bfp: - if bfp[0]: - print('Buy ', bfp[1]) - else: - print('Sell ', bfp[1]) + for sid in argv: + bfp = twstock.BestFourPoint(twstock.Stock(sid)) + bfp = bfp.best_four_point() + print('%s: ' % (sid), end='') + if bfp: + if bfp[0]: + print('Buy ', bfp[1]) else: - print("Don't touch") + print('Sell ', bfp[1]) + else: + print("Don't touch")
ec2454626e22244c504bae528457fb8136c59feb
cooler/cli/__init__.py
cooler/cli/__init__.py
# -*- coding: utf-8 -*- from __future__ import division, print_function import click CONTEXT_SETTINGS = { 'help_option_names': ['-h', '--help'], } @click.group(context_settings=CONTEXT_SETTINGS) def cli(): pass from . import ( makebins, digest, csort, cload, load, balance, dump, show, info )
# -*- coding: utf-8 -*- from __future__ import division, print_function import click # Monkey patch click.core._verify_python3_env = lambda: None CONTEXT_SETTINGS = { 'help_option_names': ['-h', '--help'], } @click.group(context_settings=CONTEXT_SETTINGS) def cli(): pass from . import ( makebins, digest, csort, cload, load, balance, dump, show, info )
Stop Click from aborting due to misconfigured locale
Stop Click from aborting due to misconfigured locale
Python
bsd-3-clause
mirnylab/cooler
--- +++ @@ -2,9 +2,14 @@ from __future__ import division, print_function import click +# Monkey patch +click.core._verify_python3_env = lambda: None + + CONTEXT_SETTINGS = { 'help_option_names': ['-h', '--help'], } + @click.group(context_settings=CONTEXT_SETTINGS) def cli():
1594ab8d77e6522e0d85aa363ddc67d55d6ee81a
zc-list.py
zc-list.py
#!/usr/bin/env python import sys import argparse import client_wrap def parse_args(): parser = argparse.ArgumentParser() parser.add_argument("-t", "--type", help="type of the cached data", default="double") parser.add_argument("-c", "--connection", help="connection string", default="ipc:///var/run/zero-cache/0") parser.add_argument("-w", "--column", help="number of columns", type=int, default=1) parser.add_argument("-l", "--log", help="log file name", default="") args = parser.parse_args() def get_keys(client): key_str = client.GetKeys() keys = key_str.split (';') del keys[-1] if len(keys) == 0: sys.exit() return keys def print_keys(client, keys): for key in keys: value = client.ReadLong(key) print "%s = %d" % (key, value) def main(): parse_args() client = client_wrap.ClientWrap("get_test.log", "ipc:///var/run/zero-cache/0", 0) keys = get_keys(client) print_keys(client, keys) if __name__ == "__main__": main()
#!/usr/bin/env python import sys import argparse import client_wrap def parse_args(): parser = argparse.ArgumentParser() parser.add_argument("-t", "--type", help="type of the cached data", default="double") parser.add_argument("-c", "--connection", help="connection string", default="ipc:///var/run/zero-cache/0") parser.add_argument("-w", "--column", help="number of columns", type=int, default=1) parser.add_argument("-l", "--log", help="log file name", default="") global ARGS ARGS = parser.parse_args() def get_keys(client): key_str = client.GetKeys() keys = key_str.split (';') del keys[-1] if len(keys) == 0: sys.exit() return keys def read_value(client, key): if ARGS.type == "double": return client.ReadDouble(key) if ARGS.type == "long": return client.ReadLong(key) if ARGS.type == "string": return client.ReadString(key) def print_value(key, value): if ARGS.type == "double": print "%s = %f" % (key, value) if ARGS.type == "long": print "%s = %d" % (key, value) if ARGS.type == "string": print "%s = %s" % (key, value) def print_keys(client, keys): for key in keys: value = read_value(client, key) print_value(key, value) def main(): parse_args() client = client_wrap.ClientWrap("get_test.log", "ipc:///var/run/zero-cache/0", 0) keys = get_keys(client) print_keys(client, keys) if __name__ == "__main__": main()
Implement the type specific values output
Implement the type specific values output
Python
agpl-3.0
ellysh/zero-cache-utils,ellysh/zero-cache-utils
--- +++ @@ -10,7 +10,8 @@ parser.add_argument("-c", "--connection", help="connection string", default="ipc:///var/run/zero-cache/0") parser.add_argument("-w", "--column", help="number of columns", type=int, default=1) parser.add_argument("-l", "--log", help="log file name", default="") - args = parser.parse_args() + global ARGS + ARGS = parser.parse_args() def get_keys(client): key_str = client.GetKeys() @@ -22,10 +23,30 @@ return keys +def read_value(client, key): + if ARGS.type == "double": + return client.ReadDouble(key) + + if ARGS.type == "long": + return client.ReadLong(key) + + if ARGS.type == "string": + return client.ReadString(key) + +def print_value(key, value): + if ARGS.type == "double": + print "%s = %f" % (key, value) + + if ARGS.type == "long": + print "%s = %d" % (key, value) + + if ARGS.type == "string": + print "%s = %s" % (key, value) + def print_keys(client, keys): for key in keys: - value = client.ReadLong(key) - print "%s = %d" % (key, value) + value = read_value(client, key) + print_value(key, value) def main(): parse_args()
cb2f6840d34807d2df72e8c7ed0a76576a50f392
src/brightness.py
src/brightness.py
import sys import os from workflow import Workflow def main(wf): user_input = wf.args[0] if user_input != '': try: if int(user_input) <= 100 and int(user_input) > 0: wf.add_item('%s%%' % user_input, arg='%s' % (int(user_input) / 100.0), valid=True) else: wf.add_item('Enter value between 0 and 100') except ValueError: wf.add_item('Enter value between 0 and 100') for i in range(0,120, 20): wf.add_item('%s%%' % i, arg='%s' % (i / 100.0), valid=True) try: current_value = os.popen('./brightness').readline() wf.add_item('Current brightness: %s%%' % int(100 * float(current_value)), valid=False) except ValueError: wf.add_item('Cannot get current brightness') wf.send_feedback() if __name__ == '__main__': wf = Workflow() sys.exit(wf.run(main))
import sys import os from workflow import Workflow def main(wf): user_input = wf.args[0] if user_input != '': try: if int(user_input) <= 100 and int(user_input) >= 0: wf.add_item('%s%%' % user_input, arg='%s' % (int(user_input) / 100.0), valid=True) else: wf.add_item('Enter value between 0 and 100') except ValueError: wf.add_item('Enter value between 0 and 100') for i in range(0,120, 20): wf.add_item('%s%%' % i, arg='%s' % (i / 100.0), valid=True) try: current_value = os.popen('./brightness').readline() wf.add_item('Current brightness: %s%%' % int(100 * float(current_value)), valid=False) except ValueError: wf.add_item('Cannot get current brightness') wf.send_feedback() if __name__ == '__main__': wf = Workflow() sys.exit(wf.run(main))
Allow user to input "0" as value
Allow user to input "0" as value
Python
mit
fniephaus/alfred-brightness
--- +++ @@ -7,7 +7,7 @@ user_input = wf.args[0] if user_input != '': try: - if int(user_input) <= 100 and int(user_input) > 0: + if int(user_input) <= 100 and int(user_input) >= 0: wf.add_item('%s%%' % user_input, arg='%s' % (int(user_input) / 100.0), valid=True) else: wf.add_item('Enter value between 0 and 100')