column           type            lengths / values
commit           stringlengths   40 - 40
old_file         stringlengths   4 - 118
new_file         stringlengths   4 - 118
old_contents     stringlengths   0 - 2.94k
new_contents     stringlengths   1 - 4.43k
subject          stringlengths   15 - 444
message          stringlengths   16 - 3.45k
lang             stringclasses   1 value
license          stringclasses   13 values
repos            stringlengths   5 - 43.2k
prompt           stringlengths   17 - 4.58k
response         stringlengths   1 - 4.43k
prompt_tagged    stringlengths   58 - 4.62k
response_tagged  stringlengths   1 - 4.43k
text             stringlengths   132 - 7.29k
text_tagged      stringlengths   173 - 7.33k
commit: f86f684e659ab1e6c1c9d7c7f5f126e7814e7e8a
old_file: napper_kittydar.py
new_file: napper_kittydar.py

old_contents:
import sys, socket, time, logging
import shlex, subprocess
from hdfs import *

logging.basicConfig()

if len(sys.argv) < 4:
    print "usage: napper_kittydar <job name> <worker ID> <executable>"
    sys.exit(1)

job_name = sys.argv[1]
worker_id = int(sys.argv[2])
kittydar_path = " ".join(sys.argv[3:])

# fetch inputs from HDFS if necessary
hdfs_fetch_file("/input/kittys/CAT_0%d" % (worker_id),
                os.environ['FLAGS_task_data_dir'])

# execute program
command = "nodejs %s --dir %s/CAT_0%d/" % (kittydar_path,
                                           os.environ['FLAGS_task_data_dir'],
                                           worker_id)
print "RUNNING: %s" % (command)
subprocess.call(shlex.split(command))

print "Deleting scratch data..."
del_command = "rm -rf %s" % (os.environ['FLAGS_task_data_dir'])
subprocess.call(shlex.split(del_command))

print "All done -- goodbye from Napper!"
sys.exit(0)

new_contents:
import sys, socket, time, logging
import shlex, subprocess
from hdfs import *

logging.basicConfig()

if len(sys.argv) < 4:
    print "usage: napper_kittydar <job name> <worker ID> <executable>"
    sys.exit(1)

job_name = sys.argv[1]
worker_id = int(sys.argv[2])
kittydar_path = " ".join(sys.argv[3:])

# fetch inputs from HDFS if necessary
hdfs_fetch_file("/input/kittydar_splits30/CAT_%02d" % (worker_id),
                os.environ['FLAGS_task_data_dir'])

# execute program
command = "nodejs %s --dir %s/CAT_%02d/" % (kittydar_path,
                                            os.environ['FLAGS_task_data_dir'],
                                            worker_id)
print "RUNNING: %s" % (command)
subprocess.call(shlex.split(command))

print "Deleting scratch data..."
del_command = "rm -rf %s" % (os.environ['FLAGS_task_data_dir'])
subprocess.call(shlex.split(del_command))

print "All done -- goodbye from Napper!"
sys.exit(0)

subject: Use smaller data sets for Kittydar.
message: Use smaller data sets for Kittydar.
lang: Python
license: mit
repos: ms705/napper
commit: 6061932ef2634098f604f3a118b8ead971bd4295
old_file: main.py
new_file: main.py

old_contents:
import os
import logging
import gevent
from flask import Flask, render_template, url_for, redirect
from flask_sockets import Sockets
import io
import string
import random

app = Flask(__name__)
path = os.getcwd()
app.config['DEBUG'] = True
sockets = Sockets(app)


def rand_id(size=8):
    return ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(size))


@app.route('/', methods=['GET', 'POST'])
def main():
    return redirect(url_for('static', filename='index.html'))


@sockets.route('/submit')
def submit(ws):
    user_id = rand_id()
    while not ws.closed:
        gevent.sleep()
        data = ws.receive()
        if data and data != "start":
            file_name = "_".join(["file", user_id, rand_id()])
            wave_file = io.open(file_name, "wb")
            wave_file.write(data)
            wave_file.close()
            # process_file(file_name)

new_contents:
import os
import logging
import gevent
from flask import Flask, render_template, url_for, redirect
from flask_sockets import Sockets
import io
import string
import random

app = Flask(__name__)
path = os.getcwd()
app.config['DEBUG'] = True
sockets = Sockets(app)


def rand_id(size=8):
    return ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(size))


@app.route('/', methods=['GET', 'POST'])
def main():
    return redirect(url_for('static', filename='index.html'))


@sockets.route('/submit')
def submit(ws):
    user_id = rand_id()
    while not ws.closed:
        gevent.sleep()
        data = ws.receive()
        if data and data != "start":
            file_name = "_".join(["file", user_id, rand_id()])
            wave_file = io.open(file_name, "wb")
            wave_file.write(data)
            wave_file.close()
            # process_file(file_name)
            # os.remove(file_name)

subject: Delete wav files after processions
message: Delete wav files after processions
lang: Python
license: mit
repos: j-salazar/mchacks15,j-salazar/mchacks15,j-salazar/mchacks15
commit: 4210d2ecb1b74c2a94c704c20eec9faaf75c5a9a
old_file: main.py
new_file: main.py

old_contents:
import sys,requests,configparser

github_username = ''
github_token = ''
free_user = ''
free_pass = ''


def loadConfig():
    config = configparser.ConfigParser()
    config.read('config.cfg')
    global github_username,github_token,free_user,free_pass
    try:
        github_username = config['Github']['username']
        github_token = config['Github']['token']
        free_user = config['Free']['user']
        free_pass = config['Free']['pass']
    except:
        print('Missing information in config.cfg, see README.md')
        sys.exit(1)


def githubAPI(action):
    r = requests.get('https://api.github.com' + action,
                     auth=(github_username, github_token))
    return r.json()


def sendSMS(msg):
    return requests.post('https://smsapi.free-mobile.fr/sendmsg',
                         params={'user' : free_user, 'pass' : free_pass, 'msg' : msg}).status_code


if __name__ == '__main__':
    loadConfig()

    notifs = githubAPI('/notifications')
    unread = []
    for notif in notifs:
        if notif['unread']:
            unread.append(notif)

    msg = '[Github] Unread notifications :\n'
    for n in unread:
        msg += " [" + n['subject']['type'] + "] " + n['subject']['title'] + " in " + n['repository']['full_name'] + "\n"
    sendSMS(msg)

new_contents:
import sys,requests,configparser

github_username = ''
github_token = ''
free_user = ''
free_pass = ''


def loadConfig():
    config = configparser.ConfigParser()
    config.read('config.cfg')
    global github_username,github_token,free_user,free_pass
    try:
        github_username = config['Github']['username']
        github_token = config['Github']['token']
        free_user = config['Free']['user']
        free_pass = config['Free']['pass']
    except:
        print('Missing information in config.cfg, see README.md')
        sys.exit(1)


def githubAPI(action):
    r = requests.get('https://api.github.com' + action,
                     auth=(github_username, github_token))
    return r.json()


def sendSMS(msg):
    return requests.post('https://smsapi.free-mobile.fr/sendmsg',
                         params={'user' : free_user, 'pass' : free_pass, 'msg' : msg}).status_code


if __name__ == '__main__':
    loadConfig()

    notifs = githubAPI('/notifications')
    unread = []
    for notif in notifs:
        if notif['unread']:
            unread.append(notif)

    if len(unread) > 0:
        msg = '[Github] Unread notifications :\n'
        for n in unread:
            msg += " [" + n['subject']['type'] + "] " + n['subject']['title'] + " in " + n['repository']['full_name'] + "\n"
        sendSMS(msg)

subject: Send SMS, only if there are notifications
message: Send SMS, only if there are notifications
lang: Python
license: mit
repos: Walz/github-sms-notifications
commit: 8423013f2864863671c8d39a5a09131e68de0002
old_file: valohai_cli/cli.py
new_file: valohai_cli/cli.py

old_contents:
import logging

import click

from valohai_cli.plugin_cli import RecursiveHelpPluginCLI
from valohai_cli.table import TABLE_FORMATS, TABLE_FORMAT_META_KEY


@click.command(cls=RecursiveHelpPluginCLI, commands_module='valohai_cli.commands')
@click.option('--debug/--no-debug', default=False, envvar='VALOHAI_DEBUG')
@click.option('--table-format', type=click.Choice(TABLE_FORMATS), default='human')
@click.pass_context
def cli(ctx, debug, table_format):
    if debug:
        logging.basicConfig(level=logging.DEBUG)
    ctx.debug = debug
    ctx.meta[TABLE_FORMAT_META_KEY] = table_format

new_contents:
import logging
import platform
import sys

import click

from valohai_cli.messages import warn
from valohai_cli.plugin_cli import RecursiveHelpPluginCLI
from valohai_cli.table import TABLE_FORMATS, TABLE_FORMAT_META_KEY


@click.command(cls=RecursiveHelpPluginCLI, commands_module='valohai_cli.commands')
@click.option('--debug/--no-debug', default=False, envvar='VALOHAI_DEBUG')
@click.option('--table-format', type=click.Choice(TABLE_FORMATS), default='human')
@click.pass_context
def cli(ctx, debug, table_format):
    if debug:
        logging.basicConfig(level=logging.DEBUG)
    ctx.debug = debug
    ctx.meta[TABLE_FORMAT_META_KEY] = table_format
    if platform.python_implementation() in ('CPython', 'PyPy') and sys.version_info[:2] < (3, 5):
        warn(
            'A future version of the tool will drop support Python versions older than 3.5. '
            'You are currently using Python %s. Please upgrade!' % platform.python_version()
        )

subject: Add a warning about future versions dropping Py2 support
message:
Add a warning about future versions dropping Py2 support

Refs #64

lang: Python
license: mit
repos: valohai/valohai-cli
commit: 4c70c4b9558a5f18d7aa32f153db4de773d66ef2
old_file: opps/images/tests/generate.py
new_file: opps/images/tests/generate.py

old_contents:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from mock import patch
from unittest import TestCase
from django.template import Template, Context


class TestThumborURLTTagMock(TestCase):
    url = 'oppsproject.org/path/image.jpg'
    generate_url_path = 'opps.images.templatetags.images_tags.image_url'

    def render(self, arguments):
        source = u'{% load images_tags %}{% image_url '+ arguments +' %}'
        template = Template(source)
        rendered = template.render(Context({'url': self.url}))
        return rendered.strip()

    def test_should_pass_the_image_url_arg_to_the_helper(self):
        self.assertEqual(self.render(u'url unsafe=True'),
                         u'http://localhost:8888/unsafe/localhost:8000/media/'
                         u'oppsproject.org/path/image.jpg')

    def test_should_pass_kwargs_to_the_helper(self):
        self.assertEqual(self.render(u'url width=300 height=200 unsafe=True'),
                         u'http://localhost:8888/unsafe/300x200/'
                         u'localhost:8000/media/oppsproject.org/path/'
                         u'image.jpg')

new_contents:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from unittest import TestCase
from django.template import Template, Context


class TestImagesTags(TestCase):
    url = 'oppsproject.org/path/image.jpg'
    generate_url_path = 'opps.images.templatetags.images_tags.image_url'

    def render(self, arguments):
        source = u'{% load images_tags %}{% image_url '+ arguments +' %}'
        template = Template(source)
        rendered = template.render(Context({'url': self.url}))
        return rendered.strip()

    def test_templatetag_return(self):
        self.assertTrue(self.render(u'url unsafe=True'))

    def test_should_pass_the_image_url_arg_to_the_helper(self):
        self.assertEqual(self.render(u'url unsafe=True'),
                         u'http://localhost:8888/unsafe/localhost:8000/media/'
                         u'oppsproject.org/path/image.jpg')

    def test_should_pass_kwargs_to_the_helper(self):
        self.assertEqual(self.render(u'url width=300 height=200 unsafe=True'),
                         u'http://localhost:8888/unsafe/300x200/'
                         u'localhost:8000/media/oppsproject.org/path/'
                         u'image.jpg')

subject: Add test templatetag image_url return
message: Add test templatetag image_url return
lang: Python
license: mit
repos: opps/opps,opps/opps,YACOWS/opps,YACOWS/opps,opps/opps,YACOWS/opps,williamroot/opps,williamroot/opps,jeanmask/opps,williamroot/opps,opps/opps,YACOWS/opps,jeanmask/opps,williamroot/opps,jeanmask/opps,jeanmask/opps
commit: 5f90db398bd67aec857900f5838fb6af8f3b8c23
old_file: test/test_process.py
new_file: test/test_process.py

old_contents:
import piquant.process as ps
import os.path
import time
import utils

SCRIPT_NAME = "./script.sh"


def _write_and_run_script(dirname, command):
    utils.write_executable_script(dirname, SCRIPT_NAME, command)
    ps.run_in_directory(dirname, SCRIPT_NAME)
    time.sleep(0.1)


def test_run_in_directory_executes_command_in_directory():
    with utils.temp_dir_created() as dirname:
        _write_and_run_script(dirname, "pwd > out.txt")
        with open(dirname + os.path.sep + 'out.txt') as f:
            path = f.readlines()[0].strip()
            assert path == dirname


def test_run_in_directory_include_command_line_args():
    with utils.temp_dir_created() as dirname:
        ps.run_in_directory(dirname, "touch", [SCRIPT_NAME])
        time.sleep(0.1)
        assert os.path.exists(dirname + os.path.sep + SCRIPT_NAME)

new_contents:
import piquant.process as ps
import os.path
import time
import utils

SCRIPT_NAME = "./script.sh"


def _write_and_run_script(dirname, command):
    utils.write_executable_script(dirname, SCRIPT_NAME, command)
    ps.run_in_directory(dirname, SCRIPT_NAME)
    time.sleep(0.1)


def test_run_in_directory_executes_command_in_directory():
    with utils.temp_dir_created() as dirname:
        _write_and_run_script(dirname, "pwd > out.txt")
        with open(dirname + os.path.sep + 'out.txt') as f:
            path = f.readlines()[0].strip()
            assert path == os.path.realpath(dirname)


def test_run_in_directory_include_command_line_args():
    with utils.temp_dir_created() as dirname:
        ps.run_in_directory(dirname, "touch", [SCRIPT_NAME])
        time.sleep(0.1)
        assert os.path.exists(dirname + os.path.sep + SCRIPT_NAME)

subject: Fix test not working on OS X.
message:
Fix test not working on OS X.

test_run_in_directory_executes_command_in_directory() didn't take into
account that the directory returned by tempfile.mkdtemp() might be a symlink.

lang: Python
license: mit
repos: lweasel/piquant,lweasel/piquant
791e254c6f1efed88bdc0714ee9bb264634e74a8
transunit.py
transunit.py
#from lxml import etree as ET class TransUnit(object): "Container for XLIFF trans-unit element" def __init__(self, argument): self.origin_unit = argument self.attributes = argument.attrib self.id = '' self.ns = '' self.state = '' @staticmethod def create(xml_tu): tunit = TransUnit(xml_tu) tunit.id = tunit.attributes['id'] tunit.ns = tunit.__read_ns() tunit.state = tunit.__get_state_from_target() return tunit def __get_state_from_target(self): target = self.origin_unit.find('{}target'.format(self.ns)) if "state" in target.attrib.keys(): return target.attrib['state'] else: return '' def __has_ns(self): return '{' in self.origin_unit.tag def __read_ns(self): if self.__has_ns(): ns, tag = self.origin_unit.tag.split('}') ns = ns + '}' return ns else: return '' def has_any_state(self, list_of_states): return self.state in list_of_states
class TransUnit(object): "Container for XLIFF trans-unit element" def __init__(self, argument): self.origin_unit = argument self.attributes = argument.attrib self.id = '' self.ns = '' self.state = '' @staticmethod def create(xml_tu): tunit = TransUnit(xml_tu) tunit.id = tunit.attributes['id'] tunit.ns = tunit._read_ns() tunit.state = tunit._get_state_from_target() return tunit def _read_ns(self): if self._has_ns(): ns, tag = self.origin_unit.tag.split('}') ns = ns + '}' return ns else: return '' def _has_ns(self): return '{' in self.origin_unit.tag def _get_state_from_target(self): target = self.origin_unit.find('{}target'.format(self.ns)) if "state" in target.attrib.keys(): return target.attrib['state'] else: return '' def has_any_state(self, list_of_states): return self.state in list_of_states
Restructure transUnit class for better readability
Restructure transUnit class for better readability
Python
mit
jakub-szczepaniak/xliff
#from lxml import etree as ET class TransUnit(object): "Container for XLIFF trans-unit element" def __init__(self, argument): self.origin_unit = argument self.attributes = argument.attrib self.id = '' self.ns = '' self.state = '' @staticmethod def create(xml_tu): tunit = TransUnit(xml_tu) tunit.id = tunit.attributes['id'] tunit.ns = tunit.__read_ns() tunit.state = tunit.__get_state_from_target() return tunit def __get_state_from_target(self): target = self.origin_unit.find('{}target'.format(self.ns)) if "state" in target.attrib.keys(): return target.attrib['state'] else: return '' def __has_ns(self): return '{' in self.origin_unit.tag def __read_ns(self): if self.__has_ns(): ns, tag = self.origin_unit.tag.split('}') ns = ns + '}' return ns else: return '' def has_any_state(self, list_of_states): return self.state in list_of_states Restructure transUnit class for better readibility
class TransUnit(object): "Container for XLIFF trans-unit element" def __init__(self, argument): self.origin_unit = argument self.attributes = argument.attrib self.id = '' self.ns = '' self.state = '' @staticmethod def create(xml_tu): tunit = TransUnit(xml_tu) tunit.id = tunit.attributes['id'] tunit.ns = tunit._read_ns() tunit.state = tunit._get_state_from_target() return tunit def _read_ns(self): if self._has_ns(): ns, tag = self.origin_unit.tag.split('}') ns = ns + '}' return ns else: return '' def _has_ns(self): return '{' in self.origin_unit.tag def _get_state_from_target(self): target = self.origin_unit.find('{}target'.format(self.ns)) if "state" in target.attrib.keys(): return target.attrib['state'] else: return '' def has_any_state(self, list_of_states): return self.state in list_of_states
<commit_before>#from lxml import etree as ET class TransUnit(object): "Container for XLIFF trans-unit element" def __init__(self, argument): self.origin_unit = argument self.attributes = argument.attrib self.id = '' self.ns = '' self.state = '' @staticmethod def create(xml_tu): tunit = TransUnit(xml_tu) tunit.id = tunit.attributes['id'] tunit.ns = tunit.__read_ns() tunit.state = tunit.__get_state_from_target() return tunit def __get_state_from_target(self): target = self.origin_unit.find('{}target'.format(self.ns)) if "state" in target.attrib.keys(): return target.attrib['state'] else: return '' def __has_ns(self): return '{' in self.origin_unit.tag def __read_ns(self): if self.__has_ns(): ns, tag = self.origin_unit.tag.split('}') ns = ns + '}' return ns else: return '' def has_any_state(self, list_of_states): return self.state in list_of_states <commit_msg>Restructure transUnit class for better readibility<commit_after>
class TransUnit(object): "Container for XLIFF trans-unit element" def __init__(self, argument): self.origin_unit = argument self.attributes = argument.attrib self.id = '' self.ns = '' self.state = '' @staticmethod def create(xml_tu): tunit = TransUnit(xml_tu) tunit.id = tunit.attributes['id'] tunit.ns = tunit._read_ns() tunit.state = tunit._get_state_from_target() return tunit def _read_ns(self): if self._has_ns(): ns, tag = self.origin_unit.tag.split('}') ns = ns + '}' return ns else: return '' def _has_ns(self): return '{' in self.origin_unit.tag def _get_state_from_target(self): target = self.origin_unit.find('{}target'.format(self.ns)) if "state" in target.attrib.keys(): return target.attrib['state'] else: return '' def has_any_state(self, list_of_states): return self.state in list_of_states
#from lxml import etree as ET class TransUnit(object): "Container for XLIFF trans-unit element" def __init__(self, argument): self.origin_unit = argument self.attributes = argument.attrib self.id = '' self.ns = '' self.state = '' @staticmethod def create(xml_tu): tunit = TransUnit(xml_tu) tunit.id = tunit.attributes['id'] tunit.ns = tunit.__read_ns() tunit.state = tunit.__get_state_from_target() return tunit def __get_state_from_target(self): target = self.origin_unit.find('{}target'.format(self.ns)) if "state" in target.attrib.keys(): return target.attrib['state'] else: return '' def __has_ns(self): return '{' in self.origin_unit.tag def __read_ns(self): if self.__has_ns(): ns, tag = self.origin_unit.tag.split('}') ns = ns + '}' return ns else: return '' def has_any_state(self, list_of_states): return self.state in list_of_states Restructure transUnit class for better readibilityclass TransUnit(object): "Container for XLIFF trans-unit element" def __init__(self, argument): self.origin_unit = argument self.attributes = argument.attrib self.id = '' self.ns = '' self.state = '' @staticmethod def create(xml_tu): tunit = TransUnit(xml_tu) tunit.id = tunit.attributes['id'] tunit.ns = tunit._read_ns() tunit.state = tunit._get_state_from_target() return tunit def _read_ns(self): if self._has_ns(): ns, tag = self.origin_unit.tag.split('}') ns = ns + '}' return ns else: return '' def _has_ns(self): return '{' in self.origin_unit.tag def _get_state_from_target(self): target = self.origin_unit.find('{}target'.format(self.ns)) if "state" in target.attrib.keys(): return target.attrib['state'] else: return '' def has_any_state(self, list_of_states): return self.state in list_of_states
<commit_before>#from lxml import etree as ET class TransUnit(object): "Container for XLIFF trans-unit element" def __init__(self, argument): self.origin_unit = argument self.attributes = argument.attrib self.id = '' self.ns = '' self.state = '' @staticmethod def create(xml_tu): tunit = TransUnit(xml_tu) tunit.id = tunit.attributes['id'] tunit.ns = tunit.__read_ns() tunit.state = tunit.__get_state_from_target() return tunit def __get_state_from_target(self): target = self.origin_unit.find('{}target'.format(self.ns)) if "state" in target.attrib.keys(): return target.attrib['state'] else: return '' def __has_ns(self): return '{' in self.origin_unit.tag def __read_ns(self): if self.__has_ns(): ns, tag = self.origin_unit.tag.split('}') ns = ns + '}' return ns else: return '' def has_any_state(self, list_of_states): return self.state in list_of_states <commit_msg>Restructure transUnit class for better readibility<commit_after>class TransUnit(object): "Container for XLIFF trans-unit element" def __init__(self, argument): self.origin_unit = argument self.attributes = argument.attrib self.id = '' self.ns = '' self.state = '' @staticmethod def create(xml_tu): tunit = TransUnit(xml_tu) tunit.id = tunit.attributes['id'] tunit.ns = tunit._read_ns() tunit.state = tunit._get_state_from_target() return tunit def _read_ns(self): if self._has_ns(): ns, tag = self.origin_unit.tag.split('}') ns = ns + '}' return ns else: return '' def _has_ns(self): return '{' in self.origin_unit.tag def _get_state_from_target(self): target = self.origin_unit.find('{}target'.format(self.ns)) if "state" in target.attrib.keys(): return target.attrib['state'] else: return '' def has_any_state(self, list_of_states): return self.state in list_of_states
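The rename in this record from double- to single-underscore methods is more than style: names with two leading underscores are mangled by Python (inside `TransUnit`, `__read_ns` is stored as `_TransUnit__read_ns`), which complicates subclassing and testing, whereas a single leading underscore is only the "internal" convention. A small illustration of the difference:

```python
class Base:
    def __mangled(self):       # stored on the class as _Base__mangled
        return "base"

    def _internal(self):       # single underscore: convention only
        return "base"


class Child(Base):
    def call_internal(self):
        # Resolves normally through ordinary attribute lookup.
        return self._internal()

    def call_mangled(self):
        # self.__mangled is rewritten to self._Child__mangled here,
        # which does not exist -> AttributeError if this is called.
        return self.__mangled()


print(Child().call_internal())               # -> base
print(Base()._Base__mangled())               # mangled name is still reachable
print(hasattr(Child(), '_Child__mangled'))   # -> False
```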
184c94252a909528fee2bc29c421c814bf7c49ee
django_fake_database_backends/backends/mysql/schema.py
django_fake_database_backends/backends/mysql/schema.py
from django.db.backends.mysql.schema import DatabaseSchemaEditor \ as BaseDatabaseSchemaEditor import sys class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): def execute(self, sql, params=()): sql = str(sql) if self.collect_sql: ending = "" if sql.endswith(";") else ";" if params is not None: self.collected_sql.append( (sql % tuple(map(self.quote_value, params))) + ending) else: self.collected_sql.append(sql + ending) # If not collecting the sql, do not execute def quote_value(self, value): if type(value) == bool: return str(int(value)) if type(value) == int: return value if type(value) == float: if value % 1 == .0: return int(value) return value # TODO escape correctly all values for mysql # Preferably without having the mysql client as dep if sys.version_info.major == 3: return "b\"'{0}'\"".format(value) return "'{0}'".format(value) def _field_should_be_indexed(self, model, field): create_index = super( DatabaseSchemaEditor, self)._field_should_be_indexed(model, field) if (create_index and field.get_internal_type() == 'ForeignKey' and field.db_constraint): return False return create_index
from django.db.backends.mysql.schema import DatabaseSchemaEditor \ as BaseDatabaseSchemaEditor import datetime import sys class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): def execute(self, sql, params=()): sql = str(sql) if self.collect_sql: ending = "" if sql.endswith(";") else ";" if params is not None: self.collected_sql.append( (sql % tuple(map(self.quote_value, params))) + ending) else: self.collected_sql.append(sql + ending) # If not collecting the sql, do not execute def quote_value(self, value): if isinstance(value, bool): return str(int(value)) if isinstance(value, int): return value if isinstance(value, float): if value % 1 == .0: return int(value) return value if self._is_date_or_time(value) and sys.version_info.major == 2: return value if sys.version_info.major == 3: return "b\"'{0}'\"".format(value) return "'{0}'".format(value) def _is_date_or_time(self, value): try: datetime.datetime.strptime(value, '%H:%M:%S') return True except Exception: try: datetime.datetime.strptime(value, '%Y-%m-%d') return True except Exception: return False def _field_should_be_indexed(self, model, field): create_index = super( DatabaseSchemaEditor, self)._field_should_be_indexed(model, field) if (create_index and field.get_internal_type() == 'ForeignKey' and field.db_constraint): return False return create_index
Add quotes around date and time for python 2
Add quotes around date and time for python 2
Python
mit
David-Wobrock/django-fake-database-backends
from django.db.backends.mysql.schema import DatabaseSchemaEditor \ as BaseDatabaseSchemaEditor import sys class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): def execute(self, sql, params=()): sql = str(sql) if self.collect_sql: ending = "" if sql.endswith(";") else ";" if params is not None: self.collected_sql.append( (sql % tuple(map(self.quote_value, params))) + ending) else: self.collected_sql.append(sql + ending) # If not collecting the sql, do not execute def quote_value(self, value): if type(value) == bool: return str(int(value)) if type(value) == int: return value if type(value) == float: if value % 1 == .0: return int(value) return value # TODO escape correctly all values for mysql # Preferably without having the mysql client as dep if sys.version_info.major == 3: return "b\"'{0}'\"".format(value) return "'{0}'".format(value) def _field_should_be_indexed(self, model, field): create_index = super( DatabaseSchemaEditor, self)._field_should_be_indexed(model, field) if (create_index and field.get_internal_type() == 'ForeignKey' and field.db_constraint): return False return create_index Add quotes around date and time for python 2
from django.db.backends.mysql.schema import DatabaseSchemaEditor \ as BaseDatabaseSchemaEditor import datetime import sys class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): def execute(self, sql, params=()): sql = str(sql) if self.collect_sql: ending = "" if sql.endswith(";") else ";" if params is not None: self.collected_sql.append( (sql % tuple(map(self.quote_value, params))) + ending) else: self.collected_sql.append(sql + ending) # If not collecting the sql, do not execute def quote_value(self, value): if isinstance(value, bool): return str(int(value)) if isinstance(value, int): return value if isinstance(value, float): if value % 1 == .0: return int(value) return value if self._is_date_or_time(value) and sys.version_info.major == 2: return value if sys.version_info.major == 3: return "b\"'{0}'\"".format(value) return "'{0}'".format(value) def _is_date_or_time(self, value): try: datetime.datetime.strptime(value, '%H:%M:%S') return True except Exception: try: datetime.datetime.strptime(value, '%Y-%m-%d') return True except Exception: return False def _field_should_be_indexed(self, model, field): create_index = super( DatabaseSchemaEditor, self)._field_should_be_indexed(model, field) if (create_index and field.get_internal_type() == 'ForeignKey' and field.db_constraint): return False return create_index
<commit_before>from django.db.backends.mysql.schema import DatabaseSchemaEditor \ as BaseDatabaseSchemaEditor import sys class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): def execute(self, sql, params=()): sql = str(sql) if self.collect_sql: ending = "" if sql.endswith(";") else ";" if params is not None: self.collected_sql.append( (sql % tuple(map(self.quote_value, params))) + ending) else: self.collected_sql.append(sql + ending) # If not collecting the sql, do not execute def quote_value(self, value): if type(value) == bool: return str(int(value)) if type(value) == int: return value if type(value) == float: if value % 1 == .0: return int(value) return value # TODO escape correctly all values for mysql # Preferably without having the mysql client as dep if sys.version_info.major == 3: return "b\"'{0}'\"".format(value) return "'{0}'".format(value) def _field_should_be_indexed(self, model, field): create_index = super( DatabaseSchemaEditor, self)._field_should_be_indexed(model, field) if (create_index and field.get_internal_type() == 'ForeignKey' and field.db_constraint): return False return create_index <commit_msg>Add quotes around date and time for python 2<commit_after>
from django.db.backends.mysql.schema import DatabaseSchemaEditor \ as BaseDatabaseSchemaEditor import datetime import sys class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): def execute(self, sql, params=()): sql = str(sql) if self.collect_sql: ending = "" if sql.endswith(";") else ";" if params is not None: self.collected_sql.append( (sql % tuple(map(self.quote_value, params))) + ending) else: self.collected_sql.append(sql + ending) # If not collecting the sql, do not execute def quote_value(self, value): if isinstance(value, bool): return str(int(value)) if isinstance(value, int): return value if isinstance(value, float): if value % 1 == .0: return int(value) return value if self._is_date_or_time(value) and sys.version_info.major == 2: return value if sys.version_info.major == 3: return "b\"'{0}'\"".format(value) return "'{0}'".format(value) def _is_date_or_time(self, value): try: datetime.datetime.strptime(value, '%H:%M:%S') return True except Exception: try: datetime.datetime.strptime(value, '%Y-%m-%d') return True except Exception: return False def _field_should_be_indexed(self, model, field): create_index = super( DatabaseSchemaEditor, self)._field_should_be_indexed(model, field) if (create_index and field.get_internal_type() == 'ForeignKey' and field.db_constraint): return False return create_index
from django.db.backends.mysql.schema import DatabaseSchemaEditor \ as BaseDatabaseSchemaEditor import sys class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): def execute(self, sql, params=()): sql = str(sql) if self.collect_sql: ending = "" if sql.endswith(";") else ";" if params is not None: self.collected_sql.append( (sql % tuple(map(self.quote_value, params))) + ending) else: self.collected_sql.append(sql + ending) # If not collecting the sql, do not execute def quote_value(self, value): if type(value) == bool: return str(int(value)) if type(value) == int: return value if type(value) == float: if value % 1 == .0: return int(value) return value # TODO escape correctly all values for mysql # Preferably without having the mysql client as dep if sys.version_info.major == 3: return "b\"'{0}'\"".format(value) return "'{0}'".format(value) def _field_should_be_indexed(self, model, field): create_index = super( DatabaseSchemaEditor, self)._field_should_be_indexed(model, field) if (create_index and field.get_internal_type() == 'ForeignKey' and field.db_constraint): return False return create_index Add quotes around date and time for python 2from django.db.backends.mysql.schema import DatabaseSchemaEditor \ as BaseDatabaseSchemaEditor import datetime import sys class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): def execute(self, sql, params=()): sql = str(sql) if self.collect_sql: ending = "" if sql.endswith(";") else ";" if params is not None: self.collected_sql.append( (sql % tuple(map(self.quote_value, params))) + ending) else: self.collected_sql.append(sql + ending) # If not collecting the sql, do not execute def quote_value(self, value): if isinstance(value, bool): return str(int(value)) if isinstance(value, int): return value if isinstance(value, float): if value % 1 == .0: return int(value) return value if self._is_date_or_time(value) and sys.version_info.major == 2: return value if sys.version_info.major == 3: return "b\"'{0}'\"".format(value) return "'{0}'".format(value) def _is_date_or_time(self, value): try: datetime.datetime.strptime(value, '%H:%M:%S') return True except Exception: try: datetime.datetime.strptime(value, '%Y-%m-%d') return True except Exception: return False def _field_should_be_indexed(self, model, field): create_index = super( DatabaseSchemaEditor, self)._field_should_be_indexed(model, field) if (create_index and field.get_internal_type() == 'ForeignKey' and field.db_constraint): return False return create_index
<commit_before>from django.db.backends.mysql.schema import DatabaseSchemaEditor \ as BaseDatabaseSchemaEditor import sys class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): def execute(self, sql, params=()): sql = str(sql) if self.collect_sql: ending = "" if sql.endswith(";") else ";" if params is not None: self.collected_sql.append( (sql % tuple(map(self.quote_value, params))) + ending) else: self.collected_sql.append(sql + ending) # If not collecting the sql, do not execute def quote_value(self, value): if type(value) == bool: return str(int(value)) if type(value) == int: return value if type(value) == float: if value % 1 == .0: return int(value) return value # TODO escape correctly all values for mysql # Preferably without having the mysql client as dep if sys.version_info.major == 3: return "b\"'{0}'\"".format(value) return "'{0}'".format(value) def _field_should_be_indexed(self, model, field): create_index = super( DatabaseSchemaEditor, self)._field_should_be_indexed(model, field) if (create_index and field.get_internal_type() == 'ForeignKey' and field.db_constraint): return False return create_index <commit_msg>Add quotes around date and time for python 2<commit_after>from django.db.backends.mysql.schema import DatabaseSchemaEditor \ as BaseDatabaseSchemaEditor import datetime import sys class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): def execute(self, sql, params=()): sql = str(sql) if self.collect_sql: ending = "" if sql.endswith(";") else ";" if params is not None: self.collected_sql.append( (sql % tuple(map(self.quote_value, params))) + ending) else: self.collected_sql.append(sql + ending) # If not collecting the sql, do not execute def quote_value(self, value): if isinstance(value, bool): return str(int(value)) if isinstance(value, int): return value if isinstance(value, float): if value % 1 == .0: return int(value) return value if self._is_date_or_time(value) and sys.version_info.major == 2: return value if sys.version_info.major == 3: return "b\"'{0}'\"".format(value) return "'{0}'".format(value) def _is_date_or_time(self, value): try: datetime.datetime.strptime(value, '%H:%M:%S') return True except Exception: try: datetime.datetime.strptime(value, '%Y-%m-%d') return True except Exception: return False def _field_should_be_indexed(self, model, field): create_index = super( DatabaseSchemaEditor, self)._field_should_be_indexed(model, field) if (create_index and field.get_internal_type() == 'ForeignKey' and field.db_constraint): return False return create_index
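Two details of this change are worth spelling out: `_is_date_or_time` leans on `datetime.datetime.strptime` raising when the string does not match a format, and the `isinstance` chain in `quote_value` must test `bool` before `int` because `bool` is a subclass of `int`. A standalone sketch of the same probe follows; the function name here is illustrative, not part of the backend:

```python
import datetime

def looks_like_date_or_time(value):
    """Illustrative restatement of _is_date_or_time: True if the
    string parses as HH:MM:SS or as YYYY-MM-DD."""
    for fmt in ('%H:%M:%S', '%Y-%m-%d'):
        try:
            datetime.datetime.strptime(value, fmt)
            return True
        except (ValueError, TypeError):
            continue
    return False

print(looks_like_date_or_time('12:30:00'))    # True
print(looks_like_date_or_time('2017-06-21'))  # True
print(looks_like_date_or_time('hello'))       # False

# Why the bool check must come first in quote_value():
print(isinstance(True, int))  # True -- bool subclasses int
```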
23cd1ea939df8d78952f9b096207de0a3453529f
tests/test_create.py
tests/test_create.py
from matador.commands import CreateTicket, CreatePackage from dulwich.repo import Repo from pathlib import Path def test_add_to_git(project_repo): pass def test_create_ticket(session, project_repo): test_ticket = 'test-ticket' CreateTicket(ticket=test_ticket) ticket_folder = Path(project_repo, 'deploy', 'tickets', test_ticket) deploy_file = Path(ticket_folder, 'deploy.py') repo = Repo(str(project_repo)) last_commit = repo.get_object(repo.head()) commit_message = last_commit.message assert ticket_folder.exists() assert deploy_file.exists() expected_message = bytes( 'Create ticket %s\n' % test_ticket, encoding='UTF-8') assert commit_message == expected_message def test_create_package(session, project_repo): test_package = 'test-package' CreatePackage(package=test_package) package_folder = Path(project_repo, 'deploy', 'packages', test_package) package_file = Path(package_folder, 'tickets.yml') remove_file = Path(package_folder, 'remove.py') assert package_folder.exists() assert package_file.exists() assert remove_file.exists()
from matador.commands import CreateTicket, CreatePackage from dulwich.repo import Repo from pathlib import Path def test_add_to_git(project_repo): pass def test_create_ticket(session, project_repo): test_ticket = 'test-ticket' CreateTicket(ticket=test_ticket) ticket_folder = Path(project_repo, 'deploy', 'tickets', test_ticket) deploy_file = Path(ticket_folder, 'deploy.py') assert ticket_folder.exists() assert deploy_file.exists() repo = Repo(str(project_repo)) last_commit = repo.get_object(repo.head()) commit_message = last_commit.message expected_message = bytes( 'Create ticket %s\n' % test_ticket, encoding='UTF-8') assert commit_message == expected_message def test_create_package(session, project_repo): test_package = 'test-package' CreatePackage(package=test_package) package_folder = Path(project_repo, 'deploy', 'packages', test_package) package_file = Path(package_folder, 'tickets.yml') remove_file = Path(package_folder, 'remove.py') assert package_folder.exists() assert package_file.exists() assert remove_file.exists() repo = Repo(str(project_repo)) last_commit = repo.get_object(repo.head()) commit_message = last_commit.message expected_message = bytes( 'Create package %s\n' % test_package, encoding='UTF-8') assert commit_message == expected_message
Add test for package commit
Add test for package commit
Python
mit
Empiria/matador
from matador.commands import CreateTicket, CreatePackage from dulwich.repo import Repo from pathlib import Path def test_add_to_git(project_repo): pass def test_create_ticket(session, project_repo): test_ticket = 'test-ticket' CreateTicket(ticket=test_ticket) ticket_folder = Path(project_repo, 'deploy', 'tickets', test_ticket) deploy_file = Path(ticket_folder, 'deploy.py') repo = Repo(str(project_repo)) last_commit = repo.get_object(repo.head()) commit_message = last_commit.message assert ticket_folder.exists() assert deploy_file.exists() expected_message = bytes( 'Create ticket %s\n' % test_ticket, encoding='UTF-8') assert commit_message == expected_message def test_create_package(session, project_repo): test_package = 'test-package' CreatePackage(package=test_package) package_folder = Path(project_repo, 'deploy', 'packages', test_package) package_file = Path(package_folder, 'tickets.yml') remove_file = Path(package_folder, 'remove.py') assert package_folder.exists() assert package_file.exists() assert remove_file.exists() Add test for package commit
from matador.commands import CreateTicket, CreatePackage from dulwich.repo import Repo from pathlib import Path def test_add_to_git(project_repo): pass def test_create_ticket(session, project_repo): test_ticket = 'test-ticket' CreateTicket(ticket=test_ticket) ticket_folder = Path(project_repo, 'deploy', 'tickets', test_ticket) deploy_file = Path(ticket_folder, 'deploy.py') assert ticket_folder.exists() assert deploy_file.exists() repo = Repo(str(project_repo)) last_commit = repo.get_object(repo.head()) commit_message = last_commit.message expected_message = bytes( 'Create ticket %s\n' % test_ticket, encoding='UTF-8') assert commit_message == expected_message def test_create_package(session, project_repo): test_package = 'test-package' CreatePackage(package=test_package) package_folder = Path(project_repo, 'deploy', 'packages', test_package) package_file = Path(package_folder, 'tickets.yml') remove_file = Path(package_folder, 'remove.py') assert package_folder.exists() assert package_file.exists() assert remove_file.exists() repo = Repo(str(project_repo)) last_commit = repo.get_object(repo.head()) commit_message = last_commit.message expected_message = bytes( 'Create package %s\n' % test_package, encoding='UTF-8') assert commit_message == expected_message
<commit_before>from matador.commands import CreateTicket, CreatePackage from dulwich.repo import Repo from pathlib import Path def test_add_to_git(project_repo): pass def test_create_ticket(session, project_repo): test_ticket = 'test-ticket' CreateTicket(ticket=test_ticket) ticket_folder = Path(project_repo, 'deploy', 'tickets', test_ticket) deploy_file = Path(ticket_folder, 'deploy.py') repo = Repo(str(project_repo)) last_commit = repo.get_object(repo.head()) commit_message = last_commit.message assert ticket_folder.exists() assert deploy_file.exists() expected_message = bytes( 'Create ticket %s\n' % test_ticket, encoding='UTF-8') assert commit_message == expected_message def test_create_package(session, project_repo): test_package = 'test-package' CreatePackage(package=test_package) package_folder = Path(project_repo, 'deploy', 'packages', test_package) package_file = Path(package_folder, 'tickets.yml') remove_file = Path(package_folder, 'remove.py') assert package_folder.exists() assert package_file.exists() assert remove_file.exists() <commit_msg>Add test for package commit<commit_after>
from matador.commands import CreateTicket, CreatePackage from dulwich.repo import Repo from pathlib import Path def test_add_to_git(project_repo): pass def test_create_ticket(session, project_repo): test_ticket = 'test-ticket' CreateTicket(ticket=test_ticket) ticket_folder = Path(project_repo, 'deploy', 'tickets', test_ticket) deploy_file = Path(ticket_folder, 'deploy.py') assert ticket_folder.exists() assert deploy_file.exists() repo = Repo(str(project_repo)) last_commit = repo.get_object(repo.head()) commit_message = last_commit.message expected_message = bytes( 'Create ticket %s\n' % test_ticket, encoding='UTF-8') assert commit_message == expected_message def test_create_package(session, project_repo): test_package = 'test-package' CreatePackage(package=test_package) package_folder = Path(project_repo, 'deploy', 'packages', test_package) package_file = Path(package_folder, 'tickets.yml') remove_file = Path(package_folder, 'remove.py') assert package_folder.exists() assert package_file.exists() assert remove_file.exists() repo = Repo(str(project_repo)) last_commit = repo.get_object(repo.head()) commit_message = last_commit.message expected_message = bytes( 'Create package %s\n' % test_package, encoding='UTF-8') assert commit_message == expected_message
from matador.commands import CreateTicket, CreatePackage from dulwich.repo import Repo from pathlib import Path def test_add_to_git(project_repo): pass def test_create_ticket(session, project_repo): test_ticket = 'test-ticket' CreateTicket(ticket=test_ticket) ticket_folder = Path(project_repo, 'deploy', 'tickets', test_ticket) deploy_file = Path(ticket_folder, 'deploy.py') repo = Repo(str(project_repo)) last_commit = repo.get_object(repo.head()) commit_message = last_commit.message assert ticket_folder.exists() assert deploy_file.exists() expected_message = bytes( 'Create ticket %s\n' % test_ticket, encoding='UTF-8') assert commit_message == expected_message def test_create_package(session, project_repo): test_package = 'test-package' CreatePackage(package=test_package) package_folder = Path(project_repo, 'deploy', 'packages', test_package) package_file = Path(package_folder, 'tickets.yml') remove_file = Path(package_folder, 'remove.py') assert package_folder.exists() assert package_file.exists() assert remove_file.exists() Add test for package commitfrom matador.commands import CreateTicket, CreatePackage from dulwich.repo import Repo from pathlib import Path def test_add_to_git(project_repo): pass def test_create_ticket(session, project_repo): test_ticket = 'test-ticket' CreateTicket(ticket=test_ticket) ticket_folder = Path(project_repo, 'deploy', 'tickets', test_ticket) deploy_file = Path(ticket_folder, 'deploy.py') assert ticket_folder.exists() assert deploy_file.exists() repo = Repo(str(project_repo)) last_commit = repo.get_object(repo.head()) commit_message = last_commit.message expected_message = bytes( 'Create ticket %s\n' % test_ticket, encoding='UTF-8') assert commit_message == expected_message def test_create_package(session, project_repo): test_package = 'test-package' CreatePackage(package=test_package) package_folder = Path(project_repo, 'deploy', 'packages', test_package) package_file = Path(package_folder, 'tickets.yml') remove_file = Path(package_folder, 'remove.py') assert package_folder.exists() assert package_file.exists() assert remove_file.exists() repo = Repo(str(project_repo)) last_commit = repo.get_object(repo.head()) commit_message = last_commit.message expected_message = bytes( 'Create package %s\n' % test_package, encoding='UTF-8') assert commit_message == expected_message
<commit_before>from matador.commands import CreateTicket, CreatePackage from dulwich.repo import Repo from pathlib import Path def test_add_to_git(project_repo): pass def test_create_ticket(session, project_repo): test_ticket = 'test-ticket' CreateTicket(ticket=test_ticket) ticket_folder = Path(project_repo, 'deploy', 'tickets', test_ticket) deploy_file = Path(ticket_folder, 'deploy.py') repo = Repo(str(project_repo)) last_commit = repo.get_object(repo.head()) commit_message = last_commit.message assert ticket_folder.exists() assert deploy_file.exists() expected_message = bytes( 'Create ticket %s\n' % test_ticket, encoding='UTF-8') assert commit_message == expected_message def test_create_package(session, project_repo): test_package = 'test-package' CreatePackage(package=test_package) package_folder = Path(project_repo, 'deploy', 'packages', test_package) package_file = Path(package_folder, 'tickets.yml') remove_file = Path(package_folder, 'remove.py') assert package_folder.exists() assert package_file.exists() assert remove_file.exists() <commit_msg>Add test for package commit<commit_after>from matador.commands import CreateTicket, CreatePackage from dulwich.repo import Repo from pathlib import Path def test_add_to_git(project_repo): pass def test_create_ticket(session, project_repo): test_ticket = 'test-ticket' CreateTicket(ticket=test_ticket) ticket_folder = Path(project_repo, 'deploy', 'tickets', test_ticket) deploy_file = Path(ticket_folder, 'deploy.py') assert ticket_folder.exists() assert deploy_file.exists() repo = Repo(str(project_repo)) last_commit = repo.get_object(repo.head()) commit_message = last_commit.message expected_message = bytes( 'Create ticket %s\n' % test_ticket, encoding='UTF-8') assert commit_message == expected_message def test_create_package(session, project_repo): test_package = 'test-package' CreatePackage(package=test_package) package_folder = Path(project_repo, 'deploy', 'packages', test_package) package_file = Path(package_folder, 'tickets.yml') remove_file = Path(package_folder, 'remove.py') assert package_folder.exists() assert package_file.exists() assert remove_file.exists() repo = Repo(str(project_repo)) last_commit = repo.get_object(repo.head()) commit_message = last_commit.message expected_message = bytes( 'Create package %s\n' % test_package, encoding='UTF-8') assert commit_message == expected_message
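The assertion pattern added in this test follows a small dulwich idiom: `repo.head()` returns the hex SHA (as bytes) that HEAD points at, `get_object()` dereferences it to a `Commit`, and commit messages come back as bytes — hence the `bytes(..., encoding='UTF-8')` comparisons. A minimal sketch, assuming it is run inside an existing git repository with at least one commit:

```python
from dulwich.repo import Repo

repo = Repo('.')                    # open the repository in the cwd
head_sha = repo.head()              # bytes: hex SHA that HEAD points at
commit = repo.get_object(head_sha)  # dereference the SHA to a Commit

# dulwich stores messages as bytes, so decode (or compare as bytes).
print(commit.message.decode('utf-8'))
```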
cd4fe7636d6d5254189de870f262cfa9c3c0461a
sirius/__init__.py
sirius/__init__.py
import os as _os from . import LI_V00 from . import TB_V01 from . import BO_V901 from . import BO_V02 from . import TS_V01 from . import SI_V12 from . import coordinate_system with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f: __version__ = _f.read().strip() __all__ = ['LI_V00', 'TB_V01', 'BO_V901', 'TS_V01', 'SI_V12', 'BO_V02'] li = LI_V00 tb = TB_V01 bo = BO_V901 ts = TS_V01 si = SI_V12
import os as _os from . import LI_V00 from . import TB_V01 from . import BO_V02 from . import TS_V01 from . import SI_V12 from . import coordinate_system with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f: __version__ = _f.read().strip() __all__ = ['LI_V00', 'TB_V01', 'BO_V02', 'TS_V01', 'SI_V12'] li = LI_V00 tb = TB_V01 bo = BO_V02 ts = TS_V01 si = SI_V12
Change booster default version (BO.V02)
Change booster default version (BO.V02)
Python
mit
lnls-fac/sirius
import os as _os from . import LI_V00 from . import TB_V01 from . import BO_V901 from . import BO_V02 from . import TS_V01 from . import SI_V12 from . import coordinate_system with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f: __version__ = _f.read().strip() __all__ = ['LI_V00', 'TB_V01', 'BO_V901', 'TS_V01', 'SI_V12', 'BO_V02'] li = LI_V00 tb = TB_V01 bo = BO_V901 ts = TS_V01 si = SI_V12 Change booster default version (BO.V02)
import os as _os from . import LI_V00 from . import TB_V01 from . import BO_V02 from . import TS_V01 from . import SI_V12 from . import coordinate_system with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f: __version__ = _f.read().strip() __all__ = ['LI_V00', 'TB_V01', 'BO_V02', 'TS_V01', 'SI_V12'] li = LI_V00 tb = TB_V01 bo = BO_V02 ts = TS_V01 si = SI_V12
<commit_before>import os as _os from . import LI_V00 from . import TB_V01 from . import BO_V901 from . import BO_V02 from . import TS_V01 from . import SI_V12 from . import coordinate_system with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f: __version__ = _f.read().strip() __all__ = ['LI_V00', 'TB_V01', 'BO_V901', 'TS_V01', 'SI_V12', 'BO_V02'] li = LI_V00 tb = TB_V01 bo = BO_V901 ts = TS_V01 si = SI_V12 <commit_msg>Change booster default version (BO.V02)<commit_after>
import os as _os from . import LI_V00 from . import TB_V01 from . import BO_V02 from . import TS_V01 from . import SI_V12 from . import coordinate_system with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f: __version__ = _f.read().strip() __all__ = ['LI_V00', 'TB_V01', 'BO_V02', 'TS_V01', 'SI_V12'] li = LI_V00 tb = TB_V01 bo = BO_V02 ts = TS_V01 si = SI_V12
import os as _os from . import LI_V00 from . import TB_V01 from . import BO_V901 from . import BO_V02 from . import TS_V01 from . import SI_V12 from . import coordinate_system with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f: __version__ = _f.read().strip() __all__ = ['LI_V00', 'TB_V01', 'BO_V901', 'TS_V01', 'SI_V12', 'BO_V02'] li = LI_V00 tb = TB_V01 bo = BO_V901 ts = TS_V01 si = SI_V12 Change booster default version (BO.V02)import os as _os from . import LI_V00 from . import TB_V01 from . import BO_V02 from . import TS_V01 from . import SI_V12 from . import coordinate_system with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f: __version__ = _f.read().strip() __all__ = ['LI_V00', 'TB_V01', 'BO_V02', 'TS_V01', 'SI_V12'] li = LI_V00 tb = TB_V01 bo = BO_V02 ts = TS_V01 si = SI_V12
<commit_before>import os as _os from . import LI_V00 from . import TB_V01 from . import BO_V901 from . import BO_V02 from . import TS_V01 from . import SI_V12 from . import coordinate_system with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f: __version__ = _f.read().strip() __all__ = ['LI_V00', 'TB_V01', 'BO_V901', 'TS_V01', 'SI_V12', 'BO_V02'] li = LI_V00 tb = TB_V01 bo = BO_V901 ts = TS_V01 si = SI_V12 <commit_msg>Change booster default version (BO.V02)<commit_after>import os as _os from . import LI_V00 from . import TB_V01 from . import BO_V02 from . import TS_V01 from . import SI_V12 from . import coordinate_system with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f: __version__ = _f.read().strip() __all__ = ['LI_V00', 'TB_V01', 'BO_V02', 'TS_V01', 'SI_V12'] li = LI_V00 tb = TB_V01 bo = BO_V02 ts = TS_V01 si = SI_V12
ea7177614dc2094e95aeea33f6249f14c792fee8
Discord/modules/ciphers.py
Discord/modules/ciphers.py
def encode_caesar(message, key): encoded_message = "" for character in message: if not ('a' <= character <= 'z' or 'A' <= character <= 'Z'): # .isalpha() ? encoded_message += character continue shifted = ord(character) + int(key) if character.islower() and shifted > ord('z') or character.isupper() and shifted > ord('Z'): encoded_message += chr(shifted - 26) else: encoded_message += chr(shifted) return encoded_message def decode_caesar(message, key): decoded_message = "" for character in message: if not ('a' <= character <= 'z' or 'A' <= character <= 'Z'): # .isalpha() ? decoded_message += character continue shifted = ord(character) - int(key) if character.islower() and shifted < ord('a') or character.isupper() and shifted < ord('A'): decoded_message += chr(shifted + 26) else: decoded_message += chr(shifted) return decoded_message def brute_force_caesar(message): decodes = "" for key in range(26): decodes += str(key) + ": " + decode_caesar(message, key) + '\n' return decodes
def encode_caesar(message, key): encoded_message = "" for character in message: if not character.isalpha() or not character.isascii(): encoded_message += character continue shifted = ord(character) + int(key) if character.islower() and shifted > ord('z') or character.isupper() and shifted > ord('Z'): encoded_message += chr(shifted - 26) else: encoded_message += chr(shifted) return encoded_message def decode_caesar(message, key): decoded_message = "" for character in message: if not character.isalpha() or not character.isascii(): decoded_message += character continue shifted = ord(character) - int(key) if character.islower() and shifted < ord('a') or character.isupper() and shifted < ord('A'): decoded_message += chr(shifted + 26) else: decoded_message += chr(shifted) return decoded_message def brute_force_caesar(message): decodes = "" for key in range(26): decodes += str(key) + ": " + decode_caesar(message, key) + '\n' return decodes
Use string methods for encode and decode caesar functions
[Discord] Use string methods for encode and decode caesar functions To determine (in)valid characters to encode and decode
Python
mit
Harmon758/Harmonbot,Harmon758/Harmonbot
def encode_caesar(message, key): encoded_message = "" for character in message: if not ('a' <= character <= 'z' or 'A' <= character <= 'Z'): # .isalpha() ? encoded_message += character continue shifted = ord(character) + int(key) if character.islower() and shifted > ord('z') or character.isupper() and shifted > ord('Z'): encoded_message += chr(shifted - 26) else: encoded_message += chr(shifted) return encoded_message def decode_caesar(message, key): decoded_message = "" for character in message: if not ('a' <= character <= 'z' or 'A' <= character <= 'Z'): # .isalpha() ? decoded_message += character continue shifted = ord(character) - int(key) if character.islower() and shifted < ord('a') or character.isupper() and shifted < ord('A'): decoded_message += chr(shifted + 26) else: decoded_message += chr(shifted) return decoded_message def brute_force_caesar(message): decodes = "" for key in range(26): decodes += str(key) + ": " + decode_caesar(message, key) + '\n' return decodes [Discord] Use string methods for encode and decode caesar functions To determine (in)valid characters to encode and decode
def encode_caesar(message, key): encoded_message = "" for character in message: if not character.isalpha() or not character.isascii(): encoded_message += character continue shifted = ord(character) + int(key) if character.islower() and shifted > ord('z') or character.isupper() and shifted > ord('Z'): encoded_message += chr(shifted - 26) else: encoded_message += chr(shifted) return encoded_message def decode_caesar(message, key): decoded_message = "" for character in message: if not character.isalpha() or not character.isascii(): decoded_message += character continue shifted = ord(character) - int(key) if character.islower() and shifted < ord('a') or character.isupper() and shifted < ord('A'): decoded_message += chr(shifted + 26) else: decoded_message += chr(shifted) return decoded_message def brute_force_caesar(message): decodes = "" for key in range(26): decodes += str(key) + ": " + decode_caesar(message, key) + '\n' return decodes
<commit_before> def encode_caesar(message, key): encoded_message = "" for character in message: if not ('a' <= character <= 'z' or 'A' <= character <= 'Z'): # .isalpha() ? encoded_message += character continue shifted = ord(character) + int(key) if character.islower() and shifted > ord('z') or character.isupper() and shifted > ord('Z'): encoded_message += chr(shifted - 26) else: encoded_message += chr(shifted) return encoded_message def decode_caesar(message, key): decoded_message = "" for character in message: if not ('a' <= character <= 'z' or 'A' <= character <= 'Z'): # .isalpha() ? decoded_message += character continue shifted = ord(character) - int(key) if character.islower() and shifted < ord('a') or character.isupper() and shifted < ord('A'): decoded_message += chr(shifted + 26) else: decoded_message += chr(shifted) return decoded_message def brute_force_caesar(message): decodes = "" for key in range(26): decodes += str(key) + ": " + decode_caesar(message, key) + '\n' return decodes <commit_msg>[Discord] Use string methods for encode and decode caesar functions To determine (in)valid characters to encode and decode<commit_after>
def encode_caesar(message, key): encoded_message = "" for character in message: if not character.isalpha() or not character.isascii(): encoded_message += character continue shifted = ord(character) + int(key) if character.islower() and shifted > ord('z') or character.isupper() and shifted > ord('Z'): encoded_message += chr(shifted - 26) else: encoded_message += chr(shifted) return encoded_message def decode_caesar(message, key): decoded_message = "" for character in message: if not character.isalpha() or not character.isascii(): decoded_message += character continue shifted = ord(character) - int(key) if character.islower() and shifted < ord('a') or character.isupper() and shifted < ord('A'): decoded_message += chr(shifted + 26) else: decoded_message += chr(shifted) return decoded_message def brute_force_caesar(message): decodes = "" for key in range(26): decodes += str(key) + ": " + decode_caesar(message, key) + '\n' return decodes
def encode_caesar(message, key): encoded_message = "" for character in message: if not ('a' <= character <= 'z' or 'A' <= character <= 'Z'): # .isalpha() ? encoded_message += character continue shifted = ord(character) + int(key) if character.islower() and shifted > ord('z') or character.isupper() and shifted > ord('Z'): encoded_message += chr(shifted - 26) else: encoded_message += chr(shifted) return encoded_message def decode_caesar(message, key): decoded_message = "" for character in message: if not ('a' <= character <= 'z' or 'A' <= character <= 'Z'): # .isalpha() ? decoded_message += character continue shifted = ord(character) - int(key) if character.islower() and shifted < ord('a') or character.isupper() and shifted < ord('A'): decoded_message += chr(shifted + 26) else: decoded_message += chr(shifted) return decoded_message def brute_force_caesar(message): decodes = "" for key in range(26): decodes += str(key) + ": " + decode_caesar(message, key) + '\n' return decodes [Discord] Use string methods for encode and decode caesar functions To determine (in)valid characters to encode and decode def encode_caesar(message, key): encoded_message = "" for character in message: if not character.isalpha() or not character.isascii(): encoded_message += character continue shifted = ord(character) + int(key) if character.islower() and shifted > ord('z') or character.isupper() and shifted > ord('Z'): encoded_message += chr(shifted - 26) else: encoded_message += chr(shifted) return encoded_message def decode_caesar(message, key): decoded_message = "" for character in message: if not character.isalpha() or not character.isascii(): decoded_message += character continue shifted = ord(character) - int(key) if character.islower() and shifted < ord('a') or character.isupper() and shifted < ord('A'): decoded_message += chr(shifted + 26) else: decoded_message += chr(shifted) return decoded_message def brute_force_caesar(message): decodes = "" for key in range(26): decodes += str(key) + ": " + decode_caesar(message, key) + '\n' return decodes
<commit_before> def encode_caesar(message, key): encoded_message = "" for character in message: if not ('a' <= character <= 'z' or 'A' <= character <= 'Z'): # .isalpha() ? encoded_message += character continue shifted = ord(character) + int(key) if character.islower() and shifted > ord('z') or character.isupper() and shifted > ord('Z'): encoded_message += chr(shifted - 26) else: encoded_message += chr(shifted) return encoded_message def decode_caesar(message, key): decoded_message = "" for character in message: if not ('a' <= character <= 'z' or 'A' <= character <= 'Z'): # .isalpha() ? decoded_message += character continue shifted = ord(character) - int(key) if character.islower() and shifted < ord('a') or character.isupper() and shifted < ord('A'): decoded_message += chr(shifted + 26) else: decoded_message += chr(shifted) return decoded_message def brute_force_caesar(message): decodes = "" for key in range(26): decodes += str(key) + ": " + decode_caesar(message, key) + '\n' return decodes <commit_msg>[Discord] Use string methods for encode and decode caesar functions To determine (in)valid characters to encode and decode<commit_after> def encode_caesar(message, key): encoded_message = "" for character in message: if not character.isalpha() or not character.isascii(): encoded_message += character continue shifted = ord(character) + int(key) if character.islower() and shifted > ord('z') or character.isupper() and shifted > ord('Z'): encoded_message += chr(shifted - 26) else: encoded_message += chr(shifted) return encoded_message def decode_caesar(message, key): decoded_message = "" for character in message: if not character.isalpha() or not character.isascii(): decoded_message += character continue shifted = ord(character) - int(key) if character.islower() and shifted < ord('a') or character.isupper() and shifted < ord('A'): decoded_message += chr(shifted + 26) else: decoded_message += chr(shifted) return decoded_message def brute_force_caesar(message): decodes = "" for key in range(26): decodes += str(key) + ": " + decode_caesar(message, key) + '\n' return decodes
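The switch in this record from manual `'a' <= c <= 'z'` range checks to `character.isalpha()` plus `character.isascii()` matters because `isalpha()` alone accepts accented and non-Latin letters, which the ±26 shift would corrupt; `str.isascii()` is what narrows the set back to the Latin alphabet (note it exists only on Python 3.7+). A quick demonstration:

```python
for ch in ['a', 'Z', 'é', 'ß', '4', ' ']:
    print(repr(ch), ch.isalpha(), ch.isalpha() and ch.isascii())
# 'a' True True
# 'Z' True True
# 'é' True False   <- alphabetic but not ASCII: must pass through unshifted
# 'ß' True False
# '4' False False
# ' ' False False
```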
61b4aa0d99cddf88219157eb9120032c8aaf8998
nbcollate/cli.py
nbcollate/cli.py
#!/usr/bin/env python """nbcollate combines a set of Jupyter notebooks into a single notebook. Command-line interface for nbcollate. """ import argparse import logging import os import sys import nbformat from . import nbcollate, nb_add_metadata Parser = argparse.ArgumentParser(description="Create a combined notebook.") Parser.add_argument('-v', '--verbose', action='store_true') Parser.add_argument('notebook_files', nargs='+', metavar='NOTEBOOK_FILE') def main(): args = Parser.parse_args(sys.argv[1:]) if args.verbose: logging.basicConfig(format='%(message)s', level=logging.INFO) nbs = [nbformat.read(nbf, as_version=4) for nbf in args.notebook_files] anb = nbs[0] snbs = nbs[1:] nb = nbcollate(nbs[0], snbs) suffix = "-combined" root, ext = os.path.splitext(args.notebook_files[0]) out = "{}{}{}".format(root, suffix, ext) with open(out, 'w') as fp: nbformat.write(nb, fp)
#!/usr/bin/env python """nbcollate combines a set of Jupyter notebooks into a single notebook. Command-line interface for nbcollate. """ import argparse import logging import os import sys import nbformat from . import nbcollate, nb_add_metadata Parser = argparse.ArgumentParser(description="Create a combined notebook.") Parser.add_argument('-v', '--verbose', action='store_true') Parser.add_argument('notebook_files', nargs='+', metavar='NOTEBOOK_FILE') def main(): args = Parser.parse_args(sys.argv[1:]) if args.verbose: logging.basicConfig(format='%(message)s', level=logging.INFO) nbs = [nbformat.read(nbf, as_version=4) for nbf in args.notebook_files] anb = nbs[0] nb_add_metadata(anb) snbs = nbs[1:] nb = nbcollate(nbs[0], snbs) suffix = "-combined" root, ext = os.path.splitext(args.notebook_files[0]) out = "{}{}{}".format(root, suffix, ext) with open(out, 'w') as fp: nbformat.write(nb, fp)
Add metadata to assignment notebook
Add metadata to assignment notebook
Python
mit
olin-computing/nbcollate
#!/usr/bin/env python """nbcollate combines a set of Jupyter notebooks into a single notebook. Command-line interface for nbcollate. """ import argparse import logging import os import sys import nbformat from . import nbcollate, nb_add_metadata Parser = argparse.ArgumentParser(description="Create a combined notebook.") Parser.add_argument('-v', '--verbose', action='store_true') Parser.add_argument('notebook_files', nargs='+', metavar='NOTEBOOK_FILE') def main(): args = Parser.parse_args(sys.argv[1:]) if args.verbose: logging.basicConfig(format='%(message)s', level=logging.INFO) nbs = [nbformat.read(nbf, as_version=4) for nbf in args.notebook_files] anb = nbs[0] snbs = nbs[1:] nb = nbcollate(nbs[0], snbs) suffix = "-combined" root, ext = os.path.splitext(args.notebook_files[0]) out = "{}{}{}".format(root, suffix, ext) with open(out, 'w') as fp: nbformat.write(nb, fp) Add metadata to assignment notebook
#!/usr/bin/env python """nbcollate combines a set of Jupyter notebooks into a single notebook. Command-line interface for nbcollate. """ import argparse import logging import os import sys import nbformat from . import nbcollate, nb_add_metadata Parser = argparse.ArgumentParser(description="Create a combined notebook.") Parser.add_argument('-v', '--verbose', action='store_true') Parser.add_argument('notebook_files', nargs='+', metavar='NOTEBOOK_FILE') def main(): args = Parser.parse_args(sys.argv[1:]) if args.verbose: logging.basicConfig(format='%(message)s', level=logging.INFO) nbs = [nbformat.read(nbf, as_version=4) for nbf in args.notebook_files] anb = nbs[0] nb_add_metadata(anb) snbs = nbs[1:] nb = nbcollate(nbs[0], snbs) suffix = "-combined" root, ext = os.path.splitext(args.notebook_files[0]) out = "{}{}{}".format(root, suffix, ext) with open(out, 'w') as fp: nbformat.write(nb, fp)
<commit_before>#!/usr/bin/env python """nbcollate combines a set of Jupyter notebooks into a single notebook. Command-line interface for nbcollate. """ import argparse import logging import os import sys import nbformat from . import nbcollate, nb_add_metadata Parser = argparse.ArgumentParser(description="Create a combined notebook.") Parser.add_argument('-v', '--verbose', action='store_true') Parser.add_argument('notebook_files', nargs='+', metavar='NOTEBOOK_FILE') def main(): args = Parser.parse_args(sys.argv[1:]) if args.verbose: logging.basicConfig(format='%(message)s', level=logging.INFO) nbs = [nbformat.read(nbf, as_version=4) for nbf in args.notebook_files] anb = nbs[0] snbs = nbs[1:] nb = nbcollate(nbs[0], snbs) suffix = "-combined" root, ext = os.path.splitext(args.notebook_files[0]) out = "{}{}{}".format(root, suffix, ext) with open(out, 'w') as fp: nbformat.write(nb, fp) <commit_msg>Add metadata to assignment notebook<commit_after>
#!/usr/bin/env python """nbcollate combines a set of Jupyter notebooks into a single notebook. Command-line interface for nbcollate. """ import argparse import logging import os import sys import nbformat from . import nbcollate, nb_add_metadata Parser = argparse.ArgumentParser(description="Create a combined notebook.") Parser.add_argument('-v', '--verbose', action='store_true') Parser.add_argument('notebook_files', nargs='+', metavar='NOTEBOOK_FILE') def main(): args = Parser.parse_args(sys.argv[1:]) if args.verbose: logging.basicConfig(format='%(message)s', level=logging.INFO) nbs = [nbformat.read(nbf, as_version=4) for nbf in args.notebook_files] anb = nbs[0] nb_add_metadata(anb) snbs = nbs[1:] nb = nbcollate(nbs[0], snbs) suffix = "-combined" root, ext = os.path.splitext(args.notebook_files[0]) out = "{}{}{}".format(root, suffix, ext) with open(out, 'w') as fp: nbformat.write(nb, fp)
#!/usr/bin/env python """nbcollate combines a set of Jupyter notebooks into a single notebook. Command-line interface for nbcollate. """ import argparse import logging import os import sys import nbformat from . import nbcollate, nb_add_metadata Parser = argparse.ArgumentParser(description="Create a combined notebook.") Parser.add_argument('-v', '--verbose', action='store_true') Parser.add_argument('notebook_files', nargs='+', metavar='NOTEBOOK_FILE') def main(): args = Parser.parse_args(sys.argv[1:]) if args.verbose: logging.basicConfig(format='%(message)s', level=logging.INFO) nbs = [nbformat.read(nbf, as_version=4) for nbf in args.notebook_files] anb = nbs[0] snbs = nbs[1:] nb = nbcollate(nbs[0], snbs) suffix = "-combined" root, ext = os.path.splitext(args.notebook_files[0]) out = "{}{}{}".format(root, suffix, ext) with open(out, 'w') as fp: nbformat.write(nb, fp) Add metadata to assignment notebook#!/usr/bin/env python """nbcollate combines a set of Jupyter notebooks into a single notebook. Command-line interface for nbcollate. """ import argparse import logging import os import sys import nbformat from . import nbcollate, nb_add_metadata Parser = argparse.ArgumentParser(description="Create a combined notebook.") Parser.add_argument('-v', '--verbose', action='store_true') Parser.add_argument('notebook_files', nargs='+', metavar='NOTEBOOK_FILE') def main(): args = Parser.parse_args(sys.argv[1:]) if args.verbose: logging.basicConfig(format='%(message)s', level=logging.INFO) nbs = [nbformat.read(nbf, as_version=4) for nbf in args.notebook_files] anb = nbs[0] nb_add_metadata(anb) snbs = nbs[1:] nb = nbcollate(nbs[0], snbs) suffix = "-combined" root, ext = os.path.splitext(args.notebook_files[0]) out = "{}{}{}".format(root, suffix, ext) with open(out, 'w') as fp: nbformat.write(nb, fp)
<commit_before>#!/usr/bin/env python """nbcollate combines a set of Jupyter notebooks into a single notebook. Command-line interface for nbcollate. """ import argparse import logging import os import sys import nbformat from . import nbcollate, nb_add_metadata Parser = argparse.ArgumentParser(description="Create a combined notebook.") Parser.add_argument('-v', '--verbose', action='store_true') Parser.add_argument('notebook_files', nargs='+', metavar='NOTEBOOK_FILE') def main(): args = Parser.parse_args(sys.argv[1:]) if args.verbose: logging.basicConfig(format='%(message)s', level=logging.INFO) nbs = [nbformat.read(nbf, as_version=4) for nbf in args.notebook_files] anb = nbs[0] snbs = nbs[1:] nb = nbcollate(nbs[0], snbs) suffix = "-combined" root, ext = os.path.splitext(args.notebook_files[0]) out = "{}{}{}".format(root, suffix, ext) with open(out, 'w') as fp: nbformat.write(nb, fp) <commit_msg>Add metadata to assignment notebook<commit_after>#!/usr/bin/env python """nbcollate combines a set of Jupyter notebooks into a single notebook. Command-line interface for nbcollate. """ import argparse import logging import os import sys import nbformat from . import nbcollate, nb_add_metadata Parser = argparse.ArgumentParser(description="Create a combined notebook.") Parser.add_argument('-v', '--verbose', action='store_true') Parser.add_argument('notebook_files', nargs='+', metavar='NOTEBOOK_FILE') def main(): args = Parser.parse_args(sys.argv[1:]) if args.verbose: logging.basicConfig(format='%(message)s', level=logging.INFO) nbs = [nbformat.read(nbf, as_version=4) for nbf in args.notebook_files] anb = nbs[0] nb_add_metadata(anb) snbs = nbs[1:] nb = nbcollate(nbs[0], snbs) suffix = "-combined" root, ext = os.path.splitext(args.notebook_files[0]) out = "{}{}{}".format(root, suffix, ext) with open(out, 'w') as fp: nbformat.write(nb, fp)
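The CLI in this record uses the standard nbformat read/write round-trip; the body of `nb_add_metadata` is not shown here, but notebook metadata is a plain dict-like attribute, so it presumably fills fields along these lines. A hedged sketch, assuming a file `input.ipynb` exists and using an illustrative metadata key:

```python
import nbformat

# Read any notebook, normalised to the version-4 schema.
nb = nbformat.read('input.ipynb', as_version=4)

# nb.metadata behaves like a dict; the key below is illustrative only.
nb.metadata.setdefault('language_info', {'name': 'python'})

with open('output.ipynb', 'w') as fp:
    nbformat.write(nb, fp)
```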
9f9d36025db87b7326b235131063ef852f43cef8
euxfel_h5tools/h5index.py
euxfel_h5tools/h5index.py
import csv import h5py import sys def hdf5_datasets(grp): """Print CSV data of all datasets in an HDF5 file. path, shape, dtype """ writer = csv.writer(sys.stdout) writer.writerow(['path', 'shape', 'dtype']) def visitor(path, item): if isinstance(item, h5py.Dataset): writer.writerow([path, item.shape, item.dtype.str]) grp.visititems(visitor) def main(): file = h5py.File(sys.argv[1]) hdf5_datasets(file) if __name__ == '__main__': main()
import csv import h5py import sys def hdf5_datasets(grp): """Print CSV data of all datasets in an HDF5 file. path, shape, dtype """ all_datasets = [] def visitor(path, item): if isinstance(item, h5py.Dataset): all_datasets.append([path, item.shape, item.dtype.str]) grp.visititems(visitor) writer = csv.writer(sys.stdout) writer.writerow(['path', 'shape', 'dtype']) for row in sorted(all_datasets): writer.writerow(row) def main(): file = h5py.File(sys.argv[1]) hdf5_datasets(file) if __name__ == '__main__': main()
Sort datasets for index of HDF5 files
Sort datasets for index of HDF5 files
Python
bsd-3-clause
European-XFEL/h5tools-py
import csv
import h5py
import sys


def hdf5_datasets(grp):
    """Print CSV data of all datasets in an HDF5 file.

    path, shape, dtype
    """
    writer = csv.writer(sys.stdout)
    writer.writerow(['path', 'shape', 'dtype'])

    def visitor(path, item):
        if isinstance(item, h5py.Dataset):
            writer.writerow([path, item.shape, item.dtype.str])

    grp.visititems(visitor)


def main():
    file = h5py.File(sys.argv[1])
    hdf5_datasets(file)


if __name__ == '__main__':
    main()
Sort datasets for index of HDF5 files
import csv
import h5py
import sys


def hdf5_datasets(grp):
    """Print CSV data of all datasets in an HDF5 file.

    path, shape, dtype
    """
    all_datasets = []

    def visitor(path, item):
        if isinstance(item, h5py.Dataset):
            all_datasets.append([path, item.shape, item.dtype.str])

    grp.visititems(visitor)

    writer = csv.writer(sys.stdout)
    writer.writerow(['path', 'shape', 'dtype'])
    for row in sorted(all_datasets):
        writer.writerow(row)


def main():
    file = h5py.File(sys.argv[1])
    hdf5_datasets(file)


if __name__ == '__main__':
    main()
<commit_before>
import csv
import h5py
import sys


def hdf5_datasets(grp):
    """Print CSV data of all datasets in an HDF5 file.

    path, shape, dtype
    """
    writer = csv.writer(sys.stdout)
    writer.writerow(['path', 'shape', 'dtype'])

    def visitor(path, item):
        if isinstance(item, h5py.Dataset):
            writer.writerow([path, item.shape, item.dtype.str])

    grp.visititems(visitor)


def main():
    file = h5py.File(sys.argv[1])
    hdf5_datasets(file)


if __name__ == '__main__':
    main()
<commit_msg>Sort datasets for index of HDF5 files<commit_after>
import csv
import h5py
import sys


def hdf5_datasets(grp):
    """Print CSV data of all datasets in an HDF5 file.

    path, shape, dtype
    """
    all_datasets = []

    def visitor(path, item):
        if isinstance(item, h5py.Dataset):
            all_datasets.append([path, item.shape, item.dtype.str])

    grp.visititems(visitor)

    writer = csv.writer(sys.stdout)
    writer.writerow(['path', 'shape', 'dtype'])
    for row in sorted(all_datasets):
        writer.writerow(row)


def main():
    file = h5py.File(sys.argv[1])
    hdf5_datasets(file)


if __name__ == '__main__':
    main()
import csv
import h5py
import sys


def hdf5_datasets(grp):
    """Print CSV data of all datasets in an HDF5 file.

    path, shape, dtype
    """
    writer = csv.writer(sys.stdout)
    writer.writerow(['path', 'shape', 'dtype'])

    def visitor(path, item):
        if isinstance(item, h5py.Dataset):
            writer.writerow([path, item.shape, item.dtype.str])

    grp.visititems(visitor)


def main():
    file = h5py.File(sys.argv[1])
    hdf5_datasets(file)


if __name__ == '__main__':
    main()
Sort datasets for index of HDF5 files
import csv
import h5py
import sys


def hdf5_datasets(grp):
    """Print CSV data of all datasets in an HDF5 file.

    path, shape, dtype
    """
    all_datasets = []

    def visitor(path, item):
        if isinstance(item, h5py.Dataset):
            all_datasets.append([path, item.shape, item.dtype.str])

    grp.visititems(visitor)

    writer = csv.writer(sys.stdout)
    writer.writerow(['path', 'shape', 'dtype'])
    for row in sorted(all_datasets):
        writer.writerow(row)


def main():
    file = h5py.File(sys.argv[1])
    hdf5_datasets(file)


if __name__ == '__main__':
    main()
<commit_before>
import csv
import h5py
import sys


def hdf5_datasets(grp):
    """Print CSV data of all datasets in an HDF5 file.

    path, shape, dtype
    """
    writer = csv.writer(sys.stdout)
    writer.writerow(['path', 'shape', 'dtype'])

    def visitor(path, item):
        if isinstance(item, h5py.Dataset):
            writer.writerow([path, item.shape, item.dtype.str])

    grp.visititems(visitor)


def main():
    file = h5py.File(sys.argv[1])
    hdf5_datasets(file)


if __name__ == '__main__':
    main()
<commit_msg>Sort datasets for index of HDF5 files<commit_after>
import csv
import h5py
import sys


def hdf5_datasets(grp):
    """Print CSV data of all datasets in an HDF5 file.

    path, shape, dtype
    """
    all_datasets = []

    def visitor(path, item):
        if isinstance(item, h5py.Dataset):
            all_datasets.append([path, item.shape, item.dtype.str])

    grp.visititems(visitor)

    writer = csv.writer(sys.stdout)
    writer.writerow(['path', 'shape', 'dtype'])
    for row in sorted(all_datasets):
        writer.writerow(row)


def main():
    file = h5py.File(sys.argv[1])
    hdf5_datasets(file)


if __name__ == '__main__':
    main()
24a0fcbaea3bca88278f294f41e4b6abd1e82cf3
src/rocommand/__init__.py
src/rocommand/__init__.py
# __init__.py

#__version__ = "0.2.1"  # Initial version with installation package
#__version__ = "0.2.2"  # Updated README documentation for PyPI page
#__version__ = "0.2.3"  # Experimenting with distribution options
#__version__ = "0.2.4"  # Added MANIFEST.in so that data files are part of sdist
#__version__ = "0.2.5"  # Drop references to lpod-show from test suite
__version__ = "0.2.6"   # Enhancements to handling of directories and external references
# __init__.py

#__version__ = "0.2.1"  # Initial version with installation package
#__version__ = "0.2.2"  # Updated README documentation for PyPI page
#__version__ = "0.2.3"  # Experimenting with distribution options
#__version__ = "0.2.4"  # Added MANIFEST.in so that data files are part of sdist
#__version__ = "0.2.5"  # Drop references to lpod-show from test suite
#__version__ = "0.2.6"  # Enhancements to handling of directories and external references
__version__ = "0.2.7"   # Decouple MINIM constraints from target RO
                        # ROSRS (v6) support, support evaluation of RODL/ROSRS objects
                        # new annotation and linking options, annotations with CURIE (QName) properties
                        # add ro remove command, fix URI escaping problems
Add comments summarizing changes in this version
Add comments summarizing changes in this version
Python
mit
wf4ever/ro-manager,wf4ever/ro-manager,wf4ever/ro-manager,wf4ever/ro-manager
# __init__.py

#__version__ = "0.2.1"  # Initial version with installation package
#__version__ = "0.2.2"  # Updated README documentation for PyPI page
#__version__ = "0.2.3"  # Experimenting with distribution options
#__version__ = "0.2.4"  # Added MANIFEST.in so that data files are part of sdist
#__version__ = "0.2.5"  # Drop references to lpod-show from test suite
__version__ = "0.2.6"   # Enhancements to handling of directories and external references
Add comments summarizing changes in this version
# __init__.py

#__version__ = "0.2.1"  # Initial version with installation package
#__version__ = "0.2.2"  # Updated README documentation for PyPI page
#__version__ = "0.2.3"  # Experimenting with distribution options
#__version__ = "0.2.4"  # Added MANIFEST.in so that data files are part of sdist
#__version__ = "0.2.5"  # Drop references to lpod-show from test suite
#__version__ = "0.2.6"  # Enhancements to handling of directories and external references
__version__ = "0.2.7"   # Decouple MINIM constraints from target RO
                        # ROSRS (v6) support, support evaluation of RODL/ROSRS objects
                        # new annotation and linking options, annotations with CURIE (QName) properties
                        # add ro remove command, fix URI escaping problems
<commit_before>
# __init__.py

#__version__ = "0.2.1"  # Initial version with installation package
#__version__ = "0.2.2"  # Updated README documentation for PyPI page
#__version__ = "0.2.3"  # Experimenting with distribution options
#__version__ = "0.2.4"  # Added MANIFEST.in so that data files are part of sdist
#__version__ = "0.2.5"  # Drop references to lpod-show from test suite
__version__ = "0.2.6"   # Enhancements to handling of directories and external references
<commit_msg>Add comments summarizing changes in this version<commit_after>
# __init__.py

#__version__ = "0.2.1"  # Initial version with installation package
#__version__ = "0.2.2"  # Updated README documentation for PyPI page
#__version__ = "0.2.3"  # Experimenting with distribution options
#__version__ = "0.2.4"  # Added MANIFEST.in so that data files are part of sdist
#__version__ = "0.2.5"  # Drop references to lpod-show from test suite
#__version__ = "0.2.6"  # Enhancements to handling of directories and external references
__version__ = "0.2.7"   # Decouple MINIM constraints from target RO
                        # ROSRS (v6) support, support evaluation of RODL/ROSRS objects
                        # new annotation and linking options, annotations with CURIE (QName) properties
                        # add ro remove command, fix URI escaping problems
# __init__.py

#__version__ = "0.2.1"  # Initial version with installation package
#__version__ = "0.2.2"  # Updated README documentation for PyPI page
#__version__ = "0.2.3"  # Experimenting with distribution options
#__version__ = "0.2.4"  # Added MANIFEST.in so that data files are part of sdist
#__version__ = "0.2.5"  # Drop references to lpod-show from test suite
__version__ = "0.2.6"   # Enhancements to handling of directories and external references
Add comments summarizing changes in this version
# __init__.py

#__version__ = "0.2.1"  # Initial version with installation package
#__version__ = "0.2.2"  # Updated README documentation for PyPI page
#__version__ = "0.2.3"  # Experimenting with distribution options
#__version__ = "0.2.4"  # Added MANIFEST.in so that data files are part of sdist
#__version__ = "0.2.5"  # Drop references to lpod-show from test suite
#__version__ = "0.2.6"  # Enhancements to handling of directories and external references
__version__ = "0.2.7"   # Decouple MINIM constraints from target RO
                        # ROSRS (v6) support, support evaluation of RODL/ROSRS objects
                        # new annotation and linking options, annotations with CURIE (QName) properties
                        # add ro remove command, fix URI escaping problems
<commit_before>
# __init__.py

#__version__ = "0.2.1"  # Initial version with installation package
#__version__ = "0.2.2"  # Updated README documentation for PyPI page
#__version__ = "0.2.3"  # Experimenting with distribution options
#__version__ = "0.2.4"  # Added MANIFEST.in so that data files are part of sdist
#__version__ = "0.2.5"  # Drop references to lpod-show from test suite
__version__ = "0.2.6"   # Enhancements to handling of directories and external references
<commit_msg>Add comments summarizing changes in this version<commit_after>
# __init__.py

#__version__ = "0.2.1"  # Initial version with installation package
#__version__ = "0.2.2"  # Updated README documentation for PyPI page
#__version__ = "0.2.3"  # Experimenting with distribution options
#__version__ = "0.2.4"  # Added MANIFEST.in so that data files are part of sdist
#__version__ = "0.2.5"  # Drop references to lpod-show from test suite
#__version__ = "0.2.6"  # Enhancements to handling of directories and external references
__version__ = "0.2.7"   # Decouple MINIM constraints from target RO
                        # ROSRS (v6) support, support evaluation of RODL/ROSRS objects
                        # new annotation and linking options, annotations with CURIE (QName) properties
                        # add ro remove command, fix URI escaping problems
71ce7f3e745b9cee357f867f126dce65f6e210ac
main.py
main.py
import os
import sys
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(PROJECT_ROOT, 'vroom'))

import pygame
import math

from world import Universe

# Initialize pygame
pygame.init()
size = width, height = 800, 600
black = 0, 0, 0
screen = pygame.display.set_mode(size)
clock = pygame.time.Clock()
clock.tick(30)

# Initialize the universe
universe = Universe(screen)
universe.add_nest(100, 100, math.pi / 2, 30.0)
universe.add_hole(600, 100)

while 1:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            sys.exit()

    screen.fill(black)
    universe.update(clock.get_time())
    universe.draw()
    pygame.display.flip()
    clock.tick(30)
import os
import sys
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(PROJECT_ROOT, 'vroom'))

import pygame
import math

from world import Universe

# Initialize pygame
pygame.init()
size = width, height = 800, 600
black = 0, 0, 0
screen = pygame.display.set_mode(size)
clock = pygame.time.Clock()
clock.tick(30)

# Initialize the universe
universe = Universe(screen)
universe.add_road((
    (100, 100),
    (150, 300),
    (250, 500),
    (400, 500),
    (700, 200),
))
universe.add_nest(100, 100, math.pi / 2, 30.0)
universe.add_hole(600, 100)

while 1:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            sys.exit()

    screen.fill(black)
    universe.update(clock.get_time())
    universe.draw()
    pygame.display.flip()
    clock.tick(30)
Add more roads on map
Add more roads on map
Python
mit
thibault/vroom
import os
import sys
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(PROJECT_ROOT, 'vroom'))

import pygame
import math

from world import Universe

# Initialize pygame
pygame.init()
size = width, height = 800, 600
black = 0, 0, 0
screen = pygame.display.set_mode(size)
clock = pygame.time.Clock()
clock.tick(30)

# Initialize the universe
universe = Universe(screen)
universe.add_nest(100, 100, math.pi / 2, 30.0)
universe.add_hole(600, 100)

while 1:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            sys.exit()

    screen.fill(black)
    universe.update(clock.get_time())
    universe.draw()
    pygame.display.flip()
    clock.tick(30)
Add more roads on map
import os
import sys
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(PROJECT_ROOT, 'vroom'))

import pygame
import math

from world import Universe

# Initialize pygame
pygame.init()
size = width, height = 800, 600
black = 0, 0, 0
screen = pygame.display.set_mode(size)
clock = pygame.time.Clock()
clock.tick(30)

# Initialize the universe
universe = Universe(screen)
universe.add_road((
    (100, 100),
    (150, 300),
    (250, 500),
    (400, 500),
    (700, 200),
))
universe.add_nest(100, 100, math.pi / 2, 30.0)
universe.add_hole(600, 100)

while 1:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            sys.exit()

    screen.fill(black)
    universe.update(clock.get_time())
    universe.draw()
    pygame.display.flip()
    clock.tick(30)
<commit_before>
import os
import sys
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(PROJECT_ROOT, 'vroom'))

import pygame
import math

from world import Universe

# Initialize pygame
pygame.init()
size = width, height = 800, 600
black = 0, 0, 0
screen = pygame.display.set_mode(size)
clock = pygame.time.Clock()
clock.tick(30)

# Initialize the universe
universe = Universe(screen)
universe.add_nest(100, 100, math.pi / 2, 30.0)
universe.add_hole(600, 100)

while 1:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            sys.exit()

    screen.fill(black)
    universe.update(clock.get_time())
    universe.draw()
    pygame.display.flip()
    clock.tick(30)
<commit_msg>Add more roads on map<commit_after>
import os
import sys
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(PROJECT_ROOT, 'vroom'))

import pygame
import math

from world import Universe

# Initialize pygame
pygame.init()
size = width, height = 800, 600
black = 0, 0, 0
screen = pygame.display.set_mode(size)
clock = pygame.time.Clock()
clock.tick(30)

# Initialize the universe
universe = Universe(screen)
universe.add_road((
    (100, 100),
    (150, 300),
    (250, 500),
    (400, 500),
    (700, 200),
))
universe.add_nest(100, 100, math.pi / 2, 30.0)
universe.add_hole(600, 100)

while 1:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            sys.exit()

    screen.fill(black)
    universe.update(clock.get_time())
    universe.draw()
    pygame.display.flip()
    clock.tick(30)
import os
import sys
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(PROJECT_ROOT, 'vroom'))

import pygame
import math

from world import Universe

# Initialize pygame
pygame.init()
size = width, height = 800, 600
black = 0, 0, 0
screen = pygame.display.set_mode(size)
clock = pygame.time.Clock()
clock.tick(30)

# Initialize the universe
universe = Universe(screen)
universe.add_nest(100, 100, math.pi / 2, 30.0)
universe.add_hole(600, 100)

while 1:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            sys.exit()

    screen.fill(black)
    universe.update(clock.get_time())
    universe.draw()
    pygame.display.flip()
    clock.tick(30)
Add more roads on map
import os
import sys
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(PROJECT_ROOT, 'vroom'))

import pygame
import math

from world import Universe

# Initialize pygame
pygame.init()
size = width, height = 800, 600
black = 0, 0, 0
screen = pygame.display.set_mode(size)
clock = pygame.time.Clock()
clock.tick(30)

# Initialize the universe
universe = Universe(screen)
universe.add_road((
    (100, 100),
    (150, 300),
    (250, 500),
    (400, 500),
    (700, 200),
))
universe.add_nest(100, 100, math.pi / 2, 30.0)
universe.add_hole(600, 100)

while 1:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            sys.exit()

    screen.fill(black)
    universe.update(clock.get_time())
    universe.draw()
    pygame.display.flip()
    clock.tick(30)
<commit_before>
import os
import sys
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(PROJECT_ROOT, 'vroom'))

import pygame
import math

from world import Universe

# Initialize pygame
pygame.init()
size = width, height = 800, 600
black = 0, 0, 0
screen = pygame.display.set_mode(size)
clock = pygame.time.Clock()
clock.tick(30)

# Initialize the universe
universe = Universe(screen)
universe.add_nest(100, 100, math.pi / 2, 30.0)
universe.add_hole(600, 100)

while 1:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            sys.exit()

    screen.fill(black)
    universe.update(clock.get_time())
    universe.draw()
    pygame.display.flip()
    clock.tick(30)
<commit_msg>Add more roads on map<commit_after>
import os
import sys
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(PROJECT_ROOT, 'vroom'))

import pygame
import math

from world import Universe

# Initialize pygame
pygame.init()
size = width, height = 800, 600
black = 0, 0, 0
screen = pygame.display.set_mode(size)
clock = pygame.time.Clock()
clock.tick(30)

# Initialize the universe
universe = Universe(screen)
universe.add_road((
    (100, 100),
    (150, 300),
    (250, 500),
    (400, 500),
    (700, 200),
))
universe.add_nest(100, 100, math.pi / 2, 30.0)
universe.add_hole(600, 100)

while 1:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            sys.exit()

    screen.fill(black)
    universe.update(clock.get_time())
    universe.draw()
    pygame.display.flip()
    clock.tick(30)
d4d517611104a8b42ccc79a310c510edd5f0eae5
numba/cuda/simulator/cudadrv/driver.py
numba/cuda/simulator/cudadrv/driver.py
'''
Most of the driver API is unsupported in the simulator, but some stubs are
provided to allow tests to import correctly.
'''


def device_memset(dst, val, size, stream=0):
    dst.view('u1')[:size].fill(bytes([val])[0])


def host_to_device(dst, src, size, stream=0):
    dst.view('u1')[:size] = src.view('u1')[:size]


def device_to_host(dst, src, size, stream=0):
    host_to_device(dst, src, size)


def device_memory_size(obj):
    return obj.itemsize * obj.size


def device_to_device(dst, src, size, stream=0):
    host_to_device(dst, src, size)


class FakeDriver(object):
    def get_device_count(self):
        return 1


driver = FakeDriver()

Linker = None


class LinkerError(RuntimeError):
    pass


class CudaAPIError(RuntimeError):
    pass


def launch_kernel(*args, **kwargs):
    msg = 'Launching kernels directly is not supported in the simulator'
    raise RuntimeError(msg)
'''
Most of the driver API is unsupported in the simulator, but some stubs are
provided to allow tests to import correctly.
'''


def device_memset(dst, val, size, stream=0):
    dst.view('u1')[:size].fill(bytes([val])[0])


def host_to_device(dst, src, size, stream=0):
    dst.view('u1')[:size] = src.view('u1')[:size]


def device_to_host(dst, src, size, stream=0):
    host_to_device(dst, src, size)


def device_memory_size(obj):
    return obj.itemsize * obj.size


def device_to_device(dst, src, size, stream=0):
    host_to_device(dst, src, size)


class FakeDriver(object):
    def get_device_count(self):
        return 1


driver = FakeDriver()

Linker = None


class LinkerError(RuntimeError):
    pass


class CudaAPIError(RuntimeError):
    pass


def launch_kernel(*args, **kwargs):
    msg = 'Launching kernels directly is not supported in the simulator'
    raise RuntimeError(msg)


USE_NV_BINDING = False
Fix simulator by adding missing USE_NV_BINDING to simulator
CUDA: Fix simulator by adding missing USE_NV_BINDING to simulator
Python
bsd-2-clause
cpcloud/numba,numba/numba,seibert/numba,IntelLabs/numba,cpcloud/numba,cpcloud/numba,seibert/numba,IntelLabs/numba,numba/numba,IntelLabs/numba,numba/numba,cpcloud/numba,seibert/numba,numba/numba,seibert/numba,IntelLabs/numba,numba/numba,cpcloud/numba,IntelLabs/numba,seibert/numba
'''
Most of the driver API is unsupported in the simulator, but some stubs are
provided to allow tests to import correctly.
'''


def device_memset(dst, val, size, stream=0):
    dst.view('u1')[:size].fill(bytes([val])[0])


def host_to_device(dst, src, size, stream=0):
    dst.view('u1')[:size] = src.view('u1')[:size]


def device_to_host(dst, src, size, stream=0):
    host_to_device(dst, src, size)


def device_memory_size(obj):
    return obj.itemsize * obj.size


def device_to_device(dst, src, size, stream=0):
    host_to_device(dst, src, size)


class FakeDriver(object):
    def get_device_count(self):
        return 1


driver = FakeDriver()

Linker = None


class LinkerError(RuntimeError):
    pass


class CudaAPIError(RuntimeError):
    pass


def launch_kernel(*args, **kwargs):
    msg = 'Launching kernels directly is not supported in the simulator'
    raise RuntimeError(msg)
CUDA: Fix simulator by adding missing USE_NV_BINDING to simulator
'''
Most of the driver API is unsupported in the simulator, but some stubs are
provided to allow tests to import correctly.
'''


def device_memset(dst, val, size, stream=0):
    dst.view('u1')[:size].fill(bytes([val])[0])


def host_to_device(dst, src, size, stream=0):
    dst.view('u1')[:size] = src.view('u1')[:size]


def device_to_host(dst, src, size, stream=0):
    host_to_device(dst, src, size)


def device_memory_size(obj):
    return obj.itemsize * obj.size


def device_to_device(dst, src, size, stream=0):
    host_to_device(dst, src, size)


class FakeDriver(object):
    def get_device_count(self):
        return 1


driver = FakeDriver()

Linker = None


class LinkerError(RuntimeError):
    pass


class CudaAPIError(RuntimeError):
    pass


def launch_kernel(*args, **kwargs):
    msg = 'Launching kernels directly is not supported in the simulator'
    raise RuntimeError(msg)


USE_NV_BINDING = False
<commit_before>
'''
Most of the driver API is unsupported in the simulator, but some stubs are
provided to allow tests to import correctly.
'''


def device_memset(dst, val, size, stream=0):
    dst.view('u1')[:size].fill(bytes([val])[0])


def host_to_device(dst, src, size, stream=0):
    dst.view('u1')[:size] = src.view('u1')[:size]


def device_to_host(dst, src, size, stream=0):
    host_to_device(dst, src, size)


def device_memory_size(obj):
    return obj.itemsize * obj.size


def device_to_device(dst, src, size, stream=0):
    host_to_device(dst, src, size)


class FakeDriver(object):
    def get_device_count(self):
        return 1


driver = FakeDriver()

Linker = None


class LinkerError(RuntimeError):
    pass


class CudaAPIError(RuntimeError):
    pass


def launch_kernel(*args, **kwargs):
    msg = 'Launching kernels directly is not supported in the simulator'
    raise RuntimeError(msg)
<commit_msg>CUDA: Fix simulator by adding missing USE_NV_BINDING to simulator<commit_after>
'''
Most of the driver API is unsupported in the simulator, but some stubs are
provided to allow tests to import correctly.
'''


def device_memset(dst, val, size, stream=0):
    dst.view('u1')[:size].fill(bytes([val])[0])


def host_to_device(dst, src, size, stream=0):
    dst.view('u1')[:size] = src.view('u1')[:size]


def device_to_host(dst, src, size, stream=0):
    host_to_device(dst, src, size)


def device_memory_size(obj):
    return obj.itemsize * obj.size


def device_to_device(dst, src, size, stream=0):
    host_to_device(dst, src, size)


class FakeDriver(object):
    def get_device_count(self):
        return 1


driver = FakeDriver()

Linker = None


class LinkerError(RuntimeError):
    pass


class CudaAPIError(RuntimeError):
    pass


def launch_kernel(*args, **kwargs):
    msg = 'Launching kernels directly is not supported in the simulator'
    raise RuntimeError(msg)


USE_NV_BINDING = False
'''
Most of the driver API is unsupported in the simulator, but some stubs are
provided to allow tests to import correctly.
'''


def device_memset(dst, val, size, stream=0):
    dst.view('u1')[:size].fill(bytes([val])[0])


def host_to_device(dst, src, size, stream=0):
    dst.view('u1')[:size] = src.view('u1')[:size]


def device_to_host(dst, src, size, stream=0):
    host_to_device(dst, src, size)


def device_memory_size(obj):
    return obj.itemsize * obj.size


def device_to_device(dst, src, size, stream=0):
    host_to_device(dst, src, size)


class FakeDriver(object):
    def get_device_count(self):
        return 1


driver = FakeDriver()

Linker = None


class LinkerError(RuntimeError):
    pass


class CudaAPIError(RuntimeError):
    pass


def launch_kernel(*args, **kwargs):
    msg = 'Launching kernels directly is not supported in the simulator'
    raise RuntimeError(msg)
CUDA: Fix simulator by adding missing USE_NV_BINDING to simulator
'''
Most of the driver API is unsupported in the simulator, but some stubs are
provided to allow tests to import correctly.
'''


def device_memset(dst, val, size, stream=0):
    dst.view('u1')[:size].fill(bytes([val])[0])


def host_to_device(dst, src, size, stream=0):
    dst.view('u1')[:size] = src.view('u1')[:size]


def device_to_host(dst, src, size, stream=0):
    host_to_device(dst, src, size)


def device_memory_size(obj):
    return obj.itemsize * obj.size


def device_to_device(dst, src, size, stream=0):
    host_to_device(dst, src, size)


class FakeDriver(object):
    def get_device_count(self):
        return 1


driver = FakeDriver()

Linker = None


class LinkerError(RuntimeError):
    pass


class CudaAPIError(RuntimeError):
    pass


def launch_kernel(*args, **kwargs):
    msg = 'Launching kernels directly is not supported in the simulator'
    raise RuntimeError(msg)


USE_NV_BINDING = False
<commit_before>
'''
Most of the driver API is unsupported in the simulator, but some stubs are
provided to allow tests to import correctly.
'''


def device_memset(dst, val, size, stream=0):
    dst.view('u1')[:size].fill(bytes([val])[0])


def host_to_device(dst, src, size, stream=0):
    dst.view('u1')[:size] = src.view('u1')[:size]


def device_to_host(dst, src, size, stream=0):
    host_to_device(dst, src, size)


def device_memory_size(obj):
    return obj.itemsize * obj.size


def device_to_device(dst, src, size, stream=0):
    host_to_device(dst, src, size)


class FakeDriver(object):
    def get_device_count(self):
        return 1


driver = FakeDriver()

Linker = None


class LinkerError(RuntimeError):
    pass


class CudaAPIError(RuntimeError):
    pass


def launch_kernel(*args, **kwargs):
    msg = 'Launching kernels directly is not supported in the simulator'
    raise RuntimeError(msg)
<commit_msg>CUDA: Fix simulator by adding missing USE_NV_BINDING to simulator<commit_after>
'''
Most of the driver API is unsupported in the simulator, but some stubs are
provided to allow tests to import correctly.
'''


def device_memset(dst, val, size, stream=0):
    dst.view('u1')[:size].fill(bytes([val])[0])


def host_to_device(dst, src, size, stream=0):
    dst.view('u1')[:size] = src.view('u1')[:size]


def device_to_host(dst, src, size, stream=0):
    host_to_device(dst, src, size)


def device_memory_size(obj):
    return obj.itemsize * obj.size


def device_to_device(dst, src, size, stream=0):
    host_to_device(dst, src, size)


class FakeDriver(object):
    def get_device_count(self):
        return 1


driver = FakeDriver()

Linker = None


class LinkerError(RuntimeError):
    pass


class CudaAPIError(RuntimeError):
    pass


def launch_kernel(*args, **kwargs):
    msg = 'Launching kernels directly is not supported in the simulator'
    raise RuntimeError(msg)


USE_NV_BINDING = False
7865d7a37562be8b0af9b3668043d8c08138814b
examples/get_each_args.py
examples/get_each_args.py
#! /usr/bin/env python
# -*- coding: utf-8 -*-

from clint.arguments import Args
from clint.textui import puts, colored

all_args = Args().grouped

for item in all_args:
    if item is not '_':
        puts(colored.red("key:%s"%item))
        print(all_args[item].all)
#! /usr/bin/env python
# -*- coding: utf-8 -*-

import sys
import os

sys.path.insert(0, os.path.abspath('..'))

from clint.arguments import Args
from clint.textui import puts, colored

all_args = Args().grouped

for item in all_args:
    if item is not '_':
        puts(colored.red("key:%s"%item))
        print(all_args[item].all)
Add clint to import paths
Add clint to import paths
Python
isc
kennethreitz/clint
#! /usr/bin/env python
# -*- coding: utf-8 -*-

from clint.arguments import Args
from clint.textui import puts, colored

all_args = Args().grouped

for item in all_args:
    if item is not '_':
        puts(colored.red("key:%s"%item))
        print(all_args[item].all)
Add clint to import paths
#! /usr/bin/env python
# -*- coding: utf-8 -*-

import sys
import os

sys.path.insert(0, os.path.abspath('..'))

from clint.arguments import Args
from clint.textui import puts, colored

all_args = Args().grouped

for item in all_args:
    if item is not '_':
        puts(colored.red("key:%s"%item))
        print(all_args[item].all)
<commit_before>
#! /usr/bin/env python
# -*- coding: utf-8 -*-

from clint.arguments import Args
from clint.textui import puts, colored

all_args = Args().grouped

for item in all_args:
    if item is not '_':
        puts(colored.red("key:%s"%item))
        print(all_args[item].all)
<commit_msg>Add clint to import paths<commit_after>
#! /usr/bin/env python
# -*- coding: utf-8 -*-

import sys
import os

sys.path.insert(0, os.path.abspath('..'))

from clint.arguments import Args
from clint.textui import puts, colored

all_args = Args().grouped

for item in all_args:
    if item is not '_':
        puts(colored.red("key:%s"%item))
        print(all_args[item].all)
#! /usr/bin/env python
# -*- coding: utf-8 -*-

from clint.arguments import Args
from clint.textui import puts, colored

all_args = Args().grouped

for item in all_args:
    if item is not '_':
        puts(colored.red("key:%s"%item))
        print(all_args[item].all)
Add clint to import paths
#! /usr/bin/env python
# -*- coding: utf-8 -*-

import sys
import os

sys.path.insert(0, os.path.abspath('..'))

from clint.arguments import Args
from clint.textui import puts, colored

all_args = Args().grouped

for item in all_args:
    if item is not '_':
        puts(colored.red("key:%s"%item))
        print(all_args[item].all)
<commit_before>
#! /usr/bin/env python
# -*- coding: utf-8 -*-

from clint.arguments import Args
from clint.textui import puts, colored

all_args = Args().grouped

for item in all_args:
    if item is not '_':
        puts(colored.red("key:%s"%item))
        print(all_args[item].all)
<commit_msg>Add clint to import paths<commit_after>
#! /usr/bin/env python
# -*- coding: utf-8 -*-

import sys
import os

sys.path.insert(0, os.path.abspath('..'))

from clint.arguments import Args
from clint.textui import puts, colored

all_args = Args().grouped

for item in all_args:
    if item is not '_':
        puts(colored.red("key:%s"%item))
        print(all_args[item].all)
19ea9c2078ffb506bcab9f175e4275577901c599
bulbs/indexable/management/commands/synces.py
bulbs/indexable/management/commands/synces.py
from django.core.management.base import NoArgsCommand

from elasticutils import get_es
from pyelasticsearch.exceptions import IndexAlreadyExistsError, ElasticHttpError

from bulbs.indexable.conf import settings
from bulbs.indexable.models import polymorphic_indexable_registry


class Command(NoArgsCommand):
    help = 'Creates indexes and mappings for Indexable objects.'

    def handle(self, *args, **options):
        indexes = {}
        for name, model in polymorphic_indexable_registry.all_models.items():
            index = model.get_index_name()
            if index not in indexes:
                indexes[index] = {}
            indexes[index].update(model.get_mapping())

        es = get_es(urls=settings.ES_URLS)
        for index, mappings in indexes.items():
            try:
                es.create_index(index, settings={
                    "mappings": mappings,
                    "settings": settings.ES_SETTINGS
                })
            except IndexAlreadyExistsError:
                pass
            except ElasticHttpError as e:
                self.stderr.write("ES Error: %s" % e.error)

            for doctype, mapping in mappings.items():
                try:
                    es.put_mapping(index, doctype, dict(doctype=mapping))
                except ElasticHttpError as e:
                    self.stderr.write("ES Error: %s" % e.error)
from django.core.management.base import NoArgsCommand

from elasticutils import get_es
from pyelasticsearch.exceptions import IndexAlreadyExistsError, ElasticHttpError

from bulbs.indexable.conf import settings
from bulbs.indexable.models import polymorphic_indexable_registry


class Command(NoArgsCommand):
    help = 'Creates indexes and mappings for Indexable objects.'

    def handle(self, *args, **options):
        indexes = {}
        for name, model in polymorphic_indexable_registry.all_models.items():
            index = model.get_index_name()
            if index not in indexes:
                indexes[index] = {}
            indexes[index].update(model.get_mapping())

        es = get_es(urls=settings.ES_URLS)
        for index, mappings in indexes.items():
            try:
                es.create_index(index, settings={
                    "settings": settings.ES_SETTINGS
                })
            except IndexAlreadyExistsError:
                es.update_settings(index, settings.ES_SETTINGS)
            except ElasticHttpError as e:
                self.stderr.write("ES Error: %s" % e.error)

            for doctype, mapping in mappings.items():
                try:
                    es.put_mapping(index, doctype, dict(doctype=mapping))
                except ElasticHttpError as e:
                    self.stderr.write("ES Error: %s" % e.error)
Create index mappings independently of index, just update settings?
Create index mappings independently of index, just update settings?
Python
mit
theonion/django-bulbs,theonion/django-bulbs,theonion/django-bulbs,pombredanne/django-bulbs,theonion/django-bulbs,pombredanne/django-bulbs,theonion/django-bulbs
from django.core.management.base import NoArgsCommand

from elasticutils import get_es
from pyelasticsearch.exceptions import IndexAlreadyExistsError, ElasticHttpError

from bulbs.indexable.conf import settings
from bulbs.indexable.models import polymorphic_indexable_registry


class Command(NoArgsCommand):
    help = 'Creates indexes and mappings for Indexable objects.'

    def handle(self, *args, **options):
        indexes = {}
        for name, model in polymorphic_indexable_registry.all_models.items():
            index = model.get_index_name()
            if index not in indexes:
                indexes[index] = {}
            indexes[index].update(model.get_mapping())

        es = get_es(urls=settings.ES_URLS)
        for index, mappings in indexes.items():
            try:
                es.create_index(index, settings={
                    "mappings": mappings,
                    "settings": settings.ES_SETTINGS
                })
            except IndexAlreadyExistsError:
                pass
            except ElasticHttpError as e:
                self.stderr.write("ES Error: %s" % e.error)

            for doctype, mapping in mappings.items():
                try:
                    es.put_mapping(index, doctype, dict(doctype=mapping))
                except ElasticHttpError as e:
                    self.stderr.write("ES Error: %s" % e.error)
Create index mappings independently of index, just update settings?
from django.core.management.base import NoArgsCommand

from elasticutils import get_es
from pyelasticsearch.exceptions import IndexAlreadyExistsError, ElasticHttpError

from bulbs.indexable.conf import settings
from bulbs.indexable.models import polymorphic_indexable_registry


class Command(NoArgsCommand):
    help = 'Creates indexes and mappings for Indexable objects.'

    def handle(self, *args, **options):
        indexes = {}
        for name, model in polymorphic_indexable_registry.all_models.items():
            index = model.get_index_name()
            if index not in indexes:
                indexes[index] = {}
            indexes[index].update(model.get_mapping())

        es = get_es(urls=settings.ES_URLS)
        for index, mappings in indexes.items():
            try:
                es.create_index(index, settings={
                    "settings": settings.ES_SETTINGS
                })
            except IndexAlreadyExistsError:
                es.update_settings(index, settings.ES_SETTINGS)
            except ElasticHttpError as e:
                self.stderr.write("ES Error: %s" % e.error)

            for doctype, mapping in mappings.items():
                try:
                    es.put_mapping(index, doctype, dict(doctype=mapping))
                except ElasticHttpError as e:
                    self.stderr.write("ES Error: %s" % e.error)
<commit_before>
from django.core.management.base import NoArgsCommand

from elasticutils import get_es
from pyelasticsearch.exceptions import IndexAlreadyExistsError, ElasticHttpError

from bulbs.indexable.conf import settings
from bulbs.indexable.models import polymorphic_indexable_registry


class Command(NoArgsCommand):
    help = 'Creates indexes and mappings for Indexable objects.'

    def handle(self, *args, **options):
        indexes = {}
        for name, model in polymorphic_indexable_registry.all_models.items():
            index = model.get_index_name()
            if index not in indexes:
                indexes[index] = {}
            indexes[index].update(model.get_mapping())

        es = get_es(urls=settings.ES_URLS)
        for index, mappings in indexes.items():
            try:
                es.create_index(index, settings={
                    "mappings": mappings,
                    "settings": settings.ES_SETTINGS
                })
            except IndexAlreadyExistsError:
                pass
            except ElasticHttpError as e:
                self.stderr.write("ES Error: %s" % e.error)

            for doctype, mapping in mappings.items():
                try:
                    es.put_mapping(index, doctype, dict(doctype=mapping))
                except ElasticHttpError as e:
                    self.stderr.write("ES Error: %s" % e.error)
<commit_msg>Create index mappings independently of index, just update settings?<commit_after>
from django.core.management.base import NoArgsCommand

from elasticutils import get_es
from pyelasticsearch.exceptions import IndexAlreadyExistsError, ElasticHttpError

from bulbs.indexable.conf import settings
from bulbs.indexable.models import polymorphic_indexable_registry


class Command(NoArgsCommand):
    help = 'Creates indexes and mappings for Indexable objects.'

    def handle(self, *args, **options):
        indexes = {}
        for name, model in polymorphic_indexable_registry.all_models.items():
            index = model.get_index_name()
            if index not in indexes:
                indexes[index] = {}
            indexes[index].update(model.get_mapping())

        es = get_es(urls=settings.ES_URLS)
        for index, mappings in indexes.items():
            try:
                es.create_index(index, settings={
                    "settings": settings.ES_SETTINGS
                })
            except IndexAlreadyExistsError:
                es.update_settings(index, settings.ES_SETTINGS)
            except ElasticHttpError as e:
                self.stderr.write("ES Error: %s" % e.error)

            for doctype, mapping in mappings.items():
                try:
                    es.put_mapping(index, doctype, dict(doctype=mapping))
                except ElasticHttpError as e:
                    self.stderr.write("ES Error: %s" % e.error)
from django.core.management.base import NoArgsCommand

from elasticutils import get_es
from pyelasticsearch.exceptions import IndexAlreadyExistsError, ElasticHttpError

from bulbs.indexable.conf import settings
from bulbs.indexable.models import polymorphic_indexable_registry


class Command(NoArgsCommand):
    help = 'Creates indexes and mappings for Indexable objects.'

    def handle(self, *args, **options):
        indexes = {}
        for name, model in polymorphic_indexable_registry.all_models.items():
            index = model.get_index_name()
            if index not in indexes:
                indexes[index] = {}
            indexes[index].update(model.get_mapping())

        es = get_es(urls=settings.ES_URLS)
        for index, mappings in indexes.items():
            try:
                es.create_index(index, settings={
                    "mappings": mappings,
                    "settings": settings.ES_SETTINGS
                })
            except IndexAlreadyExistsError:
                pass
            except ElasticHttpError as e:
                self.stderr.write("ES Error: %s" % e.error)

            for doctype, mapping in mappings.items():
                try:
                    es.put_mapping(index, doctype, dict(doctype=mapping))
                except ElasticHttpError as e:
                    self.stderr.write("ES Error: %s" % e.error)
Create index mappings independently of index, just update settings?
from django.core.management.base import NoArgsCommand

from elasticutils import get_es
from pyelasticsearch.exceptions import IndexAlreadyExistsError, ElasticHttpError

from bulbs.indexable.conf import settings
from bulbs.indexable.models import polymorphic_indexable_registry


class Command(NoArgsCommand):
    help = 'Creates indexes and mappings for Indexable objects.'

    def handle(self, *args, **options):
        indexes = {}
        for name, model in polymorphic_indexable_registry.all_models.items():
            index = model.get_index_name()
            if index not in indexes:
                indexes[index] = {}
            indexes[index].update(model.get_mapping())

        es = get_es(urls=settings.ES_URLS)
        for index, mappings in indexes.items():
            try:
                es.create_index(index, settings={
                    "settings": settings.ES_SETTINGS
                })
            except IndexAlreadyExistsError:
                es.update_settings(index, settings.ES_SETTINGS)
            except ElasticHttpError as e:
                self.stderr.write("ES Error: %s" % e.error)

            for doctype, mapping in mappings.items():
                try:
                    es.put_mapping(index, doctype, dict(doctype=mapping))
                except ElasticHttpError as e:
                    self.stderr.write("ES Error: %s" % e.error)
<commit_before>
from django.core.management.base import NoArgsCommand

from elasticutils import get_es
from pyelasticsearch.exceptions import IndexAlreadyExistsError, ElasticHttpError

from bulbs.indexable.conf import settings
from bulbs.indexable.models import polymorphic_indexable_registry


class Command(NoArgsCommand):
    help = 'Creates indexes and mappings for Indexable objects.'

    def handle(self, *args, **options):
        indexes = {}
        for name, model in polymorphic_indexable_registry.all_models.items():
            index = model.get_index_name()
            if index not in indexes:
                indexes[index] = {}
            indexes[index].update(model.get_mapping())

        es = get_es(urls=settings.ES_URLS)
        for index, mappings in indexes.items():
            try:
                es.create_index(index, settings={
                    "mappings": mappings,
                    "settings": settings.ES_SETTINGS
                })
            except IndexAlreadyExistsError:
                pass
            except ElasticHttpError as e:
                self.stderr.write("ES Error: %s" % e.error)

            for doctype, mapping in mappings.items():
                try:
                    es.put_mapping(index, doctype, dict(doctype=mapping))
                except ElasticHttpError as e:
                    self.stderr.write("ES Error: %s" % e.error)
<commit_msg>Create index mappings independently of index, just update settings?<commit_after>
from django.core.management.base import NoArgsCommand

from elasticutils import get_es
from pyelasticsearch.exceptions import IndexAlreadyExistsError, ElasticHttpError

from bulbs.indexable.conf import settings
from bulbs.indexable.models import polymorphic_indexable_registry


class Command(NoArgsCommand):
    help = 'Creates indexes and mappings for Indexable objects.'

    def handle(self, *args, **options):
        indexes = {}
        for name, model in polymorphic_indexable_registry.all_models.items():
            index = model.get_index_name()
            if index not in indexes:
                indexes[index] = {}
            indexes[index].update(model.get_mapping())

        es = get_es(urls=settings.ES_URLS)
        for index, mappings in indexes.items():
            try:
                es.create_index(index, settings={
                    "settings": settings.ES_SETTINGS
                })
            except IndexAlreadyExistsError:
                es.update_settings(index, settings.ES_SETTINGS)
            except ElasticHttpError as e:
                self.stderr.write("ES Error: %s" % e.error)

            for doctype, mapping in mappings.items():
                try:
                    es.put_mapping(index, doctype, dict(doctype=mapping))
                except ElasticHttpError as e:
                    self.stderr.write("ES Error: %s" % e.error)
55290d237b851c9a245b710fb8b85c4d9a0b9388
addons/base_action_rule/migrations/8.0.1.0/post-migration.py
addons/base_action_rule/migrations/8.0.1.0/post-migration.py
# -*- encoding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2014 HBEE (http://www.hbee.eu)
#    @author: Paulius Sladkevičius <paulius@hbee.eu>
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.openupgrade import openupgrade


@openupgrade.migrate()
def migrate(cr, version):
    cr.execute(
        "UPDATE base_action_rule SET kind = 'on_create_or_write', "
        "filter_pre_id = null, trg_date_id = null, trg_date_range = null, "
        "trg_date_range_type = null"
    )
# -*- encoding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2014 HBEE (http://www.hbee.eu)
#    @author: Paulius Sladkevičius <paulius@hbee.eu>
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.openupgrade import openupgrade


@openupgrade.migrate()
def migrate(cr, version):
    cr.execute(
        "UPDATE base_action_rule SET kind = 'on_create_or_write', "
        "filter_pre_id = null, trg_date_id = null, trg_date_range = null, "
        "trg_date_range_type = null WHERE trg_date_id IS NULL"
    )
    cr.execute(
        "UPDATE base_action_rule SET kind = 'on_time', "
        "filter_pre_id = null WHERE trg_date_id IS NOT NULL"
    )
Set action rules kind to 'on_time'
Set action rules kind to 'on_time'
Python
agpl-3.0
0k/OpenUpgrade,0k/OpenUpgrade,hifly/OpenUpgrade,Endika/OpenUpgrade,0k/OpenUpgrade,pedrobaeza/OpenUpgrade,bwrsandman/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,grap/OpenUpgrade,pedrobaeza/OpenUpgrade,grap/OpenUpgrade,OpenUpgrade/OpenUpgrade,damdam-s/OpenUpgrade,mvaled/OpenUpgrade,mvaled/OpenUpgrade,Endika/OpenUpgrade,pedrobaeza/OpenUpgrade,damdam-s/OpenUpgrade,damdam-s/OpenUpgrade,sebalix/OpenUpgrade,Endika/OpenUpgrade,sebalix/OpenUpgrade,blaggacao/OpenUpgrade,hifly/OpenUpgrade,kirca/OpenUpgrade,pedrobaeza/OpenUpgrade,mvaled/OpenUpgrade,OpenUpgrade/OpenUpgrade,Endika/OpenUpgrade,Endika/OpenUpgrade,grap/OpenUpgrade,Endika/OpenUpgrade,bwrsandman/OpenUpgrade,csrocha/OpenUpgrade,grap/OpenUpgrade,mvaled/OpenUpgrade,kirca/OpenUpgrade,kirca/OpenUpgrade,bwrsandman/OpenUpgrade,blaggacao/OpenUpgrade,sebalix/OpenUpgrade,kirca/OpenUpgrade,sebalix/OpenUpgrade,mvaled/OpenUpgrade,sebalix/OpenUpgrade,csrocha/OpenUpgrade,0k/OpenUpgrade,csrocha/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,grap/OpenUpgrade,damdam-s/OpenUpgrade,sebalix/OpenUpgrade,blaggacao/OpenUpgrade,damdam-s/OpenUpgrade,0k/OpenUpgrade,csrocha/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,hifly/OpenUpgrade,damdam-s/OpenUpgrade,mvaled/OpenUpgrade,csrocha/OpenUpgrade,damdam-s/OpenUpgrade,bwrsandman/OpenUpgrade,bwrsandman/OpenUpgrade,kirca/OpenUpgrade,blaggacao/OpenUpgrade,pedrobaeza/OpenUpgrade,OpenUpgrade/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,grap/OpenUpgrade,grap/OpenUpgrade,pedrobaeza/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,kirca/OpenUpgrade,Endika/OpenUpgrade,OpenUpgrade/OpenUpgrade,mvaled/OpenUpgrade,OpenUpgrade/OpenUpgrade,bwrsandman/OpenUpgrade,blaggacao/OpenUpgrade,csrocha/OpenUpgrade,OpenUpgrade/OpenUpgrade,pedrobaeza/OpenUpgrade,kirca/OpenUpgrade,bwrsandman/OpenUpgrade,csrocha/OpenUpgrade,0k/OpenUpgrade,OpenUpgrade/OpenUpgrade,hifly/OpenUpgrade,hifly/OpenUpgrade,blaggacao/OpenUpgrade,sebalix/OpenUpgrade,hifly/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,hifly/OpenUpgrade,blaggacao/OpenUpgrade
# -*- encoding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2014 HBEE (http://www.hbee.eu)
#    @author: Paulius Sladkevičius <paulius@hbee.eu>
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.openupgrade import openupgrade


@openupgrade.migrate()
def migrate(cr, version):
    cr.execute(
        "UPDATE base_action_rule SET kind = 'on_create_or_write', "
        "filter_pre_id = null, trg_date_id = null, trg_date_range = null, "
        "trg_date_range_type = null"
    )
Set action rules kind to 'on_time'
# -*- encoding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2014 HBEE (http://www.hbee.eu)
#    @author: Paulius Sladkevičius <paulius@hbee.eu>
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.openupgrade import openupgrade


@openupgrade.migrate()
def migrate(cr, version):
    cr.execute(
        "UPDATE base_action_rule SET kind = 'on_create_or_write', "
        "filter_pre_id = null, trg_date_id = null, trg_date_range = null, "
        "trg_date_range_type = null WHERE trg_date_id IS NULL"
    )
    cr.execute(
        "UPDATE base_action_rule SET kind = 'on_time', "
        "filter_pre_id = null WHERE trg_date_id IS NOT NULL"
    )
<commit_before>
# -*- encoding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2014 HBEE (http://www.hbee.eu)
#    @author: Paulius Sladkevičius <paulius@hbee.eu>
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.openupgrade import openupgrade


@openupgrade.migrate()
def migrate(cr, version):
    cr.execute(
        "UPDATE base_action_rule SET kind = 'on_create_or_write', "
        "filter_pre_id = null, trg_date_id = null, trg_date_range = null, "
        "trg_date_range_type = null"
    )
<commit_msg>Set action rules kind to 'on_time'<commit_after>
# -*- encoding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2014 HBEE (http://www.hbee.eu)
#    @author: Paulius Sladkevičius <paulius@hbee.eu>
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.openupgrade import openupgrade


@openupgrade.migrate()
def migrate(cr, version):
    cr.execute(
        "UPDATE base_action_rule SET kind = 'on_create_or_write', "
        "filter_pre_id = null, trg_date_id = null, trg_date_range = null, "
        "trg_date_range_type = null WHERE trg_date_id IS NULL"
    )
    cr.execute(
        "UPDATE base_action_rule SET kind = 'on_time', "
        "filter_pre_id = null WHERE trg_date_id IS NOT NULL"
    )
# -*- encoding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2014 HBEE (http://www.hbee.eu)
#    @author: Paulius Sladkevičius <paulius@hbee.eu>
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.openupgrade import openupgrade


@openupgrade.migrate()
def migrate(cr, version):
    cr.execute(
        "UPDATE base_action_rule SET kind = 'on_create_or_write', "
        "filter_pre_id = null, trg_date_id = null, trg_date_range = null, "
        "trg_date_range_type = null"
    )
Set action rules kind to 'on_time'
# -*- encoding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2014 HBEE (http://www.hbee.eu)
#    @author: Paulius Sladkevičius <paulius@hbee.eu>
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.openupgrade import openupgrade


@openupgrade.migrate()
def migrate(cr, version):
    cr.execute(
        "UPDATE base_action_rule SET kind = 'on_create_or_write', "
        "filter_pre_id = null, trg_date_id = null, trg_date_range = null, "
        "trg_date_range_type = null WHERE trg_date_id IS NULL"
    )
    cr.execute(
        "UPDATE base_action_rule SET kind = 'on_time', "
        "filter_pre_id = null WHERE trg_date_id IS NOT NULL"
    )
<commit_before>
# -*- encoding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2014 HBEE (http://www.hbee.eu)
#    @author: Paulius Sladkevičius <paulius@hbee.eu>
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.openupgrade import openupgrade


@openupgrade.migrate()
def migrate(cr, version):
    cr.execute(
        "UPDATE base_action_rule SET kind = 'on_create_or_write', "
        "filter_pre_id = null, trg_date_id = null, trg_date_range = null, "
        "trg_date_range_type = null"
    )
<commit_msg>Set action rules kind to 'on_time'<commit_after>
# -*- encoding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2014 HBEE (http://www.hbee.eu)
#    @author: Paulius Sladkevičius <paulius@hbee.eu>
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.openupgrade import openupgrade


@openupgrade.migrate()
def migrate(cr, version):
    cr.execute(
        "UPDATE base_action_rule SET kind = 'on_create_or_write', "
        "filter_pre_id = null, trg_date_id = null, trg_date_range = null, "
        "trg_date_range_type = null WHERE trg_date_id IS NULL"
    )
    cr.execute(
        "UPDATE base_action_rule SET kind = 'on_time', "
        "filter_pre_id = null WHERE trg_date_id IS NOT NULL"
    )
c78e1e7aff407355d26c12d06663a7dfb95314b1
accelerator/tests/contexts/analyze_judging_context.py
accelerator/tests/contexts/analyze_judging_context.py
from accelerator.tests.factories import ( CriterionFactory, CriterionOptionSpecFactory, ) from accelerator.tests.contexts.judge_feedback_context import ( JudgeFeedbackContext, ) from accelerator.models import ( JUDGING_FEEDBACK_STATUS_COMPLETE, ) class AnalyzeJudgingContext(JudgeFeedbackContext): def __init__(self, type, name, read_count, options): super().__init__() self.read_count = read_count self.options = options self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE self.feedback.save() self.add_application() # Add unread app self.criterion = CriterionFactory(type=type, name=name, judging_round=self.judging_round) self.option_specs = [CriterionOptionSpecFactory( criterion=self.criterion, count=read_count, option=option) for option in options]
from accelerator.tests.factories import ( CriterionFactory, CriterionOptionSpecFactory, ) from accelerator.tests.contexts.judge_feedback_context import ( JudgeFeedbackContext, ) from accelerator.models import ( JUDGING_FEEDBACK_STATUS_COMPLETE, JudgeApplicationFeedback, ) class AnalyzeJudgingContext(JudgeFeedbackContext): def __init__(self, type, name, read_count, options): super().__init__() self.read_count = read_count self.options = options self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE self.feedback.save() self.add_application() # Add unread app self.criterion = CriterionFactory(type=type, name=name, judging_round=self.judging_round) self.option_specs = [CriterionOptionSpecFactory( criterion=self.criterion, count=read_count, option=option) for option in options] def needed_reads(self): return (self.read_count * len(self.applications) - self.feedback_count()) def feedback_count(self): counts = [JudgeApplicationFeedback.objects.filter( application=app, feedback_status=JUDGING_FEEDBACK_STATUS_COMPLETE).count() for app in self.applications] return sum([min(self.read_count, count) for count in counts])
Add needed_reads method to avoid magic numbers
Add needed_reads method to avoid magic numbers
Python
mit
masschallenge/django-accelerator,masschallenge/django-accelerator
from accelerator.tests.factories import ( CriterionFactory, CriterionOptionSpecFactory, ) from accelerator.tests.contexts.judge_feedback_context import ( JudgeFeedbackContext, ) from accelerator.models import ( JUDGING_FEEDBACK_STATUS_COMPLETE, ) class AnalyzeJudgingContext(JudgeFeedbackContext): def __init__(self, type, name, read_count, options): super().__init__() self.read_count = read_count self.options = options self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE self.feedback.save() self.add_application() # Add unread app self.criterion = CriterionFactory(type=type, name=name, judging_round=self.judging_round) self.option_specs = [CriterionOptionSpecFactory( criterion=self.criterion, count=read_count, option=option) for option in options] Add needed_reads method to avoid magic numbers
from accelerator.tests.factories import ( CriterionFactory, CriterionOptionSpecFactory, ) from accelerator.tests.contexts.judge_feedback_context import ( JudgeFeedbackContext, ) from accelerator.models import ( JUDGING_FEEDBACK_STATUS_COMPLETE, JudgeApplicationFeedback, ) class AnalyzeJudgingContext(JudgeFeedbackContext): def __init__(self, type, name, read_count, options): super().__init__() self.read_count = read_count self.options = options self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE self.feedback.save() self.add_application() # Add unread app self.criterion = CriterionFactory(type=type, name=name, judging_round=self.judging_round) self.option_specs = [CriterionOptionSpecFactory( criterion=self.criterion, count=read_count, option=option) for option in options] def needed_reads(self): return (self.read_count * len(self.applications) - self.feedback_count()) def feedback_count(self): counts = [JudgeApplicationFeedback.objects.filter( application=app, feedback_status=JUDGING_FEEDBACK_STATUS_COMPLETE).count() for app in self.applications] return sum([min(self.read_count, count) for count in counts])
<commit_before>from accelerator.tests.factories import ( CriterionFactory, CriterionOptionSpecFactory, ) from accelerator.tests.contexts.judge_feedback_context import ( JudgeFeedbackContext, ) from accelerator.models import ( JUDGING_FEEDBACK_STATUS_COMPLETE, ) class AnalyzeJudgingContext(JudgeFeedbackContext): def __init__(self, type, name, read_count, options): super().__init__() self.read_count = read_count self.options = options self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE self.feedback.save() self.add_application() # Add unread app self.criterion = CriterionFactory(type=type, name=name, judging_round=self.judging_round) self.option_specs = [CriterionOptionSpecFactory( criterion=self.criterion, count=read_count, option=option) for option in options] <commit_msg>Add needed_reads method to avoid magic numbers<commit_after>
from accelerator.tests.factories import ( CriterionFactory, CriterionOptionSpecFactory, ) from accelerator.tests.contexts.judge_feedback_context import ( JudgeFeedbackContext, ) from accelerator.models import ( JUDGING_FEEDBACK_STATUS_COMPLETE, JudgeApplicationFeedback, ) class AnalyzeJudgingContext(JudgeFeedbackContext): def __init__(self, type, name, read_count, options): super().__init__() self.read_count = read_count self.options = options self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE self.feedback.save() self.add_application() # Add unread app self.criterion = CriterionFactory(type=type, name=name, judging_round=self.judging_round) self.option_specs = [CriterionOptionSpecFactory( criterion=self.criterion, count=read_count, option=option) for option in options] def needed_reads(self): return (self.read_count * len(self.applications) - self.feedback_count()) def feedback_count(self): counts = [JudgeApplicationFeedback.objects.filter( application=app, feedback_status=JUDGING_FEEDBACK_STATUS_COMPLETE).count() for app in self.applications] return sum([min(self.read_count, count) for count in counts])
from accelerator.tests.factories import ( CriterionFactory, CriterionOptionSpecFactory, ) from accelerator.tests.contexts.judge_feedback_context import ( JudgeFeedbackContext, ) from accelerator.models import ( JUDGING_FEEDBACK_STATUS_COMPLETE, ) class AnalyzeJudgingContext(JudgeFeedbackContext): def __init__(self, type, name, read_count, options): super().__init__() self.read_count = read_count self.options = options self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE self.feedback.save() self.add_application() # Add unread app self.criterion = CriterionFactory(type=type, name=name, judging_round=self.judging_round) self.option_specs = [CriterionOptionSpecFactory( criterion=self.criterion, count=read_count, option=option) for option in options] Add needed_reads method to avoid magic numbersfrom accelerator.tests.factories import ( CriterionFactory, CriterionOptionSpecFactory, ) from accelerator.tests.contexts.judge_feedback_context import ( JudgeFeedbackContext, ) from accelerator.models import ( JUDGING_FEEDBACK_STATUS_COMPLETE, JudgeApplicationFeedback, ) class AnalyzeJudgingContext(JudgeFeedbackContext): def __init__(self, type, name, read_count, options): super().__init__() self.read_count = read_count self.options = options self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE self.feedback.save() self.add_application() # Add unread app self.criterion = CriterionFactory(type=type, name=name, judging_round=self.judging_round) self.option_specs = [CriterionOptionSpecFactory( criterion=self.criterion, count=read_count, option=option) for option in options] def needed_reads(self): return (self.read_count * len(self.applications) - self.feedback_count()) def feedback_count(self): counts = [JudgeApplicationFeedback.objects.filter( application=app, feedback_status=JUDGING_FEEDBACK_STATUS_COMPLETE).count() for app in self.applications] return sum([min(self.read_count, count) for count in counts])
<commit_before>from accelerator.tests.factories import ( CriterionFactory, CriterionOptionSpecFactory, ) from accelerator.tests.contexts.judge_feedback_context import ( JudgeFeedbackContext, ) from accelerator.models import ( JUDGING_FEEDBACK_STATUS_COMPLETE, ) class AnalyzeJudgingContext(JudgeFeedbackContext): def __init__(self, type, name, read_count, options): super().__init__() self.read_count = read_count self.options = options self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE self.feedback.save() self.add_application() # Add unread app self.criterion = CriterionFactory(type=type, name=name, judging_round=self.judging_round) self.option_specs = [CriterionOptionSpecFactory( criterion=self.criterion, count=read_count, option=option) for option in options] <commit_msg>Add needed_reads method to avoid magic numbers<commit_after>from accelerator.tests.factories import ( CriterionFactory, CriterionOptionSpecFactory, ) from accelerator.tests.contexts.judge_feedback_context import ( JudgeFeedbackContext, ) from accelerator.models import ( JUDGING_FEEDBACK_STATUS_COMPLETE, JudgeApplicationFeedback, ) class AnalyzeJudgingContext(JudgeFeedbackContext): def __init__(self, type, name, read_count, options): super().__init__() self.read_count = read_count self.options = options self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE self.feedback.save() self.add_application() # Add unread app self.criterion = CriterionFactory(type=type, name=name, judging_round=self.judging_round) self.option_specs = [CriterionOptionSpecFactory( criterion=self.criterion, count=read_count, option=option) for option in options] def needed_reads(self): return (self.read_count * len(self.applications) - self.feedback_count()) def feedback_count(self): counts = [JudgeApplicationFeedback.objects.filter( application=app, feedback_status=JUDGING_FEEDBACK_STATUS_COMPLETE).count() for app in self.applications] return sum([min(self.read_count, count) for count in counts])
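To make the counting logic in needed_reads and feedback_count concrete, here is the same arithmetic on plain lists, free of the Django ORM; read_count and completed_counts are made-up sample values.

read_count = 3                     # target reads per application
completed_counts = [3, 5, 1, 0]    # completed feedback rows per application

# Surplus reads on one application cannot cover a deficit on another,
# so each per-application count is capped at read_count before summing.
feedback_count = sum(min(read_count, c) for c in completed_counts)
needed_reads = read_count * len(completed_counts) - feedback_count
print(feedback_count, needed_reads)  # -> 7 5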
e69559b81e9b52eb0834df67b0197aa0f734db3c
wafer/talks/admin.py
wafer/talks/admin.py
from django.contrib import admin from wafer.talks.models import Talk class TalkAdmin(admin.ModelAdmin): list_display = ('corresponding_author', 'title', 'status') admin.site.register(Talk, TalkAdmin)
from django.contrib import admin from wafer.talks.models import Talk class TalkAdmin(admin.ModelAdmin): list_display = ('corresponding_author', 'title', 'status') list_editable = ('status',) admin.site.register(Talk, TalkAdmin)
Make talk status editable from the talk list overview
Make talk status editable from the talk list overview
Python
isc
CTPUG/wafer,CTPUG/wafer,CarlFK/wafer,CTPUG/wafer,CarlFK/wafer,CarlFK/wafer,CTPUG/wafer,CarlFK/wafer
from django.contrib import admin

from wafer.talks.models import Talk


class TalkAdmin(admin.ModelAdmin):
    list_display = ('corresponding_author', 'title', 'status')

admin.site.register(Talk, TalkAdmin)

Make talk status editable from the talk list overview
from django.contrib import admin from wafer.talks.models import Talk class TalkAdmin(admin.ModelAdmin): list_display = ('corresponding_author', 'title', 'status') list_editable = ('status',) admin.site.register(Talk, TalkAdmin)
<commit_before>from django.contrib import admin

from wafer.talks.models import Talk


class TalkAdmin(admin.ModelAdmin):
    list_display = ('corresponding_author', 'title', 'status')

admin.site.register(Talk, TalkAdmin)
<commit_msg>Make talk status editable from the talk list overview<commit_after>
from django.contrib import admin from wafer.talks.models import Talk class TalkAdmin(admin.ModelAdmin): list_display = ('corresponding_author', 'title', 'status') list_editable = ('status',) admin.site.register(Talk, TalkAdmin)
from django.contrib import admin

from wafer.talks.models import Talk


class TalkAdmin(admin.ModelAdmin):
    list_display = ('corresponding_author', 'title', 'status')

admin.site.register(Talk, TalkAdmin)
Make talk status editable from the talk list overviewfrom django.contrib import admin

from wafer.talks.models import Talk


class TalkAdmin(admin.ModelAdmin):
    list_display = ('corresponding_author', 'title', 'status')
    list_editable = ('status',)

admin.site.register(Talk, TalkAdmin)
<commit_before>from django.contrib import admin

from wafer.talks.models import Talk


class TalkAdmin(admin.ModelAdmin):
    list_display = ('corresponding_author', 'title', 'status')

admin.site.register(Talk, TalkAdmin)
<commit_msg>Make talk status editable from the talk list overview<commit_after>from django.contrib import admin

from wafer.talks.models import Talk


class TalkAdmin(admin.ModelAdmin):
    list_display = ('corresponding_author', 'title', 'status')
    list_editable = ('status',)

admin.site.register(Talk, TalkAdmin)
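A small standalone check of the Django rule that makes the change above valid: every list_editable field must also appear in list_display, and the first list_display column (which carries the change link) cannot be editable. No Django install is needed to see the constraint itself; the tuples mirror TalkAdmin.

list_display = ('corresponding_author', 'title', 'status')
list_editable = ('status',)

# Django's admin validation enforces both of these checks at startup.
assert set(list_editable) <= set(list_display)
assert list_display[0] not in list_editable
print("'status' renders as an editable widget on the talk changelist")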
49bd893340f456f18101fb2fec7d66026326f401
bot/action/extra/messages/stored_message.py
bot/action/extra/messages/stored_message.py
import json from bot.action.core.command import UnderscoredCommandBuilder from bot.action.extra.messages import analyzer from bot.api.domain import ApiObject class StoredMessage: def __init__(self, message_id, message, *edited_messages): self.message_id = message_id self.message = message self.edited_messages = edited_messages @property def user_id(self): return self.message.from_ def printable_info(self, event, user_storage_handler): show_command = UnderscoredCommandBuilder.build_command(event.command, self.message_id) return analyzer.get_short_info(user_storage_handler, self, show_command) def printable_full_message(self, user_storage_handler): return analyzer.get_full_content(user_storage_handler, self) @staticmethod def deserialize(message_id, data): messages = [] for line in data.splitlines(): message_data = json.loads(line) message = ApiObject.wrap_api_object(message_data) messages.append(message) return StoredMessage(message_id, *messages)
import json from bot.action.core.command import UnderscoredCommandBuilder from bot.action.extra.messages import analyzer, StoredMessageMapper from bot.api.domain import ApiObject class StoredMessage: def __init__(self, message_id, message, *edited_messages, incomplete=False): self.message_id = message_id self.message = message self.edited_messages = edited_messages self.incomplete = incomplete @property def user_id(self): return self.message.from_ def printable_info(self, event, user_storage_handler): show_command = UnderscoredCommandBuilder.build_command(event.command, self.message_id) return analyzer.get_short_info(user_storage_handler, self, show_command) def printable_full_message(self, user_storage_handler): return analyzer.get_full_content(user_storage_handler, self) @staticmethod def deserialize(message_id, data): messages = [] for line in data.splitlines(): message_data = json.loads(line) message = ApiObject.wrap_api_object(message_data) messages.append(message) return StoredMessage(message_id, *messages) @staticmethod def from_message(message): message_id = message.message_id data = StoredMessageMapper.from_api(message).map().to_data() mapped_message = ApiObject.wrap_api_object(data) return StoredMessage(message_id, mapped_message, incomplete=True)
Add a from_message static method to StoredMessage, also an incomplete field
Add a from_message static method to StoredMessage, also an incomplete field
Python
agpl-3.0
alvarogzp/telegram-bot,alvarogzp/telegram-bot
import json from bot.action.core.command import UnderscoredCommandBuilder from bot.action.extra.messages import analyzer from bot.api.domain import ApiObject class StoredMessage: def __init__(self, message_id, message, *edited_messages): self.message_id = message_id self.message = message self.edited_messages = edited_messages @property def user_id(self): return self.message.from_ def printable_info(self, event, user_storage_handler): show_command = UnderscoredCommandBuilder.build_command(event.command, self.message_id) return analyzer.get_short_info(user_storage_handler, self, show_command) def printable_full_message(self, user_storage_handler): return analyzer.get_full_content(user_storage_handler, self) @staticmethod def deserialize(message_id, data): messages = [] for line in data.splitlines(): message_data = json.loads(line) message = ApiObject.wrap_api_object(message_data) messages.append(message) return StoredMessage(message_id, *messages) Add a from_message static method to StoredMessage, also an incomplete field
import json from bot.action.core.command import UnderscoredCommandBuilder from bot.action.extra.messages import analyzer, StoredMessageMapper from bot.api.domain import ApiObject class StoredMessage: def __init__(self, message_id, message, *edited_messages, incomplete=False): self.message_id = message_id self.message = message self.edited_messages = edited_messages self.incomplete = incomplete @property def user_id(self): return self.message.from_ def printable_info(self, event, user_storage_handler): show_command = UnderscoredCommandBuilder.build_command(event.command, self.message_id) return analyzer.get_short_info(user_storage_handler, self, show_command) def printable_full_message(self, user_storage_handler): return analyzer.get_full_content(user_storage_handler, self) @staticmethod def deserialize(message_id, data): messages = [] for line in data.splitlines(): message_data = json.loads(line) message = ApiObject.wrap_api_object(message_data) messages.append(message) return StoredMessage(message_id, *messages) @staticmethod def from_message(message): message_id = message.message_id data = StoredMessageMapper.from_api(message).map().to_data() mapped_message = ApiObject.wrap_api_object(data) return StoredMessage(message_id, mapped_message, incomplete=True)
<commit_before>import json from bot.action.core.command import UnderscoredCommandBuilder from bot.action.extra.messages import analyzer from bot.api.domain import ApiObject class StoredMessage: def __init__(self, message_id, message, *edited_messages): self.message_id = message_id self.message = message self.edited_messages = edited_messages @property def user_id(self): return self.message.from_ def printable_info(self, event, user_storage_handler): show_command = UnderscoredCommandBuilder.build_command(event.command, self.message_id) return analyzer.get_short_info(user_storage_handler, self, show_command) def printable_full_message(self, user_storage_handler): return analyzer.get_full_content(user_storage_handler, self) @staticmethod def deserialize(message_id, data): messages = [] for line in data.splitlines(): message_data = json.loads(line) message = ApiObject.wrap_api_object(message_data) messages.append(message) return StoredMessage(message_id, *messages) <commit_msg>Add a from_message static method to StoredMessage, also an incomplete field<commit_after>
import json from bot.action.core.command import UnderscoredCommandBuilder from bot.action.extra.messages import analyzer, StoredMessageMapper from bot.api.domain import ApiObject class StoredMessage: def __init__(self, message_id, message, *edited_messages, incomplete=False): self.message_id = message_id self.message = message self.edited_messages = edited_messages self.incomplete = incomplete @property def user_id(self): return self.message.from_ def printable_info(self, event, user_storage_handler): show_command = UnderscoredCommandBuilder.build_command(event.command, self.message_id) return analyzer.get_short_info(user_storage_handler, self, show_command) def printable_full_message(self, user_storage_handler): return analyzer.get_full_content(user_storage_handler, self) @staticmethod def deserialize(message_id, data): messages = [] for line in data.splitlines(): message_data = json.loads(line) message = ApiObject.wrap_api_object(message_data) messages.append(message) return StoredMessage(message_id, *messages) @staticmethod def from_message(message): message_id = message.message_id data = StoredMessageMapper.from_api(message).map().to_data() mapped_message = ApiObject.wrap_api_object(data) return StoredMessage(message_id, mapped_message, incomplete=True)
import json from bot.action.core.command import UnderscoredCommandBuilder from bot.action.extra.messages import analyzer from bot.api.domain import ApiObject class StoredMessage: def __init__(self, message_id, message, *edited_messages): self.message_id = message_id self.message = message self.edited_messages = edited_messages @property def user_id(self): return self.message.from_ def printable_info(self, event, user_storage_handler): show_command = UnderscoredCommandBuilder.build_command(event.command, self.message_id) return analyzer.get_short_info(user_storage_handler, self, show_command) def printable_full_message(self, user_storage_handler): return analyzer.get_full_content(user_storage_handler, self) @staticmethod def deserialize(message_id, data): messages = [] for line in data.splitlines(): message_data = json.loads(line) message = ApiObject.wrap_api_object(message_data) messages.append(message) return StoredMessage(message_id, *messages) Add a from_message static method to StoredMessage, also an incomplete fieldimport json from bot.action.core.command import UnderscoredCommandBuilder from bot.action.extra.messages import analyzer, StoredMessageMapper from bot.api.domain import ApiObject class StoredMessage: def __init__(self, message_id, message, *edited_messages, incomplete=False): self.message_id = message_id self.message = message self.edited_messages = edited_messages self.incomplete = incomplete @property def user_id(self): return self.message.from_ def printable_info(self, event, user_storage_handler): show_command = UnderscoredCommandBuilder.build_command(event.command, self.message_id) return analyzer.get_short_info(user_storage_handler, self, show_command) def printable_full_message(self, user_storage_handler): return analyzer.get_full_content(user_storage_handler, self) @staticmethod def deserialize(message_id, data): messages = [] for line in data.splitlines(): message_data = json.loads(line) message = ApiObject.wrap_api_object(message_data) messages.append(message) return StoredMessage(message_id, *messages) @staticmethod def from_message(message): message_id = message.message_id data = StoredMessageMapper.from_api(message).map().to_data() mapped_message = ApiObject.wrap_api_object(data) return StoredMessage(message_id, mapped_message, incomplete=True)
<commit_before>import json from bot.action.core.command import UnderscoredCommandBuilder from bot.action.extra.messages import analyzer from bot.api.domain import ApiObject class StoredMessage: def __init__(self, message_id, message, *edited_messages): self.message_id = message_id self.message = message self.edited_messages = edited_messages @property def user_id(self): return self.message.from_ def printable_info(self, event, user_storage_handler): show_command = UnderscoredCommandBuilder.build_command(event.command, self.message_id) return analyzer.get_short_info(user_storage_handler, self, show_command) def printable_full_message(self, user_storage_handler): return analyzer.get_full_content(user_storage_handler, self) @staticmethod def deserialize(message_id, data): messages = [] for line in data.splitlines(): message_data = json.loads(line) message = ApiObject.wrap_api_object(message_data) messages.append(message) return StoredMessage(message_id, *messages) <commit_msg>Add a from_message static method to StoredMessage, also an incomplete field<commit_after>import json from bot.action.core.command import UnderscoredCommandBuilder from bot.action.extra.messages import analyzer, StoredMessageMapper from bot.api.domain import ApiObject class StoredMessage: def __init__(self, message_id, message, *edited_messages, incomplete=False): self.message_id = message_id self.message = message self.edited_messages = edited_messages self.incomplete = incomplete @property def user_id(self): return self.message.from_ def printable_info(self, event, user_storage_handler): show_command = UnderscoredCommandBuilder.build_command(event.command, self.message_id) return analyzer.get_short_info(user_storage_handler, self, show_command) def printable_full_message(self, user_storage_handler): return analyzer.get_full_content(user_storage_handler, self) @staticmethod def deserialize(message_id, data): messages = [] for line in data.splitlines(): message_data = json.loads(line) message = ApiObject.wrap_api_object(message_data) messages.append(message) return StoredMessage(message_id, *messages) @staticmethod def from_message(message): message_id = message.message_id data = StoredMessageMapper.from_api(message).map().to_data() mapped_message = ApiObject.wrap_api_object(data) return StoredMessage(message_id, mapped_message, incomplete=True)
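The new constructor relies on a Python 3 keyword-only argument: because incomplete is declared after *edited_messages, it can never be swallowed by the varargs. A minimal stub (not the real StoredMessage) demonstrating that behaviour:

class Stub:
    def __init__(self, message_id, message, *edited_messages, incomplete=False):
        self.message_id = message_id
        self.message = message
        self.edited_messages = edited_messages
        self.incomplete = incomplete

m = Stub(7, "original", "edit-1", "edit-2")
assert m.edited_messages == ("edit-1", "edit-2") and m.incomplete is False

# `incomplete` must be passed by keyword; extra positionals stay in the varargs.
m2 = Stub(8, "original", incomplete=True)
assert m2.incomplete is True and m2.edited_messages == ()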
ec75d49f1b83a03b57683b622b68a6af2200a567
onitu/api/router.py
onitu/api/router.py
from threading import Thread import zmq from logbook import Logger class Router(Thread): """Thread waiting for a request by another Driver and responding to it with the chunked asked. """ def __init__(self, name, redis, get_chunk): super(Router, self).__init__() self.name = name self.redis = redis self.get_chunk = get_chunk self.router = None self.logger = Logger("{} - Router".format(self.name)) self.context = zmq.Context.instance() def run(self): self.router = self.context.socket(zmq.ROUTER) port = self.router.bind_to_random_port('tcp://*') self.redis.set('drivers:{}:router'.format(self.name), port) while True: self.logger.info("Listening...") msg = self.router.recv_multipart() self._respond_to(*msg) def _respond_to(self, identity, filename, offset, size): """Calls the `get_chunk` handler defined by the Driver to get the chunk and send it to the addressee. """ self.logger.debug("Getting chunk of size {} from offset {} in {}" .format(size, offset, filename)) chunk = self.get_chunk(filename, int(offset), int(size)) self.router.send_multipart((identity, chunk)) self.logger.debug("Chunk sended")
from threading import Thread import zmq from logbook import Logger class Router(Thread): """Thread waiting for a request by another Driver and responding to it with the chunked asked. """ def __init__(self, name, redis, get_chunk): super(Router, self).__init__() self.name = name self.redis = redis self.get_chunk = get_chunk self.router = None self.logger = Logger("{} - Router".format(self.name)) self.context = zmq.Context.instance() def run(self): self.router = self.context.socket(zmq.ROUTER) port = self.router.bind_to_random_port('tcp://*') self.redis.set('drivers:{}:router'.format(self.name), port) while True: msg = self.router.recv_multipart() self._respond_to(*msg) def _respond_to(self, identity, filename, offset, size): """Calls the `get_chunk` handler defined by the Driver to get the chunk and send it to the addressee. """ self.logger.debug("Getting chunk of size {} from offset {} in {}" .format(size, offset, filename)) chunk = self.get_chunk(filename, int(offset), int(size)) self.router.send_multipart((identity, chunk))
Remove annoying logging in Router
Remove annoying logging in Router
Python
mit
onitu/onitu,onitu/onitu,onitu/onitu
from threading import Thread import zmq from logbook import Logger class Router(Thread): """Thread waiting for a request by another Driver and responding to it with the chunked asked. """ def __init__(self, name, redis, get_chunk): super(Router, self).__init__() self.name = name self.redis = redis self.get_chunk = get_chunk self.router = None self.logger = Logger("{} - Router".format(self.name)) self.context = zmq.Context.instance() def run(self): self.router = self.context.socket(zmq.ROUTER) port = self.router.bind_to_random_port('tcp://*') self.redis.set('drivers:{}:router'.format(self.name), port) while True: self.logger.info("Listening...") msg = self.router.recv_multipart() self._respond_to(*msg) def _respond_to(self, identity, filename, offset, size): """Calls the `get_chunk` handler defined by the Driver to get the chunk and send it to the addressee. """ self.logger.debug("Getting chunk of size {} from offset {} in {}" .format(size, offset, filename)) chunk = self.get_chunk(filename, int(offset), int(size)) self.router.send_multipart((identity, chunk)) self.logger.debug("Chunk sended") Remove annoying logging in Router
from threading import Thread import zmq from logbook import Logger class Router(Thread): """Thread waiting for a request by another Driver and responding to it with the chunked asked. """ def __init__(self, name, redis, get_chunk): super(Router, self).__init__() self.name = name self.redis = redis self.get_chunk = get_chunk self.router = None self.logger = Logger("{} - Router".format(self.name)) self.context = zmq.Context.instance() def run(self): self.router = self.context.socket(zmq.ROUTER) port = self.router.bind_to_random_port('tcp://*') self.redis.set('drivers:{}:router'.format(self.name), port) while True: msg = self.router.recv_multipart() self._respond_to(*msg) def _respond_to(self, identity, filename, offset, size): """Calls the `get_chunk` handler defined by the Driver to get the chunk and send it to the addressee. """ self.logger.debug("Getting chunk of size {} from offset {} in {}" .format(size, offset, filename)) chunk = self.get_chunk(filename, int(offset), int(size)) self.router.send_multipart((identity, chunk))
<commit_before>from threading import Thread import zmq from logbook import Logger class Router(Thread): """Thread waiting for a request by another Driver and responding to it with the chunked asked. """ def __init__(self, name, redis, get_chunk): super(Router, self).__init__() self.name = name self.redis = redis self.get_chunk = get_chunk self.router = None self.logger = Logger("{} - Router".format(self.name)) self.context = zmq.Context.instance() def run(self): self.router = self.context.socket(zmq.ROUTER) port = self.router.bind_to_random_port('tcp://*') self.redis.set('drivers:{}:router'.format(self.name), port) while True: self.logger.info("Listening...") msg = self.router.recv_multipart() self._respond_to(*msg) def _respond_to(self, identity, filename, offset, size): """Calls the `get_chunk` handler defined by the Driver to get the chunk and send it to the addressee. """ self.logger.debug("Getting chunk of size {} from offset {} in {}" .format(size, offset, filename)) chunk = self.get_chunk(filename, int(offset), int(size)) self.router.send_multipart((identity, chunk)) self.logger.debug("Chunk sended") <commit_msg>Remove annoying logging in Router<commit_after>
from threading import Thread import zmq from logbook import Logger class Router(Thread): """Thread waiting for a request by another Driver and responding to it with the chunked asked. """ def __init__(self, name, redis, get_chunk): super(Router, self).__init__() self.name = name self.redis = redis self.get_chunk = get_chunk self.router = None self.logger = Logger("{} - Router".format(self.name)) self.context = zmq.Context.instance() def run(self): self.router = self.context.socket(zmq.ROUTER) port = self.router.bind_to_random_port('tcp://*') self.redis.set('drivers:{}:router'.format(self.name), port) while True: msg = self.router.recv_multipart() self._respond_to(*msg) def _respond_to(self, identity, filename, offset, size): """Calls the `get_chunk` handler defined by the Driver to get the chunk and send it to the addressee. """ self.logger.debug("Getting chunk of size {} from offset {} in {}" .format(size, offset, filename)) chunk = self.get_chunk(filename, int(offset), int(size)) self.router.send_multipart((identity, chunk))
from threading import Thread import zmq from logbook import Logger class Router(Thread): """Thread waiting for a request by another Driver and responding to it with the chunked asked. """ def __init__(self, name, redis, get_chunk): super(Router, self).__init__() self.name = name self.redis = redis self.get_chunk = get_chunk self.router = None self.logger = Logger("{} - Router".format(self.name)) self.context = zmq.Context.instance() def run(self): self.router = self.context.socket(zmq.ROUTER) port = self.router.bind_to_random_port('tcp://*') self.redis.set('drivers:{}:router'.format(self.name), port) while True: self.logger.info("Listening...") msg = self.router.recv_multipart() self._respond_to(*msg) def _respond_to(self, identity, filename, offset, size): """Calls the `get_chunk` handler defined by the Driver to get the chunk and send it to the addressee. """ self.logger.debug("Getting chunk of size {} from offset {} in {}" .format(size, offset, filename)) chunk = self.get_chunk(filename, int(offset), int(size)) self.router.send_multipart((identity, chunk)) self.logger.debug("Chunk sended") Remove annoying logging in Routerfrom threading import Thread import zmq from logbook import Logger class Router(Thread): """Thread waiting for a request by another Driver and responding to it with the chunked asked. """ def __init__(self, name, redis, get_chunk): super(Router, self).__init__() self.name = name self.redis = redis self.get_chunk = get_chunk self.router = None self.logger = Logger("{} - Router".format(self.name)) self.context = zmq.Context.instance() def run(self): self.router = self.context.socket(zmq.ROUTER) port = self.router.bind_to_random_port('tcp://*') self.redis.set('drivers:{}:router'.format(self.name), port) while True: msg = self.router.recv_multipart() self._respond_to(*msg) def _respond_to(self, identity, filename, offset, size): """Calls the `get_chunk` handler defined by the Driver to get the chunk and send it to the addressee. """ self.logger.debug("Getting chunk of size {} from offset {} in {}" .format(size, offset, filename)) chunk = self.get_chunk(filename, int(offset), int(size)) self.router.send_multipart((identity, chunk))
<commit_before>from threading import Thread import zmq from logbook import Logger class Router(Thread): """Thread waiting for a request by another Driver and responding to it with the chunked asked. """ def __init__(self, name, redis, get_chunk): super(Router, self).__init__() self.name = name self.redis = redis self.get_chunk = get_chunk self.router = None self.logger = Logger("{} - Router".format(self.name)) self.context = zmq.Context.instance() def run(self): self.router = self.context.socket(zmq.ROUTER) port = self.router.bind_to_random_port('tcp://*') self.redis.set('drivers:{}:router'.format(self.name), port) while True: self.logger.info("Listening...") msg = self.router.recv_multipart() self._respond_to(*msg) def _respond_to(self, identity, filename, offset, size): """Calls the `get_chunk` handler defined by the Driver to get the chunk and send it to the addressee. """ self.logger.debug("Getting chunk of size {} from offset {} in {}" .format(size, offset, filename)) chunk = self.get_chunk(filename, int(offset), int(size)) self.router.send_multipart((identity, chunk)) self.logger.debug("Chunk sended") <commit_msg>Remove annoying logging in Router<commit_after>from threading import Thread import zmq from logbook import Logger class Router(Thread): """Thread waiting for a request by another Driver and responding to it with the chunked asked. """ def __init__(self, name, redis, get_chunk): super(Router, self).__init__() self.name = name self.redis = redis self.get_chunk = get_chunk self.router = None self.logger = Logger("{} - Router".format(self.name)) self.context = zmq.Context.instance() def run(self): self.router = self.context.socket(zmq.ROUTER) port = self.router.bind_to_random_port('tcp://*') self.redis.set('drivers:{}:router'.format(self.name), port) while True: msg = self.router.recv_multipart() self._respond_to(*msg) def _respond_to(self, identity, filename, offset, size): """Calls the `get_chunk` handler defined by the Driver to get the chunk and send it to the addressee. """ self.logger.debug("Getting chunk of size {} from offset {} in {}" .format(size, offset, filename)) chunk = self.get_chunk(filename, int(offset), int(size)) self.router.send_multipart((identity, chunk))
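The deleted line logged at INFO on every pass through the blocking recv loop. A dependency-free sketch with the standard logging module (the real code uses logbook) of the pattern the commit leaves behind: per-request detail stays at DEBUG, so a production INFO level keeps the loop quiet.

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("router")

def respond_to(identity, payload):
    # Only the DEBUG trace of actual work remains; nothing fires per
    # iteration at INFO, so the serving loop is silent in production.
    logger.debug("Responding to %s with %d bytes", identity, len(payload))
    return identity, payload

for msg in [(b"client-1", b"chunk"), (b"client-2", b"chunk")]:
    respond_to(*msg)
print("loop produced no output at INFO level")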
d14160537292c87f870fa8c7d99c253b61420dde
blazeweb/registry.py
blazeweb/registry.py
from paste.registry import StackedObjectProxy as PasteSOP class StackedObjectProxy(PasteSOP): # override b/c of # http://trac.pythonpaste.org/pythonpaste/ticket/482 def _pop_object(self, obj=None): """Remove a thread-local object. If ``obj`` is given, it is checked against the popped object and an error is emitted if they don't match. """ try: popped = self.____local__.objects.pop() if obj is not None and popped is not obj: raise AssertionError( 'The object popped (%s) is not the same as the object ' 'expected (%s)' % (popped, obj)) except AttributeError: raise AssertionError('No object has been registered for this thread')
from paste.registry import StackedObjectProxy as PasteSOP class StackedObjectProxy(PasteSOP): # override b/c of # http://trac.pythonpaste.org/pythonpaste/ticket/482 def _pop_object(self, obj=None): """Remove a thread-local object. If ``obj`` is given, it is checked against the popped object and an error is emitted if they don't match. """ try: popped = self.____local__.objects.pop() if obj is not None and popped is not obj: raise AssertionError( 'The object popped (%s) is not the same as the object ' 'expected (%s)' % (popped, obj)) except AttributeError: raise AssertionError('No object has been registered for this thread') def __bool__(self): return bool(self._current_obj())
Fix boolean conversion of StackedObjectProxy in Python 3
Fix boolean conversion of StackedObjectProxy in Python 3
Python
bsd-3-clause
level12/blazeweb,level12/blazeweb,level12/blazeweb
from paste.registry import StackedObjectProxy as PasteSOP class StackedObjectProxy(PasteSOP): # override b/c of # http://trac.pythonpaste.org/pythonpaste/ticket/482 def _pop_object(self, obj=None): """Remove a thread-local object. If ``obj`` is given, it is checked against the popped object and an error is emitted if they don't match. """ try: popped = self.____local__.objects.pop() if obj is not None and popped is not obj: raise AssertionError( 'The object popped (%s) is not the same as the object ' 'expected (%s)' % (popped, obj)) except AttributeError: raise AssertionError('No object has been registered for this thread') Fix boolean conversion of StackedObjectProxy in Python 3
from paste.registry import StackedObjectProxy as PasteSOP class StackedObjectProxy(PasteSOP): # override b/c of # http://trac.pythonpaste.org/pythonpaste/ticket/482 def _pop_object(self, obj=None): """Remove a thread-local object. If ``obj`` is given, it is checked against the popped object and an error is emitted if they don't match. """ try: popped = self.____local__.objects.pop() if obj is not None and popped is not obj: raise AssertionError( 'The object popped (%s) is not the same as the object ' 'expected (%s)' % (popped, obj)) except AttributeError: raise AssertionError('No object has been registered for this thread') def __bool__(self): return bool(self._current_obj())
<commit_before> from paste.registry import StackedObjectProxy as PasteSOP class StackedObjectProxy(PasteSOP): # override b/c of # http://trac.pythonpaste.org/pythonpaste/ticket/482 def _pop_object(self, obj=None): """Remove a thread-local object. If ``obj`` is given, it is checked against the popped object and an error is emitted if they don't match. """ try: popped = self.____local__.objects.pop() if obj is not None and popped is not obj: raise AssertionError( 'The object popped (%s) is not the same as the object ' 'expected (%s)' % (popped, obj)) except AttributeError: raise AssertionError('No object has been registered for this thread') <commit_msg>Fix boolean conversion of StackedObjectProxy in Python 3<commit_after>
from paste.registry import StackedObjectProxy as PasteSOP class StackedObjectProxy(PasteSOP): # override b/c of # http://trac.pythonpaste.org/pythonpaste/ticket/482 def _pop_object(self, obj=None): """Remove a thread-local object. If ``obj`` is given, it is checked against the popped object and an error is emitted if they don't match. """ try: popped = self.____local__.objects.pop() if obj is not None and popped is not obj: raise AssertionError( 'The object popped (%s) is not the same as the object ' 'expected (%s)' % (popped, obj)) except AttributeError: raise AssertionError('No object has been registered for this thread') def __bool__(self): return bool(self._current_obj())
from paste.registry import StackedObjectProxy as PasteSOP class StackedObjectProxy(PasteSOP): # override b/c of # http://trac.pythonpaste.org/pythonpaste/ticket/482 def _pop_object(self, obj=None): """Remove a thread-local object. If ``obj`` is given, it is checked against the popped object and an error is emitted if they don't match. """ try: popped = self.____local__.objects.pop() if obj is not None and popped is not obj: raise AssertionError( 'The object popped (%s) is not the same as the object ' 'expected (%s)' % (popped, obj)) except AttributeError: raise AssertionError('No object has been registered for this thread') Fix boolean conversion of StackedObjectProxy in Python 3 from paste.registry import StackedObjectProxy as PasteSOP class StackedObjectProxy(PasteSOP): # override b/c of # http://trac.pythonpaste.org/pythonpaste/ticket/482 def _pop_object(self, obj=None): """Remove a thread-local object. If ``obj`` is given, it is checked against the popped object and an error is emitted if they don't match. """ try: popped = self.____local__.objects.pop() if obj is not None and popped is not obj: raise AssertionError( 'The object popped (%s) is not the same as the object ' 'expected (%s)' % (popped, obj)) except AttributeError: raise AssertionError('No object has been registered for this thread') def __bool__(self): return bool(self._current_obj())
<commit_before> from paste.registry import StackedObjectProxy as PasteSOP class StackedObjectProxy(PasteSOP): # override b/c of # http://trac.pythonpaste.org/pythonpaste/ticket/482 def _pop_object(self, obj=None): """Remove a thread-local object. If ``obj`` is given, it is checked against the popped object and an error is emitted if they don't match. """ try: popped = self.____local__.objects.pop() if obj is not None and popped is not obj: raise AssertionError( 'The object popped (%s) is not the same as the object ' 'expected (%s)' % (popped, obj)) except AttributeError: raise AssertionError('No object has been registered for this thread') <commit_msg>Fix boolean conversion of StackedObjectProxy in Python 3<commit_after> from paste.registry import StackedObjectProxy as PasteSOP class StackedObjectProxy(PasteSOP): # override b/c of # http://trac.pythonpaste.org/pythonpaste/ticket/482 def _pop_object(self, obj=None): """Remove a thread-local object. If ``obj`` is given, it is checked against the popped object and an error is emitted if they don't match. """ try: popped = self.____local__.objects.pop() if obj is not None and popped is not obj: raise AssertionError( 'The object popped (%s) is not the same as the object ' 'expected (%s)' % (popped, obj)) except AttributeError: raise AssertionError('No object has been registered for this thread') def __bool__(self): return bool(self._current_obj())
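Python 3 dispatches truth testing to __bool__ (Python 2 used __nonzero__), and a class that defines neither is always truthy. A toy proxy, standing in for StackedObjectProxy, showing why the override above matters:

class Proxy:
    def __init__(self, current):
        self._current = current

    def _current_obj(self):
        return self._current

    def __bool__(self):
        # Delegate truthiness to the proxied object, as the fix does.
        return bool(self._current_obj())

class NaiveProxy:
    def __init__(self, current):
        self._current = current

assert not Proxy([])       # empty target -> proxy is falsy
assert Proxy([1, 2])       # non-empty target -> proxy is truthy
assert NaiveProxy([])      # without __bool__, always truthy in Python 3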
08bffa5f6df497f28fe3481fe80b517628b0f1a3
tmdb3/cache_engine.py
tmdb3/cache_engine.py
#!/usr/bin/env python # -*- coding: utf-8 -*- #----------------------- # Name: cache_engine.py # Python Library # Author: Raymond Wagner # Purpose: Base cache engine class for collecting registered engines #----------------------- class Engines( object ): def __init__(self): self._engines = {} def register(self, engine): self._engines[engine.__name__] = engine self._engines[engine.name] = engine def __getitem__(self, key): return self._engines[key] Engines = Engines() class CacheEngineType( type ): """ Cache Engine Metaclass that registers new engines against the cache for named selection and use. """ def __init__(mcs, name, bases, attrs): super(CacheEngineType, mcs).__init__(name, bases, attrs) if name != 'CacheEngine': # skip base class Engines.register(mcs) class CacheEngine( object ): __metaclass__ = CacheEngineType name = 'unspecified' def __init__(self, parent): self.parent = parent def configure(self): raise RuntimeError def get(self, key): raise RuntimeError def put(self, key, value, lifetime): raise RuntimeError def expire(self, key): raise RuntimeError
#!/usr/bin/env python # -*- coding: utf-8 -*- #----------------------- # Name: cache_engine.py # Python Library # Author: Raymond Wagner # Purpose: Base cache engine class for collecting registered engines #----------------------- class Engines( object ): def __init__(self): self._engines = {} def register(self, engine): self._engines[engine.__name__] = engine self._engines[engine.name] = engine def __getitem__(self, key): return self._engines[key] def __contains__(self, key): return self._engines.__contains__(key) Engines = Engines() class CacheEngineType( type ): """ Cache Engine Metaclass that registers new engines against the cache for named selection and use. """ def __init__(mcs, name, bases, attrs): super(CacheEngineType, mcs).__init__(name, bases, attrs) if name != 'CacheEngine': # skip base class Engines.register(mcs) class CacheEngine( object ): __metaclass__ = CacheEngineType name = 'unspecified' def __init__(self, parent): self.parent = parent def configure(self): raise RuntimeError def get(self, key): raise RuntimeError def put(self, key, value, lifetime): raise RuntimeError def expire(self, key): raise RuntimeError
Add __contains__ for proper lookup in cache Engines class.
Add __contains__ for proper lookup in cache Engines class.
Python
bsd-3-clause
wagnerrp/pytmdb3,naveenvhegde/pytmdb3
#!/usr/bin/env python # -*- coding: utf-8 -*- #----------------------- # Name: cache_engine.py # Python Library # Author: Raymond Wagner # Purpose: Base cache engine class for collecting registered engines #----------------------- class Engines( object ): def __init__(self): self._engines = {} def register(self, engine): self._engines[engine.__name__] = engine self._engines[engine.name] = engine def __getitem__(self, key): return self._engines[key] Engines = Engines() class CacheEngineType( type ): """ Cache Engine Metaclass that registers new engines against the cache for named selection and use. """ def __init__(mcs, name, bases, attrs): super(CacheEngineType, mcs).__init__(name, bases, attrs) if name != 'CacheEngine': # skip base class Engines.register(mcs) class CacheEngine( object ): __metaclass__ = CacheEngineType name = 'unspecified' def __init__(self, parent): self.parent = parent def configure(self): raise RuntimeError def get(self, key): raise RuntimeError def put(self, key, value, lifetime): raise RuntimeError def expire(self, key): raise RuntimeError Add __contains__ for proper lookup in cache Engines class.
#!/usr/bin/env python # -*- coding: utf-8 -*- #----------------------- # Name: cache_engine.py # Python Library # Author: Raymond Wagner # Purpose: Base cache engine class for collecting registered engines #----------------------- class Engines( object ): def __init__(self): self._engines = {} def register(self, engine): self._engines[engine.__name__] = engine self._engines[engine.name] = engine def __getitem__(self, key): return self._engines[key] def __contains__(self, key): return self._engines.__contains__(key) Engines = Engines() class CacheEngineType( type ): """ Cache Engine Metaclass that registers new engines against the cache for named selection and use. """ def __init__(mcs, name, bases, attrs): super(CacheEngineType, mcs).__init__(name, bases, attrs) if name != 'CacheEngine': # skip base class Engines.register(mcs) class CacheEngine( object ): __metaclass__ = CacheEngineType name = 'unspecified' def __init__(self, parent): self.parent = parent def configure(self): raise RuntimeError def get(self, key): raise RuntimeError def put(self, key, value, lifetime): raise RuntimeError def expire(self, key): raise RuntimeError
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- #----------------------- # Name: cache_engine.py # Python Library # Author: Raymond Wagner # Purpose: Base cache engine class for collecting registered engines #----------------------- class Engines( object ): def __init__(self): self._engines = {} def register(self, engine): self._engines[engine.__name__] = engine self._engines[engine.name] = engine def __getitem__(self, key): return self._engines[key] Engines = Engines() class CacheEngineType( type ): """ Cache Engine Metaclass that registers new engines against the cache for named selection and use. """ def __init__(mcs, name, bases, attrs): super(CacheEngineType, mcs).__init__(name, bases, attrs) if name != 'CacheEngine': # skip base class Engines.register(mcs) class CacheEngine( object ): __metaclass__ = CacheEngineType name = 'unspecified' def __init__(self, parent): self.parent = parent def configure(self): raise RuntimeError def get(self, key): raise RuntimeError def put(self, key, value, lifetime): raise RuntimeError def expire(self, key): raise RuntimeError <commit_msg>Add __contains__ for proper lookup in cache Engines class.<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- #----------------------- # Name: cache_engine.py # Python Library # Author: Raymond Wagner # Purpose: Base cache engine class for collecting registered engines #----------------------- class Engines( object ): def __init__(self): self._engines = {} def register(self, engine): self._engines[engine.__name__] = engine self._engines[engine.name] = engine def __getitem__(self, key): return self._engines[key] def __contains__(self, key): return self._engines.__contains__(key) Engines = Engines() class CacheEngineType( type ): """ Cache Engine Metaclass that registers new engines against the cache for named selection and use. """ def __init__(mcs, name, bases, attrs): super(CacheEngineType, mcs).__init__(name, bases, attrs) if name != 'CacheEngine': # skip base class Engines.register(mcs) class CacheEngine( object ): __metaclass__ = CacheEngineType name = 'unspecified' def __init__(self, parent): self.parent = parent def configure(self): raise RuntimeError def get(self, key): raise RuntimeError def put(self, key, value, lifetime): raise RuntimeError def expire(self, key): raise RuntimeError
#!/usr/bin/env python # -*- coding: utf-8 -*- #----------------------- # Name: cache_engine.py # Python Library # Author: Raymond Wagner # Purpose: Base cache engine class for collecting registered engines #----------------------- class Engines( object ): def __init__(self): self._engines = {} def register(self, engine): self._engines[engine.__name__] = engine self._engines[engine.name] = engine def __getitem__(self, key): return self._engines[key] Engines = Engines() class CacheEngineType( type ): """ Cache Engine Metaclass that registers new engines against the cache for named selection and use. """ def __init__(mcs, name, bases, attrs): super(CacheEngineType, mcs).__init__(name, bases, attrs) if name != 'CacheEngine': # skip base class Engines.register(mcs) class CacheEngine( object ): __metaclass__ = CacheEngineType name = 'unspecified' def __init__(self, parent): self.parent = parent def configure(self): raise RuntimeError def get(self, key): raise RuntimeError def put(self, key, value, lifetime): raise RuntimeError def expire(self, key): raise RuntimeError Add __contains__ for proper lookup in cache Engines class.#!/usr/bin/env python # -*- coding: utf-8 -*- #----------------------- # Name: cache_engine.py # Python Library # Author: Raymond Wagner # Purpose: Base cache engine class for collecting registered engines #----------------------- class Engines( object ): def __init__(self): self._engines = {} def register(self, engine): self._engines[engine.__name__] = engine self._engines[engine.name] = engine def __getitem__(self, key): return self._engines[key] def __contains__(self, key): return self._engines.__contains__(key) Engines = Engines() class CacheEngineType( type ): """ Cache Engine Metaclass that registers new engines against the cache for named selection and use. """ def __init__(mcs, name, bases, attrs): super(CacheEngineType, mcs).__init__(name, bases, attrs) if name != 'CacheEngine': # skip base class Engines.register(mcs) class CacheEngine( object ): __metaclass__ = CacheEngineType name = 'unspecified' def __init__(self, parent): self.parent = parent def configure(self): raise RuntimeError def get(self, key): raise RuntimeError def put(self, key, value, lifetime): raise RuntimeError def expire(self, key): raise RuntimeError
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- #----------------------- # Name: cache_engine.py # Python Library # Author: Raymond Wagner # Purpose: Base cache engine class for collecting registered engines #----------------------- class Engines( object ): def __init__(self): self._engines = {} def register(self, engine): self._engines[engine.__name__] = engine self._engines[engine.name] = engine def __getitem__(self, key): return self._engines[key] Engines = Engines() class CacheEngineType( type ): """ Cache Engine Metaclass that registers new engines against the cache for named selection and use. """ def __init__(mcs, name, bases, attrs): super(CacheEngineType, mcs).__init__(name, bases, attrs) if name != 'CacheEngine': # skip base class Engines.register(mcs) class CacheEngine( object ): __metaclass__ = CacheEngineType name = 'unspecified' def __init__(self, parent): self.parent = parent def configure(self): raise RuntimeError def get(self, key): raise RuntimeError def put(self, key, value, lifetime): raise RuntimeError def expire(self, key): raise RuntimeError <commit_msg>Add __contains__ for proper lookup in cache Engines class.<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- #----------------------- # Name: cache_engine.py # Python Library # Author: Raymond Wagner # Purpose: Base cache engine class for collecting registered engines #----------------------- class Engines( object ): def __init__(self): self._engines = {} def register(self, engine): self._engines[engine.__name__] = engine self._engines[engine.name] = engine def __getitem__(self, key): return self._engines[key] def __contains__(self, key): return self._engines.__contains__(key) Engines = Engines() class CacheEngineType( type ): """ Cache Engine Metaclass that registers new engines against the cache for named selection and use. """ def __init__(mcs, name, bases, attrs): super(CacheEngineType, mcs).__init__(name, bases, attrs) if name != 'CacheEngine': # skip base class Engines.register(mcs) class CacheEngine( object ): __metaclass__ = CacheEngineType name = 'unspecified' def __init__(self, parent): self.parent = parent def configure(self): raise RuntimeError def get(self, key): raise RuntimeError def put(self, key, value, lifetime): raise RuntimeError def expire(self, key): raise RuntimeError
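With __contains__ defined, `name in Engines` answers directly from the underlying dict. A trimmed stand-in registry (not the full metaclass machinery) showing the membership protocol; without __contains__, `in` would fall back to old-style iteration via __getitem__ with integer keys and raise KeyError here instead of returning False.

class Registry(object):
    def __init__(self):
        self._engines = {}

    def register(self, name, engine):
        self._engines[name] = engine

    def __getitem__(self, key):
        return self._engines[key]

    def __contains__(self, key):
        # `key in registry` dispatches straight to the dict.
        return key in self._engines

registry = Registry()
registry.register("filecache", object())
assert "filecache" in registry
assert "memcache" not in registry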
0c913d4bf94637da916b609b1b1d0d34b03776b7
tests/test_logger.py
tests/test_logger.py
import pytest from mugloar import dragon, logger @pytest.fixture def log_instance(): """Returns a Logger instance""" return logger.Logger() @pytest.fixture def knight(): return {'agility': 8, 'endurance': 8, 'armor': 0, 'attack': 4} @pytest.fixture def dragon_instance(): return dragon.Dragon() @pytest.fixture def stats_map(): return {'attack': 'scaleThickness', 'armor': 'clawSharpness', 'agility': 'wingStrength', 'endurance': 'fireBreath'} def test_comparison(log_instance, knight, dragon_instance, stats_map): log_instance.comparison(knight, dragon_instance, stats_map)
import pytest from mugloar import dragon, logger @pytest.fixture def log_instance(): """Returns a Logger instance""" return logger.Logger() @pytest.fixture def knight(): return [('endurance', 8), ('attack', 8), ('armor', 0), ('agility', 4)] @pytest.fixture def dragon_instance(): return dragon.Dragon() @pytest.fixture def stats_map(): return {'attack': 'scaleThickness', 'armor': 'clawSharpness', 'agility': 'wingStrength', 'endurance': 'fireBreath'} def test_comparison(log_instance, knight, dragon_instance, stats_map): dragon_instance.set_relative_stats((5, 5, 5, 5), knight) log_instance.comparison(knight, dragon_instance, stats_map)
Implement rudimentary unit tests for logger class
Implement rudimentary unit tests for logger class
Python
mit
reinikai/mugloar
import pytest from mugloar import dragon, logger @pytest.fixture def log_instance(): """Returns a Logger instance""" return logger.Logger() @pytest.fixture def knight(): return {'agility': 8, 'endurance': 8, 'armor': 0, 'attack': 4} @pytest.fixture def dragon_instance(): return dragon.Dragon() @pytest.fixture def stats_map(): return {'attack': 'scaleThickness', 'armor': 'clawSharpness', 'agility': 'wingStrength', 'endurance': 'fireBreath'} def test_comparison(log_instance, knight, dragon_instance, stats_map): log_instance.comparison(knight, dragon_instance, stats_map) Implement rudimentary unit tests for logger class
import pytest from mugloar import dragon, logger @pytest.fixture def log_instance(): """Returns a Logger instance""" return logger.Logger() @pytest.fixture def knight(): return [('endurance', 8), ('attack', 8), ('armor', 0), ('agility', 4)] @pytest.fixture def dragon_instance(): return dragon.Dragon() @pytest.fixture def stats_map(): return {'attack': 'scaleThickness', 'armor': 'clawSharpness', 'agility': 'wingStrength', 'endurance': 'fireBreath'} def test_comparison(log_instance, knight, dragon_instance, stats_map): dragon_instance.set_relative_stats((5, 5, 5, 5), knight) log_instance.comparison(knight, dragon_instance, stats_map)
<commit_before>import pytest from mugloar import dragon, logger @pytest.fixture def log_instance(): """Returns a Logger instance""" return logger.Logger() @pytest.fixture def knight(): return {'agility': 8, 'endurance': 8, 'armor': 0, 'attack': 4} @pytest.fixture def dragon_instance(): return dragon.Dragon() @pytest.fixture def stats_map(): return {'attack': 'scaleThickness', 'armor': 'clawSharpness', 'agility': 'wingStrength', 'endurance': 'fireBreath'} def test_comparison(log_instance, knight, dragon_instance, stats_map): log_instance.comparison(knight, dragon_instance, stats_map) <commit_msg>Implement rudimentary unit tests for logger class<commit_after>
import pytest from mugloar import dragon, logger @pytest.fixture def log_instance(): """Returns a Logger instance""" return logger.Logger() @pytest.fixture def knight(): return [('endurance', 8), ('attack', 8), ('armor', 0), ('agility', 4)] @pytest.fixture def dragon_instance(): return dragon.Dragon() @pytest.fixture def stats_map(): return {'attack': 'scaleThickness', 'armor': 'clawSharpness', 'agility': 'wingStrength', 'endurance': 'fireBreath'} def test_comparison(log_instance, knight, dragon_instance, stats_map): dragon_instance.set_relative_stats((5, 5, 5, 5), knight) log_instance.comparison(knight, dragon_instance, stats_map)
import pytest from mugloar import dragon, logger @pytest.fixture def log_instance(): """Returns a Logger instance""" return logger.Logger() @pytest.fixture def knight(): return {'agility': 8, 'endurance': 8, 'armor': 0, 'attack': 4} @pytest.fixture def dragon_instance(): return dragon.Dragon() @pytest.fixture def stats_map(): return {'attack': 'scaleThickness', 'armor': 'clawSharpness', 'agility': 'wingStrength', 'endurance': 'fireBreath'} def test_comparison(log_instance, knight, dragon_instance, stats_map): log_instance.comparison(knight, dragon_instance, stats_map) Implement rudimentary unit tests for logger classimport pytest from mugloar import dragon, logger @pytest.fixture def log_instance(): """Returns a Logger instance""" return logger.Logger() @pytest.fixture def knight(): return [('endurance', 8), ('attack', 8), ('armor', 0), ('agility', 4)] @pytest.fixture def dragon_instance(): return dragon.Dragon() @pytest.fixture def stats_map(): return {'attack': 'scaleThickness', 'armor': 'clawSharpness', 'agility': 'wingStrength', 'endurance': 'fireBreath'} def test_comparison(log_instance, knight, dragon_instance, stats_map): dragon_instance.set_relative_stats((5, 5, 5, 5), knight) log_instance.comparison(knight, dragon_instance, stats_map)
<commit_before>import pytest from mugloar import dragon, logger @pytest.fixture def log_instance(): """Returns a Logger instance""" return logger.Logger() @pytest.fixture def knight(): return {'agility': 8, 'endurance': 8, 'armor': 0, 'attack': 4} @pytest.fixture def dragon_instance(): return dragon.Dragon() @pytest.fixture def stats_map(): return {'attack': 'scaleThickness', 'armor': 'clawSharpness', 'agility': 'wingStrength', 'endurance': 'fireBreath'} def test_comparison(log_instance, knight, dragon_instance, stats_map): log_instance.comparison(knight, dragon_instance, stats_map) <commit_msg>Implement rudimentary unit tests for logger class<commit_after>import pytest from mugloar import dragon, logger @pytest.fixture def log_instance(): """Returns a Logger instance""" return logger.Logger() @pytest.fixture def knight(): return [('endurance', 8), ('attack', 8), ('armor', 0), ('agility', 4)] @pytest.fixture def dragon_instance(): return dragon.Dragon() @pytest.fixture def stats_map(): return {'attack': 'scaleThickness', 'armor': 'clawSharpness', 'agility': 'wingStrength', 'endurance': 'fireBreath'} def test_comparison(log_instance, knight, dragon_instance, stats_map): dragon_instance.set_relative_stats((5, 5, 5, 5), knight) log_instance.comparison(knight, dragon_instance, stats_map)
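In the mugloar change above, the knight fixture becomes an ordered list of `(stat, value)` pairs and the dragon is primed with `set_relative_stats` before the log comparison runs. As a reminder of how pytest threads fixtures into tests by argument name, here is a self-contained sketch; the test name and assertions are illustrative, not taken from mugloar:

```python
import pytest


@pytest.fixture
def knight():
    # Ordered (stat, value) pairs, strongest stats first, as above.
    return [('endurance', 8), ('attack', 8), ('armor', 0), ('agility', 4)]


def test_knight_stats(knight):
    # pytest injects the fixture because the argument name matches it.
    stats = dict(knight)
    assert max(stats.values()) == 8
    assert stats['armor'] == 0
```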
3d5b893083e9d53516e1738b6fedbd890722b2e9
src/cadorsfeed/views/about.py
src/cadorsfeed/views/about.py
from flask import Module from cadorsfeed.views.util import render_file about = Module(__name__) @about.route('/') def homepage(): return render_file('index.html') @about.route('/disclaimer') def disclaimer(): return render_file('disclaimer.html') @about.route('/about') def about_page(): return render_file('about.html') @about.route('/privacy') def privacy_policy(): return render_file('privacy.html')
from flask import Module from cadorsfeed.views.util import render_file about = Module(__name__) @about.route('/') @about.route('/home') def homepage(): return render_file('index.html') @about.route('/disclaimer') def disclaimer(): return render_file('disclaimer.html') @about.route('/about') def about_page(): return render_file('about.html') @about.route('/privacy') def privacy_policy(): return render_file('privacy.html')
Make /home point to /.
Make /home point to /.
Python
mit
kurtraschke/cadors-parse,kurtraschke/cadors-parse
from flask import Module from cadorsfeed.views.util import render_file about = Module(__name__) @about.route('/') def homepage(): return render_file('index.html') @about.route('/disclaimer') def disclaimer(): return render_file('disclaimer.html') @about.route('/about') def about_page(): return render_file('about.html') @about.route('/privacy') def privacy_policy(): return render_file('privacy.html') Make /home point to /.
from flask import Module from cadorsfeed.views.util import render_file about = Module(__name__) @about.route('/') @about.route('/home') def homepage(): return render_file('index.html') @about.route('/disclaimer') def disclaimer(): return render_file('disclaimer.html') @about.route('/about') def about_page(): return render_file('about.html') @about.route('/privacy') def privacy_policy(): return render_file('privacy.html')
<commit_before>from flask import Module from cadorsfeed.views.util import render_file about = Module(__name__) @about.route('/') def homepage(): return render_file('index.html') @about.route('/disclaimer') def disclaimer(): return render_file('disclaimer.html') @about.route('/about') def about_page(): return render_file('about.html') @about.route('/privacy') def privacy_policy(): return render_file('privacy.html') <commit_msg>Make /home point to /.<commit_after>
from flask import Module from cadorsfeed.views.util import render_file about = Module(__name__) @about.route('/') @about.route('/home') def homepage(): return render_file('index.html') @about.route('/disclaimer') def disclaimer(): return render_file('disclaimer.html') @about.route('/about') def about_page(): return render_file('about.html') @about.route('/privacy') def privacy_policy(): return render_file('privacy.html')
from flask import Module from cadorsfeed.views.util import render_file about = Module(__name__) @about.route('/') def homepage(): return render_file('index.html') @about.route('/disclaimer') def disclaimer(): return render_file('disclaimer.html') @about.route('/about') def about_page(): return render_file('about.html') @about.route('/privacy') def privacy_policy(): return render_file('privacy.html') Make /home point to /.from flask import Module from cadorsfeed.views.util import render_file about = Module(__name__) @about.route('/') @about.route('/home') def homepage(): return render_file('index.html') @about.route('/disclaimer') def disclaimer(): return render_file('disclaimer.html') @about.route('/about') def about_page(): return render_file('about.html') @about.route('/privacy') def privacy_policy(): return render_file('privacy.html')
<commit_before>from flask import Module from cadorsfeed.views.util import render_file about = Module(__name__) @about.route('/') def homepage(): return render_file('index.html') @about.route('/disclaimer') def disclaimer(): return render_file('disclaimer.html') @about.route('/about') def about_page(): return render_file('about.html') @about.route('/privacy') def privacy_policy(): return render_file('privacy.html') <commit_msg>Make /home point to /.<commit_after>from flask import Module from cadorsfeed.views.util import render_file about = Module(__name__) @about.route('/') @about.route('/home') def homepage(): return render_file('index.html') @about.route('/disclaimer') def disclaimer(): return render_file('disclaimer.html') @about.route('/about') def about_page(): return render_file('about.html') @about.route('/privacy') def privacy_policy(): return render_file('privacy.html')
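Stacking two `route` decorators, as the cadorsfeed commit does, is the idiomatic Flask way to serve one view function from several URL rules. A standalone illustration:

```python
from flask import Flask

app = Flask(__name__)


@app.route('/')
@app.route('/home')
def homepage():
    # Both URL rules dispatch to this single view function.
    return 'index'


if __name__ == '__main__':
    app.run()
```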
fc41951d1e395c3cdc8994b4c025e9776d67d4e0
http.py
http.py
from django.http import HttpResponse class HttpResponseCreated(HttpResponse): status_code = 201 class HttpResponseNoContent(HttpResponse): status_code = 204 class HttpResponseNotAllowed(HttpResponse): status_code = 405 def __init__(self, allow_headers): """ RFC2616: The response MUST include an Allow header containing a list of valid methods for the requested resource. """ super(HttpResponseNotAllowed, self).__init__() try: iter(allow_headers) except TypeError: self['Allow'] = allow_headers else: self['Allow'] = ", ".join(allow_headers) class HttpResponseNotAcceptable(HttpResponse): status_code = 406 class HttpResponseConflict(HttpResponse): status_code = 409 class HttpResponseNotImplemented(HttpResponse): status_code = 501
from django.http import HttpResponse class HttpResponseCreated(HttpResponse): status_code = 201 class HttpResponseNoContent(HttpResponse): status_code = 204 class HttpResponseNotAllowed(HttpResponse): status_code = 405 def __init__(self, allow_headers): """ RFC2616: The response MUST include an Allow header containing a list of valid methods for the requested resource. """ super(HttpResponseNotAllowed, self).__init__() try: iter(allow_headers) except TypeError: self['Allow'] = allow_headers else: self['Allow'] = ", ".join(allow_headers) class HttpResponseNotAcceptable(HttpResponse): status_code = 406 class HttpResponseConflict(HttpResponse): status_code = 409 class HttpResponseUnsupportedMediaType(HttpResponse): status_code = 415 class HttpResponseNotImplemented(HttpResponse): status_code = 501
Add Response class for unsupported media
Add Response class for unsupported media
Python
mit
danrex/django-riv,danrex/django-riv
from django.http import HttpResponse class HttpResponseCreated(HttpResponse): status_code = 201 class HttpResponseNoContent(HttpResponse): status_code = 204 class HttpResponseNotAllowed(HttpResponse): status_code = 405 def __init__(self, allow_headers): """ RFC2616: The response MUST include an Allow header containing a list of valid methods for the requested resource. """ super(HttpResponseNotAllowed, self).__init__() try: iter(allow_headers) except TypeError: self['Allow'] = allow_headers else: self['Allow'] = ", ".join(allow_headers) class HttpResponseNotAcceptable(HttpResponse): status_code = 406 class HttpResponseConflict(HttpResponse): status_code = 409 class HttpResponseNotImplemented(HttpResponse): status_code = 501 Add Response class for unsupported media
from django.http import HttpResponse class HttpResponseCreated(HttpResponse): status_code = 201 class HttpResponseNoContent(HttpResponse): status_code = 204 class HttpResponseNotAllowed(HttpResponse): status_code = 405 def __init__(self, allow_headers): """ RFC2616: The response MUST include an Allow header containing a list of valid methods for the requested resource. """ super(HttpResponseNotAllowed, self).__init__() try: iter(allow_headers) except TypeError: self['Allow'] = allow_headers else: self['Allow'] = ", ".join(allow_headers) class HttpResponseNotAcceptable(HttpResponse): status_code = 406 class HttpResponseConflict(HttpResponse): status_code = 409 class HttpResponseUnsupportedMediaType(HttpResponse): status_code = 415 class HttpResponseNotImplemented(HttpResponse): status_code = 501
<commit_before>from django.http import HttpResponse class HttpResponseCreated(HttpResponse): status_code = 201 class HttpResponseNoContent(HttpResponse): status_code = 204 class HttpResponseNotAllowed(HttpResponse): status_code = 405 def __init__(self, allow_headers): """ RFC2616: The response MUST include an Allow header containing a list of valid methods for the requested resource. """ super(HttpResponseNotAllowed, self).__init__() try: iter(allow_headers) except TypeError: self['Allow'] = allow_headers else: self['Allow'] = ", ".join(allow_headers) class HttpResponseNotAcceptable(HttpResponse): status_code = 406 class HttpResponseConflict(HttpResponse): status_code = 409 class HttpResponseNotImplemented(HttpResponse): status_code = 501 <commit_msg>Add Response class for unsupported media<commit_after>
from django.http import HttpResponse class HttpResponseCreated(HttpResponse): status_code = 201 class HttpResponseNoContent(HttpResponse): status_code = 204 class HttpResponseNotAllowed(HttpResponse): status_code = 405 def __init__(self, allow_headers): """ RFC2616: The response MUST include an Allow header containing a list of valid methods for the requested resource. """ super(HttpResponseNotAllowed, self).__init__() try: iter(allow_headers) except TypeError: self['Allow'] = allow_headers else: self['Allow'] = ", ".join(allow_headers) class HttpResponseNotAcceptable(HttpResponse): status_code = 406 class HttpResponseConflict(HttpResponse): status_code = 409 class HttpResponseUnsupportedMediaType(HttpResponse): status_code = 415 class HttpResponseNotImplemented(HttpResponse): status_code = 501
from django.http import HttpResponse class HttpResponseCreated(HttpResponse): status_code = 201 class HttpResponseNoContent(HttpResponse): status_code = 204 class HttpResponseNotAllowed(HttpResponse): status_code = 405 def __init__(self, allow_headers): """ RFC2616: The response MUST include an Allow header containing a list of valid methods for the requested resource. """ super(HttpResponseNotAllowed, self).__init__() try: iter(allow_headers) except TypeError: self['Allow'] = allow_headers else: self['Allow'] = ", ".join(allow_headers) class HttpResponseNotAcceptable(HttpResponse): status_code = 406 class HttpResponseConflict(HttpResponse): status_code = 409 class HttpResponseNotImplemented(HttpResponse): status_code = 501 Add Response class for unsupported mediafrom django.http import HttpResponse class HttpResponseCreated(HttpResponse): status_code = 201 class HttpResponseNoContent(HttpResponse): status_code = 204 class HttpResponseNotAllowed(HttpResponse): status_code = 405 def __init__(self, allow_headers): """ RFC2616: The response MUST include an Allow header containing a list of valid methods for the requested resource. """ super(HttpResponseNotAllowed, self).__init__() try: iter(allow_headers) except TypeError: self['Allow'] = allow_headers else: self['Allow'] = ", ".join(allow_headers) class HttpResponseNotAcceptable(HttpResponse): status_code = 406 class HttpResponseConflict(HttpResponse): status_code = 409 class HttpResponseUnsupportedMediaType(HttpResponse): status_code = 415 class HttpResponseNotImplemented(HttpResponse): status_code = 501
<commit_before>from django.http import HttpResponse class HttpResponseCreated(HttpResponse): status_code = 201 class HttpResponseNoContent(HttpResponse): status_code = 204 class HttpResponseNotAllowed(HttpResponse): status_code = 405 def __init__(self, allow_headers): """ RFC2616: The response MUST include an Allow header containing a list of valid methods for the requested resource. """ super(HttpResponseNotAllowed, self).__init__() try: iter(allow_headers) except TypeError: self['Allow'] = allow_headers else: self['Allow'] = ", ".join(allow_headers) class HttpResponseNotAcceptable(HttpResponse): status_code = 406 class HttpResponseConflict(HttpResponse): status_code = 409 class HttpResponseNotImplemented(HttpResponse): status_code = 501 <commit_msg>Add Response class for unsupported media<commit_after>from django.http import HttpResponse class HttpResponseCreated(HttpResponse): status_code = 201 class HttpResponseNoContent(HttpResponse): status_code = 204 class HttpResponseNotAllowed(HttpResponse): status_code = 405 def __init__(self, allow_headers): """ RFC2616: The response MUST include an Allow header containing a list of valid methods for the requested resource. """ super(HttpResponseNotAllowed, self).__init__() try: iter(allow_headers) except TypeError: self['Allow'] = allow_headers else: self['Allow'] = ", ".join(allow_headers) class HttpResponseNotAcceptable(HttpResponse): status_code = 406 class HttpResponseConflict(HttpResponse): status_code = 409 class HttpResponseUnsupportedMediaType(HttpResponse): status_code = 415 class HttpResponseNotImplemented(HttpResponse): status_code = 501
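Setting a `status_code` class attribute is all Django needs for a custom response class. A hedged usage sketch for the new 415 response — the view and its content-type check are hypothetical, not part of django-riv:

```python
from django.http import HttpResponse


class HttpResponseUnsupportedMediaType(HttpResponse):
    status_code = 415


def upload(request):
    # Reject payloads the endpoint cannot parse, per RFC 2616 sec. 10.4.16.
    content_type = request.META.get('CONTENT_TYPE', '').split(';')[0]
    if content_type != 'application/json':
        return HttpResponseUnsupportedMediaType()
    return HttpResponse('ok')
```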
6323084f97ac80a579d9c8ef7d5fec9cd9a3ec4d
src/ipf/ipfblock/connection.py
src/ipf/ipfblock/connection.py
# -*- coding: utf-8 -*- import ioport import weakref class Connection(object): """ Connection class for IPFBlock Connection binding OPort and IPort of some IPFBlocks """ def __init__(self, oport, iport): # Check port compatibility and free of input port if ioport.is_connect_allowed(oport, iport): self._oport = weakref.ref(oport) self._iport = weakref.ref(iport) self._oport().increase_binded_count() self._iport().set_binded() else: raise ValueError("Can not create Connection with given ports") def __del__(self): self._oport().decrease_binded_count() self._iport().set_free() def contains_port(self, port): return self._iport() == port or self._oport() == port def process(self): """ Send value from output port to input port """ self._iport().pass_value(self._oport().get_value())
# -*- coding: utf-8 -*- import ioport import weakref class Connection(object): """ Connection class for IPFBlock Connection binding OPort and IPort of some IPFBlocks """ def __init__(self, oport, iport): # Check port compatibility and free of input port if ioport.is_connect_allowed(oport, iport): self._oport = weakref.ref(oport) self._iport = weakref.ref(iport) self._oport().increase_binded_count() self._iport().set_binded() else: raise ValueError("Can not create Connection with given ports") def __del__(self): if self._oport() is not None: self._oport().decrease_binded_count() if self._iport() is not None: self._iport().set_free() def contains_port(self, port): return self._iport() == port or self._oport() == port def process(self): """ Send value from output port to input port """ self._iport().pass_value(self._oport().get_value())
Check weakrefs to ports before using in destructor.
Check weakrefs to ports before using in destructor. Prevent raising an exception when a connection is deleted after its block.
Python
lgpl-2.1
anton-golubkov/Garland,anton-golubkov/Garland
# -*- coding: utf-8 -*- import ioport import weakref class Connection(object): """ Connection class for IPFBlock Connection binding OPort and IPort of some IPFBlocks """ def __init__(self, oport, iport): # Check port compatibility and free of input port if ioport.is_connect_allowed(oport, iport): self._oport = weakref.ref(oport) self._iport = weakref.ref(iport) self._oport().increase_binded_count() self._iport().set_binded() else: raise ValueError("Can not create Connection with given ports") def __del__(self): self._oport().decrease_binded_count() self._iport().set_free() def contains_port(self, port): return self._iport() == port or self._oport() == port def process(self): """ Send value from output port to input port """ self._iport().pass_value(self._oport().get_value()) Check weakrefs to ports before using in destructor. Prevent raising an exception when a connection is deleted after its block.
# -*- coding: utf-8 -*- import ioport import weakref class Connection(object): """ Connection class for IPFBlock Connection binding OPort and IPort of some IPFBlocks """ def __init__(self, oport, iport): # Check port compatibility and free of input port if ioport.is_connect_allowed(oport, iport): self._oport = weakref.ref(oport) self._iport = weakref.ref(iport) self._oport().increase_binded_count() self._iport().set_binded() else: raise ValueError("Can not create Connection with given ports") def __del__(self): if self._oport() is not None: self._oport().decrease_binded_count() if self._iport() is not None: self._iport().set_free() def contains_port(self, port): return self._iport() == port or self._oport() == port def process(self): """ Send value from output port to input port """ self._iport().pass_value(self._oport().get_value())
<commit_before># -*- coding: utf-8 -*- import ioport import weakref class Connection(object): """ Connection class for IPFBlock Connection binding OPort and IPort of some IPFBlocks """ def __init__(self, oport, iport): # Check port compatibility and free of input port if ioport.is_connect_allowed(oport, iport): self._oport = weakref.ref(oport) self._iport = weakref.ref(iport) self._oport().increase_binded_count() self._iport().set_binded() else: raise ValueError("Can not create Connection with given ports") def __del__(self): self._oport().decrease_binded_count() self._iport().set_free() def contains_port(self, port): return self._iport() == port or self._oport() == port def process(self): """ Send value from output port to input port """ self._iport().pass_value(self._oport().get_value()) <commit_msg>Check weakrefs to ports before using in destructor. Prevent raising an exception when a connection is deleted after its block.<commit_after>
# -*- coding: utf-8 -*- import ioport import weakref class Connection(object): """ Connection class for IPFBlock Connection binding OPort and IPort of some IPFBlocks """ def __init__(self, oport, iport): # Check port compatibility and free of input port if ioport.is_connect_allowed(oport, iport): self._oport = weakref.ref(oport) self._iport = weakref.ref(iport) self._oport().increase_binded_count() self._iport().set_binded() else: raise ValueError("Can not create Connection with given ports") def __del__(self): if self._oport() is not None: self._oport().decrease_binded_count() if self._iport() is not None: self._iport().set_free() def contains_port(self, port): return self._iport() == port or self._oport() == port def process(self): """ Send value from output port to input port """ self._iport().pass_value(self._oport().get_value())
# -*- coding: utf-8 -*- import ioport import weakref class Connection(object): """ Connection class for IPFBlock Connection binding OPort and IPort of some IPFBlocks """ def __init__(self, oport, iport): # Check port compatibility and free of input port if ioport.is_connect_allowed(oport, iport): self._oport = weakref.ref(oport) self._iport = weakref.ref(iport) self._oport().increase_binded_count() self._iport().set_binded() else: raise ValueError("Can not create Connection with given ports") def __del__(self): self._oport().decrease_binded_count() self._iport().set_free() def contains_port(self, port): return self._iport() == port or self._oport() == port def process(self): """ Send value from output port to input port """ self._iport().pass_value(self._oport().get_value()) Check weakrefs to ports before using in destructor. Prevent raising an exception when a connection is deleted after its block.# -*- coding: utf-8 -*- import ioport import weakref class Connection(object): """ Connection class for IPFBlock Connection binding OPort and IPort of some IPFBlocks """ def __init__(self, oport, iport): # Check port compatibility and free of input port if ioport.is_connect_allowed(oport, iport): self._oport = weakref.ref(oport) self._iport = weakref.ref(iport) self._oport().increase_binded_count() self._iport().set_binded() else: raise ValueError("Can not create Connection with given ports") def __del__(self): if self._oport() is not None: self._oport().decrease_binded_count() if self._iport() is not None: self._iport().set_free() def contains_port(self, port): return self._iport() == port or self._oport() == port def process(self): """ Send value from output port to input port """ self._iport().pass_value(self._oport().get_value())
<commit_before># -*- coding: utf-8 -*- import ioport import weakref class Connection(object): """ Connection class for IPFBlock Connection binding OPort and IPort of some IPFBlocks """ def __init__(self, oport, iport): # Check port compatibility and free of input port if ioport.is_connect_allowed(oport, iport): self._oport = weakref.ref(oport) self._iport = weakref.ref(iport) self._oport().increase_binded_count() self._iport().set_binded() else: raise ValueError("Can not create Connection with given ports") def __del__(self): self._oport().decrease_binded_count() self._iport().set_free() def contains_port(self, port): return self._iport() == port or self._oport() == port def process(self): """ Send value from output port to input port """ self._iport().pass_value(self._oport().get_value()) <commit_msg>Check weakrefs to ports before using in destructor. Prevent raising an exception when a connection is deleted after its block.<commit_after># -*- coding: utf-8 -*- import ioport import weakref class Connection(object): """ Connection class for IPFBlock Connection binding OPort and IPort of some IPFBlocks """ def __init__(self, oport, iport): # Check port compatibility and free of input port if ioport.is_connect_allowed(oport, iport): self._oport = weakref.ref(oport) self._iport = weakref.ref(iport) self._oport().increase_binded_count() self._iport().set_binded() else: raise ValueError("Can not create Connection with given ports") def __del__(self): if self._oport() is not None: self._oport().decrease_binded_count() if self._iport() is not None: self._iport().set_free() def contains_port(self, port): return self._iport() == port or self._oport() == port def process(self): """ Send value from output port to input port """ self._iport().pass_value(self._oport().get_value())
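Calling a `weakref.ref` returns `None` once its referent has been garbage-collected, which is exactly what the guarded `__del__` above defends against. A standalone demonstration of that behaviour:

```python
import gc
import weakref


class Port(object):
    pass


port = Port()
ref = weakref.ref(port)
assert ref() is port   # referent alive: calling the ref returns it

del port
gc.collect()
assert ref() is None   # referent collected: guard before dereferencing

if ref() is not None:  # the pattern used in Connection.__del__
    ref()              # only safe to use inside the guard
```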
7fe4342b2b62574230bb5196548fb1d6fb8cac69
src/engine/request_handler.py
src/engine/request_handler.py
import Queue import json import EBQP from . import world class GameRequestHandler: def __init__(self): self.world = None self.responses = { EBQP.new: self.respond_new, } def process(self, request): request_pieces = request.split(EBQP.packet_delimiter, 1) command = request_pieces[0] params = request_pieces[1].strip() if len(request_pieces) > 1 else '' try: json_args = json.loads(params) except Exception as e: return "process:failure:bad json" if command in self.responses: return self.responses[command](json_args) else: return "process:failure:unsupported command" def respond_new(self, args): if 'uid1' not in args or 'uid2' not in args: return 'new:failure:missing uid' uid1 = args['uid1'] uid2 = args['uid2'] self.world = world.World([uid1, uid2]) self.responses = { EBQP.view: self.respond_view, EBQP.move: self.respond_move, } return 'new:success' def respond_view(self, args): return 'view:success:%s' % self.world.to_json() #TODO def respond_move(self, args): return 'move:failure:unimplemented'
import Queue import json import EBQP from . import world from . import types from . import consts class GameRequestHandler: def __init__(self): self.world = None self.responses = { EBQP.new: self.respond_new, } def process(self, request): request_pieces = request.split(EBQP.packet_delimiter, 1) command = request_pieces[0] params = request_pieces[1].strip() if len(request_pieces) > 1 else '' try: json_args = json.loads(params) except Exception as e: return "process:failure:bad json" if command in self.responses: return self.responses[command](json_args) else: return "process:failure:unsupported command" def respond_new(self, args): if 'uid1' not in args or 'uid2' not in args: return 'new:failure:missing uid' uid1 = args['uid1'] uid2 = args['uid2'] self.world = world.World([uid1, uid2]) self.world.add_unit(uid1, types.new_unit('Tank', consts.RED)) self.responses = { EBQP.view: self.respond_view, EBQP.move: self.respond_move, } return 'new:success' def respond_view(self, args): return 'view:success:%s' % self.world.to_json() #TODO def respond_move(self, args): return 'move:failure:unimplemented'
Include one tank on player 1 on game creation
Include one tank on player 1 on game creation
Python
mit
Tactique/game_engine,Tactique/game_engine
import Queue import json import EBQP from . import world class GameRequestHandler: def __init__(self): self.world = None self.responses = { EBQP.new: self.respond_new, } def process(self, request): request_pieces = request.split(EBQP.packet_delimiter, 1) command = request_pieces[0] params = request_pieces[1].strip() if len(request_pieces) > 1 else '' try: json_args = json.loads(params) except Exception as e: return "process:failure:bad json" if command in self.responses: return self.responses[command](json_args) else: return "process:failure:unsupported command" def respond_new(self, args): if 'uid1' not in args or 'uid2' not in args: return 'new:failure:missing uid' uid1 = args['uid1'] uid2 = args['uid2'] self.world = world.World([uid1, uid2]) self.responses = { EBQP.view: self.respond_view, EBQP.move: self.respond_move, } return 'new:success' def respond_view(self, args): return 'view:success:%s' % self.world.to_json() #TODO def respond_move(self, args): return 'move:failure:unimplemented' Include one tank on player 1 on game creation
import Queue import json import EBQP from . import world from . import types from . import consts class GameRequestHandler: def __init__(self): self.world = None self.responses = { EBQP.new: self.respond_new, } def process(self, request): request_pieces = request.split(EBQP.packet_delimiter, 1) command = request_pieces[0] params = request_pieces[1].strip() if len(request_pieces) > 1 else '' try: json_args = json.loads(params) except Exception as e: return "process:failure:bad json" if command in self.responses: return self.responses[command](json_args) else: return "process:failure:unsupported command" def respond_new(self, args): if 'uid1' not in args or 'uid2' not in args: return 'new:failure:missing uid' uid1 = args['uid1'] uid2 = args['uid2'] self.world = world.World([uid1, uid2]) self.world.add_unit(uid1, types.new_unit('Tank', consts.RED)) self.responses = { EBQP.view: self.respond_view, EBQP.move: self.respond_move, } return 'new:success' def respond_view(self, args): return 'view:success:%s' % self.world.to_json() #TODO def respond_move(self, args): return 'move:failure:unimplemented'
<commit_before>import Queue import json import EBQP from . import world class GameRequestHandler: def __init__(self): self.world = None self.responses = { EBQP.new: self.respond_new, } def process(self, request): request_pieces = request.split(EBQP.packet_delimiter, 1) command = request_pieces[0] params = request_pieces[1].strip() if len(request_pieces) > 1 else '' try: json_args = json.loads(params) except Exception as e: return "process:failure:bad json" if command in self.responses: return self.responses[command](json_args) else: return "process:failure:unsupported command" def respond_new(self, args): if 'uid1' not in args or 'uid2' not in args: return 'new:failure:missing uid' uid1 = args['uid1'] uid2 = args['uid2'] self.world = world.World([uid1, uid2]) self.responses = { EBQP.view: self.respond_view, EBQP.move: self.respond_move, } return 'new:success' def respond_view(self, args): return 'view:success:%s' % self.world.to_json() #TODO def respond_move(self, args): return 'move:failure:unimplemented' <commit_msg>Include one tank on player 1 on game creation<commit_after>
import Queue import json import EBQP from . import world from . import types from . import consts class GameRequestHandler: def __init__(self): self.world = None self.responses = { EBQP.new: self.respond_new, } def process(self, request): request_pieces = request.split(EBQP.packet_delimiter, 1) command = request_pieces[0] params = request_pieces[1].strip() if len(request_pieces) > 1 else '' try: json_args = json.loads(params) except Exception as e: return "process:failure:bad json" if command in self.responses: return self.responses[command](json_args) else: return "process:failure:unsupported command" def respond_new(self, args): if 'uid1' not in args or 'uid2' not in args: return 'new:failure:missing uid' uid1 = args['uid1'] uid2 = args['uid2'] self.world = world.World([uid1, uid2]) self.world.add_unit(uid1, types.new_unit('Tank', consts.RED)) self.responses = { EBQP.view: self.respond_view, EBQP.move: self.respond_move, } return 'new:success' def respond_view(self, args): return 'view:success:%s' % self.world.to_json() #TODO def respond_move(self, args): return 'move:failure:unimplemented'
import Queue import json import EBQP from . import world class GameRequestHandler: def __init__(self): self.world = None self.responses = { EBQP.new: self.respond_new, } def process(self, request): request_pieces = request.split(EBQP.packet_delimiter, 1) command = request_pieces[0] params = request_pieces[1].strip() if len(request_pieces) > 1 else '' try: json_args = json.loads(params) except Exception as e: return "process:failure:bad json" if command in self.responses: return self.responses[command](json_args) else: return "process:failure:unsupported command" def respond_new(self, args): if 'uid1' not in args or 'uid2' not in args: return 'new:failure:missing uid' uid1 = args['uid1'] uid2 = args['uid2'] self.world = world.World([uid1, uid2]) self.responses = { EBQP.view: self.respond_view, EBQP.move: self.respond_move, } return 'new:success' def respond_view(self, args): return 'view:success:%s' % self.world.to_json() #TODO def respond_move(self, args): return 'move:failure:unimplemented' Include one tank on player 1 on game creationimport Queue import json import EBQP from . import world from . import types from . import consts class GameRequestHandler: def __init__(self): self.world = None self.responses = { EBQP.new: self.respond_new, } def process(self, request): request_pieces = request.split(EBQP.packet_delimiter, 1) command = request_pieces[0] params = request_pieces[1].strip() if len(request_pieces) > 1 else '' try: json_args = json.loads(params) except Exception as e: return "process:failure:bad json" if command in self.responses: return self.responses[command](json_args) else: return "process:failure:unsupported command" def respond_new(self, args): if 'uid1' not in args or 'uid2' not in args: return 'new:failure:missing uid' uid1 = args['uid1'] uid2 = args['uid2'] self.world = world.World([uid1, uid2]) self.world.add_unit(uid1, types.new_unit('Tank', consts.RED)) self.responses = { EBQP.view: self.respond_view, EBQP.move: self.respond_move, } return 'new:success' def respond_view(self, args): return 'view:success:%s' % self.world.to_json() #TODO def respond_move(self, args): return 'move:failure:unimplemented'
<commit_before>import Queue import json import EBQP from . import world class GameRequestHandler: def __init__(self): self.world = None self.responses = { EBQP.new: self.respond_new, } def process(self, request): request_pieces = request.split(EBQP.packet_delimiter, 1) command = request_pieces[0] params = request_pieces[1].strip() if len(request_pieces) > 1 else '' try: json_args = json.loads(params) except Exception as e: return "process:failure:bad json" if command in self.responses: return self.responses[command](json_args) else: return "process:failure:unsupported command" def respond_new(self, args): if 'uid1' not in args or 'uid2' not in args: return 'new:failure:missing uid' uid1 = args['uid1'] uid2 = args['uid2'] self.world = world.World([uid1, uid2]) self.responses = { EBQP.view: self.respond_view, EBQP.move: self.respond_move, } return 'new:success' def respond_view(self, args): return 'view:success:%s' % self.world.to_json() #TODO def respond_move(self, args): return 'move:failure:unimplemented' <commit_msg>Include one tank on player 1 on game creation<commit_after>import Queue import json import EBQP from . import world from . import types from . import consts class GameRequestHandler: def __init__(self): self.world = None self.responses = { EBQP.new: self.respond_new, } def process(self, request): request_pieces = request.split(EBQP.packet_delimiter, 1) command = request_pieces[0] params = request_pieces[1].strip() if len(request_pieces) > 1 else '' try: json_args = json.loads(params) except Exception as e: return "process:failure:bad json" if command in self.responses: return self.responses[command](json_args) else: return "process:failure:unsupported command" def respond_new(self, args): if 'uid1' not in args or 'uid2' not in args: return 'new:failure:missing uid' uid1 = args['uid1'] uid2 = args['uid2'] self.world = world.World([uid1, uid2]) self.world.add_unit(uid1, types.new_unit('Tank', consts.RED)) self.responses = { EBQP.view: self.respond_view, EBQP.move: self.respond_move, } return 'new:success' def respond_view(self, args): return 'view:success:%s' % self.world.to_json() #TODO def respond_move(self, args): return 'move:failure:unimplemented'
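The request handler above parses `command:json` packets and dispatches through a dict of bound methods, swapping in new handlers once a game exists. A minimal self-contained version of that dispatch idiom; the protocol strings here are invented for illustration, not EBQP's:

```python
import json


class Dispatcher(object):
    def __init__(self):
        self.responses = {'new': self.respond_new}

    def process(self, request):
        # Split "command:json" into its two halves.
        command, _, params = request.partition(':')
        try:
            args = json.loads(params or '{}')
        except ValueError:
            return 'process:failure:bad json'
        handler = self.responses.get(command)
        if handler is None:
            return 'process:failure:unsupported command'
        return handler(args)

    def respond_new(self, args):
        # After setup, only in-game commands are exposed.
        self.responses = {'view': self.respond_view}
        return 'new:success'

    def respond_view(self, args):
        return 'view:success:{}'


d = Dispatcher()
assert d.process('new:{}') == 'new:success'
assert d.process('view:{}') == 'view:success:{}'
assert d.process('quit:{}') == 'process:failure:unsupported command'
```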
0fbe22520657f14b1009a49c7fee0958a3704353
turbasen/settings.py
turbasen/settings.py
class Settings: ENDPOINT_URL = u'http://api.nasjonalturbase.no/' LIMIT = 50
class Settings: ENDPOINT_URL = u'http://api.nasjonalturbase.no/' LIMIT = 20
Set default limit to 20 objects
Set default limit to 20 objects
Python
mit
Turbasen/turbasen.py
class Settings: ENDPOINT_URL = u'http://api.nasjonalturbase.no/' LIMIT = 50 Set default limit to 20 objects
class Settings: ENDPOINT_URL = u'http://api.nasjonalturbase.no/' LIMIT = 20
<commit_before>class Settings: ENDPOINT_URL = u'http://api.nasjonalturbase.no/' LIMIT = 50 <commit_msg>Set default limit to 20 objects<commit_after>
class Settings: ENDPOINT_URL = u'http://api.nasjonalturbase.no/' LIMIT = 20
class Settings: ENDPOINT_URL = u'http://api.nasjonalturbase.no/' LIMIT = 50 Set default limit to 20 objectsclass Settings: ENDPOINT_URL = u'http://api.nasjonalturbase.no/' LIMIT = 20
<commit_before>class Settings: ENDPOINT_URL = u'http://api.nasjonalturbase.no/' LIMIT = 50 <commit_msg>Set default limit to 20 objects<commit_after>class Settings: ENDPOINT_URL = u'http://api.nasjonalturbase.no/' LIMIT = 20
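The `LIMIT` constant above caps how many objects a single API request returns. A hedged sketch of how such a setting typically drives paging against the endpoint; the `limit`/`skip` parameters and the `documents` key are assumptions for illustration, not read from turbasen.py:

```python
import requests


class Settings:
    ENDPOINT_URL = u'http://api.nasjonalturbase.no/'
    LIMIT = 20


def iter_objects(object_type):
    """Yield documents page by page, LIMIT at a time (assumed API shape)."""
    skip = 0
    while True:
        response = requests.get(
            Settings.ENDPOINT_URL + object_type,
            params={'limit': Settings.LIMIT, 'skip': skip},  # assumed params
        )
        documents = response.json().get('documents', [])
        if not documents:
            return
        for document in documents:
            yield document
        skip += Settings.LIMIT
```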
8ea1ee477a8f9f31e2fcb5fb92a02243723c822e
Instanssi/admin_upload/models.py
Instanssi/admin_upload/models.py
# -*- coding: utf-8 -*- from django.db import models from django.contrib.auth.models import User from django.contrib import admin from Instanssi.kompomaatti.models import Event import os.path class UploadedFile(models.Model): event = models.ForeignKey(Event, verbose_name=u'Tapahtuma') user = models.ForeignKey(User, verbose_name=u'Käyttäjä') description = models.TextField(u'Kuvaus', help_text=u'Lyhyt kuvaus siitä, mihin/missä tiedostoa käytetään.', blank=True) file = models.FileField(u'Tiedosto', upload_to='files/') date = models.DateTimeField(u'Aika') def __unicode__(self): return self.file.name + ' by ' + self.user.username class Meta: verbose_name=u"tiedosto" verbose_name_plural=u"tiedostot" def name(self): return os.path.basename(self.file.name) try: admin.site.register(UploadedFile) except: pass
# -*- coding: utf-8 -*- from django.db import models from django.contrib.auth.models import User from django.contrib import admin from Instanssi.kompomaatti.models import Event import os.path class UploadedFile(models.Model): event = models.ForeignKey(Event, verbose_name=u'Tapahtuma') user = models.ForeignKey(User, verbose_name=u'Käyttäjä') description = models.TextField(u'Kuvaus', help_text=u'Lyhyt kuvaus siitä, mihin/missä tiedostoa käytetään.', blank=True) file = models.FileField(u'Tiedosto', upload_to='files/') date = models.DateTimeField(u'Aika') def __unicode__(self): return self.file.name + ' by ' + self.user.username class Meta: verbose_name=u"tiedosto" verbose_name_plural=u"tiedostot" def name(self): return os.path.basename(self.file.name) def save(self, *args, **kwargs): # Delete old file when editing try: this = UploadedFile.objects.get(id=self.id) if this.file != self.file: this.file.delete(save=False) except: pass # Continue with normal save super(UploadedFile, self).save(*args, **kwargs) try: admin.site.register(UploadedFile) except: pass
Delete old files when modified
admin_upload: Delete old files when modified
Python
mit
Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org
# -*- coding: utf-8 -*- from django.db import models from django.contrib.auth.models import User from django.contrib import admin from Instanssi.kompomaatti.models import Event import os.path class UploadedFile(models.Model): event = models.ForeignKey(Event, verbose_name=u'Tapahtuma') user = models.ForeignKey(User, verbose_name=u'Käyttäjä') description = models.TextField(u'Kuvaus', help_text=u'Lyhyt kuvaus siitä, mihin/missä tiedostoa käytetään.', blank=True) file = models.FileField(u'Tiedosto', upload_to='files/') date = models.DateTimeField(u'Aika') def __unicode__(self): return self.file.name + ' by ' + self.user.username class Meta: verbose_name=u"tiedosto" verbose_name_plural=u"tiedostot" def name(self): return os.path.basename(self.file.name) try: admin.site.register(UploadedFile) except: passadmin_upload: Delete old files when modified
# -*- coding: utf-8 -*- from django.db import models from django.contrib.auth.models import User from django.contrib import admin from Instanssi.kompomaatti.models import Event import os.path class UploadedFile(models.Model): event = models.ForeignKey(Event, verbose_name=u'Tapahtuma') user = models.ForeignKey(User, verbose_name=u'Käyttäjä') description = models.TextField(u'Kuvaus', help_text=u'Lyhyt kuvaus siitä, mihin/missä tiedostoa käytetään.', blank=True) file = models.FileField(u'Tiedosto', upload_to='files/') date = models.DateTimeField(u'Aika') def __unicode__(self): return self.file.name + ' by ' + self.user.username class Meta: verbose_name=u"tiedosto" verbose_name_plural=u"tiedostot" def name(self): return os.path.basename(self.file.name) def save(self, *args, **kwargs): # Delete old file when editing try: this = UploadedFile.objects.get(id=self.id) if this.file != self.file: this.file.delete(save=False) except: pass # Continue with normal save super(UploadedFile, self).save(*args, **kwargs) try: admin.site.register(UploadedFile) except: pass
<commit_before># -*- coding: utf-8 -*- from django.db import models from django.contrib.auth.models import User from django.contrib import admin from Instanssi.kompomaatti.models import Event import os.path class UploadedFile(models.Model): event = models.ForeignKey(Event, verbose_name=u'Tapahtuma') user = models.ForeignKey(User, verbose_name=u'Käyttäjä') description = models.TextField(u'Kuvaus', help_text=u'Lyhyt kuvaus siitä, mihin/missä tiedostoa käytetään.', blank=True) file = models.FileField(u'Tiedosto', upload_to='files/') date = models.DateTimeField(u'Aika') def __unicode__(self): return self.file.name + ' by ' + self.user.username class Meta: verbose_name=u"tiedosto" verbose_name_plural=u"tiedostot" def name(self): return os.path.basename(self.file.name) try: admin.site.register(UploadedFile) except: pass<commit_msg>admin_upload: Delete old files when modified<commit_after>
# -*- coding: utf-8 -*- from django.db import models from django.contrib.auth.models import User from django.contrib import admin from Instanssi.kompomaatti.models import Event import os.path class UploadedFile(models.Model): event = models.ForeignKey(Event, verbose_name=u'Tapahtuma') user = models.ForeignKey(User, verbose_name=u'Käyttäjä') description = models.TextField(u'Kuvaus', help_text=u'Lyhyt kuvaus siitä, mihin/missä tiedostoa käytetään.', blank=True) file = models.FileField(u'Tiedosto', upload_to='files/') date = models.DateTimeField(u'Aika') def __unicode__(self): return self.file.name + ' by ' + self.user.username class Meta: verbose_name=u"tiedosto" verbose_name_plural=u"tiedostot" def name(self): return os.path.basename(self.file.name) def save(self, *args, **kwargs): # Delete old file when editing try: this = UploadedFile.objects.get(id=self.id) if this.file != self.file: this.file.delete(save=False) except: pass # Continue with normal save super(UploadedFile, self).save(*args, **kwargs) try: admin.site.register(UploadedFile) except: pass
# -*- coding: utf-8 -*- from django.db import models from django.contrib.auth.models import User from django.contrib import admin from Instanssi.kompomaatti.models import Event import os.path class UploadedFile(models.Model): event = models.ForeignKey(Event, verbose_name=u'Tapahtuma') user = models.ForeignKey(User, verbose_name=u'Käyttäjä') description = models.TextField(u'Kuvaus', help_text=u'Lyhyt kuvaus siitä, mihin/missä tiedostoa käytetään.', blank=True) file = models.FileField(u'Tiedosto', upload_to='files/') date = models.DateTimeField(u'Aika') def __unicode__(self): return self.file.name + ' by ' + self.user.username class Meta: verbose_name=u"tiedosto" verbose_name_plural=u"tiedostot" def name(self): return os.path.basename(self.file.name) try: admin.site.register(UploadedFile) except: passadmin_upload: Delete old files when modified# -*- coding: utf-8 -*- from django.db import models from django.contrib.auth.models import User from django.contrib import admin from Instanssi.kompomaatti.models import Event import os.path class UploadedFile(models.Model): event = models.ForeignKey(Event, verbose_name=u'Tapahtuma') user = models.ForeignKey(User, verbose_name=u'Käyttäjä') description = models.TextField(u'Kuvaus', help_text=u'Lyhyt kuvaus siitä, mihin/missä tiedostoa käytetään.', blank=True) file = models.FileField(u'Tiedosto', upload_to='files/') date = models.DateTimeField(u'Aika') def __unicode__(self): return self.file.name + ' by ' + self.user.username class Meta: verbose_name=u"tiedosto" verbose_name_plural=u"tiedostot" def name(self): return os.path.basename(self.file.name) def save(self, *args, **kwargs): # Delete old file when editing try: this = UploadedFile.objects.get(id=self.id) if this.file != self.file: this.file.delete(save=False) except: pass # Continue with normal save super(UploadedFile, self).save(*args, **kwargs) try: admin.site.register(UploadedFile) except: pass
<commit_before># -*- coding: utf-8 -*- from django.db import models from django.contrib.auth.models import User from django.contrib import admin from Instanssi.kompomaatti.models import Event import os.path class UploadedFile(models.Model): event = models.ForeignKey(Event, verbose_name=u'Tapahtuma') user = models.ForeignKey(User, verbose_name=u'Käyttäjä') description = models.TextField(u'Kuvaus', help_text=u'Lyhyt kuvaus siitä, mihin/missä tiedostoa käytetään.', blank=True) file = models.FileField(u'Tiedosto', upload_to='files/') date = models.DateTimeField(u'Aika') def __unicode__(self): return self.file.name + ' by ' + self.user.username class Meta: verbose_name=u"tiedosto" verbose_name_plural=u"tiedostot" def name(self): return os.path.basename(self.file.name) try: admin.site.register(UploadedFile) except: pass<commit_msg>admin_upload: Delete old files when modified<commit_after># -*- coding: utf-8 -*- from django.db import models from django.contrib.auth.models import User from django.contrib import admin from Instanssi.kompomaatti.models import Event import os.path class UploadedFile(models.Model): event = models.ForeignKey(Event, verbose_name=u'Tapahtuma') user = models.ForeignKey(User, verbose_name=u'Käyttäjä') description = models.TextField(u'Kuvaus', help_text=u'Lyhyt kuvaus siitä, mihin/missä tiedostoa käytetään.', blank=True) file = models.FileField(u'Tiedosto', upload_to='files/') date = models.DateTimeField(u'Aika') def __unicode__(self): return self.file.name + ' by ' + self.user.username class Meta: verbose_name=u"tiedosto" verbose_name_plural=u"tiedostot" def name(self): return os.path.basename(self.file.name) def save(self, *args, **kwargs): # Delete old file when editing try: this = UploadedFile.objects.get(id=self.id) if this.file != self.file: this.file.delete(save=False) except: pass # Continue with normal save super(UploadedFile, self).save(*args, **kwargs) try: admin.site.register(UploadedFile) except: pass
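The `save` override above works, but its bare `except` also swallows unrelated database errors. A slightly tightened variant of the same delete-old-file pattern, catching only the missing-row case; this is a sketch against the `UploadedFile` model shown above, not the project's code:

```python
def save(self, *args, **kwargs):
    # Delete the previously stored file when a new upload replaces it.
    if self.id:
        try:
            old = UploadedFile.objects.get(id=self.id)
        except UploadedFile.DoesNotExist:
            old = None
        if old is not None and old.file != self.file:
            old.file.delete(save=False)
    super(UploadedFile, self).save(*args, **kwargs)
```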
34c65649fc017c087aa229863bbb2c95f1be4134
tests/test_same_origin.py
tests/test_same_origin.py
from http.cookies import SimpleCookie from django.conf import settings from sockjs.tornado.session import ConnectionInfo from swampdragon.connections.sockjs_connection import SubscriberConnection from swampdragon.testing.dragon_testcase import DragonTestCaseAsync import uuid class TestSession(object): def __init__(self, is_open=True): self.session_id = uuid.uuid4().hex self.is_closed = is_open is False self.messages = [] def send_message(self, message, binary=False): self.messages.append(message) def close(self, code=3000, message='Connection closed'): self.is_closed = True class TestSameOrigin(DragonTestCaseAsync): def test_same_origin(self): settings.DRAGON_URL = self.host settings.SWAMP_DRAGON_SAME_ORIGIN = True response = self.fetch('/settings.js') cookie = SimpleCookie(response.headers['Set-Cookie']) request = ConnectionInfo('127.0.0.1', cookies=cookie, arguments={}, headers={}, path='/data/983/9cz4ridg/websocket') session = TestSession() self.connection = SubscriberConnection(session) self.connection.on_open(request) self.assertFalse(self.connection.is_closed)
try: from http.cookies import SimpleCookie except ImportError: from Cookie import SimpleCookie from django.conf import settings from sockjs.tornado.session import ConnectionInfo from swampdragon.connections.sockjs_connection import SubscriberConnection from swampdragon.testing.dragon_testcase import DragonTestCaseAsync import uuid class TestSession(object): def __init__(self, is_open=True): self.session_id = uuid.uuid4().hex self.is_closed = is_open is False self.messages = [] def send_message(self, message, binary=False): self.messages.append(message) def close(self, code=3000, message='Connection closed'): self.is_closed = True class TestSameOrigin(DragonTestCaseAsync): def test_same_origin(self): settings.DRAGON_URL = self.host settings.SWAMP_DRAGON_SAME_ORIGIN = True response = self.fetch('/settings.js') cookie = SimpleCookie(response.headers['Set-Cookie']) request = ConnectionInfo('127.0.0.1', cookies=cookie, arguments={}, headers={}, path='/data/983/9cz4ridg/websocket') session = TestSession() self.connection = SubscriberConnection(session) self.connection.on_open(request) self.assertFalse(self.connection.is_closed)
Fix import error of SimpleCookie for python 2.7
Fix import error of SimpleCookie for python 2.7 `Cookie` was renamed to `http.cookies` in Python 3
Python
bsd-3-clause
faulkner/swampdragon,Manuel4131/swampdragon,d9pouces/swampdragon,sahlinet/swampdragon,seclinch/swampdragon,jonashagstedt/swampdragon,Manuel4131/swampdragon,Manuel4131/swampdragon,michael-k/swampdragon,boris-savic/swampdragon,bastianh/swampdragon,bastianh/swampdragon,boris-savic/swampdragon,seclinch/swampdragon,michael-k/swampdragon,sahlinet/swampdragon,d9pouces/swampdragon,aexeagmbh/swampdragon,sahlinet/swampdragon,bastianh/swampdragon,michael-k/swampdragon,faulkner/swampdragon,faulkner/swampdragon,denizs/swampdragon,aexeagmbh/swampdragon,d9pouces/swampdragon,seclinch/swampdragon,boris-savic/swampdragon,jonashagstedt/swampdragon,aexeagmbh/swampdragon,denizs/swampdragon,denizs/swampdragon,jonashagstedt/swampdragon
from http.cookies import SimpleCookie from django.conf import settings from sockjs.tornado.session import ConnectionInfo from swampdragon.connections.sockjs_connection import SubscriberConnection from swampdragon.testing.dragon_testcase import DragonTestCaseAsync import uuid class TestSession(object): def __init__(self, is_open=True): self.session_id = uuid.uuid4().hex self.is_closed = is_open is False self.messages = [] def send_message(self, message, binary=False): self.messages.append(message) def close(self, code=3000, message='Connection closed'): self.is_closed = True class TestSameOrigin(DragonTestCaseAsync): def test_same_origin(self): settings.DRAGON_URL = self.host settings.SWAMP_DRAGON_SAME_ORIGIN = True response = self.fetch('/settings.js') cookie = SimpleCookie(response.headers['Set-Cookie']) request = ConnectionInfo('127.0.0.1', cookies=cookie, arguments={}, headers={}, path='/data/983/9cz4ridg/websocket') session = TestSession() self.connection = SubscriberConnection(session) self.connection.on_open(request) self.assertFalse(self.connection.is_closed) Fix import error of SimpleCookie for python 2.7 `Cookie` was renamed to `http.cookies` in Python 3
try: from http.cookies import SimpleCookie except ImportError: from Cookie import SimpleCookie from django.conf import settings from sockjs.tornado.session import ConnectionInfo from swampdragon.connections.sockjs_connection import SubscriberConnection from swampdragon.testing.dragon_testcase import DragonTestCaseAsync import uuid class TestSession(object): def __init__(self, is_open=True): self.session_id = uuid.uuid4().hex self.is_closed = is_open is False self.messages = [] def send_message(self, message, binary=False): self.messages.append(message) def close(self, code=3000, message='Connection closed'): self.is_closed = True class TestSameOrigin(DragonTestCaseAsync): def test_same_origin(self): settings.DRAGON_URL = self.host settings.SWAMP_DRAGON_SAME_ORIGIN = True response = self.fetch('/settings.js') cookie = SimpleCookie(response.headers['Set-Cookie']) request = ConnectionInfo('127.0.0.1', cookies=cookie, arguments={}, headers={}, path='/data/983/9cz4ridg/websocket') session = TestSession() self.connection = SubscriberConnection(session) self.connection.on_open(request) self.assertFalse(self.connection.is_closed)
<commit_before>from http.cookies import SimpleCookie from django.conf import settings from sockjs.tornado.session import ConnectionInfo from swampdragon.connections.sockjs_connection import SubscriberConnection from swampdragon.testing.dragon_testcase import DragonTestCaseAsync import uuid class TestSession(object): def __init__(self, is_open=True): self.session_id = uuid.uuid4().hex self.is_closed = is_open is False self.messages = [] def send_message(self, message, binary=False): self.messages.append(message) def close(self, code=3000, message='Connection closed'): self.is_closed = True class TestSameOrigin(DragonTestCaseAsync): def test_same_origin(self): settings.DRAGON_URL = self.host settings.SWAMP_DRAGON_SAME_ORIGIN = True response = self.fetch('/settings.js') cookie = SimpleCookie(response.headers['Set-Cookie']) request = ConnectionInfo('127.0.0.1', cookies=cookie, arguments={}, headers={}, path='/data/983/9cz4ridg/websocket') session = TestSession() self.connection = SubscriberConnection(session) self.connection.on_open(request) self.assertFalse(self.connection.is_closed) <commit_msg>Fix import error of SimpleCookie for python 2.7 `Cookie` was renamed to `http.cookies` in Python 3<commit_after>
try: from http.cookies import SimpleCookie except ImportError: from Cookie import SimpleCookie from django.conf import settings from sockjs.tornado.session import ConnectionInfo from swampdragon.connections.sockjs_connection import SubscriberConnection from swampdragon.testing.dragon_testcase import DragonTestCaseAsync import uuid class TestSession(object): def __init__(self, is_open=True): self.session_id = uuid.uuid4().hex self.is_closed = is_open is False self.messages = [] def send_message(self, message, binary=False): self.messages.append(message) def close(self, code=3000, message='Connection closed'): self.is_closed = True class TestSameOrigin(DragonTestCaseAsync): def test_same_origin(self): settings.DRAGON_URL = self.host settings.SWAMP_DRAGON_SAME_ORIGIN = True response = self.fetch('/settings.js') cookie = SimpleCookie(response.headers['Set-Cookie']) request = ConnectionInfo('127.0.0.1', cookies=cookie, arguments={}, headers={}, path='/data/983/9cz4ridg/websocket') session = TestSession() self.connection = SubscriberConnection(session) self.connection.on_open(request) self.assertFalse(self.connection.is_closed)
from http.cookies import SimpleCookie from django.conf import settings from sockjs.tornado.session import ConnectionInfo from swampdragon.connections.sockjs_connection import SubscriberConnection from swampdragon.testing.dragon_testcase import DragonTestCaseAsync import uuid class TestSession(object): def __init__(self, is_open=True): self.session_id = uuid.uuid4().hex self.is_closed = is_open is False self.messages = [] def send_message(self, message, binary=False): self.messages.append(message) def close(self, code=3000, message='Connection closed'): self.is_closed = True class TestSameOrigin(DragonTestCaseAsync): def test_same_origin(self): settings.DRAGON_URL = self.host settings.SWAMP_DRAGON_SAME_ORIGIN = True response = self.fetch('/settings.js') cookie = SimpleCookie(response.headers['Set-Cookie']) request = ConnectionInfo('127.0.0.1', cookies=cookie, arguments={}, headers={}, path='/data/983/9cz4ridg/websocket') session = TestSession() self.connection = SubscriberConnection(session) self.connection.on_open(request) self.assertFalse(self.connection.is_closed) Fix import error of SimpleCookie for python 2.7 `Cookie` was renamed to `http.cookies` in Python 3try: from http.cookies import SimpleCookie except ImportError: from Cookie import SimpleCookie from django.conf import settings from sockjs.tornado.session import ConnectionInfo from swampdragon.connections.sockjs_connection import SubscriberConnection from swampdragon.testing.dragon_testcase import DragonTestCaseAsync import uuid class TestSession(object): def __init__(self, is_open=True): self.session_id = uuid.uuid4().hex self.is_closed = is_open is False self.messages = [] def send_message(self, message, binary=False): self.messages.append(message) def close(self, code=3000, message='Connection closed'): self.is_closed = True class TestSameOrigin(DragonTestCaseAsync): def test_same_origin(self): settings.DRAGON_URL = self.host settings.SWAMP_DRAGON_SAME_ORIGIN = True response = self.fetch('/settings.js') cookie = SimpleCookie(response.headers['Set-Cookie']) request = ConnectionInfo('127.0.0.1', cookies=cookie, arguments={}, headers={}, path='/data/983/9cz4ridg/websocket') session = TestSession() self.connection = SubscriberConnection(session) self.connection.on_open(request) self.assertFalse(self.connection.is_closed)
<commit_before>from http.cookies import SimpleCookie from django.conf import settings from sockjs.tornado.session import ConnectionInfo from swampdragon.connections.sockjs_connection import SubscriberConnection from swampdragon.testing.dragon_testcase import DragonTestCaseAsync import uuid class TestSession(object): def __init__(self, is_open=True): self.session_id = uuid.uuid4().hex self.is_closed = is_open is False self.messages = [] def send_message(self, message, binary=False): self.messages.append(message) def close(self, code=3000, message='Connection closed'): self.is_closed = True class TestSameOrigin(DragonTestCaseAsync): def test_same_origin(self): settings.DRAGON_URL = self.host settings.SWAMP_DRAGON_SAME_ORIGIN = True response = self.fetch('/settings.js') cookie = SimpleCookie(response.headers['Set-Cookie']) request = ConnectionInfo('127.0.0.1', cookies=cookie, arguments={}, headers={}, path='/data/983/9cz4ridg/websocket') session = TestSession() self.connection = SubscriberConnection(session) self.connection.on_open(request) self.assertFalse(self.connection.is_closed) <commit_msg>Fix import error of SimpleCookie for python 2.7 `Cookie` was renamed to `http.cookies` in Python 3<commit_after>try: from http.cookies import SimpleCookie except ImportError: from Cookie import SimpleCookie from django.conf import settings from sockjs.tornado.session import ConnectionInfo from swampdragon.connections.sockjs_connection import SubscriberConnection from swampdragon.testing.dragon_testcase import DragonTestCaseAsync import uuid class TestSession(object): def __init__(self, is_open=True): self.session_id = uuid.uuid4().hex self.is_closed = is_open is False self.messages = [] def send_message(self, message, binary=False): self.messages.append(message) def close(self, code=3000, message='Connection closed'): self.is_closed = True class TestSameOrigin(DragonTestCaseAsync): def test_same_origin(self): settings.DRAGON_URL = self.host settings.SWAMP_DRAGON_SAME_ORIGIN = True response = self.fetch('/settings.js') cookie = SimpleCookie(response.headers['Set-Cookie']) request = ConnectionInfo('127.0.0.1', cookies=cookie, arguments={}, headers={}, path='/data/983/9cz4ridg/websocket') session = TestSession() self.connection = SubscriberConnection(session) self.connection.on_open(request) self.assertFalse(self.connection.is_closed)
95220c99ae6062fed7d4211c67b8dffc031f4c7c
tests/versioning/tests.py
tests/versioning/tests.py
from __future__ import absolute_import import os.path import pytest import subprocess from django.conf import settings from raven.versioning import fetch_git_sha, fetch_package_version from raven.utils import six def has_git_requirements(): return os.path.exists(os.path.join(settings.PROJECT_ROOT, '.git', 'refs', 'heads', 'master')) @pytest.mark.skipif('not has_git_requirements()') def test_fetch_git_sha(): result = fetch_git_sha(settings.PROJECT_ROOT) assert result is not None assert len(result) == 40 assert isinstance(result, six.string_types) assert result == subprocess.check_output( 'git rev-parse --verify HEAD', shell=True, cwd=settings.PROJECT_ROOT ).decode('latin1').strip() def test_fetch_package_version(): result = fetch_package_version('raven') assert result is not None assert isinstance(result, six.string_types)
from __future__ import absolute_import import os.path import pytest import subprocess from django.conf import settings from raven.versioning import fetch_git_sha, fetch_package_version from raven.utils import six def has_git_requirements(): return os.path.exists(os.path.join(settings.PROJECT_ROOT, '.git', 'refs', 'heads', 'master')) # Python 2.6 does not contain subprocess.check_output def check_output(cmd, **kwargs): return subprocess.Popen( cmd, stdout=subprocess.PIPE, **kwargs ).communicate()[0] @pytest.mark.skipif('not has_git_requirements()') def test_fetch_git_sha(): result = fetch_git_sha(settings.PROJECT_ROOT) assert result is not None assert len(result) == 40 assert isinstance(result, six.string_types) assert result == check_output( 'git rev-parse --verify HEAD', shell=True, cwd=settings.PROJECT_ROOT ).decode('latin1').strip() def test_fetch_package_version(): result = fetch_package_version('raven') assert result is not None assert isinstance(result, six.string_types)
Remove use of check_output (not in Py2.6)
Remove use of check_output (not in Py2.6)
Python
bsd-3-clause
johansteffner/raven-python,johansteffner/raven-python,getsentry/raven-python,akalipetis/raven-python,ewdurbin/raven-python,ewdurbin/raven-python,danriti/raven-python,getsentry/raven-python,getsentry/raven-python,johansteffner/raven-python,ewdurbin/raven-python,akalipetis/raven-python,danriti/raven-python,danriti/raven-python,akalipetis/raven-python
from __future__ import absolute_import import os.path import pytest import subprocess from django.conf import settings from raven.versioning import fetch_git_sha, fetch_package_version from raven.utils import six def has_git_requirements(): return os.path.exists(os.path.join(settings.PROJECT_ROOT, '.git', 'refs', 'heads', 'master')) @pytest.mark.skipif('not has_git_requirements()') def test_fetch_git_sha(): result = fetch_git_sha(settings.PROJECT_ROOT) assert result is not None assert len(result) == 40 assert isinstance(result, six.string_types) assert result == subprocess.check_output( 'git rev-parse --verify HEAD', shell=True, cwd=settings.PROJECT_ROOT ).decode('latin1').strip() def test_fetch_package_version(): result = fetch_package_version('raven') assert result is not None assert isinstance(result, six.string_types) Remove use of check_output (not in Py2.6)
from __future__ import absolute_import import os.path import pytest import subprocess from django.conf import settings from raven.versioning import fetch_git_sha, fetch_package_version from raven.utils import six def has_git_requirements(): return os.path.exists(os.path.join(settings.PROJECT_ROOT, '.git', 'refs', 'heads', 'master')) # Python 2.6 does not contain subprocess.check_output def check_output(cmd, **kwargs): return subprocess.Popen( cmd, stdout=subprocess.PIPE, **kwargs ).communicate()[0] @pytest.mark.skipif('not has_git_requirements()') def test_fetch_git_sha(): result = fetch_git_sha(settings.PROJECT_ROOT) assert result is not None assert len(result) == 40 assert isinstance(result, six.string_types) assert result == check_output( 'git rev-parse --verify HEAD', shell=True, cwd=settings.PROJECT_ROOT ).decode('latin1').strip() def test_fetch_package_version(): result = fetch_package_version('raven') assert result is not None assert isinstance(result, six.string_types)
<commit_before>from __future__ import absolute_import import os.path import pytest import subprocess from django.conf import settings from raven.versioning import fetch_git_sha, fetch_package_version from raven.utils import six def has_git_requirements(): return os.path.exists(os.path.join(settings.PROJECT_ROOT, '.git', 'refs', 'heads', 'master')) @pytest.mark.skipif('not has_git_requirements()') def test_fetch_git_sha(): result = fetch_git_sha(settings.PROJECT_ROOT) assert result is not None assert len(result) == 40 assert isinstance(result, six.string_types) assert result == subprocess.check_output( 'git rev-parse --verify HEAD', shell=True, cwd=settings.PROJECT_ROOT ).decode('latin1').strip() def test_fetch_package_version(): result = fetch_package_version('raven') assert result is not None assert isinstance(result, six.string_types) <commit_msg>Remove use of check_output (not in Py2.6)<commit_after>
from __future__ import absolute_import import os.path import pytest import subprocess from django.conf import settings from raven.versioning import fetch_git_sha, fetch_package_version from raven.utils import six def has_git_requirements(): return os.path.exists(os.path.join(settings.PROJECT_ROOT, '.git', 'refs', 'heads', 'master')) # Python 2.6 does not contain subprocess.check_output def check_output(cmd, **kwargs): return subprocess.Popen( cmd, stdout=subprocess.PIPE, **kwargs ).communicate()[0] @pytest.mark.skipif('not has_git_requirements()') def test_fetch_git_sha(): result = fetch_git_sha(settings.PROJECT_ROOT) assert result is not None assert len(result) == 40 assert isinstance(result, six.string_types) assert result == check_output( 'git rev-parse --verify HEAD', shell=True, cwd=settings.PROJECT_ROOT ).decode('latin1').strip() def test_fetch_package_version(): result = fetch_package_version('raven') assert result is not None assert isinstance(result, six.string_types)
from __future__ import absolute_import import os.path import pytest import subprocess from django.conf import settings from raven.versioning import fetch_git_sha, fetch_package_version from raven.utils import six def has_git_requirements(): return os.path.exists(os.path.join(settings.PROJECT_ROOT, '.git', 'refs', 'heads', 'master')) @pytest.mark.skipif('not has_git_requirements()') def test_fetch_git_sha(): result = fetch_git_sha(settings.PROJECT_ROOT) assert result is not None assert len(result) == 40 assert isinstance(result, six.string_types) assert result == subprocess.check_output( 'git rev-parse --verify HEAD', shell=True, cwd=settings.PROJECT_ROOT ).decode('latin1').strip() def test_fetch_package_version(): result = fetch_package_version('raven') assert result is not None assert isinstance(result, six.string_types) Remove use of check_output (not in Py2.6)from __future__ import absolute_import import os.path import pytest import subprocess from django.conf import settings from raven.versioning import fetch_git_sha, fetch_package_version from raven.utils import six def has_git_requirements(): return os.path.exists(os.path.join(settings.PROJECT_ROOT, '.git', 'refs', 'heads', 'master')) # Python 2.6 does not contain subprocess.check_output def check_output(cmd, **kwargs): return subprocess.Popen( cmd, stdout=subprocess.PIPE, **kwargs ).communicate()[0] @pytest.mark.skipif('not has_git_requirements()') def test_fetch_git_sha(): result = fetch_git_sha(settings.PROJECT_ROOT) assert result is not None assert len(result) == 40 assert isinstance(result, six.string_types) assert result == check_output( 'git rev-parse --verify HEAD', shell=True, cwd=settings.PROJECT_ROOT ).decode('latin1').strip() def test_fetch_package_version(): result = fetch_package_version('raven') assert result is not None assert isinstance(result, six.string_types)
<commit_before>from __future__ import absolute_import import os.path import pytest import subprocess from django.conf import settings from raven.versioning import fetch_git_sha, fetch_package_version from raven.utils import six def has_git_requirements(): return os.path.exists(os.path.join(settings.PROJECT_ROOT, '.git', 'refs', 'heads', 'master')) @pytest.mark.skipif('not has_git_requirements()') def test_fetch_git_sha(): result = fetch_git_sha(settings.PROJECT_ROOT) assert result is not None assert len(result) == 40 assert isinstance(result, six.string_types) assert result == subprocess.check_output( 'git rev-parse --verify HEAD', shell=True, cwd=settings.PROJECT_ROOT ).decode('latin1').strip() def test_fetch_package_version(): result = fetch_package_version('raven') assert result is not None assert isinstance(result, six.string_types) <commit_msg>Remove use of check_output (not in Py2.6)<commit_after>from __future__ import absolute_import import os.path import pytest import subprocess from django.conf import settings from raven.versioning import fetch_git_sha, fetch_package_version from raven.utils import six def has_git_requirements(): return os.path.exists(os.path.join(settings.PROJECT_ROOT, '.git', 'refs', 'heads', 'master')) # Python 2.6 does not contain subprocess.check_output def check_output(cmd, **kwargs): return subprocess.Popen( cmd, stdout=subprocess.PIPE, **kwargs ).communicate()[0] @pytest.mark.skipif('not has_git_requirements()') def test_fetch_git_sha(): result = fetch_git_sha(settings.PROJECT_ROOT) assert result is not None assert len(result) == 40 assert isinstance(result, six.string_types) assert result == check_output( 'git rev-parse --verify HEAD', shell=True, cwd=settings.PROJECT_ROOT ).decode('latin1').strip() def test_fetch_package_version(): result = fetch_package_version('raven') assert result is not None assert isinstance(result, six.string_types)
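The compatibility shim in the record above mirrors `subprocess.check_output` closely enough for the test, but unlike the real function it never inspects the exit status, so a failing `git rev-parse` would surface as an empty string rather than an error. A minimal sketch of a closer Python 2.6 stand-in (the name `check_output_compat` is hypothetical):

```python
import subprocess

def check_output_compat(cmd, **kwargs):
    # subprocess.check_output only exists from Python 2.7 onwards;
    # Popen/communicate is available on 2.6 as well.
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, **kwargs)
    out, _ = proc.communicate()
    if proc.returncode != 0:
        # Match check_output's behaviour on a non-zero exit code.
        raise subprocess.CalledProcessError(proc.returncode, cmd)
    return out
```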
072526a6ec1794edc0f729f2ecb66c47ed38abb9
harmony/extensions/rng.py
harmony/extensions/rng.py
import random from discord.ext import commands class RNG: def __init__(self, bot): self.bot = bot @commands.command() async def roll(self, dice: str = None): """Roll some dice. Keyword arguments: dice -- number of dice (X) and faces (Y) in the format XdY """ if not dice: await self.bot.say('Usage: !roll XdY') return try: num_dice, num_faces = map(int, dice.split('d')) except Exception: await self.bot.say('Format is XdY') return if num_dice > 20 or num_faces > 1000: await self.bot.say('Max 20 dice and 1000 faces') return if num_dice < 1 or num_faces < 1: await self.bot.say('Stick to positive numbers') return total = sum((random.randrange(1, num_faces) for _ in range(int(num_dice)))) await self.bot.say(str(total)) @commands.command() async def choose(self, *choices: str): """ Choose between the options Keyword arguments: choices -- Space separated list of options """ await self.bot.say(random.choice(choices)) def setup(bot): bot.add_cog(RNG(bot))
import random from discord.ext import commands class RNG: def __init__(self, bot): self.bot = bot @commands.command() async def roll(self, dice: str): """Roll some dice. Keyword arguments: dice -- number of dice (X) and faces (Y) in the format XdY """ try: num_dice, num_faces = map(int, dice.split('d')) except Exception: await self.bot.say('Format is XdY!') return rolls = [random.randint(1, num_faces) for _ in range(num_dice)] await self.bot.say(', '.join(map(str, rolls)) + ' (total {})'.format(sum(rolls))) @commands.command() async def choose(self, *choices: str): """ Choose between the options Keyword arguments: choices -- Space separated list of options """ await self.bot.say(random.choice(choices)) def setup(bot): bot.add_cog(RNG(bot))
Make roll better and worse
Make roll better and worse
Python
apache-2.0
knyghty/harmony
import random from discord.ext import commands class RNG: def __init__(self, bot): self.bot = bot @commands.command() async def roll(self, dice: str = None): """Roll some dice. Keyword arguments: dice -- number of dice (X) and faces (Y) in the format XdY """ if not dice: await self.bot.say('Usage: !roll XdY') return try: num_dice, num_faces = map(int, dice.split('d')) except Exception: await self.bot.say('Format is XdY') return if num_dice > 20 or num_faces > 1000: await self.bot.say('Max 20 dice and 1000 faces') return if num_dice < 1 or num_faces < 1: await self.bot.say('Stick to positive numbers') return total = sum((random.randrange(1, num_faces) for _ in range(int(num_dice)))) await self.bot.say(str(total)) @commands.command() async def choose(self, *choices: str): """ Choose between the options Keyword arguments: choices -- Space separated list of options """ await self.bot.say(random.choice(choices)) def setup(bot): bot.add_cog(RNG(bot)) Make roll better and worse
import random from discord.ext import commands class RNG: def __init__(self, bot): self.bot = bot @commands.command() async def roll(self, dice: str): """Roll some dice. Keyword arguments: dice -- number of dice (X) and faces (Y) in the format XdY """ try: num_dice, num_faces = map(int, dice.split('d')) except Exception: await self.bot.say('Format is XdY!') return rolls = [random.randint(1, num_faces) for _ in range(num_dice)] await self.bot.say(', '.join(map(str, rolls)) + ' (total {})'.format(sum(rolls))) @commands.command() async def choose(self, *choices: str): """ Choose between the options Keyword arguments: choices -- Space separated list of options """ await self.bot.say(random.choice(choices)) def setup(bot): bot.add_cog(RNG(bot))
<commit_before>import random from discord.ext import commands class RNG: def __init__(self, bot): self.bot = bot @commands.command() async def roll(self, dice: str = None): """Roll some dice. Keyword arguments: dice -- number of dice (X) and faces (Y) in the format XdY """ if not dice: await self.bot.say('Usage: !roll XdY') return try: num_dice, num_faces = map(int, dice.split('d')) except Exception: await self.bot.say('Format is XdY') return if num_dice > 20 or num_faces > 1000: await self.bot.say('Max 20 dice and 1000 faces') return if num_dice < 1 or num_faces < 1: await self.bot.say('Stick to positive numbers') return total = sum((random.randrange(1, num_faces) for _ in range(int(num_dice)))) await self.bot.say(str(total)) @commands.command() async def choose(self, *choices: str): """ Choose between the options Keyword arguments: choices -- Space separated list of options """ await self.bot.say(random.choice(choices)) def setup(bot): bot.add_cog(RNG(bot)) <commit_msg>Make roll better and worse<commit_after>
import random from discord.ext import commands class RNG: def __init__(self, bot): self.bot = bot @commands.command() async def roll(self, dice: str): """Roll some dice. Keyword arguments: dice -- number of dice (X) and faces (Y) in the format XdY """ try: num_dice, num_faces = map(int, dice.split('d')) except Exception: await self.bot.say('Format is XdY!') return rolls = [random.randint(1, num_faces) for _ in range(num_dice)] await self.bot.say(', '.join(map(str, rolls)) + ' (total {})'.format(sum(rolls))) @commands.command() async def choose(self, *choices: str): """ Choose between the options Keyword arguments: choices -- Space separated list of options """ await self.bot.say(random.choice(choices)) def setup(bot): bot.add_cog(RNG(bot))
import random from discord.ext import commands class RNG: def __init__(self, bot): self.bot = bot @commands.command() async def roll(self, dice: str = None): """Roll some dice. Keyword arguments: dice -- number of dice (X) and faces (Y) in the format XdY """ if not dice: await self.bot.say('Usage: !roll XdY') return try: num_dice, num_faces = map(int, dice.split('d')) except Exception: await self.bot.say('Format is XdY') return if num_dice > 20 or num_faces > 1000: await self.bot.say('Max 20 dice and 1000 faces') return if num_dice < 1 or num_faces < 1: await self.bot.say('Stick to positive numbers') return total = sum((random.randrange(1, num_faces) for _ in range(int(num_dice)))) await self.bot.say(str(total)) @commands.command() async def choose(self, *choices: str): """ Choose between the options Keyword arguments: choices -- Space separated list of options """ await self.bot.say(random.choice(choices)) def setup(bot): bot.add_cog(RNG(bot)) Make roll better and worseimport random from discord.ext import commands class RNG: def __init__(self, bot): self.bot = bot @commands.command() async def roll(self, dice: str): """Roll some dice. Keyword arguments: dice -- number of dice (X) and faces (Y) in the format XdY """ try: num_dice, num_faces = map(int, dice.split('d')) except Exception: await self.bot.say('Format is XdY!') return rolls = [random.randint(1, num_faces) for _ in range(num_dice)] await self.bot.say(', '.join(map(str, rolls)) + ' (total {})'.format(sum(rolls))) @commands.command() async def choose(self, *choices: str): """ Choose between the options Keyword arguments: choices -- Space separated list of options """ await self.bot.say(random.choice(choices)) def setup(bot): bot.add_cog(RNG(bot))
<commit_before>import random from discord.ext import commands class RNG: def __init__(self, bot): self.bot = bot @commands.command() async def roll(self, dice: str = None): """Roll some dice. Keyword arguments: dice -- number of dice (X) and faces (Y) in the format XdY """ if not dice: await self.bot.say('Usage: !roll XdY') return try: num_dice, num_faces = map(int, dice.split('d')) except Exception: await self.bot.say('Format is XdY') return if num_dice > 20 or num_faces > 1000: await self.bot.say('Max 20 dice and 1000 faces') return if num_dice < 1 or num_faces < 1: await self.bot.say('Stick to positive numbers') return total = sum((random.randrange(1, num_faces) for _ in range(int(num_dice)))) await self.bot.say(str(total)) @commands.command() async def choose(self, *choices: str): """ Choose between the options Keyword arguments: choices -- Space separated list of options """ await self.bot.say(random.choice(choices)) def setup(bot): bot.add_cog(RNG(bot)) <commit_msg>Make roll better and worse<commit_after>import random from discord.ext import commands class RNG: def __init__(self, bot): self.bot = bot @commands.command() async def roll(self, dice: str): """Roll some dice. Keyword arguments: dice -- number of dice (X) and faces (Y) in the format XdY """ try: num_dice, num_faces = map(int, dice.split('d')) except Exception: await self.bot.say('Format is XdY!') return rolls = [random.randint(1, num_faces) for _ in range(num_dice)] await self.bot.say(', '.join(map(str, rolls)) + ' (total {})'.format(sum(rolls))) @commands.command() async def choose(self, *choices: str): """ Choose between the options Keyword arguments: choices -- Space separated list of options """ await self.bot.say(random.choice(choices)) def setup(bot): bot.add_cog(RNG(bot))
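The `roll` command above mixes parsing, rolling, and Discord I/O in one coroutine. Pulling the dice logic into a pure helper makes it easy to unit-test without a bot; a sketch, where `roll_dice` is a hypothetical name:

```python
import random

def roll_dice(dice):
    """Parse an 'XdY' spec and return (rolls, total)."""
    num_dice, num_faces = map(int, dice.split('d'))
    rolls = [random.randint(1, num_faces) for _ in range(num_dice)]
    return rolls, sum(rolls)

rolls, total = roll_dice('3d6')
print(', '.join(map(str, rolls)) + ' (total {})'.format(total))
```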
f9a22a0db46a84508cd7a6e5294612eb0484805a
watchdog/__init__.py
watchdog/__init__.py
# -*- coding: utf-8 -*- import logging logging.basicConfig(level=logging.DEBUG) from events import FileSystemEventHandler try: import _watchdog_fsevents logging.debug('Using FSEventsObserver.') from fsevents_observer import FSEventsObserver as Observer except ImportError: try: import win32file import win32con logging.debug('Using Win32Observer.') from win32_observer import Win32Observer as Observer except ImportError: logging.debug('Using PollingObserver as fallback.') from polling_observer import PollingObserver as Observer
# -*- coding: utf-8 -*- import logging logging.basicConfig(level=logging.DEBUG) from events import FileSystemEventHandler try: import pyinotify logging.debug('Using InotifyObserver') from inotify_observer import InotifyObserver as Observer except ImportError: try: import _watchdog_fsevents logging.debug('Using FSEventsObserver.') from fsevents_observer import FSEventsObserver as Observer except ImportError: try: import win32file import win32con logging.debug('Using Win32Observer.') from win32_observer import Win32Observer as Observer except ImportError: logging.debug('Using PollingObserver as fallback.') from polling_observer import PollingObserver as Observer
Use InotifyObserver if the pyinotify dependency is satisfied.
Use InotifyObserver if the pyinotify dependency is satisfied. Signed-off-by: Gora Khargosh <a2078c57e3ac12c6dfb97b7c2c4e6d6d7db7e92f@gmail.com>
Python
apache-2.0
glorizen/watchdog,mconstantin/watchdog,ymero/watchdog,gorakhargosh/watchdog,glorizen/watchdog,ymero/watchdog,edevil/watchdog,teleyinex/watchdog,teleyinex/watchdog,gorakhargosh/watchdog,glorizen/watchdog,teleyinex/watchdog,mconstantin/watchdog,edevil/watchdog,edevil/watchdog,mconstantin/watchdog,javrasya/watchdog,edevil/watchdog,edevil/watchdog,javrasya/watchdog,ymero/watchdog,javrasya/watchdog
# -*- coding: utf-8 -*- import logging logging.basicConfig(level=logging.DEBUG) from events import FileSystemEventHandler try: import _watchdog_fsevents logging.debug('Using FSEventsObserver.') from fsevents_observer import FSEventsObserver as Observer except ImportError: try: import win32file import win32con logging.debug('Using Win32Observer.') from win32_observer import Win32Observer as Observer except ImportError: logging.debug('Using PollingObserver as fallback.') from polling_observer import PollingObserver as Observer Use InotifyObserver if the pyinotify dependency is satisfied. Signed-off-by: Gora Khargosh <a2078c57e3ac12c6dfb97b7c2c4e6d6d7db7e92f@gmail.com>
# -*- coding: utf-8 -*- import logging logging.basicConfig(level=logging.DEBUG) from events import FileSystemEventHandler try: import pyinotify logging.debug('Using InotifyObserver') from inotify_observer import InotifyObserver as Observer except ImportError: try: import _watchdog_fsevents logging.debug('Using FSEventsObserver.') from fsevents_observer import FSEventsObserver as Observer except ImportError: try: import win32file import win32con logging.debug('Using Win32Observer.') from win32_observer import Win32Observer as Observer except ImportError: logging.debug('Using PollingObserver as fallback.') from polling_observer import PollingObserver as Observer
<commit_before># -*- coding: utf-8 -*- import logging logging.basicConfig(level=logging.DEBUG) from events import FileSystemEventHandler try: import _watchdog_fsevents logging.debug('Using FSEventsObserver.') from fsevents_observer import FSEventsObserver as Observer except ImportError: try: import win32file import win32con logging.debug('Using Win32Observer.') from win32_observer import Win32Observer as Observer except ImportError: logging.debug('Using PollingObserver as fallback.') from polling_observer import PollingObserver as Observer <commit_msg>Use InotifyObserver if the pyinotify dependency is satisfied. Signed-off-by: Gora Khargosh <a2078c57e3ac12c6dfb97b7c2c4e6d6d7db7e92f@gmail.com><commit_after>
# -*- coding: utf-8 -*- import logging logging.basicConfig(level=logging.DEBUG) from events import FileSystemEventHandler try: import pyinotify logging.debug('Using InotifyObserver') from inotify_observer import InotifyObserver as Observer except ImportError: try: import _watchdog_fsevents logging.debug('Using FSEventsObserver.') from fsevents_observer import FSEventsObserver as Observer except ImportError: try: import win32file import win32con logging.debug('Using Win32Observer.') from win32_observer import Win32Observer as Observer except ImportError: logging.debug('Using PollingObserver as fallback.') from polling_observer import PollingObserver as Observer
# -*- coding: utf-8 -*- import logging logging.basicConfig(level=logging.DEBUG) from events import FileSystemEventHandler try: import _watchdog_fsevents logging.debug('Using FSEventsObserver.') from fsevents_observer import FSEventsObserver as Observer except ImportError: try: import win32file import win32con logging.debug('Using Win32Observer.') from win32_observer import Win32Observer as Observer except ImportError: logging.debug('Using PollingObserver as fallback.') from polling_observer import PollingObserver as Observer Use InotifyObserver if the pyinotify dependency is satisfied. Signed-off-by: Gora Khargosh <a2078c57e3ac12c6dfb97b7c2c4e6d6d7db7e92f@gmail.com># -*- coding: utf-8 -*- import logging logging.basicConfig(level=logging.DEBUG) from events import FileSystemEventHandler try: import pyinotify logging.debug('Using InotifyObserver') from inotify_observer import InotifyObserver as Observer except ImportError: try: import _watchdog_fsevents logging.debug('Using FSEventsObserver.') from fsevents_observer import FSEventsObserver as Observer except ImportError: try: import win32file import win32con logging.debug('Using Win32Observer.') from win32_observer import Win32Observer as Observer except ImportError: logging.debug('Using PollingObserver as fallback.') from polling_observer import PollingObserver as Observer
<commit_before># -*- coding: utf-8 -*- import logging logging.basicConfig(level=logging.DEBUG) from events import FileSystemEventHandler try: import _watchdog_fsevents logging.debug('Using FSEventsObserver.') from fsevents_observer import FSEventsObserver as Observer except ImportError: try: import win32file import win32con logging.debug('Using Win32Observer.') from win32_observer import Win32Observer as Observer except ImportError: logging.debug('Using PollingObserver as fallback.') from polling_observer import PollingObserver as Observer <commit_msg>Use InotifyObserver if the pyinotify dependency is satisfied. Signed-off-by: Gora Khargosh <a2078c57e3ac12c6dfb97b7c2c4e6d6d7db7e92f@gmail.com><commit_after># -*- coding: utf-8 -*- import logging logging.basicConfig(level=logging.DEBUG) from events import FileSystemEventHandler try: import pyinotify logging.debug('Using InotifyObserver') from inotify_observer import InotifyObserver as Observer except ImportError: try: import _watchdog_fsevents logging.debug('Using FSEventsObserver.') from fsevents_observer import FSEventsObserver as Observer except ImportError: try: import win32file import win32con logging.debug('Using Win32Observer.') from win32_observer import Win32Observer as Observer except ImportError: logging.debug('Using PollingObserver as fallback.') from polling_observer import PollingObserver as Observer
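The watchdog record selects an observer backend with one nested try/except ImportError per platform, which adds a level of indentation for every backend. The same preference order can be expressed as a flat loop; this is a sketch only, assuming the observer modules have the shape shown in the record, and the importlib-based probing is not part of the original code:

```python
import importlib
import logging

# (probe module, observer module, observer class), in preference order.
_BACKENDS = [
    ('pyinotify', 'inotify_observer', 'InotifyObserver'),
    ('_watchdog_fsevents', 'fsevents_observer', 'FSEventsObserver'),
    ('win32file', 'win32_observer', 'Win32Observer'),
]

def pick_observer():
    for probe, module, cls in _BACKENDS:
        try:
            importlib.import_module(probe)
        except ImportError:
            continue
        logging.debug('Using %s.', cls)
        return getattr(importlib.import_module(module), cls)
    logging.debug('Using PollingObserver as fallback.')
    return importlib.import_module('polling_observer').PollingObserver
```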
52f8e68835eb67e522dd8f1c7725d460eaa2cab7
RPS/rps-1.py
RPS/rps-1.py
# A simple rock, paper, scissors script submitted as a demo of easy game-making # In Python # Certain parts of this program are functional—that is, written in functions that # work together. Some parts aren't. As we improve the program, you'll find that # This functional way of doing things has some real advantages. import random # We need the random module for the computer to play # This dictionary relates a choice to what it defeats for easy comparison later. beats = { "rock":"scissors", "paper":"rock", "scissors":"paper" } # Now we make an easy-to-use list of choices from the beats choices = list(beats.keys()) # Get the player choice from the input command. # The lower() is used to correct for people typing with capitals. # The strip() removes any trailing space that might mess us up. player_choice = input("Rock, paper, or scissors?").lower().strip() # Finally, time to compare! But we have to account for cheaters/mistakes. if player_choice not in choices: print("You must choose r, p, or s!") else: # Here we make the computer choose, then compare the two. computer_choice = random.choice(choices) if beats[player_choice] == computer_choice: print("You win!") else: print("You lose") """ This program works, but it's quite limited. Probably its biggest limitation is that it will only run once. It also doesn't keep score—why would it, since it only runs once? Our next version of the game will make it more user-friendly. """
""" A simple rock, paper, scissors script submitted as a demo of easy game-making In Python. """ import random # We need thr random module for the computer to play # This dictionary relates a choice to what it defeats for easy comparison later. beats = { "rock":"scissors", "paper":"rock", "scissors":"paper" } # Now we make an easy-to-use list of choices from the beats choices = list(beats.keys()) # Get the player choice from the input command. # The lower() is used to correct for people typing with capitals. # The strip() removes any trailing space that might mess us up. player_choice = input("Rock, paper, or scissors?").lower().strip() # Finally, time to compare! But we have to account for cheaters/mistakes. if player_choice not in choices: print("You must choose r, p, or s!") else: # Here we make the computer choose, then compare the two. computer_choice = random.choice(choices) if beats[player_choice] == computer_choice print("You win!") else: print("You lose") """ This program works, but it's quite limited. Probably its biggest limitation is that it will only run once. It also doesn't keep score—why would it, since it only runs once? Our next version of the game will make it more user-friendly. """
Refactor out functions from rps1
Refactor out functions from rps1
Python
mit
mttaggart/python-cs
# A simple rock, paper, scissors script submitted as a demo of easy game-making # In Python # Certain parts of this program are functional—that is, written in functions that # work together. Some parts aren't. As we improve the program, you'll find that # This functional way of doing things has some real advantages. import random # We need the random module for the computer to play # This dictionary relates a choice to what it defeats for easy comparison later. beats = { "rock":"scissors", "paper":"rock", "scissors":"paper" } # Now we make an easy-to-use list of choices from the beats choices = list(beats.keys()) # Get the player choice from the input command. # The lower() is used to correct for people typing with capitals. # The strip() removes any trailing space that might mess us up. player_choice = input("Rock, paper, or scissors?").lower().strip() # Finally, time to compare! But we have to account for cheaters/mistakes. if player_choice not in choices: print("You must choose r, p, or s!") else: # Here we make the computer choose, then compare the two. computer_choice = random.choice(choices) if beats[player_choice] == computer_choice: print("You win!") else: print("You lose") """ This program works, but it's quite limited. Probably its biggest limitation is that it will only run once. It also doesn't keep score—why would it, since it only runs once? Our next version of the game will make it more user-friendly. """ Refactor out functions from rps1
""" A simple rock, paper, scissors script submitted as a demo of easy game-making In Python. """ import random # We need thr random module for the computer to play # This dictionary relates a choice to what it defeats for easy comparison later. beats = { "rock":"scissors", "paper":"rock", "scissors":"paper" } # Now we make an easy-to-use list of choices from the beats choices = list(beats.keys()) # Get the player choice from the input command. # The lower() is used to correct for people typing with capitals. # The strip() removes any trailing space that might mess us up. player_choice = input("Rock, paper, or scissors?").lower().strip() # Finally, time to compare! But we have to account for cheaters/mistakes. if player_choice not in choices: print("You must choose r, p, or s!") else: # Here we make the computer choose, then compare the two. computer_choice = random.choice(choices) if beats[player_choice] == computer_choice print("You win!") else: print("You lose") """ This program works, but it's quite limited. Probably its biggest limitation is that it will only run once. It also doesn't keep score—why would it, since it only runs once? Our next version of the game will make it more user-friendly. """
<commit_before># A simple rock, paper, scissors script submitted as a demo of easy game-making # In Python # Certain parts of this program are functional—that is, written in functions that # work together. Some parts aren't. As we improve the program, you'll find that # This functional way of doing things has some real advantages. import random # We need the random module for the computer to play # This dictionary relates a choice to what it defeats for easy comparison later. beats = { "rock":"scissors", "paper":"rock", "scissors":"paper" } # Now we make an easy-to-use list of choices from the beats choices = list(beats.keys()) # Get the player choice from the input command. # The lower() is used to correct for people typing with capitals. # The strip() removes any trailing space that might mess us up. player_choice = input("Rock, paper, or scissors?").lower().strip() # Finally, time to compare! But we have to account for cheaters/mistakes. if player_choice not in choices: print("You must choose r, p, or s!") else: # Here we make the computer choose, then compare the two. computer_choice = random.choice(choices) if beats[player_choice] == computer_choice: print("You win!") else: print("You lose") """ This program works, but it's quite limited. Probably its biggest limitation is that it will only run once. It also doesn't keep score—why would it, since it only runs once? Our next version of the game will make it more user-friendly. """ <commit_msg>Refactor out functions from rps1<commit_after>
""" A simple rock, paper, scissors script submitted as a demo of easy game-making In Python. """ import random # We need thr random module for the computer to play # This dictionary relates a choice to what it defeats for easy comparison later. beats = { "rock":"scissors", "paper":"rock", "scissors":"paper" } # Now we make an easy-to-use list of choices from the beats choices = list(beats.keys()) # Get the player choice from the input command. # The lower() is used to correct for people typing with capitals. # The strip() removes any trailing space that might mess us up. player_choice = input("Rock, paper, or scissors?").lower().strip() # Finally, time to compare! But we have to account for cheaters/mistakes. if player_choice not in choices: print("You must choose r, p, or s!") else: # Here we make the computer choose, then compare the two. computer_choice = random.choice(choices) if beats[player_choice] == computer_choice print("You win!") else: print("You lose") """ This program works, but it's quite limited. Probably its biggest limitation is that it will only run once. It also doesn't keep score—why would it, since it only runs once? Our next version of the game will make it more user-friendly. """
# A simple rock, paper, scissors script submitted as a demo of easy game-making # In Python # Certain parts of this program are functional—that is, written in functions that # work together. Some parts aren't. As we improve the program, you'll find that # This functional way of doing things has some real advantages. import random # We need the random module for the computer to play # This dictionary relates a choice to what it defeats for easy comparison later. beats = { "rock":"scissors", "paper":"rock", "scissors":"paper" } # Now we make an easy-to-use list of choices from the beats choices = list(beats.keys()) # Get the player choice from the input command. # The lower() is used to correct for people typing with capitals. # The strip() removes any trailing space that might mess us up. player_choice = input("Rock, paper, or scissors?").lower().strip() # Finally, time to compare! But we have to account for cheaters/mistakes. if player_choice not in choices: print("You must choose r, p, or s!") else: # Here we make the computer choose, then compare the two. computer_choice = random.choice(choices) if beats[player_choice] == computer_choice: print("You win!") else: print("You lose") """ This program works, but it's quite limited. Probably its biggest limitation is that it will only run once. It also doesn't keep score—why would it, since it only runs once? Our next version of the game will make it more user-friendly. """ Refactor out functions from rps1""" A simple rock, paper, scissors script submitted as a demo of easy game-making In Python. """ import random # We need the random module for the computer to play # This dictionary relates a choice to what it defeats for easy comparison later. beats = { "rock":"scissors", "paper":"rock", "scissors":"paper" } # Now we make an easy-to-use list of choices from the beats choices = list(beats.keys()) # Get the player choice from the input command. # The lower() is used to correct for people typing with capitals. # The strip() removes any trailing space that might mess us up. player_choice = input("Rock, paper, or scissors?").lower().strip() # Finally, time to compare! But we have to account for cheaters/mistakes. if player_choice not in choices: print("You must choose r, p, or s!") else: # Here we make the computer choose, then compare the two. computer_choice = random.choice(choices) if beats[player_choice] == computer_choice: print("You win!") else: print("You lose") """ This program works, but it's quite limited. Probably its biggest limitation is that it will only run once. It also doesn't keep score—why would it, since it only runs once? Our next version of the game will make it more user-friendly. """
<commit_before># A simple rock, paper, scissors script submitted as a demo of easy game-making # In Python # Certain parts of this program are functional—that is, written in functions that # work together. Some parts aren't. As we improve the program, you'll find that # This functional way of doing things has some real advantages. import random # We need the random module for the computer to play # This dictionary relates a choice to what it defeats for easy comparison later. beats = { "rock":"scissors", "paper":"rock", "scissors":"paper" } # Now we make an easy-to-use list of choices from the beats choices = list(beats.keys()) # Get the player choice from the input command. # The lower() is used to correct for people typing with capitals. # The strip() removes any trailing space that might mess us up. player_choice = input("Rock, paper, or scissors?").lower().strip() # Finally, time to compare! But we have to account for cheaters/mistakes. if player_choice not in choices: print("You must choose r, p, or s!") else: # Here we make the computer choose, then compare the two. computer_choice = random.choice(choices) if beats[player_choice] == computer_choice: print("You win!") else: print("You lose") """ This program works, but it's quite limited. Probably its biggest limitation is that it will only run once. It also doesn't keep score—why would it, since it only runs once? Our next version of the game will make it more user-friendly. """ <commit_msg>Refactor out functions from rps1<commit_after>""" A simple rock, paper, scissors script submitted as a demo of easy game-making In Python. """ import random # We need the random module for the computer to play # This dictionary relates a choice to what it defeats for easy comparison later. beats = { "rock":"scissors", "paper":"rock", "scissors":"paper" } # Now we make an easy-to-use list of choices from the beats choices = list(beats.keys()) # Get the player choice from the input command. # The lower() is used to correct for people typing with capitals. # The strip() removes any trailing space that might mess us up. player_choice = input("Rock, paper, or scissors?").lower().strip() # Finally, time to compare! But we have to account for cheaters/mistakes. if player_choice not in choices: print("You must choose r, p, or s!") else: # Here we make the computer choose, then compare the two. computer_choice = random.choice(choices) if beats[player_choice] == computer_choice: print("You win!") else: print("You lose") """ This program works, but it's quite limited. Probably its biggest limitation is that it will only run once. It also doesn't keep score—why would it, since it only runs once? Our next version of the game will make it more user-friendly. """
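Neither version of the script handles a tie: when the player and the computer pick the same option, `beats[player_choice] == computer_choice` is False and the player is told they lost. A draw branch fixes that with one extra comparison (variable names as in the record):

```python
if player_choice == computer_choice:
    print("It's a draw!")
elif beats[player_choice] == computer_choice:
    print("You win!")
else:
    print("You lose")
```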
e16b2de7dd7c6e0df100bba08d3a7465bbbb4424
tests/test_service.py
tests/test_service.py
from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.asymmetric import padding from cryptography.hazmat.primitives import serialization, hashes import requests import base64 import unittest import os class TestPosieService(unittest.TestCase): POSIE_URL = os.getenv('POSIE_URL', 'http://127.0.0.1:5000') key_url = "{}/key".format(POSIE_URL) import_url = "{}/decrypt".format(POSIE_URL) public_key = "" def setUp(self): # Load public der key from http endpoint r = requests.get(self.key_url) key_string = base64.b64decode(r.text) self.public_key = serialization.load_der_public_key( key_string, backend=default_backend() ) def send_message(self, message): ciphertext = self.public_key.encrypt( message, padding.OAEP( mgf=padding.MGF1(algorithm=hashes.SHA1()), algorithm=hashes.SHA1(), label=None ) ) # Ask posie to decode message r = requests.post(self.import_url, data=base64.b64encode(ciphertext)) return r def test_decrypt_fail_sends_400(self): # Ask posie to decode message r = requests.post(self.import_url, data='rubbish') self.assertEqual(r.status_code, 400) def test_no_content_sends_400(self): # Ask posie to decode message r = requests.post(self.import_url, data='') self.assertEqual(r.status_code, 400) def test_decrypts_message(self): # Encrypt a message with the key message = b"Some encrypted message" # Ask posie to decode message r = self.send_message(message) # Compare to bytestring version of decrypted data self.assertEqual(str.encode(r.text), message)
from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization import base64 import unittest import sys import os sys.path.append(os.path.abspath('../server.py')) import server class TestPosieService(unittest.TestCase): def test_key_generation(self): # Load public der key from http endpoint key_string = base64.b64decode(server.key()) public_key = serialization.load_der_public_key( key_string, backend=default_backend() ) self.assertIsNotNone(public_key)
Remove requests and drop external tests (now in integration)
Remove requests and drop external tests (now in integration)
Python
mit
ONSdigital/edcdi
from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.asymmetric import padding from cryptography.hazmat.primitives import serialization, hashes import requests import base64 import unittest import os class TestPosieService(unittest.TestCase): POSIE_URL = os.getenv('POSIE_URL', 'http://127.0.0.1:5000') key_url = "{}/key".format(POSIE_URL) import_url = "{}/decrypt".format(POSIE_URL) public_key = "" def setUp(self): # Load public der key from http endpoint r = requests.get(self.key_url) key_string = base64.b64decode(r.text) self.public_key = serialization.load_der_public_key( key_string, backend=default_backend() ) def send_message(self, message): ciphertext = self.public_key.encrypt( message, padding.OAEP( mgf=padding.MGF1(algorithm=hashes.SHA1()), algorithm=hashes.SHA1(), label=None ) ) # Ask posie to decode message r = requests.post(self.import_url, data=base64.b64encode(ciphertext)) return r def test_decrypt_fail_sends_400(self): # Ask posie to decode message r = requests.post(self.import_url, data='rubbish') self.assertEqual(r.status_code, 400) def test_no_content_sends_400(self): # Ask posie to decode message r = requests.post(self.import_url, data='') self.assertEqual(r.status_code, 400) def test_decrypts_message(self): # Encrypt a message with the key message = b"Some encrypted message" # Ask posie to decode message r = self.send_message(message) # Compare to bytestring version of decrypted data self.assertEqual(str.encode(r.text), message) Remove requests and drop external tests (now in integration)
from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization import base64 import unittest import sys import os sys.path.append(os.path.abspath('../server.py')) import server class TestPosieService(unittest.TestCase): def test_key_generation(self): # Load public der key from http endpoint key_string = base64.b64decode(server.key()) public_key = serialization.load_der_public_key( key_string, backend=default_backend() ) self.assertIsNotNone(public_key)
<commit_before>from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.asymmetric import padding from cryptography.hazmat.primitives import serialization, hashes import requests import base64 import unittest import os class TestPosieService(unittest.TestCase): POSIE_URL = os.getenv('POSIE_URL', 'http://127.0.0.1:5000') key_url = "{}/key".format(POSIE_URL) import_url = "{}/decrypt".format(POSIE_URL) public_key = "" def setUp(self): # Load public der key from http endpoint r = requests.get(self.key_url) key_string = base64.b64decode(r.text) self.public_key = serialization.load_der_public_key( key_string, backend=default_backend() ) def send_message(self, message): ciphertext = self.public_key.encrypt( message, padding.OAEP( mgf=padding.MGF1(algorithm=hashes.SHA1()), algorithm=hashes.SHA1(), label=None ) ) # Ask posie to decode message r = requests.post(self.import_url, data=base64.b64encode(ciphertext)) return r def test_decrypt_fail_sends_400(self): # Ask posie to decode message r = requests.post(self.import_url, data='rubbish') self.assertEqual(r.status_code, 400) def test_no_content_sends_400(self): # Ask posie to decode message r = requests.post(self.import_url, data='') self.assertEqual(r.status_code, 400) def test_decrypts_message(self): # Encrypt a message with the key message = b"Some encrypted message" # Ask posie to decode message r = self.send_message(message) # Compare to bytestring version of decrypted data self.assertEqual(str.encode(r.text), message) <commit_msg>Remove requests and drop external tests (now in integration)<commit_after>
from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization import base64 import unittest import sys import os sys.path.append(os.path.abspath('../server.py')) import server class TestPosieService(unittest.TestCase): def test_key_generation(self): # Load public der key from http endpoint key_string = base64.b64decode(server.key()) public_key = serialization.load_der_public_key( key_string, backend=default_backend() ) self.assertIsNotNone(public_key)
from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.asymmetric import padding from cryptography.hazmat.primitives import serialization, hashes import requests import base64 import unittest import os class TestPosieService(unittest.TestCase): POSIE_URL = os.getenv('POSIE_URL', 'http://127.0.0.1:5000') key_url = "{}/key".format(POSIE_URL) import_url = "{}/decrypt".format(POSIE_URL) public_key = "" def setUp(self): # Load public der key from http endpoint r = requests.get(self.key_url) key_string = base64.b64decode(r.text) self.public_key = serialization.load_der_public_key( key_string, backend=default_backend() ) def send_message(self, message): ciphertext = self.public_key.encrypt( message, padding.OAEP( mgf=padding.MGF1(algorithm=hashes.SHA1()), algorithm=hashes.SHA1(), label=None ) ) # Ask posie to decode message r = requests.post(self.import_url, data=base64.b64encode(ciphertext)) return r def test_decrypt_fail_sends_400(self): # Ask posie to decode message r = requests.post(self.import_url, data='rubbish') self.assertEqual(r.status_code, 400) def test_no_content_sends_400(self): # Ask posie to decode message r = requests.post(self.import_url, data='') self.assertEqual(r.status_code, 400) def test_decrypts_message(self): # Encrypt a message with the key message = b"Some encrypted message" # Ask posie to decode message r = self.send_message(message) # Compare to bytestring version of decrypted data self.assertEqual(str.encode(r.text), message) Remove requests and drop external tests (now in integration)from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization import base64 import unittest import sys import os sys.path.append(os.path.abspath('../server.py')) import server class TestPosieService(unittest.TestCase): def test_key_generation(self): # Load public der key from http endpoint key_string = base64.b64decode(server.key()) public_key = serialization.load_der_public_key( key_string, backend=default_backend() ) self.assertIsNotNone(public_key)
<commit_before>from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.asymmetric import padding from cryptography.hazmat.primitives import serialization, hashes import requests import base64 import unittest import os class TestPosieService(unittest.TestCase): POSIE_URL = os.getenv('POSIE_URL', 'http://127.0.0.1:5000') key_url = "{}/key".format(POSIE_URL) import_url = "{}/decrypt".format(POSIE_URL) public_key = "" def setUp(self): # Load public der key from http endpoint r = requests.get(self.key_url) key_string = base64.b64decode(r.text) self.public_key = serialization.load_der_public_key( key_string, backend=default_backend() ) def send_message(self, message): ciphertext = self.public_key.encrypt( message, padding.OAEP( mgf=padding.MGF1(algorithm=hashes.SHA1()), algorithm=hashes.SHA1(), label=None ) ) # Ask posie to decode message r = requests.post(self.import_url, data=base64.b64encode(ciphertext)) return r def test_decrypt_fail_sends_400(self): # Ask posie to decode message r = requests.post(self.import_url, data='rubbish') self.assertEqual(r.status_code, 400) def test_no_content_sends_400(self): # Ask posie to decode message r = requests.post(self.import_url, data='') self.assertEqual(r.status_code, 400) def test_decrypts_message(self): # Encrypt a message with the key message = b"Some encrypted message" # Ask posie to decode message r = self.send_message(message) # Compare to bytestring version of decrypted data self.assertEqual(str.encode(r.text), message) <commit_msg>Remove requests and drop external tests (now in integration)<commit_after>from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization import base64 import unittest import sys import os sys.path.append(os.path.abspath('../server.py')) import server class TestPosieService(unittest.TestCase): def test_key_generation(self): # Load public der key from http endpoint key_string = base64.b64decode(server.key()) public_key = serialization.load_der_public_key( key_string, backend=default_backend() ) self.assertIsNotNone(public_key)
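The rewritten posie test appends `'../server.py'` to `sys.path`, but `sys.path` entries are searched as directories, so appending a file path has no effect and `import server` only resolves when the tests happen to run from a directory that already makes server.py importable. The usual pattern appends the directory holding the module, derived from the test file's own location; a minimal sketch assuming `server.py` sits one level above the test file:

```python
import os
import sys

# Add the directory that holds server.py, independent of the cwd.
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))

import server
```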
461019099c41ca4ef2fc7ccfec0141ed5b7e3bd6
tests/test_unicode.py
tests/test_unicode.py
# coding: utf-8 import sys import pytest import jupytext from .utils import list_all_notebooks @pytest.mark.parametrize('nb_file', list_all_notebooks('.ipynb') + list_all_notebooks('.Rmd')) def test_notebook_contents_is_unicode(nb_file): nb = jupytext.readf(nb_file) for cell in nb.cells: if sys.version_info < (3, 0): assert cell.source == '' or isinstance(cell.source, unicode) else: assert isinstance(cell.source, str) def test_write_non_ascii(tmpdir): nb = jupytext.reads(u'Non-ascii contênt', ext='.Rmd') jupytext.writef(nb, str(tmpdir.join('notebook.Rmd'))) jupytext.writef(nb, str(tmpdir.join('notebook.ipynb')))
# coding: utf-8 import sys import pytest import jupytext from .utils import list_all_notebooks try: unicode # Python 2 except NameError: unicode = str # Python 3 @pytest.mark.parametrize('nb_file', list_all_notebooks('.ipynb') + list_all_notebooks('.Rmd')) def test_notebook_contents_is_unicode(nb_file): nb = jupytext.readf(nb_file) for cell in nb.cells: if sys.version_info < (3, 0): assert cell.source == '' or isinstance(cell.source, unicode) else: assert isinstance(cell.source, str) def test_write_non_ascii(tmpdir): nb = jupytext.reads(u'Non-ascii contênt', ext='.Rmd') jupytext.writef(nb, str(tmpdir.join('notebook.Rmd'))) jupytext.writef(nb, str(tmpdir.join('notebook.ipynb')))
Define unicode in Python 3
Define unicode in Python 3 __unicode__ was removed in Python 3 because all __str__ are Unicode. [flake8](http://flake8.pycqa.org) testing of https://github.com/mwouts/jupytext on Python 3.7.0 $ __flake8 . --count --select=E901,E999,F821,F822,F823 --show-source --statistics__ ``` ./.jupyter/jupyter_notebook_config.py:1:1: F821 undefined name 'c' c.NotebookApp.contents_manager_class = 'jupytext.TextFileContentsManager' ^ ./tests/test_unicode.py:15:65: F821 undefined name 'unicode' assert cell.source == '' or isinstance(cell.source, unicode) ^ ./tests/mirror/jupyter_again.py:32:1: E999 SyntaxError: invalid syntax ?next ^ 1 E999 SyntaxError: invalid syntax 2 F821 undefined name 'c' 3 ```
Python
mit
mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext
# coding: utf-8 import sys import pytest import jupytext from .utils import list_all_notebooks @pytest.mark.parametrize('nb_file', list_all_notebooks('.ipynb') + list_all_notebooks('.Rmd')) def test_notebook_contents_is_unicode(nb_file): nb = jupytext.readf(nb_file) for cell in nb.cells: if sys.version_info < (3, 0): assert cell.source == '' or isinstance(cell.source, unicode) else: assert isinstance(cell.source, str) def test_write_non_ascii(tmpdir): nb = jupytext.reads(u'Non-ascii contênt', ext='.Rmd') jupytext.writef(nb, str(tmpdir.join('notebook.Rmd'))) jupytext.writef(nb, str(tmpdir.join('notebook.ipynb'))) Define unicode in Python 3 __unicode__ was removed in Python 3 because all __str__ are Unicode. [flake8](http://flake8.pycqa.org) testing of https://github.com/mwouts/jupytext on Python 3.7.0 $ __flake8 . --count --select=E901,E999,F821,F822,F823 --show-source --statistics__ ``` ./.jupyter/jupyter_notebook_config.py:1:1: F821 undefined name 'c' c.NotebookApp.contents_manager_class = 'jupytext.TextFileContentsManager' ^ ./tests/test_unicode.py:15:65: F821 undefined name 'unicode' assert cell.source == '' or isinstance(cell.source, unicode) ^ ./tests/mirror/jupyter_again.py:32:1: E999 SyntaxError: invalid syntax ?next ^ 1 E999 SyntaxError: invalid syntax 2 F821 undefined name 'c' 3 ```
# coding: utf-8 import sys import pytest import jupytext from .utils import list_all_notebooks try: unicode # Python 2 except NameError: unicode = str # Python 3 @pytest.mark.parametrize('nb_file', list_all_notebooks('.ipynb') + list_all_notebooks('.Rmd')) def test_notebook_contents_is_unicode(nb_file): nb = jupytext.readf(nb_file) for cell in nb.cells: if sys.version_info < (3, 0): assert cell.source == '' or isinstance(cell.source, unicode) else: assert isinstance(cell.source, str) def test_write_non_ascii(tmpdir): nb = jupytext.reads(u'Non-ascii contênt', ext='.Rmd') jupytext.writef(nb, str(tmpdir.join('notebook.Rmd'))) jupytext.writef(nb, str(tmpdir.join('notebook.ipynb')))
<commit_before># coding: utf-8 import sys import pytest import jupytext from .utils import list_all_notebooks @pytest.mark.parametrize('nb_file', list_all_notebooks('.ipynb') + list_all_notebooks('.Rmd')) def test_notebook_contents_is_unicode(nb_file): nb = jupytext.readf(nb_file) for cell in nb.cells: if sys.version_info < (3, 0): assert cell.source == '' or isinstance(cell.source, unicode) else: assert isinstance(cell.source, str) def test_write_non_ascii(tmpdir): nb = jupytext.reads(u'Non-ascii contênt', ext='.Rmd') jupytext.writef(nb, str(tmpdir.join('notebook.Rmd'))) jupytext.writef(nb, str(tmpdir.join('notebook.ipynb'))) <commit_msg>Define unicode in Python 3 __unicode__ was removed in Python 3 because all __str__ are Unicode. [flake8](http://flake8.pycqa.org) testing of https://github.com/mwouts/jupytext on Python 3.7.0 $ __flake8 . --count --select=E901,E999,F821,F822,F823 --show-source --statistics__ ``` ./.jupyter/jupyter_notebook_config.py:1:1: F821 undefined name 'c' c.NotebookApp.contents_manager_class = 'jupytext.TextFileContentsManager' ^ ./tests/test_unicode.py:15:65: F821 undefined name 'unicode' assert cell.source == '' or isinstance(cell.source, unicode) ^ ./tests/mirror/jupyter_again.py:32:1: E999 SyntaxError: invalid syntax ?next ^ 1 E999 SyntaxError: invalid syntax 2 F821 undefined name 'c' 3 ```<commit_after>
# coding: utf-8 import sys import pytest import jupytext from .utils import list_all_notebooks try: unicode # Python 2 except NameError: unicode = str # Python 3 @pytest.mark.parametrize('nb_file', list_all_notebooks('.ipynb') + list_all_notebooks('.Rmd')) def test_notebook_contents_is_unicode(nb_file): nb = jupytext.readf(nb_file) for cell in nb.cells: if sys.version_info < (3, 0): assert cell.source == '' or isinstance(cell.source, unicode) else: assert isinstance(cell.source, str) def test_write_non_ascii(tmpdir): nb = jupytext.reads(u'Non-ascii contênt', ext='.Rmd') jupytext.writef(nb, str(tmpdir.join('notebook.Rmd'))) jupytext.writef(nb, str(tmpdir.join('notebook.ipynb')))
# coding: utf-8 import sys import pytest import jupytext from .utils import list_all_notebooks @pytest.mark.parametrize('nb_file', list_all_notebooks('.ipynb') + list_all_notebooks('.Rmd')) def test_notebook_contents_is_unicode(nb_file): nb = jupytext.readf(nb_file) for cell in nb.cells: if sys.version_info < (3, 0): assert cell.source == '' or isinstance(cell.source, unicode) else: assert isinstance(cell.source, str) def test_write_non_ascii(tmpdir): nb = jupytext.reads(u'Non-ascii contênt', ext='.Rmd') jupytext.writef(nb, str(tmpdir.join('notebook.Rmd'))) jupytext.writef(nb, str(tmpdir.join('notebook.ipynb'))) Define unicode in Python 3 __unicode__ was removed in Python 3 because all __str__ are Unicode. [flake8](http://flake8.pycqa.org) testing of https://github.com/mwouts/jupytext on Python 3.7.0 $ __flake8 . --count --select=E901,E999,F821,F822,F823 --show-source --statistics__ ``` ./.jupyter/jupyter_notebook_config.py:1:1: F821 undefined name 'c' c.NotebookApp.contents_manager_class = 'jupytext.TextFileContentsManager' ^ ./tests/test_unicode.py:15:65: F821 undefined name 'unicode' assert cell.source == '' or isinstance(cell.source, unicode) ^ ./tests/mirror/jupyter_again.py:32:1: E999 SyntaxError: invalid syntax ?next ^ 1 E999 SyntaxError: invalid syntax 2 F821 undefined name 'c' 3 ```# coding: utf-8 import sys import pytest import jupytext from .utils import list_all_notebooks try: unicode # Python 2 except NameError: unicode = str # Python 3 @pytest.mark.parametrize('nb_file', list_all_notebooks('.ipynb') + list_all_notebooks('.Rmd')) def test_notebook_contents_is_unicode(nb_file): nb = jupytext.readf(nb_file) for cell in nb.cells: if sys.version_info < (3, 0): assert cell.source == '' or isinstance(cell.source, unicode) else: assert isinstance(cell.source, str) def test_write_non_ascii(tmpdir): nb = jupytext.reads(u'Non-ascii contênt', ext='.Rmd') jupytext.writef(nb, str(tmpdir.join('notebook.Rmd'))) jupytext.writef(nb, str(tmpdir.join('notebook.ipynb')))
<commit_before># coding: utf-8 import sys import pytest import jupytext from .utils import list_all_notebooks @pytest.mark.parametrize('nb_file', list_all_notebooks('.ipynb') + list_all_notebooks('.Rmd')) def test_notebook_contents_is_unicode(nb_file): nb = jupytext.readf(nb_file) for cell in nb.cells: if sys.version_info < (3, 0): assert cell.source == '' or isinstance(cell.source, unicode) else: assert isinstance(cell.source, str) def test_write_non_ascii(tmpdir): nb = jupytext.reads(u'Non-ascii contênt', ext='.Rmd') jupytext.writef(nb, str(tmpdir.join('notebook.Rmd'))) jupytext.writef(nb, str(tmpdir.join('notebook.ipynb'))) <commit_msg>Define unicode in Python 3 __unicode__ was removed in Python 3 because all __str__ are Unicode. [flake8](http://flake8.pycqa.org) testing of https://github.com/mwouts/jupytext on Python 3.7.0 $ __flake8 . --count --select=E901,E999,F821,F822,F823 --show-source --statistics__ ``` ./.jupyter/jupyter_notebook_config.py:1:1: F821 undefined name 'c' c.NotebookApp.contents_manager_class = 'jupytext.TextFileContentsManager' ^ ./tests/test_unicode.py:15:65: F821 undefined name 'unicode' assert cell.source == '' or isinstance(cell.source, unicode) ^ ./tests/mirror/jupyter_again.py:32:1: E999 SyntaxError: invalid syntax ?next ^ 1 E999 SyntaxError: invalid syntax 2 F821 undefined name 'c' 3 ```<commit_after># coding: utf-8 import sys import pytest import jupytext from .utils import list_all_notebooks try: unicode # Python 2 except NameError: unicode = str # Python 3 @pytest.mark.parametrize('nb_file', list_all_notebooks('.ipynb') + list_all_notebooks('.Rmd')) def test_notebook_contents_is_unicode(nb_file): nb = jupytext.readf(nb_file) for cell in nb.cells: if sys.version_info < (3, 0): assert cell.source == '' or isinstance(cell.source, unicode) else: assert isinstance(cell.source, str) def test_write_non_ascii(tmpdir): nb = jupytext.reads(u'Non-ascii contênt', ext='.Rmd') jupytext.writef(nb, str(tmpdir.join('notebook.Rmd'))) jupytext.writef(nb, str(tmpdir.join('notebook.ipynb')))
eb893151d12f81f1ebe388f0b4ae650aa6f6552c
ticketing/__init__.py
ticketing/__init__.py
""" Ticketing ~~~~~~~~~ """ try: VERSION = __import__('pkg_resources') \ .get_distribution('django-ticketing').version except Exception, e: VERSION = 'unknown'
""" Ticketing ~~~~~~~~~ """ VERSION = (0, 6, 0, 'final', 0)
Change the version string so it doesn't cause any errors.
Change the version string so it doesn't cause any errors.
Python
mit
streeter/django-ticketing
""" Ticketing ~~~~~~~~~ """ try: VERSION = __import__('pkg_resources') \ .get_distribution('django-ticketing').version except Exception, e: VERSION = 'unknown' Change the version string so it doesn't cause any errors.
""" Ticketing ~~~~~~~~~ """ VERSION = (0, 6, 0, 'final', 0)
<commit_before>""" Ticketing ~~~~~~~~~ """ try: VERSION = __import__('pkg_resources') \ .get_distribution('django-ticketing').version except Exception, e: VERSION = 'unknown' <commit_msg>Change the version string so it doesn't cause any errors.<commit_after>
""" Ticketing ~~~~~~~~~ """ VERSION = (0, 6, 0, 'final', 0)
""" Ticketing ~~~~~~~~~ """ try: VERSION = __import__('pkg_resources') \ .get_distribution('django-ticketing').version except Exception, e: VERSION = 'unknown' Change the version string so it doesn't cause any errors.""" Ticketing ~~~~~~~~~ """ VERSION = (0, 6, 0, 'final', 0)
<commit_before>""" Ticketing ~~~~~~~~~ """ try: VERSION = __import__('pkg_resources') \ .get_distribution('django-ticketing').version except Exception, e: VERSION = 'unknown' <commit_msg>Change the version string so it doesn't cause any errors.<commit_after>""" Ticketing ~~~~~~~~~ """ VERSION = (0, 6, 0, 'final', 0)
4f98c8ff8ef724b65106a040ffaf67800dff1611
animations.py
animations.py
""" All animators should be functions which take the input time as a number from 0 to 1 and return a new value for the property they are animating. For numerical properties, this should also be normalized to a [0, 1] scale, allowing for the possibility of a [-1, 1] scale as well. """ def Linear(start, finish): def linear_animator(dt): print 'Linear Animator' print start, finish, dt, (finish-start)*(dt)+start return (finish-start)*(dt)+start return linear_animator
""" All animators should be functions which take the input time as a number from 0 to 1 and return a new value for the property they are animating. For numerical properties, this should also be normalized to a [0, 1] scale, allowing for the possibility of a [-1, 1] scale as well. """ def Linear(start, finish): def linear_animator(dt): return (finish-start)*(dt)+start return linear_animator
Remove some debugging print statements that slipped through.
Remove some debugging print statements that slipped through.
Python
lgpl-2.1
platipy/spyral
""" All animators should be functions which take the input time as a number from 0 to 1 and return a new value for the property they are animating. For numerical properties, this should also be normalized to a [0, 1] scale, allowing for the possibility of a [-1, 1] scale as well. """ def Linear(start, finish): def linear_animator(dt): print 'Linear Animator' print start, finish, dt, (finish-start)*(dt)+start return (finish-start)*(dt)+start return linear_animatorRemove some debugging print statements that slipped through.
""" All animators should be functions which take the input time as a number from 0 to 1 and return a new value for the property they are animating. For numerical properties, this should also be normalized to a [0, 1] scale, allowing for the possibility of a [-1, 1] scale as well. """ def Linear(start, finish): def linear_animator(dt): return (finish-start)*(dt)+start return linear_animator
<commit_before>""" All animators should be functions which take the input time as a number from 0 to 1 and return a new value for the property they are animating. For numerical properties, this should also be normalized to a [0, 1] scale, allowing for the possibility of a [-1, 1] scale as well. """ def Linear(start, finish): def linear_animator(dt): print 'Linear Animator' print start, finish, dt, (finish-start)*(dt)+start return (finish-start)*(dt)+start return linear_animator<commit_msg>Remove some debugging print statements that slipped through.<commit_after>
""" All animators should be functions which take the input time as a number from 0 to 1 and return a new value for the property they are animating. For numerical properties, this should also be normalized to a [0, 1] scale, allowing for the possibility of a [-1, 1] scale as well. """ def Linear(start, finish): def linear_animator(dt): return (finish-start)*(dt)+start return linear_animator
""" All animators should be functions which take the input time as a number from 0 to 1 and return a new value for the property they are animating. For numerical properties, this should also be normalized to a [0, 1] scale, allowing for the possibility of a [-1, 1] scale as well. """ def Linear(start, finish): def linear_animator(dt): print 'Linear Animator' print start, finish, dt, (finish-start)*(dt)+start return (finish-start)*(dt)+start return linear_animatorRemove some debugging print statements that slipped through.""" All animators should be functions which take the input time as a number from 0 to 1 and return a new value for the property they are animating. For numerical properties, this should also be normalized to a [0, 1] scale, allowing for the possibility of a [-1, 1] scale as well. """ def Linear(start, finish): def linear_animator(dt): return (finish-start)*(dt)+start return linear_animator
<commit_before>""" All animators should be functions which take the input time as a number from 0 to 1 and return a new value for the property they are animating. For numerical properties, this should also be normalized to a [0, 1] scale, allowing for the possibility of a [-1, 1] scale as well. """ def Linear(start, finish): def linear_animator(dt): print 'Linear Animator' print start, finish, dt, (finish-start)*(dt)+start return (finish-start)*(dt)+start return linear_animator<commit_msg>Remove some debugging print statements that slipped through.<commit_after>""" All animators should be functions which take the input time as a number from 0 to 1 and return a new value for the property they are animating. For numerical properties, this should also be normalized to a [0, 1] scale, allowing for the possibility of a [-1, 1] scale as well. """ def Linear(start, finish): def linear_animator(dt): return (finish-start)*(dt)+start return linear_animator
c370eb048aa6fe6e64f9cd738717d1deaccf8b2f
modules/pipestrconcat.py
modules/pipestrconcat.py
# pipestrconcat.py #aka stringbuilder # from pipe2py import util def pipe_strconcat(context, _INPUT, conf, **kwargs): """This source builds a string and yields it forever. Keyword arguments: context -- pipeline context _INPUT -- not used conf: part -- parts Yields (_OUTPUT): string """ s = "" for part in conf['part']: if "subkey" in part: pass #todo get from _INPUT e.g {u'type': u'text', u'subkey': u'severity'} else: s += util.get_value(part, kwargs) while True: yield s
# pipestrconcat.py #aka stringbuilder # from pipe2py import util def pipe_strconcat(context, _INPUT, conf, **kwargs): """This source builds a string and yields it forever. Keyword arguments: context -- pipeline context _INPUT -- not used conf: part -- parts Yields (_OUTPUT): string """ s = "" for part in conf['part']: if "subkey" in part: s += _INPUT[part['subkey']] else: s += util.get_value(part, kwargs) while True: yield s
Handle subkey for submodule, in strconcat at least
Handle subkey for submodule, in strconcat at least
Python
mit
nerevu/riko,nerevu/riko
# pipestrconcat.py #aka stringbuilder # from pipe2py import util def pipe_strconcat(context, _INPUT, conf, **kwargs): """This source builds a string and yields it forever. Keyword arguments: context -- pipeline context _INPUT -- not used conf: part -- parts Yields (_OUTPUT): string """ s = "" for part in conf['part']: if "subkey" in part: pass #todo get from _INPUT e.g {u'type': u'text', u'subkey': u'severity'} else: s += util.get_value(part, kwargs) while True: yield s Handle subkey for submodule, in strconcat at least
# pipestrconcat.py #aka stringbuilder # from pipe2py import util def pipe_strconcat(context, _INPUT, conf, **kwargs): """This source builds a string and yields it forever. Keyword arguments: context -- pipeline context _INPUT -- not used conf: part -- parts Yields (_OUTPUT): string """ s = "" for part in conf['part']: if "subkey" in part: s += _INPUT[part['subkey']] else: s += util.get_value(part, kwargs) while True: yield s
<commit_before># pipestrconcat.py #aka stringbuilder # from pipe2py import util def pipe_strconcat(context, _INPUT, conf, **kwargs): """This source builds a string and yields it forever. Keyword arguments: context -- pipeline context _INPUT -- not used conf: part -- parts Yields (_OUTPUT): string """ s = "" for part in conf['part']: if "subkey" in part: pass #todo get from _INPUT e.g {u'type': u'text', u'subkey': u'severity'} else: s += util.get_value(part, kwargs) while True: yield s <commit_msg>Handle subkey for submodule, in strconcat at least<commit_after>
# pipestrconcat.py #aka stringbuilder # from pipe2py import util def pipe_strconcat(context, _INPUT, conf, **kwargs): """This source builds a string and yields it forever. Keyword arguments: context -- pipeline context _INPUT -- not used conf: part -- parts Yields (_OUTPUT): string """ s = "" for part in conf['part']: if "subkey" in part: s += _INPUT[part['subkey']] else: s += util.get_value(part, kwargs) while True: yield s
# pipestrconcat.py #aka stringbuilder # from pipe2py import util def pipe_strconcat(context, _INPUT, conf, **kwargs): """This source builds a string and yields it forever. Keyword arguments: context -- pipeline context _INPUT -- not used conf: part -- parts Yields (_OUTPUT): string """ s = "" for part in conf['part']: if "subkey" in part: pass #todo get from _INPUT e.g {u'type': u'text', u'subkey': u'severity'} else: s += util.get_value(part, kwargs) while True: yield s Handle subkey for submodule, in strconcat at least# pipestrconcat.py #aka stringbuilder # from pipe2py import util def pipe_strconcat(context, _INPUT, conf, **kwargs): """This source builds a string and yields it forever. Keyword arguments: context -- pipeline context _INPUT -- not used conf: part -- parts Yields (_OUTPUT): string """ s = "" for part in conf['part']: if "subkey" in part: s += _INPUT[part['subkey']] else: s += util.get_value(part, kwargs) while True: yield s
<commit_before># pipestrconcat.py #aka stringbuilder # from pipe2py import util def pipe_strconcat(context, _INPUT, conf, **kwargs): """This source builds a string and yields it forever. Keyword arguments: context -- pipeline context _INPUT -- not used conf: part -- parts Yields (_OUTPUT): string """ s = "" for part in conf['part']: if "subkey" in part: pass #todo get from _INPUT e.g {u'type': u'text', u'subkey': u'severity'} else: s += util.get_value(part, kwargs) while True: yield s <commit_msg>Handle subkey for submodule, in strconcat at least<commit_after># pipestrconcat.py #aka stringbuilder # from pipe2py import util def pipe_strconcat(context, _INPUT, conf, **kwargs): """This source builds a string and yields it forever. Keyword arguments: context -- pipeline context _INPUT -- not used conf: part -- parts Yields (_OUTPUT): string """ s = "" for part in conf['part']: if "subkey" in part: s += _INPUT[part['subkey']] else: s += util.get_value(part, kwargs) while True: yield s
94ae82e8a2915c6c7d353d03aa363ae687805344
testing/models/test_proposal.py
testing/models/test_proposal.py
import pytest try: from unittest import mock except ImportError: import mock from k2catalogue import models @pytest.fixture def proposal(): return models.Proposal(proposal_id='abc', pi='pi', title='title', pdf_url='pdf_url') def test_proposal_printing(proposal): assert repr(proposal) == '<Proposal: abc>' def test_proposal(): proposals = models.Proposal.create(['abc', 'def'], campaign=mock.MagicMock(), proposal_mapping=mock.MagicMock()) assert (proposals[0].proposal_id == 'abc' and proposals[1].proposal_id == 'def') @pytest.mark.parametrize('input,expected', [ ('GO2069_LC', True), ('G', False), ('LC_2007JJ43_TILE', False), ]) def test_valid_proposal(input, expected): assert models.Proposal.valid_proposal(input) == expected
import pytest try: from unittest import mock except ImportError: import mock from k2catalogue import models @pytest.fixture def proposal(): return models.Proposal(proposal_id='abc', pi='pi', title='title', pdf_url='pdf_url') def test_proposal_printing(proposal): assert repr(proposal) == '<Proposal: abc>' def test_proposal(): proposals = models.Proposal.create(['abc', 'def'], campaign=mock.MagicMock(), proposal_mapping=mock.MagicMock()) assert (proposals[0].proposal_id == 'abc' and proposals[1].proposal_id == 'def') def test_open_proposals_page(proposal): with mock.patch.object(proposal, 'campaign') as campaign: proposal.open_proposals_page() campaign.open_proposals_page.assert_called_once_with() def test_open_proposal(proposal): with mock.patch('k2catalogue.models.webbrowser.open') as mock_open: proposal.open_proposal() mock_open.assert_called_once_with('pdf_url') def test_open_proposal_without_url(proposal): proposal.pdf_url = None with mock.patch('k2catalogue.models.webbrowser.open') as mock_open: proposal.open_proposal() assert not mock_open.called def test_create_with_no_mapping(capsys): proposal_mapping = {} campaign = mock.Mock() proposal_ids = ['abc', ] models.Proposal.create(proposal_ids, campaign, proposal_mapping) out, err = capsys.readouterr() assert err == 'No proposal metadata for abc\n' @pytest.mark.parametrize('input,expected', [ ('GO2069_LC', True), ('G', False), ('LC_2007JJ43_TILE', False), ]) def test_valid_proposal(input, expected): assert models.Proposal.valid_proposal(input) == expected
Add more tests around the proposal model behaviour
Add more tests around the proposal model behaviour
Python
mit
mindriot101/k2catalogue
import pytest try: from unittest import mock except ImportError: import mock from k2catalogue import models @pytest.fixture def proposal(): return models.Proposal(proposal_id='abc', pi='pi', title='title', pdf_url='pdf_url') def test_proposal_printing(proposal): assert repr(proposal) == '<Proposal: abc>' def test_proposal(): proposals = models.Proposal.create(['abc', 'def'], campaign=mock.MagicMock(), proposal_mapping=mock.MagicMock()) assert (proposals[0].proposal_id == 'abc' and proposals[1].proposal_id == 'def') @pytest.mark.parametrize('input,expected', [ ('GO2069_LC', True), ('G', False), ('LC_2007JJ43_TILE', False), ]) def test_valid_proposal(input, expected): assert models.Proposal.valid_proposal(input) == expected Add more tests around the proposal model behaviour
import pytest try: from unittest import mock except ImportError: import mock from k2catalogue import models @pytest.fixture def proposal(): return models.Proposal(proposal_id='abc', pi='pi', title='title', pdf_url='pdf_url') def test_proposal_printing(proposal): assert repr(proposal) == '<Proposal: abc>' def test_proposal(): proposals = models.Proposal.create(['abc', 'def'], campaign=mock.MagicMock(), proposal_mapping=mock.MagicMock()) assert (proposals[0].proposal_id == 'abc' and proposals[1].proposal_id == 'def') def test_open_proposals_page(proposal): with mock.patch.object(proposal, 'campaign') as campaign: proposal.open_proposals_page() campaign.open_proposals_page.assert_called_once_with() def test_open_proposal(proposal): with mock.patch('k2catalogue.models.webbrowser.open') as mock_open: proposal.open_proposal() mock_open.assert_called_once_with('pdf_url') def test_open_proposal_without_url(proposal): proposal.pdf_url = None with mock.patch('k2catalogue.models.webbrowser.open') as mock_open: proposal.open_proposal() assert not mock_open.called def test_create_with_no_mapping(capsys): proposal_mapping = {} campaign = mock.Mock() proposal_ids = ['abc', ] models.Proposal.create(proposal_ids, campaign, proposal_mapping) out, err = capsys.readouterr() assert err == 'No proposal metadata for abc\n' @pytest.mark.parametrize('input,expected', [ ('GO2069_LC', True), ('G', False), ('LC_2007JJ43_TILE', False), ]) def test_valid_proposal(input, expected): assert models.Proposal.valid_proposal(input) == expected
<commit_before>import pytest try: from unittest import mock except ImportError: import mock from k2catalogue import models @pytest.fixture def proposal(): return models.Proposal(proposal_id='abc', pi='pi', title='title', pdf_url='pdf_url') def test_proposal_printing(proposal): assert repr(proposal) == '<Proposal: abc>' def test_proposal(): proposals = models.Proposal.create(['abc', 'def'], campaign=mock.MagicMock(), proposal_mapping=mock.MagicMock()) assert (proposals[0].proposal_id == 'abc' and proposals[1].proposal_id == 'def') @pytest.mark.parametrize('input,expected', [ ('GO2069_LC', True), ('G', False), ('LC_2007JJ43_TILE', False), ]) def test_valid_proposal(input, expected): assert models.Proposal.valid_proposal(input) == expected <commit_msg>Add more tests around the proposal model behaviour<commit_after>
import pytest try: from unittest import mock except ImportError: import mock from k2catalogue import models @pytest.fixture def proposal(): return models.Proposal(proposal_id='abc', pi='pi', title='title', pdf_url='pdf_url') def test_proposal_printing(proposal): assert repr(proposal) == '<Proposal: abc>' def test_proposal(): proposals = models.Proposal.create(['abc', 'def'], campaign=mock.MagicMock(), proposal_mapping=mock.MagicMock()) assert (proposals[0].proposal_id == 'abc' and proposals[1].proposal_id == 'def') def test_open_proposals_page(proposal): with mock.patch.object(proposal, 'campaign') as campaign: proposal.open_proposals_page() campaign.open_proposals_page.assert_called_once_with() def test_open_proposal(proposal): with mock.patch('k2catalogue.models.webbrowser.open') as mock_open: proposal.open_proposal() mock_open.assert_called_once_with('pdf_url') def test_open_proposal_without_url(proposal): proposal.pdf_url = None with mock.patch('k2catalogue.models.webbrowser.open') as mock_open: proposal.open_proposal() assert not mock_open.called def test_create_with_no_mapping(capsys): proposal_mapping = {} campaign = mock.Mock() proposal_ids = ['abc', ] models.Proposal.create(proposal_ids, campaign, proposal_mapping) out, err = capsys.readouterr() assert err == 'No proposal metadata for abc\n' @pytest.mark.parametrize('input,expected', [ ('GO2069_LC', True), ('G', False), ('LC_2007JJ43_TILE', False), ]) def test_valid_proposal(input, expected): assert models.Proposal.valid_proposal(input) == expected
import pytest try: from unittest import mock except ImportError: import mock from k2catalogue import models @pytest.fixture def proposal(): return models.Proposal(proposal_id='abc', pi='pi', title='title', pdf_url='pdf_url') def test_proposal_printing(proposal): assert repr(proposal) == '<Proposal: abc>' def test_proposal(): proposals = models.Proposal.create(['abc', 'def'], campaign=mock.MagicMock(), proposal_mapping=mock.MagicMock()) assert (proposals[0].proposal_id == 'abc' and proposals[1].proposal_id == 'def') @pytest.mark.parametrize('input,expected', [ ('GO2069_LC', True), ('G', False), ('LC_2007JJ43_TILE', False), ]) def test_valid_proposal(input, expected): assert models.Proposal.valid_proposal(input) == expected Add more tests around the proposal model behaviourimport pytest try: from unittest import mock except ImportError: import mock from k2catalogue import models @pytest.fixture def proposal(): return models.Proposal(proposal_id='abc', pi='pi', title='title', pdf_url='pdf_url') def test_proposal_printing(proposal): assert repr(proposal) == '<Proposal: abc>' def test_proposal(): proposals = models.Proposal.create(['abc', 'def'], campaign=mock.MagicMock(), proposal_mapping=mock.MagicMock()) assert (proposals[0].proposal_id == 'abc' and proposals[1].proposal_id == 'def') def test_open_proposals_page(proposal): with mock.patch.object(proposal, 'campaign') as campaign: proposal.open_proposals_page() campaign.open_proposals_page.assert_called_once_with() def test_open_proposal(proposal): with mock.patch('k2catalogue.models.webbrowser.open') as mock_open: proposal.open_proposal() mock_open.assert_called_once_with('pdf_url') def test_open_proposal_without_url(proposal): proposal.pdf_url = None with mock.patch('k2catalogue.models.webbrowser.open') as mock_open: proposal.open_proposal() assert not mock_open.called def test_create_with_no_mapping(capsys): proposal_mapping = {} campaign = mock.Mock() proposal_ids = ['abc', ] models.Proposal.create(proposal_ids, campaign, proposal_mapping) out, err = capsys.readouterr() assert err == 'No proposal metadata for abc\n' @pytest.mark.parametrize('input,expected', [ ('GO2069_LC', True), ('G', False), ('LC_2007JJ43_TILE', False), ]) def test_valid_proposal(input, expected): assert models.Proposal.valid_proposal(input) == expected
<commit_before>import pytest try: from unittest import mock except ImportError: import mock from k2catalogue import models @pytest.fixture def proposal(): return models.Proposal(proposal_id='abc', pi='pi', title='title', pdf_url='pdf_url') def test_proposal_printing(proposal): assert repr(proposal) == '<Proposal: abc>' def test_proposal(): proposals = models.Proposal.create(['abc', 'def'], campaign=mock.MagicMock(), proposal_mapping=mock.MagicMock()) assert (proposals[0].proposal_id == 'abc' and proposals[1].proposal_id == 'def') @pytest.mark.parametrize('input,expected', [ ('GO2069_LC', True), ('G', False), ('LC_2007JJ43_TILE', False), ]) def test_valid_proposal(input, expected): assert models.Proposal.valid_proposal(input) == expected <commit_msg>Add more tests around the proposal model behaviour<commit_after>import pytest try: from unittest import mock except ImportError: import mock from k2catalogue import models @pytest.fixture def proposal(): return models.Proposal(proposal_id='abc', pi='pi', title='title', pdf_url='pdf_url') def test_proposal_printing(proposal): assert repr(proposal) == '<Proposal: abc>' def test_proposal(): proposals = models.Proposal.create(['abc', 'def'], campaign=mock.MagicMock(), proposal_mapping=mock.MagicMock()) assert (proposals[0].proposal_id == 'abc' and proposals[1].proposal_id == 'def') def test_open_proposals_page(proposal): with mock.patch.object(proposal, 'campaign') as campaign: proposal.open_proposals_page() campaign.open_proposals_page.assert_called_once_with() def test_open_proposal(proposal): with mock.patch('k2catalogue.models.webbrowser.open') as mock_open: proposal.open_proposal() mock_open.assert_called_once_with('pdf_url') def test_open_proposal_without_url(proposal): proposal.pdf_url = None with mock.patch('k2catalogue.models.webbrowser.open') as mock_open: proposal.open_proposal() assert not mock_open.called def test_create_with_no_mapping(capsys): proposal_mapping = {} campaign = mock.Mock() proposal_ids = ['abc', ] models.Proposal.create(proposal_ids, campaign, proposal_mapping) out, err = capsys.readouterr() assert err == 'No proposal metadata for abc\n' @pytest.mark.parametrize('input,expected', [ ('GO2069_LC', True), ('G', False), ('LC_2007JJ43_TILE', False), ]) def test_valid_proposal(input, expected): assert models.Proposal.valid_proposal(input) == expected
be871f0c6e027e2e51233600c49a502dc6b9a15b
calaccess_raw/__init__.py
calaccess_raw/__init__.py
import os from django.conf import settings from django.db.models.loading import get_models, get_app default_app_config = 'calaccess_raw.apps.CalAccessRawConfig' def get_download_directory(): """ Returns the download directory where we will store downloaded data. """ if hasattr(settings, 'CALACCESS_DOWNLOAD_DIR'): return getattr(settings, 'CALACCESS_DOWNLOAD_DIR') elif hasattr(settings, 'BASE_DIR'): return os.path.join(getattr(settings, 'BASE_DIR'), 'data') raise ValueError("CAL-ACCESS download directory not configured. Set either \ CALACCESS_DOWNLOAD_DIR or BASE_DIR in settings.py") def get_test_download_directory(): """ Returns the download directory where we will store test data. """ if hasattr(settings, 'CALACCESS_TEST_DOWNLOAD_DIR'): return getattr(settings, 'CALACCESS_TEST_DOWNLOAD_DIR') elif hasattr(settings, 'BASE_DIR'): return os.path.join(getattr(settings, 'BASE_DIR'), 'test-data') raise ValueError("CAL-ACCESS test download directory not configured. \ Set either CALACCESS_TEST_DOWNLOAD_DIR or BASE_DIR in settings.py") def get_model_list(): """ Returns a model list with all the data tables in this application """ return get_models(get_app("calaccess_raw"))
import os from django.conf import settings from django.apps import apps as django_apps default_app_config = 'calaccess_raw.apps.CalAccessRawConfig' def get_download_directory(): """ Returns the download directory where we will store downloaded data. """ if hasattr(settings, 'CALACCESS_DOWNLOAD_DIR'): return getattr(settings, 'CALACCESS_DOWNLOAD_DIR') elif hasattr(settings, 'BASE_DIR'): return os.path.join(getattr(settings, 'BASE_DIR'), 'data') raise ValueError("CAL-ACCESS download directory not configured. Set either \ CALACCESS_DOWNLOAD_DIR or BASE_DIR in settings.py") def get_test_download_directory(): """ Returns the download directory where we will store test data. """ if hasattr(settings, 'CALACCESS_TEST_DOWNLOAD_DIR'): return getattr(settings, 'CALACCESS_TEST_DOWNLOAD_DIR') elif hasattr(settings, 'BASE_DIR'): return os.path.join(getattr(settings, 'BASE_DIR'), 'test-data') raise ValueError("CAL-ACCESS test download directory not configured. \ Set either CALACCESS_TEST_DOWNLOAD_DIR or BASE_DIR in settings.py") def get_model_list(): """ Returns a model list with all the data tables in this application """ return django_apps.get_app_config('calaccess_raw').get_models()
Update get_model_list to use app config rather than deprecated django.db.models.loading functions
Update get_model_list to use app config rather than deprecated django.db.models.loading functions
Python
mit
dcloud/django-calaccess-raw-data
import os from django.conf import settings from django.db.models.loading import get_models, get_app default_app_config = 'calaccess_raw.apps.CalAccessRawConfig' def get_download_directory(): """ Returns the download directory where we will store downloaded data. """ if hasattr(settings, 'CALACCESS_DOWNLOAD_DIR'): return getattr(settings, 'CALACCESS_DOWNLOAD_DIR') elif hasattr(settings, 'BASE_DIR'): return os.path.join(getattr(settings, 'BASE_DIR'), 'data') raise ValueError("CAL-ACCESS download directory not configured. Set either \ CALACCESS_DOWNLOAD_DIR or BASE_DIR in settings.py") def get_test_download_directory(): """ Returns the download directory where we will store test data. """ if hasattr(settings, 'CALACCESS_TEST_DOWNLOAD_DIR'): return getattr(settings, 'CALACCESS_TEST_DOWNLOAD_DIR') elif hasattr(settings, 'BASE_DIR'): return os.path.join(getattr(settings, 'BASE_DIR'), 'test-data') raise ValueError("CAL-ACCESS test download directory not configured. \ Set either CALACCESS_TEST_DOWNLOAD_DIR or BASE_DIR in settings.py") def get_model_list(): """ Returns a model list with all the data tables in this application """ return get_models(get_app("calaccess_raw")) Update get_model_list to use app config rather than deprecated django.db.models.loading functions
import os from django.conf import settings from django.apps import apps as django_apps default_app_config = 'calaccess_raw.apps.CalAccessRawConfig' def get_download_directory(): """ Returns the download directory where we will store downloaded data. """ if hasattr(settings, 'CALACCESS_DOWNLOAD_DIR'): return getattr(settings, 'CALACCESS_DOWNLOAD_DIR') elif hasattr(settings, 'BASE_DIR'): return os.path.join(getattr(settings, 'BASE_DIR'), 'data') raise ValueError("CAL-ACCESS download directory not configured. Set either \ CALACCESS_DOWNLOAD_DIR or BASE_DIR in settings.py") def get_test_download_directory(): """ Returns the download directory where we will store test data. """ if hasattr(settings, 'CALACCESS_TEST_DOWNLOAD_DIR'): return getattr(settings, 'CALACCESS_TEST_DOWNLOAD_DIR') elif hasattr(settings, 'BASE_DIR'): return os.path.join(getattr(settings, 'BASE_DIR'), 'test-data') raise ValueError("CAL-ACCESS test download directory not configured. \ Set either CALACCESS_TEST_DOWNLOAD_DIR or BASE_DIR in settings.py") def get_model_list(): """ Returns a model list with all the data tables in this application """ return django_apps.get_app_config('calaccess_raw').get_models()
<commit_before>import os from django.conf import settings from django.db.models.loading import get_models, get_app default_app_config = 'calaccess_raw.apps.CalAccessRawConfig' def get_download_directory(): """ Returns the download directory where we will store downloaded data. """ if hasattr(settings, 'CALACCESS_DOWNLOAD_DIR'): return getattr(settings, 'CALACCESS_DOWNLOAD_DIR') elif hasattr(settings, 'BASE_DIR'): return os.path.join(getattr(settings, 'BASE_DIR'), 'data') raise ValueError("CAL-ACCESS download directory not configured. Set either \ CALACCESS_DOWNLOAD_DIR or BASE_DIR in settings.py") def get_test_download_directory(): """ Returns the download directory where we will store test data. """ if hasattr(settings, 'CALACCESS_TEST_DOWNLOAD_DIR'): return getattr(settings, 'CALACCESS_TEST_DOWNLOAD_DIR') elif hasattr(settings, 'BASE_DIR'): return os.path.join(getattr(settings, 'BASE_DIR'), 'test-data') raise ValueError("CAL-ACCESS test download directory not configured. \ Set either CALACCESS_TEST_DOWNLOAD_DIR or BASE_DIR in settings.py") def get_model_list(): """ Returns a model list with all the data tables in this application """ return get_models(get_app("calaccess_raw")) <commit_msg>Update get_model_list to use app config rather than deprecated django.db.models.loading functions<commit_after>
import os from django.conf import settings from django.apps import apps as django_apps default_app_config = 'calaccess_raw.apps.CalAccessRawConfig' def get_download_directory(): """ Returns the download directory where we will store downloaded data. """ if hasattr(settings, 'CALACCESS_DOWNLOAD_DIR'): return getattr(settings, 'CALACCESS_DOWNLOAD_DIR') elif hasattr(settings, 'BASE_DIR'): return os.path.join(getattr(settings, 'BASE_DIR'), 'data') raise ValueError("CAL-ACCESS download directory not configured. Set either \ CALACCESS_DOWNLOAD_DIR or BASE_DIR in settings.py") def get_test_download_directory(): """ Returns the download directory where we will store test data. """ if hasattr(settings, 'CALACCESS_TEST_DOWNLOAD_DIR'): return getattr(settings, 'CALACCESS_TEST_DOWNLOAD_DIR') elif hasattr(settings, 'BASE_DIR'): return os.path.join(getattr(settings, 'BASE_DIR'), 'test-data') raise ValueError("CAL-ACCESS test download directory not configured. \ Set either CALACCESS_TEST_DOWNLOAD_DIR or BASE_DIR in settings.py") def get_model_list(): """ Returns a model list with all the data tables in this application """ return django_apps.get_app_config('calaccess_raw').get_models()
import os from django.conf import settings from django.db.models.loading import get_models, get_app default_app_config = 'calaccess_raw.apps.CalAccessRawConfig' def get_download_directory(): """ Returns the download directory where we will store downloaded data. """ if hasattr(settings, 'CALACCESS_DOWNLOAD_DIR'): return getattr(settings, 'CALACCESS_DOWNLOAD_DIR') elif hasattr(settings, 'BASE_DIR'): return os.path.join(getattr(settings, 'BASE_DIR'), 'data') raise ValueError("CAL-ACCESS download directory not configured. Set either \ CALACCESS_DOWNLOAD_DIR or BASE_DIR in settings.py") def get_test_download_directory(): """ Returns the download directory where we will store test data. """ if hasattr(settings, 'CALACCESS_TEST_DOWNLOAD_DIR'): return getattr(settings, 'CALACCESS_TEST_DOWNLOAD_DIR') elif hasattr(settings, 'BASE_DIR'): return os.path.join(getattr(settings, 'BASE_DIR'), 'test-data') raise ValueError("CAL-ACCESS test download directory not configured. \ Set either CALACCESS_TEST_DOWNLOAD_DIR or BASE_DIR in settings.py") def get_model_list(): """ Returns a model list with all the data tables in this application """ return get_models(get_app("calaccess_raw")) Update get_model_list to use app config rather than deprecated django.db.models.loading functionsimport os from django.conf import settings from django.apps import apps as django_apps default_app_config = 'calaccess_raw.apps.CalAccessRawConfig' def get_download_directory(): """ Returns the download directory where we will store downloaded data. """ if hasattr(settings, 'CALACCESS_DOWNLOAD_DIR'): return getattr(settings, 'CALACCESS_DOWNLOAD_DIR') elif hasattr(settings, 'BASE_DIR'): return os.path.join(getattr(settings, 'BASE_DIR'), 'data') raise ValueError("CAL-ACCESS download directory not configured. Set either \ CALACCESS_DOWNLOAD_DIR or BASE_DIR in settings.py") def get_test_download_directory(): """ Returns the download directory where we will store test data. """ if hasattr(settings, 'CALACCESS_TEST_DOWNLOAD_DIR'): return getattr(settings, 'CALACCESS_TEST_DOWNLOAD_DIR') elif hasattr(settings, 'BASE_DIR'): return os.path.join(getattr(settings, 'BASE_DIR'), 'test-data') raise ValueError("CAL-ACCESS test download directory not configured. \ Set either CALACCESS_TEST_DOWNLOAD_DIR or BASE_DIR in settings.py") def get_model_list(): """ Returns a model list with all the data tables in this application """ return django_apps.get_app_config('calaccess_raw').get_models()
<commit_before>import os from django.conf import settings from django.db.models.loading import get_models, get_app default_app_config = 'calaccess_raw.apps.CalAccessRawConfig' def get_download_directory(): """ Returns the download directory where we will store downloaded data. """ if hasattr(settings, 'CALACCESS_DOWNLOAD_DIR'): return getattr(settings, 'CALACCESS_DOWNLOAD_DIR') elif hasattr(settings, 'BASE_DIR'): return os.path.join(getattr(settings, 'BASE_DIR'), 'data') raise ValueError("CAL-ACCESS download directory not configured. Set either \ CALACCESS_DOWNLOAD_DIR or BASE_DIR in settings.py") def get_test_download_directory(): """ Returns the download directory where we will store test data. """ if hasattr(settings, 'CALACCESS_TEST_DOWNLOAD_DIR'): return getattr(settings, 'CALACCESS_TEST_DOWNLOAD_DIR') elif hasattr(settings, 'BASE_DIR'): return os.path.join(getattr(settings, 'BASE_DIR'), 'test-data') raise ValueError("CAL-ACCESS test download directory not configured. \ Set either CALACCESS_TEST_DOWNLOAD_DIR or BASE_DIR in settings.py") def get_model_list(): """ Returns a model list with all the data tables in this application """ return get_models(get_app("calaccess_raw")) <commit_msg>Update get_model_list to use app config rather than deprecated django.db.models.loading functions<commit_after>import os from django.conf import settings from django.apps import apps as django_apps default_app_config = 'calaccess_raw.apps.CalAccessRawConfig' def get_download_directory(): """ Returns the download directory where we will store downloaded data. """ if hasattr(settings, 'CALACCESS_DOWNLOAD_DIR'): return getattr(settings, 'CALACCESS_DOWNLOAD_DIR') elif hasattr(settings, 'BASE_DIR'): return os.path.join(getattr(settings, 'BASE_DIR'), 'data') raise ValueError("CAL-ACCESS download directory not configured. Set either \ CALACCESS_DOWNLOAD_DIR or BASE_DIR in settings.py") def get_test_download_directory(): """ Returns the download directory where we will store test data. """ if hasattr(settings, 'CALACCESS_TEST_DOWNLOAD_DIR'): return getattr(settings, 'CALACCESS_TEST_DOWNLOAD_DIR') elif hasattr(settings, 'BASE_DIR'): return os.path.join(getattr(settings, 'BASE_DIR'), 'test-data') raise ValueError("CAL-ACCESS test download directory not configured. \ Set either CALACCESS_TEST_DOWNLOAD_DIR or BASE_DIR in settings.py") def get_model_list(): """ Returns a model list with all the data tables in this application """ return django_apps.get_app_config('calaccess_raw').get_models()
0eca195f9c29824f354cae53a4005f04c67eb86f
nodeconductor/cloud/views.py
nodeconductor/cloud/views.py
from rest_framework import permissions as rf_permissions from rest_framework import exceptions from nodeconductor.core import viewsets from nodeconductor.cloud import models from nodeconductor.cloud import serializers from nodeconductor.structure import filters as structure_filters from nodeconductor.structure import models as structure_models class FlavorViewSet(viewsets.ReadOnlyModelViewSet): model = models.Flavor serializer_class = serializers.FlavorSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) class CloudViewSet(viewsets.ModelViewSet): model = models.Cloud serializer_class = serializers.CloudSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) permission_classes = (rf_permissions.IsAuthenticated, rf_permissions.DjangoObjectPermissions) def pre_save(self, cloud): super(CloudViewSet, self).pre_save(cloud) if not cloud.customer.roles.filter( permission_group__user=self.request.user, role_type=structure_models.CustomerRole.OWNER, ).exists(): raise exceptions.PermissionDenied()
from rest_framework import permissions as rf_permissions from rest_framework import exceptions from nodeconductor.core import viewsets from nodeconductor.cloud import models from nodeconductor.cloud import serializers from nodeconductor.structure import filters as structure_filters from nodeconductor.structure import models as structure_models class FlavorViewSet(viewsets.ReadOnlyModelViewSet): model = models.Flavor serializer_class = serializers.FlavorSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) class CloudViewSet(viewsets.ModelViewSet): queryset = models.Cloud.objects.all().prefetch_related('flavors') serializer_class = serializers.CloudSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) permission_classes = (rf_permissions.IsAuthenticated, rf_permissions.DjangoObjectPermissions) def pre_save(self, cloud): super(CloudViewSet, self).pre_save(cloud) if not cloud.customer.roles.filter( permission_group__user=self.request.user, role_type=structure_models.CustomerRole.OWNER, ).exists(): raise exceptions.PermissionDenied()
Optimize SQL queries used for fetching clouds
Optimize SQL queries used for fetching clouds
Python
mit
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
from rest_framework import permissions as rf_permissions from rest_framework import exceptions from nodeconductor.core import viewsets from nodeconductor.cloud import models from nodeconductor.cloud import serializers from nodeconductor.structure import filters as structure_filters from nodeconductor.structure import models as structure_models class FlavorViewSet(viewsets.ReadOnlyModelViewSet): model = models.Flavor serializer_class = serializers.FlavorSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) class CloudViewSet(viewsets.ModelViewSet): model = models.Cloud serializer_class = serializers.CloudSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) permission_classes = (rf_permissions.IsAuthenticated, rf_permissions.DjangoObjectPermissions) def pre_save(self, cloud): super(CloudViewSet, self).pre_save(cloud) if not cloud.customer.roles.filter( permission_group__user=self.request.user, role_type=structure_models.CustomerRole.OWNER, ).exists(): raise exceptions.PermissionDenied() Optimize SQL queries used for fetching clouds
from rest_framework import permissions as rf_permissions from rest_framework import exceptions from nodeconductor.core import viewsets from nodeconductor.cloud import models from nodeconductor.cloud import serializers from nodeconductor.structure import filters as structure_filters from nodeconductor.structure import models as structure_models class FlavorViewSet(viewsets.ReadOnlyModelViewSet): model = models.Flavor serializer_class = serializers.FlavorSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) class CloudViewSet(viewsets.ModelViewSet): queryset = models.Cloud.objects.all().prefetch_related('flavors') serializer_class = serializers.CloudSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) permission_classes = (rf_permissions.IsAuthenticated, rf_permissions.DjangoObjectPermissions) def pre_save(self, cloud): super(CloudViewSet, self).pre_save(cloud) if not cloud.customer.roles.filter( permission_group__user=self.request.user, role_type=structure_models.CustomerRole.OWNER, ).exists(): raise exceptions.PermissionDenied()
<commit_before>from rest_framework import permissions as rf_permissions from rest_framework import exceptions from nodeconductor.core import viewsets from nodeconductor.cloud import models from nodeconductor.cloud import serializers from nodeconductor.structure import filters as structure_filters from nodeconductor.structure import models as structure_models class FlavorViewSet(viewsets.ReadOnlyModelViewSet): model = models.Flavor serializer_class = serializers.FlavorSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) class CloudViewSet(viewsets.ModelViewSet): model = models.Cloud serializer_class = serializers.CloudSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) permission_classes = (rf_permissions.IsAuthenticated, rf_permissions.DjangoObjectPermissions) def pre_save(self, cloud): super(CloudViewSet, self).pre_save(cloud) if not cloud.customer.roles.filter( permission_group__user=self.request.user, role_type=structure_models.CustomerRole.OWNER, ).exists(): raise exceptions.PermissionDenied() <commit_msg>Optimize SQL queries used for fetching clouds<commit_after>
from rest_framework import permissions as rf_permissions from rest_framework import exceptions from nodeconductor.core import viewsets from nodeconductor.cloud import models from nodeconductor.cloud import serializers from nodeconductor.structure import filters as structure_filters from nodeconductor.structure import models as structure_models class FlavorViewSet(viewsets.ReadOnlyModelViewSet): model = models.Flavor serializer_class = serializers.FlavorSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) class CloudViewSet(viewsets.ModelViewSet): queryset = models.Cloud.objects.all().prefetch_related('flavors') serializer_class = serializers.CloudSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) permission_classes = (rf_permissions.IsAuthenticated, rf_permissions.DjangoObjectPermissions) def pre_save(self, cloud): super(CloudViewSet, self).pre_save(cloud) if not cloud.customer.roles.filter( permission_group__user=self.request.user, role_type=structure_models.CustomerRole.OWNER, ).exists(): raise exceptions.PermissionDenied()
from rest_framework import permissions as rf_permissions from rest_framework import exceptions from nodeconductor.core import viewsets from nodeconductor.cloud import models from nodeconductor.cloud import serializers from nodeconductor.structure import filters as structure_filters from nodeconductor.structure import models as structure_models class FlavorViewSet(viewsets.ReadOnlyModelViewSet): model = models.Flavor serializer_class = serializers.FlavorSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) class CloudViewSet(viewsets.ModelViewSet): model = models.Cloud serializer_class = serializers.CloudSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) permission_classes = (rf_permissions.IsAuthenticated, rf_permissions.DjangoObjectPermissions) def pre_save(self, cloud): super(CloudViewSet, self).pre_save(cloud) if not cloud.customer.roles.filter( permission_group__user=self.request.user, role_type=structure_models.CustomerRole.OWNER, ).exists(): raise exceptions.PermissionDenied() Optimize SQL queries used for fetching cloudsfrom rest_framework import permissions as rf_permissions from rest_framework import exceptions from nodeconductor.core import viewsets from nodeconductor.cloud import models from nodeconductor.cloud import serializers from nodeconductor.structure import filters as structure_filters from nodeconductor.structure import models as structure_models class FlavorViewSet(viewsets.ReadOnlyModelViewSet): model = models.Flavor serializer_class = serializers.FlavorSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) class CloudViewSet(viewsets.ModelViewSet): queryset = models.Cloud.objects.all().prefetch_related('flavors') serializer_class = serializers.CloudSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) permission_classes = (rf_permissions.IsAuthenticated, rf_permissions.DjangoObjectPermissions) def pre_save(self, cloud): super(CloudViewSet, self).pre_save(cloud) if not cloud.customer.roles.filter( permission_group__user=self.request.user, role_type=structure_models.CustomerRole.OWNER, ).exists(): raise exceptions.PermissionDenied()
<commit_before>from rest_framework import permissions as rf_permissions from rest_framework import exceptions from nodeconductor.core import viewsets from nodeconductor.cloud import models from nodeconductor.cloud import serializers from nodeconductor.structure import filters as structure_filters from nodeconductor.structure import models as structure_models class FlavorViewSet(viewsets.ReadOnlyModelViewSet): model = models.Flavor serializer_class = serializers.FlavorSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) class CloudViewSet(viewsets.ModelViewSet): model = models.Cloud serializer_class = serializers.CloudSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) permission_classes = (rf_permissions.IsAuthenticated, rf_permissions.DjangoObjectPermissions) def pre_save(self, cloud): super(CloudViewSet, self).pre_save(cloud) if not cloud.customer.roles.filter( permission_group__user=self.request.user, role_type=structure_models.CustomerRole.OWNER, ).exists(): raise exceptions.PermissionDenied() <commit_msg>Optimize SQL queries used for fetching clouds<commit_after>from rest_framework import permissions as rf_permissions from rest_framework import exceptions from nodeconductor.core import viewsets from nodeconductor.cloud import models from nodeconductor.cloud import serializers from nodeconductor.structure import filters as structure_filters from nodeconductor.structure import models as structure_models class FlavorViewSet(viewsets.ReadOnlyModelViewSet): model = models.Flavor serializer_class = serializers.FlavorSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) class CloudViewSet(viewsets.ModelViewSet): queryset = models.Cloud.objects.all().prefetch_related('flavors') serializer_class = serializers.CloudSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) permission_classes = (rf_permissions.IsAuthenticated, rf_permissions.DjangoObjectPermissions) def pre_save(self, cloud): super(CloudViewSet, self).pre_save(cloud) if not cloud.customer.roles.filter( permission_group__user=self.request.user, role_type=structure_models.CustomerRole.OWNER, ).exists(): raise exceptions.PermissionDenied()
09a27308c97ae45992df0128ac1743147658fb98
tests/unit/test_saysomething.py
tests/unit/test_saysomething.py
import pytest from pmxbot import saysomething class TestMongoDBChains: @pytest.fixture def mongodb_chains(self, request, mongodb_uri): k = saysomething.MongoDBChains.from_URI(mongodb_uri) k.db = k.db.database.connection[ k.db.database.name + '_test' ][k.db.name] request.addfinalizer(k.db.drop) return k def test_basic_usage(self, mongodb_chains): chains = mongodb_chains chains.feed('foo: what did you say?') # because there's only one message, that's the one you'll get assert chains.get() == 'foo: what did you say?' def test_seed(self, mongodb_chains): chains = mongodb_chains chains.feed('bar: what about if you have a seed? What happens then?') msg = chains.get('seed?') assert msg == 'What happens then?'
import itertools import functools from more_itertools import recipes import pytest from pmxbot import saysomething class TestMongoDBChains: @pytest.fixture def mongodb_chains(self, request, mongodb_uri): k = saysomething.MongoDBChains.from_URI(mongodb_uri) k.db = k.db.database.connection[ k.db.database.name + '_test' ][k.db.name] request.addfinalizer(k.db.drop) return k def test_basic_usage(self, mongodb_chains): chains = mongodb_chains chains.feed('foo: what did you say?') # because there's only one message, that's the one you'll get assert chains.get() == 'foo: what did you say?' def test_seed(self, mongodb_chains): chains = mongodb_chains chains.feed('bar: what about if you have a seed? What happens then?') msg = chains.get('seed?') assert msg == 'What happens then?' def test_non_deterministic_traversal(self, mongodb_chains): chains = mongodb_chains chains.feed('a quick brown fox') chains.feed('a cute white hen') chains.feed('three white boys') # A seed of the word 'a' should lead to several phrases from_a = functools.partial(chains.get, 'a') msgs = recipes.repeatfunc(from_a) # prevent infinite results msgs = itertools.islice(msgs, 1000) # at least one of those thousand messages should # include 'a quick brown fox', 'a cute white hen', # and 'a cute white boys' assert any('fox' in msg for msg in msgs) assert any('hen' in msg for msg in msgs) assert any('boys' in msg for msg in msgs) assert not any('three' in msg for msg in msgs)
Add test capturing expected behavior under more complex inputs.
Add test capturing expected behavior under more complex inputs.
Python
mit
yougov/pmxbot,yougov/pmxbot,yougov/pmxbot
3df9cdb0f96e68fb6870f3ee261cd206d38fb787
octane/tests/test_app.py
octane/tests/test_app.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import io

from octane import app as o_app


def test_help():
    out, err = io.BytesIO(), io.BytesIO()
    app = o_app.OctaneApp(stdin=io.BytesIO(), stdout=out, stderr=err)
    try:
        app.run(["--help"])
    except SystemExit as e:
        assert e.code == 0
    assert not err.getvalue()
    assert 'Could not' not in out.getvalue()
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import io

import pytest

from octane import app as o_app


@pytest.fixture
def octane_app():
    return o_app.OctaneApp(stdin=io.BytesIO(), stdout=io.BytesIO(),
                           stderr=io.BytesIO())


def test_help(octane_app):
    try:
        octane_app.run(["--help"])
    except SystemExit as e:
        assert e.code == 0
    assert not octane_app.stderr.getvalue()
    assert 'Could not' not in octane_app.stdout.getvalue()
Refactor test to use cool py.test's fixture
Refactor test to use cool py.test's fixture
Python
apache-2.0
Mirantis/octane,stackforge/fuel-octane,Mirantis/octane,stackforge/fuel-octane
9866d25465a0c95ceeb0d5bc6502deb4bab582e0
setuptools_scm_git_archive/__init__.py
setuptools_scm_git_archive/__init__.py
from os.path import join
import re

from setuptools_scm.utils import data_from_mime, trace
from setuptools_scm.version import meta, tags_to_versions

tag_re = re.compile(r'(?<=\btag: )([^,]+)\b')


def archival_to_version(data):
    trace('data', data)
    versions = tags_to_versions(tag_re.findall(data.get('ref-names', '')))
    if versions:
        return meta(versions[0])


def parse(root):
    archival = join(root, '.git_archival.txt')
    data = data_from_mime(archival)
    return archival_to_version(data)
from os.path import join
import re

from setuptools_scm.utils import data_from_mime, trace
from setuptools_scm.version import meta, tags_to_versions

tag_re = re.compile(r'(?<=\btag: )([^,]+)\b')


def archival_to_version(data):
    trace('data', data)
    versions = tags_to_versions(tag_re.findall(data.get('ref-names', '')))
    if versions:
        return meta(next(versions))


def parse(root):
    archival = join(root, '.git_archival.txt')
    data = data_from_mime(archival)
    return archival_to_version(data)
Update to support python 3.
Update to support python 3.
Python
mit
Changaco/setuptools_scm_git_archive
75606e2b13a29a5d68894eda86dbede8292fb0c8
website/project/taxonomies/__init__.py
website/project/taxonomies/__init__.py
from modularodm import fields

from framework.mongo import (
    ObjectId,
    StoredObject,
    utils as mongo_utils
)

from website.util import api_v2_url


@mongo_utils.unique_on(['text'])
class Subject(StoredObject):
    _id = fields.StringField(primary=True, default=lambda: str(ObjectId()))
    text = fields.StringField(required=True)
    parents = fields.ForeignField('subject', list=True)

    @property
    def absolute_api_v2_url(self):
        return api_v2_url('taxonomies/{}/'.format(self._id))

    def get_absolute_url(self):
        return self.absolute_api_v2_url
import pymongo

from modularodm import fields

from framework.mongo import (
    ObjectId,
    StoredObject,
    utils as mongo_utils
)

from website.util import api_v2_url


@mongo_utils.unique_on(['text'])
class Subject(StoredObject):
    __indices__ = [
        {
            'unique': True,
            'key_or_list': [
                ('text', pymongo.DESCENDING),
            ]
        }
    ]

    _id = fields.StringField(primary=True, default=lambda: str(ObjectId()))
    text = fields.StringField(required=True)
    parents = fields.ForeignField('subject', list=True)

    @property
    def absolute_api_v2_url(self):
        return api_v2_url('taxonomies/{}/'.format(self._id))

    def get_absolute_url(self):
        return self.absolute_api_v2_url
Add unique index on the subject model for @chrisseto
Add unique index on the subject model for @chrisseto
Python
apache-2.0
hmoco/osf.io,alexschiller/osf.io,CenterForOpenScience/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,aaxelb/osf.io,samchrisinger/osf.io,HalcyonChimera/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,cwisecarver/osf.io,Johnetordoff/osf.io,crcresearch/osf.io,chrisseto/osf.io,alexschiller/osf.io,aaxelb/osf.io,emetsger/osf.io,Johnetordoff/osf.io,erinspace/osf.io,erinspace/osf.io,sloria/osf.io,mfraezz/osf.io,mfraezz/osf.io,rdhyee/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,mluo613/osf.io,TomBaxter/osf.io,baylee-d/osf.io,samchrisinger/osf.io,mluo613/osf.io,icereval/osf.io,chrisseto/osf.io,caseyrollins/osf.io,sloria/osf.io,laurenrevere/osf.io,cwisecarver/osf.io,binoculars/osf.io,adlius/osf.io,caseyrollins/osf.io,samchrisinger/osf.io,saradbowman/osf.io,monikagrabowska/osf.io,mattclark/osf.io,mluo613/osf.io,binoculars/osf.io,alexschiller/osf.io,alexschiller/osf.io,TomBaxter/osf.io,sloria/osf.io,monikagrabowska/osf.io,aaxelb/osf.io,laurenrevere/osf.io,chrisseto/osf.io,chrisseto/osf.io,emetsger/osf.io,acshi/osf.io,adlius/osf.io,brianjgeiger/osf.io,icereval/osf.io,binoculars/osf.io,CenterForOpenScience/osf.io,cwisecarver/osf.io,leb2dg/osf.io,felliott/osf.io,felliott/osf.io,aaxelb/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,chennan47/osf.io,caneruguz/osf.io,monikagrabowska/osf.io,mattclark/osf.io,caneruguz/osf.io,erinspace/osf.io,pattisdr/osf.io,cslzchen/osf.io,icereval/osf.io,acshi/osf.io,mluo613/osf.io,cslzchen/osf.io,mfraezz/osf.io,felliott/osf.io,emetsger/osf.io,hmoco/osf.io,HalcyonChimera/osf.io,Nesiehr/osf.io,laurenrevere/osf.io,crcresearch/osf.io,cslzchen/osf.io,hmoco/osf.io,Nesiehr/osf.io,rdhyee/osf.io,acshi/osf.io,acshi/osf.io,Johnetordoff/osf.io,TomBaxter/osf.io,mluo613/osf.io,crcresearch/osf.io,Nesiehr/osf.io,emetsger/osf.io,leb2dg/osf.io,samchrisinger/osf.io,acshi/osf.io,Nesiehr/osf.io,adlius/osf.io,brianjgeiger/osf.io,leb2dg/osf.io,rdhyee/osf.io,felliott/osf.io,brianjgeiger/osf.io,alexschiller/osf.io,caneruguz/osf.io,pattisdr/osf.io,adlius/osf.io,chennan47/osf.io,mfraezz/osf.io,baylee-d/osf.io,caneruguz/osf.io,chennan47/osf.io,monikagrabowska/osf.io,monikagrabowska/osf.io,leb2dg/osf.io,rdhyee/osf.io,hmoco/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,mattclark/osf.io
57f8715b8a5ec74efdf5c386226f3e416f7df9e5
python_practice/numpy_exercise2.py
python_practice/numpy_exercise2.py
import numpy as np

Matrix_A = np.array( [[1,1],[0,1]] )
Matrix_B = np.array( [[2,0],[3,4]] )

print Matrix_A*Matrix_B
print Matrix_A.dot(Matrix_B)
print np.dot(Matrix_A, Matrix_B)

Matrix_C = np.ones((2,3), dtype=int)
Matrix_C *= 3
print Matrix_C

Matrix_D = np.ones((2,3), dtype=int)
print Matrix_C+Matrix_D
import numpy as np

Matrix_A = np.array( [[1,1],[0,1]] )
Matrix_B = np.array( [[2,0],[3,4]] )

print Matrix_A*Matrix_B
print Matrix_A.dot(Matrix_B)
print np.dot(Matrix_A, Matrix_B)

Matrix_C = np.ones((2,3), dtype=int)
Matrix_C *= 3
print Matrix_C

Matrix_D = np.ones((2,3), dtype=int)
print Matrix_C+Matrix_D

Matrix_E = np.arange(12).reshape(3,4)
print Matrix_E
print Matrix_E[2,3]
print Matrix_E[ : , 1]
print Matrix_E[1 , : ]
Add get the element by col and row
Add get the element by col and row
Python
mit
jeremykid/FunAlgorithm,jeremykid/FunAlgorithm,jeremykid/FunAlgorithm,jeremykid/FunAlgorithm
98c1875d544cd3287b1df91f6216f57d09b93bdc
waterbutler/tasks/move.py
waterbutler/tasks/move.py
import asyncio

from waterbutler.core import utils
from waterbutler.tasks import core
from waterbutler.tasks import settings


@core.celery_task
def move(src_bundle, dest_bundle):
    src_args, src_provider = src_bundle.pop('args'), utils.make_provider(**src_bundle.pop('provider'))
    dest_args, dest_provider = dest_bundle.pop('args'), utils.make_provider(**dest_bundle.pop('provider'))

    core.ensure_event_loop().run_until_complete(
        src_provider.move(dest_provider, src_args, dest_args)
    )

    # dest_provider.move()
    # stream = src_provider.download(**src_args)
    # progress = stream.ProgressStreamWriter(stream.size)
    # stream.add_writer(progress)
    # upload_task = asyncio.async(dest_provider.upload(stream, **dest_options))

    # @async.coroutine
    # def do_upload()
    #     while not upload_task.done():
    #         yield from asyncio.sleep(3)
    #         progress.progress
    #         # update redis
    #         # sleep x seconds
import os
import time

from waterbutler.core import utils
from waterbutler.tasks import core


@core.celery_task
def move(src_bundle, dest_bundle, callback_url, auth):
    src_args, src_provider = src_bundle.pop('args'), utils.make_provider(**src_bundle.pop('provider'))
    dest_args, dest_provider = dest_bundle.pop('args'), utils.make_provider(**dest_bundle.pop('provider'))

    metadata, _ = core.ensure_event_loop().run_until_complete(
        src_provider.move(dest_provider, src_args, dest_args)
    )

    if callback:
        callback(metadata)

    return (yield from utils.send_signed_request('PUT', callback_url, {
        'action': 'move',
        'source': {
            'path': self.json['source']['path'],
            'name': os.path.split(self.json['source']['path'])[1],
            'provider': source_provider.NAME,
        },
        'destination': {
            'path': data['path'],
            'name': data['name'],
            'provider': destination_provider.NAME,
        },
        'auth': auth['auth'],
        'time': time.time() + 60
    }))

    # dest_provider.move()
    # stream = src_provider.download(**src_args)
    # progress = stream.ProgressStreamWriter(stream.size)
    # stream.add_writer(progress)
    # upload_task = asyncio.async(dest_provider.upload(stream, **dest_options))

    # @async.coroutine
    # def do_upload()
    #     while not upload_task.done():
    #         yield from asyncio.sleep(3)
    #         progress.progress
    #         # update redis
    #         # sleep x seconds
Send callback when finished moving
Send callback when finished moving
Python
apache-2.0
rafaeldelucena/waterbutler,icereval/waterbutler,hmoco/waterbutler,RCOSDP/waterbutler,TomBaxter/waterbutler,Ghalko/waterbutler,chrisseto/waterbutler,Johnetordoff/waterbutler,cosenal/waterbutler,felliott/waterbutler,CenterForOpenScience/waterbutler,rdhyee/waterbutler,kwierman/waterbutler
2539f8adbe2b7deed2974c4245fd8087a8f05e65
wluopensource/osl_comments/models.py
wluopensource/osl_comments/models.py
from django.contrib.comments.models import Comment
from django.db import models


class OslComment(Comment):
    parent_comment = models.ForeignKey(Comment, blank=True, null=True,
        related_name='parent_comment')
    inline_to_object = models.BooleanField()
    edit_timestamp = models.DateTimeField(auto_now=True)
from django.contrib.comments.models import Comment
from django.contrib.comments.signals import comment_was_posted
from django.db import models


class OslComment(Comment):
    parent_comment = models.ForeignKey(Comment, blank=True, null=True,
        related_name='parent_comment')
    inline_to_object = models.BooleanField()
    edit_timestamp = models.DateTimeField(auto_now=True)


def comment_user_url_injection_handler(sender, **kwargs):
    if 'request' in kwargs and kwargs['request'].user.is_authenticated() and \
            'comment' in kwargs:
        comment = kwargs['comment']
        comment.url = comment.user.get_profile().url
        comment.save()

comment_was_posted.connect(comment_user_url_injection_handler)
Use signals to add authenticated user URL to comment when posted
Use signals to add authenticated user URL to comment when posted
Python
bsd-3-clause
jeffcharles/Open-Source-at-Laurier-Website,jeffcharles/Open-Source-at-Laurier-Website,jeffcharles/Open-Source-at-Laurier-Website,jeffcharles/Open-Source-at-Laurier-Website
bdb939e548afd96bfefacdecbec5e96541568053
app/__init__.py
app/__init__.py
from flask import Flask

import base64
import json

from config import config as configs
from flask.ext.elasticsearch import FlaskElasticsearch

from dmutils import init_app, flask_featureflags

feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()


def create_app(config_name):
    application = Flask(__name__)

    init_app(
        application,
        configs[config_name],
        feature_flags=feature_flags
    )

    if application.config['VCAP_SERVICES']:
        cf_services = json.loads(application.config['VCAP_SERVICES'])
        application.config['ELASTICSEARCH_HOST'] = cf_services['elasticsearch'][0]['credentials']['uri']

        with open(application.config['DM_ELASTICSEARCH_CERT_PATH'], 'wb') as es_certfile:
            es_certfile.write(base64.b64decode(cf_services['elasticsearch'][0]['credentials']['ca_certificate_base64']))

    elasticsearch_client.init_app(
        application,
        verify_certs=True,
        ca_certs=application.config['DM_ELASTICSEARCH_CERT_PATH']
    )

    from .main import main as main_blueprint
    from .status import status as status_blueprint

    application.register_blueprint(status_blueprint)
    application.register_blueprint(main_blueprint)

    return application
from flask import Flask

import base64
import json

from config import config as configs
from flask.ext.elasticsearch import FlaskElasticsearch

from dmutils import init_app, flask_featureflags

feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()


def create_app(config_name):
    application = Flask(__name__)

    init_app(
        application,
        configs[config_name],
        feature_flags=feature_flags
    )

    if application.config['VCAP_SERVICES']:
        cf_services = json.loads(application.config['VCAP_SERVICES'])
        application.config['ELASTICSEARCH_HOST'] = cf_services['elasticsearch'][0]['credentials']['uris']

        with open(application.config['DM_ELASTICSEARCH_CERT_PATH'], 'wb') as es_certfile:
            es_certfile.write(base64.b64decode(cf_services['elasticsearch'][0]['credentials']['ca_certificate_base64']))

    elasticsearch_client.init_app(
        application,
        verify_certs=True,
        ca_certs=application.config['DM_ELASTICSEARCH_CERT_PATH']
    )

    from .main import main as main_blueprint
    from .status import status as status_blueprint

    application.register_blueprint(status_blueprint)
    application.register_blueprint(main_blueprint)

    return application
Use multiple ES host uris field from PaaS config
Use multiple ES host uris field from PaaS config
Python
mit
alphagov/digitalmarketplace-search-api,alphagov/digitalmarketplace-search-api
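A note on the field being switched in the commit above: in a Cloud Foundry VCAP_SERVICES payload the uri credential is a single connection string, while uris is typically a list of them, which elasticsearch clients accept directly as a multi-host configuration. A minimal sketch of the lookup; the payload shape below is illustrative, not taken from the actual service binding:

import json

vcap = json.loads(
    '{"elasticsearch": [{"credentials":'
    ' {"uris": ["https://es-1:9200", "https://es-2:9200"]}}]}'
)
hosts = vcap['elasticsearch'][0]['credentials']['uris']
print(hosts)  # ['https://es-1:9200', 'https://es-2:9200']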
3ed06913ab26b44f133ef2e91ea2f626af72c996
comics/comics/gucomics.py
comics/comics/gucomics.py
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase


class ComicData(ComicDataBase):
    name = 'GU Comics'
    language = 'en'
    url = 'http://www.gucomics.com/'
    start_date = '2000-07-10'
    rights = 'Woody Hearn'


class Crawler(CrawlerBase):
    history_capable_date = '2000-07-10'
    schedule = 'Mo,We,Fr'
    time_zone = 'US/Eastern'

    def crawl(self, pub_date):
        page_url = 'http://www.gucomics.com/' + pub_date.strftime('%Y%m%d')
        page = self.parse_page(page_url)

        title = page.text('b')
        title = title.replace('"', '')
        title = title.strip()

        text = page.text('font[class="main"]')
        # If there is a --- the text after is not about the comic
        text = text[:text.find('---')]
        text = text.strip()

        url = 'http://www.gucomics.com/comics/' + pub_date.strftime('%Y/gu_%Y%m%d')+'.jpg'

        return CrawlerImage(url, title, text)
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase


class ComicData(ComicDataBase):
    name = 'GU Comics'
    language = 'en'
    url = 'http://www.gucomics.com/'
    start_date = '2000-07-10'
    rights = 'Woody Hearn'


class Crawler(CrawlerBase):
    history_capable_date = '2000-07-10'
    schedule = 'Mo,We,Fr'
    time_zone = 'US/Eastern'

    def crawl(self, pub_date):
        page_url = 'http://www.gucomics.com/' + pub_date.strftime('%Y%m%d')
        page = self.parse_page(page_url)

        title = page.text('b')
        title = title.replace('"', '')
        title = title.strip()

        text = page.text('font[class="main"]')
        # If there is a --- the text after is not about the comic
        text = text[:text.find('---')]
        text = text.strip()

        url = 'http://www.gucomics.com/comics/' + pub_date.strftime(
            '%Y/gu_%Y%m%d')+'.jpg'

        return CrawlerImage(url, title, text)
Split line to avoid flake8 warning
Split line to avoid flake8 warning
Python
agpl-3.0
datagutten/comics,jodal/comics
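The flake8 warning silenced by this commit is E501 (line longer than 79 characters); continuing the expression inside the open parenthesis avoids both the warning and a backslash continuation. A self-contained check of the split expression:

import datetime

pub_date = datetime.date(2000, 7, 10)
# Splitting after strftime's open paren keeps every physical line under 79
# characters while producing the same URL.
url = 'http://www.gucomics.com/comics/' + pub_date.strftime(
    '%Y/gu_%Y%m%d') + '.jpg'
print(url)  # http://www.gucomics.com/comics/2000/gu_20000710.jpg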
9742d86c802bc16759fdb6bc0de6c46eb316d01d
Lib/xml/__init__.py
Lib/xml/__init__.py
"""Core XML support for Python. This package contains three sub-packages: dom -- The W3C Document Object Model. This supports DOM Level 1 + Namespaces. parsers -- Python wrappers for XML parsers (currently only supports Expat). sax -- The Simple API for XML, developed by XML-Dev, led by David Megginson and ported to Python by Lars Marius Garshol. This supports the SAX 2 API. """ __all__ = ["dom", "parsers", "sax"] __version__ = "$Revision$"[1:-1].split()[1] _MINIMUM_XMLPLUS_VERSION = (0, 6, 1) try: import _xmlplus except ImportError: pass else: try: v = _xmlplus.version_info except AttributeError: # _xmlplue is too old; ignore it pass else: if v >= _MINIMUM_XMLPLUS_VERSION: import sys sys.modules[__name__] = _xmlplus else: del v
"""Core XML support for Python. This package contains three sub-packages: dom -- The W3C Document Object Model. This supports DOM Level 1 + Namespaces. parsers -- Python wrappers for XML parsers (currently only supports Expat). sax -- The Simple API for XML, developed by XML-Dev, led by David Megginson and ported to Python by Lars Marius Garshol. This supports the SAX 2 API. """ __all__ = ["dom", "parsers", "sax"] import string __version__ = string.split("$Revision$")[1] del string _MINIMUM_XMLPLUS_VERSION = (0, 6, 1) try: import _xmlplus except ImportError: pass else: try: v = _xmlplus.version_info except AttributeError: # _xmlplue is too old; ignore it pass else: if v >= _MINIMUM_XMLPLUS_VERSION: import sys sys.modules[__name__] = _xmlplus else: del v
Use the string module instead of string methods; this should still work with Python 1.5.2 for now.
Use the string module instead of string methods; this should still work with Python 1.5.2 for now.
Python
mit
sk-/python2.7-type-annotator
"""Core XML support for Python. This package contains three sub-packages: dom -- The W3C Document Object Model. This supports DOM Level 1 + Namespaces. parsers -- Python wrappers for XML parsers (currently only supports Expat). sax -- The Simple API for XML, developed by XML-Dev, led by David Megginson and ported to Python by Lars Marius Garshol. This supports the SAX 2 API. """ __all__ = ["dom", "parsers", "sax"] __version__ = "$Revision$"[1:-1].split()[1] _MINIMUM_XMLPLUS_VERSION = (0, 6, 1) try: import _xmlplus except ImportError: pass else: try: v = _xmlplus.version_info except AttributeError: # _xmlplue is too old; ignore it pass else: if v >= _MINIMUM_XMLPLUS_VERSION: import sys sys.modules[__name__] = _xmlplus else: del v Use the string module instead of string methods; this should still work with Python 1.5.2 for now.
"""Core XML support for Python. This package contains three sub-packages: dom -- The W3C Document Object Model. This supports DOM Level 1 + Namespaces. parsers -- Python wrappers for XML parsers (currently only supports Expat). sax -- The Simple API for XML, developed by XML-Dev, led by David Megginson and ported to Python by Lars Marius Garshol. This supports the SAX 2 API. """ __all__ = ["dom", "parsers", "sax"] import string __version__ = string.split("$Revision$")[1] del string _MINIMUM_XMLPLUS_VERSION = (0, 6, 1) try: import _xmlplus except ImportError: pass else: try: v = _xmlplus.version_info except AttributeError: # _xmlplue is too old; ignore it pass else: if v >= _MINIMUM_XMLPLUS_VERSION: import sys sys.modules[__name__] = _xmlplus else: del v
<commit_before>"""Core XML support for Python. This package contains three sub-packages: dom -- The W3C Document Object Model. This supports DOM Level 1 + Namespaces. parsers -- Python wrappers for XML parsers (currently only supports Expat). sax -- The Simple API for XML, developed by XML-Dev, led by David Megginson and ported to Python by Lars Marius Garshol. This supports the SAX 2 API. """ __all__ = ["dom", "parsers", "sax"] __version__ = "$Revision$"[1:-1].split()[1] _MINIMUM_XMLPLUS_VERSION = (0, 6, 1) try: import _xmlplus except ImportError: pass else: try: v = _xmlplus.version_info except AttributeError: # _xmlplue is too old; ignore it pass else: if v >= _MINIMUM_XMLPLUS_VERSION: import sys sys.modules[__name__] = _xmlplus else: del v <commit_msg>Use the string module instead of string methods; this should still work with Python 1.5.2 for now.<commit_after>
"""Core XML support for Python. This package contains three sub-packages: dom -- The W3C Document Object Model. This supports DOM Level 1 + Namespaces. parsers -- Python wrappers for XML parsers (currently only supports Expat). sax -- The Simple API for XML, developed by XML-Dev, led by David Megginson and ported to Python by Lars Marius Garshol. This supports the SAX 2 API. """ __all__ = ["dom", "parsers", "sax"] import string __version__ = string.split("$Revision$")[1] del string _MINIMUM_XMLPLUS_VERSION = (0, 6, 1) try: import _xmlplus except ImportError: pass else: try: v = _xmlplus.version_info except AttributeError: # _xmlplue is too old; ignore it pass else: if v >= _MINIMUM_XMLPLUS_VERSION: import sys sys.modules[__name__] = _xmlplus else: del v
"""Core XML support for Python. This package contains three sub-packages: dom -- The W3C Document Object Model. This supports DOM Level 1 + Namespaces. parsers -- Python wrappers for XML parsers (currently only supports Expat). sax -- The Simple API for XML, developed by XML-Dev, led by David Megginson and ported to Python by Lars Marius Garshol. This supports the SAX 2 API. """ __all__ = ["dom", "parsers", "sax"] __version__ = "$Revision$"[1:-1].split()[1] _MINIMUM_XMLPLUS_VERSION = (0, 6, 1) try: import _xmlplus except ImportError: pass else: try: v = _xmlplus.version_info except AttributeError: # _xmlplue is too old; ignore it pass else: if v >= _MINIMUM_XMLPLUS_VERSION: import sys sys.modules[__name__] = _xmlplus else: del v Use the string module instead of string methods; this should still work with Python 1.5.2 for now."""Core XML support for Python. This package contains three sub-packages: dom -- The W3C Document Object Model. This supports DOM Level 1 + Namespaces. parsers -- Python wrappers for XML parsers (currently only supports Expat). sax -- The Simple API for XML, developed by XML-Dev, led by David Megginson and ported to Python by Lars Marius Garshol. This supports the SAX 2 API. """ __all__ = ["dom", "parsers", "sax"] import string __version__ = string.split("$Revision$")[1] del string _MINIMUM_XMLPLUS_VERSION = (0, 6, 1) try: import _xmlplus except ImportError: pass else: try: v = _xmlplus.version_info except AttributeError: # _xmlplue is too old; ignore it pass else: if v >= _MINIMUM_XMLPLUS_VERSION: import sys sys.modules[__name__] = _xmlplus else: del v
<commit_before>"""Core XML support for Python. This package contains three sub-packages: dom -- The W3C Document Object Model. This supports DOM Level 1 + Namespaces. parsers -- Python wrappers for XML parsers (currently only supports Expat). sax -- The Simple API for XML, developed by XML-Dev, led by David Megginson and ported to Python by Lars Marius Garshol. This supports the SAX 2 API. """ __all__ = ["dom", "parsers", "sax"] __version__ = "$Revision$"[1:-1].split()[1] _MINIMUM_XMLPLUS_VERSION = (0, 6, 1) try: import _xmlplus except ImportError: pass else: try: v = _xmlplus.version_info except AttributeError: # _xmlplue is too old; ignore it pass else: if v >= _MINIMUM_XMLPLUS_VERSION: import sys sys.modules[__name__] = _xmlplus else: del v <commit_msg>Use the string module instead of string methods; this should still work with Python 1.5.2 for now.<commit_after>"""Core XML support for Python. This package contains three sub-packages: dom -- The W3C Document Object Model. This supports DOM Level 1 + Namespaces. parsers -- Python wrappers for XML parsers (currently only supports Expat). sax -- The Simple API for XML, developed by XML-Dev, led by David Megginson and ported to Python by Lars Marius Garshol. This supports the SAX 2 API. """ __all__ = ["dom", "parsers", "sax"] import string __version__ = string.split("$Revision$")[1] del string _MINIMUM_XMLPLUS_VERSION = (0, 6, 1) try: import _xmlplus except ImportError: pass else: try: v = _xmlplus.version_info except AttributeError: # _xmlplue is too old; ignore it pass else: if v >= _MINIMUM_XMLPLUS_VERSION: import sys sys.modules[__name__] = _xmlplus else: del v
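The compatibility concern behind this commit: string objects only gained methods such as .split() in Python 2.0, so on 1.5.2 the module-level functions in the string module are the only option. Both spellings assume RCS has expanded the $Revision$ keyword at checkout; the expanded value below is made up for illustration:

import string

expanded = "$Revision: 1.9 $"      # what RCS keyword expansion produces
print(string.split(expanded)[1])   # 1.9, works on Python 1.5.2
print(expanded.split()[1])         # 1.9, string method, Python 2.0+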
28d4538e02d66d06fcba1d386b506502c7bad4a0
bakery/views.py
bakery/views.py
# -*- coding: utf-8 -*-
from django.views.generic import ListView, TemplateView

from bakery.cookies.models import Cookie


class HomeView(ListView):
    model = Cookie
    template_name = 'home.html'

home = HomeView.as_view()


class StylesView(TemplateView):
    template_name = 'styles.html'

styles = StylesView.as_view()
# -*- coding: utf-8 -*-
from django.db.models import Q
from django.views.generic import ListView, TemplateView

from bakery.cookies.models import Cookie


class HomeView(ListView):
    model = Cookie
    template_name = 'home.html'

    def get_queryset(self):
        queryset = super(HomeView, self).get_queryset()
        search_query = self.request.GET.get('q', None)
        if search_query:
            q = Q(name__icontains=search_query) | Q(description__icontains=search_query)
            queryset = queryset.filter(q)
        return queryset

home = HomeView.as_view()


class StylesView(TemplateView):
    template_name = 'styles.html'

styles = StylesView.as_view()
Integrate search in home view
Integrate search in home view
Python
bsd-3-clause
muffins-on-dope/bakery
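In the queryset change above, the two Q objects are OR-ed together so a single search box matches either the cookie's name or its description. Constructing the combined filter stands alone; the query term is illustrative:

from django.db.models import Q

search_query = 'chocolate'
q = Q(name__icontains=search_query) | Q(description__icontains=search_query)
# Applied via queryset.filter(q), this renders to roughly:
#   WHERE UPPER(name) LIKE UPPER('%chocolate%')
#      OR UPPER(description) LIKE UPPER('%chocolate%')
print(q)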
9252a0d7d86b911271bda78ecbf2c99129a1564b
blog/admin.py
blog/admin.py
from django.contrib import admin

from .models import Post


class PostAdmin(admin.ModelAdmin):
    list_display = (
        'title',
        'slug',
        'event',
        'created_at',
        'updated_at',
        'author',
        'is_sponsored',
    )
    search_fields = ('title', 'lead', 'body')
    list_filter = ('event', 'created_at')
    fields = ('title', 'slug', 'lead', 'body', 'is_sponsored')
    ordering = ('-created_at',)
    prepopulated_fields = {'slug': ('title',)}

    def save_model(self, request, obj, form, change):
        """When creating a new object, set the author field."""
        if not change:
            obj.author = request.user
        obj.save()

admin.site.register(Post, PostAdmin)
from django.contrib import admin

from .models import Post


class PostAdmin(admin.ModelAdmin):
    list_display = (
        'title',
        'slug',
        'event',
        'created_at',
        'updated_at',
        'author',
        'is_sponsored',
    )
    search_fields = ('title', 'lead', 'body')
    list_filter = ('event', 'created_at')
    fields = ('event', 'title', 'slug', 'lead', 'body', 'is_sponsored')
    ordering = ('-created_at',)
    prepopulated_fields = {'slug': ('title',)}

    def save_model(self, request, obj, form, change):
        """When creating a new object, set the author field."""
        if not change:
            obj.author = request.user
        obj.save()

admin.site.register(Post, PostAdmin)
Add event field when editing posts
Add event field when editing posts
Python
bsd-3-clause
WebCampZg/conference-web
47132cda83dcb26b7d94b5631ba145d925f05da3
test/test_commonsdowloader.py
test/test_commonsdowloader.py
#!/usr/bin/env python
# -*- coding: latin-1 -*-

"""Unit tests."""

import unittest

import commonsdownloader


class TestCommonsDownloader(unittest.TestCase):

    """Testing methods from commonsdownloader."""

    def test_clean_up_filename(self):
        """Test clean_up_filename."""
        values = [('Example.jpg', 'Example.jpg'),
                  ('Example.jpg ', 'Example.jpg'),
                  (' Example.jpg', 'Example.jpg'),
                  ('My Example.jpg', 'My_Example.jpg')]
        for (input_value, expected_value) in values:
            self.assertEqual(commonsdownloader.clean_up_filename(input_value),
                             expected_value)


if __name__ == "__main__":
    unittest.main()
#!/usr/bin/env python
# -*- coding: latin-1 -*-

"""Unit tests."""

import unittest

import commonsdownloader


class TestCommonsDownloader(unittest.TestCase):

    """Testing methods from commonsdownloader."""

    def test_clean_up_filename(self):
        """Test clean_up_filename."""
        values = [('Example.jpg', 'Example.jpg'),
                  ('Example.jpg ', 'Example.jpg'),
                  (' Example.jpg', 'Example.jpg'),
                  ('My Example.jpg', 'My_Example.jpg')]
        for (input_value, expected_value) in values:
            self.assertEqual(commonsdownloader.clean_up_filename(input_value),
                             expected_value)

    def test_make_thumb_url(self):
        """Test make_thumb_url."""
        input_value = ('My_Example.jpg', 100)
        expected_value = "http://commons.wikimedia.org/w/thumb.php?f=My_Example.jpg&width=100"
        output = commonsdownloader.make_thumb_url(*input_value)
        self.assertEqual(output, expected_value)


if __name__ == "__main__":
    unittest.main()
Add unit test for make_thumb_url()
Add unit test for make_thumb_url()
Python
mit
Commonists/CommonsDownloader
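The new test pins down make_thumb_url's contract without showing its body. A minimal implementation consistent with the expected URL, offered as a sketch only, since the real function lives in commonsdownloader and may differ:

def make_thumb_url(image_name, width):
    """Build a Wikimedia Commons thumb.php URL for the given width."""
    return ('http://commons.wikimedia.org/w/thumb.php'
            '?f=%s&width=%s' % (image_name, width))

print(make_thumb_url('My_Example.jpg', 100))
# http://commons.wikimedia.org/w/thumb.php?f=My_Example.jpg&width=100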
47b4779b82035d0478985c85c3e7e95581ef8efe
CodeFights/arrayPacking.py
CodeFights/arrayPacking.py
#!/usr/local/bin/python
# Code Fights Are Equally Strong Problem


def arrayPacking():
    pass


def main():
    tests = [
        [],
        []
    ]

    for t in tests:
        res = arrayPacking(t[0])
        if t[1] == res:
            print("PASSED: arrayPacking({}) returned {}"
                  .format(t[0], res))
        else:
            print(("FAILED: arrayPacking({}) returned {},"
                   "answer: {}").format(t[0], res, t[1]))


if __name__ == '__main__':
    main()
#!/usr/local/bin/python
# Code Fights Array Packing (Core) Problem


def arrayPacking(a):
    return sum([n << 8*i for i, n in enumerate(a)])


def main():
    tests = [
        [[24, 85, 0], 21784],
        [[23, 45, 39], 2567447]
    ]

    for t in tests:
        res = arrayPacking(t[0])
        if t[1] == res:
            print("PASSED: arrayPacking({}) returned {}"
                  .format(t[0], res))
        else:
            print(("FAILED: arrayPacking({}) returned {},"
                   "answer: {}").format(t[0], res, t[1]))


if __name__ == '__main__':
    main()
Solve Code Fights array packing problem
Solve Code Fights array packing problem
Python
mit
HKuz/Test_Code
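The comprehension in arrayPacking places element i into byte i, i.e. little-endian order: for the first test case, 24 + (85 << 8) + (0 << 16) = 24 + 21760 + 0 = 21784. The inverse operation makes a handy sanity check; this helper is not part of the commit:

def array_unpacking(n, count):
    # Pull `count` little-endian bytes back out of the packed integer.
    return [(n >> 8 * i) & 0xFF for i in range(count)]

print(array_unpacking(21784, 3))    # [24, 85, 0]
print(array_unpacking(2567447, 3))  # [23, 45, 39]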
27e79e49af76a0f981f54c1ac2b88ad409bacb95
xie/graphics/utils.py
xie/graphics/utils.py
class TextCodec:
    def __init__(self):
        pass

    def encodeStartPoint(self, p):
        return "0000{0[0]:02X}{0[1]:02X}".format(p)

    def encodeEndPoint(self, p):
        return "0001{0[0]:02X}{0[1]:02X}".format(p)

    def encodeControlPoint(self, p):
        return "0002{0[0]:02X}{0[1]:02X}".format(p)

    def encodeStrokeExpression(self, pointExpressionList):
        return ",".join(pointExpressionList)

    def encodeCharacterExpression(self, strokeExpressionList):
        return ";".join(strokeExpressionList)

    def isStartPoint(self, pointExpression):
        return pointExpression[3]=='0'

    def isEndPoint(self, pointExpression):
        return pointExpression[3]=='1'

    def isControlPoint(self, pointExpression):
        return pointExpression[3]=='2'

    def decodePointExpression(self, pointExpression):
        e=pointExpression
        return (int(e[4:6], 16), int(e[6:8], 16))

    def decodeStrokeExpression(self, strokeExpression):
        return strokeExpression.split(",")

    def decodeCharacterExpression(self, characterExpression):
        return characterExpression.split(";")
class TextCodec:
    def __init__(self):
        pass

    def encodeStartPoint(self, p):
        return "0{0[0]:02X}{0[1]:02X}".format(p)

    def encodeEndPoint(self, p):
        return "1{0[0]:02X}{0[1]:02X}".format(p)

    def encodeControlPoint(self, p):
        return "2{0[0]:02X}{0[1]:02X}".format(p)

    def encodeStrokeExpression(self, pointExpressionList):
        return ",".join(pointExpressionList)

    def encodeCharacterExpression(self, strokeExpressionList):
        return ";".join(strokeExpressionList)

    def isStartPoint(self, pointExpression):
        return pointExpression[0]=='0'

    def isEndPoint(self, pointExpression):
        return pointExpression[0]=='1'

    def isControlPoint(self, pointExpression):
        return pointExpression[0]=='2'

    def decodePointExpression(self, pointExpression):
        e=pointExpression
        return (int(e[1:3], 16), int(e[3:5], 16))

    def decodeStrokeExpression(self, strokeExpression):
        return strokeExpression.split(",")

    def decodeCharacterExpression(self, characterExpression):
        return characterExpression.split(";")
Change the text format for drawing a stroke: use 5 digits instead of 8 to represent a point.
Change the text format for drawing a stroke: use 5 digits instead of 8 to represent a point.
Python
apache-2.0
xrloong/Xie
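Concretely, the commit shrinks each encoded point from 8 characters to 5 by collapsing the four-digit type prefix (0000/0001/0002) to a single digit (0/1/2). A round trip through the new codec, with made-up coordinates:

codec = TextCodec()          # the class from the new version above
expr = codec.encodeStartPoint((171, 205))
print(expr)                               # 0ABCD  (was 0000ABCD before)
print(codec.isStartPoint(expr))           # True
print(codec.decodePointExpression(expr))  # (171, 205)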
f279d22072e853332c3fba60ad6e98fbeb4f35b8
untappd/untappd_object.py
untappd/untappd_object.py
class _NotSetType:
    def __repr__(self):
        return "NotSet"  # pragma no cover

NotSet = _NotSetType()


class BasicUntappdObject(object):
    def __init__(self, attributes):
        self._init_attributes()
        self._setup_attributes(attributes)


class UntappdObject(BasicUntappdObject):
    pass
#TODO do we want to set a base object for untappd objects? do they share enough
# qualities to need a shared type?
class _NotSetType:
    def __repr__(self):
        return "NotSet"  # pragma no cover

NotSet = _NotSetType()


class BasicUntappdObject(object):
    def __init__(self, attributes):
        self._init_attributes()
        self._setup_attributes(attributes)


#TODO determine what an Untappdobject vs a BasicUntappdObject would require
class UntappdObject(BasicUntappdObject):
    pass
Add a few design comments.
Add a few design comments.
Python
mit
adamomfg/pytappd
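The NotSet sentinel in this record follows a common lazy-attribute pattern: it lets accessor code tell "never fetched" apart from a stored None. A minimal sketch of the idea; the Beer class and its attribute are hypothetical, not taken from the repository:

class _NotSetType:
    def __repr__(self):
        return "NotSet"

NotSet = _NotSetType()

class Beer(object):
    def __init__(self):
        self._abv = NotSet  # distinct from None, which would be a real value

    @property
    def abv(self):
        if self._abv is NotSet:
            raise RuntimeError("attribute was never populated from the API")
        return self._abv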
7d70d97ebbf5c901b1be1071ed6a0d8d350b53c0
appengine/swarming/swarming_bot/api/__init__.py
appengine/swarming/swarming_bot/api/__init__.py
# Copyright 2016 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.

__version__ = '1.0.0'
Add __version__ to Swarming bot 'api' package.
Add __version__ to Swarming bot 'api' package.

It will simplify making bot_config.py hooks work with different versions
of bot code (e.g. when deploying changes to bot API).

R=maruel@chromium.org
BUG=

Review-Url: https://codereview.chromium.org/2270113002
Python
apache-2.0
luci/luci-py,luci/luci-py,luci/luci-py,luci/luci-py
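The new constant exists so that bot_config.py hooks can branch on the bot API version while bots running different code coexist. A hedged sketch of that use; only __version__ = '1.0.0' comes from the record, and the hook name and version scheme below are assumptions:

def _api_version(api):
    # '1.0.0' -> (1, 0, 0), so versions compare numerically, not as strings.
    return tuple(int(part) for part in api.__version__.split('.'))

def on_bot_startup(api):
    if _api_version(api) >= (1, 0, 0):
        pass  # safe to use the newer API surface
    else:
        pass  # fall back to the older hook behaviour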
42a459dc41cee9ea48f7e332c022643102e05355
applications/pagefetch_project/configuration.py
applications/pagefetch_project/configuration.py
__author__ = 'leif'
import os

PROJ_PATH = os.getcwd()
print "Project Path: %s" % (PROJ_PATH)

GAME_DB = os.path.join(PROJ_PATH,'game.db')
TEMP_PATH = os.path.join(PROJ_PATH,'templates')
STATIC_PATH = os.path.join(PROJ_PATH,'static')
MEDIA_PATH = os.path.join(PROJ_PATH,'media')
MEDIA_URL = '/media/'
UPLOAD_DIR = os.path.join(PROJ_PATH,'media')
MEDIA_ROOT = 'data/'
DEPLOY = False
DEBUG = True
APP_NAME = 'pagefetch'
DATA_DIR = os.path.join(PROJ_PATH,'data')
__author__ = 'leif'
import os

PROJ_PATH = os.getcwd()
print "Project Path: %s" % (PROJ_PATH)

GAME_DB = os.path.join(PROJ_PATH,'game.db')
TEMP_PATH = os.path.join(PROJ_PATH,'templates')
STATIC_PATH = os.path.join(PROJ_PATH,'static')
MEDIA_PATH = os.path.join('')
MEDIA_URL = '/data/'
#UPLOAD_DIR = os.path.join(PROJ_PATH,'data')
UPLOAD_DIR = '%Y/%m/%d'
MEDIA_ROOT = 'data/'
DEPLOY = False
DEBUG = True
APP_NAME = 'pagefetch'
DATA_DIR = os.path.join(PROJ_PATH,'data')
Change to upload paths for images.
Change to upload paths for images.
Python
mit
leifos/ifind,leifos/ifind
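The new UPLOAD_DIR = '%Y/%m/%d' is a strftime pattern rather than a filesystem path, matching how Django file fields expect date-based upload directories. A minimal illustration — the model is hypothetical, only the pattern and MEDIA_ROOT come from the record:

from django.db import models

class Screenshot(models.Model):
    # Django expands the date codes at save time, so with MEDIA_ROOT = 'data/'
    # an upload lands in e.g. data/2014/03/07/cat.png.
    image = models.ImageField(upload_to='%Y/%m/%d')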
90f1cfb302c5b95243731c3c6688c5c3193b821c
mmd_tools/auto_scene_setup.py
mmd_tools/auto_scene_setup.py
# -*- coding: utf-8 -*-

import bpy


def setupFrameRanges():
    s, e = 1, 1
    for i in bpy.data.actions:
        ts, te = i.frame_range
        s = min(s, ts)
        e = max(e, te)
    bpy.context.scene.frame_start = s
    bpy.context.scene.frame_end = e
    bpy.context.scene.rigidbody_world.point_cache.frame_start = s
    bpy.context.scene.rigidbody_world.point_cache.frame_end = e


def setupLighting():
    bpy.context.scene.world.light_settings.use_ambient_occlusion = True
    bpy.context.scene.world.light_settings.use_environment_light = True
    bpy.context.scene.world.light_settings.use_indirect_light = True


def setupFps():
    bpy.context.scene.render.fps = 30
    bpy.context.scene.render.fps_base = 1
# -*- coding: utf-8 -*-

import bpy


def setupFrameRanges():
    s, e = 1, 1
    for i in bpy.data.actions:
        ts, te = i.frame_range
        s = min(s, ts)
        e = max(e, te)
    bpy.context.scene.frame_start = s
    bpy.context.scene.frame_end = e
    if bpy.context.scene.rigidbody_world is not None:
        bpy.context.scene.rigidbody_world.point_cache.frame_start = s
        bpy.context.scene.rigidbody_world.point_cache.frame_end = e


def setupLighting():
    bpy.context.scene.world.light_settings.use_ambient_occlusion = True
    bpy.context.scene.world.light_settings.use_environment_light = True
    bpy.context.scene.world.light_settings.use_indirect_light = True


def setupFps():
    bpy.context.scene.render.fps = 30
    bpy.context.scene.render.fps_base = 1
Fix the bug that causes "set frame range" error on a scene that has no rigid body world.
Fix the bug that causes "set frame range" error on a scene that has no rigid body world.
Python
mit
sugiany/blender_mmd_tools,lordscales91/blender_mmd_tools
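For context, scene.rigidbody_world is simply None until a rigid body world has been added, which is what made the unguarded attribute access fail. A minimal repro sketch (needs Blender's bpy module, so it only runs inside Blender):

import bpy

scene = bpy.context.scene
print(scene.rigidbody_world)  # None on a scene without a rigid body world
# Before the fix, setupFrameRanges() on such a scene raised:
# AttributeError: 'NoneType' object has no attribute 'point_cache'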
01ebdc54886f01a9aa58098c8987b0ce7620706a
simplestatistics/statistics/standard_deviation.py
simplestatistics/statistics/standard_deviation.py
import math

from .variance import variance


def standard_deviation(data):
    """
    The `standard deviation`_ is the square root of variance_ (the sum of
    squared deviations from the mean). The standard deviation is a commonly
    used measure of the variation and distance of a set of values in a
    sample from the mean of the sample.

    .. _`standard deviation`: https://en.wikipedia.org/wiki/Standard_deviation
    .. _variance: http://en.wikipedia.org/wiki/Variance

    Equation:

    .. math::
        \\sigma = \\sqrt{\\frac{\\sum (x - \\mu)^2}{N - 1}}

    In English:

    - Obtain the difference between each value and the mean.
    - Square those values.
    - Sum the squared values.
    - Divide by the number of values - 1 (to correct for the sampling).
    - Obtain the square root of the result.

    Args:
        data: A list of numerical objects.

    Returns:
        A float object.

    Examples:
        >>> standard_deviation([1, 2, 3])
        1.0
        >>> standard_deviation([1, 2, 3, 4])
        1.2909944487358056
        >>> standard_deviation([-1, 0, 1, 2, 3, 4])
        1.8708286933869707
    """
    return math.sqrt(variance(data))
import math

from .variance import variance


def standard_deviation(data, sample = True):
    """
    The `standard deviation`_ is the square root of variance_ (the sum of
    squared deviations from the mean). The standard deviation is a commonly
    used measure of the variation and distance of a set of values in a
    sample from the mean of the sample.

    .. _`standard deviation`: https://en.wikipedia.org/wiki/Standard_deviation
    .. _variance: http://en.wikipedia.org/wiki/Variance

    Equation:

    .. math::
        \\sigma = \\sqrt{\\frac{\\sum (x - \\mu)^2}{N - 1}}

    In English:

    - Obtain the difference between each value and the mean.
    - Square those values.
    - Sum the squared values.
    - Divide by the number of values - 1 (to correct for the sampling).
    - Obtain the square root of the result.

    Args:
        data: A list of numerical objects.
        sample: A boolean value. If True, calculates standard deviation for
            sample. If False, calculates standard deviation for population.

    Returns:
        A float object.

    Examples:
        >>> standard_deviation([1, 2, 3])
        1.0
        >>> standard_deviation([1, 2, 3], False)
        0.816496580927726
        >>> standard_deviation([1, 2, 3, 4])
        1.2909944487358056
        >>> standard_deviation([-1, 0, 1, 2, 3, 4])
        1.8708286933869707
    """
    return math.sqrt(variance(data, sample))
Add sample param to Standard Deviation function
Add sample param to Standard Deviation function

Boolean param to make possible to calculate Standard Deviation for
population (Default is sample).
Python
unknown
sheriferson/simple-statistics-py,tmcw/simple-statistics-py,sheriferson/simplestatistics
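The two branches of the new sample flag are the usual N - 1 versus N denominators, and the doctest values can be cross-checked against numpy's ddof convention (numpy is assumed available here; it is not a dependency of the package):

import numpy as np

data = [1, 2, 3]
assert np.isclose(np.std(data, ddof=1), 1.0)                # sample: divide by N - 1
assert np.isclose(np.std(data, ddof=0), 0.816496580927726)  # population: divide by N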
6f8ef3108e0a13fd4e2d2c9337d5809dfa12b732
skyfield/api.py
skyfield/api.py
"""Top-level objects and functions offered by the Skyfield library. Importing this ``skyfield.api`` module causes Skyfield to load up the default JPL planetary ephemeris ``de421`` and create planet objects like ``earth`` and ``mars`` that are ready for your use. """ import de421 from datetime import datetime from .starlib import Star from .timelib import JulianDate, now, utc from .units import Angle def build_ephemeris(): from .data.horizons import festoon_ephemeris from .jpllib import Ephemeris ephemeris = Ephemeris(de421) festoon_ephemeris(ephemeris) return ephemeris ephemeris = build_ephemeris() del build_ephemeris sun = ephemeris.sun mercury = ephemeris.mercury venus = ephemeris.venus earth = ephemeris.earth moon = ephemeris.moon mars = ephemeris.mars jupiter = ephemeris.jupiter saturn = ephemeris.saturn uranus = ephemeris.uranus neptune = ephemeris.neptune pluto = ephemeris.pluto eight_planets = (mercury, venus, earth, mars, jupiter, saturn, uranus, neptune) nine_planets = eight_planets + (pluto,)
"""Top-level objects and functions offered by the Skyfield library. Importing this ``skyfield.api`` module causes Skyfield to load up the default JPL planetary ephemeris ``de421`` and create planet objects like ``earth`` and ``mars`` that are ready for your use. """ import de421 from datetime import datetime from .starlib import Star from .timelib import JulianDate, T0, now, utc from .units import Angle def build_ephemeris(): from .data.horizons import festoon_ephemeris from .jpllib import Ephemeris ephemeris = Ephemeris(de421) festoon_ephemeris(ephemeris) return ephemeris ephemeris = build_ephemeris() del build_ephemeris sun = ephemeris.sun mercury = ephemeris.mercury venus = ephemeris.venus earth = ephemeris.earth moon = ephemeris.moon mars = ephemeris.mars jupiter = ephemeris.jupiter saturn = ephemeris.saturn uranus = ephemeris.uranus neptune = ephemeris.neptune pluto = ephemeris.pluto eight_planets = (mercury, venus, earth, mars, jupiter, saturn, uranus, neptune) nine_planets = eight_planets + (pluto,)
Add T0 to the symbols available through the API
Add T0 to the symbols available through the API
Python
mit
GuidoBR/python-skyfield,GuidoBR/python-skyfield,skyfielders/python-skyfield,ozialien/python-skyfield,exoanalytic/python-skyfield,ozialien/python-skyfield,skyfielders/python-skyfield,exoanalytic/python-skyfield
"""Top-level objects and functions offered by the Skyfield library. Importing this ``skyfield.api`` module causes Skyfield to load up the default JPL planetary ephemeris ``de421`` and create planet objects like ``earth`` and ``mars`` that are ready for your use. """ import de421 from datetime import datetime from .starlib import Star from .timelib import JulianDate, now, utc from .units import Angle def build_ephemeris(): from .data.horizons import festoon_ephemeris from .jpllib import Ephemeris ephemeris = Ephemeris(de421) festoon_ephemeris(ephemeris) return ephemeris ephemeris = build_ephemeris() del build_ephemeris sun = ephemeris.sun mercury = ephemeris.mercury venus = ephemeris.venus earth = ephemeris.earth moon = ephemeris.moon mars = ephemeris.mars jupiter = ephemeris.jupiter saturn = ephemeris.saturn uranus = ephemeris.uranus neptune = ephemeris.neptune pluto = ephemeris.pluto eight_planets = (mercury, venus, earth, mars, jupiter, saturn, uranus, neptune) nine_planets = eight_planets + (pluto,) Add T0 to the symbols available through the API
"""Top-level objects and functions offered by the Skyfield library. Importing this ``skyfield.api`` module causes Skyfield to load up the default JPL planetary ephemeris ``de421`` and create planet objects like ``earth`` and ``mars`` that are ready for your use. """ import de421 from datetime import datetime from .starlib import Star from .timelib import JulianDate, T0, now, utc from .units import Angle def build_ephemeris(): from .data.horizons import festoon_ephemeris from .jpllib import Ephemeris ephemeris = Ephemeris(de421) festoon_ephemeris(ephemeris) return ephemeris ephemeris = build_ephemeris() del build_ephemeris sun = ephemeris.sun mercury = ephemeris.mercury venus = ephemeris.venus earth = ephemeris.earth moon = ephemeris.moon mars = ephemeris.mars jupiter = ephemeris.jupiter saturn = ephemeris.saturn uranus = ephemeris.uranus neptune = ephemeris.neptune pluto = ephemeris.pluto eight_planets = (mercury, venus, earth, mars, jupiter, saturn, uranus, neptune) nine_planets = eight_planets + (pluto,)
<commit_before>"""Top-level objects and functions offered by the Skyfield library. Importing this ``skyfield.api`` module causes Skyfield to load up the default JPL planetary ephemeris ``de421`` and create planet objects like ``earth`` and ``mars`` that are ready for your use. """ import de421 from datetime import datetime from .starlib import Star from .timelib import JulianDate, now, utc from .units import Angle def build_ephemeris(): from .data.horizons import festoon_ephemeris from .jpllib import Ephemeris ephemeris = Ephemeris(de421) festoon_ephemeris(ephemeris) return ephemeris ephemeris = build_ephemeris() del build_ephemeris sun = ephemeris.sun mercury = ephemeris.mercury venus = ephemeris.venus earth = ephemeris.earth moon = ephemeris.moon mars = ephemeris.mars jupiter = ephemeris.jupiter saturn = ephemeris.saturn uranus = ephemeris.uranus neptune = ephemeris.neptune pluto = ephemeris.pluto eight_planets = (mercury, venus, earth, mars, jupiter, saturn, uranus, neptune) nine_planets = eight_planets + (pluto,) <commit_msg>Add T0 to the symbols available through the API<commit_after>
"""Top-level objects and functions offered by the Skyfield library. Importing this ``skyfield.api`` module causes Skyfield to load up the default JPL planetary ephemeris ``de421`` and create planet objects like ``earth`` and ``mars`` that are ready for your use. """ import de421 from datetime import datetime from .starlib import Star from .timelib import JulianDate, T0, now, utc from .units import Angle def build_ephemeris(): from .data.horizons import festoon_ephemeris from .jpllib import Ephemeris ephemeris = Ephemeris(de421) festoon_ephemeris(ephemeris) return ephemeris ephemeris = build_ephemeris() del build_ephemeris sun = ephemeris.sun mercury = ephemeris.mercury venus = ephemeris.venus earth = ephemeris.earth moon = ephemeris.moon mars = ephemeris.mars jupiter = ephemeris.jupiter saturn = ephemeris.saturn uranus = ephemeris.uranus neptune = ephemeris.neptune pluto = ephemeris.pluto eight_planets = (mercury, venus, earth, mars, jupiter, saturn, uranus, neptune) nine_planets = eight_planets + (pluto,)
"""Top-level objects and functions offered by the Skyfield library. Importing this ``skyfield.api`` module causes Skyfield to load up the default JPL planetary ephemeris ``de421`` and create planet objects like ``earth`` and ``mars`` that are ready for your use. """ import de421 from datetime import datetime from .starlib import Star from .timelib import JulianDate, now, utc from .units import Angle def build_ephemeris(): from .data.horizons import festoon_ephemeris from .jpllib import Ephemeris ephemeris = Ephemeris(de421) festoon_ephemeris(ephemeris) return ephemeris ephemeris = build_ephemeris() del build_ephemeris sun = ephemeris.sun mercury = ephemeris.mercury venus = ephemeris.venus earth = ephemeris.earth moon = ephemeris.moon mars = ephemeris.mars jupiter = ephemeris.jupiter saturn = ephemeris.saturn uranus = ephemeris.uranus neptune = ephemeris.neptune pluto = ephemeris.pluto eight_planets = (mercury, venus, earth, mars, jupiter, saturn, uranus, neptune) nine_planets = eight_planets + (pluto,) Add T0 to the symbols available through the API"""Top-level objects and functions offered by the Skyfield library. Importing this ``skyfield.api`` module causes Skyfield to load up the default JPL planetary ephemeris ``de421`` and create planet objects like ``earth`` and ``mars`` that are ready for your use. """ import de421 from datetime import datetime from .starlib import Star from .timelib import JulianDate, T0, now, utc from .units import Angle def build_ephemeris(): from .data.horizons import festoon_ephemeris from .jpllib import Ephemeris ephemeris = Ephemeris(de421) festoon_ephemeris(ephemeris) return ephemeris ephemeris = build_ephemeris() del build_ephemeris sun = ephemeris.sun mercury = ephemeris.mercury venus = ephemeris.venus earth = ephemeris.earth moon = ephemeris.moon mars = ephemeris.mars jupiter = ephemeris.jupiter saturn = ephemeris.saturn uranus = ephemeris.uranus neptune = ephemeris.neptune pluto = ephemeris.pluto eight_planets = (mercury, venus, earth, mars, jupiter, saturn, uranus, neptune) nine_planets = eight_planets + (pluto,)
<commit_before>"""Top-level objects and functions offered by the Skyfield library. Importing this ``skyfield.api`` module causes Skyfield to load up the default JPL planetary ephemeris ``de421`` and create planet objects like ``earth`` and ``mars`` that are ready for your use. """ import de421 from datetime import datetime from .starlib import Star from .timelib import JulianDate, now, utc from .units import Angle def build_ephemeris(): from .data.horizons import festoon_ephemeris from .jpllib import Ephemeris ephemeris = Ephemeris(de421) festoon_ephemeris(ephemeris) return ephemeris ephemeris = build_ephemeris() del build_ephemeris sun = ephemeris.sun mercury = ephemeris.mercury venus = ephemeris.venus earth = ephemeris.earth moon = ephemeris.moon mars = ephemeris.mars jupiter = ephemeris.jupiter saturn = ephemeris.saturn uranus = ephemeris.uranus neptune = ephemeris.neptune pluto = ephemeris.pluto eight_planets = (mercury, venus, earth, mars, jupiter, saturn, uranus, neptune) nine_planets = eight_planets + (pluto,) <commit_msg>Add T0 to the symbols available through the API<commit_after>"""Top-level objects and functions offered by the Skyfield library. Importing this ``skyfield.api`` module causes Skyfield to load up the default JPL planetary ephemeris ``de421`` and create planet objects like ``earth`` and ``mars`` that are ready for your use. """ import de421 from datetime import datetime from .starlib import Star from .timelib import JulianDate, T0, now, utc from .units import Angle def build_ephemeris(): from .data.horizons import festoon_ephemeris from .jpllib import Ephemeris ephemeris = Ephemeris(de421) festoon_ephemeris(ephemeris) return ephemeris ephemeris = build_ephemeris() del build_ephemeris sun = ephemeris.sun mercury = ephemeris.mercury venus = ephemeris.venus earth = ephemeris.earth moon = ephemeris.moon mars = ephemeris.mars jupiter = ephemeris.jupiter saturn = ephemeris.saturn uranus = ephemeris.uranus neptune = ephemeris.neptune pluto = ephemeris.pluto eight_planets = (mercury, venus, earth, mars, jupiter, saturn, uranus, neptune) nine_planets = eight_planets + (pluto,)
e0cd0f9a14ac354f19c4e91367ac75b34d58ae8e
pirx/checks.py
pirx/checks.py
#!/usr/bin/env python
import socket
import sys


def host(name):
    return socket.gethostname() == name


def arg(name, expected_value=None):
    args = [
        arg.split('=') if '=' in arg else (arg, None)
        for arg in sys.argv[1:]
    ]
    for arg_name, arg_value in args:
        if arg_name.lstrip('--') == name:
            return arg_value == expected_value
    return False
#!/usr/bin/env python
import socket
import sys


def host(name):
    """Check if host name is equal to the given name"""
    return socket.gethostname() == name


def arg(name, expected_value=None):
    """
    Check if command-line argument with a given name was passed
    and if it has the expected value.
    """
    args = [
        arg.split('=') if '=' in arg else (arg, None)
        for arg in sys.argv[1:]
    ]
    for arg_name, arg_value in args:
        if arg_name.lstrip('--') == name:
            return arg_value == expected_value
    return False
Set docstrings for check functions
Set docstrings for check functions
Python
mit
piotrekw/pirx
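A usage sketch for the two helpers, e.g. a settings script invoked as python build.py --env=dev on a given machine; the host name and values below are illustrative:

from pirx import checks

if checks.host('prod-web-01'):
    DEBUG = False
if checks.arg('env', 'dev'):  # True when --env=dev was passed
    DEBUG = True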
af1df841b3d0c013c0678fa6fb9821b61a9eb87c
policy_evaluation/deterministic.py
policy_evaluation/deterministic.py
from torch.autograd import Variable


class DeterministicPolicy(object):

    def __init__(self, policy):
        """Assumes policy returns an autograd.Variable"""
        self.name = "DP"
        self.policy = policy
        self.cuda = next(policy.parameters()).is_cuda

    def get_action(self, state):
        """ Takes best action based on estimated state-action values."""
        state = state.cuda() if self.cuda else state
        q_val, argmax_a = self.policy(
            Variable(state, volatile=True)).data.max(1)
        """
        result = self.policy(Variable(state_batch, volatile=True))
        print(result)
        """
        return (q_val[0], argmax_a[0])
from torch.autograd import Variable


class DeterministicPolicy(object):

    def __init__(self, policy):
        """Assumes policy returns an autograd.Variable"""
        self.name = "DP"
        self.policy = policy
        self.cuda = next(policy.parameters()).is_cuda

    def get_action(self, state):
        """ Takes best action based on estimated state-action values."""
        state = state.cuda() if self.cuda else state
        q_val, argmax_a = self.policy(
            Variable(state, volatile=True)).data.max(1)
        """
        result = self.policy(Variable(state_batch, volatile=True))
        print(result)
        """
        return (q_val.squeeze()[0], argmax_a.squeeze()[0])
Make DQN backward compatible with pytorch 0.1.2.
Make DQN backward compatible with pytorch 0.1.2.
Python
mit
floringogianu/categorical-dqn
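The reason for the added squeeze(): on pytorch 0.1.x, Tensor.max(dim) keeps the reduced dimension, so a 1xA batch of Q-values yields 1x1 results and plain [0] indexing still returns a row tensor rather than a number. A shape sketch for that old API (illustrative; current pytorch returns different shapes):

import torch

q = torch.Tensor([[0.1, 0.9, 0.3]])  # one state, three actions
values, argmax = q.max(1)            # each shaped 1x1 on pytorch 0.1.x
best_q = values.squeeze()[0]         # 0.9 as a plain Python float
best_a = argmax.squeeze()[0]         # 1, the greedy action index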
73b380000ad1ba87169f3a9a7bd219b76109945e
selectable/tests/__init__.py
selectable/tests/__init__.py
from django.db import models
from django.utils.encoding import python_2_unicode_compatible

from ..base import ModelLookup
from ..registry import registry


@python_2_unicode_compatible
class Thing(models.Model):
    name = models.CharField(max_length=100)
    description = models.CharField(max_length=100)

    def __str__(self):
        return self.name


@python_2_unicode_compatible
class OtherThing(models.Model):
    name = models.CharField(max_length=100)
    thing = models.ForeignKey(Thing)

    def __str__(self):
        return self.name


@python_2_unicode_compatible
class ManyThing(models.Model):
    name = models.CharField(max_length=100)
    things = models.ManyToManyField(Thing)

    def __str__(self):
        return self.name


class ThingLookup(ModelLookup):
    model = Thing
    search_fields = ('name__icontains', )


registry.register(ThingLookup)
from django.db import models
from django.utils.encoding import python_2_unicode_compatible

from ..base import ModelLookup
from ..registry import registry


@python_2_unicode_compatible
class Thing(models.Model):
    name = models.CharField(max_length=100)
    description = models.CharField(max_length=100)

    def __str__(self):
        return self.name

    class Meta:
        ordering = ['id']


@python_2_unicode_compatible
class OtherThing(models.Model):
    name = models.CharField(max_length=100)
    thing = models.ForeignKey(Thing)

    def __str__(self):
        return self.name


@python_2_unicode_compatible
class ManyThing(models.Model):
    name = models.CharField(max_length=100)
    things = models.ManyToManyField(Thing)

    def __str__(self):
        return self.name


class ThingLookup(ModelLookup):
    model = Thing
    search_fields = ('name__icontains', )


registry.register(ThingLookup)
Fix warning in test suite when running under Django 1.11
Fix warning in test suite when running under Django 1.11
Python
bsd-2-clause
mlavin/django-selectable,mlavin/django-selectable,mlavin/django-selectable
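Context for the record above: Django 1.11 added UnorderedObjectListWarning when a Paginator receives an unordered queryset, which is the test-suite warning the new Meta.ordering silences. A minimal illustration (not part of the record):

from django.core.paginator import Paginator

def first_page(per_page=10):
    # Thing now declares Meta.ordering = ['id'], so pagination is
    # deterministic and no longer warns under Django 1.11.
    return Paginator(Thing.objects.all(), per_page).page(1).object_list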
3a2ca4573866b7b81d4b946ce87b9f36b487d272
src/dojo.py
src/dojo.py
class Dojo(object):

    def __init__(self):
        self.all_rooms = []
        self.all_people = []

    def create_room(self, room_type, room_name):
        pass
class Dojo(object):
    """This class is responsible for managing and allocating rooms to people"""

    def __init__(self):
        self.all_rooms = []
        self.all_people = []

    def create_room(self, room_type, room_name):
        pass
Add docstring to Dojo class
Add docstring to Dojo class
Python
mit
EdwinKato/Space-Allocator,EdwinKato/Space-Allocator
d73379f6ffc82bd8f5d61a80d7f1037a69f5a3dc
yapf/yapflib/py3compat.py
yapf/yapflib/py3compat.py
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for Python2 / Python3 compatibility."""

import sys

PY3 = sys.version_info[0] == 3

if PY3:
    import io
    StringIO = io.StringIO
    BytesIO = io.BytesIO

    range = range
    ifilter = filter
    raw_input = input
else:
    import __builtin__
    import cStringIO
    StringIO = BytesIO = cStringIO.StringIO

    range = xrange
    from itertools import ifilter
    raw_input = raw_input


def EncodeForStdout(s):
    """Encode the given string for emission to stdout.

    The string may contain non-ascii characters. This is a problem when stdout
    is redirected, because then Python doesn't know the encoding and we may get
    a UnicodeEncodeError.
    """
    if PY3:
        return s
    else:
        return s.encode('UTF-8')


def unicode(s):
    """Force conversion of s to unicode."""
    if PY3:
        return s
    else:
        return __builtin__.unicode(s, "unicode_escape")
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for Python2 / Python3 compatibility."""

import sys

PY3 = sys.version_info[0] == 3

if PY3:
    import io
    StringIO = io.StringIO
    BytesIO = io.BytesIO

    range = range
    ifilter = filter
    raw_input = input
else:
    import __builtin__
    import cStringIO
    StringIO = BytesIO = cStringIO.StringIO

    range = xrange
    from itertools import ifilter
    raw_input = raw_input


def EncodeForStdout(s):
    """Encode the given string for emission to stdout.

    The string may contain non-ascii characters. This is a problem when stdout
    is redirected, because then Python doesn't know the encoding and we may get
    a UnicodeEncodeError.
    """
    if PY3:
        return s
    else:
        return s.encode('UTF-8')


def unicode(s):
    """Force conversion of s to unicode."""
    if PY3:
        return s
    else:
        return __builtin__.unicode(s, 'unicode_escape')
Make the use of quotes consistent.
Make the use of quotes consistent.
Python
apache-2.0
abduld/yapf,jamesblunt/yapf,sbc100/yapf,wklken/yapf,google/yapf,sbc100/yapf,thurday/yapf,dmoliveira/yapf,nikolamilosevic86/yapf,bygloam/yapf,lucius-feng/yapf,elviswf/yapf,hayd/yapf,google/yapf,ambv/yapf
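A brief usage sketch (hypothetical, not from the yapf sources) of the helper whose docstring above describes the redirected-stdout problem:

# On Python 2 with stdout redirected, printing raw unicode can raise
# UnicodeEncodeError; routing output through EncodeForStdout avoids that.
print(EncodeForStdout(u'caf\xe9'))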
11eac909105ee944ebef38bd23e1f320a8dd1175
shakespearelang/character.py
shakespearelang/character.py
from .errors import ShakespeareRuntimeError


class Character:
    """A character in an SPL play."""

    def __init__(self, name):
        self.value = 0
        self.stack = []
        self.on_stage = False
        self.name = name

    @classmethod
    def from_dramatis_persona(cls, persona):
        name = persona.character
        if not isinstance(name, str):
            name = " ".join(name)
        return cls(name)

    def __str__(self):
        return f'{self.name} = {self.value} ({" ".join([str(v) for v in self.stack][::-1])})'

    def push(self, newValue):
        """Push a value onto the character's stack."""
        self.stack.append(newValue)

    def pop(self):
        """Pop a value off the character's stack, and set the character to that value."""
        if len(self.stack) == 0:
            raise ShakespeareRuntimeError(
                "Tried to pop from an empty stack. Character: " + self.name
            )
        self.value = self.stack.pop()
from .errors import ShakespeareRuntimeError
from .utils import normalize_name


class Character:
    """A character in an SPL play."""

    def __init__(self, name):
        self.value = 0
        self.stack = []
        self.on_stage = False
        self.name = name

    @classmethod
    def from_dramatis_persona(cls, persona):
        return cls(normalize_name(persona.character))

    def __str__(self):
        return f'{self.name} = {self.value} ({" ".join([str(v) for v in self.stack][::-1])})'

    def push(self, newValue):
        """Push a value onto the character's stack."""
        self.stack.append(newValue)

    def pop(self):
        """Pop a value off the character's stack, and set the character to that value."""
        if len(self.stack) == 0:
            raise ShakespeareRuntimeError(
                "Tried to pop from an empty stack. Character: " + self.name
            )
        self.value = self.stack.pop()
Use utils to simplify code
Use utils to simplify code
Python
mit
zmbc/shakespearelang,zmbc/shakespearelang,zmbc/shakespearelang
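The commit above delegates to a normalize_name helper; a plausible sketch, inferred from the inline logic it replaces (the real shakespearelang.utils implementation may differ):

def normalize_name(name):
    # The parsed persona name may arrive as a plain string or as a
    # sequence of word tokens; join the latter into one string.
    if not isinstance(name, str):
        name = " ".join(name)
    return name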
21d5acb0ed340f15feccd5938ae51d47739f930a
falmer/commercial/queries.py
falmer/commercial/queries.py
import graphene

from .models import Offer
from . import types


class Query(graphene.ObjectType):
    all_offers = graphene.List(types.Offer)

    def resolve_all_offers(self, info):
        return Offer.objects.all()
import graphene

from .models import Offer
from . import types


class Query(graphene.ObjectType):
    all_offers = graphene.List(types.Offer)

    def resolve_all_offers(self, info):
        return Offer.objects.order_by('company_name').all()
Order offers by company name
Order offers by company name

Closes #373
Python
mit
sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer
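A hypothetical execution sketch for the resolver above; the companyName field name is assumed from the order_by column together with graphene's default auto-camelcasing:

schema = graphene.Schema(query=Query)
result = schema.execute('{ allOffers { companyName } }')  # assumed field name
print(result.data)  # offers arrive sorted by company_name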
d6bd21348c0758404e7cb884c639a7c618f4e281
tx_salaries/mixins.py
tx_salaries/mixins.py
from tx_people.mixins import *


class DenormalizeOnSaveMixin(object):
    def save(self, denormalize=True, *args, **kwargs):
        obj = super(DenormalizeOnSaveMixin, self).save(*args, **kwargs)
        # TODO: Abstract into a general library
        if denormalize:
            for a in self._meta.get_all_related_objects():
                if hasattr(a.model.objects, 'denormalize'):
                    a.model.objects.denormalize(self)
        return obj
from tx_people.mixins import *


class DenormalizeOnSaveMixin(object):
    """
    Adjusts a model's save method to denormalize data where it can

    This works by going through all related objects and calling its
    ``denormalize`` manager method.  Each of those methods should take
    one argument -- the model that's being saved.
    """

    def save(self, denormalize=True, *args, **kwargs):
        obj = super(DenormalizeOnSaveMixin, self).save(*args, **kwargs)
        # TODO: Abstract into a general library
        if denormalize:
            for a in self._meta.get_all_related_objects():
                if hasattr(a.model.objects, 'denormalize'):
                    a.model.objects.denormalize(self)
        return obj
Add first pass at docs
Add first pass at docs
Python
apache-2.0
texastribune/tx_salaries,texastribune/tx_salaries
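A hypothetical sketch of a related-model manager that satisfies the contract described in the docstring above; every name below is invented for illustration:

from django.db import models

class StatsManager(models.Manager):
    def denormalize(self, saved_obj):
        # `saved_obj` is the instance being saved; mark dependent cached
        # rows so their aggregates get recomputed.
        self.filter(source_id=saved_obj.pk).update(stale=True)

class Stats(models.Model):
    source_id = models.IntegerField()
    stale = models.BooleanField(default=False)

    objects = StatsManager()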
6c34a6c3e73a41cb94bc761a7001cfa9bba24eb3
combobox.py
combobox.py
import sys

from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *


class ComboBox(QWidget):

    def __init__(self, parent=None, items=[]):
        super(ComboBox, self).__init__(parent)

        layout = QHBoxLayout()
        self.cb = QComboBox()
        self.items = items
        self.cb.addItems(self.items)
        self.cb.currentIndexChanged.connect(parent.comboSelectionChanged)
        layout.addWidget(self.cb)
        self.setLayout(layout)

    def update_items(self, items):
        self.items = items
        self.cb.clear()
        self.cb.addItems(self.items)
import sys

try:
    from PyQt5.QtWidgets import QWidget, QHBoxLayout, QComboBox
except ImportError:
    # needed for py3+qt4
    # Ref:
    # http://pyqt.sourceforge.net/Docs/PyQt4/incompatible_apis.html
    # http://stackoverflow.com/questions/21217399/pyqt4-qtcore-qvariant-object-instead-of-a-string
    if sys.version_info.major >= 3:
        import sip
        sip.setapi('QVariant', 2)
    # PyQt4 has no QtWidgets module; its widget classes live in QtGui.
    from PyQt4.QtGui import QWidget, QHBoxLayout, QComboBox


class ComboBox(QWidget):

    def __init__(self, parent=None, items=[]):
        super(ComboBox, self).__init__(parent)

        layout = QHBoxLayout()
        self.cb = QComboBox()
        self.items = items
        self.cb.addItems(self.items)
        self.cb.currentIndexChanged.connect(parent.comboSelectionChanged)
        layout.addWidget(self.cb)
        self.setLayout(layout)

    def update_items(self, items):
        self.items = items
        self.cb.clear()
        self.cb.addItems(self.items)
Update imports and added PyQt4 support
Update imports and added PyQt4 support
Python
mit
tzutalin/labelImg,tzutalin/labelImg
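A hypothetical host-widget sketch showing the coupling ComboBox assumes: the parent must expose a comboSelectionChanged slot, which the constructor wires to the inner QComboBox (a live QApplication is required before any widget is built):

from PyQt5.QtWidgets import QApplication, QWidget

app = QApplication([])

class Host(QWidget):
    def __init__(self):
        super(Host, self).__init__()
        self.combo = ComboBox(self, items=['cat', 'dog'])

    def comboSelectionChanged(self, index):
        print('selection changed to index', index)

window = Host()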
400e533893e1b2e71519480504104f38c616a1a7
ircstat/log.py
ircstat/log.py
# Copyright 2013 John Reese
# Licensed under the MIT license

import logging

logging.addLevelName(logging.DEBUG, 'DBG')
logging.addLevelName(logging.INFO, 'INF')
logging.addLevelName(logging.WARNING, 'WRN')
logging.addLevelName(logging.ERROR, 'ERR')

sh = None


def logger(name=None):
    global sh

    log = logging.getLogger(name)
    log.setLevel(logging.DEBUG)

    if sh is None:
        fm = logging.Formatter('%(levelname)s %(message)s')
        sh = logging.StreamHandler()
        sh.setLevel(logging.DEBUG)
        sh.setFormatter(fm)

    log.addHandler(sh)
    return log
# Copyright 2013 John Reese
# Licensed under the MIT license

import logging

logging.addLevelName(logging.DEBUG, 'DBG')
logging.addLevelName(logging.INFO, 'INF')
logging.addLevelName(logging.WARNING, 'WRN')
logging.addLevelName(logging.ERROR, 'ERR')

sh = None


def logger(name=None):
    global sh

    log = logging.getLogger(name)
    log.setLevel(logging.DEBUG)

    if sh is None:
        fm = logging.Formatter('%(message)s')
        sh = logging.StreamHandler()
        sh.setLevel(logging.INFO)
        sh.setFormatter(fm)

    log.addHandler(sh)
    return log


def enable_debug():
    fm = logging.Formatter('%(levelname)s %(message)s')
    sh.setLevel(logging.DEBUG)
    sh.setFormatter(fm)
Add method for debug output
Add method for debug output
Python
mit
jreese/ircstat,jreese/ircstat
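A hypothetical wiring sketch for the new enable_debug hook; the --debug flag is assumed, not taken from ircstat:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--debug', action='store_true')  # assumed flag name
args = parser.parse_args()

log = logger(__name__)
if args.debug:
    enable_debug()  # switch the shared handler to DEBUG + levelname format
log.debug('visible only after enable_debug()')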
baabc63ffad0b9641bd3d68800a9db84fe4076d3
src/__init__.py
src/__init__.py
__version_info__ = ('1', '10', '3')
__version__ = '.'.join(__version_info__)

from .wrappers import (ObjectProxy, CallableObjectProxy, FunctionWrapper,
        BoundFunctionWrapper, WeakFunctionProxy, resolve_path, apply_patch,
        wrap_object, wrap_object_attribute, function_wrapper,
        wrap_function_wrapper, patch_function_wrapper,
        transient_function_wrapper)

from .decorators import (adapter_factory, AdapterFactory, decorator,
        synchronized)

from .importer import (register_post_import_hook, when_imported,
        discover_post_import_hooks)

try:
    from inspect import getcallargs
except ImportError:
    from .arguments import getcallargs
__version_info__ = ('1', '10', '3')
__version__ = '.'.join(__version_info__)

from .wrappers import (ObjectProxy, CallableObjectProxy, FunctionWrapper,
        BoundFunctionWrapper, WeakFunctionProxy, resolve_path, apply_patch,
        wrap_object, wrap_object_attribute, function_wrapper,
        wrap_function_wrapper, patch_function_wrapper,
        transient_function_wrapper)

from .decorators import (adapter_factory, AdapterFactory, decorator,
        synchronized)

from .importer import (register_post_import_hook, when_imported,
        notify_module_loaded, discover_post_import_hooks)

try:
    from inspect import getcallargs
except ImportError:
    from .arguments import getcallargs
Add post import hook discovery to public API.
Add post import hook discovery to public API.
Python
bsd-2-clause
GrahamDumpleton/wrapt,github4ry/wrapt,linglaiyao1314/wrapt,pombredanne/wrapt,wujuguang/wrapt,linglaiyao1314/wrapt,wujuguang/wrapt,akash1808/wrapt,pombredanne/wrapt,GrahamDumpleton/wrapt,github4ry/wrapt,akash1808/wrapt
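For context, a small usage sketch of the post-import-hook API this commit finishes re-exporting (when_imported is an existing wrapt decorator):

import wrapt

@wrapt.when_imported('socket')
def _on_socket_import(module):
    # Runs when 'socket' is first imported, or immediately if already loaded.
    print('patching', module.__name__)

import socket  # triggers the hook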
62818c327997e804090ad8fab328e05410d65d89
resolwe/flow/tests/test_backend.py
resolwe/flow/tests/test_backend.py
# pylint: disable=missing-docstring from __future__ import absolute_import, division, print_function, unicode_literals import os import shutil from django.conf import settings from django.contrib.auth import get_user_model from django.test import TestCase from resolwe.flow.engines.local import manager from resolwe.flow.models import Data, Process class BackendTest(TestCase): def setUp(self): u = get_user_model().objects.create_superuser('test', 'test@genialis.com', 'test') self.p = Process(slug='test-processor', name='Test Process', contributor=u, type='data:test', version=1) self.p.save() self.d = Data(slug='test-data', name='Test Data', contributor=u, process=self.p) self.d.save() data_path = settings.FLOW_EXECUTOR['DATA_PATH'] if os.path.exists(data_path): shutil.rmtree(data_path) os.makedirs(data_path) def test_manager(self): manager.communicate(verbosity=0) def test_dtlbash(self): self.p.slug = 'test-processor-dtlbash' self.p.run = {'script': """ gen-info \"Test processor info\" gen-warning \"Test processor warning\" echo '{"proc.info": "foo"}' """} self.p.save() self.d.slug = 'test-data-dtlbash' self.d.process = self.p self.d.save() self.d = Data(id=self.d.id)
# pylint: disable=missing-docstring from __future__ import absolute_import, division, print_function, unicode_literals import os import shutil from django.conf import settings from django.contrib.auth import get_user_model from django.test import TestCase from resolwe.flow.engines.local import manager from resolwe.flow.models import Data, Process class BackendTest(TestCase): def setUp(self): u = get_user_model().objects.create_superuser('test', 'test@genialis.com', 'test') self.p = Process(slug='test-processor', name='Test Process', contributor=u, type='data:test', version=1) self.p.save() self.d = Data(slug='test-data', name='Test Data', contributor=u, process=self.p) self.d.save() def test_manager(self): manager.communicate(verbosity=0) def test_dtlbash(self): self.p.slug = 'test-processor-dtlbash' self.p.run = {'script': """ gen-info \"Test processor info\" gen-warning \"Test processor warning\" echo '{"proc.info": "foo"}' """} self.p.save() self.d.slug = 'test-data-dtlbash' self.d.process = self.p self.d.save() self.d = Data(id=self.d.id)
Remove (potentially dangerous) data path recreation
Remove (potentially dangerous) data path recreation
Python
apache-2.0
jberci/resolwe,genialis/resolwe,jberci/resolwe,genialis/resolwe
# pylint: disable=missing-docstring from __future__ import absolute_import, division, print_function, unicode_literals import os import shutil from django.conf import settings from django.contrib.auth import get_user_model from django.test import TestCase from resolwe.flow.engines.local import manager from resolwe.flow.models import Data, Process class BackendTest(TestCase): def setUp(self): u = get_user_model().objects.create_superuser('test', 'test@genialis.com', 'test') self.p = Process(slug='test-processor', name='Test Process', contributor=u, type='data:test', version=1) self.p.save() self.d = Data(slug='test-data', name='Test Data', contributor=u, process=self.p) self.d.save() data_path = settings.FLOW_EXECUTOR['DATA_PATH'] if os.path.exists(data_path): shutil.rmtree(data_path) os.makedirs(data_path) def test_manager(self): manager.communicate(verbosity=0) def test_dtlbash(self): self.p.slug = 'test-processor-dtlbash' self.p.run = {'script': """ gen-info \"Test processor info\" gen-warning \"Test processor warning\" echo '{"proc.info": "foo"}' """} self.p.save() self.d.slug = 'test-data-dtlbash' self.d.process = self.p self.d.save() self.d = Data(id=self.d.id) Remove (potentially dangerous) data path recreation
# pylint: disable=missing-docstring from __future__ import absolute_import, division, print_function, unicode_literals import os import shutil from django.conf import settings from django.contrib.auth import get_user_model from django.test import TestCase from resolwe.flow.engines.local import manager from resolwe.flow.models import Data, Process class BackendTest(TestCase): def setUp(self): u = get_user_model().objects.create_superuser('test', 'test@genialis.com', 'test') self.p = Process(slug='test-processor', name='Test Process', contributor=u, type='data:test', version=1) self.p.save() self.d = Data(slug='test-data', name='Test Data', contributor=u, process=self.p) self.d.save() def test_manager(self): manager.communicate(verbosity=0) def test_dtlbash(self): self.p.slug = 'test-processor-dtlbash' self.p.run = {'script': """ gen-info \"Test processor info\" gen-warning \"Test processor warning\" echo '{"proc.info": "foo"}' """} self.p.save() self.d.slug = 'test-data-dtlbash' self.d.process = self.p self.d.save() self.d = Data(id=self.d.id)
<commit_before># pylint: disable=missing-docstring from __future__ import absolute_import, division, print_function, unicode_literals import os import shutil from django.conf import settings from django.contrib.auth import get_user_model from django.test import TestCase from resolwe.flow.engines.local import manager from resolwe.flow.models import Data, Process class BackendTest(TestCase): def setUp(self): u = get_user_model().objects.create_superuser('test', 'test@genialis.com', 'test') self.p = Process(slug='test-processor', name='Test Process', contributor=u, type='data:test', version=1) self.p.save() self.d = Data(slug='test-data', name='Test Data', contributor=u, process=self.p) self.d.save() data_path = settings.FLOW_EXECUTOR['DATA_PATH'] if os.path.exists(data_path): shutil.rmtree(data_path) os.makedirs(data_path) def test_manager(self): manager.communicate(verbosity=0) def test_dtlbash(self): self.p.slug = 'test-processor-dtlbash' self.p.run = {'script': """ gen-info \"Test processor info\" gen-warning \"Test processor warning\" echo '{"proc.info": "foo"}' """} self.p.save() self.d.slug = 'test-data-dtlbash' self.d.process = self.p self.d.save() self.d = Data(id=self.d.id) <commit_msg>Remove (potentially dangerous) data path recreation<commit_after>
# pylint: disable=missing-docstring from __future__ import absolute_import, division, print_function, unicode_literals import os import shutil from django.conf import settings from django.contrib.auth import get_user_model from django.test import TestCase from resolwe.flow.engines.local import manager from resolwe.flow.models import Data, Process class BackendTest(TestCase): def setUp(self): u = get_user_model().objects.create_superuser('test', 'test@genialis.com', 'test') self.p = Process(slug='test-processor', name='Test Process', contributor=u, type='data:test', version=1) self.p.save() self.d = Data(slug='test-data', name='Test Data', contributor=u, process=self.p) self.d.save() def test_manager(self): manager.communicate(verbosity=0) def test_dtlbash(self): self.p.slug = 'test-processor-dtlbash' self.p.run = {'script': """ gen-info \"Test processor info\" gen-warning \"Test processor warning\" echo '{"proc.info": "foo"}' """} self.p.save() self.d.slug = 'test-data-dtlbash' self.d.process = self.p self.d.save() self.d = Data(id=self.d.id)
# pylint: disable=missing-docstring from __future__ import absolute_import, division, print_function, unicode_literals import os import shutil from django.conf import settings from django.contrib.auth import get_user_model from django.test import TestCase from resolwe.flow.engines.local import manager from resolwe.flow.models import Data, Process class BackendTest(TestCase): def setUp(self): u = get_user_model().objects.create_superuser('test', 'test@genialis.com', 'test') self.p = Process(slug='test-processor', name='Test Process', contributor=u, type='data:test', version=1) self.p.save() self.d = Data(slug='test-data', name='Test Data', contributor=u, process=self.p) self.d.save() data_path = settings.FLOW_EXECUTOR['DATA_PATH'] if os.path.exists(data_path): shutil.rmtree(data_path) os.makedirs(data_path) def test_manager(self): manager.communicate(verbosity=0) def test_dtlbash(self): self.p.slug = 'test-processor-dtlbash' self.p.run = {'script': """ gen-info \"Test processor info\" gen-warning \"Test processor warning\" echo '{"proc.info": "foo"}' """} self.p.save() self.d.slug = 'test-data-dtlbash' self.d.process = self.p self.d.save() self.d = Data(id=self.d.id) Remove (potentially dangerous) data path recreation# pylint: disable=missing-docstring from __future__ import absolute_import, division, print_function, unicode_literals import os import shutil from django.conf import settings from django.contrib.auth import get_user_model from django.test import TestCase from resolwe.flow.engines.local import manager from resolwe.flow.models import Data, Process class BackendTest(TestCase): def setUp(self): u = get_user_model().objects.create_superuser('test', 'test@genialis.com', 'test') self.p = Process(slug='test-processor', name='Test Process', contributor=u, type='data:test', version=1) self.p.save() self.d = Data(slug='test-data', name='Test Data', contributor=u, process=self.p) self.d.save() def test_manager(self): manager.communicate(verbosity=0) def test_dtlbash(self): self.p.slug = 'test-processor-dtlbash' self.p.run = {'script': """ gen-info \"Test processor info\" gen-warning \"Test processor warning\" echo '{"proc.info": "foo"}' """} self.p.save() self.d.slug = 'test-data-dtlbash' self.d.process = self.p self.d.save() self.d = Data(id=self.d.id)
<commit_before># pylint: disable=missing-docstring from __future__ import absolute_import, division, print_function, unicode_literals import os import shutil from django.conf import settings from django.contrib.auth import get_user_model from django.test import TestCase from resolwe.flow.engines.local import manager from resolwe.flow.models import Data, Process class BackendTest(TestCase): def setUp(self): u = get_user_model().objects.create_superuser('test', 'test@genialis.com', 'test') self.p = Process(slug='test-processor', name='Test Process', contributor=u, type='data:test', version=1) self.p.save() self.d = Data(slug='test-data', name='Test Data', contributor=u, process=self.p) self.d.save() data_path = settings.FLOW_EXECUTOR['DATA_PATH'] if os.path.exists(data_path): shutil.rmtree(data_path) os.makedirs(data_path) def test_manager(self): manager.communicate(verbosity=0) def test_dtlbash(self): self.p.slug = 'test-processor-dtlbash' self.p.run = {'script': """ gen-info \"Test processor info\" gen-warning \"Test processor warning\" echo '{"proc.info": "foo"}' """} self.p.save() self.d.slug = 'test-data-dtlbash' self.d.process = self.p self.d.save() self.d = Data(id=self.d.id) <commit_msg>Remove (potentially dangerous) data path recreation<commit_after># pylint: disable=missing-docstring from __future__ import absolute_import, division, print_function, unicode_literals import os import shutil from django.conf import settings from django.contrib.auth import get_user_model from django.test import TestCase from resolwe.flow.engines.local import manager from resolwe.flow.models import Data, Process class BackendTest(TestCase): def setUp(self): u = get_user_model().objects.create_superuser('test', 'test@genialis.com', 'test') self.p = Process(slug='test-processor', name='Test Process', contributor=u, type='data:test', version=1) self.p.save() self.d = Data(slug='test-data', name='Test Data', contributor=u, process=self.p) self.d.save() def test_manager(self): manager.communicate(verbosity=0) def test_dtlbash(self): self.p.slug = 'test-processor-dtlbash' self.p.run = {'script': """ gen-info \"Test processor info\" gen-warning \"Test processor warning\" echo '{"proc.info": "foo"}' """} self.p.save() self.d.slug = 'test-data-dtlbash' self.d.process = self.p self.d.save() self.d = Data(id=self.d.id)
25ea87b810d717690679613251fbc262f11c021f
pajbot/modules/linefarming.py
pajbot/modules/linefarming.py
import logging from pajbot.managers.handler import HandlerManager from pajbot.models.user import User from pajbot.modules import BaseModule from pajbot.modules import ModuleSetting log = logging.getLogger(__name__) class LineFarmingModule(BaseModule): ID = __name__.split(".")[-1] NAME = "Line Farming" DESCRIPTION = "Keep track on the amount of lines users type in chat" ENABLED_DEFAULT = True CATEGORY = "Feature" SETTINGS = [ ModuleSetting( key="count_offline", label="Count lines in offline chat", type="boolean", required=True, default=False ) ] def on_pubmsg(self, source, **rest): if self.bot.is_online or self.settings["count_offline"] is True: # this funky syntax makes SQLAlchemy increment # the num_lines atomically with SET num_lines=("user".num_lines + 1) source.num_lines = User.num_lines + 1 def enable(self, bot): HandlerManager.add_handler("on_pubmsg", self.on_pubmsg) def disable(self, bot): HandlerManager.remove_handler("on_pubmsg", self.on_pubmsg)
import logging from pajbot.managers.handler import HandlerManager from pajbot.models.user import User from pajbot.modules import BaseModule from pajbot.modules import ModuleSetting log = logging.getLogger(__name__) class LineFarmingModule(BaseModule): ID = __name__.split(".")[-1] NAME = "Line Farming" DESCRIPTION = "Keep track on the amount of lines users type in chat" ENABLED_DEFAULT = True CATEGORY = "Feature" SETTINGS = [ ModuleSetting( key="count_offline", label="Count lines in offline chat", type="boolean", required=True, default=False ) ] def on_pubmsg(self, source, **rest): if self.bot.is_online or self.settings["count_offline"] is True: source.num_lines += 1 def enable(self, bot): HandlerManager.add_handler("on_pubmsg", self.on_pubmsg) def disable(self, bot): HandlerManager.remove_handler("on_pubmsg", self.on_pubmsg)
Revert to using += to increment user's lines Doing it the "atomic" way was causing str(user.num_lines) to become ""user".num_lines + :num_lines_1" which is not intended
Revert to using += to increment user's lines Doing it the "atomic" way was causing str(user.num_lines) to become ""user".num_lines + :num_lines_1" which is not intended
Python
mit
pajlada/pajbot,pajlada/pajbot,pajlada/pajbot,pajlada/tyggbot,pajlada/tyggbot,pajlada/tyggbot,pajlada/pajbot,pajlada/tyggbot
import logging from pajbot.managers.handler import HandlerManager from pajbot.models.user import User from pajbot.modules import BaseModule from pajbot.modules import ModuleSetting log = logging.getLogger(__name__) class LineFarmingModule(BaseModule): ID = __name__.split(".")[-1] NAME = "Line Farming" DESCRIPTION = "Keep track on the amount of lines users type in chat" ENABLED_DEFAULT = True CATEGORY = "Feature" SETTINGS = [ ModuleSetting( key="count_offline", label="Count lines in offline chat", type="boolean", required=True, default=False ) ] def on_pubmsg(self, source, **rest): if self.bot.is_online or self.settings["count_offline"] is True: # this funky syntax makes SQLAlchemy increment # the num_lines atomically with SET num_lines=("user".num_lines + 1) source.num_lines = User.num_lines + 1 def enable(self, bot): HandlerManager.add_handler("on_pubmsg", self.on_pubmsg) def disable(self, bot): HandlerManager.remove_handler("on_pubmsg", self.on_pubmsg) Revert to using += to increment user's lines Doing it the "atomic" way was causing str(user.num_lines) to become ""user".num_lines + :num_lines_1" which is not intended
import logging from pajbot.managers.handler import HandlerManager from pajbot.models.user import User from pajbot.modules import BaseModule from pajbot.modules import ModuleSetting log = logging.getLogger(__name__) class LineFarmingModule(BaseModule): ID = __name__.split(".")[-1] NAME = "Line Farming" DESCRIPTION = "Keep track on the amount of lines users type in chat" ENABLED_DEFAULT = True CATEGORY = "Feature" SETTINGS = [ ModuleSetting( key="count_offline", label="Count lines in offline chat", type="boolean", required=True, default=False ) ] def on_pubmsg(self, source, **rest): if self.bot.is_online or self.settings["count_offline"] is True: source.num_lines += 1 def enable(self, bot): HandlerManager.add_handler("on_pubmsg", self.on_pubmsg) def disable(self, bot): HandlerManager.remove_handler("on_pubmsg", self.on_pubmsg)
<commit_before>import logging from pajbot.managers.handler import HandlerManager from pajbot.models.user import User from pajbot.modules import BaseModule from pajbot.modules import ModuleSetting log = logging.getLogger(__name__) class LineFarmingModule(BaseModule): ID = __name__.split(".")[-1] NAME = "Line Farming" DESCRIPTION = "Keep track on the amount of lines users type in chat" ENABLED_DEFAULT = True CATEGORY = "Feature" SETTINGS = [ ModuleSetting( key="count_offline", label="Count lines in offline chat", type="boolean", required=True, default=False ) ] def on_pubmsg(self, source, **rest): if self.bot.is_online or self.settings["count_offline"] is True: # this funky syntax makes SQLAlchemy increment # the num_lines atomically with SET num_lines=("user".num_lines + 1) source.num_lines = User.num_lines + 1 def enable(self, bot): HandlerManager.add_handler("on_pubmsg", self.on_pubmsg) def disable(self, bot): HandlerManager.remove_handler("on_pubmsg", self.on_pubmsg) <commit_msg>Revert to using += to increment user's lines Doing it the "atomic" way was causing str(user.num_lines) to become ""user".num_lines + :num_lines_1" which is not intended<commit_after>
import logging from pajbot.managers.handler import HandlerManager from pajbot.models.user import User from pajbot.modules import BaseModule from pajbot.modules import ModuleSetting log = logging.getLogger(__name__) class LineFarmingModule(BaseModule): ID = __name__.split(".")[-1] NAME = "Line Farming" DESCRIPTION = "Keep track on the amount of lines users type in chat" ENABLED_DEFAULT = True CATEGORY = "Feature" SETTINGS = [ ModuleSetting( key="count_offline", label="Count lines in offline chat", type="boolean", required=True, default=False ) ] def on_pubmsg(self, source, **rest): if self.bot.is_online or self.settings["count_offline"] is True: source.num_lines += 1 def enable(self, bot): HandlerManager.add_handler("on_pubmsg", self.on_pubmsg) def disable(self, bot): HandlerManager.remove_handler("on_pubmsg", self.on_pubmsg)
import logging from pajbot.managers.handler import HandlerManager from pajbot.models.user import User from pajbot.modules import BaseModule from pajbot.modules import ModuleSetting log = logging.getLogger(__name__) class LineFarmingModule(BaseModule): ID = __name__.split(".")[-1] NAME = "Line Farming" DESCRIPTION = "Keep track on the amount of lines users type in chat" ENABLED_DEFAULT = True CATEGORY = "Feature" SETTINGS = [ ModuleSetting( key="count_offline", label="Count lines in offline chat", type="boolean", required=True, default=False ) ] def on_pubmsg(self, source, **rest): if self.bot.is_online or self.settings["count_offline"] is True: # this funky syntax makes SQLAlchemy increment # the num_lines atomically with SET num_lines=("user".num_lines + 1) source.num_lines = User.num_lines + 1 def enable(self, bot): HandlerManager.add_handler("on_pubmsg", self.on_pubmsg) def disable(self, bot): HandlerManager.remove_handler("on_pubmsg", self.on_pubmsg) Revert to using += to increment user's lines Doing it the "atomic" way was causing str(user.num_lines) to become ""user".num_lines + :num_lines_1" which is not intendedimport logging from pajbot.managers.handler import HandlerManager from pajbot.models.user import User from pajbot.modules import BaseModule from pajbot.modules import ModuleSetting log = logging.getLogger(__name__) class LineFarmingModule(BaseModule): ID = __name__.split(".")[-1] NAME = "Line Farming" DESCRIPTION = "Keep track on the amount of lines users type in chat" ENABLED_DEFAULT = True CATEGORY = "Feature" SETTINGS = [ ModuleSetting( key="count_offline", label="Count lines in offline chat", type="boolean", required=True, default=False ) ] def on_pubmsg(self, source, **rest): if self.bot.is_online or self.settings["count_offline"] is True: source.num_lines += 1 def enable(self, bot): HandlerManager.add_handler("on_pubmsg", self.on_pubmsg) def disable(self, bot): HandlerManager.remove_handler("on_pubmsg", self.on_pubmsg)
<commit_before>import logging from pajbot.managers.handler import HandlerManager from pajbot.models.user import User from pajbot.modules import BaseModule from pajbot.modules import ModuleSetting log = logging.getLogger(__name__) class LineFarmingModule(BaseModule): ID = __name__.split(".")[-1] NAME = "Line Farming" DESCRIPTION = "Keep track on the amount of lines users type in chat" ENABLED_DEFAULT = True CATEGORY = "Feature" SETTINGS = [ ModuleSetting( key="count_offline", label="Count lines in offline chat", type="boolean", required=True, default=False ) ] def on_pubmsg(self, source, **rest): if self.bot.is_online or self.settings["count_offline"] is True: # this funky syntax makes SQLAlchemy increment # the num_lines atomically with SET num_lines=("user".num_lines + 1) source.num_lines = User.num_lines + 1 def enable(self, bot): HandlerManager.add_handler("on_pubmsg", self.on_pubmsg) def disable(self, bot): HandlerManager.remove_handler("on_pubmsg", self.on_pubmsg) <commit_msg>Revert to using += to increment user's lines Doing it the "atomic" way was causing str(user.num_lines) to become ""user".num_lines + :num_lines_1" which is not intended<commit_after>import logging from pajbot.managers.handler import HandlerManager from pajbot.models.user import User from pajbot.modules import BaseModule from pajbot.modules import ModuleSetting log = logging.getLogger(__name__) class LineFarmingModule(BaseModule): ID = __name__.split(".")[-1] NAME = "Line Farming" DESCRIPTION = "Keep track on the amount of lines users type in chat" ENABLED_DEFAULT = True CATEGORY = "Feature" SETTINGS = [ ModuleSetting( key="count_offline", label="Count lines in offline chat", type="boolean", required=True, default=False ) ] def on_pubmsg(self, source, **rest): if self.bot.is_online or self.settings["count_offline"] is True: source.num_lines += 1 def enable(self, bot): HandlerManager.add_handler("on_pubmsg", self.on_pubmsg) def disable(self, bot): HandlerManager.remove_handler("on_pubmsg", self.on_pubmsg)
2b3406a46625fd5350200dcb6d2dc32224d3c609
warehouse/defaults.py
warehouse/defaults.py
from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals # The base domain name for this installation. Used to control linking to # sub-domains. SERVER_NAME = "warehouse.local" # The URI for our PostgreSQL database. SQLALCHEMY_DATABASE_URI = "postgres:///warehouse" # The URI for our Redis database. REDIS_URI = "redis://localhost:6379/0" # The amount of time (in seconds) that synchronizing each project can take # before timing out. SYNCHRONIZATION_TIMEOUT = 60 * 15 # The type of Storage to use. STORAGE = "stockpile.filesystem:HashedFileSystem" # Options to pass into the stockpile storage backend STORAGE_OPTIONS = { "location": "data", "hash_algorithm": "md5", }
from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals # The base domain name for this installation. Used to control linking to # sub-domains. SERVER_NAME = "warehouse.local" # The URI for our PostgreSQL database. SQLALCHEMY_DATABASE_URI = "postgres:///warehouse" # The URI for our Redis database. REDIS_URI = "redis://localhost:6379/0" # The type of Storage to use. STORAGE = "stockpile.filesystem:HashedFileSystem" # Options to pass into the stockpile storage backend STORAGE_OPTIONS = { "location": "data", "hash_algorithm": "md5", }
Remove a no longer used setting
Remove a no longer used setting With the removal of eventlet there is no longer a mechanism for timing out a synchronization.
Python
bsd-2-clause
davidfischer/warehouse
from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals # The base domain name for this installation. Used to control linking to # sub-domains. SERVER_NAME = "warehouse.local" # The URI for our PostgreSQL database. SQLALCHEMY_DATABASE_URI = "postgres:///warehouse" # The URI for our Redis database. REDIS_URI = "redis://localhost:6379/0" # The amount of time (in seconds) that synchronizing each project can take # before timing out. SYNCHRONIZATION_TIMEOUT = 60 * 15 # The type of Storage to use. STORAGE = "stockpile.filesystem:HashedFileSystem" # Options to pass into the stockpile storage backend STORAGE_OPTIONS = { "location": "data", "hash_algorithm": "md5", } Remove a no longer used setting With the removal of eventlet there is no longer a mechanism for timing out a synchronization.
from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals # The base domain name for this installation. Used to control linking to # sub-domains. SERVER_NAME = "warehouse.local" # The URI for our PostgreSQL database. SQLALCHEMY_DATABASE_URI = "postgres:///warehouse" # The URI for our Redis database. REDIS_URI = "redis://localhost:6379/0" # The type of Storage to use. STORAGE = "stockpile.filesystem:HashedFileSystem" # Options to pass into the stockpile storage backend STORAGE_OPTIONS = { "location": "data", "hash_algorithm": "md5", }
<commit_before>from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals # The base domain name for this installation. Used to control linking to # sub-domains. SERVER_NAME = "warehouse.local" # The URI for our PostgreSQL database. SQLALCHEMY_DATABASE_URI = "postgres:///warehouse" # The URI for our Redis database. REDIS_URI = "redis://localhost:6379/0" # The amount of time (in seconds) that synchronizing each project can take # before timing out. SYNCHRONIZATION_TIMEOUT = 60 * 15 # The type of Storage to use. STORAGE = "stockpile.filesystem:HashedFileSystem" # Options to pass into the stockpile storage backend STORAGE_OPTIONS = { "location": "data", "hash_algorithm": "md5", } <commit_msg>Remove a no longer used setting With the removal of eventlet there is no longer a mechanism for timing out a synchronization.<commit_after>
from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals # The base domain name for this installation. Used to control linking to # sub-domains. SERVER_NAME = "warehouse.local" # The URI for our PostgreSQL database. SQLALCHEMY_DATABASE_URI = "postgres:///warehouse" # The URI for our Redis database. REDIS_URI = "redis://localhost:6379/0" # The type of Storage to use. STORAGE = "stockpile.filesystem:HashedFileSystem" # Options to pass into the stockpile storage backend STORAGE_OPTIONS = { "location": "data", "hash_algorithm": "md5", }
from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals # The base domain name for this installation. Used to control linking to # sub-domains. SERVER_NAME = "warehouse.local" # The URI for our PostgreSQL database. SQLALCHEMY_DATABASE_URI = "postgres:///warehouse" # The URI for our Redis database. REDIS_URI = "redis://localhost:6379/0" # The amount of time (in seconds) that synchronizing each project can take # before timing out. SYNCHRONIZATION_TIMEOUT = 60 * 15 # The type of Storage to use. STORAGE = "stockpile.filesystem:HashedFileSystem" # Options to pass into the stockpile storage backend STORAGE_OPTIONS = { "location": "data", "hash_algorithm": "md5", } Remove a no longer used setting With the removal of eventlet there is no longer a mechanism for timing out a synchronization.from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals # The base domain name for this installation. Used to control linking to # sub-domains. SERVER_NAME = "warehouse.local" # The URI for our PostgreSQL database. SQLALCHEMY_DATABASE_URI = "postgres:///warehouse" # The URI for our Redis database. REDIS_URI = "redis://localhost:6379/0" # The type of Storage to use. STORAGE = "stockpile.filesystem:HashedFileSystem" # Options to pass into the stockpile storage backend STORAGE_OPTIONS = { "location": "data", "hash_algorithm": "md5", }
<commit_before>from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals # The base domain name for this installation. Used to control linking to # sub-domains. SERVER_NAME = "warehouse.local" # The URI for our PostgreSQL database. SQLALCHEMY_DATABASE_URI = "postgres:///warehouse" # The URI for our Redis database. REDIS_URI = "redis://localhost:6379/0" # The amount of time (in seconds) that synchronizing each project can take # before timing out. SYNCHRONIZATION_TIMEOUT = 60 * 15 # The type of Storage to use. STORAGE = "stockpile.filesystem:HashedFileSystem" # Options to pass into the stockpile storage backend STORAGE_OPTIONS = { "location": "data", "hash_algorithm": "md5", } <commit_msg>Remove a no longer used setting With the removal of eventlet there is no longer a mechanism for timing out a synchronization.<commit_after>from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals # The base domain name for this installation. Used to control linking to # sub-domains. SERVER_NAME = "warehouse.local" # The URI for our PostgreSQL database. SQLALCHEMY_DATABASE_URI = "postgres:///warehouse" # The URI for our Redis database. REDIS_URI = "redis://localhost:6379/0" # The type of Storage to use. STORAGE = "stockpile.filesystem:HashedFileSystem" # Options to pass into the stockpile storage backend STORAGE_OPTIONS = { "location": "data", "hash_algorithm": "md5", }
9d66600518ec05dae2be62da0bbe2c15ddccce9d
spicedham/__init__.py
spicedham/__init__.py
from pkg_resources import iter_entry_points from spicedham.config import load_config _plugins = None def load_plugins(): """ If not already loaded, load plugins. """ global _plugins if _plugins == None: load_config() _plugins = [] for plugin in iter_entry_points(group='spicedham.classifiers', name=None): pluginClass = plugin.load() _plugins.append(pluginClass()) def train(tag, training_data, is_spam): """ Calls each plugin's train function. """ for plugin in _plugins: plugin.train(tag, training_data, is_spam) def classify(tag, classification_data): """ Calls each plugin's classify function and averages the results. """ average_score = 0 total = 0 for plugin in _plugins: value = plugin.classify(tag, classification_data) # Skip _plugins which give a score of None if value != None: total += 1 average_score += value # On rare occasions no _plugins will give scores. If so, return 0 if total > 0: return average_score / total else: return 0
from pkg_resources import iter_entry_points from spicedham.config import load_config from spicedham.backend import load_backend _plugins = None def load_plugins(): """ If not already loaded, load plugins. """ global _plugins if _plugins == None: # In order to use the plugins config and backend must be loaded. load_backend() load_config() _plugins = [] for plugin in iter_entry_points(group='spicedham.classifiers', name=None): pluginClass = plugin.load() _plugins.append(pluginClass()) def train(tag, training_data, is_spam): """ Calls each plugin's train function. """ for plugin in _plugins: plugin.train(tag, training_data, is_spam) def classify(tag, classification_data): """ Calls each plugin's classify function and averages the results. """ average_score = 0 total = 0 for plugin in _plugins: value = plugin.classify(tag, classification_data) # Skip _plugins which give a score of None if value != None: total += 1 average_score += value # On rare occasions no _plugins will give scores. If so, return 0 if total > 0: return average_score / total else: return 0
Make sure to load_backend as part of load_plugins
Make sure to load_backend as part of load_plugins
Python
mpl-2.0
mozilla/spicedham,mozilla/spicedham
from pkg_resources import iter_entry_points from spicedham.config import load_config _plugins = None def load_plugins(): """ If not already loaded, load plugins. """ global _plugins if _plugins == None: load_config() _plugins = [] for plugin in iter_entry_points(group='spicedham.classifiers', name=None): pluginClass = plugin.load() _plugins.append(pluginClass()) def train(tag, training_data, is_spam): """ Calls each plugin's train function. """ for plugin in _plugins: plugin.train(tag, training_data, is_spam) def classify(tag, classification_data): """ Calls each plugin's classify function and averages the results. """ average_score = 0 total = 0 for plugin in _plugins: value = plugin.classify(tag, classification_data) # Skip _plugins which give a score of None if value != None: total += 1 average_score += value # On rare occasions no _plugins will give scores. If so, return 0 if total > 0: return average_score / total else: return 0 Make sure to load_backend as part of load_plugins
from pkg_resources import iter_entry_points from spicedham.config import load_config from spicedham.backend import load_backend _plugins = None def load_plugins(): """ If not already loaded, load plugins. """ global _plugins if _plugins == None: # In order to use the plugins config and backend must be loaded. load_backend() load_config() _plugins = [] for plugin in iter_entry_points(group='spicedham.classifiers', name=None): pluginClass = plugin.load() _plugins.append(pluginClass()) def train(tag, training_data, is_spam): """ Calls each plugin's train function. """ for plugin in _plugins: plugin.train(tag, training_data, is_spam) def classify(tag, classification_data): """ Calls each plugin's classify function and averages the results. """ average_score = 0 total = 0 for plugin in _plugins: value = plugin.classify(tag, classification_data) # Skip _plugins which give a score of None if value != None: total += 1 average_score += value # On rare occasions no _plugins will give scores. If so, return 0 if total > 0: return average_score / total else: return 0
<commit_before>from pkg_resources import iter_entry_points from spicedham.config import load_config _plugins = None def load_plugins(): """ If not already loaded, load plugins. """ global _plugins if _plugins == None: load_config() _plugins = [] for plugin in iter_entry_points(group='spicedham.classifiers', name=None): pluginClass = plugin.load() _plugins.append(pluginClass()) def train(tag, training_data, is_spam): """ Calls each plugin's train function. """ for plugin in _plugins: plugin.train(tag, training_data, is_spam) def classify(tag, classification_data): """ Calls each plugin's classify function and averages the results. """ average_score = 0 total = 0 for plugin in _plugins: value = plugin.classify(tag, classification_data) # Skip _plugins which give a score of None if value != None: total += 1 average_score += value # On rare occasions no _plugins will give scores. If so, return 0 if total > 0: return average_score / total else: return 0 <commit_msg>Make sure to load_backend as part of load_plugins<commit_after>
from pkg_resources import iter_entry_points from spicedham.config import load_config from spicedham.backend import load_backend _plugins = None def load_plugins(): """ If not already loaded, load plugins. """ global _plugins if _plugins == None: # In order to use the plugins config and backend must be loaded. load_backend() load_config() _plugins = [] for plugin in iter_entry_points(group='spicedham.classifiers', name=None): pluginClass = plugin.load() _plugins.append(pluginClass()) def train(tag, training_data, is_spam): """ Calls each plugin's train function. """ for plugin in _plugins: plugin.train(tag, training_data, is_spam) def classify(tag, classification_data): """ Calls each plugin's classify function and averages the results. """ average_score = 0 total = 0 for plugin in _plugins: value = plugin.classify(tag, classification_data) # Skip _plugins which give a score of None if value != None: total += 1 average_score += value # On rare occasions no _plugins will give scores. If so, return 0 if total > 0: return average_score / total else: return 0
from pkg_resources import iter_entry_points from spicedham.config import load_config _plugins = None def load_plugins(): """ If not already loaded, load plugins. """ global _plugins if _plugins == None: load_config() _plugins = [] for plugin in iter_entry_points(group='spicedham.classifiers', name=None): pluginClass = plugin.load() _plugins.append(pluginClass()) def train(tag, training_data, is_spam): """ Calls each plugin's train function. """ for plugin in _plugins: plugin.train(tag, training_data, is_spam) def classify(tag, classification_data): """ Calls each plugin's classify function and averages the results. """ average_score = 0 total = 0 for plugin in _plugins: value = plugin.classify(tag, classification_data) # Skip _plugins which give a score of None if value != None: total += 1 average_score += value # On rare occasions no _plugins will give scores. If so, return 0 if total > 0: return average_score / total else: return 0 Make sure to load_backend as part of load_pluginsfrom pkg_resources import iter_entry_points from spicedham.config import load_config from spicedham.backend import load_backend _plugins = None def load_plugins(): """ If not already loaded, load plugins. """ global _plugins if _plugins == None: # In order to use the plugins config and backend must be loaded. load_backend() load_config() _plugins = [] for plugin in iter_entry_points(group='spicedham.classifiers', name=None): pluginClass = plugin.load() _plugins.append(pluginClass()) def train(tag, training_data, is_spam): """ Calls each plugin's train function. """ for plugin in _plugins: plugin.train(tag, training_data, is_spam) def classify(tag, classification_data): """ Calls each plugin's classify function and averages the results. """ average_score = 0 total = 0 for plugin in _plugins: value = plugin.classify(tag, classification_data) # Skip _plugins which give a score of None if value != None: total += 1 average_score += value # On rare occasions no _plugins will give scores. If so, return 0 if total > 0: return average_score / total else: return 0
<commit_before>from pkg_resources import iter_entry_points from spicedham.config import load_config _plugins = None def load_plugins(): """ If not already loaded, load plugins. """ global _plugins if _plugins == None: load_config() _plugins = [] for plugin in iter_entry_points(group='spicedham.classifiers', name=None): pluginClass = plugin.load() _plugins.append(pluginClass()) def train(tag, training_data, is_spam): """ Calls each plugin's train function. """ for plugin in _plugins: plugin.train(tag, training_data, is_spam) def classify(tag, classification_data): """ Calls each plugin's classify function and averages the results. """ average_score = 0 total = 0 for plugin in _plugins: value = plugin.classify(tag, classification_data) # Skip _plugins which give a score of None if value != None: total += 1 average_score += value # On rare occasions no _plugins will give scores. If so, return 0 if total > 0: return average_score / total else: return 0 <commit_msg>Make sure to load_backend as part of load_plugins<commit_after>from pkg_resources import iter_entry_points from spicedham.config import load_config from spicedham.backend import load_backend _plugins = None def load_plugins(): """ If not already loaded, load plugins. """ global _plugins if _plugins == None: # In order to use the plugins config and backend must be loaded. load_backend() load_config() _plugins = [] for plugin in iter_entry_points(group='spicedham.classifiers', name=None): pluginClass = plugin.load() _plugins.append(pluginClass()) def train(tag, training_data, is_spam): """ Calls each plugin's train function. """ for plugin in _plugins: plugin.train(tag, training_data, is_spam) def classify(tag, classification_data): """ Calls each plugin's classify function and averages the results. """ average_score = 0 total = 0 for plugin in _plugins: value = plugin.classify(tag, classification_data) # Skip _plugins which give a score of None if value != None: total += 1 average_score += value # On rare occasions no _plugins will give scores. If so, return 0 if total > 0: return average_score / total else: return 0
d66f4a429a0e584b1ce45ca652a27ecd6c372e8c
climate_data/migrations/0024_auto_20170623_0308.py
climate_data/migrations/0024_auto_20170623_0308.py
# -*- coding: utf-8 -*- # Generated by Django 1.10.6 on 2017-06-23 03:08 from __future__ import unicode_literals from django.db import migrations # noinspection PyUnusedLocal def add_station_sensor_link_to_reading(apps, schema_editor): # noinspection PyPep8Naming Reading = apps.get_model('climate_data', 'Reading') # noinspection PyPep8Naming StationSensorLink = apps.get_model('climate_data', 'StationSensorLink') for reading in Reading.objects.all(): reading.station_sensor_link = StationSensorLink.objects.filter(station=reading.station, sensor=reading.sensor)\ .first() reading.save() class Migration(migrations.Migration): dependencies = [ ('climate_data', '0023_reading_station_sensor_link'), ] operations = [ migrations.RunPython(add_station_sensor_link_to_reading), ]
# -*- coding: utf-8 -*- # Generated by Django 1.10.6 on 2017-06-23 03:08 from __future__ import unicode_literals from django.db import migrations # noinspection PyUnusedLocal def add_station_sensor_link_to_reading(apps, schema_editor): # noinspection PyPep8Naming Reading = apps.get_model('climate_data', 'Reading') # noinspection PyPep8Naming StationSensorLink = apps.get_model('climate_data', 'StationSensorLink') offset = 0 pagesize = 5000 count = Reading.objects.all().count() while offset < count: for reading in Reading.objects.all()[offset:offset+pagesize].iterator(): reading.station_sensor_link = StationSensorLink.objects.filter( station=reading.station, sensor=reading.sensor ).first() reading.save() offset += pagesize class Migration(migrations.Migration): dependencies = [ ('climate_data', '0023_reading_station_sensor_link'), ] operations = [ migrations.RunPython(add_station_sensor_link_to_reading), ]
Improve station-sensor link field addition to reading model migration using a paging system to prevent the migration being killed automatically.
Improve station-sensor link field addition to reading model migration using a paging system to prevent the migration being killed automatically.
Python
apache-2.0
qubs/data-centre,qubs/climate-data-api,qubs/climate-data-api,qubs/data-centre
# -*- coding: utf-8 -*- # Generated by Django 1.10.6 on 2017-06-23 03:08 from __future__ import unicode_literals from django.db import migrations # noinspection PyUnusedLocal def add_station_sensor_link_to_reading(apps, schema_editor): # noinspection PyPep8Naming Reading = apps.get_model('climate_data', 'Reading') # noinspection PyPep8Naming StationSensorLink = apps.get_model('climate_data', 'StationSensorLink') for reading in Reading.objects.all(): reading.station_sensor_link = StationSensorLink.objects.filter(station=reading.station, sensor=reading.sensor)\ .first() reading.save() class Migration(migrations.Migration): dependencies = [ ('climate_data', '0023_reading_station_sensor_link'), ] operations = [ migrations.RunPython(add_station_sensor_link_to_reading), ] Improve station-sensor link field addition to reading model migration using a paging system to prevent the migration being killed automatically.
# -*- coding: utf-8 -*- # Generated by Django 1.10.6 on 2017-06-23 03:08 from __future__ import unicode_literals from django.db import migrations # noinspection PyUnusedLocal def add_station_sensor_link_to_reading(apps, schema_editor): # noinspection PyPep8Naming Reading = apps.get_model('climate_data', 'Reading') # noinspection PyPep8Naming StationSensorLink = apps.get_model('climate_data', 'StationSensorLink') offset = 0 pagesize = 5000 count = Reading.objects.all().count() while offset < count: for reading in Reading.objects.all()[offset:offset+pagesize].iterator(): reading.station_sensor_link = StationSensorLink.objects.filter( station=reading.station, sensor=reading.sensor ).first() reading.save() offset += pagesize class Migration(migrations.Migration): dependencies = [ ('climate_data', '0023_reading_station_sensor_link'), ] operations = [ migrations.RunPython(add_station_sensor_link_to_reading), ]
<commit_before># -*- coding: utf-8 -*- # Generated by Django 1.10.6 on 2017-06-23 03:08 from __future__ import unicode_literals from django.db import migrations # noinspection PyUnusedLocal def add_station_sensor_link_to_reading(apps, schema_editor): # noinspection PyPep8Naming Reading = apps.get_model('climate_data', 'Reading') # noinspection PyPep8Naming StationSensorLink = apps.get_model('climate_data', 'StationSensorLink') for reading in Reading.objects.all(): reading.station_sensor_link = StationSensorLink.objects.filter(station=reading.station, sensor=reading.sensor)\ .first() reading.save() class Migration(migrations.Migration): dependencies = [ ('climate_data', '0023_reading_station_sensor_link'), ] operations = [ migrations.RunPython(add_station_sensor_link_to_reading), ] <commit_msg>Improve station-sensor link field addition to reading model migration using a paging system to prevent the migration being killed automatically.<commit_after>
# -*- coding: utf-8 -*- # Generated by Django 1.10.6 on 2017-06-23 03:08 from __future__ import unicode_literals from django.db import migrations # noinspection PyUnusedLocal def add_station_sensor_link_to_reading(apps, schema_editor): # noinspection PyPep8Naming Reading = apps.get_model('climate_data', 'Reading') # noinspection PyPep8Naming StationSensorLink = apps.get_model('climate_data', 'StationSensorLink') offset = 0 pagesize = 5000 count = Reading.objects.all().count() while offset < count: for reading in Reading.objects.all()[offset:offset+pagesize].iterator(): reading.station_sensor_link = StationSensorLink.objects.filter( station=reading.station, sensor=reading.sensor ).first() reading.save() offset += pagesize class Migration(migrations.Migration): dependencies = [ ('climate_data', '0023_reading_station_sensor_link'), ] operations = [ migrations.RunPython(add_station_sensor_link_to_reading), ]
# -*- coding: utf-8 -*- # Generated by Django 1.10.6 on 2017-06-23 03:08 from __future__ import unicode_literals from django.db import migrations # noinspection PyUnusedLocal def add_station_sensor_link_to_reading(apps, schema_editor): # noinspection PyPep8Naming Reading = apps.get_model('climate_data', 'Reading') # noinspection PyPep8Naming StationSensorLink = apps.get_model('climate_data', 'StationSensorLink') for reading in Reading.objects.all(): reading.station_sensor_link = StationSensorLink.objects.filter(station=reading.station, sensor=reading.sensor)\ .first() reading.save() class Migration(migrations.Migration): dependencies = [ ('climate_data', '0023_reading_station_sensor_link'), ] operations = [ migrations.RunPython(add_station_sensor_link_to_reading), ] Improve station-sensor link field addition to reading model migration using a paging system to prevent the migration being killed automatically.# -*- coding: utf-8 -*- # Generated by Django 1.10.6 on 2017-06-23 03:08 from __future__ import unicode_literals from django.db import migrations # noinspection PyUnusedLocal def add_station_sensor_link_to_reading(apps, schema_editor): # noinspection PyPep8Naming Reading = apps.get_model('climate_data', 'Reading') # noinspection PyPep8Naming StationSensorLink = apps.get_model('climate_data', 'StationSensorLink') offset = 0 pagesize = 5000 count = Reading.objects.all().count() while offset < count: for reading in Reading.objects.all()[offset:offset+pagesize].iterator(): reading.station_sensor_link = StationSensorLink.objects.filter( station=reading.station, sensor=reading.sensor ).first() reading.save() offset += pagesize class Migration(migrations.Migration): dependencies = [ ('climate_data', '0023_reading_station_sensor_link'), ] operations = [ migrations.RunPython(add_station_sensor_link_to_reading), ]
<commit_before># -*- coding: utf-8 -*- # Generated by Django 1.10.6 on 2017-06-23 03:08 from __future__ import unicode_literals from django.db import migrations # noinspection PyUnusedLocal def add_station_sensor_link_to_reading(apps, schema_editor): # noinspection PyPep8Naming Reading = apps.get_model('climate_data', 'Reading') # noinspection PyPep8Naming StationSensorLink = apps.get_model('climate_data', 'StationSensorLink') for reading in Reading.objects.all(): reading.station_sensor_link = StationSensorLink.objects.filter(station=reading.station, sensor=reading.sensor)\ .first() reading.save() class Migration(migrations.Migration): dependencies = [ ('climate_data', '0023_reading_station_sensor_link'), ] operations = [ migrations.RunPython(add_station_sensor_link_to_reading), ] <commit_msg>Improve station-sensor link field addition to reading model migration using a paging system to prevent the migration being killed automatically.<commit_after># -*- coding: utf-8 -*- # Generated by Django 1.10.6 on 2017-06-23 03:08 from __future__ import unicode_literals from django.db import migrations # noinspection PyUnusedLocal def add_station_sensor_link_to_reading(apps, schema_editor): # noinspection PyPep8Naming Reading = apps.get_model('climate_data', 'Reading') # noinspection PyPep8Naming StationSensorLink = apps.get_model('climate_data', 'StationSensorLink') offset = 0 pagesize = 5000 count = Reading.objects.all().count() while offset < count: for reading in Reading.objects.all()[offset:offset+pagesize].iterator(): reading.station_sensor_link = StationSensorLink.objects.filter( station=reading.station, sensor=reading.sensor ).first() reading.save() offset += pagesize class Migration(migrations.Migration): dependencies = [ ('climate_data', '0023_reading_station_sensor_link'), ] operations = [ migrations.RunPython(add_station_sensor_link_to_reading), ]
6fa2d67e27fcbd0cc8ff5858e4038e14a0ab8ae1
bika/lims/skins/bika/guard_receive_transition.py
bika/lims/skins/bika/guard_receive_transition.py
## Script (Python) "guard_receive_transition" ##bind container=container ##bind context=context ##bind namespace= ##bind script=script ##bind subpath=traverse_subpath ##parameters= ##title= ## from DateTime import DateTime workflow = context.portal_workflow # False if object is cancelled if workflow.getInfoFor(context, 'cancellation_state', "active") == "cancelled": return False if context.portal_type == 'AnalysisRequest': # False if our Sample's SamplingDate is the future if context.getSample().getSamplingDate() > DateTime(): return False # False if any Field Analyses in any of our sample's ARs have no result. for ar in context.getSample().getAnalysisRequests(): if [a for a in ar.getAnalyses(getPointOfCapture='field') if a.getObject().getResult() == '']: return False elif context.portal_type == 'Sample': # False if our SamplingDate is the future if context.getSamplingDate() > DateTime(): return False # False if any of this Sample's ARs have Field Analyses without results. for ar in context.getAnalysisRequests(): if [a for a in ar.getAnalyses(getPointOfCapture='field') if a.getObject().getResult() == '']: return False return True
## Script (Python) "guard_receive_transition" ##bind container=container ##bind context=context ##bind namespace= ##bind script=script ##bind subpath=traverse_subpath ##parameters= ##title= ## from DateTime import DateTime workflow = context.portal_workflow # False if object is cancelled if workflow.getInfoFor(context, 'cancellation_state', "active") == "cancelled": return False if context.portal_type == 'AnalysisRequest': # False if any Field Analyses in any of our sample's ARs have no result. for ar in context.getSample().getAnalysisRequests(): if [a for a in ar.getAnalyses(getPointOfCapture='field') if a.getObject().getResult() == '']: return False elif context.portal_type == 'Sample': # False if any of this Sample's ARs have Field Analyses without results. for ar in context.getAnalysisRequests(): if [a for a in ar.getAnalyses(getPointOfCapture='field') if a.getObject().getResult() == '']: return False return True
Allow receive of future-dated samples
Allow receive of future-dated samples
Python
agpl-3.0
veroc/Bika-LIMS,DeBortoliWines/Bika-LIMS,veroc/Bika-LIMS,anneline/Bika-LIMS,labsanmartin/Bika-LIMS,veroc/Bika-LIMS,DeBortoliWines/Bika-LIMS,labsanmartin/Bika-LIMS,anneline/Bika-LIMS,DeBortoliWines/Bika-LIMS,labsanmartin/Bika-LIMS,anneline/Bika-LIMS,rockfruit/bika.lims,rockfruit/bika.lims
## Script (Python) "guard_receive_transition" ##bind container=container ##bind context=context ##bind namespace= ##bind script=script ##bind subpath=traverse_subpath ##parameters= ##title= ## from DateTime import DateTime workflow = context.portal_workflow # False if object is cancelled if workflow.getInfoFor(context, 'cancellation_state', "active") == "cancelled": return False if context.portal_type == 'AnalysisRequest': # False if our Sample's SamplingDate is the future if context.getSample().getSamplingDate() > DateTime(): return False # False if any Field Analyses in any of our sample's ARs have no result. for ar in context.getSample().getAnalysisRequests(): if [a for a in ar.getAnalyses(getPointOfCapture='field') if a.getObject().getResult() == '']: return False elif context.portal_type == 'Sample': # False if our SamplingDate is the future if context.getSamplingDate() > DateTime(): return False # False if any of this Sample's ARs have Field Analyses without results. for ar in context.getAnalysisRequests(): if [a for a in ar.getAnalyses(getPointOfCapture='field') if a.getObject().getResult() == '']: return False return True Allow receive of future-dated samples
## Script (Python) "guard_receive_transition" ##bind container=container ##bind context=context ##bind namespace= ##bind script=script ##bind subpath=traverse_subpath ##parameters= ##title= ## from DateTime import DateTime workflow = context.portal_workflow # False if object is cancelled if workflow.getInfoFor(context, 'cancellation_state', "active") == "cancelled": return False if context.portal_type == 'AnalysisRequest': # False if any Field Analyses in any of our sample's ARs have no result. for ar in context.getSample().getAnalysisRequests(): if [a for a in ar.getAnalyses(getPointOfCapture='field') if a.getObject().getResult() == '']: return False elif context.portal_type == 'Sample': # False if any of this Sample's ARs have Field Analyses without results. for ar in context.getAnalysisRequests(): if [a for a in ar.getAnalyses(getPointOfCapture='field') if a.getObject().getResult() == '']: return False return True
<commit_before>## Script (Python) "guard_receive_transition" ##bind container=container ##bind context=context ##bind namespace= ##bind script=script ##bind subpath=traverse_subpath ##parameters= ##title= ## from DateTime import DateTime workflow = context.portal_workflow # False if object is cancelled if workflow.getInfoFor(context, 'cancellation_state', "active") == "cancelled": return False if context.portal_type == 'AnalysisRequest': # False if our Sample's SamplingDate is the future if context.getSample().getSamplingDate() > DateTime(): return False # False if any Field Analyses in any of our sample's ARs have no result. for ar in context.getSample().getAnalysisRequests(): if [a for a in ar.getAnalyses(getPointOfCapture='field') if a.getObject().getResult() == '']: return False elif context.portal_type == 'Sample': # False if our SamplingDate is the future if context.getSamplingDate() > DateTime(): return False # False if any of this Sample's ARs have Field Analyses without results. for ar in context.getAnalysisRequests(): if [a for a in ar.getAnalyses(getPointOfCapture='field') if a.getObject().getResult() == '']: return False return True <commit_msg>Allow receive of future-dated samples<commit_after>
## Script (Python) "guard_receive_transition" ##bind container=container ##bind context=context ##bind namespace= ##bind script=script ##bind subpath=traverse_subpath ##parameters= ##title= ## from DateTime import DateTime workflow = context.portal_workflow # False if object is cancelled if workflow.getInfoFor(context, 'cancellation_state', "active") == "cancelled": return False if context.portal_type == 'AnalysisRequest': # False if any Field Analyses in any of our sample's ARs have no result. for ar in context.getSample().getAnalysisRequests(): if [a for a in ar.getAnalyses(getPointOfCapture='field') if a.getObject().getResult() == '']: return False elif context.portal_type == 'Sample': # False if any of this Sample's ARs have Field Analyses without results. for ar in context.getAnalysisRequests(): if [a for a in ar.getAnalyses(getPointOfCapture='field') if a.getObject().getResult() == '']: return False return True
## Script (Python) "guard_receive_transition" ##bind container=container ##bind context=context ##bind namespace= ##bind script=script ##bind subpath=traverse_subpath ##parameters= ##title= ## from DateTime import DateTime workflow = context.portal_workflow # False if object is cancelled if workflow.getInfoFor(context, 'cancellation_state', "active") == "cancelled": return False if context.portal_type == 'AnalysisRequest': # False if our Sample's SamplingDate is the future if context.getSample().getSamplingDate() > DateTime(): return False # False if any Field Analyses in any of our sample's ARs have no result. for ar in context.getSample().getAnalysisRequests(): if [a for a in ar.getAnalyses(getPointOfCapture='field') if a.getObject().getResult() == '']: return False elif context.portal_type == 'Sample': # False if our SamplingDate is the future if context.getSamplingDate() > DateTime(): return False # False if any of this Sample's ARs have Field Analyses without results. for ar in context.getAnalysisRequests(): if [a for a in ar.getAnalyses(getPointOfCapture='field') if a.getObject().getResult() == '']: return False return True Allow receive of future-dated samples## Script (Python) "guard_receive_transition" ##bind container=container ##bind context=context ##bind namespace= ##bind script=script ##bind subpath=traverse_subpath ##parameters= ##title= ## from DateTime import DateTime workflow = context.portal_workflow # False if object is cancelled if workflow.getInfoFor(context, 'cancellation_state', "active") == "cancelled": return False if context.portal_type == 'AnalysisRequest': # False if any Field Analyses in any of our sample's ARs have no result. for ar in context.getSample().getAnalysisRequests(): if [a for a in ar.getAnalyses(getPointOfCapture='field') if a.getObject().getResult() == '']: return False elif context.portal_type == 'Sample': # False if any of this Sample's ARs have Field Analyses without results. for ar in context.getAnalysisRequests(): if [a for a in ar.getAnalyses(getPointOfCapture='field') if a.getObject().getResult() == '']: return False return True
<commit_before>## Script (Python) "guard_receive_transition" ##bind container=container ##bind context=context ##bind namespace= ##bind script=script ##bind subpath=traverse_subpath ##parameters= ##title= ## from DateTime import DateTime workflow = context.portal_workflow # False if object is cancelled if workflow.getInfoFor(context, 'cancellation_state', "active") == "cancelled": return False if context.portal_type == 'AnalysisRequest': # False if our Sample's SamplingDate is the future if context.getSample().getSamplingDate() > DateTime(): return False # False if any Field Analyses in any of our sample's ARs have no result. for ar in context.getSample().getAnalysisRequests(): if [a for a in ar.getAnalyses(getPointOfCapture='field') if a.getObject().getResult() == '']: return False elif context.portal_type == 'Sample': # False if our SamplingDate is the future if context.getSamplingDate() > DateTime(): return False # False if any of this Sample's ARs have Field Analyses without results. for ar in context.getAnalysisRequests(): if [a for a in ar.getAnalyses(getPointOfCapture='field') if a.getObject().getResult() == '']: return False return True <commit_msg>Allow receive of future-dated samples<commit_after>## Script (Python) "guard_receive_transition" ##bind container=container ##bind context=context ##bind namespace= ##bind script=script ##bind subpath=traverse_subpath ##parameters= ##title= ## from DateTime import DateTime workflow = context.portal_workflow # False if object is cancelled if workflow.getInfoFor(context, 'cancellation_state', "active") == "cancelled": return False if context.portal_type == 'AnalysisRequest': # False if any Field Analyses in any of our sample's ARs have no result. for ar in context.getSample().getAnalysisRequests(): if [a for a in ar.getAnalyses(getPointOfCapture='field') if a.getObject().getResult() == '']: return False elif context.portal_type == 'Sample': # False if any of this Sample's ARs have Field Analyses without results. for ar in context.getAnalysisRequests(): if [a for a in ar.getAnalyses(getPointOfCapture='field') if a.getObject().getResult() == '']: return False return True
b0c4a58f3002f2f0971c6b254af443773b965d4e
InvenTree/company/urls.py
InvenTree/company/urls.py
""" URL lookup for Company app """ from django.conf.urls import url, include from . import views company_detail_urls = [ url(r'^thumb-download/', views.CompanyImageDownloadFromURL.as_view(), name='company-image-download'), # Any other URL url(r'^.*$', views.CompanyDetail.as_view(), name='company-detail'), ] company_urls = [ url(r'^(?P<pk>\d+)/', include(company_detail_urls)), url(r'suppliers/', views.CompanyIndex.as_view(), name='supplier-index'), url(r'manufacturers/', views.CompanyIndex.as_view(), name='manufacturer-index'), url(r'customers/', views.CompanyIndex.as_view(), name='customer-index'), # Redirect any other patterns to the 'company' index which displays all companies url(r'^.*$', views.CompanyIndex.as_view(), name='company-index'), ] manufacturer_part_urls = [ url(r'^(?P<pk>\d+)/', include([ url('^.*$', views.ManufacturerPartDetail.as_view(template_name='company/manufacturer_part.html'), name='manufacturer-part-detail'), ])), ] supplier_part_urls = [ url('^.*$', views.SupplierPartDetail.as_view(template_name='company/supplier_part.html'), name='supplier-part-detail'), ]
""" URL lookup for Company app """ from django.conf.urls import url, include from . import views company_detail_urls = [ url(r'^thumb-download/', views.CompanyImageDownloadFromURL.as_view(), name='company-image-download'), # Any other URL url(r'^.*$', views.CompanyDetail.as_view(), name='company-detail'), ] company_urls = [ url(r'^(?P<pk>\d+)/', include(company_detail_urls)), url(r'suppliers/', views.CompanyIndex.as_view(), name='supplier-index'), url(r'manufacturers/', views.CompanyIndex.as_view(), name='manufacturer-index'), url(r'customers/', views.CompanyIndex.as_view(), name='customer-index'), # Redirect any other patterns to the 'company' index which displays all companies url(r'^.*$', views.CompanyIndex.as_view(), name='company-index'), ] manufacturer_part_urls = [ url(r'^(?P<pk>\d+)/', views.ManufacturerPartDetail.as_view(template_name='company/manufacturer_part.html'), name='manufacturer-part-detail'), ] supplier_part_urls = [ url(r'^(?P<pk>\d+)/', views.SupplierPartDetail.as_view(template_name='company/supplier_part.html'), name='supplier-part-detail'), ]
Fix URL patterns for ManufacturerPart and SupplierPart
Fix URL patterns for ManufacturerPart and SupplierPart
Python
mit
SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree
""" URL lookup for Company app """ from django.conf.urls import url, include from . import views company_detail_urls = [ url(r'^thumb-download/', views.CompanyImageDownloadFromURL.as_view(), name='company-image-download'), # Any other URL url(r'^.*$', views.CompanyDetail.as_view(), name='company-detail'), ] company_urls = [ url(r'^(?P<pk>\d+)/', include(company_detail_urls)), url(r'suppliers/', views.CompanyIndex.as_view(), name='supplier-index'), url(r'manufacturers/', views.CompanyIndex.as_view(), name='manufacturer-index'), url(r'customers/', views.CompanyIndex.as_view(), name='customer-index'), # Redirect any other patterns to the 'company' index which displays all companies url(r'^.*$', views.CompanyIndex.as_view(), name='company-index'), ] manufacturer_part_urls = [ url(r'^(?P<pk>\d+)/', include([ url('^.*$', views.ManufacturerPartDetail.as_view(template_name='company/manufacturer_part.html'), name='manufacturer-part-detail'), ])), ] supplier_part_urls = [ url('^.*$', views.SupplierPartDetail.as_view(template_name='company/supplier_part.html'), name='supplier-part-detail'), ] Fix URL patterns for ManufacturerPart and SupplierPart
""" URL lookup for Company app """ from django.conf.urls import url, include from . import views company_detail_urls = [ url(r'^thumb-download/', views.CompanyImageDownloadFromURL.as_view(), name='company-image-download'), # Any other URL url(r'^.*$', views.CompanyDetail.as_view(), name='company-detail'), ] company_urls = [ url(r'^(?P<pk>\d+)/', include(company_detail_urls)), url(r'suppliers/', views.CompanyIndex.as_view(), name='supplier-index'), url(r'manufacturers/', views.CompanyIndex.as_view(), name='manufacturer-index'), url(r'customers/', views.CompanyIndex.as_view(), name='customer-index'), # Redirect any other patterns to the 'company' index which displays all companies url(r'^.*$', views.CompanyIndex.as_view(), name='company-index'), ] manufacturer_part_urls = [ url(r'^(?P<pk>\d+)/', views.ManufacturerPartDetail.as_view(template_name='company/manufacturer_part.html'), name='manufacturer-part-detail'), ] supplier_part_urls = [ url(r'^(?P<pk>\d+)/', views.SupplierPartDetail.as_view(template_name='company/supplier_part.html'), name='supplier-part-detail'), ]
<commit_before>""" URL lookup for Company app """ from django.conf.urls import url, include from . import views company_detail_urls = [ url(r'^thumb-download/', views.CompanyImageDownloadFromURL.as_view(), name='company-image-download'), # Any other URL url(r'^.*$', views.CompanyDetail.as_view(), name='company-detail'), ] company_urls = [ url(r'^(?P<pk>\d+)/', include(company_detail_urls)), url(r'suppliers/', views.CompanyIndex.as_view(), name='supplier-index'), url(r'manufacturers/', views.CompanyIndex.as_view(), name='manufacturer-index'), url(r'customers/', views.CompanyIndex.as_view(), name='customer-index'), # Redirect any other patterns to the 'company' index which displays all companies url(r'^.*$', views.CompanyIndex.as_view(), name='company-index'), ] manufacturer_part_urls = [ url(r'^(?P<pk>\d+)/', include([ url('^.*$', views.ManufacturerPartDetail.as_view(template_name='company/manufacturer_part.html'), name='manufacturer-part-detail'), ])), ] supplier_part_urls = [ url('^.*$', views.SupplierPartDetail.as_view(template_name='company/supplier_part.html'), name='supplier-part-detail'), ] <commit_msg>Fix URL patterns for ManufacturerPart and SupplierPart<commit_after>
""" URL lookup for Company app """ from django.conf.urls import url, include from . import views company_detail_urls = [ url(r'^thumb-download/', views.CompanyImageDownloadFromURL.as_view(), name='company-image-download'), # Any other URL url(r'^.*$', views.CompanyDetail.as_view(), name='company-detail'), ] company_urls = [ url(r'^(?P<pk>\d+)/', include(company_detail_urls)), url(r'suppliers/', views.CompanyIndex.as_view(), name='supplier-index'), url(r'manufacturers/', views.CompanyIndex.as_view(), name='manufacturer-index'), url(r'customers/', views.CompanyIndex.as_view(), name='customer-index'), # Redirect any other patterns to the 'company' index which displays all companies url(r'^.*$', views.CompanyIndex.as_view(), name='company-index'), ] manufacturer_part_urls = [ url(r'^(?P<pk>\d+)/', views.ManufacturerPartDetail.as_view(template_name='company/manufacturer_part.html'), name='manufacturer-part-detail'), ] supplier_part_urls = [ url(r'^(?P<pk>\d+)/', views.SupplierPartDetail.as_view(template_name='company/supplier_part.html'), name='supplier-part-detail'), ]
""" URL lookup for Company app """ from django.conf.urls import url, include from . import views company_detail_urls = [ url(r'^thumb-download/', views.CompanyImageDownloadFromURL.as_view(), name='company-image-download'), # Any other URL url(r'^.*$', views.CompanyDetail.as_view(), name='company-detail'), ] company_urls = [ url(r'^(?P<pk>\d+)/', include(company_detail_urls)), url(r'suppliers/', views.CompanyIndex.as_view(), name='supplier-index'), url(r'manufacturers/', views.CompanyIndex.as_view(), name='manufacturer-index'), url(r'customers/', views.CompanyIndex.as_view(), name='customer-index'), # Redirect any other patterns to the 'company' index which displays all companies url(r'^.*$', views.CompanyIndex.as_view(), name='company-index'), ] manufacturer_part_urls = [ url(r'^(?P<pk>\d+)/', include([ url('^.*$', views.ManufacturerPartDetail.as_view(template_name='company/manufacturer_part.html'), name='manufacturer-part-detail'), ])), ] supplier_part_urls = [ url('^.*$', views.SupplierPartDetail.as_view(template_name='company/supplier_part.html'), name='supplier-part-detail'), ] Fix URL patterns for ManufacturerPart and SupplierPart""" URL lookup for Company app """ from django.conf.urls import url, include from . import views company_detail_urls = [ url(r'^thumb-download/', views.CompanyImageDownloadFromURL.as_view(), name='company-image-download'), # Any other URL url(r'^.*$', views.CompanyDetail.as_view(), name='company-detail'), ] company_urls = [ url(r'^(?P<pk>\d+)/', include(company_detail_urls)), url(r'suppliers/', views.CompanyIndex.as_view(), name='supplier-index'), url(r'manufacturers/', views.CompanyIndex.as_view(), name='manufacturer-index'), url(r'customers/', views.CompanyIndex.as_view(), name='customer-index'), # Redirect any other patterns to the 'company' index which displays all companies url(r'^.*$', views.CompanyIndex.as_view(), name='company-index'), ] manufacturer_part_urls = [ url(r'^(?P<pk>\d+)/', views.ManufacturerPartDetail.as_view(template_name='company/manufacturer_part.html'), name='manufacturer-part-detail'), ] supplier_part_urls = [ url(r'^(?P<pk>\d+)/', views.SupplierPartDetail.as_view(template_name='company/supplier_part.html'), name='supplier-part-detail'), ]
<commit_before>""" URL lookup for Company app """ from django.conf.urls import url, include from . import views company_detail_urls = [ url(r'^thumb-download/', views.CompanyImageDownloadFromURL.as_view(), name='company-image-download'), # Any other URL url(r'^.*$', views.CompanyDetail.as_view(), name='company-detail'), ] company_urls = [ url(r'^(?P<pk>\d+)/', include(company_detail_urls)), url(r'suppliers/', views.CompanyIndex.as_view(), name='supplier-index'), url(r'manufacturers/', views.CompanyIndex.as_view(), name='manufacturer-index'), url(r'customers/', views.CompanyIndex.as_view(), name='customer-index'), # Redirect any other patterns to the 'company' index which displays all companies url(r'^.*$', views.CompanyIndex.as_view(), name='company-index'), ] manufacturer_part_urls = [ url(r'^(?P<pk>\d+)/', include([ url('^.*$', views.ManufacturerPartDetail.as_view(template_name='company/manufacturer_part.html'), name='manufacturer-part-detail'), ])), ] supplier_part_urls = [ url('^.*$', views.SupplierPartDetail.as_view(template_name='company/supplier_part.html'), name='supplier-part-detail'), ] <commit_msg>Fix URL patterns for ManufacturerPart and SupplierPart<commit_after>""" URL lookup for Company app """ from django.conf.urls import url, include from . import views company_detail_urls = [ url(r'^thumb-download/', views.CompanyImageDownloadFromURL.as_view(), name='company-image-download'), # Any other URL url(r'^.*$', views.CompanyDetail.as_view(), name='company-detail'), ] company_urls = [ url(r'^(?P<pk>\d+)/', include(company_detail_urls)), url(r'suppliers/', views.CompanyIndex.as_view(), name='supplier-index'), url(r'manufacturers/', views.CompanyIndex.as_view(), name='manufacturer-index'), url(r'customers/', views.CompanyIndex.as_view(), name='customer-index'), # Redirect any other patterns to the 'company' index which displays all companies url(r'^.*$', views.CompanyIndex.as_view(), name='company-index'), ] manufacturer_part_urls = [ url(r'^(?P<pk>\d+)/', views.ManufacturerPartDetail.as_view(template_name='company/manufacturer_part.html'), name='manufacturer-part-detail'), ] supplier_part_urls = [ url(r'^(?P<pk>\d+)/', views.SupplierPartDetail.as_view(template_name='company/supplier_part.html'), name='supplier-part-detail'), ]
994b5be1862a74e9773eeea806d9051a3b5dc29e
framework/auth/campaigns.py
framework/auth/campaigns.py
import httplib as http from framework.exceptions import HTTPError from website import mails VALID_CAMPAIGNS = ( 'prereg', ) EMAIL_TEMPLATE_MAP = { 'prereg': mails.CONFIRM_EMAIL_PREREG } def email_template_for_campaign(campaign, default=None): if campaign in VALID_CAMPAIGNS: try: return EMAIL_TEMPLATE_MAP[campaign] except KeyError as e: if default: return default else: raise e return default def campaign_for_user(user): campaigns = [tag for tag in user.system_tags if tag in VALID_CAMPAIGNS] if campaigns: return campaigns[0] def campaign_url_for(campaign): # Defined inside this function to ensure a request context REDIRECT_MAP = { 'prereg': '/prereg/' } if campaign not in VALID_CAMPAIGNS: raise HTTPError(http.BAD_REQUEST) else: try: return REDIRECT_MAP[campaign] except KeyError: raise HTTPError(http.NOT_FOUND)
import httplib as http

from framework.exceptions import HTTPError

from website import mails


VALID_CAMPAIGNS = (
    'prereg',
)

EMAIL_TEMPLATE_MAP = {
    'prereg': mails.CONFIRM_EMAIL_PREREG
}


def email_template_for_campaign(campaign, default=None):
    if campaign in VALID_CAMPAIGNS:
        try:
            return EMAIL_TEMPLATE_MAP[campaign]
        except KeyError as e:
            if default:
                return default
            else:
                raise e
    return default


def campaign_for_user(user):
    campaigns = [tag for tag in user.system_tags if tag in VALID_CAMPAIGNS]
    if campaigns:
        # TODO: This is a bit of a one-off to support the Prereg Challenge.
        # We should think more about the campaigns architecture and in
        # particular define the behavior if the user has more than one
        # campaign tag in their system_tags.
        return campaigns[0]


def campaign_url_for(campaign):
    # Defined inside this function to ensure a request context
    REDIRECT_MAP = {
        'prereg': '/prereg/'
    }
    if campaign not in VALID_CAMPAIGNS:
        raise HTTPError(http.BAD_REQUEST)
    else:
        try:
            return REDIRECT_MAP[campaign]
        except KeyError:
            raise HTTPError(http.NOT_FOUND)
Add docstring for one-offish campaign_for_user
Add docstring for one-offish campaign_for_user
Python
apache-2.0
icereval/osf.io,brianjgeiger/osf.io,rdhyee/osf.io,HalcyonChimera/osf.io,crcresearch/osf.io,abought/osf.io,asanfilippo7/osf.io,brianjgeiger/osf.io,amyshi188/osf.io,ticklemepierce/osf.io,sloria/osf.io,mluo613/osf.io,asanfilippo7/osf.io,hmoco/osf.io,danielneis/osf.io,acshi/osf.io,danielneis/osf.io,KAsante95/osf.io,aaxelb/osf.io,jnayak1/osf.io,zachjanicki/osf.io,mattclark/osf.io,baylee-d/osf.io,chennan47/osf.io,rdhyee/osf.io,sloria/osf.io,mluke93/osf.io,laurenrevere/osf.io,CenterForOpenScience/osf.io,alexschiller/osf.io,mfraezz/osf.io,billyhunt/osf.io,RomanZWang/osf.io,KAsante95/osf.io,TomHeatwole/osf.io,saradbowman/osf.io,hmoco/osf.io,mfraezz/osf.io,Ghalko/osf.io,caseyrollins/osf.io,laurenrevere/osf.io,CenterForOpenScience/osf.io,caneruguz/osf.io,Nesiehr/osf.io,chrisseto/osf.io,caneruguz/osf.io,KAsante95/osf.io,billyhunt/osf.io,alexschiller/osf.io,monikagrabowska/osf.io,zachjanicki/osf.io,cwisecarver/osf.io,zamattiac/osf.io,KAsante95/osf.io,zamattiac/osf.io,aaxelb/osf.io,SSJohns/osf.io,leb2dg/osf.io,leb2dg/osf.io,billyhunt/osf.io,kch8qx/osf.io,felliott/osf.io,kwierman/osf.io,mluke93/osf.io,laurenrevere/osf.io,Johnetordoff/osf.io,chennan47/osf.io,kch8qx/osf.io,kch8qx/osf.io,DanielSBrown/osf.io,monikagrabowska/osf.io,mattclark/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,samchrisinger/osf.io,Nesiehr/osf.io,danielneis/osf.io,monikagrabowska/osf.io,ticklemepierce/osf.io,kch8qx/osf.io,TomBaxter/osf.io,felliott/osf.io,leb2dg/osf.io,adlius/osf.io,asanfilippo7/osf.io,samchrisinger/osf.io,GageGaskins/osf.io,felliott/osf.io,GageGaskins/osf.io,ticklemepierce/osf.io,emetsger/osf.io,samanehsan/osf.io,chrisseto/osf.io,crcresearch/osf.io,CenterForOpenScience/osf.io,emetsger/osf.io,binoculars/osf.io,baylee-d/osf.io,chennan47/osf.io,rdhyee/osf.io,pattisdr/osf.io,wearpants/osf.io,Johnetordoff/osf.io,doublebits/osf.io,KAsante95/osf.io,cwisecarver/osf.io,TomHeatwole/osf.io,baylee-d/osf.io,kch8qx/osf.io,cslzchen/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,acshi/osf.io,binoculars/osf.io,TomBaxter/osf.io,brandonPurvis/osf.io,abought/osf.io,erinspace/osf.io,samanehsan/osf.io,mluo613/osf.io,RomanZWang/osf.io,Ghalko/osf.io,RomanZWang/osf.io,DanielSBrown/osf.io,billyhunt/osf.io,icereval/osf.io,felliott/osf.io,monikagrabowska/osf.io,sloria/osf.io,acshi/osf.io,brandonPurvis/osf.io,HalcyonChimera/osf.io,emetsger/osf.io,brandonPurvis/osf.io,jnayak1/osf.io,SSJohns/osf.io,monikagrabowska/osf.io,HalcyonChimera/osf.io,chrisseto/osf.io,jnayak1/osf.io,hmoco/osf.io,mluo613/osf.io,samchrisinger/osf.io,zamattiac/osf.io,GageGaskins/osf.io,samchrisinger/osf.io,DanielSBrown/osf.io,acshi/osf.io,adlius/osf.io,caneruguz/osf.io,alexschiller/osf.io,laurenrevere/osf.io,billyhunt/osf.io,rdhyee/osf.io,RomanZWang/osf.io,mluo613/osf.io,chennan47/osf.io,mattclark/osf.io,mluke93/osf.io,HalcyonChimera/osf.io,jnayak1/osf.io,wearpants/osf.io,acshi/osf.io,caseyrollins/osf.io,Nesiehr/osf.io,kwierman/osf.io,adlius/osf.io,aaxelb/osf.io,doublebits/osf.io,GageGaskins/osf.io,amyshi188/osf.io,doublebits/osf.io,caseyrollins/osf.io,TomBaxter/osf.io,rdhyee/osf.io,sloria/osf.io,wearpants/osf.io,mluke93/osf.io,cwisecarver/osf.io,alexschiller/osf.io,abought/osf.io,TomHeatwole/osf.io,aaxelb/osf.io,zachjanicki/osf.io,kwierman/osf.io,abought/osf.io,danielneis/osf.io,doublebits/osf.io,samanehsan/osf.io,mfraezz/osf.io,mluo613/osf.io,samanehsan/osf.io,crcresearch/osf.io,ticklemepierce/osf.io,amyshi188/osf.io,TomHeatwole/osf.io,cslzchen/osf.io,pattisdr/osf.io,Nesiehr/osf.io,SSJohns/osf.io,caneruguz/osf.io,kwierman/osf.io,erinspace/osf.io,Ghalko/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,SSJohns/osf.io,Ghalko/osf.io,cslzchen/osf.io,RomanZWang/osf.io,brandonPurvis/osf.io,saradbowman/osf.io,brandonPurvis/osf.io,icereval/osf.io
import httplib as http from framework.exceptions import HTTPError from website import mails VALID_CAMPAIGNS = ( 'prereg', ) EMAIL_TEMPLATE_MAP = { 'prereg': mails.CONFIRM_EMAIL_PREREG } def email_template_for_campaign(campaign, default=None): if campaign in VALID_CAMPAIGNS: try: return EMAIL_TEMPLATE_MAP[campaign] except KeyError as e: if default: return default else: raise e return default def campaign_for_user(user): campaigns = [tag for tag in user.system_tags if tag in VALID_CAMPAIGNS] if campaigns: return campaigns[0] def campaign_url_for(campaign): # Defined inside this function to ensure a request context REDIRECT_MAP = { 'prereg': '/prereg/' } if campaign not in VALID_CAMPAIGNS: raise HTTPError(http.BAD_REQUEST) else: try: return REDIRECT_MAP[campaign] except KeyError: raise HTTPError(http.NOT_FOUND) Add docstring for one-offish campaign_for_user
import httplib as http

from framework.exceptions import HTTPError

from website import mails


VALID_CAMPAIGNS = (
    'prereg',
)

EMAIL_TEMPLATE_MAP = {
    'prereg': mails.CONFIRM_EMAIL_PREREG
}


def email_template_for_campaign(campaign, default=None):
    if campaign in VALID_CAMPAIGNS:
        try:
            return EMAIL_TEMPLATE_MAP[campaign]
        except KeyError as e:
            if default:
                return default
            else:
                raise e
    return default


def campaign_for_user(user):
    campaigns = [tag for tag in user.system_tags if tag in VALID_CAMPAIGNS]
    if campaigns:
        # TODO: This is a bit of a one-off to support the Prereg Challenge.
        # We should think more about the campaigns architecture and in
        # particular define the behavior if the user has more than one
        # campaign tag in their system_tags.
        return campaigns[0]


def campaign_url_for(campaign):
    # Defined inside this function to ensure a request context
    REDIRECT_MAP = {
        'prereg': '/prereg/'
    }
    if campaign not in VALID_CAMPAIGNS:
        raise HTTPError(http.BAD_REQUEST)
    else:
        try:
            return REDIRECT_MAP[campaign]
        except KeyError:
            raise HTTPError(http.NOT_FOUND)
<commit_before>import httplib as http from framework.exceptions import HTTPError from website import mails VALID_CAMPAIGNS = ( 'prereg', ) EMAIL_TEMPLATE_MAP = { 'prereg': mails.CONFIRM_EMAIL_PREREG } def email_template_for_campaign(campaign, default=None): if campaign in VALID_CAMPAIGNS: try: return EMAIL_TEMPLATE_MAP[campaign] except KeyError as e: if default: return default else: raise e return default def campaign_for_user(user): campaigns = [tag for tag in user.system_tags if tag in VALID_CAMPAIGNS] if campaigns: return campaigns[0] def campaign_url_for(campaign): # Defined inside this function to ensure a request context REDIRECT_MAP = { 'prereg': '/prereg/' } if campaign not in VALID_CAMPAIGNS: raise HTTPError(http.BAD_REQUEST) else: try: return REDIRECT_MAP[campaign] except KeyError: raise HTTPError(http.NOT_FOUND) <commit_msg>Add docstring for one-offish campaign_for_user<commit_after>
import httplib as http

from framework.exceptions import HTTPError

from website import mails


VALID_CAMPAIGNS = (
    'prereg',
)

EMAIL_TEMPLATE_MAP = {
    'prereg': mails.CONFIRM_EMAIL_PREREG
}


def email_template_for_campaign(campaign, default=None):
    if campaign in VALID_CAMPAIGNS:
        try:
            return EMAIL_TEMPLATE_MAP[campaign]
        except KeyError as e:
            if default:
                return default
            else:
                raise e
    return default


def campaign_for_user(user):
    campaigns = [tag for tag in user.system_tags if tag in VALID_CAMPAIGNS]
    if campaigns:
        # TODO: This is a bit of a one-off to support the Prereg Challenge.
        # We should think more about the campaigns architecture and in
        # particular define the behavior if the user has more than one
        # campaign tag in their system_tags.
        return campaigns[0]


def campaign_url_for(campaign):
    # Defined inside this function to ensure a request context
    REDIRECT_MAP = {
        'prereg': '/prereg/'
    }
    if campaign not in VALID_CAMPAIGNS:
        raise HTTPError(http.BAD_REQUEST)
    else:
        try:
            return REDIRECT_MAP[campaign]
        except KeyError:
            raise HTTPError(http.NOT_FOUND)
import httplib as http

from framework.exceptions import HTTPError

from website import mails


VALID_CAMPAIGNS = (
    'prereg',
)

EMAIL_TEMPLATE_MAP = {
    'prereg': mails.CONFIRM_EMAIL_PREREG
}


def email_template_for_campaign(campaign, default=None):
    if campaign in VALID_CAMPAIGNS:
        try:
            return EMAIL_TEMPLATE_MAP[campaign]
        except KeyError as e:
            if default:
                return default
            else:
                raise e
    return default


def campaign_for_user(user):
    campaigns = [tag for tag in user.system_tags if tag in VALID_CAMPAIGNS]
    if campaigns:
        return campaigns[0]


def campaign_url_for(campaign):
    # Defined inside this function to ensure a request context
    REDIRECT_MAP = {
        'prereg': '/prereg/'
    }
    if campaign not in VALID_CAMPAIGNS:
        raise HTTPError(http.BAD_REQUEST)
    else:
        try:
            return REDIRECT_MAP[campaign]
        except KeyError:
            raise HTTPError(http.NOT_FOUND)
Add docstring for one-offish campaign_for_userimport httplib as http

from framework.exceptions import HTTPError

from website import mails


VALID_CAMPAIGNS = (
    'prereg',
)

EMAIL_TEMPLATE_MAP = {
    'prereg': mails.CONFIRM_EMAIL_PREREG
}


def email_template_for_campaign(campaign, default=None):
    if campaign in VALID_CAMPAIGNS:
        try:
            return EMAIL_TEMPLATE_MAP[campaign]
        except KeyError as e:
            if default:
                return default
            else:
                raise e
    return default


def campaign_for_user(user):
    campaigns = [tag for tag in user.system_tags if tag in VALID_CAMPAIGNS]
    if campaigns:
        # TODO: This is a bit of a one-off to support the Prereg Challenge.
        # We should think more about the campaigns architecture and in
        # particular define the behavior if the user has more than one
        # campaign tag in their system_tags.
        return campaigns[0]


def campaign_url_for(campaign):
    # Defined inside this function to ensure a request context
    REDIRECT_MAP = {
        'prereg': '/prereg/'
    }
    if campaign not in VALID_CAMPAIGNS:
        raise HTTPError(http.BAD_REQUEST)
    else:
        try:
            return REDIRECT_MAP[campaign]
        except KeyError:
            raise HTTPError(http.NOT_FOUND)
<commit_before>import httplib as http

from framework.exceptions import HTTPError

from website import mails


VALID_CAMPAIGNS = (
    'prereg',
)

EMAIL_TEMPLATE_MAP = {
    'prereg': mails.CONFIRM_EMAIL_PREREG
}


def email_template_for_campaign(campaign, default=None):
    if campaign in VALID_CAMPAIGNS:
        try:
            return EMAIL_TEMPLATE_MAP[campaign]
        except KeyError as e:
            if default:
                return default
            else:
                raise e
    return default


def campaign_for_user(user):
    campaigns = [tag for tag in user.system_tags if tag in VALID_CAMPAIGNS]
    if campaigns:
        return campaigns[0]


def campaign_url_for(campaign):
    # Defined inside this function to ensure a request context
    REDIRECT_MAP = {
        'prereg': '/prereg/'
    }
    if campaign not in VALID_CAMPAIGNS:
        raise HTTPError(http.BAD_REQUEST)
    else:
        try:
            return REDIRECT_MAP[campaign]
        except KeyError:
            raise HTTPError(http.NOT_FOUND)
<commit_msg>Add docstring for one-offish campaign_for_user<commit_after>import httplib as http

from framework.exceptions import HTTPError

from website import mails


VALID_CAMPAIGNS = (
    'prereg',
)

EMAIL_TEMPLATE_MAP = {
    'prereg': mails.CONFIRM_EMAIL_PREREG
}


def email_template_for_campaign(campaign, default=None):
    if campaign in VALID_CAMPAIGNS:
        try:
            return EMAIL_TEMPLATE_MAP[campaign]
        except KeyError as e:
            if default:
                return default
            else:
                raise e
    return default


def campaign_for_user(user):
    campaigns = [tag for tag in user.system_tags if tag in VALID_CAMPAIGNS]
    if campaigns:
        # TODO: This is a bit of a one-off to support the Prereg Challenge.
        # We should think more about the campaigns architecture and in
        # particular define the behavior if the user has more than one
        # campaign tag in their system_tags.
        return campaigns[0]


def campaign_url_for(campaign):
    # Defined inside this function to ensure a request context
    REDIRECT_MAP = {
        'prereg': '/prereg/'
    }
    if campaign not in VALID_CAMPAIGNS:
        raise HTTPError(http.BAD_REQUEST)
    else:
        try:
            return REDIRECT_MAP[campaign]
        except KeyError:
            raise HTTPError(http.NOT_FOUND)
4e801fe6d88d35ca8b850a092acfde404bdfa287
wsgi.py
wsgi.py
# -*- coding: utf-8 -*- """ wsgi ~~~~ entrypoint wsgi script """ from werkzeug.serving import run_simple from werkzeug.wsgi import DispatcherMiddleware from graphics_service import app application = app.create_app() if __name__ == "__main__": run_simple('0.0.0.0', 4000, application, use_reloader=False, use_debugger=True)
# -*- coding: utf-8 -*- """ wsgi ~~~~ entrypoint wsgi script """ from werkzeug.serving import run_simple from werkzeug.middleware.dispatcher import DispatcherMiddleware from graphics_service import app application = app.create_app() if __name__ == "__main__": run_simple('0.0.0.0', 4000, application, use_reloader=False, use_debugger=True)
Update importing werkzeug middleware to support python3
Update importing werkzeug middleware to support python3
Python
mit
adsabs/graphics_service
# -*- coding: utf-8 -*- """ wsgi ~~~~ entrypoint wsgi script """ from werkzeug.serving import run_simple from werkzeug.wsgi import DispatcherMiddleware from graphics_service import app application = app.create_app() if __name__ == "__main__": run_simple('0.0.0.0', 4000, application, use_reloader=False, use_debugger=True) Update importing werkzeug middleware to support python3
# -*- coding: utf-8 -*- """ wsgi ~~~~ entrypoint wsgi script """ from werkzeug.serving import run_simple from werkzeug.middleware.dispatcher import DispatcherMiddleware from graphics_service import app application = app.create_app() if __name__ == "__main__": run_simple('0.0.0.0', 4000, application, use_reloader=False, use_debugger=True)
<commit_before># -*- coding: utf-8 -*- """ wsgi ~~~~ entrypoint wsgi script """ from werkzeug.serving import run_simple from werkzeug.wsgi import DispatcherMiddleware from graphics_service import app application = app.create_app() if __name__ == "__main__": run_simple('0.0.0.0', 4000, application, use_reloader=False, use_debugger=True) <commit_msg>Update importing werkzeug middleware to support python3<commit_after>
# -*- coding: utf-8 -*- """ wsgi ~~~~ entrypoint wsgi script """ from werkzeug.serving import run_simple from werkzeug.middleware.dispatcher import DispatcherMiddleware from graphics_service import app application = app.create_app() if __name__ == "__main__": run_simple('0.0.0.0', 4000, application, use_reloader=False, use_debugger=True)
# -*- coding: utf-8 -*- """ wsgi ~~~~ entrypoint wsgi script """ from werkzeug.serving import run_simple from werkzeug.wsgi import DispatcherMiddleware from graphics_service import app application = app.create_app() if __name__ == "__main__": run_simple('0.0.0.0', 4000, application, use_reloader=False, use_debugger=True) Update importing werkzeug middleware to support python3# -*- coding: utf-8 -*- """ wsgi ~~~~ entrypoint wsgi script """ from werkzeug.serving import run_simple from werkzeug.middleware.dispatcher import DispatcherMiddleware from graphics_service import app application = app.create_app() if __name__ == "__main__": run_simple('0.0.0.0', 4000, application, use_reloader=False, use_debugger=True)
<commit_before># -*- coding: utf-8 -*- """ wsgi ~~~~ entrypoint wsgi script """ from werkzeug.serving import run_simple from werkzeug.wsgi import DispatcherMiddleware from graphics_service import app application = app.create_app() if __name__ == "__main__": run_simple('0.0.0.0', 4000, application, use_reloader=False, use_debugger=True) <commit_msg>Update importing werkzeug middleware to support python3<commit_after># -*- coding: utf-8 -*- """ wsgi ~~~~ entrypoint wsgi script """ from werkzeug.serving import run_simple from werkzeug.middleware.dispatcher import DispatcherMiddleware from graphics_service import app application = app.create_app() if __name__ == "__main__": run_simple('0.0.0.0', 4000, application, use_reloader=False, use_debugger=True)
96b3911faadc22a07176c9338420ac8cd9fb06e5
tests/test_vector2_scale.py
tests/test_vector2_scale.py
import pytest # type: ignore from hypothesis import assume, given from hypothesis.strategies import floats from math import isclose from utils import vectors from ppb_vector import Vector2 @given(x=vectors(max_magnitude=1e75), l=floats(min_value=-1e75, max_value=1e75)) def test_scale_to_length(x: Vector2, l: float): """Test that the length of x.scale_to(l) is l.""" try: assert isclose(x.scale_to(l).length, l) except ZeroDivisionError: assert x == (0, 0) except ValueError: assert l < 0
import pytest  # type: ignore
from hypothesis import assume, given
from hypothesis.strategies import floats
from math import isclose

from utils import angle_isclose, vectors
from ppb_vector import Vector2


@given(x=vectors(max_magnitude=1e75), l=floats(min_value=-1e75, max_value=1e75))
def test_scale_to_length(x: Vector2, l: float):
    """Test that the length of x.scale_to(l) is l."""
    try:
        assert isclose(x.scale_to(l).length, l)
    except ZeroDivisionError:
        assert x == (0, 0)
    except ValueError:
        assert l < 0

@given(x=vectors(max_magnitude=1e75), length=floats(min_value=0, max_value=1e75))
def test_scale_aligned(x: Vector2, length: float):
    """Test that x.scale_to(length) does not rotate x."""
    assume(length > 0)
    try:
        assert angle_isclose(x.scale_to(length).angle(x), 0)
    except ZeroDivisionError:
        assert x == (0, 0)
Test that scaling doesn't rotate vectors
Test that scaling doesn't rotate vectors
Python
artistic-2.0
ppb/ppb-vector,ppb/ppb-vector
import pytest # type: ignore from hypothesis import assume, given from hypothesis.strategies import floats from math import isclose from utils import vectors from ppb_vector import Vector2 @given(x=vectors(max_magnitude=1e75), l=floats(min_value=-1e75, max_value=1e75)) def test_scale_to_length(x: Vector2, l: float): """Test that the length of x.scale_to(l) is l.""" try: assert isclose(x.scale_to(l).length, l) except ZeroDivisionError: assert x == (0, 0) except ValueError: assert l < 0 Test that scaling doesn't rotate vectors
import pytest  # type: ignore
from hypothesis import assume, given
from hypothesis.strategies import floats
from math import isclose

from utils import angle_isclose, vectors
from ppb_vector import Vector2


@given(x=vectors(max_magnitude=1e75), l=floats(min_value=-1e75, max_value=1e75))
def test_scale_to_length(x: Vector2, l: float):
    """Test that the length of x.scale_to(l) is l."""
    try:
        assert isclose(x.scale_to(l).length, l)
    except ZeroDivisionError:
        assert x == (0, 0)
    except ValueError:
        assert l < 0

@given(x=vectors(max_magnitude=1e75), length=floats(min_value=0, max_value=1e75))
def test_scale_aligned(x: Vector2, length: float):
    """Test that x.scale_to(length) does not rotate x."""
    assume(length > 0)
    try:
        assert angle_isclose(x.scale_to(length).angle(x), 0)
    except ZeroDivisionError:
        assert x == (0, 0)
<commit_before>import pytest # type: ignore from hypothesis import assume, given from hypothesis.strategies import floats from math import isclose from utils import vectors from ppb_vector import Vector2 @given(x=vectors(max_magnitude=1e75), l=floats(min_value=-1e75, max_value=1e75)) def test_scale_to_length(x: Vector2, l: float): """Test that the length of x.scale_to(l) is l.""" try: assert isclose(x.scale_to(l).length, l) except ZeroDivisionError: assert x == (0, 0) except ValueError: assert l < 0 <commit_msg>Test that scaling doesn't rotate vectors<commit_after>
import pytest  # type: ignore
from hypothesis import assume, given
from hypothesis.strategies import floats
from math import isclose

from utils import angle_isclose, vectors
from ppb_vector import Vector2


@given(x=vectors(max_magnitude=1e75), l=floats(min_value=-1e75, max_value=1e75))
def test_scale_to_length(x: Vector2, l: float):
    """Test that the length of x.scale_to(l) is l."""
    try:
        assert isclose(x.scale_to(l).length, l)
    except ZeroDivisionError:
        assert x == (0, 0)
    except ValueError:
        assert l < 0

@given(x=vectors(max_magnitude=1e75), length=floats(min_value=0, max_value=1e75))
def test_scale_aligned(x: Vector2, length: float):
    """Test that x.scale_to(length) does not rotate x."""
    assume(length > 0)
    try:
        assert angle_isclose(x.scale_to(length).angle(x), 0)
    except ZeroDivisionError:
        assert x == (0, 0)
import pytest  # type: ignore
from hypothesis import assume, given
from hypothesis.strategies import floats
from math import isclose

from utils import vectors
from ppb_vector import Vector2


@given(x=vectors(max_magnitude=1e75), l=floats(min_value=-1e75, max_value=1e75))
def test_scale_to_length(x: Vector2, l: float):
    """Test that the length of x.scale_to(l) is l."""
    try:
        assert isclose(x.scale_to(l).length, l)
    except ZeroDivisionError:
        assert x == (0, 0)
    except ValueError:
        assert l < 0
Test that scaling doesn't rotate vectorsimport pytest  # type: ignore
from hypothesis import assume, given
from hypothesis.strategies import floats
from math import isclose

from utils import angle_isclose, vectors
from ppb_vector import Vector2


@given(x=vectors(max_magnitude=1e75), l=floats(min_value=-1e75, max_value=1e75))
def test_scale_to_length(x: Vector2, l: float):
    """Test that the length of x.scale_to(l) is l."""
    try:
        assert isclose(x.scale_to(l).length, l)
    except ZeroDivisionError:
        assert x == (0, 0)
    except ValueError:
        assert l < 0

@given(x=vectors(max_magnitude=1e75), length=floats(min_value=0, max_value=1e75))
def test_scale_aligned(x: Vector2, length: float):
    """Test that x.scale_to(length) does not rotate x."""
    assume(length > 0)
    try:
        assert angle_isclose(x.scale_to(length).angle(x), 0)
    except ZeroDivisionError:
        assert x == (0, 0)
<commit_before>import pytest  # type: ignore
from hypothesis import assume, given
from hypothesis.strategies import floats
from math import isclose

from utils import vectors
from ppb_vector import Vector2


@given(x=vectors(max_magnitude=1e75), l=floats(min_value=-1e75, max_value=1e75))
def test_scale_to_length(x: Vector2, l: float):
    """Test that the length of x.scale_to(l) is l."""
    try:
        assert isclose(x.scale_to(l).length, l)
    except ZeroDivisionError:
        assert x == (0, 0)
    except ValueError:
        assert l < 0
<commit_msg>Test that scaling doesn't rotate vectors<commit_after>import pytest  # type: ignore
from hypothesis import assume, given
from hypothesis.strategies import floats
from math import isclose

from utils import angle_isclose, vectors
from ppb_vector import Vector2


@given(x=vectors(max_magnitude=1e75), l=floats(min_value=-1e75, max_value=1e75))
def test_scale_to_length(x: Vector2, l: float):
    """Test that the length of x.scale_to(l) is l."""
    try:
        assert isclose(x.scale_to(l).length, l)
    except ZeroDivisionError:
        assert x == (0, 0)
    except ValueError:
        assert l < 0

@given(x=vectors(max_magnitude=1e75), length=floats(min_value=0, max_value=1e75))
def test_scale_aligned(x: Vector2, length: float):
    """Test that x.scale_to(length) does not rotate x."""
    assume(length > 0)
    try:
        assert angle_isclose(x.scale_to(length).angle(x), 0)
    except ZeroDivisionError:
        assert x == (0, 0)
6ad6de83e9cb8bf71ba2b525b22fa760093ab552
userprofile/migrations/0034_auto_20200304_2205.py
userprofile/migrations/0034_auto_20200304_2205.py
# Generated by Django 3.0.2 on 2020-03-04 22:05 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('userprofile', '0033_category_color_hex'), ] operations = [ migrations.RemoveField( model_name='category', name='color_hex', ), migrations.AddField( model_name='category', name='color', field=models.CharField(default='hs-green', max_length=7), ), ]
# Generated by Django 3.0.2 on 2020-03-04 22:05 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('userprofile', '0033_category_color_hex'), ] operations = [ migrations.RemoveField( model_name='category', name='color_hex', ), migrations.AddField( model_name='category', name='color', field=models.CharField(default='green', max_length=7), ), ]
Change default in migration file to be short enough
Change default in migration file to be short enough
Python
mit
hackerspace-ntnu/website,hackerspace-ntnu/website,hackerspace-ntnu/website
# Generated by Django 3.0.2 on 2020-03-04 22:05 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('userprofile', '0033_category_color_hex'), ] operations = [ migrations.RemoveField( model_name='category', name='color_hex', ), migrations.AddField( model_name='category', name='color', field=models.CharField(default='hs-green', max_length=7), ), ] Change default in migration file to be short enough
# Generated by Django 3.0.2 on 2020-03-04 22:05 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('userprofile', '0033_category_color_hex'), ] operations = [ migrations.RemoveField( model_name='category', name='color_hex', ), migrations.AddField( model_name='category', name='color', field=models.CharField(default='green', max_length=7), ), ]
<commit_before># Generated by Django 3.0.2 on 2020-03-04 22:05 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('userprofile', '0033_category_color_hex'), ] operations = [ migrations.RemoveField( model_name='category', name='color_hex', ), migrations.AddField( model_name='category', name='color', field=models.CharField(default='hs-green', max_length=7), ), ] <commit_msg>Change default in migration file to be short enough<commit_after>
# Generated by Django 3.0.2 on 2020-03-04 22:05 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('userprofile', '0033_category_color_hex'), ] operations = [ migrations.RemoveField( model_name='category', name='color_hex', ), migrations.AddField( model_name='category', name='color', field=models.CharField(default='green', max_length=7), ), ]
# Generated by Django 3.0.2 on 2020-03-04 22:05 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('userprofile', '0033_category_color_hex'), ] operations = [ migrations.RemoveField( model_name='category', name='color_hex', ), migrations.AddField( model_name='category', name='color', field=models.CharField(default='hs-green', max_length=7), ), ] Change default in migration file to be short enough# Generated by Django 3.0.2 on 2020-03-04 22:05 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('userprofile', '0033_category_color_hex'), ] operations = [ migrations.RemoveField( model_name='category', name='color_hex', ), migrations.AddField( model_name='category', name='color', field=models.CharField(default='green', max_length=7), ), ]
<commit_before># Generated by Django 3.0.2 on 2020-03-04 22:05 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('userprofile', '0033_category_color_hex'), ] operations = [ migrations.RemoveField( model_name='category', name='color_hex', ), migrations.AddField( model_name='category', name='color', field=models.CharField(default='hs-green', max_length=7), ), ] <commit_msg>Change default in migration file to be short enough<commit_after># Generated by Django 3.0.2 on 2020-03-04 22:05 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('userprofile', '0033_category_color_hex'), ] operations = [ migrations.RemoveField( model_name='category', name='color_hex', ), migrations.AddField( model_name='category', name='color', field=models.CharField(default='green', max_length=7), ), ]
c49aa5dc8ccaf821fe6c5d43ea6026e968b175e5
makedist.py
makedist.py
# Template makedist.py file # Set WEBSITE to the name of the web site that this package will be # deposited in. The URL will always be: # http://$WEBSITE/$PACKAGE/ WEBSITE = 'untroubled.org' # If LISTSUB is set, makedist will add a note regarding mailing list # subscription. LISTSUB = 'bgware-subscribe@lists.em.ca' LISTURL = 'http://lists.em.ca/?list=bgware' # Set EXTRAS to a list of any extra files that should go into the # base directory in the destination site. EXTRAS = [ 'ANNOUNCEMENT', 'ChangeLog', 'NEWS', 'README', 'TODO', ] # Set RPMUPLOAD to a list of additional hostname/path destinations to # which to upload the source and binary RPMs. RPMUPLOAD = [ #( 'incoming.redhat.com', 'libc6' ), ] # Set LISTS to the mailing list(s) to send the announcement to LISTS = [ 'bgware@lists.em.ca', ] # Run any extra commands to prepare the source tree (such as making # documentation) here.
# Template makedist.py file # Set WEBSITE to the name of the web site that this package will be # deposited in. The URL will always be: # http://$WEBSITE/$PACKAGE/ WEBSITE = 'untroubled.org' # If LISTSUB is set, makedist will add a note regarding mailing list # subscription. LISTSUB = 'bgware-subscribe@lists.em.ca' LISTURL = 'http://lists.em.ca/?list=bgware' # Set EXTRAS to a list of any extra files that should go into the # base directory in the destination site. EXTRAS = [ 'ANNOUNCEMENT', 'ChangeLog', 'NEWS', 'README', 'TODO', 'doc/html', ] # Set RPMUPLOAD to a list of additional hostname/path destinations to # which to upload the source and binary RPMs. RPMUPLOAD = [ #( 'incoming.redhat.com', 'libc6' ), ] # Set LISTS to the mailing list(s) to send the announcement to LISTS = [ 'bgware@lists.em.ca', ] # Run any extra commands to prepare the source tree (such as making # documentation) here.
Add the documentation to the web site.
Add the documentation to the web site.
Python
lgpl-2.1
bruceg/bglibs,bruceg/bglibs,bruceg/bglibs,bruceg/bglibs,bruceg/bglibs,bruceg/bglibs
# Template makedist.py file # Set WEBSITE to the name of the web site that this package will be # deposited in. The URL will always be: # http://$WEBSITE/$PACKAGE/ WEBSITE = 'untroubled.org' # If LISTSUB is set, makedist will add a note regarding mailing list # subscription. LISTSUB = 'bgware-subscribe@lists.em.ca' LISTURL = 'http://lists.em.ca/?list=bgware' # Set EXTRAS to a list of any extra files that should go into the # base directory in the destination site. EXTRAS = [ 'ANNOUNCEMENT', 'ChangeLog', 'NEWS', 'README', 'TODO', ] # Set RPMUPLOAD to a list of additional hostname/path destinations to # which to upload the source and binary RPMs. RPMUPLOAD = [ #( 'incoming.redhat.com', 'libc6' ), ] # Set LISTS to the mailing list(s) to send the announcement to LISTS = [ 'bgware@lists.em.ca', ] # Run any extra commands to prepare the source tree (such as making # documentation) here. Add the documentation to the web site.
# Template makedist.py file # Set WEBSITE to the name of the web site that this package will be # deposited in. The URL will always be: # http://$WEBSITE/$PACKAGE/ WEBSITE = 'untroubled.org' # If LISTSUB is set, makedist will add a note regarding mailing list # subscription. LISTSUB = 'bgware-subscribe@lists.em.ca' LISTURL = 'http://lists.em.ca/?list=bgware' # Set EXTRAS to a list of any extra files that should go into the # base directory in the destination site. EXTRAS = [ 'ANNOUNCEMENT', 'ChangeLog', 'NEWS', 'README', 'TODO', 'doc/html', ] # Set RPMUPLOAD to a list of additional hostname/path destinations to # which to upload the source and binary RPMs. RPMUPLOAD = [ #( 'incoming.redhat.com', 'libc6' ), ] # Set LISTS to the mailing list(s) to send the announcement to LISTS = [ 'bgware@lists.em.ca', ] # Run any extra commands to prepare the source tree (such as making # documentation) here.
<commit_before># Template makedist.py file # Set WEBSITE to the name of the web site that this package will be # deposited in. The URL will always be: # http://$WEBSITE/$PACKAGE/ WEBSITE = 'untroubled.org' # If LISTSUB is set, makedist will add a note regarding mailing list # subscription. LISTSUB = 'bgware-subscribe@lists.em.ca' LISTURL = 'http://lists.em.ca/?list=bgware' # Set EXTRAS to a list of any extra files that should go into the # base directory in the destination site. EXTRAS = [ 'ANNOUNCEMENT', 'ChangeLog', 'NEWS', 'README', 'TODO', ] # Set RPMUPLOAD to a list of additional hostname/path destinations to # which to upload the source and binary RPMs. RPMUPLOAD = [ #( 'incoming.redhat.com', 'libc6' ), ] # Set LISTS to the mailing list(s) to send the announcement to LISTS = [ 'bgware@lists.em.ca', ] # Run any extra commands to prepare the source tree (such as making # documentation) here. <commit_msg>Add the documentation to the web site.<commit_after>
# Template makedist.py file # Set WEBSITE to the name of the web site that this package will be # deposited in. The URL will always be: # http://$WEBSITE/$PACKAGE/ WEBSITE = 'untroubled.org' # If LISTSUB is set, makedist will add a note regarding mailing list # subscription. LISTSUB = 'bgware-subscribe@lists.em.ca' LISTURL = 'http://lists.em.ca/?list=bgware' # Set EXTRAS to a list of any extra files that should go into the # base directory in the destination site. EXTRAS = [ 'ANNOUNCEMENT', 'ChangeLog', 'NEWS', 'README', 'TODO', 'doc/html', ] # Set RPMUPLOAD to a list of additional hostname/path destinations to # which to upload the source and binary RPMs. RPMUPLOAD = [ #( 'incoming.redhat.com', 'libc6' ), ] # Set LISTS to the mailing list(s) to send the announcement to LISTS = [ 'bgware@lists.em.ca', ] # Run any extra commands to prepare the source tree (such as making # documentation) here.
# Template makedist.py file # Set WEBSITE to the name of the web site that this package will be # deposited in. The URL will always be: # http://$WEBSITE/$PACKAGE/ WEBSITE = 'untroubled.org' # If LISTSUB is set, makedist will add a note regarding mailing list # subscription. LISTSUB = 'bgware-subscribe@lists.em.ca' LISTURL = 'http://lists.em.ca/?list=bgware' # Set EXTRAS to a list of any extra files that should go into the # base directory in the destination site. EXTRAS = [ 'ANNOUNCEMENT', 'ChangeLog', 'NEWS', 'README', 'TODO', ] # Set RPMUPLOAD to a list of additional hostname/path destinations to # which to upload the source and binary RPMs. RPMUPLOAD = [ #( 'incoming.redhat.com', 'libc6' ), ] # Set LISTS to the mailing list(s) to send the announcement to LISTS = [ 'bgware@lists.em.ca', ] # Run any extra commands to prepare the source tree (such as making # documentation) here. Add the documentation to the web site.# Template makedist.py file # Set WEBSITE to the name of the web site that this package will be # deposited in. The URL will always be: # http://$WEBSITE/$PACKAGE/ WEBSITE = 'untroubled.org' # If LISTSUB is set, makedist will add a note regarding mailing list # subscription. LISTSUB = 'bgware-subscribe@lists.em.ca' LISTURL = 'http://lists.em.ca/?list=bgware' # Set EXTRAS to a list of any extra files that should go into the # base directory in the destination site. EXTRAS = [ 'ANNOUNCEMENT', 'ChangeLog', 'NEWS', 'README', 'TODO', 'doc/html', ] # Set RPMUPLOAD to a list of additional hostname/path destinations to # which to upload the source and binary RPMs. RPMUPLOAD = [ #( 'incoming.redhat.com', 'libc6' ), ] # Set LISTS to the mailing list(s) to send the announcement to LISTS = [ 'bgware@lists.em.ca', ] # Run any extra commands to prepare the source tree (such as making # documentation) here.
<commit_before># Template makedist.py file # Set WEBSITE to the name of the web site that this package will be # deposited in. The URL will always be: # http://$WEBSITE/$PACKAGE/ WEBSITE = 'untroubled.org' # If LISTSUB is set, makedist will add a note regarding mailing list # subscription. LISTSUB = 'bgware-subscribe@lists.em.ca' LISTURL = 'http://lists.em.ca/?list=bgware' # Set EXTRAS to a list of any extra files that should go into the # base directory in the destination site. EXTRAS = [ 'ANNOUNCEMENT', 'ChangeLog', 'NEWS', 'README', 'TODO', ] # Set RPMUPLOAD to a list of additional hostname/path destinations to # which to upload the source and binary RPMs. RPMUPLOAD = [ #( 'incoming.redhat.com', 'libc6' ), ] # Set LISTS to the mailing list(s) to send the announcement to LISTS = [ 'bgware@lists.em.ca', ] # Run any extra commands to prepare the source tree (such as making # documentation) here. <commit_msg>Add the documentation to the web site.<commit_after># Template makedist.py file # Set WEBSITE to the name of the web site that this package will be # deposited in. The URL will always be: # http://$WEBSITE/$PACKAGE/ WEBSITE = 'untroubled.org' # If LISTSUB is set, makedist will add a note regarding mailing list # subscription. LISTSUB = 'bgware-subscribe@lists.em.ca' LISTURL = 'http://lists.em.ca/?list=bgware' # Set EXTRAS to a list of any extra files that should go into the # base directory in the destination site. EXTRAS = [ 'ANNOUNCEMENT', 'ChangeLog', 'NEWS', 'README', 'TODO', 'doc/html', ] # Set RPMUPLOAD to a list of additional hostname/path destinations to # which to upload the source and binary RPMs. RPMUPLOAD = [ #( 'incoming.redhat.com', 'libc6' ), ] # Set LISTS to the mailing list(s) to send the announcement to LISTS = [ 'bgware@lists.em.ca', ] # Run any extra commands to prepare the source tree (such as making # documentation) here.
d3caf80485da78c8eb050ff4d9e33a2ee6c8feda
tests/rietveld/test_event_handler.py
tests/rietveld/test_event_handler.py
from __future__ import absolute_import, print_function import unittest from qtpy.QtWidgets import QApplication from addie.rietveld import event_handler class RietveldEventHandlerTests(unittest.TestCase): def setUp(self): self.main_window = QApplication([]) ''' def tearDown(self): self.main_window.quit() ''' def test_evt_change_gss_mode_exception(self): """Test we can extract a bank id from bank workspace name""" f = event_handler.evt_change_gss_mode self.assertRaises(NotImplementedError, f, None) if __name__ == '__main__': unittest.main() # pragma: no cover
from __future__ import absolute_import, print_function import pytest from addie.rietveld import event_handler @pytest.fixture def rietveld_event_handler(qtbot): return event_handler def test_evt_change_gss_mode_exception(qtbot, rietveld_event_handler): """Test we can extract a bank id from bank workspace name""" with pytest.raises(NotImplementedError) as e: rietveld_event_handler.evt_change_gss_mode(None)
Refactor rietveld.event_handler test to use pytest-qt
Refactor rietveld.event_handler test to use pytest-qt
Python
mit
neutrons/FastGR,neutrons/FastGR,neutrons/FastGR
from __future__ import absolute_import, print_function import unittest from qtpy.QtWidgets import QApplication from addie.rietveld import event_handler class RietveldEventHandlerTests(unittest.TestCase): def setUp(self): self.main_window = QApplication([]) ''' def tearDown(self): self.main_window.quit() ''' def test_evt_change_gss_mode_exception(self): """Test we can extract a bank id from bank workspace name""" f = event_handler.evt_change_gss_mode self.assertRaises(NotImplementedError, f, None) if __name__ == '__main__': unittest.main() # pragma: no cover Refactor rietveld.event_handler test to use pytest-qt
from __future__ import absolute_import, print_function import pytest from addie.rietveld import event_handler @pytest.fixture def rietveld_event_handler(qtbot): return event_handler def test_evt_change_gss_mode_exception(qtbot, rietveld_event_handler): """Test we can extract a bank id from bank workspace name""" with pytest.raises(NotImplementedError) as e: rietveld_event_handler.evt_change_gss_mode(None)
<commit_before>from __future__ import absolute_import, print_function import unittest from qtpy.QtWidgets import QApplication from addie.rietveld import event_handler class RietveldEventHandlerTests(unittest.TestCase): def setUp(self): self.main_window = QApplication([]) ''' def tearDown(self): self.main_window.quit() ''' def test_evt_change_gss_mode_exception(self): """Test we can extract a bank id from bank workspace name""" f = event_handler.evt_change_gss_mode self.assertRaises(NotImplementedError, f, None) if __name__ == '__main__': unittest.main() # pragma: no cover <commit_msg>Refactor rietveld.event_handler test to use pytest-qt<commit_after>
from __future__ import absolute_import, print_function import pytest from addie.rietveld import event_handler @pytest.fixture def rietveld_event_handler(qtbot): return event_handler def test_evt_change_gss_mode_exception(qtbot, rietveld_event_handler): """Test we can extract a bank id from bank workspace name""" with pytest.raises(NotImplementedError) as e: rietveld_event_handler.evt_change_gss_mode(None)
from __future__ import absolute_import, print_function import unittest from qtpy.QtWidgets import QApplication from addie.rietveld import event_handler class RietveldEventHandlerTests(unittest.TestCase): def setUp(self): self.main_window = QApplication([]) ''' def tearDown(self): self.main_window.quit() ''' def test_evt_change_gss_mode_exception(self): """Test we can extract a bank id from bank workspace name""" f = event_handler.evt_change_gss_mode self.assertRaises(NotImplementedError, f, None) if __name__ == '__main__': unittest.main() # pragma: no cover Refactor rietveld.event_handler test to use pytest-qtfrom __future__ import absolute_import, print_function import pytest from addie.rietveld import event_handler @pytest.fixture def rietveld_event_handler(qtbot): return event_handler def test_evt_change_gss_mode_exception(qtbot, rietveld_event_handler): """Test we can extract a bank id from bank workspace name""" with pytest.raises(NotImplementedError) as e: rietveld_event_handler.evt_change_gss_mode(None)
<commit_before>from __future__ import absolute_import, print_function import unittest from qtpy.QtWidgets import QApplication from addie.rietveld import event_handler class RietveldEventHandlerTests(unittest.TestCase): def setUp(self): self.main_window = QApplication([]) ''' def tearDown(self): self.main_window.quit() ''' def test_evt_change_gss_mode_exception(self): """Test we can extract a bank id from bank workspace name""" f = event_handler.evt_change_gss_mode self.assertRaises(NotImplementedError, f, None) if __name__ == '__main__': unittest.main() # pragma: no cover <commit_msg>Refactor rietveld.event_handler test to use pytest-qt<commit_after>from __future__ import absolute_import, print_function import pytest from addie.rietveld import event_handler @pytest.fixture def rietveld_event_handler(qtbot): return event_handler def test_evt_change_gss_mode_exception(qtbot, rietveld_event_handler): """Test we can extract a bank id from bank workspace name""" with pytest.raises(NotImplementedError) as e: rietveld_event_handler.evt_change_gss_mode(None)
4e2fd123b77572bdf74938d08f3e84ccfa15af36
pycargr/cli.py
pycargr/cli.py
import csv from argparse import ArgumentParser from json import dumps from pycargr.model import to_dict from pycargr.parser import parse_car_page parser = ArgumentParser() parser.add_argument('car_ids', nargs='+') parser.add_argument('--output', choices=['csv', 'json', 'stdout'], default='stdout') def main(): args = parser.parse_args() car_ids = args.car_ids output = args.output results = [] for cid in car_ids: results.append(to_dict(parse_car_page(cid))) if output == 'csv': with open('data.csv', 'w') as f: writer = csv.DictWriter(f, fieldnames=results[0].keys()) writer.writeheader() for d in results: # images is a list - not suitable for csv d.pop('images') writer.writerow(d) elif output == 'stdout': print(dumps(results, sort_keys=True, indent=4, ensure_ascii=False)) if __name__ == '__main__': main()
import csv from argparse import ArgumentParser from json import dumps from pycargr.model import to_dict from pycargr.parser import parse_car_page parser = ArgumentParser() parser.add_argument('car_ids', nargs='+') parser.add_argument('--output', choices=['csv', 'json', 'stdout'], default='stdout') def main(): args = parser.parse_args() car_ids = args.car_ids output = args.output results = [] for cid in car_ids: results.append(to_dict(parse_car_page(cid))) if output == 'csv': with open('data.csv', 'w') as f: writer = csv.DictWriter(f, fieldnames=results[0].keys()) writer.writeheader() for d in results: # images is a list - not suitable for csv d.pop('images') writer.writerow(d) elif output == 'json' or output == 'stdout': print(dumps(results, sort_keys=True, indent=4, ensure_ascii=False)) if __name__ == '__main__': main()
Support both json and stdout the same
Support both json and stdout the same
Python
mit
Florents-Tselai/PyCarGr
import csv from argparse import ArgumentParser from json import dumps from pycargr.model import to_dict from pycargr.parser import parse_car_page parser = ArgumentParser() parser.add_argument('car_ids', nargs='+') parser.add_argument('--output', choices=['csv', 'json', 'stdout'], default='stdout') def main(): args = parser.parse_args() car_ids = args.car_ids output = args.output results = [] for cid in car_ids: results.append(to_dict(parse_car_page(cid))) if output == 'csv': with open('data.csv', 'w') as f: writer = csv.DictWriter(f, fieldnames=results[0].keys()) writer.writeheader() for d in results: # images is a list - not suitable for csv d.pop('images') writer.writerow(d) elif output == 'stdout': print(dumps(results, sort_keys=True, indent=4, ensure_ascii=False)) if __name__ == '__main__': main() Support both json and stdout the same
import csv from argparse import ArgumentParser from json import dumps from pycargr.model import to_dict from pycargr.parser import parse_car_page parser = ArgumentParser() parser.add_argument('car_ids', nargs='+') parser.add_argument('--output', choices=['csv', 'json', 'stdout'], default='stdout') def main(): args = parser.parse_args() car_ids = args.car_ids output = args.output results = [] for cid in car_ids: results.append(to_dict(parse_car_page(cid))) if output == 'csv': with open('data.csv', 'w') as f: writer = csv.DictWriter(f, fieldnames=results[0].keys()) writer.writeheader() for d in results: # images is a list - not suitable for csv d.pop('images') writer.writerow(d) elif output == 'json' or output == 'stdout': print(dumps(results, sort_keys=True, indent=4, ensure_ascii=False)) if __name__ == '__main__': main()
<commit_before>import csv from argparse import ArgumentParser from json import dumps from pycargr.model import to_dict from pycargr.parser import parse_car_page parser = ArgumentParser() parser.add_argument('car_ids', nargs='+') parser.add_argument('--output', choices=['csv', 'json', 'stdout'], default='stdout') def main(): args = parser.parse_args() car_ids = args.car_ids output = args.output results = [] for cid in car_ids: results.append(to_dict(parse_car_page(cid))) if output == 'csv': with open('data.csv', 'w') as f: writer = csv.DictWriter(f, fieldnames=results[0].keys()) writer.writeheader() for d in results: # images is a list - not suitable for csv d.pop('images') writer.writerow(d) elif output == 'stdout': print(dumps(results, sort_keys=True, indent=4, ensure_ascii=False)) if __name__ == '__main__': main() <commit_msg>Support both json and stdout the same<commit_after>
import csv from argparse import ArgumentParser from json import dumps from pycargr.model import to_dict from pycargr.parser import parse_car_page parser = ArgumentParser() parser.add_argument('car_ids', nargs='+') parser.add_argument('--output', choices=['csv', 'json', 'stdout'], default='stdout') def main(): args = parser.parse_args() car_ids = args.car_ids output = args.output results = [] for cid in car_ids: results.append(to_dict(parse_car_page(cid))) if output == 'csv': with open('data.csv', 'w') as f: writer = csv.DictWriter(f, fieldnames=results[0].keys()) writer.writeheader() for d in results: # images is a list - not suitable for csv d.pop('images') writer.writerow(d) elif output == 'json' or output == 'stdout': print(dumps(results, sort_keys=True, indent=4, ensure_ascii=False)) if __name__ == '__main__': main()
import csv from argparse import ArgumentParser from json import dumps from pycargr.model import to_dict from pycargr.parser import parse_car_page parser = ArgumentParser() parser.add_argument('car_ids', nargs='+') parser.add_argument('--output', choices=['csv', 'json', 'stdout'], default='stdout') def main(): args = parser.parse_args() car_ids = args.car_ids output = args.output results = [] for cid in car_ids: results.append(to_dict(parse_car_page(cid))) if output == 'csv': with open('data.csv', 'w') as f: writer = csv.DictWriter(f, fieldnames=results[0].keys()) writer.writeheader() for d in results: # images is a list - not suitable for csv d.pop('images') writer.writerow(d) elif output == 'stdout': print(dumps(results, sort_keys=True, indent=4, ensure_ascii=False)) if __name__ == '__main__': main() Support both json and stdout the sameimport csv from argparse import ArgumentParser from json import dumps from pycargr.model import to_dict from pycargr.parser import parse_car_page parser = ArgumentParser() parser.add_argument('car_ids', nargs='+') parser.add_argument('--output', choices=['csv', 'json', 'stdout'], default='stdout') def main(): args = parser.parse_args() car_ids = args.car_ids output = args.output results = [] for cid in car_ids: results.append(to_dict(parse_car_page(cid))) if output == 'csv': with open('data.csv', 'w') as f: writer = csv.DictWriter(f, fieldnames=results[0].keys()) writer.writeheader() for d in results: # images is a list - not suitable for csv d.pop('images') writer.writerow(d) elif output == 'json' or output == 'stdout': print(dumps(results, sort_keys=True, indent=4, ensure_ascii=False)) if __name__ == '__main__': main()
<commit_before>import csv from argparse import ArgumentParser from json import dumps from pycargr.model import to_dict from pycargr.parser import parse_car_page parser = ArgumentParser() parser.add_argument('car_ids', nargs='+') parser.add_argument('--output', choices=['csv', 'json', 'stdout'], default='stdout') def main(): args = parser.parse_args() car_ids = args.car_ids output = args.output results = [] for cid in car_ids: results.append(to_dict(parse_car_page(cid))) if output == 'csv': with open('data.csv', 'w') as f: writer = csv.DictWriter(f, fieldnames=results[0].keys()) writer.writeheader() for d in results: # images is a list - not suitable for csv d.pop('images') writer.writerow(d) elif output == 'stdout': print(dumps(results, sort_keys=True, indent=4, ensure_ascii=False)) if __name__ == '__main__': main() <commit_msg>Support both json and stdout the same<commit_after>import csv from argparse import ArgumentParser from json import dumps from pycargr.model import to_dict from pycargr.parser import parse_car_page parser = ArgumentParser() parser.add_argument('car_ids', nargs='+') parser.add_argument('--output', choices=['csv', 'json', 'stdout'], default='stdout') def main(): args = parser.parse_args() car_ids = args.car_ids output = args.output results = [] for cid in car_ids: results.append(to_dict(parse_car_page(cid))) if output == 'csv': with open('data.csv', 'w') as f: writer = csv.DictWriter(f, fieldnames=results[0].keys()) writer.writeheader() for d in results: # images is a list - not suitable for csv d.pop('images') writer.writerow(d) elif output == 'json' or output == 'stdout': print(dumps(results, sort_keys=True, indent=4, ensure_ascii=False)) if __name__ == '__main__': main()
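The json/stdout branch above tests equality twice; a membership test states the shared code path once. A minimal sketch under that assumption (the emit name is illustrative, not part of pycargr):

from json import dumps

def emit(results, output='stdout'):
    # 'json' and 'stdout' share one output path, so test membership once
    if output in ('json', 'stdout'):
        print(dumps(results, sort_keys=True, indent=4, ensure_ascii=False))

emit([{'id': 1, 'title': 'example'}])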
e9a0d9a2d64b00328f99d526db9cc67cad478760
src/mmmblog/models.py
src/mmmblog/models.py
from django.db import models FORMAT_CHOICES = ( ('html', 'Raw HTML'), ('markdown', 'Markdown'), ) class Blog(models.Model): date = models.DateTimeField(auto_now_add=True) title = models.CharField(max_length=250) content = models.TextField() format = models.CharField(max_length=10, choices=FORMAT_CHOICES, default='html') published = models.BooleanField(default=True) sticky = models.BooleanField(default=False) def __unicode__(self): return self.title def get_absolute_url(self): return '/blog/%i/' % self.id class Link(models.Model): date = models.DateTimeField(auto_now_add=True) title = models.CharField(max_length=250) url = models.CharField(max_length=200) # URLField doesn't allow ftp:// ! extra = models.TextField(blank=True) published = models.BooleanField(default=True) sticky = models.BooleanField(default=False) def __unicode__(self): return self.title
from django.db import models from django.db.models import signals from staticgenerator import quick_publish, quick_delete FORMAT_CHOICES = ( ('html', 'Raw HTML'), ('markdown', 'Markdown'), ) class Blog(models.Model): date = models.DateTimeField(auto_now_add=True) title = models.CharField(max_length=250) content = models.TextField() format = models.CharField(max_length=10, choices=FORMAT_CHOICES, default='html') published = models.BooleanField(default=True) sticky = models.BooleanField(default=False) def __unicode__(self): return self.title def get_absolute_url(self): return '/blog/%i/' % self.id class Link(models.Model): date = models.DateTimeField(auto_now_add=True) title = models.CharField(max_length=250) url = models.CharField(max_length=200) # URLField doesn't allow ftp:// ! extra = models.TextField(blank=True) published = models.BooleanField(default=True) sticky = models.BooleanField(default=False) def __unicode__(self): return self.title def publish_blog(sender, **kwargs): instance = kwargs.get('instance') quick_publish('/') quick_publish('/blog/') quick_publish('/blog/%i/' % instance.id) def unpublish_blog(sender, **kwargs): instance = kwargs.get('instance') quick_delete('/blog/%i/' % instance.id) quick_publish('/') signals.post_save.connect(publish_blog, sender=Blog) signals.post_delete.connect(unpublish_blog, sender=Blog)
Add signals to call staticgenerator
Add signals to call staticgenerator
Python
mit
fajran/mmmblog
from django.db import models FORMAT_CHOICES = ( ('html', 'Raw HTML'), ('markdown', 'Markdown'), ) class Blog(models.Model): date = models.DateTimeField(auto_now_add=True) title = models.CharField(max_length=250) content = models.TextField() format = models.CharField(max_length=10, choices=FORMAT_CHOICES, default='html') published = models.BooleanField(default=True) sticky = models.BooleanField(default=False) def __unicode__(self): return self.title def get_absolute_url(self): return '/blog/%i/' % self.id class Link(models.Model): date = models.DateTimeField(auto_now_add=True) title = models.CharField(max_length=250) url = models.CharField(max_length=200) # URLField doesn't allow ftp:// ! extra = models.TextField(blank=True) published = models.BooleanField(default=True) sticky = models.BooleanField(default=False) def __unicode__(self): return self.title Add signals to call staticgenerator
from django.db import models from django.db.models import signals from staticgenerator import quick_publish, quick_delete FORMAT_CHOICES = ( ('html', 'Raw HTML'), ('markdown', 'Markdown'), ) class Blog(models.Model): date = models.DateTimeField(auto_now_add=True) title = models.CharField(max_length=250) content = models.TextField() format = models.CharField(max_length=10, choices=FORMAT_CHOICES, default='html') published = models.BooleanField(default=True) sticky = models.BooleanField(default=False) def __unicode__(self): return self.title def get_absolute_url(self): return '/blog/%i/' % self.id class Link(models.Model): date = models.DateTimeField(auto_now_add=True) title = models.CharField(max_length=250) url = models.CharField(max_length=200) # URLField doesn't allow ftp:// ! extra = models.TextField(blank=True) published = models.BooleanField(default=True) sticky = models.BooleanField(default=False) def __unicode__(self): return self.title def publish_blog(sender, **kwargs): instance = kwargs.get('instance') quick_publish('/') quick_publish('/blog/') quick_publish('/blog/%i/' % instance.id) def unpublish_blog(sender, **kwargs): instance = kwargs.get('instance') quick_delete('/blog/%i/' % instance.id) quick_publish('/') signals.post_save.connect(publish_blog, sender=Blog) signals.post_delete.connect(unpublish_blog, sender=Blog)
<commit_before> from django.db import models FORMAT_CHOICES = ( ('html', 'Raw HTML'), ('markdown', 'Markdown'), ) class Blog(models.Model): date = models.DateTimeField(auto_now_add=True) title = models.CharField(max_length=250) content = models.TextField() format = models.CharField(max_length=10, choices=FORMAT_CHOICES, default='html') published = models.BooleanField(default=True) sticky = models.BooleanField(default=False) def __unicode__(self): return self.title def get_absolute_url(self): return '/blog/%i/' % self.id class Link(models.Model): date = models.DateTimeField(auto_now_add=True) title = models.CharField(max_length=250) url = models.CharField(max_length=200) # URLField doesn't allow ftp:// ! extra = models.TextField(blank=True) published = models.BooleanField(default=True) sticky = models.BooleanField(default=False) def __unicode__(self): return self.title <commit_msg>Add signals to call staticgenerator<commit_after>
from django.db import models from django.db.models import signals from staticgenerator import quick_publish, quick_delete FORMAT_CHOICES = ( ('html', 'Raw HTML'), ('markdown', 'Markdown'), ) class Blog(models.Model): date = models.DateTimeField(auto_now_add=True) title = models.CharField(max_length=250) content = models.TextField() format = models.CharField(max_length=10, choices=FORMAT_CHOICES, default='html') published = models.BooleanField(default=True) sticky = models.BooleanField(default=False) def __unicode__(self): return self.title def get_absolute_url(self): return '/blog/%i/' % self.id class Link(models.Model): date = models.DateTimeField(auto_now_add=True) title = models.CharField(max_length=250) url = models.CharField(max_length=200) # URLField doesn't allow ftp:// ! extra = models.TextField(blank=True) published = models.BooleanField(default=True) sticky = models.BooleanField(default=False) def __unicode__(self): return self.title def publish_blog(sender, **kwargs): instance = kwargs.get('instance') quick_publish('/') quick_publish('/blog/') quick_publish('/blog/%i/' % instance.id) def unpublish_blog(sender, **kwargs): instance = kwargs.get('instance') quick_delete('/blog/%i/' % instance.id) quick_publish('/') signals.post_save.connect(publish_blog, sender=Blog) signals.post_delete.connect(unpublish_blog, sender=Blog)
from django.db import models FORMAT_CHOICES = ( ('html', 'Raw HTML'), ('markdown', 'Markdown'), ) class Blog(models.Model): date = models.DateTimeField(auto_now_add=True) title = models.CharField(max_length=250) content = models.TextField() format = models.CharField(max_length=10, choices=FORMAT_CHOICES, default='html') published = models.BooleanField(default=True) sticky = models.BooleanField(default=False) def __unicode__(self): return self.title def get_absolute_url(self): return '/blog/%i/' % self.id class Link(models.Model): date = models.DateTimeField(auto_now_add=True) title = models.CharField(max_length=250) url = models.CharField(max_length=200) # URLField doesn't allow ftp:// ! extra = models.TextField(blank=True) published = models.BooleanField(default=True) sticky = models.BooleanField(default=False) def __unicode__(self): return self.title Add signals to call staticgenerator from django.db import models from django.db.models import signals from staticgenerator import quick_publish, quick_delete FORMAT_CHOICES = ( ('html', 'Raw HTML'), ('markdown', 'Markdown'), ) class Blog(models.Model): date = models.DateTimeField(auto_now_add=True) title = models.CharField(max_length=250) content = models.TextField() format = models.CharField(max_length=10, choices=FORMAT_CHOICES, default='html') published = models.BooleanField(default=True) sticky = models.BooleanField(default=False) def __unicode__(self): return self.title def get_absolute_url(self): return '/blog/%i/' % self.id class Link(models.Model): date = models.DateTimeField(auto_now_add=True) title = models.CharField(max_length=250) url = models.CharField(max_length=200) # URLField doesn't allow ftp:// ! extra = models.TextField(blank=True) published = models.BooleanField(default=True) sticky = models.BooleanField(default=False) def __unicode__(self): return self.title def publish_blog(sender, **kwargs): instance = kwargs.get('instance') quick_publish('/') quick_publish('/blog/') quick_publish('/blog/%i/' % instance.id) def unpublish_blog(sender, **kwargs): instance = kwargs.get('instance') quick_delete('/blog/%i/' % instance.id) quick_publish('/') signals.post_save.connect(publish_blog, sender=Blog) signals.post_delete.connect(unpublish_blog, sender=Blog)
<commit_before> from django.db import models FORMAT_CHOICES = ( ('html', 'Raw HTML'), ('markdown', 'Markdown'), ) class Blog(models.Model): date = models.DateTimeField(auto_now_add=True) title = models.CharField(max_length=250) content = models.TextField() format = models.CharField(max_length=10, choices=FORMAT_CHOICES, default='html') published = models.BooleanField(default=True) sticky = models.BooleanField(default=False) def __unicode__(self): return self.title def get_absolute_url(self): return '/blog/%i/' % self.id class Link(models.Model): date = models.DateTimeField(auto_now_add=True) title = models.CharField(max_length=250) url = models.CharField(max_length=200) # URLField doesn't allow ftp:// ! extra = models.TextField(blank=True) published = models.BooleanField(default=True) sticky = models.BooleanField(default=False) def __unicode__(self): return self.title <commit_msg>Add signals to call staticgenerator<commit_after> from django.db import models from django.db.models import signals from staticgenerator import quick_publish, quick_delete FORMAT_CHOICES = ( ('html', 'Raw HTML'), ('markdown', 'Markdown'), ) class Blog(models.Model): date = models.DateTimeField(auto_now_add=True) title = models.CharField(max_length=250) content = models.TextField() format = models.CharField(max_length=10, choices=FORMAT_CHOICES, default='html') published = models.BooleanField(default=True) sticky = models.BooleanField(default=False) def __unicode__(self): return self.title def get_absolute_url(self): return '/blog/%i/' % self.id class Link(models.Model): date = models.DateTimeField(auto_now_add=True) title = models.CharField(max_length=250) url = models.CharField(max_length=200) # URLField doesn't allow ftp:// ! extra = models.TextField(blank=True) published = models.BooleanField(default=True) sticky = models.BooleanField(default=False) def __unicode__(self): return self.title def publish_blog(sender, **kwargs): instance = kwargs.get('instance') quick_publish('/') quick_publish('/blog/') quick_publish('/blog/%i/' % instance.id) def unpublish_blog(sender, **kwargs): instance = kwargs.get('instance') quick_delete('/blog/%i/' % instance.id) quick_publish('/') signals.post_save.connect(publish_blog, sender=Blog) signals.post_delete.connect(unpublish_blog, sender=Blog)
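The module-level connect() calls above run every time the models module is imported, which can register a receiver more than once. Django's signal API takes a dispatch_uid that makes registration idempotent; a self-contained sketch using a plain Signal for illustration (the uid string is an assumption, not from the commit):

from django.dispatch import Signal

post_publish = Signal()

def publish_blog(sender, **kwargs):
    print("published", kwargs.get('instance'))

# the same dispatch_uid makes the second connect() a no-op
post_publish.connect(publish_blog, dispatch_uid='mmmblog_publish_blog')
post_publish.connect(publish_blog, dispatch_uid='mmmblog_publish_blog')

post_publish.send(sender=None, instance='a-blog-post')  # fires exactly once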
2cadf26e34a5cfaa59b6ae67065dd257fd45cfe5
students/psbriant/session010/timing_cm.py
students/psbriant/session010/timing_cm.py
#!/usr/bin/env python """ Timing context manager """ class Timer: def __enter__(self): pass
#!/usr/bin/env python """ Timing context manager """ import time class Timer: def __enter__(self): self.start = time.time() def __exit__(self, exc_type, exc_val, exc_tb): print("elapsed time:", time.time() - self.start)
Add enter and exit methods.
Add enter and exit methods.
Python
unlicense
UWPCE-PythonCert/IntroPython2016,weidnem/IntroPython2016,UWPCE-PythonCert/IntroPython2016,weidnem/IntroPython2016,weidnem/IntroPython2016,UWPCE-PythonCert/IntroPython2016
#!/usr/bin/env python """ Timing context manager """ class Timer: def __enter__(self): pass Add enter and exit methods.
#!/usr/bin/env python """ Timing context manager """ import time class Timer: def __enter__(self): self.start = time.time() def __exit__(self, exc_type, exc_val, exc_tb): print("elapsed time:", time.time() - self.start)
<commit_before>#!/usr/bin/env python """ Timing context manager """ class Timer: def __enter__(self): pass <commit_msg>Add enter and exit methods.<commit_after>
#!/usr/bin/env python """ Timing context manager """ import time class Timer: def __enter__(self): self.start = time.time() def __exit__(self, exc_type, exc_val, exc_tb): print("elapsed time:", time.time() - self.start)
#!/usr/bin/env python """ Timing context manager """ class Timer: def __enter__(self): pass Add enter and exit methods.#!/usr/bin/env python """ Timing context manager """ import time class Timer: def __enter__(self): self.start = time.time() def __exit__(self, exc_type, exc_val, exc_tb): print("elapsed time:", time.time() - self.start)
<commit_before>#!/usr/bin/env python """ Timing context manager """ class Timer: def __enter__(self): pass <commit_msg>Add enter and exit methods.<commit_after>#!/usr/bin/env python """ Timing context manager """ import time class Timer: def __enter__(self): self.start = time.time() def __exit__(self, exc_type, exc_val, exc_tb): print("elapsed time:", time.time() - self.start)
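__enter__ above records the start time but returns nothing, so "with Timer() as t:" would bind t to None. A usage sketch in which __enter__ returns the instance and the elapsed time is kept on an attribute (an assumption about how the class could be used, not the committed code):

import time

class Timer:
    def __enter__(self):
        self.start = time.time()
        return self  # lets callers write "with Timer() as t:"

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.elapsed = time.time() - self.start
        print("elapsed time:", self.elapsed)

with Timer() as t:
    total = sum(range(1000000))
print("measured:", t.elapsed)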
15621b2d1dc58998f4e9f84ec8f4ef2c50458868
openerp/tests/addons/test_translation_import/models.py
openerp/tests/addons/test_translation_import/models.py
# -*- coding: utf-8 -*- import openerp from openerp.tools.translate import _ class m(openerp.osv.osv.Model): """ A model to provide source strings. """ _name = 'test.translation.import' _columns = { 'name': openerp.osv.fields.char( '1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB', size=32, help='Efgh'), } _('Ijkl') # With the name label above, this source string should be generated twice. _('1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB') # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
# -*- coding: utf-8 -*- import openerp from openerp.tools.translate import _ class m(openerp.osv.orm.TransientModel): """ A model to provide source strings. """ _name = 'test.translation.import' _columns = { 'name': openerp.osv.fields.char( '1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB', size=32, help='Efgh'), } _('Ijkl') # With the name label above, this source string should be generated twice. _('1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB') # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
Use TransientModel for the dummy model used in translation testing
[IMP] Use TransientModel for the dummy model used in translation testing
Python
agpl-3.0
akretion/openerp-server,akretion/openerp-server,akretion/openerp-server
# -*- coding: utf-8 -*- import openerp from openerp.tools.translate import _ class m(openerp.osv.osv.Model): """ A model to provide source strings. """ _name = 'test.translation.import' _columns = { 'name': openerp.osv.fields.char( '1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB', size=32, help='Efgh'), } _('Ijkl') # With the name label above, this source string should be generated twice. _('1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB') # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: [IMP] Use TransientModel for the dummy model used in translation testing
# -*- coding: utf-8 -*- import openerp from openerp.tools.translate import _ class m(openerp.osv.orm.TransientModel): """ A model to provide source strings. """ _name = 'test.translation.import' _columns = { 'name': openerp.osv.fields.char( '1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB', size=32, help='Efgh'), } _('Ijkl') # With the name label above, this source string should be generated twice. _('1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB') # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
<commit_before># -*- coding: utf-8 -*- import openerp from openerp.tools.translate import _ class m(openerp.osv.osv.Model): """ A model to provide source strings. """ _name = 'test.translation.import' _columns = { 'name': openerp.osv.fields.char( '1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB', size=32, help='Efgh'), } _('Ijkl') # With the name label above, this source string should be generated twice. _('1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB') # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: <commit_msg>[IMP] Use TransientModel for the dummy model used in translation testing<commit_after>
# -*- coding: utf-8 -*- import openerp from openerp.tools.translate import _ class m(openerp.osv.orm.TransientModel): """ A model to provide source strings. """ _name = 'test.translation.import' _columns = { 'name': openerp.osv.fields.char( '1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB', size=32, help='Efgh'), } _('Ijkl') # With the name label above, this source string should be generated twice. _('1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB') # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
# -*- coding: utf-8 -*- import openerp from openerp.tools.translate import _ class m(openerp.osv.osv.Model): """ A model to provide source strings. """ _name = 'test.translation.import' _columns = { 'name': openerp.osv.fields.char( '1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB', size=32, help='Efgh'), } _('Ijkl') # With the name label above, this source string should be generated twice. _('1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB') # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: [IMP] Use TransientModel for the dummy model used in translation testing# -*- coding: utf-8 -*- import openerp from openerp.tools.translate import _ class m(openerp.osv.orm.TransientModel): """ A model to provide source strings. """ _name = 'test.translation.import' _columns = { 'name': openerp.osv.fields.char( '1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB', size=32, help='Efgh'), } _('Ijkl') # With the name label above, this source string should be generated twice. _('1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB') # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
<commit_before># -*- coding: utf-8 -*- import openerp from openerp.tools.translate import _ class m(openerp.osv.osv.Model): """ A model to provide source strings. """ _name = 'test.translation.import' _columns = { 'name': openerp.osv.fields.char( '1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB', size=32, help='Efgh'), } _('Ijkl') # With the name label above, this source string should be generated twice. _('1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB') # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: <commit_msg>[IMP] Use TransientModel for the dummy model used in translation testing<commit_after># -*- coding: utf-8 -*- import openerp from openerp.tools.translate import _ class m(openerp.osv.orm.TransientModel): """ A model to provide source strings. """ _name = 'test.translation.import' _columns = { 'name': openerp.osv.fields.char( '1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB', size=32, help='Efgh'), } _('Ijkl') # With the name label above, this source string should be generated twice. _('1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB') # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
daf9c8e39cd141194f8000cb3b8f4694e96401ed
pep438/core.py
pep438/core.py
"""Core pep438 utility functions""" from __future__ import unicode_literals import requests import xmlrpclib import lxml.html from requirements import parse def valid_package(package_name): """Return bool if package_name is a valid package on PyPI""" response = requests.head('https://pypi.python.org/pypi/%s' % package_name) if response.status_code != 404: response.raise_for_status() return response.status_code != 404 def get_urls(package_name): """Return list of URLs on package's PyPI page that would be crawled""" response = requests.get('https://pypi.python.org/simple/%s' % package_name) response.raise_for_status() page = lxml.html.fromstring(response.content) crawled_urls = {link.get('href') for link in page.xpath('//a') if link.get('rel') in ("homepage", "download")} return crawled_urls def get_pypi_packages(fileobj): """Return all PyPI-hosted packages from file-like object""" return [p['name'] for p in parse(fileobj) if not p.get('uri')] def get_pypi_user_packages(user): client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi') return [x[1] for x in client.user_packages(user)]
"""Core pep438 utility functions""" from __future__ import unicode_literals import requests try: import xmlrpclib except ImportError: import xmlrpc.client as xmlrpclib import lxml.html from requirements import parse def valid_package(package_name): """Return bool if package_name is a valid package on PyPI""" response = requests.head('https://pypi.python.org/pypi/%s' % package_name) if response.status_code != 404: response.raise_for_status() return response.status_code != 404 def get_urls(package_name): """Return list of URLs on package's PyPI page that would be crawled""" response = requests.get('https://pypi.python.org/simple/%s' % package_name) response.raise_for_status() page = lxml.html.fromstring(response.content) crawled_urls = {link.get('href') for link in page.xpath('//a') if link.get('rel') in ("homepage", "download")} return crawled_urls def get_pypi_packages(fileobj): """Return all PyPI-hosted packages from file-like object""" return [p['name'] for p in parse(fileobj) if not p.get('uri')] def get_pypi_user_packages(user): client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi') return [x[1] for x in client.user_packages(user)]
Fix broken import in Python 3
Fix broken import in Python 3
Python
mit
treyhunner/pep438
"""Core pep438 utility functions""" from __future__ import unicode_literals import requests import xmlrpclib import lxml.html from requirements import parse def valid_package(package_name): """Return bool if package_name is a valid package on PyPI""" response = requests.head('https://pypi.python.org/pypi/%s' % package_name) if response.status_code != 404: response.raise_for_status() return response.status_code != 404 def get_urls(package_name): """Return list of URLs on package's PyPI page that would be crawled""" response = requests.get('https://pypi.python.org/simple/%s' % package_name) response.raise_for_status() page = lxml.html.fromstring(response.content) crawled_urls = {link.get('href') for link in page.xpath('//a') if link.get('rel') in ("homepage", "download")} return crawled_urls def get_pypi_packages(fileobj): """Return all PyPI-hosted packages from file-like object""" return [p['name'] for p in parse(fileobj) if not p.get('uri')] def get_pypi_user_packages(user): client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi') return [x[1] for x in client.user_packages(user)] Fix broken import in Python 3
"""Core pep438 utility functions""" from __future__ import unicode_literals import requests try: import xmlrpclib except ImportError: import xmlrpc.client as xmlrpclib import lxml.html from requirements import parse def valid_package(package_name): """Return bool if package_name is a valid package on PyPI""" response = requests.head('https://pypi.python.org/pypi/%s' % package_name) if response.status_code != 404: response.raise_for_status() return response.status_code != 404 def get_urls(package_name): """Return list of URLs on package's PyPI page that would be crawled""" response = requests.get('https://pypi.python.org/simple/%s' % package_name) response.raise_for_status() page = lxml.html.fromstring(response.content) crawled_urls = {link.get('href') for link in page.xpath('//a') if link.get('rel') in ("homepage", "download")} return crawled_urls def get_pypi_packages(fileobj): """Return all PyPI-hosted packages from file-like object""" return [p['name'] for p in parse(fileobj) if not p.get('uri')] def get_pypi_user_packages(user): client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi') return [x[1] for x in client.user_packages(user)]
<commit_before>"""Core pep438 utility functions""" from __future__ import unicode_literals import requests import xmlrpclib import lxml.html from requirements import parse def valid_package(package_name): """Return bool if package_name is a valid package on PyPI""" response = requests.head('https://pypi.python.org/pypi/%s' % package_name) if response.status_code != 404: response.raise_for_status() return response.status_code != 404 def get_urls(package_name): """Return list of URLs on package's PyPI page that would be crawled""" response = requests.get('https://pypi.python.org/simple/%s' % package_name) response.raise_for_status() page = lxml.html.fromstring(response.content) crawled_urls = {link.get('href') for link in page.xpath('//a') if link.get('rel') in ("homepage", "download")} return crawled_urls def get_pypi_packages(fileobj): """Return all PyPI-hosted packages from file-like object""" return [p['name'] for p in parse(fileobj) if not p.get('uri')] def get_pypi_user_packages(user): client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi') return [x[1] for x in client.user_packages(user)] <commit_msg>Fix broken import in Python 3<commit_after>
"""Core pep438 utility functions""" from __future__ import unicode_literals import requests try: import xmlrpclib except ImportError: import xmlrpc.client as xmlrpclib import lxml.html from requirements import parse def valid_package(package_name): """Return bool if package_name is a valid package on PyPI""" response = requests.head('https://pypi.python.org/pypi/%s' % package_name) if response.status_code != 404: response.raise_for_status() return response.status_code != 404 def get_urls(package_name): """Return list of URLs on package's PyPI page that would be crawled""" response = requests.get('https://pypi.python.org/simple/%s' % package_name) response.raise_for_status() page = lxml.html.fromstring(response.content) crawled_urls = {link.get('href') for link in page.xpath('//a') if link.get('rel') in ("homepage", "download")} return crawled_urls def get_pypi_packages(fileobj): """Return all PyPI-hosted packages from file-like object""" return [p['name'] for p in parse(fileobj) if not p.get('uri')] def get_pypi_user_packages(user): client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi') return [x[1] for x in client.user_packages(user)]
"""Core pep438 utility functions""" from __future__ import unicode_literals import requests import xmlrpclib import lxml.html from requirements import parse def valid_package(package_name): """Return bool if package_name is a valid package on PyPI""" response = requests.head('https://pypi.python.org/pypi/%s' % package_name) if response.status_code != 404: response.raise_for_status() return response.status_code != 404 def get_urls(package_name): """Return list of URLs on package's PyPI page that would be crawled""" response = requests.get('https://pypi.python.org/simple/%s' % package_name) response.raise_for_status() page = lxml.html.fromstring(response.content) crawled_urls = {link.get('href') for link in page.xpath('//a') if link.get('rel') in ("homepage", "download")} return crawled_urls def get_pypi_packages(fileobj): """Return all PyPI-hosted packages from file-like object""" return [p['name'] for p in parse(fileobj) if not p.get('uri')] def get_pypi_user_packages(user): client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi') return [x[1] for x in client.user_packages(user)] Fix broken import in Python 3"""Core pep438 utility functions""" from __future__ import unicode_literals import requests try: import xmlrpclib except ImportError: import xmlrpc.client as xmlrpclib import lxml.html from requirements import parse def valid_package(package_name): """Return bool if package_name is a valid package on PyPI""" response = requests.head('https://pypi.python.org/pypi/%s' % package_name) if response.status_code != 404: response.raise_for_status() return response.status_code != 404 def get_urls(package_name): """Return list of URLs on package's PyPI page that would be crawled""" response = requests.get('https://pypi.python.org/simple/%s' % package_name) response.raise_for_status() page = lxml.html.fromstring(response.content) crawled_urls = {link.get('href') for link in page.xpath('//a') if link.get('rel') in ("homepage", "download")} return crawled_urls def get_pypi_packages(fileobj): """Return all PyPI-hosted packages from file-like object""" return [p['name'] for p in parse(fileobj) if not p.get('uri')] def get_pypi_user_packages(user): client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi') return [x[1] for x in client.user_packages(user)]
<commit_before>"""Core pep438 utility functions""" from __future__ import unicode_literals import requests import xmlrpclib import lxml.html from requirements import parse def valid_package(package_name): """Return bool if package_name is a valid package on PyPI""" response = requests.head('https://pypi.python.org/pypi/%s' % package_name) if response.status_code != 404: response.raise_for_status() return response.status_code != 404 def get_urls(package_name): """Return list of URLs on package's PyPI page that would be crawled""" response = requests.get('https://pypi.python.org/simple/%s' % package_name) response.raise_for_status() page = lxml.html.fromstring(response.content) crawled_urls = {link.get('href') for link in page.xpath('//a') if link.get('rel') in ("homepage", "download")} return crawled_urls def get_pypi_packages(fileobj): """Return all PyPI-hosted packages from file-like object""" return [p['name'] for p in parse(fileobj) if not p.get('uri')] def get_pypi_user_packages(user): client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi') return [x[1] for x in client.user_packages(user)] <commit_msg>Fix broken import in Python 3<commit_after>"""Core pep438 utility functions""" from __future__ import unicode_literals import requests try: import xmlrpclib except ImportError: import xmlrpc.client as xmlrpclib import lxml.html from requirements import parse def valid_package(package_name): """Return bool if package_name is a valid package on PyPI""" response = requests.head('https://pypi.python.org/pypi/%s' % package_name) if response.status_code != 404: response.raise_for_status() return response.status_code != 404 def get_urls(package_name): """Return list of URLs on package's PyPI page that would be crawled""" response = requests.get('https://pypi.python.org/simple/%s' % package_name) response.raise_for_status() page = lxml.html.fromstring(response.content) crawled_urls = {link.get('href') for link in page.xpath('//a') if link.get('rel') in ("homepage", "download")} return crawled_urls def get_pypi_packages(fileobj): """Return all PyPI-hosted packages from file-like object""" return [p['name'] for p in parse(fileobj) if not p.get('uri')] def get_pypi_user_packages(user): client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi') return [x[1] for x in client.user_packages(user)]
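For reference, the try/except ImportError shim in the new file is the conventional way to alias xmlrpclib across Python 2 and 3; a standalone sketch (constructing a ServerProxy opens no network connection):

try:  # Python 2
    import xmlrpclib
except ImportError:  # Python 3 moved the module
    import xmlrpc.client as xmlrpclib

client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi')
print(type(client))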
b03e1b75099cb46e40f7dcf85dc61e8718aa292d
slack_log_handler/__init__.py
slack_log_handler/__init__.py
import json import traceback from logging import Handler from slacker import Slacker class SlackLogHandler(Handler): def __init__(self, api_key, channel, stack_trace=False, username='Python logger', icon_url=None, icon_emoji=None): Handler.__init__(self) self.slack_chat = Slacker(api_key) self.channel = channel self.stack_trace = stack_trace self.username = username self.icon_url = icon_url self.icon_emoji = icon_emoji if (icon_emoji or icon_url) else ':heavy_exclamation_mark:' if not self.channel.startswith('#'): self.channel = '#' + self.channel def emit(self, record): message = str(record.getMessage()) attachments = [{ 'fallback': message, 'color': 'danger', 'text': '\n'.join(traceback.format_exception(*record.exc_info)) }] self.slack_chat.chat.post_message( text=message, channel=self.channel, username=self.username, icon_url=self.icon_url, icon_emoji=self.icon_emoji, attachments=json.dumps(attachments) )
import json import traceback from logging import Handler, CRITICAL, ERROR, WARNING from slacker import Slacker ERROR_COLOR = 'danger' # color name is built in to Slack API WARNING_COLOR = 'warning' # color name is built in to Slack API INFO_COLOR = '#439FE0' COLORS = { CRITICAL: ERROR_COLOR, ERROR: ERROR_COLOR, WARNING: WARNING_COLOR } class SlackLogHandler(Handler): def __init__(self, api_key, channel, stack_trace=False, username='Python logger', icon_url=None, icon_emoji=None): Handler.__init__(self) self.slack_chat = Slacker(api_key) self.channel = channel self.stack_trace = stack_trace self.username = username self.icon_url = icon_url self.icon_emoji = icon_emoji if (icon_emoji or icon_url) else ':heavy_exclamation_mark:' if not self.channel.startswith('#'): self.channel = '#' + self.channel def emit(self, record): message = str(record.getMessage()) attachments = [{ 'fallback': message, 'color': COLORS.get(self.level, INFO_COLOR), 'text': '\n'.join(traceback.format_exception(*record.exc_info)) }] self.slack_chat.chat.post_message( text=message, channel=self.channel, username=self.username, icon_url=self.icon_url, icon_emoji=self.icon_emoji, attachments=json.dumps(attachments) )
Set attachment color based on log level
Set attachment color based on log level
Python
apache-2.0
mathiasose/slacker_log_handler
import json import traceback from logging import Handler from slacker import Slacker class SlackLogHandler(Handler): def __init__(self, api_key, channel, stack_trace=False, username='Python logger', icon_url=None, icon_emoji=None): Handler.__init__(self) self.slack_chat = Slacker(api_key) self.channel = channel self.stack_trace = stack_trace self.username = username self.icon_url = icon_url self.icon_emoji = icon_emoji if (icon_emoji or icon_url) else ':heavy_exclamation_mark:' if not self.channel.startswith('#'): self.channel = '#' + self.channel def emit(self, record): message = str(record.getMessage()) attachments = [{ 'fallback': message, 'color': 'danger', 'text': '\n'.join(traceback.format_exception(*record.exc_info)) }] self.slack_chat.chat.post_message( text=message, channel=self.channel, username=self.username, icon_url=self.icon_url, icon_emoji=self.icon_emoji, attachments=json.dumps(attachments) ) Set attachment color based on log level
import json import traceback from logging import Handler, CRITICAL, ERROR, WARNING from slacker import Slacker ERROR_COLOR = 'danger' # color name is built in to Slack API WARNING_COLOR = 'warning' # color name is built in to Slack API INFO_COLOR = '#439FE0' COLORS = { CRITICAL: ERROR_COLOR, ERROR: ERROR_COLOR, WARNING: WARNING_COLOR } class SlackLogHandler(Handler): def __init__(self, api_key, channel, stack_trace=False, username='Python logger', icon_url=None, icon_emoji=None): Handler.__init__(self) self.slack_chat = Slacker(api_key) self.channel = channel self.stack_trace = stack_trace self.username = username self.icon_url = icon_url self.icon_emoji = icon_emoji if (icon_emoji or icon_url) else ':heavy_exclamation_mark:' if not self.channel.startswith('#'): self.channel = '#' + self.channel def emit(self, record): message = str(record.getMessage()) attachments = [{ 'fallback': message, 'color': COLORS.get(self.level, INFO_COLOR), 'text': '\n'.join(traceback.format_exception(*record.exc_info)) }] self.slack_chat.chat.post_message( text=message, channel=self.channel, username=self.username, icon_url=self.icon_url, icon_emoji=self.icon_emoji, attachments=json.dumps(attachments) )
<commit_before>import json import traceback from logging import Handler from slacker import Slacker class SlackLogHandler(Handler): def __init__(self, api_key, channel, stack_trace=False, username='Python logger', icon_url=None, icon_emoji=None): Handler.__init__(self) self.slack_chat = Slacker(api_key) self.channel = channel self.stack_trace = stack_trace self.username = username self.icon_url = icon_url self.icon_emoji = icon_emoji if (icon_emoji or icon_url) else ':heavy_exclamation_mark:' if not self.channel.startswith('#'): self.channel = '#' + self.channel def emit(self, record): message = str(record.getMessage()) attachments = [{ 'fallback': message, 'color': 'danger', 'text': '\n'.join(traceback.format_exception(*record.exc_info)) }] self.slack_chat.chat.post_message( text=message, channel=self.channel, username=self.username, icon_url=self.icon_url, icon_emoji=self.icon_emoji, attachments=json.dumps(attachments) ) <commit_msg>Set attachment color based on log level<commit_after>
import json import traceback from logging import Handler, CRITICAL, ERROR, WARNING from slacker import Slacker ERROR_COLOR = 'danger' # color name is built in to Slack API WARNING_COLOR = 'warning' # color name is built in to Slack API INFO_COLOR = '#439FE0' COLORS = { CRITICAL: ERROR_COLOR, ERROR: ERROR_COLOR, WARNING: WARNING_COLOR } class SlackLogHandler(Handler): def __init__(self, api_key, channel, stack_trace=False, username='Python logger', icon_url=None, icon_emoji=None): Handler.__init__(self) self.slack_chat = Slacker(api_key) self.channel = channel self.stack_trace = stack_trace self.username = username self.icon_url = icon_url self.icon_emoji = icon_emoji if (icon_emoji or icon_url) else ':heavy_exclamation_mark:' if not self.channel.startswith('#'): self.channel = '#' + self.channel def emit(self, record): message = str(record.getMessage()) attachments = [{ 'fallback': message, 'color': COLORS.get(self.level, INFO_COLOR), 'text': '\n'.join(traceback.format_exception(*record.exc_info)) }] self.slack_chat.chat.post_message( text=message, channel=self.channel, username=self.username, icon_url=self.icon_url, icon_emoji=self.icon_emoji, attachments=json.dumps(attachments) )
import json import traceback from logging import Handler from slacker import Slacker class SlackLogHandler(Handler): def __init__(self, api_key, channel, stack_trace=False, username='Python logger', icon_url=None, icon_emoji=None): Handler.__init__(self) self.slack_chat = Slacker(api_key) self.channel = channel self.stack_trace = stack_trace self.username = username self.icon_url = icon_url self.icon_emoji = icon_emoji if (icon_emoji or icon_url) else ':heavy_exclamation_mark:' if not self.channel.startswith('#'): self.channel = '#' + self.channel def emit(self, record): message = str(record.getMessage()) attachments = [{ 'fallback': message, 'color': 'danger', 'text': '\n'.join(traceback.format_exception(*record.exc_info)) }] self.slack_chat.chat.post_message( text=message, channel=self.channel, username=self.username, icon_url=self.icon_url, icon_emoji=self.icon_emoji, attachments=json.dumps(attachments) ) Set attachment color based on log levelimport json import traceback from logging import Handler, CRITICAL, ERROR, WARNING from slacker import Slacker ERROR_COLOR = 'danger' # color name is built in to Slack API WARNING_COLOR = 'warning' # color name is built in to Slack API INFO_COLOR = '#439FE0' COLORS = { CRITICAL: ERROR_COLOR, ERROR: ERROR_COLOR, WARNING: WARNING_COLOR } class SlackLogHandler(Handler): def __init__(self, api_key, channel, stack_trace=False, username='Python logger', icon_url=None, icon_emoji=None): Handler.__init__(self) self.slack_chat = Slacker(api_key) self.channel = channel self.stack_trace = stack_trace self.username = username self.icon_url = icon_url self.icon_emoji = icon_emoji if (icon_emoji or icon_url) else ':heavy_exclamation_mark:' if not self.channel.startswith('#'): self.channel = '#' + self.channel def emit(self, record): message = str(record.getMessage()) attachments = [{ 'fallback': message, 'color': COLORS.get(self.level, INFO_COLOR), 'text': '\n'.join(traceback.format_exception(*record.exc_info)) }] self.slack_chat.chat.post_message( text=message, channel=self.channel, username=self.username, icon_url=self.icon_url, icon_emoji=self.icon_emoji, attachments=json.dumps(attachments) )
<commit_before>import json import traceback from logging import Handler from slacker import Slacker class SlackLogHandler(Handler): def __init__(self, api_key, channel, stack_trace=False, username='Python logger', icon_url=None, icon_emoji=None): Handler.__init__(self) self.slack_chat = Slacker(api_key) self.channel = channel self.stack_trace = stack_trace self.username = username self.icon_url = icon_url self.icon_emoji = icon_emoji if (icon_emoji or icon_url) else ':heavy_exclamation_mark:' if not self.channel.startswith('#'): self.channel = '#' + self.channel def emit(self, record): message = str(record.getMessage()) attachments = [{ 'fallback': message, 'color': 'danger', 'text': '\n'.join(traceback.format_exception(*record.exc_info)) }] self.slack_chat.chat.post_message( text=message, channel=self.channel, username=self.username, icon_url=self.icon_url, icon_emoji=self.icon_emoji, attachments=json.dumps(attachments) ) <commit_msg>Set attachment color based on log level<commit_after>import json import traceback from logging import Handler, CRITICAL, ERROR, WARNING from slacker import Slacker ERROR_COLOR = 'danger' # color name is built in to Slack API WARNING_COLOR = 'warning' # color name is built in to Slack API INFO_COLOR = '#439FE0' COLORS = { CRITICAL: ERROR_COLOR, ERROR: ERROR_COLOR, WARNING: WARNING_COLOR } class SlackLogHandler(Handler): def __init__(self, api_key, channel, stack_trace=False, username='Python logger', icon_url=None, icon_emoji=None): Handler.__init__(self) self.slack_chat = Slacker(api_key) self.channel = channel self.stack_trace = stack_trace self.username = username self.icon_url = icon_url self.icon_emoji = icon_emoji if (icon_emoji or icon_url) else ':heavy_exclamation_mark:' if not self.channel.startswith('#'): self.channel = '#' + self.channel def emit(self, record): message = str(record.getMessage()) attachments = [{ 'fallback': message, 'color': COLORS.get(self.level, INFO_COLOR), 'text': '\n'.join(traceback.format_exception(*record.exc_info)) }] self.slack_chat.chat.post_message( text=message, channel=self.channel, username=self.username, icon_url=self.icon_url, icon_emoji=self.icon_emoji, attachments=json.dumps(attachments) )
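Two caveats about emit() above: COLORS.get(self.level, ...) keys the color off the handler's configured threshold rather than the severity of the record being emitted, and traceback.format_exception(*record.exc_info) raises TypeError for ordinary log calls, where exc_info is None. A per-record sketch using standard logging attributes (attachment_for is an illustrative helper, not part of the package):

import logging
import traceback

ERROR_COLOR = 'danger'     # color name built in to Slack API
WARNING_COLOR = 'warning'  # color name built in to Slack API
INFO_COLOR = '#439FE0'
COLORS = {logging.CRITICAL: ERROR_COLOR, logging.ERROR: ERROR_COLOR,
          logging.WARNING: WARNING_COLOR}

def attachment_for(record):
    """Build a Slack attachment colored by this record's own severity."""
    text = ''
    if record.exc_info:  # set only by logger.exception() / exc_info=True
        text = '\n'.join(traceback.format_exception(*record.exc_info))
    return {
        'fallback': record.getMessage(),
        'color': COLORS.get(record.levelno, INFO_COLOR),
        'text': text,
    }

record = logging.getLogger('demo').makeRecord(
    'demo', logging.ERROR, __file__, 0, 'something failed', (), None)
print(attachment_for(record))  # color 'danger', empty trace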
d5f650cc6932e585a848cdd9aaa257342c90a983
publishconf.py
publishconf.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals # This file is only used if you use `make publish` or # explicitly specify it as your config file. import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = '' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = True # Following items are often useful when publishing #DISQUS_SITENAME = "" GOOGLE_ANALYTICS = 'UA-54747412-1'
#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals # This file is only used if you use `make publish` or # explicitly specify it as your config file. import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = '' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = True # Following items are often useful when publishing #DISQUS_SITENAME = "" GOOGLE_ANALYTICS_UNIVERSAL = 'UA-54747412-1'
Switch to GA universal id.
Switch to GA universal id.
Python
mit
donnemartin/outdated-donnemartin.github.io,donnemartin/outdated-donnemartin.github.io,donnemartin/outdated-donnemartin.github.io,donnemartin/outdated-donnemartin.github.io,donnemartin/outdated-donnemartin.github.io
#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals # This file is only used if you use `make publish` or # explicitly specify it as your config file. import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = '' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = True # Following items are often useful when publishing #DISQUS_SITENAME = "" GOOGLE_ANALYTICS = 'UA-54747412-1' Switch to GA universal id.
#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals # This file is only used if you use `make publish` or # explicitly specify it as your config file. import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = '' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = True # Following items are often useful when publishing #DISQUS_SITENAME = "" GOOGLE_ANALYTICS_UNIVERSAL = 'UA-54747412-1'
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals # This file is only used if you use `make publish` or # explicitly specify it as your config file. import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = '' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = True # Following items are often useful when publishing #DISQUS_SITENAME = "" GOOGLE_ANALYTICS = 'UA-54747412-1' <commit_msg>Switch to GA universal id.<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals # This file is only used if you use `make publish` or # explicitly specify it as your config file. import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = '' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = True # Following items are often useful when publishing #DISQUS_SITENAME = "" GOOGLE_ANALYTICS_UNIVERSAL = 'UA-54747412-1'
#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals # This file is only used if you use `make publish` or # explicitly specify it as your config file. import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = '' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = True # Following items are often useful when publishing #DISQUS_SITENAME = "" GOOGLE_ANALYTICS = 'UA-54747412-1' Switch to GA universal id.#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals # This file is only used if you use `make publish` or # explicitly specify it as your config file. import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = '' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = True # Following items are often useful when publishing #DISQUS_SITENAME = "" GOOGLE_ANALYTICS_UNIVERSAL = 'UA-54747412-1'
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals # This file is only used if you use `make publish` or # explicitly specify it as your config file. import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = '' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = True # Following items are often useful when publishing #DISQUS_SITENAME = "" GOOGLE_ANALYTICS = 'UA-54747412-1' <commit_msg>Switch to GA universal id.<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals # This file is only used if you use `make publish` or # explicitly specify it as your config file. import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = '' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = True # Following items are often useful when publishing #DISQUS_SITENAME = "" GOOGLE_ANALYTICS_UNIVERSAL = 'UA-54747412-1'
20654d833deb332dbbe683e6d4e38cef1cc58dd3
webcomix/tests/test_comic_availability.py
webcomix/tests/test_comic_availability.py
import pytest

from webcomix.comic import Comic
from webcomix.supported_comics import supported_comics
from webcomix.util import check_first_pages


@pytest.mark.slow
def test_supported_comics():
    for comic_name, comic_info in supported_comics.items():
        first_pages = Comic.verify_xpath(*comic_info)
        check_first_pages(first_pages)
import pytest

from webcomix.comic import Comic
from webcomix.supported_comics import supported_comics
from webcomix.util import check_first_pages


@pytest.mark.slow
def test_supported_comics():
    for comic_name, comic_info in supported_comics.items():
        comic = Comic(comic_name, *comic_info)
        first_pages = comic.verify_xpath()
        check_first_pages(first_pages)
Refactor comic availability test to reflect changes to Comic class
Refactor comic availability test to reflect changes to Comic class
Python
mit
J-CPelletier/WebComicToCBZ,J-CPelletier/webcomix,J-CPelletier/webcomix
import pytest

from webcomix.comic import Comic
from webcomix.supported_comics import supported_comics
from webcomix.util import check_first_pages


@pytest.mark.slow
def test_supported_comics():
    for comic_name, comic_info in supported_comics.items():
        first_pages = Comic.verify_xpath(*comic_info)
        check_first_pages(first_pages)
Refactor comic availability test to reflect changes to Comic class
import pytest

from webcomix.comic import Comic
from webcomix.supported_comics import supported_comics
from webcomix.util import check_first_pages


@pytest.mark.slow
def test_supported_comics():
    for comic_name, comic_info in supported_comics.items():
        comic = Comic(comic_name, *comic_info)
        first_pages = comic.verify_xpath()
        check_first_pages(first_pages)
<commit_before>import pytest

from webcomix.comic import Comic
from webcomix.supported_comics import supported_comics
from webcomix.util import check_first_pages


@pytest.mark.slow
def test_supported_comics():
    for comic_name, comic_info in supported_comics.items():
        first_pages = Comic.verify_xpath(*comic_info)
        check_first_pages(first_pages)
<commit_msg>Refactor comic availability test to reflect changes to Comic class<commit_after>
import pytest

from webcomix.comic import Comic
from webcomix.supported_comics import supported_comics
from webcomix.util import check_first_pages


@pytest.mark.slow
def test_supported_comics():
    for comic_name, comic_info in supported_comics.items():
        comic = Comic(comic_name, *comic_info)
        first_pages = comic.verify_xpath()
        check_first_pages(first_pages)
import pytest

from webcomix.comic import Comic
from webcomix.supported_comics import supported_comics
from webcomix.util import check_first_pages


@pytest.mark.slow
def test_supported_comics():
    for comic_name, comic_info in supported_comics.items():
        first_pages = Comic.verify_xpath(*comic_info)
        check_first_pages(first_pages)
Refactor comic availability test to reflect changes to Comic classimport pytest

from webcomix.comic import Comic
from webcomix.supported_comics import supported_comics
from webcomix.util import check_first_pages


@pytest.mark.slow
def test_supported_comics():
    for comic_name, comic_info in supported_comics.items():
        comic = Comic(comic_name, *comic_info)
        first_pages = comic.verify_xpath()
        check_first_pages(first_pages)
<commit_before>import pytest

from webcomix.comic import Comic
from webcomix.supported_comics import supported_comics
from webcomix.util import check_first_pages


@pytest.mark.slow
def test_supported_comics():
    for comic_name, comic_info in supported_comics.items():
        first_pages = Comic.verify_xpath(*comic_info)
        check_first_pages(first_pages)
<commit_msg>Refactor comic availability test to reflect changes to Comic class<commit_after>import pytest

from webcomix.comic import Comic
from webcomix.supported_comics import supported_comics
from webcomix.util import check_first_pages


@pytest.mark.slow
def test_supported_comics():
    for comic_name, comic_info in supported_comics.items():
        comic = Comic(comic_name, *comic_info)
        first_pages = comic.verify_xpath()
        check_first_pages(first_pages)
38775f06c2285f3d12b9f4a0bc70bded29dce274
hbmqtt/utils.py
hbmqtt/utils.py
# Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.


def not_in_dict_or_none(dict, key):
    """
    Check if a key exists in a map and if it's not None
    :param dict: map to look for key
    :param key: key to find
    :return: true if key is in dict and not None
    """
    if key not in dict or dict[key] is None:
        return True
    else:
        return False
# Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.


def not_in_dict_or_none(dict, key):
    """
    Check if a key exists in a map and if it's not None
    :param dict: map to look for key
    :param key: key to find
    :return: true if key is in dict and not None
    """
    if key not in dict or dict[key] is None:
        return True
    else:
        return False


def format_client_message(session=None, address=None, port=None, id=None):
    if session:
        return "(client @=%s:%d id=%s)" % (session.remote_address, session.remote_port, session.client_id)
    else:
        return "(client @=%s:%d id=%s)" % (address, port, id)
Add method for formatting client info (address, port, id)
Add method for formatting client info (address, port, id)
Python
mit
beerfactory/hbmqtt
# Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.


def not_in_dict_or_none(dict, key):
    """
    Check if a key exists in a map and if it's not None
    :param dict: map to look for key
    :param key: key to find
    :return: true if key is in dict and not None
    """
    if key not in dict or dict[key] is None:
        return True
    else:
        return FalseAdd method for formatting client info (address, port, id)
# Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.


def not_in_dict_or_none(dict, key):
    """
    Check if a key exists in a map and if it's not None
    :param dict: map to look for key
    :param key: key to find
    :return: true if key is in dict and not None
    """
    if key not in dict or dict[key] is None:
        return True
    else:
        return False


def format_client_message(session=None, address=None, port=None, id=None):
    if session:
        return "(client @=%s:%d id=%s)" % (session.remote_address, session.remote_port, session.client_id)
    else:
        return "(client @=%s:%d id=%s)" % (address, port, id)
<commit_before># Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.


def not_in_dict_or_none(dict, key):
    """
    Check if a key exists in a map and if it's not None
    :param dict: map to look for key
    :param key: key to find
    :return: true if key is in dict and not None
    """
    if key not in dict or dict[key] is None:
        return True
    else:
        return False<commit_msg>Add method for formatting client info (address, port, id)<commit_after>
# Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.


def not_in_dict_or_none(dict, key):
    """
    Check if a key exists in a map and if it's not None
    :param dict: map to look for key
    :param key: key to find
    :return: true if key is in dict and not None
    """
    if key not in dict or dict[key] is None:
        return True
    else:
        return False


def format_client_message(session=None, address=None, port=None, id=None):
    if session:
        return "(client @=%s:%d id=%s)" % (session.remote_address, session.remote_port, session.client_id)
    else:
        return "(client @=%s:%d id=%s)" % (address, port, id)
# Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.


def not_in_dict_or_none(dict, key):
    """
    Check if a key exists in a map and if it's not None
    :param dict: map to look for key
    :param key: key to find
    :return: true if key is in dict and not None
    """
    if key not in dict or dict[key] is None:
        return True
    else:
        return FalseAdd method for formatting client info (address, port, id)# Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.


def not_in_dict_or_none(dict, key):
    """
    Check if a key exists in a map and if it's not None
    :param dict: map to look for key
    :param key: key to find
    :return: true if key is in dict and not None
    """
    if key not in dict or dict[key] is None:
        return True
    else:
        return False


def format_client_message(session=None, address=None, port=None, id=None):
    if session:
        return "(client @=%s:%d id=%s)" % (session.remote_address, session.remote_port, session.client_id)
    else:
        return "(client @=%s:%d id=%s)" % (address, port, id)
<commit_before># Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.


def not_in_dict_or_none(dict, key):
    """
    Check if a key exists in a map and if it's not None
    :param dict: map to look for key
    :param key: key to find
    :return: true if key is in dict and not None
    """
    if key not in dict or dict[key] is None:
        return True
    else:
        return False<commit_msg>Add method for formatting client info (address, port, id)<commit_after># Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.


def not_in_dict_or_none(dict, key):
    """
    Check if a key exists in a map and if it's not None
    :param dict: map to look for key
    :param key: key to find
    :return: true if key is in dict and not None
    """
    if key not in dict or dict[key] is None:
        return True
    else:
        return False


def format_client_message(session=None, address=None, port=None, id=None):
    if session:
        return "(client @=%s:%d id=%s)" % (session.remote_address, session.remote_port, session.client_id)
    else:
        return "(client @=%s:%d id=%s)" % (address, port, id)
1307d737a73122d948fd106ca39274b7cf505f89
Lib/test/test_threading.py
Lib/test/test_threading.py
# Very rudimentary test of threading module

# Create a bunch of threads, let each do some work, wait until all are done

from test_support import verbose
import random
import threading
import time

numtasks = 10

# no more than 3 of the 10 can run at once
sema = threading.BoundedSemaphore(value=3)
mutex = threading.RLock()
running = 0

class TestThread(threading.Thread):
    def run(self):
        global running
        delay = random.random() * numtasks
        if verbose:
            print 'task', self.getName(), 'will run for', round(delay, 1), 'sec'
        sema.acquire()
        mutex.acquire()
        running = running + 1
        if verbose:
            print running, 'tasks are running'
        mutex.release()
        time.sleep(delay)
        if verbose:
            print 'task', self.getName(), 'done'
        mutex.acquire()
        running = running - 1
        if verbose:
            print self.getName(), 'is finished.', running, 'tasks are running'
        mutex.release()
        sema.release()

threads = []

def starttasks():
    for i in range(numtasks):
        t = TestThread(name="<thread %d>"%i)
        threads.append(t)
        t.start()

starttasks()

print 'waiting for all tasks to complete'
for t in threads:
    t.join()
print 'all tasks done'
# Very rudimentary test of threading module

# Create a bunch of threads, let each do some work, wait until all are done

from test_support import verbose
import random
import threading
import time

# This takes about n/3 seconds to run (about n/3 clumps of tasks, times
# about 1 second per clump).
numtasks = 10

# no more than 3 of the 10 can run at once
sema = threading.BoundedSemaphore(value=3)
mutex = threading.RLock()
running = 0

class TestThread(threading.Thread):
    def run(self):
        global running
        delay = random.random() * 2
        if verbose:
            print 'task', self.getName(), 'will run for', delay, 'sec'
        sema.acquire()
        mutex.acquire()
        running = running + 1
        if verbose:
            print running, 'tasks are running'
        mutex.release()
        time.sleep(delay)
        if verbose:
            print 'task', self.getName(), 'done'
        mutex.acquire()
        running = running - 1
        if verbose:
            print self.getName(), 'is finished.', running, 'tasks are running'
        mutex.release()
        sema.release()

threads = []

def starttasks():
    for i in range(numtasks):
        t = TestThread(name="<thread %d>"%i)
        threads.append(t)
        t.start()

starttasks()

if verbose:
    print 'waiting for all tasks to complete'
for t in threads:
    t.join()
if verbose:
    print 'all tasks done'
Test failed because there was no expected-output file, but always printed to stdout. Repaired by not printing at all except in verbose mode.
Test failed because there was no expected-output file, but always printed to stdout. Repaired by not printing at all except in verbose mode. Made the test about 6x faster -- envelope analysis showed it took time proportional to the square of the # of tasks. Now it's linear.
Python
mit
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
# Very rudimentary test of threading module

# Create a bunch of threads, let each do some work, wait until all are done

from test_support import verbose
import random
import threading
import time

numtasks = 10

# no more than 3 of the 10 can run at once
sema = threading.BoundedSemaphore(value=3)
mutex = threading.RLock()
running = 0

class TestThread(threading.Thread):
    def run(self):
        global running
        delay = random.random() * numtasks
        if verbose:
            print 'task', self.getName(), 'will run for', round(delay, 1), 'sec'
        sema.acquire()
        mutex.acquire()
        running = running + 1
        if verbose:
            print running, 'tasks are running'
        mutex.release()
        time.sleep(delay)
        if verbose:
            print 'task', self.getName(), 'done'
        mutex.acquire()
        running = running - 1
        if verbose:
            print self.getName(), 'is finished.', running, 'tasks are running'
        mutex.release()
        sema.release()

threads = []

def starttasks():
    for i in range(numtasks):
        t = TestThread(name="<thread %d>"%i)
        threads.append(t)
        t.start()

starttasks()

print 'waiting for all tasks to complete'
for t in threads:
    t.join()
print 'all tasks done'
Test failed because there was no expected-output file, but always printed to stdout. Repaired by not printing at all except in verbose mode. Made the test about 6x faster -- envelope analysis showed it took time proportional to the square of the # of tasks. Now it's linear.
# Very rudimentary test of threading module

# Create a bunch of threads, let each do some work, wait until all are done

from test_support import verbose
import random
import threading
import time

# This takes about n/3 seconds to run (about n/3 clumps of tasks, times
# about 1 second per clump).
numtasks = 10

# no more than 3 of the 10 can run at once
sema = threading.BoundedSemaphore(value=3)
mutex = threading.RLock()
running = 0

class TestThread(threading.Thread):
    def run(self):
        global running
        delay = random.random() * 2
        if verbose:
            print 'task', self.getName(), 'will run for', delay, 'sec'
        sema.acquire()
        mutex.acquire()
        running = running + 1
        if verbose:
            print running, 'tasks are running'
        mutex.release()
        time.sleep(delay)
        if verbose:
            print 'task', self.getName(), 'done'
        mutex.acquire()
        running = running - 1
        if verbose:
            print self.getName(), 'is finished.', running, 'tasks are running'
        mutex.release()
        sema.release()

threads = []

def starttasks():
    for i in range(numtasks):
        t = TestThread(name="<thread %d>"%i)
        threads.append(t)
        t.start()

starttasks()

if verbose:
    print 'waiting for all tasks to complete'
for t in threads:
    t.join()
if verbose:
    print 'all tasks done'
<commit_before># Very rudimentary test of threading module

# Create a bunch of threads, let each do some work, wait until all are done

from test_support import verbose
import random
import threading
import time

numtasks = 10

# no more than 3 of the 10 can run at once
sema = threading.BoundedSemaphore(value=3)
mutex = threading.RLock()
running = 0

class TestThread(threading.Thread):
    def run(self):
        global running
        delay = random.random() * numtasks
        if verbose:
            print 'task', self.getName(), 'will run for', round(delay, 1), 'sec'
        sema.acquire()
        mutex.acquire()
        running = running + 1
        if verbose:
            print running, 'tasks are running'
        mutex.release()
        time.sleep(delay)
        if verbose:
            print 'task', self.getName(), 'done'
        mutex.acquire()
        running = running - 1
        if verbose:
            print self.getName(), 'is finished.', running, 'tasks are running'
        mutex.release()
        sema.release()

threads = []

def starttasks():
    for i in range(numtasks):
        t = TestThread(name="<thread %d>"%i)
        threads.append(t)
        t.start()

starttasks()

print 'waiting for all tasks to complete'
for t in threads:
    t.join()
print 'all tasks done'
<commit_msg>Test failed because there was no expected-output file, but always printed to stdout. Repaired by not printing at all except in verbose mode. Made the test about 6x faster -- envelope analysis showed it took time proportional to the square of the # of tasks. Now it's linear.<commit_after>
# Very rudimentary test of threading module

# Create a bunch of threads, let each do some work, wait until all are done

from test_support import verbose
import random
import threading
import time

# This takes about n/3 seconds to run (about n/3 clumps of tasks, times
# about 1 second per clump).
numtasks = 10

# no more than 3 of the 10 can run at once
sema = threading.BoundedSemaphore(value=3)
mutex = threading.RLock()
running = 0

class TestThread(threading.Thread):
    def run(self):
        global running
        delay = random.random() * 2
        if verbose:
            print 'task', self.getName(), 'will run for', delay, 'sec'
        sema.acquire()
        mutex.acquire()
        running = running + 1
        if verbose:
            print running, 'tasks are running'
        mutex.release()
        time.sleep(delay)
        if verbose:
            print 'task', self.getName(), 'done'
        mutex.acquire()
        running = running - 1
        if verbose:
            print self.getName(), 'is finished.', running, 'tasks are running'
        mutex.release()
        sema.release()

threads = []

def starttasks():
    for i in range(numtasks):
        t = TestThread(name="<thread %d>"%i)
        threads.append(t)
        t.start()

starttasks()

if verbose:
    print 'waiting for all tasks to complete'
for t in threads:
    t.join()
if verbose:
    print 'all tasks done'
# Very rudimentary test of threading module

# Create a bunch of threads, let each do some work, wait until all are done

from test_support import verbose
import random
import threading
import time

numtasks = 10

# no more than 3 of the 10 can run at once
sema = threading.BoundedSemaphore(value=3)
mutex = threading.RLock()
running = 0

class TestThread(threading.Thread):
    def run(self):
        global running
        delay = random.random() * numtasks
        if verbose:
            print 'task', self.getName(), 'will run for', round(delay, 1), 'sec'
        sema.acquire()
        mutex.acquire()
        running = running + 1
        if verbose:
            print running, 'tasks are running'
        mutex.release()
        time.sleep(delay)
        if verbose:
            print 'task', self.getName(), 'done'
        mutex.acquire()
        running = running - 1
        if verbose:
            print self.getName(), 'is finished.', running, 'tasks are running'
        mutex.release()
        sema.release()

threads = []

def starttasks():
    for i in range(numtasks):
        t = TestThread(name="<thread %d>"%i)
        threads.append(t)
        t.start()

starttasks()

print 'waiting for all tasks to complete'
for t in threads:
    t.join()
print 'all tasks done'
Test failed because there was no expected-output file, but always printed to stdout. Repaired by not printing at all except in verbose mode. Made the test about 6x faster -- envelope analysis showed it took time proportional to the square of the # of tasks. Now it's linear.# Very rudimentary test of threading module

# Create a bunch of threads, let each do some work, wait until all are done

from test_support import verbose
import random
import threading
import time

# This takes about n/3 seconds to run (about n/3 clumps of tasks, times
# about 1 second per clump).
numtasks = 10

# no more than 3 of the 10 can run at once
sema = threading.BoundedSemaphore(value=3)
mutex = threading.RLock()
running = 0

class TestThread(threading.Thread):
    def run(self):
        global running
        delay = random.random() * 2
        if verbose:
            print 'task', self.getName(), 'will run for', delay, 'sec'
        sema.acquire()
        mutex.acquire()
        running = running + 1
        if verbose:
            print running, 'tasks are running'
        mutex.release()
        time.sleep(delay)
        if verbose:
            print 'task', self.getName(), 'done'
        mutex.acquire()
        running = running - 1
        if verbose:
            print self.getName(), 'is finished.', running, 'tasks are running'
        mutex.release()
        sema.release()

threads = []

def starttasks():
    for i in range(numtasks):
        t = TestThread(name="<thread %d>"%i)
        threads.append(t)
        t.start()

starttasks()

if verbose:
    print 'waiting for all tasks to complete'
for t in threads:
    t.join()
if verbose:
    print 'all tasks done'
<commit_before># Very rudimentary test of threading module

# Create a bunch of threads, let each do some work, wait until all are done

from test_support import verbose
import random
import threading
import time

numtasks = 10

# no more than 3 of the 10 can run at once
sema = threading.BoundedSemaphore(value=3)
mutex = threading.RLock()
running = 0

class TestThread(threading.Thread):
    def run(self):
        global running
        delay = random.random() * numtasks
        if verbose:
            print 'task', self.getName(), 'will run for', round(delay, 1), 'sec'
        sema.acquire()
        mutex.acquire()
        running = running + 1
        if verbose:
            print running, 'tasks are running'
        mutex.release()
        time.sleep(delay)
        if verbose:
            print 'task', self.getName(), 'done'
        mutex.acquire()
        running = running - 1
        if verbose:
            print self.getName(), 'is finished.', running, 'tasks are running'
        mutex.release()
        sema.release()

threads = []

def starttasks():
    for i in range(numtasks):
        t = TestThread(name="<thread %d>"%i)
        threads.append(t)
        t.start()

starttasks()

print 'waiting for all tasks to complete'
for t in threads:
    t.join()
print 'all tasks done'
<commit_msg>Test failed because there was no expected-output file, but always printed to stdout. Repaired by not printing at all except in verbose mode. Made the test about 6x faster -- envelope analysis showed it took time proportional to the square of the # of tasks. Now it's linear.<commit_after># Very rudimentary test of threading module

# Create a bunch of threads, let each do some work, wait until all are done

from test_support import verbose
import random
import threading
import time

# This takes about n/3 seconds to run (about n/3 clumps of tasks, times
# about 1 second per clump).
numtasks = 10

# no more than 3 of the 10 can run at once
sema = threading.BoundedSemaphore(value=3)
mutex = threading.RLock()
running = 0

class TestThread(threading.Thread):
    def run(self):
        global running
        delay = random.random() * 2
        if verbose:
            print 'task', self.getName(), 'will run for', delay, 'sec'
        sema.acquire()
        mutex.acquire()
        running = running + 1
        if verbose:
            print running, 'tasks are running'
        mutex.release()
        time.sleep(delay)
        if verbose:
            print 'task', self.getName(), 'done'
        mutex.acquire()
        running = running - 1
        if verbose:
            print self.getName(), 'is finished.', running, 'tasks are running'
        mutex.release()
        sema.release()

threads = []

def starttasks():
    for i in range(numtasks):
        t = TestThread(name="<thread %d>"%i)
        threads.append(t)
        t.start()

starttasks()

if verbose:
    print 'waiting for all tasks to complete'
for t in threads:
    t.join()
if verbose:
    print 'all tasks done'