Column            Type          Stats
commit            string        length 40 to 40
old_file          string        length 4 to 118
new_file          string        length 4 to 118
old_contents      string        length 0 to 2.94k
new_contents      string        length 1 to 4.43k
subject           string        length 15 to 444
message           string        length 16 to 3.45k
lang              categorical   1 distinct value
license           categorical   13 distinct values
repos             string        length 5 to 43.2k
prompt            string        length 17 to 4.58k
response          string        length 1 to 4.43k
prompt_tagged     string        length 58 to 4.62k
response_tagged   string        length 1 to 4.43k
text              string        length 132 to 7.29k
text_tagged       string        length 173 to 7.33k
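The columns above are the schema of the preview rows that follow. Assuming this preview comes from a Hugging Face-style dataset (the repository name is not shown here, so the path below is a placeholder), the rows could be loaded and inspected like this:

from datasets import load_dataset

# Placeholder repository id; substitute the actual dataset path.
ds = load_dataset("your-org/python-commit-edits", split="train")

row = ds[0]
print(row["commit"])              # 40-character commit hash
print(row["old_file"])            # path of the file before the commit
print(row["subject"])             # short commit subject line
print(row["new_contents"][:200])  # post-commit file contents (truncated)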
f8eb5325b03f09a0b207680c29ba4a8cff89bec8
v2functions/sbqueue-trigger-sbqueue-out-binding/__init__.py
v2functions/sbqueue-trigger-sbqueue-out-binding/__init__.py
import logging import azure.functions as func def main(msgIn: func.ServiceBusMessage, msgOut: func.Out[str]): body = msgIn.get_body().decode('utf-8') logging.info(f'Processed Service Bus Queue message: {body}') msgOut.set(msgbody)
import logging import azure.functions as func def main(msgIn: func.ServiceBusMessage, msgOut: func.Out[str]): body = msgIn.get_body().decode('utf-8') logging.info(f'Processed Service Bus Queue message: {body}') msgOut.set(body)
Fix var name in service bus function
Fix var name in service bus function ...i am a horrible programmer
Python
mit
yokawasa/azure-functions-python-samples,yokawasa/azure-functions-python-samples,yokawasa/azure-functions-python-samples
import logging import azure.functions as func def main(msgIn: func.ServiceBusMessage, msgOut: func.Out[str]): body = msgIn.get_body().decode('utf-8') logging.info(f'Processed Service Bus Queue message: {body}') msgOut.set(msgbody) Fix var name in service bus function ...i am a horrible programmer
import logging import azure.functions as func def main(msgIn: func.ServiceBusMessage, msgOut: func.Out[str]): body = msgIn.get_body().decode('utf-8') logging.info(f'Processed Service Bus Queue message: {body}') msgOut.set(body)
<commit_before>import logging import azure.functions as func def main(msgIn: func.ServiceBusMessage, msgOut: func.Out[str]): body = msgIn.get_body().decode('utf-8') logging.info(f'Processed Service Bus Queue message: {body}') msgOut.set(msgbody) <commit_msg>Fix var name in service bus function ...i am a horrible programmer<commit_after>
import logging import azure.functions as func def main(msgIn: func.ServiceBusMessage, msgOut: func.Out[str]): body = msgIn.get_body().decode('utf-8') logging.info(f'Processed Service Bus Queue message: {body}') msgOut.set(body)
import logging import azure.functions as func def main(msgIn: func.ServiceBusMessage, msgOut: func.Out[str]): body = msgIn.get_body().decode('utf-8') logging.info(f'Processed Service Bus Queue message: {body}') msgOut.set(msgbody) Fix var name in service bus function ...i am a horrible programmerimport logging import azure.functions as func def main(msgIn: func.ServiceBusMessage, msgOut: func.Out[str]): body = msgIn.get_body().decode('utf-8') logging.info(f'Processed Service Bus Queue message: {body}') msgOut.set(body)
<commit_before>import logging import azure.functions as func def main(msgIn: func.ServiceBusMessage, msgOut: func.Out[str]): body = msgIn.get_body().decode('utf-8') logging.info(f'Processed Service Bus Queue message: {body}') msgOut.set(msgbody) <commit_msg>Fix var name in service bus function ...i am a horrible programmer<commit_after>import logging import azure.functions as func def main(msgIn: func.ServiceBusMessage, msgOut: func.Out[str]): body = msgIn.get_body().decode('utf-8') logging.info(f'Processed Service Bus Queue message: {body}') msgOut.set(body)
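In the row above, the derived columns appear to be simple concatenations of the base columns: prompt is old_contents followed by message, the *_tagged variants wrap the same pieces in <commit_before>, <commit_msg> and <commit_after> markers, and text / text_tagged append new_contents as the completion. A minimal sketch of that templating, assuming plain string concatenation with the whitespace seen in this preview (the dataset's actual generation script is not shown):

def build_derived_columns(old_contents, message, new_contents):
    # Untagged pair: before-code plus commit message; the answer is the after-code.
    prompt = f"{old_contents} {message}"
    response = new_contents
    # Tagged variants wrap the same pieces in explicit markers.
    prompt_tagged = f"<commit_before>{old_contents} <commit_msg>{message}<commit_after>"
    response_tagged = new_contents
    # Full-text variants concatenate prompt and response into one training string.
    text = prompt + response
    text_tagged = prompt_tagged + response_tagged
    return {
        "prompt": prompt,
        "response": response,
        "prompt_tagged": prompt_tagged,
        "response_tagged": response_tagged,
        "text": text,
        "text_tagged": text_tagged,
    }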
dc18e64cd4ecaf624f62438a307cebe14bfbbad8
slack/views.py
slack/views.py
from flask import Flask, request import requests from urllib import urlencode app = Flask(__name__) @app.route("/", methods=['POST']) def meme(): form = request.form.to_dict() slackbot = form["slackbot"] text = form["text"] channel = form["channel_name"] text = text[:-1] if text[-1] == ";" else text params = text.split(";") params = [x.strip().replace(" ", "-") for x in params] params = [urlencode(x) for x in params] if not len(params) == 3: response = "Your syntax should be in the form: /meme template; top; bottom;" else: template = params[0] top = params[1] bottom = params[2] response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom) url = "https://neo4j.slack.com/services/hooks/slackbot?token={0}&channel=%23{1}".format(slackbot, channel) requests.post(url, data=response)
from flask import Flask, request import requests from urllib import urlencode app = Flask(__name__) @app.route("/") def meme(): slackbot = request.args["slackbot"] text = request.args["text"] channel = request.args["channel_name"] text = text[:-1] if text[-1] == ";" else text params = text.split(";") params = [x.strip().replace(" ", "-") for x in params] params = [urlencode(x) for x in params] if not len(params) == 3: response = "Your syntax should be in the form: /meme template; top; bottom;" else: template = params[0] top = params[1] bottom = params[2] response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom) url = "https://neo4j.slack.com/services/hooks/slackbot?token={0}&channel=%23{1}".format(slackbot, channel) requests.post(url, data=response)
Use a GET request instead
Use a GET request instead
Python
mit
joeynebula/slack-meme,tezzutezzu/slack-meme,DuaneGarber/slack-meme,nicolewhite/slack-meme
from flask import Flask, request import requests from urllib import urlencode app = Flask(__name__) @app.route("/", methods=['POST']) def meme(): form = request.form.to_dict() slackbot = form["slackbot"] text = form["text"] channel = form["channel_name"] text = text[:-1] if text[-1] == ";" else text params = text.split(";") params = [x.strip().replace(" ", "-") for x in params] params = [urlencode(x) for x in params] if not len(params) == 3: response = "Your syntax should be in the form: /meme template; top; bottom;" else: template = params[0] top = params[1] bottom = params[2] response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom) url = "https://neo4j.slack.com/services/hooks/slackbot?token={0}&channel=%23{1}".format(slackbot, channel) requests.post(url, data=response)Use a GET request instead
from flask import Flask, request import requests from urllib import urlencode app = Flask(__name__) @app.route("/") def meme(): slackbot = request.args["slackbot"] text = request.args["text"] channel = request.args["channel_name"] text = text[:-1] if text[-1] == ";" else text params = text.split(";") params = [x.strip().replace(" ", "-") for x in params] params = [urlencode(x) for x in params] if not len(params) == 3: response = "Your syntax should be in the form: /meme template; top; bottom;" else: template = params[0] top = params[1] bottom = params[2] response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom) url = "https://neo4j.slack.com/services/hooks/slackbot?token={0}&channel=%23{1}".format(slackbot, channel) requests.post(url, data=response)
<commit_before>from flask import Flask, request import requests from urllib import urlencode app = Flask(__name__) @app.route("/", methods=['POST']) def meme(): form = request.form.to_dict() slackbot = form["slackbot"] text = form["text"] channel = form["channel_name"] text = text[:-1] if text[-1] == ";" else text params = text.split(";") params = [x.strip().replace(" ", "-") for x in params] params = [urlencode(x) for x in params] if not len(params) == 3: response = "Your syntax should be in the form: /meme template; top; bottom;" else: template = params[0] top = params[1] bottom = params[2] response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom) url = "https://neo4j.slack.com/services/hooks/slackbot?token={0}&channel=%23{1}".format(slackbot, channel) requests.post(url, data=response)<commit_msg>Use a GET request instead<commit_after>
from flask import Flask, request import requests from urllib import urlencode app = Flask(__name__) @app.route("/") def meme(): slackbot = request.args["slackbot"] text = request.args["text"] channel = request.args["channel_name"] text = text[:-1] if text[-1] == ";" else text params = text.split(";") params = [x.strip().replace(" ", "-") for x in params] params = [urlencode(x) for x in params] if not len(params) == 3: response = "Your syntax should be in the form: /meme template; top; bottom;" else: template = params[0] top = params[1] bottom = params[2] response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom) url = "https://neo4j.slack.com/services/hooks/slackbot?token={0}&channel=%23{1}".format(slackbot, channel) requests.post(url, data=response)
from flask import Flask, request import requests from urllib import urlencode app = Flask(__name__) @app.route("/", methods=['POST']) def meme(): form = request.form.to_dict() slackbot = form["slackbot"] text = form["text"] channel = form["channel_name"] text = text[:-1] if text[-1] == ";" else text params = text.split(";") params = [x.strip().replace(" ", "-") for x in params] params = [urlencode(x) for x in params] if not len(params) == 3: response = "Your syntax should be in the form: /meme template; top; bottom;" else: template = params[0] top = params[1] bottom = params[2] response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom) url = "https://neo4j.slack.com/services/hooks/slackbot?token={0}&channel=%23{1}".format(slackbot, channel) requests.post(url, data=response)Use a GET request insteadfrom flask import Flask, request import requests from urllib import urlencode app = Flask(__name__) @app.route("/") def meme(): slackbot = request.args["slackbot"] text = request.args["text"] channel = request.args["channel_name"] text = text[:-1] if text[-1] == ";" else text params = text.split(";") params = [x.strip().replace(" ", "-") for x in params] params = [urlencode(x) for x in params] if not len(params) == 3: response = "Your syntax should be in the form: /meme template; top; bottom;" else: template = params[0] top = params[1] bottom = params[2] response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom) url = "https://neo4j.slack.com/services/hooks/slackbot?token={0}&channel=%23{1}".format(slackbot, channel) requests.post(url, data=response)
<commit_before>from flask import Flask, request import requests from urllib import urlencode app = Flask(__name__) @app.route("/", methods=['POST']) def meme(): form = request.form.to_dict() slackbot = form["slackbot"] text = form["text"] channel = form["channel_name"] text = text[:-1] if text[-1] == ";" else text params = text.split(";") params = [x.strip().replace(" ", "-") for x in params] params = [urlencode(x) for x in params] if not len(params) == 3: response = "Your syntax should be in the form: /meme template; top; bottom;" else: template = params[0] top = params[1] bottom = params[2] response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom) url = "https://neo4j.slack.com/services/hooks/slackbot?token={0}&channel=%23{1}".format(slackbot, channel) requests.post(url, data=response)<commit_msg>Use a GET request instead<commit_after>from flask import Flask, request import requests from urllib import urlencode app = Flask(__name__) @app.route("/") def meme(): slackbot = request.args["slackbot"] text = request.args["text"] channel = request.args["channel_name"] text = text[:-1] if text[-1] == ";" else text params = text.split(";") params = [x.strip().replace(" ", "-") for x in params] params = [urlencode(x) for x in params] if not len(params) == 3: response = "Your syntax should be in the form: /meme template; top; bottom;" else: template = params[0] top = params[1] bottom = params[2] response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom) url = "https://neo4j.slack.com/services/hooks/slackbot?token={0}&channel=%23{1}".format(slackbot, channel) requests.post(url, data=response)
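Going the other way, a text_tagged cell (such as the one above) can be split back into its before-code, commit message, and after-code parts by matching the three markers. A small parsing sketch, assuming the markers occur exactly once and in the order shown in these rows:

import re

TAGGED_RE = re.compile(
    r"<commit_before>(?P<before>.*?)<commit_msg>(?P<msg>.*?)<commit_after>(?P<after>.*)",
    re.DOTALL,
)

def parse_text_tagged(cell):
    # Recover (old_contents, message, new_contents) from a tagged training string.
    match = TAGGED_RE.fullmatch(cell)
    if match is None:
        raise ValueError("cell does not follow the commit_before/commit_msg/commit_after layout")
    return match.group("before"), match.group("msg"), match.group("after")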
acaefa673edbbaa89dd51444a90e5c61bd952cc3
Demo/scripts/mpzpi.py
Demo/scripts/mpzpi.py
#! /usr/bin/env python # Print digits of pi forever. # # The algorithm, using Python's 'long' integers ("bignums"), works # with continued fractions, and was conceived by Lambert Meertens. # # See also the ABC Programmer's Handbook, by Geurts, Meertens & Pemberton, # published by Prentice-Hall (UK) Ltd., 1990. import sys def main(): k, a, b, a1, b1 = 2, 4, 1, 12, 4 while 1: # Next approximation p, q, k = k*k, 2*k+1, k+1 a, b, a1, b1 = a1, b1, p*a+q*a1, p*b+q*b1 # Print common digits d, d1 = a/b, a1/b1 while d == d1: output(d) a, a1 = 10*(a%b), 10*(a1%b1) d, d1 = a/b, a1/b1 def output(d): # Use write() to avoid spaces between the digits # Use int(d) to avoid a trailing L after each digit sys.stdout.write(`int(d)`) # Flush so the output is seen immediately sys.stdout.flush() main()
#! /usr/bin/env python # Print digits of pi forever. # # The algorithm, using Python's 'long' integers ("bignums"), works # with continued fractions, and was conceived by Lambert Meertens. # # See also the ABC Programmer's Handbook, by Geurts, Meertens & Pemberton, # published by Prentice-Hall (UK) Ltd., 1990. import sys from mpz import mpz def main(): mpzone, mpztwo, mpzten = mpz(1), mpz(2), mpz(10) k, a, b, a1, b1 = mpz(2), mpz(4), mpz(1), mpz(12), mpz(4) while 1: # Next approximation p, q, k = k*k, mpztwo*k+mpzone, k+mpzone a, b, a1, b1 = a1, b1, p*a+q*a1, p*b+q*b1 # Print common digits d, d1 = a/b, a1/b1 while d == d1: output(d) a, a1 = mpzten*(a%b), mpzten*(a1%b1) d, d1 = a/b, a1/b1 def output(d): # Use write() to avoid spaces between the digits # Use int(d) to avoid a trailing L after each digit sys.stdout.write(`int(d)`) # Flush so the output is seen immediately sys.stdout.flush() main()
Revert previous change which didn't make sense the next day :-)
Revert previous change which didn't make sense the next day :-)
Python
mit
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
#! /usr/bin/env python # Print digits of pi forever. # # The algorithm, using Python's 'long' integers ("bignums"), works # with continued fractions, and was conceived by Lambert Meertens. # # See also the ABC Programmer's Handbook, by Geurts, Meertens & Pemberton, # published by Prentice-Hall (UK) Ltd., 1990. import sys def main(): k, a, b, a1, b1 = 2, 4, 1, 12, 4 while 1: # Next approximation p, q, k = k*k, 2*k+1, k+1 a, b, a1, b1 = a1, b1, p*a+q*a1, p*b+q*b1 # Print common digits d, d1 = a/b, a1/b1 while d == d1: output(d) a, a1 = 10*(a%b), 10*(a1%b1) d, d1 = a/b, a1/b1 def output(d): # Use write() to avoid spaces between the digits # Use int(d) to avoid a trailing L after each digit sys.stdout.write(`int(d)`) # Flush so the output is seen immediately sys.stdout.flush() main() Revert previous change which didn't make sense the next day :-)
#! /usr/bin/env python # Print digits of pi forever. # # The algorithm, using Python's 'long' integers ("bignums"), works # with continued fractions, and was conceived by Lambert Meertens. # # See also the ABC Programmer's Handbook, by Geurts, Meertens & Pemberton, # published by Prentice-Hall (UK) Ltd., 1990. import sys from mpz import mpz def main(): mpzone, mpztwo, mpzten = mpz(1), mpz(2), mpz(10) k, a, b, a1, b1 = mpz(2), mpz(4), mpz(1), mpz(12), mpz(4) while 1: # Next approximation p, q, k = k*k, mpztwo*k+mpzone, k+mpzone a, b, a1, b1 = a1, b1, p*a+q*a1, p*b+q*b1 # Print common digits d, d1 = a/b, a1/b1 while d == d1: output(d) a, a1 = mpzten*(a%b), mpzten*(a1%b1) d, d1 = a/b, a1/b1 def output(d): # Use write() to avoid spaces between the digits # Use int(d) to avoid a trailing L after each digit sys.stdout.write(`int(d)`) # Flush so the output is seen immediately sys.stdout.flush() main()
<commit_before>#! /usr/bin/env python # Print digits of pi forever. # # The algorithm, using Python's 'long' integers ("bignums"), works # with continued fractions, and was conceived by Lambert Meertens. # # See also the ABC Programmer's Handbook, by Geurts, Meertens & Pemberton, # published by Prentice-Hall (UK) Ltd., 1990. import sys def main(): k, a, b, a1, b1 = 2, 4, 1, 12, 4 while 1: # Next approximation p, q, k = k*k, 2*k+1, k+1 a, b, a1, b1 = a1, b1, p*a+q*a1, p*b+q*b1 # Print common digits d, d1 = a/b, a1/b1 while d == d1: output(d) a, a1 = 10*(a%b), 10*(a1%b1) d, d1 = a/b, a1/b1 def output(d): # Use write() to avoid spaces between the digits # Use int(d) to avoid a trailing L after each digit sys.stdout.write(`int(d)`) # Flush so the output is seen immediately sys.stdout.flush() main() <commit_msg>Revert previous change which didn't make sense the next day :-)<commit_after>
#! /usr/bin/env python # Print digits of pi forever. # # The algorithm, using Python's 'long' integers ("bignums"), works # with continued fractions, and was conceived by Lambert Meertens. # # See also the ABC Programmer's Handbook, by Geurts, Meertens & Pemberton, # published by Prentice-Hall (UK) Ltd., 1990. import sys from mpz import mpz def main(): mpzone, mpztwo, mpzten = mpz(1), mpz(2), mpz(10) k, a, b, a1, b1 = mpz(2), mpz(4), mpz(1), mpz(12), mpz(4) while 1: # Next approximation p, q, k = k*k, mpztwo*k+mpzone, k+mpzone a, b, a1, b1 = a1, b1, p*a+q*a1, p*b+q*b1 # Print common digits d, d1 = a/b, a1/b1 while d == d1: output(d) a, a1 = mpzten*(a%b), mpzten*(a1%b1) d, d1 = a/b, a1/b1 def output(d): # Use write() to avoid spaces between the digits # Use int(d) to avoid a trailing L after each digit sys.stdout.write(`int(d)`) # Flush so the output is seen immediately sys.stdout.flush() main()
#! /usr/bin/env python # Print digits of pi forever. # # The algorithm, using Python's 'long' integers ("bignums"), works # with continued fractions, and was conceived by Lambert Meertens. # # See also the ABC Programmer's Handbook, by Geurts, Meertens & Pemberton, # published by Prentice-Hall (UK) Ltd., 1990. import sys def main(): k, a, b, a1, b1 = 2, 4, 1, 12, 4 while 1: # Next approximation p, q, k = k*k, 2*k+1, k+1 a, b, a1, b1 = a1, b1, p*a+q*a1, p*b+q*b1 # Print common digits d, d1 = a/b, a1/b1 while d == d1: output(d) a, a1 = 10*(a%b), 10*(a1%b1) d, d1 = a/b, a1/b1 def output(d): # Use write() to avoid spaces between the digits # Use int(d) to avoid a trailing L after each digit sys.stdout.write(`int(d)`) # Flush so the output is seen immediately sys.stdout.flush() main() Revert previous change which didn't make sense the next day :-)#! /usr/bin/env python # Print digits of pi forever. # # The algorithm, using Python's 'long' integers ("bignums"), works # with continued fractions, and was conceived by Lambert Meertens. # # See also the ABC Programmer's Handbook, by Geurts, Meertens & Pemberton, # published by Prentice-Hall (UK) Ltd., 1990. import sys from mpz import mpz def main(): mpzone, mpztwo, mpzten = mpz(1), mpz(2), mpz(10) k, a, b, a1, b1 = mpz(2), mpz(4), mpz(1), mpz(12), mpz(4) while 1: # Next approximation p, q, k = k*k, mpztwo*k+mpzone, k+mpzone a, b, a1, b1 = a1, b1, p*a+q*a1, p*b+q*b1 # Print common digits d, d1 = a/b, a1/b1 while d == d1: output(d) a, a1 = mpzten*(a%b), mpzten*(a1%b1) d, d1 = a/b, a1/b1 def output(d): # Use write() to avoid spaces between the digits # Use int(d) to avoid a trailing L after each digit sys.stdout.write(`int(d)`) # Flush so the output is seen immediately sys.stdout.flush() main()
<commit_before>#! /usr/bin/env python # Print digits of pi forever. # # The algorithm, using Python's 'long' integers ("bignums"), works # with continued fractions, and was conceived by Lambert Meertens. # # See also the ABC Programmer's Handbook, by Geurts, Meertens & Pemberton, # published by Prentice-Hall (UK) Ltd., 1990. import sys def main(): k, a, b, a1, b1 = 2, 4, 1, 12, 4 while 1: # Next approximation p, q, k = k*k, 2*k+1, k+1 a, b, a1, b1 = a1, b1, p*a+q*a1, p*b+q*b1 # Print common digits d, d1 = a/b, a1/b1 while d == d1: output(d) a, a1 = 10*(a%b), 10*(a1%b1) d, d1 = a/b, a1/b1 def output(d): # Use write() to avoid spaces between the digits # Use int(d) to avoid a trailing L after each digit sys.stdout.write(`int(d)`) # Flush so the output is seen immediately sys.stdout.flush() main() <commit_msg>Revert previous change which didn't make sense the next day :-)<commit_after>#! /usr/bin/env python # Print digits of pi forever. # # The algorithm, using Python's 'long' integers ("bignums"), works # with continued fractions, and was conceived by Lambert Meertens. # # See also the ABC Programmer's Handbook, by Geurts, Meertens & Pemberton, # published by Prentice-Hall (UK) Ltd., 1990. import sys from mpz import mpz def main(): mpzone, mpztwo, mpzten = mpz(1), mpz(2), mpz(10) k, a, b, a1, b1 = mpz(2), mpz(4), mpz(1), mpz(12), mpz(4) while 1: # Next approximation p, q, k = k*k, mpztwo*k+mpzone, k+mpzone a, b, a1, b1 = a1, b1, p*a+q*a1, p*b+q*b1 # Print common digits d, d1 = a/b, a1/b1 while d == d1: output(d) a, a1 = mpzten*(a%b), mpzten*(a1%b1) d, d1 = a/b, a1/b1 def output(d): # Use write() to avoid spaces between the digits # Use int(d) to avoid a trailing L after each digit sys.stdout.write(`int(d)`) # Flush so the output is seen immediately sys.stdout.flush() main()
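The repos column packs every repository containing the file into one comma-separated string (see the repos cells in the rows above), and license is one of 13 categorical values. A short sketch of exploding repos and filtering rows by license, assuming the column stays comma-delimited and repository names contain no embedded commas:

def iter_repos(row):
    # Split the comma-joined repos cell into individual "owner/name" entries.
    return [r.strip() for r in row["repos"].split(",") if r.strip()]

def filter_by_license(rows, wanted="mit"):
    # Keep only rows whose license matches; deduplicate the repositories they mention.
    repos = set()
    for row in rows:
        if row["license"] == wanted:
            repos.update(iter_repos(row))
    return sorted(repos)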
e24674011454ce60bf1c4582af25262ae277771c
spreadchimp.py
spreadchimp.py
import os import xlrd import xlwt # Assumes the directory with the workbook is relative to the script's location. directory = 'workbooks/' workbook = '' for dirpath, dirnames, filenames in os.walk(directory): for files in filenames: workbook = (dirpath + files) ''' Test films include: Repulsion The Crying Game Saint Joan My LIfe as a Zucchini ''' # Collects the names of all the films for which individual workbooks need to be # created. films = [] wbs = int(input('Number of workbooks to create: ')) for wb in range(0, wbs): film = input('Name the film: ') films.append(film) def create_workbook(): '''Creates a workbook. Arguments: none Returns: book = The workbook that was opened. ''' book = xlwt.Workbook() sheet = book.add_sheet('Contacts') return book def save_workbook(book, film): '''Saves the workbook created in create_workbook(). Arguments: book = The object returned by calling create_workbook(). film = The name of the film that will part of the workbook's name. Returns: none ''' book.save('{0} contacts'.format(film)) for film in films: save_workbook(create_workbook(), film)
import os import xlrd import xlwt # Assumes the directory with the workbook is relative to the script's location. directory = 'workbooks/' workbook = '' for dirpath, dirnames, filenames in os.walk(directory): for files in filenames: workbook = (dirpath + files) ''' Test films include: Repulsion The Crying Game Saint Joan My LIfe as a Zucchini ''' # Collects the names of all the films for which individual workbooks need to be # created. films = [] wbs = int(input('Number of workbooks to create: ')) for wb in range(0, wbs): film = input('Name the film: ') films.append(film) def create_workbook(): '''Creates a workbook. Arguments: none Returns: book = The workbook that was opened. ''' headers = ['First Name', 'Last Name', 'Email', 'Phone', 'Full address'] book = xlwt.Workbook() sheet = book.add_sheet('Contacts') column_number = 0 for header in headers: sheet.write(0, column_number, header) column_number += 1 return book def save_workbook(book, film): '''Saves the workbook created in create_workbook(). Arguments: book = The object returned by calling create_workbook(). film = The name of the film that will part of the workbook's name. Returns: none ''' book.save('{0} contacts.xls'.format(film)) for film in films: save_workbook(create_workbook(), film)
Add header row to worksheet in workbooks
Add header row to worksheet in workbooks
Python
mit
deadlyraptor/reels
import os import xlrd import xlwt # Assumes the directory with the workbook is relative to the script's location. directory = 'workbooks/' workbook = '' for dirpath, dirnames, filenames in os.walk(directory): for files in filenames: workbook = (dirpath + files) ''' Test films include: Repulsion The Crying Game Saint Joan My LIfe as a Zucchini ''' # Collects the names of all the films for which individual workbooks need to be # created. films = [] wbs = int(input('Number of workbooks to create: ')) for wb in range(0, wbs): film = input('Name the film: ') films.append(film) def create_workbook(): '''Creates a workbook. Arguments: none Returns: book = The workbook that was opened. ''' book = xlwt.Workbook() sheet = book.add_sheet('Contacts') return book def save_workbook(book, film): '''Saves the workbook created in create_workbook(). Arguments: book = The object returned by calling create_workbook(). film = The name of the film that will part of the workbook's name. Returns: none ''' book.save('{0} contacts'.format(film)) for film in films: save_workbook(create_workbook(), film) Add header row to worksheet in workbooks
import os import xlrd import xlwt # Assumes the directory with the workbook is relative to the script's location. directory = 'workbooks/' workbook = '' for dirpath, dirnames, filenames in os.walk(directory): for files in filenames: workbook = (dirpath + files) ''' Test films include: Repulsion The Crying Game Saint Joan My LIfe as a Zucchini ''' # Collects the names of all the films for which individual workbooks need to be # created. films = [] wbs = int(input('Number of workbooks to create: ')) for wb in range(0, wbs): film = input('Name the film: ') films.append(film) def create_workbook(): '''Creates a workbook. Arguments: none Returns: book = The workbook that was opened. ''' headers = ['First Name', 'Last Name', 'Email', 'Phone', 'Full address'] book = xlwt.Workbook() sheet = book.add_sheet('Contacts') column_number = 0 for header in headers: sheet.write(0, column_number, header) column_number += 1 return book def save_workbook(book, film): '''Saves the workbook created in create_workbook(). Arguments: book = The object returned by calling create_workbook(). film = The name of the film that will part of the workbook's name. Returns: none ''' book.save('{0} contacts.xls'.format(film)) for film in films: save_workbook(create_workbook(), film)
<commit_before>import os import xlrd import xlwt # Assumes the directory with the workbook is relative to the script's location. directory = 'workbooks/' workbook = '' for dirpath, dirnames, filenames in os.walk(directory): for files in filenames: workbook = (dirpath + files) ''' Test films include: Repulsion The Crying Game Saint Joan My LIfe as a Zucchini ''' # Collects the names of all the films for which individual workbooks need to be # created. films = [] wbs = int(input('Number of workbooks to create: ')) for wb in range(0, wbs): film = input('Name the film: ') films.append(film) def create_workbook(): '''Creates a workbook. Arguments: none Returns: book = The workbook that was opened. ''' book = xlwt.Workbook() sheet = book.add_sheet('Contacts') return book def save_workbook(book, film): '''Saves the workbook created in create_workbook(). Arguments: book = The object returned by calling create_workbook(). film = The name of the film that will part of the workbook's name. Returns: none ''' book.save('{0} contacts'.format(film)) for film in films: save_workbook(create_workbook(), film) <commit_msg>Add header row to worksheet in workbooks<commit_after>
import os import xlrd import xlwt # Assumes the directory with the workbook is relative to the script's location. directory = 'workbooks/' workbook = '' for dirpath, dirnames, filenames in os.walk(directory): for files in filenames: workbook = (dirpath + files) ''' Test films include: Repulsion The Crying Game Saint Joan My LIfe as a Zucchini ''' # Collects the names of all the films for which individual workbooks need to be # created. films = [] wbs = int(input('Number of workbooks to create: ')) for wb in range(0, wbs): film = input('Name the film: ') films.append(film) def create_workbook(): '''Creates a workbook. Arguments: none Returns: book = The workbook that was opened. ''' headers = ['First Name', 'Last Name', 'Email', 'Phone', 'Full address'] book = xlwt.Workbook() sheet = book.add_sheet('Contacts') column_number = 0 for header in headers: sheet.write(0, column_number, header) column_number += 1 return book def save_workbook(book, film): '''Saves the workbook created in create_workbook(). Arguments: book = The object returned by calling create_workbook(). film = The name of the film that will part of the workbook's name. Returns: none ''' book.save('{0} contacts.xls'.format(film)) for film in films: save_workbook(create_workbook(), film)
import os import xlrd import xlwt # Assumes the directory with the workbook is relative to the script's location. directory = 'workbooks/' workbook = '' for dirpath, dirnames, filenames in os.walk(directory): for files in filenames: workbook = (dirpath + files) ''' Test films include: Repulsion The Crying Game Saint Joan My LIfe as a Zucchini ''' # Collects the names of all the films for which individual workbooks need to be # created. films = [] wbs = int(input('Number of workbooks to create: ')) for wb in range(0, wbs): film = input('Name the film: ') films.append(film) def create_workbook(): '''Creates a workbook. Arguments: none Returns: book = The workbook that was opened. ''' book = xlwt.Workbook() sheet = book.add_sheet('Contacts') return book def save_workbook(book, film): '''Saves the workbook created in create_workbook(). Arguments: book = The object returned by calling create_workbook(). film = The name of the film that will part of the workbook's name. Returns: none ''' book.save('{0} contacts'.format(film)) for film in films: save_workbook(create_workbook(), film) Add header row to worksheet in workbooksimport os import xlrd import xlwt # Assumes the directory with the workbook is relative to the script's location. directory = 'workbooks/' workbook = '' for dirpath, dirnames, filenames in os.walk(directory): for files in filenames: workbook = (dirpath + files) ''' Test films include: Repulsion The Crying Game Saint Joan My LIfe as a Zucchini ''' # Collects the names of all the films for which individual workbooks need to be # created. films = [] wbs = int(input('Number of workbooks to create: ')) for wb in range(0, wbs): film = input('Name the film: ') films.append(film) def create_workbook(): '''Creates a workbook. Arguments: none Returns: book = The workbook that was opened. ''' headers = ['First Name', 'Last Name', 'Email', 'Phone', 'Full address'] book = xlwt.Workbook() sheet = book.add_sheet('Contacts') column_number = 0 for header in headers: sheet.write(0, column_number, header) column_number += 1 return book def save_workbook(book, film): '''Saves the workbook created in create_workbook(). Arguments: book = The object returned by calling create_workbook(). film = The name of the film that will part of the workbook's name. Returns: none ''' book.save('{0} contacts.xls'.format(film)) for film in films: save_workbook(create_workbook(), film)
<commit_before>import os import xlrd import xlwt # Assumes the directory with the workbook is relative to the script's location. directory = 'workbooks/' workbook = '' for dirpath, dirnames, filenames in os.walk(directory): for files in filenames: workbook = (dirpath + files) ''' Test films include: Repulsion The Crying Game Saint Joan My LIfe as a Zucchini ''' # Collects the names of all the films for which individual workbooks need to be # created. films = [] wbs = int(input('Number of workbooks to create: ')) for wb in range(0, wbs): film = input('Name the film: ') films.append(film) def create_workbook(): '''Creates a workbook. Arguments: none Returns: book = The workbook that was opened. ''' book = xlwt.Workbook() sheet = book.add_sheet('Contacts') return book def save_workbook(book, film): '''Saves the workbook created in create_workbook(). Arguments: book = The object returned by calling create_workbook(). film = The name of the film that will part of the workbook's name. Returns: none ''' book.save('{0} contacts'.format(film)) for film in films: save_workbook(create_workbook(), film) <commit_msg>Add header row to worksheet in workbooks<commit_after>import os import xlrd import xlwt # Assumes the directory with the workbook is relative to the script's location. directory = 'workbooks/' workbook = '' for dirpath, dirnames, filenames in os.walk(directory): for files in filenames: workbook = (dirpath + files) ''' Test films include: Repulsion The Crying Game Saint Joan My LIfe as a Zucchini ''' # Collects the names of all the films for which individual workbooks need to be # created. films = [] wbs = int(input('Number of workbooks to create: ')) for wb in range(0, wbs): film = input('Name the film: ') films.append(film) def create_workbook(): '''Creates a workbook. Arguments: none Returns: book = The workbook that was opened. ''' headers = ['First Name', 'Last Name', 'Email', 'Phone', 'Full address'] book = xlwt.Workbook() sheet = book.add_sheet('Contacts') column_number = 0 for header in headers: sheet.write(0, column_number, header) column_number += 1 return book def save_workbook(book, film): '''Saves the workbook created in create_workbook(). Arguments: book = The object returned by calling create_workbook(). film = The name of the film that will part of the workbook's name. Returns: none ''' book.save('{0} contacts.xls'.format(film)) for film in films: save_workbook(create_workbook(), film)
51a614025806756b33b0c9764fd91b3e2405570b
python/ql/test/experimental/query-tests/Security/CWE-943/mongoengine_good.py
python/ql/test/experimental/query-tests/Security/CWE-943/mongoengine_good.py
from flask import Flask, request import mongoengine as me from mongoengine.connection import get_db, connect from mongosanitizer.sanitizer import sanitize import json app = Flask(__name__) class Movie(me.Document): title = me.StringField(required=True) Movie(title='test').save() @app.route("/connect_find") def connect_find(): unsafe_search = request.args['search'] json_search = json.loads(unsafe_search) safe_search = sanitize(json_search) db = me.connect('mydb') return db.movie.find({'name': json_search}) # if __name__ == "__main__": # app.run(debug=True)
from flask import Flask, request import mongoengine as me from mongoengine.connection import get_db, connect from mongosanitizer.sanitizer import sanitize import json app = Flask(__name__) class Movie(me.Document): title = me.StringField(required=True) Movie(title='test').save() @app.route("/connect_find") def connect_find(): unsafe_search = request.args['search'] json_search = json.loads(unsafe_search) safe_search = sanitize(json_search) db = me.connect('mydb') return db.movie.find({'name': safe_search}) # if __name__ == "__main__": # app.run(debug=True)
Change variable name to correct sanitized input variable
Change variable name to correct sanitized input variable Co-authored-by: Rasmus Wriedt Larsen <6dfdada9c346ecb5eceda0aac2a0eed555506730@gmail.com>
Python
mit
github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql
from flask import Flask, request import mongoengine as me from mongoengine.connection import get_db, connect from mongosanitizer.sanitizer import sanitize import json app = Flask(__name__) class Movie(me.Document): title = me.StringField(required=True) Movie(title='test').save() @app.route("/connect_find") def connect_find(): unsafe_search = request.args['search'] json_search = json.loads(unsafe_search) safe_search = sanitize(json_search) db = me.connect('mydb') return db.movie.find({'name': json_search}) # if __name__ == "__main__": # app.run(debug=True) Change variable name to correct sanitized input variable Co-authored-by: Rasmus Wriedt Larsen <6dfdada9c346ecb5eceda0aac2a0eed555506730@gmail.com>
from flask import Flask, request import mongoengine as me from mongoengine.connection import get_db, connect from mongosanitizer.sanitizer import sanitize import json app = Flask(__name__) class Movie(me.Document): title = me.StringField(required=True) Movie(title='test').save() @app.route("/connect_find") def connect_find(): unsafe_search = request.args['search'] json_search = json.loads(unsafe_search) safe_search = sanitize(json_search) db = me.connect('mydb') return db.movie.find({'name': safe_search}) # if __name__ == "__main__": # app.run(debug=True)
<commit_before>from flask import Flask, request import mongoengine as me from mongoengine.connection import get_db, connect from mongosanitizer.sanitizer import sanitize import json app = Flask(__name__) class Movie(me.Document): title = me.StringField(required=True) Movie(title='test').save() @app.route("/connect_find") def connect_find(): unsafe_search = request.args['search'] json_search = json.loads(unsafe_search) safe_search = sanitize(json_search) db = me.connect('mydb') return db.movie.find({'name': json_search}) # if __name__ == "__main__": # app.run(debug=True) <commit_msg>Change variable name to correct sanitized input variable Co-authored-by: Rasmus Wriedt Larsen <6dfdada9c346ecb5eceda0aac2a0eed555506730@gmail.com><commit_after>
from flask import Flask, request import mongoengine as me from mongoengine.connection import get_db, connect from mongosanitizer.sanitizer import sanitize import json app = Flask(__name__) class Movie(me.Document): title = me.StringField(required=True) Movie(title='test').save() @app.route("/connect_find") def connect_find(): unsafe_search = request.args['search'] json_search = json.loads(unsafe_search) safe_search = sanitize(json_search) db = me.connect('mydb') return db.movie.find({'name': safe_search}) # if __name__ == "__main__": # app.run(debug=True)
from flask import Flask, request import mongoengine as me from mongoengine.connection import get_db, connect from mongosanitizer.sanitizer import sanitize import json app = Flask(__name__) class Movie(me.Document): title = me.StringField(required=True) Movie(title='test').save() @app.route("/connect_find") def connect_find(): unsafe_search = request.args['search'] json_search = json.loads(unsafe_search) safe_search = sanitize(json_search) db = me.connect('mydb') return db.movie.find({'name': json_search}) # if __name__ == "__main__": # app.run(debug=True) Change variable name to correct sanitized input variable Co-authored-by: Rasmus Wriedt Larsen <6dfdada9c346ecb5eceda0aac2a0eed555506730@gmail.com>from flask import Flask, request import mongoengine as me from mongoengine.connection import get_db, connect from mongosanitizer.sanitizer import sanitize import json app = Flask(__name__) class Movie(me.Document): title = me.StringField(required=True) Movie(title='test').save() @app.route("/connect_find") def connect_find(): unsafe_search = request.args['search'] json_search = json.loads(unsafe_search) safe_search = sanitize(json_search) db = me.connect('mydb') return db.movie.find({'name': safe_search}) # if __name__ == "__main__": # app.run(debug=True)
<commit_before>from flask import Flask, request import mongoengine as me from mongoengine.connection import get_db, connect from mongosanitizer.sanitizer import sanitize import json app = Flask(__name__) class Movie(me.Document): title = me.StringField(required=True) Movie(title='test').save() @app.route("/connect_find") def connect_find(): unsafe_search = request.args['search'] json_search = json.loads(unsafe_search) safe_search = sanitize(json_search) db = me.connect('mydb') return db.movie.find({'name': json_search}) # if __name__ == "__main__": # app.run(debug=True) <commit_msg>Change variable name to correct sanitized input variable Co-authored-by: Rasmus Wriedt Larsen <6dfdada9c346ecb5eceda0aac2a0eed555506730@gmail.com><commit_after>from flask import Flask, request import mongoengine as me from mongoengine.connection import get_db, connect from mongosanitizer.sanitizer import sanitize import json app = Flask(__name__) class Movie(me.Document): title = me.StringField(required=True) Movie(title='test').save() @app.route("/connect_find") def connect_find(): unsafe_search = request.args['search'] json_search = json.loads(unsafe_search) safe_search = sanitize(json_search) db = me.connect('mydb') return db.movie.find({'name': safe_search}) # if __name__ == "__main__": # app.run(debug=True)
0f1a3d06b316590c029e4e6c0e474f716e047033
pokebattle/game_entrypoint.py
pokebattle/game_entrypoint.py
from nameko.web.handlers import http from pokebattle.scores import ScoreService class GameService(object): score_service = RpcProxy('score_service') @http('POST', '/signup') def signup(self): pass @http('POST', '/login') def login(self): pass @http('POST', '/battle') def new_game(self): pass @http('GET', '/leaderboard') def leaderboard(self): pass @http('GET', '/user/<int:id>') def user(self): pass @http('GET', '/user/<int:id>/pokemons') def user_pokemons(self): pass
import json from nameko.web.handlers import http from nameko.rpc import RpcProxy from pokebattle.scores import ScoreService class GameService(object): name = 'game_service' score_rpc = RpcProxy('score_service') @http('POST', '/signup') def signup(self, request): pass @http('POST', '/login') def login(self, request): pass @http('POST', '/battle') def new_game(self, request): pass @http('GET', '/leaderboard') def leaderboard(self, request): return json.dumps(self.score_rpc.leaderboard()) @http('GET', '/user/<int:id>') def user(self, request): pass @http('GET', '/user/<int:id>/pokemons') def user_pokemons(self, request): pass
Add leaderbord rpc call and add request arg to all methods
Add leaderbord rpc call and add request arg to all methods
Python
mit
skooda/poke-battle,radekj/poke-battle
from nameko.web.handlers import http from pokebattle.scores import ScoreService class GameService(object): score_service = RpcProxy('score_service') @http('POST', '/signup') def signup(self): pass @http('POST', '/login') def login(self): pass @http('POST', '/battle') def new_game(self): pass @http('GET', '/leaderboard') def leaderboard(self): pass @http('GET', '/user/<int:id>') def user(self): pass @http('GET', '/user/<int:id>/pokemons') def user_pokemons(self): pass Add leaderbord rpc call and add request arg to all methods
import json from nameko.web.handlers import http from nameko.rpc import RpcProxy from pokebattle.scores import ScoreService class GameService(object): name = 'game_service' score_rpc = RpcProxy('score_service') @http('POST', '/signup') def signup(self, request): pass @http('POST', '/login') def login(self, request): pass @http('POST', '/battle') def new_game(self, request): pass @http('GET', '/leaderboard') def leaderboard(self, request): return json.dumps(self.score_rpc.leaderboard()) @http('GET', '/user/<int:id>') def user(self, request): pass @http('GET', '/user/<int:id>/pokemons') def user_pokemons(self, request): pass
<commit_before>from nameko.web.handlers import http from pokebattle.scores import ScoreService class GameService(object): score_service = RpcProxy('score_service') @http('POST', '/signup') def signup(self): pass @http('POST', '/login') def login(self): pass @http('POST', '/battle') def new_game(self): pass @http('GET', '/leaderboard') def leaderboard(self): pass @http('GET', '/user/<int:id>') def user(self): pass @http('GET', '/user/<int:id>/pokemons') def user_pokemons(self): pass <commit_msg>Add leaderbord rpc call and add request arg to all methods<commit_after>
import json from nameko.web.handlers import http from nameko.rpc import RpcProxy from pokebattle.scores import ScoreService class GameService(object): name = 'game_service' score_rpc = RpcProxy('score_service') @http('POST', '/signup') def signup(self, request): pass @http('POST', '/login') def login(self, request): pass @http('POST', '/battle') def new_game(self, request): pass @http('GET', '/leaderboard') def leaderboard(self, request): return json.dumps(self.score_rpc.leaderboard()) @http('GET', '/user/<int:id>') def user(self, request): pass @http('GET', '/user/<int:id>/pokemons') def user_pokemons(self, request): pass
from nameko.web.handlers import http from pokebattle.scores import ScoreService class GameService(object): score_service = RpcProxy('score_service') @http('POST', '/signup') def signup(self): pass @http('POST', '/login') def login(self): pass @http('POST', '/battle') def new_game(self): pass @http('GET', '/leaderboard') def leaderboard(self): pass @http('GET', '/user/<int:id>') def user(self): pass @http('GET', '/user/<int:id>/pokemons') def user_pokemons(self): pass Add leaderbord rpc call and add request arg to all methodsimport json from nameko.web.handlers import http from nameko.rpc import RpcProxy from pokebattle.scores import ScoreService class GameService(object): name = 'game_service' score_rpc = RpcProxy('score_service') @http('POST', '/signup') def signup(self, request): pass @http('POST', '/login') def login(self, request): pass @http('POST', '/battle') def new_game(self, request): pass @http('GET', '/leaderboard') def leaderboard(self, request): return json.dumps(self.score_rpc.leaderboard()) @http('GET', '/user/<int:id>') def user(self, request): pass @http('GET', '/user/<int:id>/pokemons') def user_pokemons(self, request): pass
<commit_before>from nameko.web.handlers import http from pokebattle.scores import ScoreService class GameService(object): score_service = RpcProxy('score_service') @http('POST', '/signup') def signup(self): pass @http('POST', '/login') def login(self): pass @http('POST', '/battle') def new_game(self): pass @http('GET', '/leaderboard') def leaderboard(self): pass @http('GET', '/user/<int:id>') def user(self): pass @http('GET', '/user/<int:id>/pokemons') def user_pokemons(self): pass <commit_msg>Add leaderbord rpc call and add request arg to all methods<commit_after>import json from nameko.web.handlers import http from nameko.rpc import RpcProxy from pokebattle.scores import ScoreService class GameService(object): name = 'game_service' score_rpc = RpcProxy('score_service') @http('POST', '/signup') def signup(self, request): pass @http('POST', '/login') def login(self, request): pass @http('POST', '/battle') def new_game(self, request): pass @http('GET', '/leaderboard') def leaderboard(self, request): return json.dumps(self.score_rpc.leaderboard()) @http('GET', '/user/<int:id>') def user(self, request): pass @http('GET', '/user/<int:id>/pokemons') def user_pokemons(self, request): pass
f27d2078a67a1a2ba0da0c000a68d8b0d212bf08
polyaxon/experiments/utils.py
polyaxon/experiments/utils.py
# -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function import os from django.conf import settings from libs.paths import delete_path, create_path def get_experiment_outputs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') return os.path.join(settings.OUTPUTS_ROOT, '/'.join(values)) def get_experiment_logs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') return os.path.join(settings.LOGS_ROOT, '/'.join(values)) def delete_experiment_logs(experiment_group_name): path = get_experiment_logs_path(experiment_group_name) delete_path(path) def delete_experiment_outputs(experiment_group_name): path = get_experiment_outputs_path(experiment_group_name) delete_path(path) def create_experiment_logs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') path = settings.LOGS_ROOT for value in values[:-1]: path = os.path.join(path, value) if not os.path.isdir(path): create_path(path)
# -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function import os from django.conf import settings from libs.paths import delete_path, create_path def get_experiment_outputs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') return os.path.join(settings.OUTPUTS_ROOT, '/'.join(values)) def get_experiment_logs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') return os.path.join(settings.LOGS_ROOT, '/'.join(values)) def delete_experiment_logs(experiment_group_name): path = get_experiment_logs_path(experiment_group_name) delete_path(path) def delete_experiment_outputs(experiment_group_name): path = get_experiment_outputs_path(experiment_group_name) delete_path(path) def create_experiment_logs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') path = settings.LOGS_ROOT for value in values[:-1]: path = os.path.join(path, value) if not os.path.isdir(path): create_path(path) # Create file with permissions path = os.path.join(path, values[-1]) open(path, 'w+') os.chmod(path, 0o777)
Update experiment logs path creation
Update experiment logs path creation
Python
apache-2.0
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
# -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function import os from django.conf import settings from libs.paths import delete_path, create_path def get_experiment_outputs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') return os.path.join(settings.OUTPUTS_ROOT, '/'.join(values)) def get_experiment_logs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') return os.path.join(settings.LOGS_ROOT, '/'.join(values)) def delete_experiment_logs(experiment_group_name): path = get_experiment_logs_path(experiment_group_name) delete_path(path) def delete_experiment_outputs(experiment_group_name): path = get_experiment_outputs_path(experiment_group_name) delete_path(path) def create_experiment_logs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') path = settings.LOGS_ROOT for value in values[:-1]: path = os.path.join(path, value) if not os.path.isdir(path): create_path(path) Update experiment logs path creation
# -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function import os from django.conf import settings from libs.paths import delete_path, create_path def get_experiment_outputs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') return os.path.join(settings.OUTPUTS_ROOT, '/'.join(values)) def get_experiment_logs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') return os.path.join(settings.LOGS_ROOT, '/'.join(values)) def delete_experiment_logs(experiment_group_name): path = get_experiment_logs_path(experiment_group_name) delete_path(path) def delete_experiment_outputs(experiment_group_name): path = get_experiment_outputs_path(experiment_group_name) delete_path(path) def create_experiment_logs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') path = settings.LOGS_ROOT for value in values[:-1]: path = os.path.join(path, value) if not os.path.isdir(path): create_path(path) # Create file with permissions path = os.path.join(path, values[-1]) open(path, 'w+') os.chmod(path, 0o777)
<commit_before># -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function import os from django.conf import settings from libs.paths import delete_path, create_path def get_experiment_outputs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') return os.path.join(settings.OUTPUTS_ROOT, '/'.join(values)) def get_experiment_logs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') return os.path.join(settings.LOGS_ROOT, '/'.join(values)) def delete_experiment_logs(experiment_group_name): path = get_experiment_logs_path(experiment_group_name) delete_path(path) def delete_experiment_outputs(experiment_group_name): path = get_experiment_outputs_path(experiment_group_name) delete_path(path) def create_experiment_logs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') path = settings.LOGS_ROOT for value in values[:-1]: path = os.path.join(path, value) if not os.path.isdir(path): create_path(path) <commit_msg>Update experiment logs path creation<commit_after>
# -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function import os from django.conf import settings from libs.paths import delete_path, create_path def get_experiment_outputs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') return os.path.join(settings.OUTPUTS_ROOT, '/'.join(values)) def get_experiment_logs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') return os.path.join(settings.LOGS_ROOT, '/'.join(values)) def delete_experiment_logs(experiment_group_name): path = get_experiment_logs_path(experiment_group_name) delete_path(path) def delete_experiment_outputs(experiment_group_name): path = get_experiment_outputs_path(experiment_group_name) delete_path(path) def create_experiment_logs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') path = settings.LOGS_ROOT for value in values[:-1]: path = os.path.join(path, value) if not os.path.isdir(path): create_path(path) # Create file with permissions path = os.path.join(path, values[-1]) open(path, 'w+') os.chmod(path, 0o777)
# -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function import os from django.conf import settings from libs.paths import delete_path, create_path def get_experiment_outputs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') return os.path.join(settings.OUTPUTS_ROOT, '/'.join(values)) def get_experiment_logs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') return os.path.join(settings.LOGS_ROOT, '/'.join(values)) def delete_experiment_logs(experiment_group_name): path = get_experiment_logs_path(experiment_group_name) delete_path(path) def delete_experiment_outputs(experiment_group_name): path = get_experiment_outputs_path(experiment_group_name) delete_path(path) def create_experiment_logs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') path = settings.LOGS_ROOT for value in values[:-1]: path = os.path.join(path, value) if not os.path.isdir(path): create_path(path) Update experiment logs path creation# -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function import os from django.conf import settings from libs.paths import delete_path, create_path def get_experiment_outputs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') return os.path.join(settings.OUTPUTS_ROOT, '/'.join(values)) def get_experiment_logs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') return os.path.join(settings.LOGS_ROOT, '/'.join(values)) def delete_experiment_logs(experiment_group_name): path = get_experiment_logs_path(experiment_group_name) delete_path(path) def delete_experiment_outputs(experiment_group_name): path = get_experiment_outputs_path(experiment_group_name) delete_path(path) def create_experiment_logs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') path = settings.LOGS_ROOT for value in values[:-1]: path = os.path.join(path, value) if not os.path.isdir(path): create_path(path) # Create file with permissions path = os.path.join(path, values[-1]) open(path, 'w+') os.chmod(path, 0o777)
<commit_before># -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function import os from django.conf import settings from libs.paths import delete_path, create_path def get_experiment_outputs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') return os.path.join(settings.OUTPUTS_ROOT, '/'.join(values)) def get_experiment_logs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') return os.path.join(settings.LOGS_ROOT, '/'.join(values)) def delete_experiment_logs(experiment_group_name): path = get_experiment_logs_path(experiment_group_name) delete_path(path) def delete_experiment_outputs(experiment_group_name): path = get_experiment_outputs_path(experiment_group_name) delete_path(path) def create_experiment_logs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') path = settings.LOGS_ROOT for value in values[:-1]: path = os.path.join(path, value) if not os.path.isdir(path): create_path(path) <commit_msg>Update experiment logs path creation<commit_after># -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function import os from django.conf import settings from libs.paths import delete_path, create_path def get_experiment_outputs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') return os.path.join(settings.OUTPUTS_ROOT, '/'.join(values)) def get_experiment_logs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') return os.path.join(settings.LOGS_ROOT, '/'.join(values)) def delete_experiment_logs(experiment_group_name): path = get_experiment_logs_path(experiment_group_name) delete_path(path) def delete_experiment_outputs(experiment_group_name): path = get_experiment_outputs_path(experiment_group_name) delete_path(path) def create_experiment_logs_path(experiment_name): values = experiment_name.split('.') if len(values) == 3: values.insert(2, 'independents') path = settings.LOGS_ROOT for value in values[:-1]: path = os.path.join(path, value) if not os.path.isdir(path): create_path(path) # Create file with permissions path = os.path.join(path, values[-1]) open(path, 'w+') os.chmod(path, 0o777)
6370b362c77ae9c5f9aa64e11eae3941438b5359
openmc/deplete/__init__.py
openmc/deplete/__init__.py
""" openmc.deplete ============== A depletion front-end tool. """ from .dummy_comm import DummyCommunicator try: from mpi4py import MPI comm = MPI.COMM_WORLD have_mpi = True except ImportError: comm = DummyCommunicator() have_mpi = False from .nuclide import * from .chain import * from .operator import * from .reaction_rates import * from .abc import * from .results import * from .results_list import * from .integrator import *
""" openmc.deplete ============== A depletion front-end tool. """ from .dummy_comm import DummyCommunicator try: from mpi4py import MPI comm = MPI.COMM_WORLD have_mpi = True # check if running with MPI and if hdf5 is MPI-enabled from h5py import get_config if not get_config().mpi and comm.size > 1: # Raise exception only on process 0 if comm.rank: from sys import exit exit() raise RuntimeError( "Need MPI-enabled HDF5 install to perform depletion with MPI" ) except ImportError: comm = DummyCommunicator() have_mpi = False from .nuclide import * from .chain import * from .operator import * from .reaction_rates import * from .abc import * from .results import * from .results_list import * from .integrator import *
Check that hdf5 has MPI if performing depletion with MPI
Check that hdf5 has MPI if performing depletion with MPI Check added in openmc/depletion/__init__.py. Without this check, the exporting of the Results to hdf5 will hang, as the second process attempts to write to a file that has already been opened on another process. This error is only raised after a full transport calculation has been run. The check raises a more helpful error directly at the import from openmc.deplete, prior to transport calculations.
Python
mit
shikhar413/openmc,shikhar413/openmc,liangjg/openmc,amandalund/openmc,mit-crpg/openmc,mit-crpg/openmc,amandalund/openmc,walshjon/openmc,smharper/openmc,paulromano/openmc,paulromano/openmc,paulromano/openmc,walshjon/openmc,paulromano/openmc,walshjon/openmc,liangjg/openmc,walshjon/openmc,amandalund/openmc,smharper/openmc,shikhar413/openmc,smharper/openmc,liangjg/openmc,shikhar413/openmc,amandalund/openmc,smharper/openmc,mit-crpg/openmc,mit-crpg/openmc,liangjg/openmc
""" openmc.deplete ============== A depletion front-end tool. """ from .dummy_comm import DummyCommunicator try: from mpi4py import MPI comm = MPI.COMM_WORLD have_mpi = True except ImportError: comm = DummyCommunicator() have_mpi = False from .nuclide import * from .chain import * from .operator import * from .reaction_rates import * from .abc import * from .results import * from .results_list import * from .integrator import * Check that hdf5 has MPI if performing depletion with MPI Check added in openmc/depletion/__init__.py. Without this check, the exporting of the Results to hdf5 will hang, as the second process attempts to write to a file that has already been opened on another process. This error is only raised after a full transport calculation has been run. The check raises a more helpful error directly at the import from openmc.deplete, prior to transport calculations.
""" openmc.deplete ============== A depletion front-end tool. """ from .dummy_comm import DummyCommunicator try: from mpi4py import MPI comm = MPI.COMM_WORLD have_mpi = True # check if running with MPI and if hdf5 is MPI-enabled from h5py import get_config if not get_config().mpi and comm.size > 1: # Raise exception only on process 0 if comm.rank: from sys import exit exit() raise RuntimeError( "Need MPI-enabled HDF5 install to perform depletion with MPI" ) except ImportError: comm = DummyCommunicator() have_mpi = False from .nuclide import * from .chain import * from .operator import * from .reaction_rates import * from .abc import * from .results import * from .results_list import * from .integrator import *
<commit_before>""" openmc.deplete ============== A depletion front-end tool. """ from .dummy_comm import DummyCommunicator try: from mpi4py import MPI comm = MPI.COMM_WORLD have_mpi = True except ImportError: comm = DummyCommunicator() have_mpi = False from .nuclide import * from .chain import * from .operator import * from .reaction_rates import * from .abc import * from .results import * from .results_list import * from .integrator import * <commit_msg>Check that hdf5 has MPI if performing depletion with MPI Check added in openmc/depletion/__init__.py. Without this check, the exporting of the Results to hdf5 will hang, as the second process attempts to write to a file that has already been opened on another process. This error is only raised after a full transport calculation has been run. The check raises a more helpful error directly at the import from openmc.deplete, prior to transport calculations.<commit_after>
""" openmc.deplete ============== A depletion front-end tool. """ from .dummy_comm import DummyCommunicator try: from mpi4py import MPI comm = MPI.COMM_WORLD have_mpi = True # check if running with MPI and if hdf5 is MPI-enabled from h5py import get_config if not get_config().mpi and comm.size > 1: # Raise exception only on process 0 if comm.rank: from sys import exit exit() raise RuntimeError( "Need MPI-enabled HDF5 install to perform depletion with MPI" ) except ImportError: comm = DummyCommunicator() have_mpi = False from .nuclide import * from .chain import * from .operator import * from .reaction_rates import * from .abc import * from .results import * from .results_list import * from .integrator import *
""" openmc.deplete ============== A depletion front-end tool. """ from .dummy_comm import DummyCommunicator try: from mpi4py import MPI comm = MPI.COMM_WORLD have_mpi = True except ImportError: comm = DummyCommunicator() have_mpi = False from .nuclide import * from .chain import * from .operator import * from .reaction_rates import * from .abc import * from .results import * from .results_list import * from .integrator import * Check that hdf5 has MPI if performing depletion with MPI Check added in openmc/depletion/__init__.py. Without this check, the exporting of the Results to hdf5 will hang, as the second process attempts to write to a file that has already been opened on another process. This error is only raised after a full transport calculation has been run. The check raises a more helpful error directly at the import from openmc.deplete, prior to transport calculations.""" openmc.deplete ============== A depletion front-end tool. """ from .dummy_comm import DummyCommunicator try: from mpi4py import MPI comm = MPI.COMM_WORLD have_mpi = True # check if running with MPI and if hdf5 is MPI-enabled from h5py import get_config if not get_config().mpi and comm.size > 1: # Raise exception only on process 0 if comm.rank: from sys import exit exit() raise RuntimeError( "Need MPI-enabled HDF5 install to perform depletion with MPI" ) except ImportError: comm = DummyCommunicator() have_mpi = False from .nuclide import * from .chain import * from .operator import * from .reaction_rates import * from .abc import * from .results import * from .results_list import * from .integrator import *
<commit_before>""" openmc.deplete ============== A depletion front-end tool. """ from .dummy_comm import DummyCommunicator try: from mpi4py import MPI comm = MPI.COMM_WORLD have_mpi = True except ImportError: comm = DummyCommunicator() have_mpi = False from .nuclide import * from .chain import * from .operator import * from .reaction_rates import * from .abc import * from .results import * from .results_list import * from .integrator import * <commit_msg>Check that hdf5 has MPI if performing depletion with MPI Check added in openmc/depletion/__init__.py. Without this check, the exporting of the Results to hdf5 will hang, as the second process attempts to write to a file that has already been opened on another process. This error is only raised after a full transport calculation has been run. The check raises a more helpful error directly at the import from openmc.deplete, prior to transport calculations.<commit_after>""" openmc.deplete ============== A depletion front-end tool. """ from .dummy_comm import DummyCommunicator try: from mpi4py import MPI comm = MPI.COMM_WORLD have_mpi = True # check if running with MPI and if hdf5 is MPI-enabled from h5py import get_config if not get_config().mpi and comm.size > 1: # Raise exception only on process 0 if comm.rank: from sys import exit exit() raise RuntimeError( "Need MPI-enabled HDF5 install to perform depletion with MPI" ) except ImportError: comm = DummyCommunicator() have_mpi = False from .nuclide import * from .chain import * from .operator import * from .reaction_rates import * from .abc import * from .results import * from .results_list import * from .integrator import *
09b69d7e650055f75562f740d552434d2dfa2d6d
tapiriik/services/service.py
tapiriik/services/service.py
from tapiriik.services import * from tapiriik.database import db class Service: def FromID(id): if id=="runkeeper": return RunKeeper elif id=="strava": return Strava raise ValueError def List(): return [RunKeeper, Strava] def WebInit(): global UserAuthorizationURL for itm in Service.List(): itm.WebInit() def GetServiceRecordWithAuthDetails(service, authDetails): return db.connections.find_one({"Service": service.ID, "Authorization": authDetails}) def EnsureServiceRecordWithAuth(service, uid, authDetails): serviceRecord = db.connections.find_one({"ExternalID": uid, "Service": service.ID}) if serviceRecord is None: db.connections.insert({"ExternalID": uid, "Service": service.ID, "SynchronizedActivities": [], "Authorization": authDetails}) serviceRecord = db.connections.find_one({"ExternalID": uid, "Service": service.ID}) return serviceRecord
from tapiriik.services import * from tapiriik.database import db class Service: def FromID(id): if id=="runkeeper": return RunKeeper elif id=="strava": return Strava raise ValueError def List(): return [RunKeeper, Strava] def WebInit(): global UserAuthorizationURL for itm in Service.List(): itm.WebInit() def GetServiceRecordWithAuthDetails(service, authDetails): return db.connections.find_one({"Service": service.ID, "Authorization": authDetails}) def EnsureServiceRecordWithAuth(service, uid, authDetails): serviceRecord = db.connections.find_one({"ExternalID": uid, "Service": service.ID}) if serviceRecord is None: db.connections.insert({"ExternalID": uid, "Service": service.ID, "SynchronizedActivities": [], "Authorization": authDetails}) serviceRecord = db.connections.find_one({"ExternalID": uid, "Service": service.ID}) if serviceRecord["Authorization"] != authDetails: db.connections.update({"ExternalID": uid, "Service": service.ID}, {"$set": {"Authorization": authDetails}}) return serviceRecord
Update auth details on reauthorization
Update auth details on reauthorization
Python
apache-2.0
marxin/tapiriik,campbellr/tapiriik,brunoflores/tapiriik,mduggan/tapiriik,abhijit86k/tapiriik,cpfair/tapiriik,gavioto/tapiriik,cmgrote/tapiriik,gavioto/tapiriik,gavioto/tapiriik,mjnbike/tapiriik,abhijit86k/tapiriik,dmschreiber/tapiriik,marxin/tapiriik,campbellr/tapiriik,mduggan/tapiriik,abs0/tapiriik,cmgrote/tapiriik,dmschreiber/tapiriik,cgourlay/tapiriik,marxin/tapiriik,abs0/tapiriik,gavioto/tapiriik,dlenski/tapiriik,cgourlay/tapiriik,niosus/tapiriik,campbellr/tapiriik,dlenski/tapiriik,mjnbike/tapiriik,dmschreiber/tapiriik,cgourlay/tapiriik,campbellr/tapiriik,niosus/tapiriik,abs0/tapiriik,olamy/tapiriik,olamy/tapiriik,cheatos101/tapiriik,mduggan/tapiriik,cmgrote/tapiriik,mduggan/tapiriik,abhijit86k/tapiriik,mjnbike/tapiriik,cmgrote/tapiriik,marxin/tapiriik,cheatos101/tapiriik,dlenski/tapiriik,olamy/tapiriik,dmschreiber/tapiriik,niosus/tapiriik,mjnbike/tapiriik,olamy/tapiriik,abhijit86k/tapiriik,cheatos101/tapiriik,brunoflores/tapiriik,abs0/tapiriik,niosus/tapiriik,cpfair/tapiriik,brunoflores/tapiriik,cgourlay/tapiriik,dlenski/tapiriik,cpfair/tapiriik,brunoflores/tapiriik,cheatos101/tapiriik,cpfair/tapiriik
from tapiriik.services import * from tapiriik.database import db class Service: def FromID(id): if id=="runkeeper": return RunKeeper elif id=="strava": return Strava raise ValueError def List(): return [RunKeeper, Strava] def WebInit(): global UserAuthorizationURL for itm in Service.List(): itm.WebInit() def GetServiceRecordWithAuthDetails(service, authDetails): return db.connections.find_one({"Service": service.ID, "Authorization": authDetails}) def EnsureServiceRecordWithAuth(service, uid, authDetails): serviceRecord = db.connections.find_one({"ExternalID": uid, "Service": service.ID}) if serviceRecord is None: db.connections.insert({"ExternalID": uid, "Service": service.ID, "SynchronizedActivities": [], "Authorization": authDetails}) serviceRecord = db.connections.find_one({"ExternalID": uid, "Service": service.ID}) return serviceRecord Update auth details on reauthorization
from tapiriik.services import * from tapiriik.database import db class Service: def FromID(id): if id=="runkeeper": return RunKeeper elif id=="strava": return Strava raise ValueError def List(): return [RunKeeper, Strava] def WebInit(): global UserAuthorizationURL for itm in Service.List(): itm.WebInit() def GetServiceRecordWithAuthDetails(service, authDetails): return db.connections.find_one({"Service": service.ID, "Authorization": authDetails}) def EnsureServiceRecordWithAuth(service, uid, authDetails): serviceRecord = db.connections.find_one({"ExternalID": uid, "Service": service.ID}) if serviceRecord is None: db.connections.insert({"ExternalID": uid, "Service": service.ID, "SynchronizedActivities": [], "Authorization": authDetails}) serviceRecord = db.connections.find_one({"ExternalID": uid, "Service": service.ID}) if serviceRecord["Authorization"] != authDetails: db.connections.update({"ExternalID": uid, "Service": service.ID}, {"$set": {"Authorization": authDetails}}) return serviceRecord
<commit_before>from tapiriik.services import * from tapiriik.database import db class Service: def FromID(id): if id=="runkeeper": return RunKeeper elif id=="strava": return Strava raise ValueError def List(): return [RunKeeper, Strava] def WebInit(): global UserAuthorizationURL for itm in Service.List(): itm.WebInit() def GetServiceRecordWithAuthDetails(service, authDetails): return db.connections.find_one({"Service": service.ID, "Authorization": authDetails}) def EnsureServiceRecordWithAuth(service, uid, authDetails): serviceRecord = db.connections.find_one({"ExternalID": uid, "Service": service.ID}) if serviceRecord is None: db.connections.insert({"ExternalID": uid, "Service": service.ID, "SynchronizedActivities": [], "Authorization": authDetails}) serviceRecord = db.connections.find_one({"ExternalID": uid, "Service": service.ID}) return serviceRecord <commit_msg>Update auth details on reauthorization<commit_after>
from tapiriik.services import * from tapiriik.database import db class Service: def FromID(id): if id=="runkeeper": return RunKeeper elif id=="strava": return Strava raise ValueError def List(): return [RunKeeper, Strava] def WebInit(): global UserAuthorizationURL for itm in Service.List(): itm.WebInit() def GetServiceRecordWithAuthDetails(service, authDetails): return db.connections.find_one({"Service": service.ID, "Authorization": authDetails}) def EnsureServiceRecordWithAuth(service, uid, authDetails): serviceRecord = db.connections.find_one({"ExternalID": uid, "Service": service.ID}) if serviceRecord is None: db.connections.insert({"ExternalID": uid, "Service": service.ID, "SynchronizedActivities": [], "Authorization": authDetails}) serviceRecord = db.connections.find_one({"ExternalID": uid, "Service": service.ID}) if serviceRecord["Authorization"] != authDetails: db.connections.update({"ExternalID": uid, "Service": service.ID}, {"$set": {"Authorization": authDetails}}) return serviceRecord
from tapiriik.services import * from tapiriik.database import db class Service: def FromID(id): if id=="runkeeper": return RunKeeper elif id=="strava": return Strava raise ValueError def List(): return [RunKeeper, Strava] def WebInit(): global UserAuthorizationURL for itm in Service.List(): itm.WebInit() def GetServiceRecordWithAuthDetails(service, authDetails): return db.connections.find_one({"Service": service.ID, "Authorization": authDetails}) def EnsureServiceRecordWithAuth(service, uid, authDetails): serviceRecord = db.connections.find_one({"ExternalID": uid, "Service": service.ID}) if serviceRecord is None: db.connections.insert({"ExternalID": uid, "Service": service.ID, "SynchronizedActivities": [], "Authorization": authDetails}) serviceRecord = db.connections.find_one({"ExternalID": uid, "Service": service.ID}) return serviceRecord Update auth details on reauthorizationfrom tapiriik.services import * from tapiriik.database import db class Service: def FromID(id): if id=="runkeeper": return RunKeeper elif id=="strava": return Strava raise ValueError def List(): return [RunKeeper, Strava] def WebInit(): global UserAuthorizationURL for itm in Service.List(): itm.WebInit() def GetServiceRecordWithAuthDetails(service, authDetails): return db.connections.find_one({"Service": service.ID, "Authorization": authDetails}) def EnsureServiceRecordWithAuth(service, uid, authDetails): serviceRecord = db.connections.find_one({"ExternalID": uid, "Service": service.ID}) if serviceRecord is None: db.connections.insert({"ExternalID": uid, "Service": service.ID, "SynchronizedActivities": [], "Authorization": authDetails}) serviceRecord = db.connections.find_one({"ExternalID": uid, "Service": service.ID}) if serviceRecord["Authorization"] != authDetails: db.connections.update({"ExternalID": uid, "Service": service.ID}, {"$set": {"Authorization": authDetails}}) return serviceRecord
<commit_before>from tapiriik.services import * from tapiriik.database import db class Service: def FromID(id): if id=="runkeeper": return RunKeeper elif id=="strava": return Strava raise ValueError def List(): return [RunKeeper, Strava] def WebInit(): global UserAuthorizationURL for itm in Service.List(): itm.WebInit() def GetServiceRecordWithAuthDetails(service, authDetails): return db.connections.find_one({"Service": service.ID, "Authorization": authDetails}) def EnsureServiceRecordWithAuth(service, uid, authDetails): serviceRecord = db.connections.find_one({"ExternalID": uid, "Service": service.ID}) if serviceRecord is None: db.connections.insert({"ExternalID": uid, "Service": service.ID, "SynchronizedActivities": [], "Authorization": authDetails}) serviceRecord = db.connections.find_one({"ExternalID": uid, "Service": service.ID}) return serviceRecord <commit_msg>Update auth details on reauthorization<commit_after>from tapiriik.services import * from tapiriik.database import db class Service: def FromID(id): if id=="runkeeper": return RunKeeper elif id=="strava": return Strava raise ValueError def List(): return [RunKeeper, Strava] def WebInit(): global UserAuthorizationURL for itm in Service.List(): itm.WebInit() def GetServiceRecordWithAuthDetails(service, authDetails): return db.connections.find_one({"Service": service.ID, "Authorization": authDetails}) def EnsureServiceRecordWithAuth(service, uid, authDetails): serviceRecord = db.connections.find_one({"ExternalID": uid, "Service": service.ID}) if serviceRecord is None: db.connections.insert({"ExternalID": uid, "Service": service.ID, "SynchronizedActivities": [], "Authorization": authDetails}) serviceRecord = db.connections.find_one({"ExternalID": uid, "Service": service.ID}) if serviceRecord["Authorization"] != authDetails: db.connections.update({"ExternalID": uid, "Service": service.ID}, {"$set": {"Authorization": authDetails}}) return serviceRecord
8c5edbf6d928ab937128b783782726c06592cc9f
rosetta/signals.py
rosetta/signals.py
from django import dispatch entry_changed = dispatch.Signal() post_save = dispatch.Signal()
from django import dispatch # providing_args=["user", "old_msgstr", "old_fuzzy", "pofile", "language_code"] entry_changed = dispatch.Signal() # providing_args=["language_code", "request"] post_save = dispatch.Signal()
Add providing_args as a comment
Add providing_args as a comment
Python
mit
mbi/django-rosetta,mbi/django-rosetta,mbi/django-rosetta,mbi/django-rosetta
from django import dispatch entry_changed = dispatch.Signal() post_save = dispatch.Signal() Add providing_args as a comment
from django import dispatch # providing_args=["user", "old_msgstr", "old_fuzzy", "pofile", "language_code"] entry_changed = dispatch.Signal() # providing_args=["language_code", "request"] post_save = dispatch.Signal()
<commit_before>from django import dispatch entry_changed = dispatch.Signal() post_save = dispatch.Signal() <commit_msg>Add providing_args as a comment<commit_after>
from django import dispatch # providing_args=["user", "old_msgstr", "old_fuzzy", "pofile", "language_code"] entry_changed = dispatch.Signal() # providing_args=["language_code", "request"] post_save = dispatch.Signal()
from django import dispatch entry_changed = dispatch.Signal() post_save = dispatch.Signal() Add providing_args as a commentfrom django import dispatch # providing_args=["user", "old_msgstr", "old_fuzzy", "pofile", "language_code"] entry_changed = dispatch.Signal() # providing_args=["language_code", "request"] post_save = dispatch.Signal()
<commit_before>from django import dispatch entry_changed = dispatch.Signal() post_save = dispatch.Signal() <commit_msg>Add providing_args as a comment<commit_after>from django import dispatch # providing_args=["user", "old_msgstr", "old_fuzzy", "pofile", "language_code"] entry_changed = dispatch.Signal() # providing_args=["language_code", "request"] post_save = dispatch.Signal()
6c37880ee408a5a01e27616b00895b81413ab9be
tests/test_documentation.py
tests/test_documentation.py
from unittest import TestCase import importlib def get_undocumented_wildcards(modulename): namespace = importlib.import_module(modulename) loc = namespace.__dict__ undocumented = [] for key, val in loc.items(): if (key[0] != "_") and (key not in {"_", "In", "Out", "get_ipython", "exit", "quit", "join", "S", }): description = val.__doc__ if not description: undocumented.append(key) return undocumented, len(loc.items()) class TestFlow(TestCase): def check_phi_flow(self): modulename = "phi.flow" undocumented, loc_len = get_undocumented_wildcards(modulename) undocumented_fraction = len(undocumented) / loc_len self.assertLess(undocumented_fraction, 0.25, f"{len(undocumented)/loc_len:.2%} of {modulename} imports undocumented. Missing Docstrings in {len(undocumented)}/{loc_len}:\n- " + "\n- ".join(undocumented))
from unittest import TestCase import importlib def get_undocumented_wildcards(modulename): namespace = importlib.import_module(modulename) loc = namespace.__dict__ undocumented = [] for key, val in loc.items(): if (key[0] != "_") and (key not in {"_", "In", "Out", "get_ipython", "exit", "quit", "join", "S", }): description = val.__doc__ if not description: undocumented.append(key) return undocumented, len(loc.items()) class TestFlow(TestCase): def test_phi_flow(self): modulename = "phi.flow" undocumented, loc_len = get_undocumented_wildcards(modulename) undocumented_fraction = len(undocumented) / loc_len self.assertLess(undocumented_fraction, 0.25, f"{len(undocumented)/loc_len:.2%} of {modulename} imports undocumented. Missing Docstrings in {len(undocumented)}/{loc_len}:\n- " + "\n- ".join(undocumented))
Fix naming bug in doc test
Fix naming bug in doc test
Python
mit
tum-pbs/PhiFlow,tum-pbs/PhiFlow
from unittest import TestCase import importlib def get_undocumented_wildcards(modulename): namespace = importlib.import_module(modulename) loc = namespace.__dict__ undocumented = [] for key, val in loc.items(): if (key[0] != "_") and (key not in {"_", "In", "Out", "get_ipython", "exit", "quit", "join", "S", }): description = val.__doc__ if not description: undocumented.append(key) return undocumented, len(loc.items()) class TestFlow(TestCase): def check_phi_flow(self): modulename = "phi.flow" undocumented, loc_len = get_undocumented_wildcards(modulename) undocumented_fraction = len(undocumented) / loc_len self.assertLess(undocumented_fraction, 0.25, f"{len(undocumented)/loc_len:.2%} of {modulename} imports undocumented. Missing Docstrings in {len(undocumented)}/{loc_len}:\n- " + "\n- ".join(undocumented)) Fix naming bug in doc test
from unittest import TestCase import importlib def get_undocumented_wildcards(modulename): namespace = importlib.import_module(modulename) loc = namespace.__dict__ undocumented = [] for key, val in loc.items(): if (key[0] != "_") and (key not in {"_", "In", "Out", "get_ipython", "exit", "quit", "join", "S", }): description = val.__doc__ if not description: undocumented.append(key) return undocumented, len(loc.items()) class TestFlow(TestCase): def test_phi_flow(self): modulename = "phi.flow" undocumented, loc_len = get_undocumented_wildcards(modulename) undocumented_fraction = len(undocumented) / loc_len self.assertLess(undocumented_fraction, 0.25, f"{len(undocumented)/loc_len:.2%} of {modulename} imports undocumented. Missing Docstrings in {len(undocumented)}/{loc_len}:\n- " + "\n- ".join(undocumented))
<commit_before>from unittest import TestCase import importlib def get_undocumented_wildcards(modulename): namespace = importlib.import_module(modulename) loc = namespace.__dict__ undocumented = [] for key, val in loc.items(): if (key[0] != "_") and (key not in {"_", "In", "Out", "get_ipython", "exit", "quit", "join", "S", }): description = val.__doc__ if not description: undocumented.append(key) return undocumented, len(loc.items()) class TestFlow(TestCase): def check_phi_flow(self): modulename = "phi.flow" undocumented, loc_len = get_undocumented_wildcards(modulename) undocumented_fraction = len(undocumented) / loc_len self.assertLess(undocumented_fraction, 0.25, f"{len(undocumented)/loc_len:.2%} of {modulename} imports undocumented. Missing Docstrings in {len(undocumented)}/{loc_len}:\n- " + "\n- ".join(undocumented)) <commit_msg>Fix naming bug in doc test<commit_after>
from unittest import TestCase import importlib def get_undocumented_wildcards(modulename): namespace = importlib.import_module(modulename) loc = namespace.__dict__ undocumented = [] for key, val in loc.items(): if (key[0] != "_") and (key not in {"_", "In", "Out", "get_ipython", "exit", "quit", "join", "S", }): description = val.__doc__ if not description: undocumented.append(key) return undocumented, len(loc.items()) class TestFlow(TestCase): def test_phi_flow(self): modulename = "phi.flow" undocumented, loc_len = get_undocumented_wildcards(modulename) undocumented_fraction = len(undocumented) / loc_len self.assertLess(undocumented_fraction, 0.25, f"{len(undocumented)/loc_len:.2%} of {modulename} imports undocumented. Missing Docstrings in {len(undocumented)}/{loc_len}:\n- " + "\n- ".join(undocumented))
from unittest import TestCase import importlib def get_undocumented_wildcards(modulename): namespace = importlib.import_module(modulename) loc = namespace.__dict__ undocumented = [] for key, val in loc.items(): if (key[0] != "_") and (key not in {"_", "In", "Out", "get_ipython", "exit", "quit", "join", "S", }): description = val.__doc__ if not description: undocumented.append(key) return undocumented, len(loc.items()) class TestFlow(TestCase): def check_phi_flow(self): modulename = "phi.flow" undocumented, loc_len = get_undocumented_wildcards(modulename) undocumented_fraction = len(undocumented) / loc_len self.assertLess(undocumented_fraction, 0.25, f"{len(undocumented)/loc_len:.2%} of {modulename} imports undocumented. Missing Docstrings in {len(undocumented)}/{loc_len}:\n- " + "\n- ".join(undocumented)) Fix naming bug in doc testfrom unittest import TestCase import importlib def get_undocumented_wildcards(modulename): namespace = importlib.import_module(modulename) loc = namespace.__dict__ undocumented = [] for key, val in loc.items(): if (key[0] != "_") and (key not in {"_", "In", "Out", "get_ipython", "exit", "quit", "join", "S", }): description = val.__doc__ if not description: undocumented.append(key) return undocumented, len(loc.items()) class TestFlow(TestCase): def test_phi_flow(self): modulename = "phi.flow" undocumented, loc_len = get_undocumented_wildcards(modulename) undocumented_fraction = len(undocumented) / loc_len self.assertLess(undocumented_fraction, 0.25, f"{len(undocumented)/loc_len:.2%} of {modulename} imports undocumented. Missing Docstrings in {len(undocumented)}/{loc_len}:\n- " + "\n- ".join(undocumented))
<commit_before>from unittest import TestCase import importlib def get_undocumented_wildcards(modulename): namespace = importlib.import_module(modulename) loc = namespace.__dict__ undocumented = [] for key, val in loc.items(): if (key[0] != "_") and (key not in {"_", "In", "Out", "get_ipython", "exit", "quit", "join", "S", }): description = val.__doc__ if not description: undocumented.append(key) return undocumented, len(loc.items()) class TestFlow(TestCase): def check_phi_flow(self): modulename = "phi.flow" undocumented, loc_len = get_undocumented_wildcards(modulename) undocumented_fraction = len(undocumented) / loc_len self.assertLess(undocumented_fraction, 0.25, f"{len(undocumented)/loc_len:.2%} of {modulename} imports undocumented. Missing Docstrings in {len(undocumented)}/{loc_len}:\n- " + "\n- ".join(undocumented)) <commit_msg>Fix naming bug in doc test<commit_after>from unittest import TestCase import importlib def get_undocumented_wildcards(modulename): namespace = importlib.import_module(modulename) loc = namespace.__dict__ undocumented = [] for key, val in loc.items(): if (key[0] != "_") and (key not in {"_", "In", "Out", "get_ipython", "exit", "quit", "join", "S", }): description = val.__doc__ if not description: undocumented.append(key) return undocumented, len(loc.items()) class TestFlow(TestCase): def test_phi_flow(self): modulename = "phi.flow" undocumented, loc_len = get_undocumented_wildcards(modulename) undocumented_fraction = len(undocumented) / loc_len self.assertLess(undocumented_fraction, 0.25, f"{len(undocumented)/loc_len:.2%} of {modulename} imports undocumented. Missing Docstrings in {len(undocumented)}/{loc_len}:\n- " + "\n- ".join(undocumented))
55bf8c79cb3b53af36ecb64ffc22b116e36d8ac6
sugar/p2p/model/Store.py
sugar/p2p/model/Store.py
from sugar.p2p.model.RemoteModel import RemoteModel from sugar.p2p.model.LocalModel import LocalModel class Store: def __init__(self, group): self._group = group self._local_models = {} def create_model(self, model_id): model = LocalModel(self._group, model_id) self._local_models[model_id] = model return model def get_model(self, model_id): if self._local_models.has_key(model_id): return self._local_models(model_id) else: service = self._group.get_service(model_id, LocalModel.SERVICE_TYPE) if service: return RemoteModel(self._group, service) else: return None
from sugar.p2p.model.RemoteModel import RemoteModel from sugar.p2p.model.LocalModel import LocalModel class Store: def __init__(self, group): self._group = group self._local_models = {} def create_model(self, model_id): model = LocalModel(self._group, model_id) self._local_models[model_id] = model return model def get_model(self, model_id): if self._local_models.has_key(model_id): return self._local_models[model_id] else: service = self._group.get_service(model_id, LocalModel.SERVICE_TYPE) if service: return RemoteModel(self._group, service) else: return None
Fix bad usage of a dict
Fix bad usage of a dict
Python
lgpl-2.1
samdroid-apps/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,ceibal-tatu/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,samdroid-apps/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,puneetgkaur/backup_sugar_sugartoolkit,i5o/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,ceibal-tatu/sugar-toolkit,tchx84/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,puneetgkaur/backup_sugar_sugartoolkit,godiard/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,manuq/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,gusDuarte/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,ceibal-tatu/sugar-toolkit,tchx84/debian-pkg-sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,i5o/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3
from sugar.p2p.model.RemoteModel import RemoteModel from sugar.p2p.model.LocalModel import LocalModel class Store: def __init__(self, group): self._group = group self._local_models = {} def create_model(self, model_id): model = LocalModel(self._group, model_id) self._local_models[model_id] = model return model def get_model(self, model_id): if self._local_models.has_key(model_id): return self._local_models(model_id) else: service = self._group.get_service(model_id, LocalModel.SERVICE_TYPE) if service: return RemoteModel(self._group, service) else: return None Fix bad usage of a dict
from sugar.p2p.model.RemoteModel import RemoteModel from sugar.p2p.model.LocalModel import LocalModel class Store: def __init__(self, group): self._group = group self._local_models = {} def create_model(self, model_id): model = LocalModel(self._group, model_id) self._local_models[model_id] = model return model def get_model(self, model_id): if self._local_models.has_key(model_id): return self._local_models[model_id] else: service = self._group.get_service(model_id, LocalModel.SERVICE_TYPE) if service: return RemoteModel(self._group, service) else: return None
<commit_before>from sugar.p2p.model.RemoteModel import RemoteModel from sugar.p2p.model.LocalModel import LocalModel class Store: def __init__(self, group): self._group = group self._local_models = {} def create_model(self, model_id): model = LocalModel(self._group, model_id) self._local_models[model_id] = model return model def get_model(self, model_id): if self._local_models.has_key(model_id): return self._local_models(model_id) else: service = self._group.get_service(model_id, LocalModel.SERVICE_TYPE) if service: return RemoteModel(self._group, service) else: return None <commit_msg>Fix bad usage of a dict<commit_after>
from sugar.p2p.model.RemoteModel import RemoteModel from sugar.p2p.model.LocalModel import LocalModel class Store: def __init__(self, group): self._group = group self._local_models = {} def create_model(self, model_id): model = LocalModel(self._group, model_id) self._local_models[model_id] = model return model def get_model(self, model_id): if self._local_models.has_key(model_id): return self._local_models[model_id] else: service = self._group.get_service(model_id, LocalModel.SERVICE_TYPE) if service: return RemoteModel(self._group, service) else: return None
from sugar.p2p.model.RemoteModel import RemoteModel from sugar.p2p.model.LocalModel import LocalModel class Store: def __init__(self, group): self._group = group self._local_models = {} def create_model(self, model_id): model = LocalModel(self._group, model_id) self._local_models[model_id] = model return model def get_model(self, model_id): if self._local_models.has_key(model_id): return self._local_models(model_id) else: service = self._group.get_service(model_id, LocalModel.SERVICE_TYPE) if service: return RemoteModel(self._group, service) else: return None Fix bad usage of a dictfrom sugar.p2p.model.RemoteModel import RemoteModel from sugar.p2p.model.LocalModel import LocalModel class Store: def __init__(self, group): self._group = group self._local_models = {} def create_model(self, model_id): model = LocalModel(self._group, model_id) self._local_models[model_id] = model return model def get_model(self, model_id): if self._local_models.has_key(model_id): return self._local_models[model_id] else: service = self._group.get_service(model_id, LocalModel.SERVICE_TYPE) if service: return RemoteModel(self._group, service) else: return None
<commit_before>from sugar.p2p.model.RemoteModel import RemoteModel from sugar.p2p.model.LocalModel import LocalModel class Store: def __init__(self, group): self._group = group self._local_models = {} def create_model(self, model_id): model = LocalModel(self._group, model_id) self._local_models[model_id] = model return model def get_model(self, model_id): if self._local_models.has_key(model_id): return self._local_models(model_id) else: service = self._group.get_service(model_id, LocalModel.SERVICE_TYPE) if service: return RemoteModel(self._group, service) else: return None <commit_msg>Fix bad usage of a dict<commit_after>from sugar.p2p.model.RemoteModel import RemoteModel from sugar.p2p.model.LocalModel import LocalModel class Store: def __init__(self, group): self._group = group self._local_models = {} def create_model(self, model_id): model = LocalModel(self._group, model_id) self._local_models[model_id] = model return model def get_model(self, model_id): if self._local_models.has_key(model_id): return self._local_models[model_id] else: service = self._group.get_service(model_id, LocalModel.SERVICE_TYPE) if service: return RemoteModel(self._group, service) else: return None
8a81bef46b248f84ce43244ca82415cf0c7ffb6c
tests/databases/rgd/parser_test.py
tests/databases/rgd/parser_test.py
# -*- coding: utf-8 -*- """ Copyright [2009-2017] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import pytest from rnacentral_pipeline.databases.rgd.helpers import indexed from rnacentral_pipeline.databases.rgd import parser as rgd @pytest.fixture def sequences(): with indexed('data/rgd/sequences.fa.gz') as sequences: yield sequences def test_can_find_version(): with open('data/rgd/rat_genes.txt', 'r') as raw: assert rgd.get_version(raw) == 'genes-version-2.2.5' def test_can_parse_data(sequences): with open('data/rgd/rat_genes.txt', 'r') as raw: entries = list(rgd.parse(raw, sequences)) assert len(entries) == 14
# -*- coding: utf-8 -*- """ Copyright [2009-2017] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import pytest from rnacentral_pipeline.databases.rgd.helpers import indexed from rnacentral_pipeline.databases.rgd import parser as rgd @pytest.fixture def sequences(): with indexed('data/rgd/sequences.fa.gz') as sequences: yield sequences def test_can_find_version(): with open('data/rgd/rat_genes.txt', 'r') as raw: assert rgd.get_version(raw) == 'genes-version-2.2.5' @pytest.mark.xfail() def test_can_parse_data(sequences): with open('data/rgd/rat_genes.txt', 'r') as raw: entries = list(rgd.parse(raw, sequences)) assert len(entries) == 14
Mark another RGD test as failing
Mark another RGD test as failing RGD parsing is degrading more it seems.
Python
apache-2.0
RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline
# -*- coding: utf-8 -*- """ Copyright [2009-2017] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import pytest from rnacentral_pipeline.databases.rgd.helpers import indexed from rnacentral_pipeline.databases.rgd import parser as rgd @pytest.fixture def sequences(): with indexed('data/rgd/sequences.fa.gz') as sequences: yield sequences def test_can_find_version(): with open('data/rgd/rat_genes.txt', 'r') as raw: assert rgd.get_version(raw) == 'genes-version-2.2.5' def test_can_parse_data(sequences): with open('data/rgd/rat_genes.txt', 'r') as raw: entries = list(rgd.parse(raw, sequences)) assert len(entries) == 14 Mark another RGD test as failing RGD parsing is degrading more it seems.
# -*- coding: utf-8 -*- """ Copyright [2009-2017] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import pytest from rnacentral_pipeline.databases.rgd.helpers import indexed from rnacentral_pipeline.databases.rgd import parser as rgd @pytest.fixture def sequences(): with indexed('data/rgd/sequences.fa.gz') as sequences: yield sequences def test_can_find_version(): with open('data/rgd/rat_genes.txt', 'r') as raw: assert rgd.get_version(raw) == 'genes-version-2.2.5' @pytest.mark.xfail() def test_can_parse_data(sequences): with open('data/rgd/rat_genes.txt', 'r') as raw: entries = list(rgd.parse(raw, sequences)) assert len(entries) == 14
<commit_before># -*- coding: utf-8 -*- """ Copyright [2009-2017] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import pytest from rnacentral_pipeline.databases.rgd.helpers import indexed from rnacentral_pipeline.databases.rgd import parser as rgd @pytest.fixture def sequences(): with indexed('data/rgd/sequences.fa.gz') as sequences: yield sequences def test_can_find_version(): with open('data/rgd/rat_genes.txt', 'r') as raw: assert rgd.get_version(raw) == 'genes-version-2.2.5' def test_can_parse_data(sequences): with open('data/rgd/rat_genes.txt', 'r') as raw: entries = list(rgd.parse(raw, sequences)) assert len(entries) == 14 <commit_msg>Mark another RGD test as failing RGD parsing is degrading more it seems.<commit_after>
# -*- coding: utf-8 -*- """ Copyright [2009-2017] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import pytest from rnacentral_pipeline.databases.rgd.helpers import indexed from rnacentral_pipeline.databases.rgd import parser as rgd @pytest.fixture def sequences(): with indexed('data/rgd/sequences.fa.gz') as sequences: yield sequences def test_can_find_version(): with open('data/rgd/rat_genes.txt', 'r') as raw: assert rgd.get_version(raw) == 'genes-version-2.2.5' @pytest.mark.xfail() def test_can_parse_data(sequences): with open('data/rgd/rat_genes.txt', 'r') as raw: entries = list(rgd.parse(raw, sequences)) assert len(entries) == 14
# -*- coding: utf-8 -*- """ Copyright [2009-2017] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import pytest from rnacentral_pipeline.databases.rgd.helpers import indexed from rnacentral_pipeline.databases.rgd import parser as rgd @pytest.fixture def sequences(): with indexed('data/rgd/sequences.fa.gz') as sequences: yield sequences def test_can_find_version(): with open('data/rgd/rat_genes.txt', 'r') as raw: assert rgd.get_version(raw) == 'genes-version-2.2.5' def test_can_parse_data(sequences): with open('data/rgd/rat_genes.txt', 'r') as raw: entries = list(rgd.parse(raw, sequences)) assert len(entries) == 14 Mark another RGD test as failing RGD parsing is degrading more it seems.# -*- coding: utf-8 -*- """ Copyright [2009-2017] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import pytest from rnacentral_pipeline.databases.rgd.helpers import indexed from rnacentral_pipeline.databases.rgd import parser as rgd @pytest.fixture def sequences(): with indexed('data/rgd/sequences.fa.gz') as sequences: yield sequences def test_can_find_version(): with open('data/rgd/rat_genes.txt', 'r') as raw: assert rgd.get_version(raw) == 'genes-version-2.2.5' @pytest.mark.xfail() def test_can_parse_data(sequences): with open('data/rgd/rat_genes.txt', 'r') as raw: entries = list(rgd.parse(raw, sequences)) assert len(entries) == 14
<commit_before># -*- coding: utf-8 -*- """ Copyright [2009-2017] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import pytest from rnacentral_pipeline.databases.rgd.helpers import indexed from rnacentral_pipeline.databases.rgd import parser as rgd @pytest.fixture def sequences(): with indexed('data/rgd/sequences.fa.gz') as sequences: yield sequences def test_can_find_version(): with open('data/rgd/rat_genes.txt', 'r') as raw: assert rgd.get_version(raw) == 'genes-version-2.2.5' def test_can_parse_data(sequences): with open('data/rgd/rat_genes.txt', 'r') as raw: entries = list(rgd.parse(raw, sequences)) assert len(entries) == 14 <commit_msg>Mark another RGD test as failing RGD parsing is degrading more it seems.<commit_after># -*- coding: utf-8 -*- """ Copyright [2009-2017] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import pytest from rnacentral_pipeline.databases.rgd.helpers import indexed from rnacentral_pipeline.databases.rgd import parser as rgd @pytest.fixture def sequences(): with indexed('data/rgd/sequences.fa.gz') as sequences: yield sequences def test_can_find_version(): with open('data/rgd/rat_genes.txt', 'r') as raw: assert rgd.get_version(raw) == 'genes-version-2.2.5' @pytest.mark.xfail() def test_can_parse_data(sequences): with open('data/rgd/rat_genes.txt', 'r') as raw: entries = list(rgd.parse(raw, sequences)) assert len(entries) == 14
ab7a546e4a7fb686f61b904777aa26c7d596ff03
pombola/south_africa/lib.py
pombola/south_africa/lib.py
import urlparse def make_pa_url(pombola_object, base_url): parsed_url = list(urlparse.urlparse(base_url)) parsed_url[2] = pombola_object.get_absolute_url() return urlparse.urlunparse(parsed_url) def add_extra_popolo_data_for_person(person, popolo_object, base_url): popolo_object['pa_url'] = make_pa_url(person, base_url) def add_extra_popolo_data_for_organization(organisation, popolo_object, base_url): popolo_object['pa_url'] = make_pa_url(organisation, base_url)
import urlparse def make_pa_url(pombola_object, base_url): parsed_url = list(urlparse.urlparse(base_url)) parsed_url[2] = pombola_object.get_absolute_url() return urlparse.urlunparse(parsed_url) def add_extra_popolo_data_for_person(person, popolo_object, base_url): popolo_object['pa_url'] = make_pa_url(person, base_url) personinterests = person.interests_register_entries.all() if personinterests: interests = {} for entry in personinterests: release = entry.release category = entry.category interests.setdefault(release.name, {}) interests[release.name].setdefault(category.name, []) #assuming no entrylineitems with duplicate keys within an entry entrylineitems = dict((e.key, e.value) for e in entry.line_items.all()) interests[release.name][category.name].append(entrylineitems) popolo_object['interests_register'] = interests def add_extra_popolo_data_for_organization(organisation, popolo_object, base_url): popolo_object['pa_url'] = make_pa_url(organisation, base_url)
Add members interests data to PopIt export
ZA: Add members interests data to PopIt export (Minor refactoring by Mark Longair.)
Python
agpl-3.0
hzj123/56th,mysociety/pombola,geoffkilpin/pombola,geoffkilpin/pombola,hzj123/56th,geoffkilpin/pombola,patricmutwiri/pombola,patricmutwiri/pombola,mysociety/pombola,ken-muturi/pombola,patricmutwiri/pombola,geoffkilpin/pombola,mysociety/pombola,patricmutwiri/pombola,ken-muturi/pombola,ken-muturi/pombola,patricmutwiri/pombola,geoffkilpin/pombola,geoffkilpin/pombola,hzj123/56th,hzj123/56th,ken-muturi/pombola,ken-muturi/pombola,hzj123/56th,mysociety/pombola,ken-muturi/pombola,mysociety/pombola,patricmutwiri/pombola,hzj123/56th,mysociety/pombola
import urlparse def make_pa_url(pombola_object, base_url): parsed_url = list(urlparse.urlparse(base_url)) parsed_url[2] = pombola_object.get_absolute_url() return urlparse.urlunparse(parsed_url) def add_extra_popolo_data_for_person(person, popolo_object, base_url): popolo_object['pa_url'] = make_pa_url(person, base_url) def add_extra_popolo_data_for_organization(organisation, popolo_object, base_url): popolo_object['pa_url'] = make_pa_url(organisation, base_url) ZA: Add members interests data to PopIt export (Minor refactoring by Mark Longair.)
import urlparse def make_pa_url(pombola_object, base_url): parsed_url = list(urlparse.urlparse(base_url)) parsed_url[2] = pombola_object.get_absolute_url() return urlparse.urlunparse(parsed_url) def add_extra_popolo_data_for_person(person, popolo_object, base_url): popolo_object['pa_url'] = make_pa_url(person, base_url) personinterests = person.interests_register_entries.all() if personinterests: interests = {} for entry in personinterests: release = entry.release category = entry.category interests.setdefault(release.name, {}) interests[release.name].setdefault(category.name, []) #assuming no entrylineitems with duplicate keys within an entry entrylineitems = dict((e.key, e.value) for e in entry.line_items.all()) interests[release.name][category.name].append(entrylineitems) popolo_object['interests_register'] = interests def add_extra_popolo_data_for_organization(organisation, popolo_object, base_url): popolo_object['pa_url'] = make_pa_url(organisation, base_url)
<commit_before>import urlparse def make_pa_url(pombola_object, base_url): parsed_url = list(urlparse.urlparse(base_url)) parsed_url[2] = pombola_object.get_absolute_url() return urlparse.urlunparse(parsed_url) def add_extra_popolo_data_for_person(person, popolo_object, base_url): popolo_object['pa_url'] = make_pa_url(person, base_url) def add_extra_popolo_data_for_organization(organisation, popolo_object, base_url): popolo_object['pa_url'] = make_pa_url(organisation, base_url) <commit_msg>ZA: Add members interests data to PopIt export (Minor refactoring by Mark Longair.)<commit_after>
import urlparse def make_pa_url(pombola_object, base_url): parsed_url = list(urlparse.urlparse(base_url)) parsed_url[2] = pombola_object.get_absolute_url() return urlparse.urlunparse(parsed_url) def add_extra_popolo_data_for_person(person, popolo_object, base_url): popolo_object['pa_url'] = make_pa_url(person, base_url) personinterests = person.interests_register_entries.all() if personinterests: interests = {} for entry in personinterests: release = entry.release category = entry.category interests.setdefault(release.name, {}) interests[release.name].setdefault(category.name, []) #assuming no entrylineitems with duplicate keys within an entry entrylineitems = dict((e.key, e.value) for e in entry.line_items.all()) interests[release.name][category.name].append(entrylineitems) popolo_object['interests_register'] = interests def add_extra_popolo_data_for_organization(organisation, popolo_object, base_url): popolo_object['pa_url'] = make_pa_url(organisation, base_url)
import urlparse def make_pa_url(pombola_object, base_url): parsed_url = list(urlparse.urlparse(base_url)) parsed_url[2] = pombola_object.get_absolute_url() return urlparse.urlunparse(parsed_url) def add_extra_popolo_data_for_person(person, popolo_object, base_url): popolo_object['pa_url'] = make_pa_url(person, base_url) def add_extra_popolo_data_for_organization(organisation, popolo_object, base_url): popolo_object['pa_url'] = make_pa_url(organisation, base_url) ZA: Add members interests data to PopIt export (Minor refactoring by Mark Longair.)import urlparse def make_pa_url(pombola_object, base_url): parsed_url = list(urlparse.urlparse(base_url)) parsed_url[2] = pombola_object.get_absolute_url() return urlparse.urlunparse(parsed_url) def add_extra_popolo_data_for_person(person, popolo_object, base_url): popolo_object['pa_url'] = make_pa_url(person, base_url) personinterests = person.interests_register_entries.all() if personinterests: interests = {} for entry in personinterests: release = entry.release category = entry.category interests.setdefault(release.name, {}) interests[release.name].setdefault(category.name, []) #assuming no entrylineitems with duplicate keys within an entry entrylineitems = dict((e.key, e.value) for e in entry.line_items.all()) interests[release.name][category.name].append(entrylineitems) popolo_object['interests_register'] = interests def add_extra_popolo_data_for_organization(organisation, popolo_object, base_url): popolo_object['pa_url'] = make_pa_url(organisation, base_url)
<commit_before>import urlparse def make_pa_url(pombola_object, base_url): parsed_url = list(urlparse.urlparse(base_url)) parsed_url[2] = pombola_object.get_absolute_url() return urlparse.urlunparse(parsed_url) def add_extra_popolo_data_for_person(person, popolo_object, base_url): popolo_object['pa_url'] = make_pa_url(person, base_url) def add_extra_popolo_data_for_organization(organisation, popolo_object, base_url): popolo_object['pa_url'] = make_pa_url(organisation, base_url) <commit_msg>ZA: Add members interests data to PopIt export (Minor refactoring by Mark Longair.)<commit_after>import urlparse def make_pa_url(pombola_object, base_url): parsed_url = list(urlparse.urlparse(base_url)) parsed_url[2] = pombola_object.get_absolute_url() return urlparse.urlunparse(parsed_url) def add_extra_popolo_data_for_person(person, popolo_object, base_url): popolo_object['pa_url'] = make_pa_url(person, base_url) personinterests = person.interests_register_entries.all() if personinterests: interests = {} for entry in personinterests: release = entry.release category = entry.category interests.setdefault(release.name, {}) interests[release.name].setdefault(category.name, []) #assuming no entrylineitems with duplicate keys within an entry entrylineitems = dict((e.key, e.value) for e in entry.line_items.all()) interests[release.name][category.name].append(entrylineitems) popolo_object['interests_register'] = interests def add_extra_popolo_data_for_organization(organisation, popolo_object, base_url): popolo_object['pa_url'] = make_pa_url(organisation, base_url)
aeae023a8b44e48bf52dfc757d3edd7222a4fbc1
rtrss/config_development.py
rtrss/config_development.py
import os ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:postgres@localhost/rtrss_dev' # directory to store runtime data, write access required DATA_DIR = os.path.join(ROOT_DIR, 'data') DEBUG = True SECRET_KEY = 'development key' FILESTORAGE_URL = 'file://{}'.format(DATA_DIR) SERVER_NAME = os.environ.get('C9_HOSTNAME', 'localhost:8080') PORT = int(os.environ.get('C9_PORT', 8080)) IP = os.environ.get('C9_IP', '0.0.0.0')
import os ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:postgres@localhost/rtrss_dev' # directory to store runtime data, write access required DATA_DIR = os.path.join(ROOT_DIR, 'data') DEBUG = True SECRET_KEY = 'development key' FILESTORAGE_URL = 'file://{}'.format(DATA_DIR) PORT = int(os.environ.get('C9_PORT', 8080)) IP = os.environ.get('C9_IP', '0.0.0.0')
Remove HOST_NAME from development config
Remove HOST_NAME from development config
Python
apache-2.0
notapresent/rtrss,notapresent/rtrss,notapresent/rtrss,notapresent/rtrss
import os ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:postgres@localhost/rtrss_dev' # directory to store runtime data, write access required DATA_DIR = os.path.join(ROOT_DIR, 'data') DEBUG = True SECRET_KEY = 'development key' FILESTORAGE_URL = 'file://{}'.format(DATA_DIR) SERVER_NAME = os.environ.get('C9_HOSTNAME', 'localhost:8080') PORT = int(os.environ.get('C9_PORT', 8080)) IP = os.environ.get('C9_IP', '0.0.0.0') Remove HOST_NAME from development config
import os ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:postgres@localhost/rtrss_dev' # directory to store runtime data, write access required DATA_DIR = os.path.join(ROOT_DIR, 'data') DEBUG = True SECRET_KEY = 'development key' FILESTORAGE_URL = 'file://{}'.format(DATA_DIR) PORT = int(os.environ.get('C9_PORT', 8080)) IP = os.environ.get('C9_IP', '0.0.0.0')
<commit_before>import os ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:postgres@localhost/rtrss_dev' # directory to store runtime data, write access required DATA_DIR = os.path.join(ROOT_DIR, 'data') DEBUG = True SECRET_KEY = 'development key' FILESTORAGE_URL = 'file://{}'.format(DATA_DIR) SERVER_NAME = os.environ.get('C9_HOSTNAME', 'localhost:8080') PORT = int(os.environ.get('C9_PORT', 8080)) IP = os.environ.get('C9_IP', '0.0.0.0') <commit_msg>Remove HOST_NAME from development config<commit_after>
import os ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:postgres@localhost/rtrss_dev' # directory to store runtime data, write access required DATA_DIR = os.path.join(ROOT_DIR, 'data') DEBUG = True SECRET_KEY = 'development key' FILESTORAGE_URL = 'file://{}'.format(DATA_DIR) PORT = int(os.environ.get('C9_PORT', 8080)) IP = os.environ.get('C9_IP', '0.0.0.0')
import os ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:postgres@localhost/rtrss_dev' # directory to store runtime data, write access required DATA_DIR = os.path.join(ROOT_DIR, 'data') DEBUG = True SECRET_KEY = 'development key' FILESTORAGE_URL = 'file://{}'.format(DATA_DIR) SERVER_NAME = os.environ.get('C9_HOSTNAME', 'localhost:8080') PORT = int(os.environ.get('C9_PORT', 8080)) IP = os.environ.get('C9_IP', '0.0.0.0') Remove HOST_NAME from development configimport os ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:postgres@localhost/rtrss_dev' # directory to store runtime data, write access required DATA_DIR = os.path.join(ROOT_DIR, 'data') DEBUG = True SECRET_KEY = 'development key' FILESTORAGE_URL = 'file://{}'.format(DATA_DIR) PORT = int(os.environ.get('C9_PORT', 8080)) IP = os.environ.get('C9_IP', '0.0.0.0')
<commit_before>import os ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:postgres@localhost/rtrss_dev' # directory to store runtime data, write access required DATA_DIR = os.path.join(ROOT_DIR, 'data') DEBUG = True SECRET_KEY = 'development key' FILESTORAGE_URL = 'file://{}'.format(DATA_DIR) SERVER_NAME = os.environ.get('C9_HOSTNAME', 'localhost:8080') PORT = int(os.environ.get('C9_PORT', 8080)) IP = os.environ.get('C9_IP', '0.0.0.0') <commit_msg>Remove HOST_NAME from development config<commit_after>import os ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:postgres@localhost/rtrss_dev' # directory to store runtime data, write access required DATA_DIR = os.path.join(ROOT_DIR, 'data') DEBUG = True SECRET_KEY = 'development key' FILESTORAGE_URL = 'file://{}'.format(DATA_DIR) PORT = int(os.environ.get('C9_PORT', 8080)) IP = os.environ.get('C9_IP', '0.0.0.0')
a1b465b81b023e846823e71538dbd2cbaccb2181
django_admin_bootstrapped/templatetags/bootstrapped_goodies_tags.py
django_admin_bootstrapped/templatetags/bootstrapped_goodies_tags.py
from django import template from django.template.loader import render_to_string, TemplateDoesNotExist register = template.Library() @register.simple_tag(takes_context=True) def render_with_template_if_exist(context, template, fallback): text = fallback try: text = render_to_string(template, context) except: pass return text @register.simple_tag(takes_context=True) def language_selector(context): """ displays a language selector dropdown in the admin, based on Django "LANGUAGES" context. requires: * USE_I18N = True / settings.py * LANGUAGES specified / settings.py (otherwise all Django locales will be displayed) * "set_language" url configured (see https://docs.djangoproject.com/en/dev/topics/i18n/translation/#the-set-language-redirect-view) """ output = "" from django.conf import settings i18 = getattr(settings, 'USE_I18N', False) if i18: template = "admin/language_selector.html" context['i18n_is_set'] = True try: output = render_to_string(template, context) except: pass return output @register.filter(name='column_width') def column_width(value): return 12/len(list(value))
from django import template from django.template.loader import render_to_string, TemplateDoesNotExist register = template.Library() @register.simple_tag(takes_context=True) def render_with_template_if_exist(context, template, fallback): text = fallback try: text = render_to_string(template, context) except: pass return text @register.simple_tag(takes_context=True) def language_selector(context): """ displays a language selector dropdown in the admin, based on Django "LANGUAGES" context. requires: * USE_I18N = True / settings.py * LANGUAGES specified / settings.py (otherwise all Django locales will be displayed) * "set_language" url configured (see https://docs.djangoproject.com/en/dev/topics/i18n/translation/#the-set-language-redirect-view) """ output = "" from django.conf import settings i18 = getattr(settings, 'USE_I18N', False) if i18: template = "admin/language_selector.html" context['i18n_is_set'] = True try: output = render_to_string(template, context) except: pass return output @register.filter(name='column_width') def column_width(value): return 12 // len(list(value))
Fix column_width filter in python3
Fix column_width filter in python3 Force integer division otherwise we'll fsck bootstrap classes As seen here: https://gist.github.com/ScreenDriver/86a812b7b3f891fe8649#file-broken_fieldsets
Python
apache-2.0
avara1986/django-admin-bootstrapped,andrewyager/django-admin-bootstrapped,jmagnusson/django-admin-bootstrapped,askinteractive/mezzanine-advanced-admin-new,xrmx/django-admin-bootstrapped,kevingu1003/django-admin-bootstrapped,avara1986/django-admin-bootstrapped,Corner1024/django-admin-bootstrapped,merlian/django-admin-bootstrapped,jmagnusson/django-admin-bootstrapped,askinteractive/mezzanine-advanced-admin,bformet/django-admin-bootstrapped,Corner1024/django-admin-bootstrapped,sn0wolf/django-admin-bootstrapped,IMAmuseum/django-admin-bootstrapped,merlian/django-admin-bootstrapped,NoodleEducation/django-admin-bootstrapped,sn0wolf/django-admin-bootstrapped,squallcs12/django-admin-bootstrapped,avara1986/django-admin-bootstrapped,andrewyager/django-admin-bootstrapped,andrewyager/django-admin-bootstrapped,pombredanne/django-admin-bootstrapped,xrmx/django-admin-bootstrapped,pombredanne/django-admin-bootstrapped,benthomasson/django-admin-bootstrapped,squallcs12/django-admin-bootstrapped,django-admin-bootstrapped/django-admin-bootstrapped,askinteractive/mezzanine-advanced-admin-new,merlian/django-admin-bootstrapped,squallcs12/django-admin-bootstrapped,kevingu1003/django-admin-bootstrapped,mynksngh/django-admin-bootstrapped,askinteractive/mezzanine-advanced-admin-new,bformet/django-admin-bootstrapped,NoodleEducation/django-admin-bootstrapped,IMAmuseum/django-admin-bootstrapped,kevingu1003/django-admin-bootstrapped,django-admin-bootstrapped/django-admin-bootstrapped,django-admin-bootstrapped/django-admin-bootstrapped,benthomasson/django-admin-bootstrapped,IMAmuseum/django-admin-bootstrapped,jmagnusson/django-admin-bootstrapped,mynksngh/django-admin-bootstrapped,benthomasson/django-admin-bootstrapped,xrmx/django-admin-bootstrapped,bformet/django-admin-bootstrapped,sn0wolf/django-admin-bootstrapped,Corner1024/django-admin-bootstrapped,askinteractive/mezzanine-advanced-admin,askinteractive/mezzanine-advanced-admin,NoodleEducation/django-admin-bootstrapped,mynksngh/django-admin-bootstrapped,pombredanne/django-admin-bootstrapped
from django import template from django.template.loader import render_to_string, TemplateDoesNotExist register = template.Library() @register.simple_tag(takes_context=True) def render_with_template_if_exist(context, template, fallback): text = fallback try: text = render_to_string(template, context) except: pass return text @register.simple_tag(takes_context=True) def language_selector(context): """ displays a language selector dropdown in the admin, based on Django "LANGUAGES" context. requires: * USE_I18N = True / settings.py * LANGUAGES specified / settings.py (otherwise all Django locales will be displayed) * "set_language" url configured (see https://docs.djangoproject.com/en/dev/topics/i18n/translation/#the-set-language-redirect-view) """ output = "" from django.conf import settings i18 = getattr(settings, 'USE_I18N', False) if i18: template = "admin/language_selector.html" context['i18n_is_set'] = True try: output = render_to_string(template, context) except: pass return output @register.filter(name='column_width') def column_width(value): return 12/len(list(value))Fix column_width filter in python3 Force integer division otherwise we'll fsck bootstrap classes As seen here: https://gist.github.com/ScreenDriver/86a812b7b3f891fe8649#file-broken_fieldsets
from django import template from django.template.loader import render_to_string, TemplateDoesNotExist register = template.Library() @register.simple_tag(takes_context=True) def render_with_template_if_exist(context, template, fallback): text = fallback try: text = render_to_string(template, context) except: pass return text @register.simple_tag(takes_context=True) def language_selector(context): """ displays a language selector dropdown in the admin, based on Django "LANGUAGES" context. requires: * USE_I18N = True / settings.py * LANGUAGES specified / settings.py (otherwise all Django locales will be displayed) * "set_language" url configured (see https://docs.djangoproject.com/en/dev/topics/i18n/translation/#the-set-language-redirect-view) """ output = "" from django.conf import settings i18 = getattr(settings, 'USE_I18N', False) if i18: template = "admin/language_selector.html" context['i18n_is_set'] = True try: output = render_to_string(template, context) except: pass return output @register.filter(name='column_width') def column_width(value): return 12 // len(list(value))
<commit_before>from django import template from django.template.loader import render_to_string, TemplateDoesNotExist register = template.Library() @register.simple_tag(takes_context=True) def render_with_template_if_exist(context, template, fallback): text = fallback try: text = render_to_string(template, context) except: pass return text @register.simple_tag(takes_context=True) def language_selector(context): """ displays a language selector dropdown in the admin, based on Django "LANGUAGES" context. requires: * USE_I18N = True / settings.py * LANGUAGES specified / settings.py (otherwise all Django locales will be displayed) * "set_language" url configured (see https://docs.djangoproject.com/en/dev/topics/i18n/translation/#the-set-language-redirect-view) """ output = "" from django.conf import settings i18 = getattr(settings, 'USE_I18N', False) if i18: template = "admin/language_selector.html" context['i18n_is_set'] = True try: output = render_to_string(template, context) except: pass return output @register.filter(name='column_width') def column_width(value): return 12/len(list(value))<commit_msg>Fix column_width filter in python3 Force integer division otherwise we'll fsck bootstrap classes As seen here: https://gist.github.com/ScreenDriver/86a812b7b3f891fe8649#file-broken_fieldsets<commit_after>
from django import template from django.template.loader import render_to_string, TemplateDoesNotExist register = template.Library() @register.simple_tag(takes_context=True) def render_with_template_if_exist(context, template, fallback): text = fallback try: text = render_to_string(template, context) except: pass return text @register.simple_tag(takes_context=True) def language_selector(context): """ displays a language selector dropdown in the admin, based on Django "LANGUAGES" context. requires: * USE_I18N = True / settings.py * LANGUAGES specified / settings.py (otherwise all Django locales will be displayed) * "set_language" url configured (see https://docs.djangoproject.com/en/dev/topics/i18n/translation/#the-set-language-redirect-view) """ output = "" from django.conf import settings i18 = getattr(settings, 'USE_I18N', False) if i18: template = "admin/language_selector.html" context['i18n_is_set'] = True try: output = render_to_string(template, context) except: pass return output @register.filter(name='column_width') def column_width(value): return 12 // len(list(value))
from django import template from django.template.loader import render_to_string, TemplateDoesNotExist register = template.Library() @register.simple_tag(takes_context=True) def render_with_template_if_exist(context, template, fallback): text = fallback try: text = render_to_string(template, context) except: pass return text @register.simple_tag(takes_context=True) def language_selector(context): """ displays a language selector dropdown in the admin, based on Django "LANGUAGES" context. requires: * USE_I18N = True / settings.py * LANGUAGES specified / settings.py (otherwise all Django locales will be displayed) * "set_language" url configured (see https://docs.djangoproject.com/en/dev/topics/i18n/translation/#the-set-language-redirect-view) """ output = "" from django.conf import settings i18 = getattr(settings, 'USE_I18N', False) if i18: template = "admin/language_selector.html" context['i18n_is_set'] = True try: output = render_to_string(template, context) except: pass return output @register.filter(name='column_width') def column_width(value): return 12/len(list(value))Fix column_width filter in python3 Force integer division otherwise we'll fsck bootstrap classes As seen here: https://gist.github.com/ScreenDriver/86a812b7b3f891fe8649#file-broken_fieldsetsfrom django import template from django.template.loader import render_to_string, TemplateDoesNotExist register = template.Library() @register.simple_tag(takes_context=True) def render_with_template_if_exist(context, template, fallback): text = fallback try: text = render_to_string(template, context) except: pass return text @register.simple_tag(takes_context=True) def language_selector(context): """ displays a language selector dropdown in the admin, based on Django "LANGUAGES" context. requires: * USE_I18N = True / settings.py * LANGUAGES specified / settings.py (otherwise all Django locales will be displayed) * "set_language" url configured (see https://docs.djangoproject.com/en/dev/topics/i18n/translation/#the-set-language-redirect-view) """ output = "" from django.conf import settings i18 = getattr(settings, 'USE_I18N', False) if i18: template = "admin/language_selector.html" context['i18n_is_set'] = True try: output = render_to_string(template, context) except: pass return output @register.filter(name='column_width') def column_width(value): return 12 // len(list(value))
<commit_before>from django import template from django.template.loader import render_to_string, TemplateDoesNotExist register = template.Library() @register.simple_tag(takes_context=True) def render_with_template_if_exist(context, template, fallback): text = fallback try: text = render_to_string(template, context) except: pass return text @register.simple_tag(takes_context=True) def language_selector(context): """ displays a language selector dropdown in the admin, based on Django "LANGUAGES" context. requires: * USE_I18N = True / settings.py * LANGUAGES specified / settings.py (otherwise all Django locales will be displayed) * "set_language" url configured (see https://docs.djangoproject.com/en/dev/topics/i18n/translation/#the-set-language-redirect-view) """ output = "" from django.conf import settings i18 = getattr(settings, 'USE_I18N', False) if i18: template = "admin/language_selector.html" context['i18n_is_set'] = True try: output = render_to_string(template, context) except: pass return output @register.filter(name='column_width') def column_width(value): return 12/len(list(value))<commit_msg>Fix column_width filter in python3 Force integer division otherwise we'll fsck bootstrap classes As seen here: https://gist.github.com/ScreenDriver/86a812b7b3f891fe8649#file-broken_fieldsets<commit_after>from django import template from django.template.loader import render_to_string, TemplateDoesNotExist register = template.Library() @register.simple_tag(takes_context=True) def render_with_template_if_exist(context, template, fallback): text = fallback try: text = render_to_string(template, context) except: pass return text @register.simple_tag(takes_context=True) def language_selector(context): """ displays a language selector dropdown in the admin, based on Django "LANGUAGES" context. requires: * USE_I18N = True / settings.py * LANGUAGES specified / settings.py (otherwise all Django locales will be displayed) * "set_language" url configured (see https://docs.djangoproject.com/en/dev/topics/i18n/translation/#the-set-language-redirect-view) """ output = "" from django.conf import settings i18 = getattr(settings, 'USE_I18N', False) if i18: template = "admin/language_selector.html" context['i18n_is_set'] = True try: output = render_to_string(template, context) except: pass return output @register.filter(name='column_width') def column_width(value): return 12 // len(list(value))
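Context for the record above: it hinges on a Python 3 behaviour change where "/" is true division and returns a float, so 12/len(value) can yield 2.4 and produce broken Bootstrap grid class names, while "//" keeps the result an integer. A minimal illustration, using a hypothetical five-item fieldset list (not taken from the repository):

# Python 3 division behaviour behind the column_width fix
fieldsets = ['a', 'b', 'c', 'd', 'e']   # hypothetical value passed to the filter
print(12 / len(fieldsets))    # 2.4  -> would render as a class like "span2.4"
print(12 // len(fieldsets))   # 2    -> integer column width, as Bootstrap expects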
055ec832969ed5c875ec7d21320ff344df7956a1
sirius/__init__.py
sirius/__init__.py
import os as _os from . import LI_V00 from . import BO_V901 from . import SI_V07 from . import TI_V00 from . import TS_V500 from . import TB_V300 with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f: __version__ = _f.read().strip() __all__ = ['LI_V00', 'BO_V901', 'SI_V07', 'TI_V00', 'TS_V500', 'TB_V300'] li = LI_V00 tb = TB_V300 bo = BO_V901 ts = TS_V500 si = SI_V07 ti = TI_V00
import os as _os from . import LI_V00 from . import BO_V901 from . import SI_V07 from . import TI_V00 from . import TS_V400 from . import TB_V300 with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f: __version__ = _f.read().strip() __all__ = ['LI_V00', 'BO_V901', 'SI_V07', 'TI_V00', 'TS_V400', 'TB_V300'] li = LI_V00 tb = TB_V300 bo = BO_V901 ts = TS_V400 si = SI_V07 ti = TI_V00
Return TS to V400 for release
Return TS to V400 for release
Python
mit
lnls-fac/sirius
import os as _os from . import LI_V00 from . import BO_V901 from . import SI_V07 from . import TI_V00 from . import TS_V500 from . import TB_V300 with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f: __version__ = _f.read().strip() __all__ = ['LI_V00', 'BO_V901', 'SI_V07', 'TI_V00', 'TS_V500', 'TB_V300'] li = LI_V00 tb = TB_V300 bo = BO_V901 ts = TS_V500 si = SI_V07 ti = TI_V00 Return TS to V400 for release
import os as _os from . import LI_V00 from . import BO_V901 from . import SI_V07 from . import TI_V00 from . import TS_V400 from . import TB_V300 with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f: __version__ = _f.read().strip() __all__ = ['LI_V00', 'BO_V901', 'SI_V07', 'TI_V00', 'TS_V400', 'TB_V300'] li = LI_V00 tb = TB_V300 bo = BO_V901 ts = TS_V400 si = SI_V07 ti = TI_V00
<commit_before>import os as _os from . import LI_V00 from . import BO_V901 from . import SI_V07 from . import TI_V00 from . import TS_V500 from . import TB_V300 with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f: __version__ = _f.read().strip() __all__ = ['LI_V00', 'BO_V901', 'SI_V07', 'TI_V00', 'TS_V500', 'TB_V300'] li = LI_V00 tb = TB_V300 bo = BO_V901 ts = TS_V500 si = SI_V07 ti = TI_V00 <commit_msg>Return TS to V400 for release<commit_after>
import os as _os from . import LI_V00 from . import BO_V901 from . import SI_V07 from . import TI_V00 from . import TS_V400 from . import TB_V300 with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f: __version__ = _f.read().strip() __all__ = ['LI_V00', 'BO_V901', 'SI_V07', 'TI_V00', 'TS_V400', 'TB_V300'] li = LI_V00 tb = TB_V300 bo = BO_V901 ts = TS_V400 si = SI_V07 ti = TI_V00
import os as _os from . import LI_V00 from . import BO_V901 from . import SI_V07 from . import TI_V00 from . import TS_V500 from . import TB_V300 with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f: __version__ = _f.read().strip() __all__ = ['LI_V00', 'BO_V901', 'SI_V07', 'TI_V00', 'TS_V500', 'TB_V300'] li = LI_V00 tb = TB_V300 bo = BO_V901 ts = TS_V500 si = SI_V07 ti = TI_V00 Return TS to V400 for releaseimport os as _os from . import LI_V00 from . import BO_V901 from . import SI_V07 from . import TI_V00 from . import TS_V400 from . import TB_V300 with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f: __version__ = _f.read().strip() __all__ = ['LI_V00', 'BO_V901', 'SI_V07', 'TI_V00', 'TS_V400', 'TB_V300'] li = LI_V00 tb = TB_V300 bo = BO_V901 ts = TS_V400 si = SI_V07 ti = TI_V00
<commit_before>import os as _os from . import LI_V00 from . import BO_V901 from . import SI_V07 from . import TI_V00 from . import TS_V500 from . import TB_V300 with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f: __version__ = _f.read().strip() __all__ = ['LI_V00', 'BO_V901', 'SI_V07', 'TI_V00', 'TS_V500', 'TB_V300'] li = LI_V00 tb = TB_V300 bo = BO_V901 ts = TS_V500 si = SI_V07 ti = TI_V00 <commit_msg>Return TS to V400 for release<commit_after>import os as _os from . import LI_V00 from . import BO_V901 from . import SI_V07 from . import TI_V00 from . import TS_V400 from . import TB_V300 with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f: __version__ = _f.read().strip() __all__ = ['LI_V00', 'BO_V901', 'SI_V07', 'TI_V00', 'TS_V400', 'TB_V300'] li = LI_V00 tb = TB_V300 bo = BO_V901 ts = TS_V400 si = SI_V07 ti = TI_V00
156b7363ff51532cddbb8ce1c7a5e6b8a3c7cc0a
accounts/tests/test_views.py
accounts/tests/test_views.py
"""accounts app unittests for views """ from django.test import TestCase class WelcomePageTest(TestCase): """Tests relating to the welcome_page view. """ def test_uses_welcome_template(self): """The root url should response with the welcome page template. """ response = self.client.get('/') self.assertTemplateUsed(response, 'accounts/welcome.html')
"""accounts app unittests for views """ from django.test import TestCase from django.urls import reverse class WelcomePageTest(TestCase): """Tests relating to the welcome_page view. """ def test_uses_welcome_template(self): """The root url should respond with the welcome page template. """ response = self.client.get('/') self.assertTemplateUsed(response, 'accounts/welcome.html') class SendLoginEmailTest(TestCase): """Tests for the view which sends the login email. """ def setUp(self): self.url = reverse('send_login_email') self.test_email = 'newvisitor@example.com' def test_uses_emailsent_template(self): """The send_login_email url responds with login_email_sent template. """ response = self.client.post(self.url, data={'email': self.test_email}) self.assertTemplateUsed(response, 'accounts/login_email_sent.html')
Add test for send login email view
Add test for send login email view
Python
mit
randomic/aniauth-tdd,randomic/aniauth-tdd
"""accounts app unittests for views """ from django.test import TestCase class WelcomePageTest(TestCase): """Tests relating to the welcome_page view. """ def test_uses_welcome_template(self): """The root url should response with the welcome page template. """ response = self.client.get('/') self.assertTemplateUsed(response, 'accounts/welcome.html') Add test for send login email view
"""accounts app unittests for views """ from django.test import TestCase from django.urls import reverse class WelcomePageTest(TestCase): """Tests relating to the welcome_page view. """ def test_uses_welcome_template(self): """The root url should respond with the welcome page template. """ response = self.client.get('/') self.assertTemplateUsed(response, 'accounts/welcome.html') class SendLoginEmailTest(TestCase): """Tests for the view which sends the login email. """ def setUp(self): self.url = reverse('send_login_email') self.test_email = 'newvisitor@example.com' def test_uses_emailsent_template(self): """The send_login_email url responds with login_email_sent template. """ response = self.client.post(self.url, data={'email': self.test_email}) self.assertTemplateUsed(response, 'accounts/login_email_sent.html')
<commit_before>"""accounts app unittests for views """ from django.test import TestCase class WelcomePageTest(TestCase): """Tests relating to the welcome_page view. """ def test_uses_welcome_template(self): """The root url should response with the welcome page template. """ response = self.client.get('/') self.assertTemplateUsed(response, 'accounts/welcome.html') <commit_msg>Add test for send login email view<commit_after>
"""accounts app unittests for views """ from django.test import TestCase from django.urls import reverse class WelcomePageTest(TestCase): """Tests relating to the welcome_page view. """ def test_uses_welcome_template(self): """The root url should respond with the welcome page template. """ response = self.client.get('/') self.assertTemplateUsed(response, 'accounts/welcome.html') class SendLoginEmailTest(TestCase): """Tests for the view which sends the login email. """ def setUp(self): self.url = reverse('send_login_email') self.test_email = 'newvisitor@example.com' def test_uses_emailsent_template(self): """The send_login_email url responds with login_email_sent template. """ response = self.client.post(self.url, data={'email': self.test_email}) self.assertTemplateUsed(response, 'accounts/login_email_sent.html')
"""accounts app unittests for views """ from django.test import TestCase class WelcomePageTest(TestCase): """Tests relating to the welcome_page view. """ def test_uses_welcome_template(self): """The root url should response with the welcome page template. """ response = self.client.get('/') self.assertTemplateUsed(response, 'accounts/welcome.html') Add test for send login email view"""accounts app unittests for views """ from django.test import TestCase from django.urls import reverse class WelcomePageTest(TestCase): """Tests relating to the welcome_page view. """ def test_uses_welcome_template(self): """The root url should respond with the welcome page template. """ response = self.client.get('/') self.assertTemplateUsed(response, 'accounts/welcome.html') class SendLoginEmailTest(TestCase): """Tests for the view which sends the login email. """ def setUp(self): self.url = reverse('send_login_email') self.test_email = 'newvisitor@example.com' def test_uses_emailsent_template(self): """The send_login_email url responds with login_email_sent template. """ response = self.client.post(self.url, data={'email': self.test_email}) self.assertTemplateUsed(response, 'accounts/login_email_sent.html')
<commit_before>"""accounts app unittests for views """ from django.test import TestCase class WelcomePageTest(TestCase): """Tests relating to the welcome_page view. """ def test_uses_welcome_template(self): """The root url should response with the welcome page template. """ response = self.client.get('/') self.assertTemplateUsed(response, 'accounts/welcome.html') <commit_msg>Add test for send login email view<commit_after>"""accounts app unittests for views """ from django.test import TestCase from django.urls import reverse class WelcomePageTest(TestCase): """Tests relating to the welcome_page view. """ def test_uses_welcome_template(self): """The root url should respond with the welcome page template. """ response = self.client.get('/') self.assertTemplateUsed(response, 'accounts/welcome.html') class SendLoginEmailTest(TestCase): """Tests for the view which sends the login email. """ def setUp(self): self.url = reverse('send_login_email') self.test_email = 'newvisitor@example.com' def test_uses_emailsent_template(self): """The send_login_email url responds with login_email_sent template. """ response = self.client.post(self.url, data={'email': self.test_email}) self.assertTemplateUsed(response, 'accounts/login_email_sent.html')
000c583cc9f8eec4b0904669dd98d98d8c7df8d7
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup, find_packages from imp import load_source setup( name='cmis', version=load_source('', 'cmis/_version.py').__version__, description='A server architecture built on top of a solid foundation ' 'provided by flask, sqlalchemy, and various extensions.', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.3', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Utilities', ], author='Concordus Applications', author_email='support@concordusapps.com', url='http://github.com/concordusapps/alchemist', packages=find_packages('.'), entry_points={'pytest11': ['alchemist = alchemist.plugin']}, dependency_links=[ 'git+git://github.com/concordusapps/python-cmislib.git@master' '#egg=cmislib-dev', ], install_requires=[ "cmislib == dev" ], )
#!/usr/bin/env python from setuptools import setup, find_packages from imp import load_source setup( name='cmis', version=load_source('', 'cmis/_version.py').__version__, description='A server architecture built on top of a solid foundation ' 'provided by flask, sqlalchemy, and various extensions.', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.3', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Utilities', ], author='Concordus Applications', author_email='support@concordusapps.com', url='http://github.com/concordusapps/alchemist', packages=find_packages('.'), entry_points={'pytest11': ['alchemist = alchemist.plugin']}, dependency_links=[ 'git+git://github.com/concordusapps/python-cmislib.git@topics/py3k' '#egg=cmislib-dev', ], install_requires=[ "cmislib == dev" ], )
Fix cmislib branch for py3k
Fix cmislib branch for py3k
Python
mit
concordusapps/python-cmis
#!/usr/bin/env python from setuptools import setup, find_packages from imp import load_source setup( name='cmis', version=load_source('', 'cmis/_version.py').__version__, description='A server architecture built on top of a solid foundation ' 'provided by flask, sqlalchemy, and various extensions.', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.3', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Utilities', ], author='Concordus Applications', author_email='support@concordusapps.com', url='http://github.com/concordusapps/alchemist', packages=find_packages('.'), entry_points={'pytest11': ['alchemist = alchemist.plugin']}, dependency_links=[ 'git+git://github.com/concordusapps/python-cmislib.git@master' '#egg=cmislib-dev', ], install_requires=[ "cmislib == dev" ], ) Fix cmislib branch for py3k
#!/usr/bin/env python from setuptools import setup, find_packages from imp import load_source setup( name='cmis', version=load_source('', 'cmis/_version.py').__version__, description='A server architecture built on top of a solid foundation ' 'provided by flask, sqlalchemy, and various extensions.', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.3', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Utilities', ], author='Concordus Applications', author_email='support@concordusapps.com', url='http://github.com/concordusapps/alchemist', packages=find_packages('.'), entry_points={'pytest11': ['alchemist = alchemist.plugin']}, dependency_links=[ 'git+git://github.com/concordusapps/python-cmislib.git@topics/py3k' '#egg=cmislib-dev', ], install_requires=[ "cmislib == dev" ], )
<commit_before>#!/usr/bin/env python from setuptools import setup, find_packages from imp import load_source setup( name='cmis', version=load_source('', 'cmis/_version.py').__version__, description='A server architecture built on top of a solid foundation ' 'provided by flask, sqlalchemy, and various extensions.', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.3', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Utilities', ], author='Concordus Applications', author_email='support@concordusapps.com', url='http://github.com/concordusapps/alchemist', packages=find_packages('.'), entry_points={'pytest11': ['alchemist = alchemist.plugin']}, dependency_links=[ 'git+git://github.com/concordusapps/python-cmislib.git@master' '#egg=cmislib-dev', ], install_requires=[ "cmislib == dev" ], ) <commit_msg>Fix cmislib branch for py3k<commit_after>
#!/usr/bin/env python from setuptools import setup, find_packages from imp import load_source setup( name='cmis', version=load_source('', 'cmis/_version.py').__version__, description='A server architecture built on top of a solid foundation ' 'provided by flask, sqlalchemy, and various extensions.', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.3', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Utilities', ], author='Concordus Applications', author_email='support@concordusapps.com', url='http://github.com/concordusapps/alchemist', packages=find_packages('.'), entry_points={'pytest11': ['alchemist = alchemist.plugin']}, dependency_links=[ 'git+git://github.com/concordusapps/python-cmislib.git@topics/py3k' '#egg=cmislib-dev', ], install_requires=[ "cmislib == dev" ], )
#!/usr/bin/env python from setuptools import setup, find_packages from imp import load_source setup( name='cmis', version=load_source('', 'cmis/_version.py').__version__, description='A server architecture built on top of a solid foundation ' 'provided by flask, sqlalchemy, and various extensions.', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.3', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Utilities', ], author='Concordus Applications', author_email='support@concordusapps.com', url='http://github.com/concordusapps/alchemist', packages=find_packages('.'), entry_points={'pytest11': ['alchemist = alchemist.plugin']}, dependency_links=[ 'git+git://github.com/concordusapps/python-cmislib.git@master' '#egg=cmislib-dev', ], install_requires=[ "cmislib == dev" ], ) Fix cmislib branch for py3k#!/usr/bin/env python from setuptools import setup, find_packages from imp import load_source setup( name='cmis', version=load_source('', 'cmis/_version.py').__version__, description='A server architecture built on top of a solid foundation ' 'provided by flask, sqlalchemy, and various extensions.', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.3', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Utilities', ], author='Concordus Applications', author_email='support@concordusapps.com', url='http://github.com/concordusapps/alchemist', packages=find_packages('.'), entry_points={'pytest11': ['alchemist = alchemist.plugin']}, dependency_links=[ 'git+git://github.com/concordusapps/python-cmislib.git@topics/py3k' '#egg=cmislib-dev', ], install_requires=[ "cmislib == dev" ], )
<commit_before>#!/usr/bin/env python from setuptools import setup, find_packages from imp import load_source setup( name='cmis', version=load_source('', 'cmis/_version.py').__version__, description='A server architecture built on top of a solid foundation ' 'provided by flask, sqlalchemy, and various extensions.', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.3', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Utilities', ], author='Concordus Applications', author_email='support@concordusapps.com', url='http://github.com/concordusapps/alchemist', packages=find_packages('.'), entry_points={'pytest11': ['alchemist = alchemist.plugin']}, dependency_links=[ 'git+git://github.com/concordusapps/python-cmislib.git@master' '#egg=cmislib-dev', ], install_requires=[ "cmislib == dev" ], ) <commit_msg>Fix cmislib branch for py3k<commit_after>#!/usr/bin/env python from setuptools import setup, find_packages from imp import load_source setup( name='cmis', version=load_source('', 'cmis/_version.py').__version__, description='A server architecture built on top of a solid foundation ' 'provided by flask, sqlalchemy, and various extensions.', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.3', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Utilities', ], author='Concordus Applications', author_email='support@concordusapps.com', url='http://github.com/concordusapps/alchemist', packages=find_packages('.'), entry_points={'pytest11': ['alchemist = alchemist.plugin']}, dependency_links=[ 'git+git://github.com/concordusapps/python-cmislib.git@topics/py3k' '#egg=cmislib-dev', ], install_requires=[ "cmislib == dev" ], )
e771aa55c3644b3e405dd53dfc72235de4d37109
setup.py
setup.py
from setuptools import setup, find_packages setup( name="elasticmagic", version="0.0.0a0", author="Alexander Koval", author_email="kovalidis@gmail.com", description=("Python orm for elasticsearch."), license="Apache License 2.0", keywords="elasticsearch dsl", url="https://github.com/anti-social/elasticmagic", packages=find_packages(exclude=["tests"]), install_requires=[ "elasticsearch>=6.0.0,<8.0", "python-dateutil", ], extras_require={ "geo": [ "python-geohash", ], "async": [ "elasticsearch-py-async", ], }, classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Internet :: WWW/HTTP :: Indexing/Search", "Topic :: Software Development :: Libraries :: Python Modules", ], )
from setuptools import setup, find_packages setup( name="elasticmagic", version="0.0.0a0", author="Alexander Koval", author_email="kovalidis@gmail.com", description=("Python orm for elasticsearch."), license="Apache License 2.0", keywords="elasticsearch dsl", url="https://github.com/anti-social/elasticmagic", packages=find_packages(exclude=["tests"]), install_requires=[ "elasticsearch", "python-dateutil", ], extras_require={ "geo": [ "python-geohash", ], "async": [ "elasticsearch-py-async", ], }, classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Internet :: WWW/HTTP :: Indexing/Search", "Topic :: Software Development :: Libraries :: Python Modules", ], )
Remove version bounds for elasticsearch dependency
Remove version bounds for elasticsearch dependency
Python
apache-2.0
anti-social/elasticmagic,anti-social/elasticmagic
from setuptools import setup, find_packages setup( name="elasticmagic", version="0.0.0a0", author="Alexander Koval", author_email="kovalidis@gmail.com", description=("Python orm for elasticsearch."), license="Apache License 2.0", keywords="elasticsearch dsl", url="https://github.com/anti-social/elasticmagic", packages=find_packages(exclude=["tests"]), install_requires=[ "elasticsearch>=6.0.0,<8.0", "python-dateutil", ], extras_require={ "geo": [ "python-geohash", ], "async": [ "elasticsearch-py-async", ], }, classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Internet :: WWW/HTTP :: Indexing/Search", "Topic :: Software Development :: Libraries :: Python Modules", ], ) Remove version bounds for elasticsearch dependency
from setuptools import setup, find_packages setup( name="elasticmagic", version="0.0.0a0", author="Alexander Koval", author_email="kovalidis@gmail.com", description=("Python orm for elasticsearch."), license="Apache License 2.0", keywords="elasticsearch dsl", url="https://github.com/anti-social/elasticmagic", packages=find_packages(exclude=["tests"]), install_requires=[ "elasticsearch", "python-dateutil", ], extras_require={ "geo": [ "python-geohash", ], "async": [ "elasticsearch-py-async", ], }, classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Internet :: WWW/HTTP :: Indexing/Search", "Topic :: Software Development :: Libraries :: Python Modules", ], )
<commit_before>from setuptools import setup, find_packages setup( name="elasticmagic", version="0.0.0a0", author="Alexander Koval", author_email="kovalidis@gmail.com", description=("Python orm for elasticsearch."), license="Apache License 2.0", keywords="elasticsearch dsl", url="https://github.com/anti-social/elasticmagic", packages=find_packages(exclude=["tests"]), install_requires=[ "elasticsearch>=6.0.0,<8.0", "python-dateutil", ], extras_require={ "geo": [ "python-geohash", ], "async": [ "elasticsearch-py-async", ], }, classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Internet :: WWW/HTTP :: Indexing/Search", "Topic :: Software Development :: Libraries :: Python Modules", ], ) <commit_msg>Remove version bounds for elasticsearch dependency<commit_after>
from setuptools import setup, find_packages setup( name="elasticmagic", version="0.0.0a0", author="Alexander Koval", author_email="kovalidis@gmail.com", description=("Python orm for elasticsearch."), license="Apache License 2.0", keywords="elasticsearch dsl", url="https://github.com/anti-social/elasticmagic", packages=find_packages(exclude=["tests"]), install_requires=[ "elasticsearch", "python-dateutil", ], extras_require={ "geo": [ "python-geohash", ], "async": [ "elasticsearch-py-async", ], }, classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Internet :: WWW/HTTP :: Indexing/Search", "Topic :: Software Development :: Libraries :: Python Modules", ], )
from setuptools import setup, find_packages setup( name="elasticmagic", version="0.0.0a0", author="Alexander Koval", author_email="kovalidis@gmail.com", description=("Python orm for elasticsearch."), license="Apache License 2.0", keywords="elasticsearch dsl", url="https://github.com/anti-social/elasticmagic", packages=find_packages(exclude=["tests"]), install_requires=[ "elasticsearch>=6.0.0,<8.0", "python-dateutil", ], extras_require={ "geo": [ "python-geohash", ], "async": [ "elasticsearch-py-async", ], }, classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Internet :: WWW/HTTP :: Indexing/Search", "Topic :: Software Development :: Libraries :: Python Modules", ], ) Remove version bounds for elasticsearch dependencyfrom setuptools import setup, find_packages setup( name="elasticmagic", version="0.0.0a0", author="Alexander Koval", author_email="kovalidis@gmail.com", description=("Python orm for elasticsearch."), license="Apache License 2.0", keywords="elasticsearch dsl", url="https://github.com/anti-social/elasticmagic", packages=find_packages(exclude=["tests"]), install_requires=[ "elasticsearch", "python-dateutil", ], extras_require={ "geo": [ "python-geohash", ], "async": [ "elasticsearch-py-async", ], }, classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Internet :: WWW/HTTP :: Indexing/Search", "Topic :: Software Development :: Libraries :: Python Modules", ], )
<commit_before>from setuptools import setup, find_packages setup( name="elasticmagic", version="0.0.0a0", author="Alexander Koval", author_email="kovalidis@gmail.com", description=("Python orm for elasticsearch."), license="Apache License 2.0", keywords="elasticsearch dsl", url="https://github.com/anti-social/elasticmagic", packages=find_packages(exclude=["tests"]), install_requires=[ "elasticsearch>=6.0.0,<8.0", "python-dateutil", ], extras_require={ "geo": [ "python-geohash", ], "async": [ "elasticsearch-py-async", ], }, classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Internet :: WWW/HTTP :: Indexing/Search", "Topic :: Software Development :: Libraries :: Python Modules", ], ) <commit_msg>Remove version bounds for elasticsearch dependency<commit_after>from setuptools import setup, find_packages setup( name="elasticmagic", version="0.0.0a0", author="Alexander Koval", author_email="kovalidis@gmail.com", description=("Python orm for elasticsearch."), license="Apache License 2.0", keywords="elasticsearch dsl", url="https://github.com/anti-social/elasticmagic", packages=find_packages(exclude=["tests"]), install_requires=[ "elasticsearch", "python-dateutil", ], extras_require={ "geo": [ "python-geohash", ], "async": [ "elasticsearch-py-async", ], }, classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Internet :: WWW/HTTP :: Indexing/Search", "Topic :: Software Development :: Libraries :: Python Modules", ], )
5d52ccca4be5cc08ecedf1063712a1fa917ccbc8
setup.py
setup.py
import os from setuptools import setup, find_packages cdir = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(cdir, 'readme.rst')).read() from keg_bouncer.version import VERSION setup( name='KegBouncer', version=VERSION, description='A three-tiered permissions model for KegElements built atop Flask-User', author='Level 12', author_email='devteam@level12.io', url='https://github.com/level12/keg-bouncer', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.5', ], packages=find_packages(exclude=['keg_bouncer_test_app*']), include_package_data=True, zip_safe=False, install_requires=[ 'Flask-Login', 'Keg', 'KegElements', 'cryptography', 'six', 'SQLAlchemy', 'wrapt', ], long_descripton=README, )
import os from setuptools import setup, find_packages cdir = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(cdir, 'readme.rst')).read() setup( name='KegBouncer', setup_requires=['setuptools_scm'], use_scm_version=True, description='A three-tiered permissions model for KegElements built atop Flask-User', long_description=README, author='Level 12', author_email='devteam@level12.io', url='https://github.com/level12/keg-bouncer', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.5', ], packages=find_packages(exclude=['keg_bouncer_test_app*']), include_package_data=True, zip_safe=False, install_requires=[ 'Flask-Login', 'Keg', 'KegElements', 'cryptography', 'six', 'SQLAlchemy', 'wrapt', ], )
Use git version tag for version
Use git version tag for version
Python
bsd-3-clause
level12/keg-bouncer,level12/keg-bouncer
import os from setuptools import setup, find_packages cdir = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(cdir, 'readme.rst')).read() from keg_bouncer.version import VERSION setup( name='KegBouncer', version=VERSION, description='A three-tiered permissions model for KegElements built atop Flask-User', author='Level 12', author_email='devteam@level12.io', url='https://github.com/level12/keg-bouncer', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.5', ], packages=find_packages(exclude=['keg_bouncer_test_app*']), include_package_data=True, zip_safe=False, install_requires=[ 'Flask-Login', 'Keg', 'KegElements', 'cryptography', 'six', 'SQLAlchemy', 'wrapt', ], long_descripton=README, ) Use git version tag for version
import os from setuptools import setup, find_packages cdir = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(cdir, 'readme.rst')).read() setup( name='KegBouncer', setup_requires=['setuptools_scm'], use_scm_version=True, description='A three-tiered permissions model for KegElements built atop Flask-User', long_description=README, author='Level 12', author_email='devteam@level12.io', url='https://github.com/level12/keg-bouncer', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.5', ], packages=find_packages(exclude=['keg_bouncer_test_app*']), include_package_data=True, zip_safe=False, install_requires=[ 'Flask-Login', 'Keg', 'KegElements', 'cryptography', 'six', 'SQLAlchemy', 'wrapt', ], )
<commit_before>import os from setuptools import setup, find_packages cdir = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(cdir, 'readme.rst')).read() from keg_bouncer.version import VERSION setup( name='KegBouncer', version=VERSION, description='A three-tiered permissions model for KegElements built atop Flask-User', author='Level 12', author_email='devteam@level12.io', url='https://github.com/level12/keg-bouncer', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.5', ], packages=find_packages(exclude=['keg_bouncer_test_app*']), include_package_data=True, zip_safe=False, install_requires=[ 'Flask-Login', 'Keg', 'KegElements', 'cryptography', 'six', 'SQLAlchemy', 'wrapt', ], long_descripton=README, ) <commit_msg>Use git version tag for version<commit_after>
import os from setuptools import setup, find_packages cdir = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(cdir, 'readme.rst')).read() setup( name='KegBouncer', setup_requires=['setuptools_scm'], use_scm_version=True, description='A three-tiered permissions model for KegElements built atop Flask-User', long_description=README, author='Level 12', author_email='devteam@level12.io', url='https://github.com/level12/keg-bouncer', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.5', ], packages=find_packages(exclude=['keg_bouncer_test_app*']), include_package_data=True, zip_safe=False, install_requires=[ 'Flask-Login', 'Keg', 'KegElements', 'cryptography', 'six', 'SQLAlchemy', 'wrapt', ], )
import os from setuptools import setup, find_packages cdir = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(cdir, 'readme.rst')).read() from keg_bouncer.version import VERSION setup( name='KegBouncer', version=VERSION, description='A three-tiered permissions model for KegElements built atop Flask-User', author='Level 12', author_email='devteam@level12.io', url='https://github.com/level12/keg-bouncer', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.5', ], packages=find_packages(exclude=['keg_bouncer_test_app*']), include_package_data=True, zip_safe=False, install_requires=[ 'Flask-Login', 'Keg', 'KegElements', 'cryptography', 'six', 'SQLAlchemy', 'wrapt', ], long_descripton=README, ) Use git version tag for versionimport os from setuptools import setup, find_packages cdir = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(cdir, 'readme.rst')).read() setup( name='KegBouncer', setup_requires=['setuptools_scm'], use_scm_version=True, description='A three-tiered permissions model for KegElements built atop Flask-User', long_description=README, author='Level 12', author_email='devteam@level12.io', url='https://github.com/level12/keg-bouncer', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.5', ], packages=find_packages(exclude=['keg_bouncer_test_app*']), include_package_data=True, zip_safe=False, install_requires=[ 'Flask-Login', 'Keg', 'KegElements', 'cryptography', 'six', 'SQLAlchemy', 'wrapt', ], )
<commit_before>import os from setuptools import setup, find_packages cdir = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(cdir, 'readme.rst')).read() from keg_bouncer.version import VERSION setup( name='KegBouncer', version=VERSION, description='A three-tiered permissions model for KegElements built atop Flask-User', author='Level 12', author_email='devteam@level12.io', url='https://github.com/level12/keg-bouncer', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.5', ], packages=find_packages(exclude=['keg_bouncer_test_app*']), include_package_data=True, zip_safe=False, install_requires=[ 'Flask-Login', 'Keg', 'KegElements', 'cryptography', 'six', 'SQLAlchemy', 'wrapt', ], long_descripton=README, ) <commit_msg>Use git version tag for version<commit_after>import os from setuptools import setup, find_packages cdir = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(cdir, 'readme.rst')).read() setup( name='KegBouncer', setup_requires=['setuptools_scm'], use_scm_version=True, description='A three-tiered permissions model for KegElements built atop Flask-User', long_description=README, author='Level 12', author_email='devteam@level12.io', url='https://github.com/level12/keg-bouncer', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.5', ], packages=find_packages(exclude=['keg_bouncer_test_app*']), include_package_data=True, zip_safe=False, install_requires=[ 'Flask-Login', 'Keg', 'KegElements', 'cryptography', 'six', 'SQLAlchemy', 'wrapt', ], )
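Context for the record above: with setup_requires=['setuptools_scm'] and use_scm_version=True, the package version is computed from git tags at build time instead of a hard-coded VERSION constant. A rough sketch of that lookup, with illustrative tag names that are assumptions rather than values from the repository:

# setuptools_scm derives the version from the most recent git tag
from setuptools_scm import get_version

# On a commit tagged "0.2.0" this returns "0.2.0"; on later untagged commits it
# returns a dev version such as "0.2.1.dev3+g<short-hash>".
print(get_version())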
5fcd68e1088a4873abea4f3fa06fbc34dbc677ff
setup.py
setup.py
#!/usr/bin/env python

from ez_setup import use_setuptools
use_setuptools()

from setuptools import setup
import re

main_py = open('flatcat/__init__.py').read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", main_py))

requires = [
    'morfessor',
]

setup(name='Morfessor FlatCat',
      version=metadata['version'],
      author=metadata['author'],
      author_email='morfessor@cis.hut.fi',
      url='http://www.cis.hut.fi/projects/morpho/',
      description='Morfessor FlatCat',
      packages=['flatcat', 'flatcat.tests'],
      classifiers=[
          'Development Status :: 4 - Beta',
          'Intended Audience :: Science/Research',
          'License :: OSI Approved :: BSD License',
          'Operating System :: OS Independent',
          'Programming Language :: Python',
          'Topic :: Scientific/Engineering',
      ],
      license="BSD",
      scripts=['scripts/flatcat',
               'scripts/flatcat-train',
               'scripts/flatcat-segment',
               'scripts/flatcat-diagnostics',
               'scripts/flatcat-reformat'
               ],
      install_requires=requires,
      #extras_require={
      #    'docs': [l.strip() for l in open('docs/build_requirements.txt')]
      #}
      )
#!/usr/bin/env python

from ez_setup import use_setuptools
use_setuptools()

from setuptools import setup
import re

main_py = open('flatcat/__init__.py').read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", main_py))

requires = [
    'morfessor',
]

setup(name='Morfessor FlatCat',
      version=metadata['version'],
      author=metadata['author'],
      author_email='morfessor@cis.hut.fi',
      url='http://www.cis.hut.fi/projects/morpho/',
      description='Morfessor FlatCat',
      packages=['flatcat', 'flatcat.tests'],
      classifiers=[
          'Development Status :: 4 - Beta',
          'Intended Audience :: Science/Research',
          'License :: OSI Approved :: BSD License',
          'Operating System :: OS Independent',
          'Programming Language :: Python',
          'Topic :: Scientific/Engineering',
      ],
      license="BSD",
      scripts=['scripts/flatcat',
               'scripts/flatcat-train',
               'scripts/flatcat-segment',
               'scripts/flatcat-diagnostics',
               'scripts/flatcat-reformat'
               ],
      install_requires=requires,
      extras_require={
          'docs': [l.strip() for l in open('docs/build_requirements.txt')]
      }
      )
Revert "Fix to improperly disabled docs"
Revert "Fix to improperly disabled docs" This reverts commit 8bc704f6272ccfebd48f7282e02420a56d8e934d.
Python
bsd-2-clause
aalto-speech/flatcat
d78270bd415988180f11413d739086e560516464
setup.py
setup.py
from __future__ import print_function, absolute_import, division


from ast import literal_eval
from setuptools import setup


def get_version(source='petl/__init__.py'):
    with open(source) as f:
        for line in f:
            if line.startswith('__version__'):
                return literal_eval(line.split('=')[-1].lstrip())
    raise ValueError("__version__ not found")


setup(
    name='petl',
    version=get_version(),
    author='Alistair Miles',
    author_email='alimanfoo@googlemail.com',
    package_dir={'': '.'},
    packages=['petl', 'petl.io', 'petl.transform', 'petl.util',
              'petl.test', 'petl.test.io', 'petl.test.transform',
              'petl.test.util'],
    scripts=['bin/petl'],
    url='https://github.com/alimanfoo/petl',
    license='MIT License',
    description='A Python package for extracting, transforming and loading '
                'tables of data.',
    long_description=open('README.txt').read(),
    classifiers=['Intended Audience :: Developers',
                 'License :: OSI Approved :: MIT License',
                 'Programming Language :: Python :: 2',
                 'Programming Language :: Python :: 2.7',
                 'Programming Language :: Python :: 3',
                 'Programming Language :: Python :: 3.4',
                 'Programming Language :: Python :: 3.5',
                 'Topic :: Software Development :: Libraries :: Python Modules'
                 ]
)
from __future__ import print_function, absolute_import, division


from ast import literal_eval
from setuptools import setup


def get_version(source='petl/__init__.py'):
    with open(source) as f:
        for line in f:
            if line.startswith('__version__'):
                return literal_eval(line.split('=')[-1].lstrip())
    raise ValueError("__version__ not found")


setup(
    name='petl',
    version=get_version(),
    author='Alistair Miles',
    author_email='alimanfoo@googlemail.com',
    package_dir={'': '.'},
    packages=['petl', 'petl.io', 'petl.transform', 'petl.util',
              'petl.test', 'petl.test.io', 'petl.test.transform',
              'petl.test.util'],
    scripts=['bin/petl'],
    url='https://github.com/alimanfoo/petl',
    license='MIT License',
    description='A Python package for extracting, transforming and loading '
                'tables of data.',
    long_description=open('README.txt').read(),
    python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
    classifiers=['Intended Audience :: Developers',
                 'License :: OSI Approved :: MIT License',
                 'Programming Language :: Python :: 2',
                 'Programming Language :: Python :: 2.7',
                 'Programming Language :: Python :: 3',
                 'Programming Language :: Python :: 3.4',
                 'Programming Language :: Python :: 3.5',
                 'Topic :: Software Development :: Libraries :: Python Modules'
                 ]
)
Add python_requires to help pip
Add python_requires to help pip
Python
mit
alimanfoo/petl,psnj/petl,Marketing1by1/petl
876d7e0e03706b21f2d2de93e31289ed4cf30fd5
setup.py
setup.py
# -*- coding: utf-8 -*-

from setuptools import setup, find_packages

classifiers = [
    "Development Status :: 4 - Production/Beta",
    "Intended Audience :: System Administrators",
    "License :: OSI Approved :: Apache Software License",
    "Programming Language :: Python",
    "Programming Language :: Python :: 2.7",
    "Topic :: Software Development",
    "Topic :: Software Development :: Version Control",
    "Topic :: Text Processing :: Filters",
]

setup(
    name='diff-highlight',
    version='0.1.0',
    description='pretty diff highlighter; emphasis changed words in diff',
    long_description=open("README.rst").read(),
    long_description='',
    classifiers=classifiers,
    keywords=['mercurial', 'git', 'diff', 'highlight'],
    author='Takeshi Komiya',
    author_email='i.tkomiya at gmail.com',
    url='http://blockdiag.com/',
    download_url='http://pypi.python.org/pypi/diff-highlight',
    license='Apache License 2.0',
    py_modules=['diff_highlight'],
    packates=find_packages('src'),
    package_dir={'': 'src'},
    include_package_data=True,
)
# -*- coding: utf-8 -*-

from setuptools import setup, find_packages

classifiers = [
    "Development Status :: 4 - Production/Beta",
    "Intended Audience :: System Administrators",
    "License :: OSI Approved :: Apache Software License",
    "Programming Language :: Python",
    "Programming Language :: Python :: 2.7",
    "Topic :: Software Development",
    "Topic :: Software Development :: Version Control",
    "Topic :: Text Processing :: Filters",
]

setup(
    name='diff-highlight',
    version='0.1.0',
    description='pretty diff highlighter; emphasis changed words in diff',
    long_description=open("README.rst").read(),
    classifiers=classifiers,
    keywords=['mercurial', 'git', 'diff', 'highlight'],
    author='Takeshi Komiya',
    author_email='i.tkomiya at gmail.com',
    url='http://blockdiag.com/',
    download_url='http://pypi.python.org/pypi/diff-highlight',
    license='Apache License 2.0',
    py_modules=['diff_highlight'],
    packates=find_packages('src'),
    package_dir={'': 'src'},
    include_package_data=True,
)
Fix long_description field was duplicated.
Fix long_description field was duplicated.
Python
apache-2.0
tk0miya/diff-highlight
d4abbe52c804d0a11a3826f8df8e1591d25a771e
setup.py
setup.py
from setuptools import setup, find_packages

setup(
    name='flask-ldap-login',
    version='0.1',
    author='Continuum Analitics',
    author_email='srossross@gmail.com',
    url='https://github.com/srossross/flask-ldap-login',
    packages=find_packages(),
    include_package_data=True,
    zip_safe=False,
    entry_points={
        'console_scripts': [
            'flask-ldap-login-check = flask_ldap_login.check:main',
        ],
    },
)
from setuptools import setup, find_packages

setup(
    name='flask-ldap-login',
    version='0.1',
    author='Continuum Analytics',
    author_email='dev@continuum.io',
    url='https://github.com/ContinuumIO/flask-ldap-login',
    packages=find_packages(),
    include_package_data=True,
    zip_safe=False,
    entry_points={
        'console_scripts': [
            'flask-ldap-login-check = flask_ldap_login.check:main',
        ],
    },
)
Fix typo, author email, and package url
Fix typo, author email, and package url
Python
bsd-2-clause
ContinuumIO/flask-ldap-login,ContinuumIO/flask-ldap-login
cc6e1f096a63c9f52dbee6779c143b6df1f11c05
setup.py
setup.py
from distutils.core import setup

setup(
    name="armstrong.templates.standard",
    version="1.0.0",
    description="Provides a basic project template for an Armstrong project",
    long_description=open("README.rst").read(),
    author='Texas Tribune & Bay Citizen',
    author_email='dev@armstrongcms.org',
    packages=[
        "armstrong.templates.standard",
    ],
    package_dir={
        "armstrong.templates.standard": "project_template",
    },
    namespace_packages=[
        "armstrong",
        "armstrong.templates",
        "armstrong.templates.standard",
    ],
    entry_points={
        "armstrong.templates": [
            "standard = armstrong.templates.standard",
        ],
    },
)
from distutils.core import setup
import os

package_data = []
BASE_DIR = os.path.dirname(__file__)
walk_generator = os.walk(os.path.join(BASE_DIR, "project_template"))
paths_and_files = [(paths, files) for paths, dirs, files in walk_generator]
for path, files in paths_and_files:
    prefix = path[len("project_template/"):]
    if files:
        package_data.append(os.path.join(prefix, "*.*"))

setup(
    name="armstrong.templates.standard",
    version="1.0.1",
    description="Provides a basic project template for an Armstrong project",
    long_description=open("README.rst").read(),
    author='Texas Tribune & Bay Citizen',
    author_email='dev@armstrongcms.org',
    packages=[
        "armstrong.templates.standard",
    ],
    package_dir={
        "armstrong.templates.standard": "project_template",
    },
    package_data={
        "armstrong.templates.standard": package_data,
    },
    namespace_packages=[
        "armstrong",
        "armstrong.templates",
        "armstrong.templates.standard",
    ],
    entry_points={
        "armstrong.templates": [
            "standard = armstrong.templates.standard",
        ],
    },
)
Add missing package_data to file (v1.0.1)
Add missing package_data to file (v1.0.1)
Python
apache-2.0
armstrong/armstrong.templates.standard,armstrong/armstrong.templates.standard
1bc326e065fb9580408fe9e78282f22c00d5d376
setup.py
setup.py
# -*- coding: utf-8 -*-
import os

from setuptools import setup

VERSION = '3.1.1'

setup(
    name='conllu',
    packages=["conllu"],
    version=VERSION,
    description='CoNLL-U Parser parses a CoNLL-U formatted string into a nested python dictionary',
    long_description=open(os.path.join(os.path.dirname(__file__), 'README.md')).read(),
    long_description_content_type="text/markdown",
    author=u'Emil Stenström',
    author_email='em@kth.se',
    url='https://github.com/EmilStenstrom/conllu/',
    install_requires=[],
    keywords=['conllu', 'conll', 'conll-u', 'parser', 'nlp'],
    classifiers=[
        "Programming Language :: Python",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Operating System :: OS Independent",
    ],
)
# -*- coding: utf-8 -*-
import os

from setuptools import setup

VERSION = '3.1.1'

setup(
    name='conllu',
    packages=["conllu"],
    version=VERSION,
    description='CoNLL-U Parser parses a CoNLL-U formatted string into a nested python dictionary',
    long_description=open(os.path.join(os.path.dirname(__file__), 'README.md')).read(),
    long_description_content_type="text/markdown",
    author=u'Emil Stenström',
    author_email='em@kth.se',
    url='https://github.com/EmilStenstrom/conllu/',
    install_requires=[],
    keywords=['conllu', 'conll', 'conll-u', 'parser', 'nlp'],
    classifiers=[
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3 :: Only",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Operating System :: OS Independent",
    ],
)
Remove old Python pypi classifiers.
Remove old Python pypi classifiers.
Python
mit
EmilStenstrom/conllu
2af2efc18c2a778d9e2eb6f8a8539d013f7837e7
setup.py
setup.py
import os
import os.path
import sys

from setuptools import find_packages, setup

requirements = ['PyYAML']
major, minor = sys.version_info[:2]
if major == 2 and minor < 7:
    requirements.append('argparse')

setup(
    name='behave',
    version='1.0',
    description='A Cucumber-like BDD tool',
    author='Benno Rice',
    author_email='benno@jeamland.net',
    url='http://github.com/jeamland/behave',
    packages=find_packages(),
    package_data={'behave': ['languages.yml']},
    scripts=['bin/behave'],
    install_requires=requirements,
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Environment :: Console",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Topic :: Software Development :: Testing",
        "License :: OSI Approved :: BSD License",
    ],
)
import os
import os.path
import sys

from setuptools import find_packages, setup

requirements = ['parse>=1.1.5', 'PyYAML']
major, minor = sys.version_info[:2]
if major == 2 and minor < 7:
    requirements.append('argparse')

setup(
    name='behave',
    version='1.0',
    description='A Cucumber-like BDD tool',
    author='Benno Rice',
    author_email='benno@jeamland.net',
    url='http://github.com/jeamland/behave',
    packages=find_packages(),
    package_data={'behave': ['languages.yml']},
    scripts=['bin/behave'],
    install_requires=requirements,
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Environment :: Console",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Topic :: Software Development :: Testing",
        "License :: OSI Approved :: BSD License",
    ],
)
Add dependency on parse, be more explicit in supported Python versions.
Add dependency on parse, be more explicit in supported Python versions.
Python
bsd-2-clause
kymbert/behave,allanlewis/behave,KevinOrtman/behave,hugeinc/behave-parallel,benthomasson/behave,connorsml/behave,spacediver/behave,benthomasson/behave,KevinMarkVI/behave-parallel,spacediver/behave,allanlewis/behave,Gimpneek/behave,kymbert/behave,KevinOrtman/behave,memee/behave,jenisys/behave,metaperl/behave,charleswhchan/behave,joshal/behave,tokunbo/behave-parallel,metaperl/behave,Abdoctor/behave,Gimpneek/behave,Gimpneek/behave,Abdoctor/behave,mzcity123/behave,charleswhchan/behave,vrutkovs/behave,joshal/behave,tokunbo/behave-parallel,mzcity123/behave,jenisys/behave,connorsml/behave,vrutkovs/behave
import os import os.path import sys from setuptools import find_packages, setup requirements = ['PyYAML'] major, minor = sys.version_info[:2] if major == 2 and minor < 7: requirements.append('argparse') setup( name='behave', version='1.0', description='A Cucumber-like BDD tool', author='Benno Rice', author_email='benno@jeamland.net', url='http://github.com/jeamland/behave', packages=find_packages(), package_data={'behave': ['languages.yml']}, scripts=['bin/behave'], install_requires=requirements, classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Console", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Software Development :: Testing", "License :: OSI Approved :: BSD License", ], ) Add dependency on parse, be more explicit in supported Python versions.
import os import os.path import sys from setuptools import find_packages, setup requirements = ['parse>=1.1.5', 'PyYAML'] major, minor = sys.version_info[:2] if major == 2 and minor < 7: requirements.append('argparse') setup( name='behave', version='1.0', description='A Cucumber-like BDD tool', author='Benno Rice', author_email='benno@jeamland.net', url='http://github.com/jeamland/behave', packages=find_packages(), package_data={'behave': ['languages.yml']}, scripts=['bin/behave'], install_requires=requirements, classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Console", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: Software Development :: Testing", "License :: OSI Approved :: BSD License", ], )
<commit_before>import os import os.path import sys from setuptools import find_packages, setup requirements = ['PyYAML'] major, minor = sys.version_info[:2] if major == 2 and minor < 7: requirements.append('argparse') setup( name='behave', version='1.0', description='A Cucumber-like BDD tool', author='Benno Rice', author_email='benno@jeamland.net', url='http://github.com/jeamland/behave', packages=find_packages(), package_data={'behave': ['languages.yml']}, scripts=['bin/behave'], install_requires=requirements, classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Console", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Software Development :: Testing", "License :: OSI Approved :: BSD License", ], ) <commit_msg>Add dependency on parse, be more explicit in supported Python versions.<commit_after>
import os import os.path import sys from setuptools import find_packages, setup requirements = ['parse>=1.1.5', 'PyYAML'] major, minor = sys.version_info[:2] if major == 2 and minor < 7: requirements.append('argparse') setup( name='behave', version='1.0', description='A Cucumber-like BDD tool', author='Benno Rice', author_email='benno@jeamland.net', url='http://github.com/jeamland/behave', packages=find_packages(), package_data={'behave': ['languages.yml']}, scripts=['bin/behave'], install_requires=requirements, classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Console", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: Software Development :: Testing", "License :: OSI Approved :: BSD License", ], )
import os import os.path import sys from setuptools import find_packages, setup requirements = ['PyYAML'] major, minor = sys.version_info[:2] if major == 2 and minor < 7: requirements.append('argparse') setup( name='behave', version='1.0', description='A Cucumber-like BDD tool', author='Benno Rice', author_email='benno@jeamland.net', url='http://github.com/jeamland/behave', packages=find_packages(), package_data={'behave': ['languages.yml']}, scripts=['bin/behave'], install_requires=requirements, classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Console", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Software Development :: Testing", "License :: OSI Approved :: BSD License", ], ) Add dependency on parse, be more explicit in supported Python versions.import os import os.path import sys from setuptools import find_packages, setup requirements = ['parse>=1.1.5', 'PyYAML'] major, minor = sys.version_info[:2] if major == 2 and minor < 7: requirements.append('argparse') setup( name='behave', version='1.0', description='A Cucumber-like BDD tool', author='Benno Rice', author_email='benno@jeamland.net', url='http://github.com/jeamland/behave', packages=find_packages(), package_data={'behave': ['languages.yml']}, scripts=['bin/behave'], install_requires=requirements, classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Console", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: Software Development :: Testing", "License :: OSI Approved :: BSD License", ], )
<commit_before>import os import os.path import sys from setuptools import find_packages, setup requirements = ['PyYAML'] major, minor = sys.version_info[:2] if major == 2 and minor < 7: requirements.append('argparse') setup( name='behave', version='1.0', description='A Cucumber-like BDD tool', author='Benno Rice', author_email='benno@jeamland.net', url='http://github.com/jeamland/behave', packages=find_packages(), package_data={'behave': ['languages.yml']}, scripts=['bin/behave'], install_requires=requirements, classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Console", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Software Development :: Testing", "License :: OSI Approved :: BSD License", ], ) <commit_msg>Add dependency on parse, be more explicit in supported Python versions.<commit_after>import os import os.path import sys from setuptools import find_packages, setup requirements = ['parse>=1.1.5', 'PyYAML'] major, minor = sys.version_info[:2] if major == 2 and minor < 7: requirements.append('argparse') setup( name='behave', version='1.0', description='A Cucumber-like BDD tool', author='Benno Rice', author_email='benno@jeamland.net', url='http://github.com/jeamland/behave', packages=find_packages(), package_data={'behave': ['languages.yml']}, scripts=['bin/behave'], install_requires=requirements, classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Console", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: Software Development :: Testing", "License :: OSI Approved :: BSD License", ], )
089413714bfdcf09fa2faf123dfa26faa2b1af4a
setup.py
setup.py
import os from setuptools import setup, find_packages here = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(here, 'README.rst')).read() CHANGES = open(os.path.join(here, 'CHANGES.rst')).read() requires = [ 'pyramid', 'cornice', 'colander', 'couchdb', ] test_requires = requires + ['lettuce', ] setup(name='daybed', version='0.0', description='daybed', long_description=README + '\n\n' + CHANGES, classifiers=[ "Programming Language :: Python", "Framework :: Pylons", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", ], author='', author_email='', url='', keywords='web pyramid pylons', packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=requires, tests_require=test_requires, test_suite="daybed.tests", entry_points="""\ [paste.app_factory] main = daybed:main """, )
import os from setuptools import setup, find_packages here = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(here, 'README.rst')).read() CHANGES = open(os.path.join(here, 'CHANGES.rst')).read() requires = [ 'pyramid', 'cornice', 'colander', 'couchdb', ] test_requires = requires + ['lettuce', ] setup(name='daybed', version='0.0', description='daybed', long_description=README + '\n\n' + CHANGES, classifiers=[ "Programming Language :: Python", "Framework :: Pylons", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", ], author='', author_email='', url='', keywords='web pyramid pylons', packages=find_packages(), dependency_links = [ "https://github.com/mozilla-services/cornice/tarball/spore-support#egg=cornice" ], include_package_data=True, zip_safe=False, install_requires=requires, tests_require=test_requires, test_suite="daybed.tests", entry_points="""\ [paste.app_factory] main = daybed:main """, )
Install the spore branch of cornice
Install the spore branch of cornice
Python
bsd-3-clause
spiral-project/daybed,spiral-project/daybed
import os from setuptools import setup, find_packages here = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(here, 'README.rst')).read() CHANGES = open(os.path.join(here, 'CHANGES.rst')).read() requires = [ 'pyramid', 'cornice', 'colander', 'couchdb', ] test_requires = requires + ['lettuce', ] setup(name='daybed', version='0.0', description='daybed', long_description=README + '\n\n' + CHANGES, classifiers=[ "Programming Language :: Python", "Framework :: Pylons", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", ], author='', author_email='', url='', keywords='web pyramid pylons', packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=requires, tests_require=test_requires, test_suite="daybed.tests", entry_points="""\ [paste.app_factory] main = daybed:main """, ) Install the spore branch of cornice
import os from setuptools import setup, find_packages here = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(here, 'README.rst')).read() CHANGES = open(os.path.join(here, 'CHANGES.rst')).read() requires = [ 'pyramid', 'cornice', 'colander', 'couchdb', ] test_requires = requires + ['lettuce', ] setup(name='daybed', version='0.0', description='daybed', long_description=README + '\n\n' + CHANGES, classifiers=[ "Programming Language :: Python", "Framework :: Pylons", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", ], author='', author_email='', url='', keywords='web pyramid pylons', packages=find_packages(), dependency_links = [ "https://github.com/mozilla-services/cornice/tarball/spore-support#egg=cornice" ], include_package_data=True, zip_safe=False, install_requires=requires, tests_require=test_requires, test_suite="daybed.tests", entry_points="""\ [paste.app_factory] main = daybed:main """, )
<commit_before>import os from setuptools import setup, find_packages here = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(here, 'README.rst')).read() CHANGES = open(os.path.join(here, 'CHANGES.rst')).read() requires = [ 'pyramid', 'cornice', 'colander', 'couchdb', ] test_requires = requires + ['lettuce', ] setup(name='daybed', version='0.0', description='daybed', long_description=README + '\n\n' + CHANGES, classifiers=[ "Programming Language :: Python", "Framework :: Pylons", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", ], author='', author_email='', url='', keywords='web pyramid pylons', packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=requires, tests_require=test_requires, test_suite="daybed.tests", entry_points="""\ [paste.app_factory] main = daybed:main """, ) <commit_msg>Install the spore branch of cornice<commit_after>
import os from setuptools import setup, find_packages here = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(here, 'README.rst')).read() CHANGES = open(os.path.join(here, 'CHANGES.rst')).read() requires = [ 'pyramid', 'cornice', 'colander', 'couchdb', ] test_requires = requires + ['lettuce', ] setup(name='daybed', version='0.0', description='daybed', long_description=README + '\n\n' + CHANGES, classifiers=[ "Programming Language :: Python", "Framework :: Pylons", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", ], author='', author_email='', url='', keywords='web pyramid pylons', packages=find_packages(), dependency_links = [ "https://github.com/mozilla-services/cornice/tarball/spore-support#egg=cornice" ], include_package_data=True, zip_safe=False, install_requires=requires, tests_require=test_requires, test_suite="daybed.tests", entry_points="""\ [paste.app_factory] main = daybed:main """, )
import os from setuptools import setup, find_packages here = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(here, 'README.rst')).read() CHANGES = open(os.path.join(here, 'CHANGES.rst')).read() requires = [ 'pyramid', 'cornice', 'colander', 'couchdb', ] test_requires = requires + ['lettuce', ] setup(name='daybed', version='0.0', description='daybed', long_description=README + '\n\n' + CHANGES, classifiers=[ "Programming Language :: Python", "Framework :: Pylons", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", ], author='', author_email='', url='', keywords='web pyramid pylons', packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=requires, tests_require=test_requires, test_suite="daybed.tests", entry_points="""\ [paste.app_factory] main = daybed:main """, ) Install the spore branch of corniceimport os from setuptools import setup, find_packages here = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(here, 'README.rst')).read() CHANGES = open(os.path.join(here, 'CHANGES.rst')).read() requires = [ 'pyramid', 'cornice', 'colander', 'couchdb', ] test_requires = requires + ['lettuce', ] setup(name='daybed', version='0.0', description='daybed', long_description=README + '\n\n' + CHANGES, classifiers=[ "Programming Language :: Python", "Framework :: Pylons", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", ], author='', author_email='', url='', keywords='web pyramid pylons', packages=find_packages(), dependency_links = [ "https://github.com/mozilla-services/cornice/tarball/spore-support#egg=cornice" ], include_package_data=True, zip_safe=False, install_requires=requires, tests_require=test_requires, test_suite="daybed.tests", entry_points="""\ [paste.app_factory] main = daybed:main """, )
<commit_before>import os from setuptools import setup, find_packages here = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(here, 'README.rst')).read() CHANGES = open(os.path.join(here, 'CHANGES.rst')).read() requires = [ 'pyramid', 'cornice', 'colander', 'couchdb', ] test_requires = requires + ['lettuce', ] setup(name='daybed', version='0.0', description='daybed', long_description=README + '\n\n' + CHANGES, classifiers=[ "Programming Language :: Python", "Framework :: Pylons", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", ], author='', author_email='', url='', keywords='web pyramid pylons', packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=requires, tests_require=test_requires, test_suite="daybed.tests", entry_points="""\ [paste.app_factory] main = daybed:main """, ) <commit_msg>Install the spore branch of cornice<commit_after>import os from setuptools import setup, find_packages here = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(here, 'README.rst')).read() CHANGES = open(os.path.join(here, 'CHANGES.rst')).read() requires = [ 'pyramid', 'cornice', 'colander', 'couchdb', ] test_requires = requires + ['lettuce', ] setup(name='daybed', version='0.0', description='daybed', long_description=README + '\n\n' + CHANGES, classifiers=[ "Programming Language :: Python", "Framework :: Pylons", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", ], author='', author_email='', url='', keywords='web pyramid pylons', packages=find_packages(), dependency_links = [ "https://github.com/mozilla-services/cornice/tarball/spore-support#egg=cornice" ], include_package_data=True, zip_safe=False, install_requires=requires, tests_require=test_requires, test_suite="daybed.tests", entry_points="""\ [paste.app_factory] main = daybed:main """, )
7ba4dca75301a75e0dd68327e0309f8521d6eeb7
setup.py
setup.py
from distutils.core import setup setup( name='django-grappelli-autocomplete-fk-edit-link', version='1.0.0dev', packages=['grappelli_autocomplete_fk_edit_link',], license='MIT', description='ModelAdmin mixin that adds edit links to Django Grappelli autocomplete lookups.', long_description=open('README.md').read(), install_requires=[ 'django-grappelli==2.6.3', ], )
from distutils.core import setup setup( name='django-grappelli-autocomplete-fk-edit-link', version='1.0.0dev', packages=['grappelli_autocomplete_fk_edit_link',], license='MIT', description='ModelAdmin mixin that adds edit links to Django Grappelli autocomplete lookups.', long_description=open('README.md').read(), install_requires=[ 'django-grappelli>=2.6.3', ], )
Support newer versions of Grappelli.
Support newer versions of Grappelli.
Python
mit
CrossWaterBridge/django-grappelli-autocomplete-fk-edit-link,olivierdalang/django-grappelli-autocomplete-fk-edit-link,CrossWaterBridge/django-grappelli-autocomplete-fk-edit-link,olivierdalang/django-grappelli-autocomplete-fk-edit-link
from distutils.core import setup setup( name='django-grappelli-autocomplete-fk-edit-link', version='1.0.0dev', packages=['grappelli_autocomplete_fk_edit_link',], license='MIT', description='ModelAdmin mixin that adds edit links to Django Grappelli autocomplete lookups.', long_description=open('README.md').read(), install_requires=[ 'django-grappelli==2.6.3', ], ) Support newer versions of Grappelli.
from distutils.core import setup setup( name='django-grappelli-autocomplete-fk-edit-link', version='1.0.0dev', packages=['grappelli_autocomplete_fk_edit_link',], license='MIT', description='ModelAdmin mixin that adds edit links to Django Grappelli autocomplete lookups.', long_description=open('README.md').read(), install_requires=[ 'django-grappelli>=2.6.3', ], )
<commit_before>from distutils.core import setup setup( name='django-grappelli-autocomplete-fk-edit-link', version='1.0.0dev', packages=['grappelli_autocomplete_fk_edit_link',], license='MIT', description='ModelAdmin mixin that adds edit links to Django Grappelli autocomplete lookups.', long_description=open('README.md').read(), install_requires=[ 'django-grappelli==2.6.3', ], ) <commit_msg>Support newer versions of Grappelli.<commit_after>
from distutils.core import setup setup( name='django-grappelli-autocomplete-fk-edit-link', version='1.0.0dev', packages=['grappelli_autocomplete_fk_edit_link',], license='MIT', description='ModelAdmin mixin that adds edit links to Django Grappelli autocomplete lookups.', long_description=open('README.md').read(), install_requires=[ 'django-grappelli>=2.6.3', ], )
from distutils.core import setup setup( name='django-grappelli-autocomplete-fk-edit-link', version='1.0.0dev', packages=['grappelli_autocomplete_fk_edit_link',], license='MIT', description='ModelAdmin mixin that adds edit links to Django Grappelli autocomplete lookups.', long_description=open('README.md').read(), install_requires=[ 'django-grappelli==2.6.3', ], ) Support newer versions of Grappelli.from distutils.core import setup setup( name='django-grappelli-autocomplete-fk-edit-link', version='1.0.0dev', packages=['grappelli_autocomplete_fk_edit_link',], license='MIT', description='ModelAdmin mixin that adds edit links to Django Grappelli autocomplete lookups.', long_description=open('README.md').read(), install_requires=[ 'django-grappelli>=2.6.3', ], )
<commit_before>from distutils.core import setup setup( name='django-grappelli-autocomplete-fk-edit-link', version='1.0.0dev', packages=['grappelli_autocomplete_fk_edit_link',], license='MIT', description='ModelAdmin mixin that adds edit links to Django Grappelli autocomplete lookups.', long_description=open('README.md').read(), install_requires=[ 'django-grappelli==2.6.3', ], ) <commit_msg>Support newer versions of Grappelli.<commit_after>from distutils.core import setup setup( name='django-grappelli-autocomplete-fk-edit-link', version='1.0.0dev', packages=['grappelli_autocomplete_fk_edit_link',], license='MIT', description='ModelAdmin mixin that adds edit links to Django Grappelli autocomplete lookups.', long_description=open('README.md').read(), install_requires=[ 'django-grappelli>=2.6.3', ], )
a22fc986ddf81a915ae5a8bb48d755ec04c65fc2
setup.py
setup.py
from setuptools import setup from pip.req import parse_requirements install_reqs = parse_requirements('requirements.txt') setup( name = "lrs", version = "0.0.0", author = "ADL", packages=['lrs'], install_requires=[str(ir.req) for ir in install_reqs], )
from setuptools import setup from pip.req import parse_requirements install_reqs = parse_requirements('requirements.txt', session=False) setup( name = "lrs", version = "0.0.0", author = "ADL", packages=['lrs'], install_requires=[str(ir.req) for ir in install_reqs], )
Handle newer versions of pip.
Handle newer versions of pip.
Python
apache-2.0
frasern/ADL_LRS,frasern/ADL_LRS,frasern/ADL_LRS
from setuptools import setup from pip.req import parse_requirements install_reqs = parse_requirements('requirements.txt') setup( name = "lrs", version = "0.0.0", author = "ADL", packages=['lrs'], install_requires=[str(ir.req) for ir in install_reqs], ) Handle newer versions of pip.
from setuptools import setup from pip.req import parse_requirements install_reqs = parse_requirements('requirements.txt', session=False) setup( name = "lrs", version = "0.0.0", author = "ADL", packages=['lrs'], install_requires=[str(ir.req) for ir in install_reqs], )
<commit_before>from setuptools import setup from pip.req import parse_requirements install_reqs = parse_requirements('requirements.txt') setup( name = "lrs", version = "0.0.0", author = "ADL", packages=['lrs'], install_requires=[str(ir.req) for ir in install_reqs], ) <commit_msg>Handle newer versions of pip.<commit_after>
from setuptools import setup from pip.req import parse_requirements install_reqs = parse_requirements('requirements.txt', session=False) setup( name = "lrs", version = "0.0.0", author = "ADL", packages=['lrs'], install_requires=[str(ir.req) for ir in install_reqs], )
from setuptools import setup from pip.req import parse_requirements install_reqs = parse_requirements('requirements.txt') setup( name = "lrs", version = "0.0.0", author = "ADL", packages=['lrs'], install_requires=[str(ir.req) for ir in install_reqs], ) Handle newer versions of pip.from setuptools import setup from pip.req import parse_requirements install_reqs = parse_requirements('requirements.txt', session=False) setup( name = "lrs", version = "0.0.0", author = "ADL", packages=['lrs'], install_requires=[str(ir.req) for ir in install_reqs], )
<commit_before>from setuptools import setup from pip.req import parse_requirements install_reqs = parse_requirements('requirements.txt') setup( name = "lrs", version = "0.0.0", author = "ADL", packages=['lrs'], install_requires=[str(ir.req) for ir in install_reqs], ) <commit_msg>Handle newer versions of pip.<commit_after>from setuptools import setup from pip.req import parse_requirements install_reqs = parse_requirements('requirements.txt', session=False) setup( name = "lrs", version = "0.0.0", author = "ADL", packages=['lrs'], install_requires=[str(ir.req) for ir in install_reqs], )
8513d765a071c6f7d8c3bc20ba73e0f8b0744252
setup.py
setup.py
#!/usr/bin/python2 ''' The setup script for salt ''' from distutils.core import setup setup(name='salt', version='0.1', description='Portable, distrubuted, remote execution system', author='Thomas S Hatch', author_email='thatch45@gmail.com', url='https://github.com/thatch45/salt', packages=['salt'], scripts=['scripts/salt-master', 'scripts/salt-minion'], data_files=[('/etc/salt', ['conf/master', 'conf/minion', ]), ('/etc/rc.d/', ['init/salt-minion', 'init/salt-master', ]), ], )
#!/usr/bin/python2 ''' The setup script for salt ''' from distutils.core import setup setup(name='salt', version='0.1', description='Portable, distrubuted, remote execution system', author='Thomas S Hatch', author_email='thatch45@gmail.com', url='https://github.com/thatch45/salt', packages=['salt', 'salt.modules'], scripts=['scripts/salt-master', 'scripts/salt-minion'], data_files=[('/etc/salt', ['conf/master', 'conf/minion', ]), ('/etc/rc.d/', ['init/salt-minion', 'init/salt-master', ]), ], )
Add the salt.modules module to the package
Add the salt.modules module to the package
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
#!/usr/bin/python2 ''' The setup script for salt ''' from distutils.core import setup setup(name='salt', version='0.1', description='Portable, distrubuted, remote execution system', author='Thomas S Hatch', author_email='thatch45@gmail.com', url='https://github.com/thatch45/salt', packages=['salt'], scripts=['scripts/salt-master', 'scripts/salt-minion'], data_files=[('/etc/salt', ['conf/master', 'conf/minion', ]), ('/etc/rc.d/', ['init/salt-minion', 'init/salt-master', ]), ], ) Add the salt.modules module to the package
#!/usr/bin/python2 ''' The setup script for salt ''' from distutils.core import setup setup(name='salt', version='0.1', description='Portable, distrubuted, remote execution system', author='Thomas S Hatch', author_email='thatch45@gmail.com', url='https://github.com/thatch45/salt', packages=['salt', 'salt.modules'], scripts=['scripts/salt-master', 'scripts/salt-minion'], data_files=[('/etc/salt', ['conf/master', 'conf/minion', ]), ('/etc/rc.d/', ['init/salt-minion', 'init/salt-master', ]), ], )
<commit_before>#!/usr/bin/python2 ''' The setup script for salt ''' from distutils.core import setup setup(name='salt', version='0.1', description='Portable, distrubuted, remote execution system', author='Thomas S Hatch', author_email='thatch45@gmail.com', url='https://github.com/thatch45/salt', packages=['salt'], scripts=['scripts/salt-master', 'scripts/salt-minion'], data_files=[('/etc/salt', ['conf/master', 'conf/minion', ]), ('/etc/rc.d/', ['init/salt-minion', 'init/salt-master', ]), ], ) <commit_msg>Add the salt.modules module to the package<commit_after>
#!/usr/bin/python2 ''' The setup script for salt ''' from distutils.core import setup setup(name='salt', version='0.1', description='Portable, distrubuted, remote execution system', author='Thomas S Hatch', author_email='thatch45@gmail.com', url='https://github.com/thatch45/salt', packages=['salt', 'salt.modules'], scripts=['scripts/salt-master', 'scripts/salt-minion'], data_files=[('/etc/salt', ['conf/master', 'conf/minion', ]), ('/etc/rc.d/', ['init/salt-minion', 'init/salt-master', ]), ], )
#!/usr/bin/python2 ''' The setup script for salt ''' from distutils.core import setup setup(name='salt', version='0.1', description='Portable, distrubuted, remote execution system', author='Thomas S Hatch', author_email='thatch45@gmail.com', url='https://github.com/thatch45/salt', packages=['salt'], scripts=['scripts/salt-master', 'scripts/salt-minion'], data_files=[('/etc/salt', ['conf/master', 'conf/minion', ]), ('/etc/rc.d/', ['init/salt-minion', 'init/salt-master', ]), ], ) Add the salt.modules module to the package#!/usr/bin/python2 ''' The setup script for salt ''' from distutils.core import setup setup(name='salt', version='0.1', description='Portable, distrubuted, remote execution system', author='Thomas S Hatch', author_email='thatch45@gmail.com', url='https://github.com/thatch45/salt', packages=['salt', 'salt.modules'], scripts=['scripts/salt-master', 'scripts/salt-minion'], data_files=[('/etc/salt', ['conf/master', 'conf/minion', ]), ('/etc/rc.d/', ['init/salt-minion', 'init/salt-master', ]), ], )
<commit_before>#!/usr/bin/python2 ''' The setup script for salt ''' from distutils.core import setup setup(name='salt', version='0.1', description='Portable, distrubuted, remote execution system', author='Thomas S Hatch', author_email='thatch45@gmail.com', url='https://github.com/thatch45/salt', packages=['salt'], scripts=['scripts/salt-master', 'scripts/salt-minion'], data_files=[('/etc/salt', ['conf/master', 'conf/minion', ]), ('/etc/rc.d/', ['init/salt-minion', 'init/salt-master', ]), ], ) <commit_msg>Add the salt.modules module to the package<commit_after>#!/usr/bin/python2 ''' The setup script for salt ''' from distutils.core import setup setup(name='salt', version='0.1', description='Portable, distrubuted, remote execution system', author='Thomas S Hatch', author_email='thatch45@gmail.com', url='https://github.com/thatch45/salt', packages=['salt', 'salt.modules'], scripts=['scripts/salt-master', 'scripts/salt-minion'], data_files=[('/etc/salt', ['conf/master', 'conf/minion', ]), ('/etc/rc.d/', ['init/salt-minion', 'init/salt-master', ]), ], )
bc1a8ca4f38f112ceeff9a72ded30ce9342b64bb
setup.py
setup.py
from setuptools import setup setup( name='pbdeploy', description='a port-based deployment framework for practicing continuous deployment', version='1.2', packages=['pbdeploy'], scripts=['bin/pbdeploy'], license='The MIT License', author='Michael Rooney', author_email='mrooney.pbdeploy@rowk.com', url='https://github.com/mrooney/pbdeploy', install_requires=['psutil'], )
from setuptools import setup setup( name='pbdeploy', description='a port-based deployment framework for practicing continuous deployment', version='1.0', packages=['pbdeploy'], scripts=['bin/pbdeploy'], license='The MIT License', author='Michael Rooney', author_email='mrooney.pbdeploy@rowk.com', url='https://github.com/mrooney/pbdeploy', install_requires=['psutil'], )
Revert "bump to 1.2 after shell revert"
Revert "bump to 1.2 after shell revert" This reverts commit e2fcb76f6f6ee99a98ee529917959235576e2d07.
Python
mit
mrooney/pbdeploy
from setuptools import setup setup( name='pbdeploy', description='a port-based deployment framework for practicing continuous deployment', version='1.2', packages=['pbdeploy'], scripts=['bin/pbdeploy'], license='The MIT License', author='Michael Rooney', author_email='mrooney.pbdeploy@rowk.com', url='https://github.com/mrooney/pbdeploy', install_requires=['psutil'], ) Revert "bump to 1.2 after shell revert" This reverts commit e2fcb76f6f6ee99a98ee529917959235576e2d07.
from setuptools import setup setup( name='pbdeploy', description='a port-based deployment framework for practicing continuous deployment', version='1.0', packages=['pbdeploy'], scripts=['bin/pbdeploy'], license='The MIT License', author='Michael Rooney', author_email='mrooney.pbdeploy@rowk.com', url='https://github.com/mrooney/pbdeploy', install_requires=['psutil'], )
<commit_before>from setuptools import setup setup( name='pbdeploy', description='a port-based deployment framework for practicing continuous deployment', version='1.2', packages=['pbdeploy'], scripts=['bin/pbdeploy'], license='The MIT License', author='Michael Rooney', author_email='mrooney.pbdeploy@rowk.com', url='https://github.com/mrooney/pbdeploy', install_requires=['psutil'], ) <commit_msg>Revert "bump to 1.2 after shell revert" This reverts commit e2fcb76f6f6ee99a98ee529917959235576e2d07.<commit_after>
from setuptools import setup setup( name='pbdeploy', description='a port-based deployment framework for practicing continuous deployment', version='1.0', packages=['pbdeploy'], scripts=['bin/pbdeploy'], license='The MIT License', author='Michael Rooney', author_email='mrooney.pbdeploy@rowk.com', url='https://github.com/mrooney/pbdeploy', install_requires=['psutil'], )
from setuptools import setup setup( name='pbdeploy', description='a port-based deployment framework for practicing continuous deployment', version='1.2', packages=['pbdeploy'], scripts=['bin/pbdeploy'], license='The MIT License', author='Michael Rooney', author_email='mrooney.pbdeploy@rowk.com', url='https://github.com/mrooney/pbdeploy', install_requires=['psutil'], ) Revert "bump to 1.2 after shell revert" This reverts commit e2fcb76f6f6ee99a98ee529917959235576e2d07.from setuptools import setup setup( name='pbdeploy', description='a port-based deployment framework for practicing continuous deployment', version='1.0', packages=['pbdeploy'], scripts=['bin/pbdeploy'], license='The MIT License', author='Michael Rooney', author_email='mrooney.pbdeploy@rowk.com', url='https://github.com/mrooney/pbdeploy', install_requires=['psutil'], )
<commit_before>from setuptools import setup setup( name='pbdeploy', description='a port-based deployment framework for practicing continuous deployment', version='1.2', packages=['pbdeploy'], scripts=['bin/pbdeploy'], license='The MIT License', author='Michael Rooney', author_email='mrooney.pbdeploy@rowk.com', url='https://github.com/mrooney/pbdeploy', install_requires=['psutil'], ) <commit_msg>Revert "bump to 1.2 after shell revert" This reverts commit e2fcb76f6f6ee99a98ee529917959235576e2d07.<commit_after>from setuptools import setup setup( name='pbdeploy', description='a port-based deployment framework for practicing continuous deployment', version='1.0', packages=['pbdeploy'], scripts=['bin/pbdeploy'], license='The MIT License', author='Michael Rooney', author_email='mrooney.pbdeploy@rowk.com', url='https://github.com/mrooney/pbdeploy', install_requires=['psutil'], )
061f1c63ecdd811eae513d6865146bace7be8b00
setup.py
setup.py
import codecs import os from setuptools import setup, find_packages def read(filename): filepath = os.path.join(os.path.dirname(__file__), filename) return codecs.open(filepath, encoding='utf-8').read() setup( name='lemon-robots', version='0.1.dev', license='BSD', description='robots.txt simple app for Django', long_description=read('README.rst'), author='Mike Yumatov', author_email='mike@yumatov.org', packages=find_packages(), include_package_data=True, classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Topic :: Internet :: WWW/HTTP', ], )
import codecs import os from setuptools import setup, find_packages def read(filename): filepath = os.path.join(os.path.dirname(__file__), filename) return codecs.open(filepath, encoding='utf-8').read() setup( name='lemon-robots', version='0.1.dev', license='BSD', description='robots.txt simple app for Django', long_description=read('README.rst'), url='https://github.com/trilan/lemon-robots', author='Mike Yumatov', author_email='mike@yumatov.org', packages=find_packages(), include_package_data=True, classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Topic :: Internet :: WWW/HTTP', ], )
Add project URL to the distribution info
Add project URL to the distribution info
Python
isc
trilan/lemon-robots,trilan/lemon-robots
import codecs import os from setuptools import setup, find_packages def read(filename): filepath = os.path.join(os.path.dirname(__file__), filename) return codecs.open(filepath, encoding='utf-8').read() setup( name='lemon-robots', version='0.1.dev', license='BSD', description='robots.txt simple app for Django', long_description=read('README.rst'), author='Mike Yumatov', author_email='mike@yumatov.org', packages=find_packages(), include_package_data=True, classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Topic :: Internet :: WWW/HTTP', ], ) Add project URL to the distribution info
import codecs import os from setuptools import setup, find_packages def read(filename): filepath = os.path.join(os.path.dirname(__file__), filename) return codecs.open(filepath, encoding='utf-8').read() setup( name='lemon-robots', version='0.1.dev', license='BSD', description='robots.txt simple app for Django', long_description=read('README.rst'), url='https://github.com/trilan/lemon-robots', author='Mike Yumatov', author_email='mike@yumatov.org', packages=find_packages(), include_package_data=True, classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Topic :: Internet :: WWW/HTTP', ], )
<commit_before>import codecs import os from setuptools import setup, find_packages def read(filename): filepath = os.path.join(os.path.dirname(__file__), filename) return codecs.open(filepath, encoding='utf-8').read() setup( name='lemon-robots', version='0.1.dev', license='BSD', description='robots.txt simple app for Django', long_description=read('README.rst'), author='Mike Yumatov', author_email='mike@yumatov.org', packages=find_packages(), include_package_data=True, classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Topic :: Internet :: WWW/HTTP', ], ) <commit_msg>Add project URL to the distribution info<commit_after>
import codecs import os from setuptools import setup, find_packages def read(filename): filepath = os.path.join(os.path.dirname(__file__), filename) return codecs.open(filepath, encoding='utf-8').read() setup( name='lemon-robots', version='0.1.dev', license='BSD', description='robots.txt simple app for Django', long_description=read('README.rst'), url='https://github.com/trilan/lemon-robots', author='Mike Yumatov', author_email='mike@yumatov.org', packages=find_packages(), include_package_data=True, classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Topic :: Internet :: WWW/HTTP', ], )
import codecs import os from setuptools import setup, find_packages def read(filename): filepath = os.path.join(os.path.dirname(__file__), filename) return codecs.open(filepath, encoding='utf-8').read() setup( name='lemon-robots', version='0.1.dev', license='BSD', description='robots.txt simple app for Django', long_description=read('README.rst'), author='Mike Yumatov', author_email='mike@yumatov.org', packages=find_packages(), include_package_data=True, classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Topic :: Internet :: WWW/HTTP', ], ) Add project URL to the distribution infoimport codecs import os from setuptools import setup, find_packages def read(filename): filepath = os.path.join(os.path.dirname(__file__), filename) return codecs.open(filepath, encoding='utf-8').read() setup( name='lemon-robots', version='0.1.dev', license='BSD', description='robots.txt simple app for Django', long_description=read('README.rst'), url='https://github.com/trilan/lemon-robots', author='Mike Yumatov', author_email='mike@yumatov.org', packages=find_packages(), include_package_data=True, classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Topic :: Internet :: WWW/HTTP', ], )
<commit_before>import codecs import os from setuptools import setup, find_packages def read(filename): filepath = os.path.join(os.path.dirname(__file__), filename) return codecs.open(filepath, encoding='utf-8').read() setup( name='lemon-robots', version='0.1.dev', license='BSD', description='robots.txt simple app for Django', long_description=read('README.rst'), author='Mike Yumatov', author_email='mike@yumatov.org', packages=find_packages(), include_package_data=True, classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Topic :: Internet :: WWW/HTTP', ], ) <commit_msg>Add project URL to the distribution info<commit_after>import codecs import os from setuptools import setup, find_packages def read(filename): filepath = os.path.join(os.path.dirname(__file__), filename) return codecs.open(filepath, encoding='utf-8').read() setup( name='lemon-robots', version='0.1.dev', license='BSD', description='robots.txt simple app for Django', long_description=read('README.rst'), url='https://github.com/trilan/lemon-robots', author='Mike Yumatov', author_email='mike@yumatov.org', packages=find_packages(), include_package_data=True, classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Topic :: Internet :: WWW/HTTP', ], )
8a5dbb75db80f85cebb7f700d5516e271a4ab1b7
setup.py
setup.py
from setuptools import setup, find_packages with open('README.rst', 'r', encoding='utf-8') as f: long_description = f.read() setup( name='jacquard', version='0.1.0', url='https://github.com/prophile/jacquard', description="Split testing server", long_description=long_description, author="Alistair Lynn", author_email="alistair@alynn.co.uk", packages=find_packages(), classifiers=( 'Development Status :: 2 - Pre-Alpha', 'Environment :: Console', 'License :: Open Source :: MIT', 'Natural Language :: English', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Office/Business', ), install_requires=( 'redis', 'werkzeug', ), setup_requires=( 'pytest-runner', ), tests_require=( 'pytest', ), entry_points={ 'console_scripts': ( 'jacquard = jacquard.cli:main', ), 'jacquard.storage_engines': ( 'dummy = jacquard.storage.dummy:DummyStore', 'redis = jacquard.storage.redis:RedisStore', 'file = jacquard.storage.file:FileStore', ), }, )
from setuptools import setup, find_packages with open('README.rst', 'r', encoding='utf-8') as f: long_description = f.read() setup( name='jacquard', version='0.1.0', url='https://github.com/prophile/jacquard', description="Split testing server", long_description=long_description, author="Alistair Lynn", author_email="alistair@alynn.co.uk", packages=find_packages(), classifiers=( 'Development Status :: 2 - Pre-Alpha', 'Environment :: Console', 'License :: Open Source :: MIT', 'Natural Language :: English', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Office/Business', ), install_requires=( 'redis', 'werkzeug', ), setup_requires=( 'pytest-runner', ), tests_require=( 'pytest', ), entry_points={ 'console_scripts': ( 'jacquard = jacquard.cli:main', ), 'jacquard.storage_engines': ( 'dummy = jacquard.storage.dummy:DummyStore', 'redis = jacquard.storage.redis:RedisStore', 'file = jacquard.storage.file:FileStore', ), 'jacquard.commands': ( 'storage-dump = jacquard.storage.commands:StorageDump', ), }, )
Add package resources for subcommands
Add package resources for subcommands
Python
mit
prophile/jacquard,prophile/jacquard
from setuptools import setup, find_packages with open('README.rst', 'r', encoding='utf-8') as f: long_description = f.read() setup( name='jacquard', version='0.1.0', url='https://github.com/prophile/jacquard', description="Split testing server", long_description=long_description, author="Alistair Lynn", author_email="alistair@alynn.co.uk", packages=find_packages(), classifiers=( 'Development Status :: 2 - Pre-Alpha', 'Environment :: Console', 'License :: Open Source :: MIT', 'Natural Language :: English', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Office/Business', ), install_requires=( 'redis', 'werkzeug', ), setup_requires=( 'pytest-runner', ), tests_require=( 'pytest', ), entry_points={ 'console_scripts': ( 'jacquard = jacquard.cli:main', ), 'jacquard.storage_engines': ( 'dummy = jacquard.storage.dummy:DummyStore', 'redis = jacquard.storage.redis:RedisStore', 'file = jacquard.storage.file:FileStore', ), }, ) Add package resources for subcommands
from setuptools import setup, find_packages with open('README.rst', 'r', encoding='utf-8') as f: long_description = f.read() setup( name='jacquard', version='0.1.0', url='https://github.com/prophile/jacquard', description="Split testing server", long_description=long_description, author="Alistair Lynn", author_email="alistair@alynn.co.uk", packages=find_packages(), classifiers=( 'Development Status :: 2 - Pre-Alpha', 'Environment :: Console', 'License :: Open Source :: MIT', 'Natural Language :: English', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Office/Business', ), install_requires=( 'redis', 'werkzeug', ), setup_requires=( 'pytest-runner', ), tests_require=( 'pytest', ), entry_points={ 'console_scripts': ( 'jacquard = jacquard.cli:main', ), 'jacquard.storage_engines': ( 'dummy = jacquard.storage.dummy:DummyStore', 'redis = jacquard.storage.redis:RedisStore', 'file = jacquard.storage.file:FileStore', ), 'jacquard.commands': ( 'storage-dump = jacquard.storage.commands:StorageDump', ), }, )
<commit_before>from setuptools import setup, find_packages with open('README.rst', 'r', encoding='utf-8') as f: long_description = f.read() setup( name='jacquard', version='0.1.0', url='https://github.com/prophile/jacquard', description="Split testing server", long_description=long_description, author="Alistair Lynn", author_email="alistair@alynn.co.uk", packages=find_packages(), classifiers=( 'Development Status :: 2 - Pre-Alpha', 'Environment :: Console', 'License :: Open Source :: MIT', 'Natural Language :: English', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Office/Business', ), install_requires=( 'redis', 'werkzeug', ), setup_requires=( 'pytest-runner', ), tests_require=( 'pytest', ), entry_points={ 'console_scripts': ( 'jacquard = jacquard.cli:main', ), 'jacquard.storage_engines': ( 'dummy = jacquard.storage.dummy:DummyStore', 'redis = jacquard.storage.redis:RedisStore', 'file = jacquard.storage.file:FileStore', ), }, ) <commit_msg>Add package resources for subcommands<commit_after>
from setuptools import setup, find_packages with open('README.rst', 'r', encoding='utf-8') as f: long_description = f.read() setup( name='jacquard', version='0.1.0', url='https://github.com/prophile/jacquard', description="Split testing server", long_description=long_description, author="Alistair Lynn", author_email="alistair@alynn.co.uk", packages=find_packages(), classifiers=( 'Development Status :: 2 - Pre-Alpha', 'Environment :: Console', 'License :: Open Source :: MIT', 'Natural Language :: English', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Office/Business', ), install_requires=( 'redis', 'werkzeug', ), setup_requires=( 'pytest-runner', ), tests_require=( 'pytest', ), entry_points={ 'console_scripts': ( 'jacquard = jacquard.cli:main', ), 'jacquard.storage_engines': ( 'dummy = jacquard.storage.dummy:DummyStore', 'redis = jacquard.storage.redis:RedisStore', 'file = jacquard.storage.file:FileStore', ), 'jacquard.commands': ( 'storage-dump = jacquard.storage.commands:StorageDump', ), }, )
from setuptools import setup, find_packages with open('README.rst', 'r', encoding='utf-8') as f: long_description = f.read() setup( name='jacquard', version='0.1.0', url='https://github.com/prophile/jacquard', description="Split testing server", long_description=long_description, author="Alistair Lynn", author_email="alistair@alynn.co.uk", packages=find_packages(), classifiers=( 'Development Status :: 2 - Pre-Alpha', 'Environment :: Console', 'License :: Open Source :: MIT', 'Natural Language :: English', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Office/Business', ), install_requires=( 'redis', 'werkzeug', ), setup_requires=( 'pytest-runner', ), tests_require=( 'pytest', ), entry_points={ 'console_scripts': ( 'jacquard = jacquard.cli:main', ), 'jacquard.storage_engines': ( 'dummy = jacquard.storage.dummy:DummyStore', 'redis = jacquard.storage.redis:RedisStore', 'file = jacquard.storage.file:FileStore', ), }, ) Add package resources for subcommandsfrom setuptools import setup, find_packages with open('README.rst', 'r', encoding='utf-8') as f: long_description = f.read() setup( name='jacquard', version='0.1.0', url='https://github.com/prophile/jacquard', description="Split testing server", long_description=long_description, author="Alistair Lynn", author_email="alistair@alynn.co.uk", packages=find_packages(), classifiers=( 'Development Status :: 2 - Pre-Alpha', 'Environment :: Console', 'License :: Open Source :: MIT', 'Natural Language :: English', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Office/Business', ), install_requires=( 'redis', 'werkzeug', ), setup_requires=( 'pytest-runner', ), tests_require=( 'pytest', ), entry_points={ 'console_scripts': ( 'jacquard = jacquard.cli:main', ), 'jacquard.storage_engines': ( 'dummy = jacquard.storage.dummy:DummyStore', 'redis = jacquard.storage.redis:RedisStore', 'file = jacquard.storage.file:FileStore', ), 'jacquard.commands': ( 'storage-dump = jacquard.storage.commands:StorageDump', ), }, )
<commit_before>from setuptools import setup, find_packages with open('README.rst', 'r', encoding='utf-8') as f: long_description = f.read() setup( name='jacquard', version='0.1.0', url='https://github.com/prophile/jacquard', description="Split testing server", long_description=long_description, author="Alistair Lynn", author_email="alistair@alynn.co.uk", packages=find_packages(), classifiers=( 'Development Status :: 2 - Pre-Alpha', 'Environment :: Console', 'License :: Open Source :: MIT', 'Natural Language :: English', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Office/Business', ), install_requires=( 'redis', 'werkzeug', ), setup_requires=( 'pytest-runner', ), tests_require=( 'pytest', ), entry_points={ 'console_scripts': ( 'jacquard = jacquard.cli:main', ), 'jacquard.storage_engines': ( 'dummy = jacquard.storage.dummy:DummyStore', 'redis = jacquard.storage.redis:RedisStore', 'file = jacquard.storage.file:FileStore', ), }, ) <commit_msg>Add package resources for subcommands<commit_after>from setuptools import setup, find_packages with open('README.rst', 'r', encoding='utf-8') as f: long_description = f.read() setup( name='jacquard', version='0.1.0', url='https://github.com/prophile/jacquard', description="Split testing server", long_description=long_description, author="Alistair Lynn", author_email="alistair@alynn.co.uk", packages=find_packages(), classifiers=( 'Development Status :: 2 - Pre-Alpha', 'Environment :: Console', 'License :: Open Source :: MIT', 'Natural Language :: English', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Office/Business', ), install_requires=( 'redis', 'werkzeug', ), setup_requires=( 'pytest-runner', ), tests_require=( 'pytest', ), entry_points={ 'console_scripts': ( 'jacquard = jacquard.cli:main', ), 'jacquard.storage_engines': ( 'dummy = jacquard.storage.dummy:DummyStore', 'redis = jacquard.storage.redis:RedisStore', 'file = jacquard.storage.file:FileStore', ), 'jacquard.commands': ( 'storage-dump = jacquard.storage.commands:StorageDump', ), }, )
52f2208570500c675f89376a1b1e1181bceefa51
setup.py
setup.py
import os from setuptools import setup def read(fname1, fname2): if os.path.exists(fname1): fname = fname1 else: fname = fname2 return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name = "django-pubsubpull", version = "0.0.0.5", author = "Kirit Saelensminde", author_email = "kirit@felspar.com", url='https://github.com/KayEss/django-pubsubpull', description = ("Pub/sub and pull for Django"), long_description = read('README','README.md'), license = "Boost Software License - Version 1.0 - August 17th, 2003", keywords = "django rest data pub-sub pull", packages = [ 'pubsubpull', 'pubsubpull.operations', 'pubsubpull.tests', 'pubsubpull.migrations', 'pubsubpull.south_migrations'], data_files = [ ('', ['pubsubpull/trigger-attach.sql', 'pubsubpull/trigger-function.sql'])], install_requires = [ 'django-slumber', 'django-async'], classifiers = [ "Development Status :: 3 - Alpha", "Framework :: Django", "Intended Audience :: Developers", "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", "License :: OSI Approved", ], )
import os from setuptools import setup def read(fname1, fname2): if os.path.exists(fname1): fname = fname1 else: fname = fname2 return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name = "django-pubsubpull", version = "0.0.0.6", author = "Kirit Saelensminde", author_email = "kirit@felspar.com", url='https://github.com/KayEss/django-pubsubpull', description = ("Pub/sub and pull for Django"), long_description = read('README','README.md'), license = "Boost Software License - Version 1.0 - August 17th, 2003", keywords = "django rest data pub-sub pull", packages = [ 'pubsubpull', 'pubsubpull.operations', 'pubsubpull.tests', 'pubsubpull.migrations', 'pubsubpull.south_migrations'], package_data = [ ('pubsubpull', 'trigger-attach.sql', 'trigger-function.sql'])], install_requires = [ 'django-slumber', 'django-async'], classifiers = [ "Development Status :: 3 - Alpha", "Framework :: Django", "Intended Audience :: Developers", "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", "License :: OSI Approved", ], )
Use package_data for data files.
Use package_data for data files.
Python
mit
KayEss/django-pubsubpull,KayEss/django-pubsubpull,KayEss/django-pubsubpull
import os from setuptools import setup def read(fname1, fname2): if os.path.exists(fname1): fname = fname1 else: fname = fname2 return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name = "django-pubsubpull", version = "0.0.0.5", author = "Kirit Saelensminde", author_email = "kirit@felspar.com", url='https://github.com/KayEss/django-pubsubpull', description = ("Pub/sub and pull for Django"), long_description = read('README','README.md'), license = "Boost Software License - Version 1.0 - August 17th, 2003", keywords = "django rest data pub-sub pull", packages = [ 'pubsubpull', 'pubsubpull.operations', 'pubsubpull.tests', 'pubsubpull.migrations', 'pubsubpull.south_migrations'], data_files = [ ('', ['pubsubpull/trigger-attach.sql', 'pubsubpull/trigger-function.sql'])], install_requires = [ 'django-slumber', 'django-async'], classifiers = [ "Development Status :: 3 - Alpha", "Framework :: Django", "Intended Audience :: Developers", "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", "License :: OSI Approved", ], ) Use package_data for data files.
import os from setuptools import setup def read(fname1, fname2): if os.path.exists(fname1): fname = fname1 else: fname = fname2 return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name = "django-pubsubpull", version = "0.0.0.6", author = "Kirit Saelensminde", author_email = "kirit@felspar.com", url='https://github.com/KayEss/django-pubsubpull', description = ("Pub/sub and pull for Django"), long_description = read('README','README.md'), license = "Boost Software License - Version 1.0 - August 17th, 2003", keywords = "django rest data pub-sub pull", packages = [ 'pubsubpull', 'pubsubpull.operations', 'pubsubpull.tests', 'pubsubpull.migrations', 'pubsubpull.south_migrations'], package_data = [ ('pubsubpull', 'trigger-attach.sql', 'trigger-function.sql'])], install_requires = [ 'django-slumber', 'django-async'], classifiers = [ "Development Status :: 3 - Alpha", "Framework :: Django", "Intended Audience :: Developers", "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", "License :: OSI Approved", ], )
<commit_before>import os from setuptools import setup def read(fname1, fname2): if os.path.exists(fname1): fname = fname1 else: fname = fname2 return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name = "django-pubsubpull", version = "0.0.0.5", author = "Kirit Saelensminde", author_email = "kirit@felspar.com", url='https://github.com/KayEss/django-pubsubpull', description = ("Pub/sub and pull for Django"), long_description = read('README','README.md'), license = "Boost Software License - Version 1.0 - August 17th, 2003", keywords = "django rest data pub-sub pull", packages = [ 'pubsubpull', 'pubsubpull.operations', 'pubsubpull.tests', 'pubsubpull.migrations', 'pubsubpull.south_migrations'], data_files = [ ('', ['pubsubpull/trigger-attach.sql', 'pubsubpull/trigger-function.sql'])], install_requires = [ 'django-slumber', 'django-async'], classifiers = [ "Development Status :: 3 - Alpha", "Framework :: Django", "Intended Audience :: Developers", "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", "License :: OSI Approved", ], ) <commit_msg>Use package_data for data files.<commit_after>
import os from setuptools import setup def read(fname1, fname2): if os.path.exists(fname1): fname = fname1 else: fname = fname2 return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name = "django-pubsubpull", version = "0.0.0.6", author = "Kirit Saelensminde", author_email = "kirit@felspar.com", url='https://github.com/KayEss/django-pubsubpull', description = ("Pub/sub and pull for Django"), long_description = read('README','README.md'), license = "Boost Software License - Version 1.0 - August 17th, 2003", keywords = "django rest data pub-sub pull", packages = [ 'pubsubpull', 'pubsubpull.operations', 'pubsubpull.tests', 'pubsubpull.migrations', 'pubsubpull.south_migrations'], package_data = [ ('pubsubpull', 'trigger-attach.sql', 'trigger-function.sql'])], install_requires = [ 'django-slumber', 'django-async'], classifiers = [ "Development Status :: 3 - Alpha", "Framework :: Django", "Intended Audience :: Developers", "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", "License :: OSI Approved", ], )
import os from setuptools import setup def read(fname1, fname2): if os.path.exists(fname1): fname = fname1 else: fname = fname2 return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name = "django-pubsubpull", version = "0.0.0.5", author = "Kirit Saelensminde", author_email = "kirit@felspar.com", url='https://github.com/KayEss/django-pubsubpull', description = ("Pub/sub and pull for Django"), long_description = read('README','README.md'), license = "Boost Software License - Version 1.0 - August 17th, 2003", keywords = "django rest data pub-sub pull", packages = [ 'pubsubpull', 'pubsubpull.operations', 'pubsubpull.tests', 'pubsubpull.migrations', 'pubsubpull.south_migrations'], data_files = [ ('', ['pubsubpull/trigger-attach.sql', 'pubsubpull/trigger-function.sql'])], install_requires = [ 'django-slumber', 'django-async'], classifiers = [ "Development Status :: 3 - Alpha", "Framework :: Django", "Intended Audience :: Developers", "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", "License :: OSI Approved", ], ) Use package_data for data files.import os from setuptools import setup def read(fname1, fname2): if os.path.exists(fname1): fname = fname1 else: fname = fname2 return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name = "django-pubsubpull", version = "0.0.0.6", author = "Kirit Saelensminde", author_email = "kirit@felspar.com", url='https://github.com/KayEss/django-pubsubpull', description = ("Pub/sub and pull for Django"), long_description = read('README','README.md'), license = "Boost Software License - Version 1.0 - August 17th, 2003", keywords = "django rest data pub-sub pull", packages = [ 'pubsubpull', 'pubsubpull.operations', 'pubsubpull.tests', 'pubsubpull.migrations', 'pubsubpull.south_migrations'], package_data = [ ('pubsubpull', 'trigger-attach.sql', 'trigger-function.sql'])], install_requires = [ 'django-slumber', 'django-async'], classifiers = [ "Development Status :: 3 - Alpha", "Framework :: Django", "Intended Audience :: Developers", "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", "License :: OSI Approved", ], )
<commit_before>import os from setuptools import setup def read(fname1, fname2): if os.path.exists(fname1): fname = fname1 else: fname = fname2 return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name = "django-pubsubpull", version = "0.0.0.5", author = "Kirit Saelensminde", author_email = "kirit@felspar.com", url='https://github.com/KayEss/django-pubsubpull', description = ("Pub/sub and pull for Django"), long_description = read('README','README.md'), license = "Boost Software License - Version 1.0 - August 17th, 2003", keywords = "django rest data pub-sub pull", packages = [ 'pubsubpull', 'pubsubpull.operations', 'pubsubpull.tests', 'pubsubpull.migrations', 'pubsubpull.south_migrations'], data_files = [ ('', ['pubsubpull/trigger-attach.sql', 'pubsubpull/trigger-function.sql'])], install_requires = [ 'django-slumber', 'django-async'], classifiers = [ "Development Status :: 3 - Alpha", "Framework :: Django", "Intended Audience :: Developers", "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", "License :: OSI Approved", ], ) <commit_msg>Use package_data for data files.<commit_after>import os from setuptools import setup def read(fname1, fname2): if os.path.exists(fname1): fname = fname1 else: fname = fname2 return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name = "django-pubsubpull", version = "0.0.0.6", author = "Kirit Saelensminde", author_email = "kirit@felspar.com", url='https://github.com/KayEss/django-pubsubpull', description = ("Pub/sub and pull for Django"), long_description = read('README','README.md'), license = "Boost Software License - Version 1.0 - August 17th, 2003", keywords = "django rest data pub-sub pull", packages = [ 'pubsubpull', 'pubsubpull.operations', 'pubsubpull.tests', 'pubsubpull.migrations', 'pubsubpull.south_migrations'], package_data = [ ('pubsubpull', 'trigger-attach.sql', 'trigger-function.sql'])], install_requires = [ 'django-slumber', 'django-async'], classifiers = [ "Development Status :: 3 - Alpha", "Framework :: Django", "Intended Audience :: Developers", "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", "License :: OSI Approved", ], )
416963350b881ee13862d92db3bdf3890df41145
setup.py
setup.py
""" ---------------- Flask-Mustache ---------------- `Mustache`__ integration for Flask. __ http://mustache.github.com/ Flask-Mustache adds template helpers and context processors to assist Flask developers with integrating the Mustache library into their development process. """ from setuptools import setup setup( name='Flask-MustacheJS', version='0.4.2', url='https://github.com/bradleywright/flask-mustache', license='BSD', author='Bradley Wright', author_email='brad@intranation.com', description='Mustache integration in Flask, with Jinja and client-side libraries.', long_description=__doc__, packages=['flask_mustache'], zip_safe=False, include_package_data=True, platforms='any', install_requires=[ 'Flask', 'pystache' ], classifiers=[ 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Libraries :: Python Modules' ] )
""" ---------------- Flask-Mustache ---------------- `Mustache`__ integration for Flask. __ http://mustache.github.com/ Flask-Mustache adds template helpers and context processors to assist Flask developers with integrating the Mustache library into their development process. """ from setuptools import setup setup( name='Flask-MustacheJS', version='0.4.3', url='https://github.com/bradleywright/flask-mustache', license='BSD', author='Bradley Wright', author_email='brad@intranation.com', description='Mustache integration in Flask, with Jinja and client-side libraries.', long_description=__doc__, packages=['flask_mustache'], zip_safe=False, include_package_data=True, # include static assets package_data = { '': ['*.jinja', '*.js'] }, platforms='any', install_requires=[ 'Flask', 'pystache' ], classifiers=[ 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Libraries :: Python Modules' ] )
Include *.js and *.jinja files in sdist packages
Include *.js and *.jinja files in sdist packages
Python
bsd-3-clause
bradleywright/flask-mustachejs,bradleywright/flask-mustachejs,bradwright/flask-mustachejs,bradwright/flask-mustachejs
""" ---------------- Flask-Mustache ---------------- `Mustache`__ integration for Flask. __ http://mustache.github.com/ Flask-Mustache adds template helpers and context processors to assist Flask developers with integrating the Mustache library into their development process. """ from setuptools import setup setup( name='Flask-MustacheJS', version='0.4.2', url='https://github.com/bradleywright/flask-mustache', license='BSD', author='Bradley Wright', author_email='brad@intranation.com', description='Mustache integration in Flask, with Jinja and client-side libraries.', long_description=__doc__, packages=['flask_mustache'], zip_safe=False, include_package_data=True, platforms='any', install_requires=[ 'Flask', 'pystache' ], classifiers=[ 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Libraries :: Python Modules' ] ) Include *.js and *.jinja files in sdist packages
""" ---------------- Flask-Mustache ---------------- `Mustache`__ integration for Flask. __ http://mustache.github.com/ Flask-Mustache adds template helpers and context processors to assist Flask developers with integrating the Mustache library into their development process. """ from setuptools import setup setup( name='Flask-MustacheJS', version='0.4.3', url='https://github.com/bradleywright/flask-mustache', license='BSD', author='Bradley Wright', author_email='brad@intranation.com', description='Mustache integration in Flask, with Jinja and client-side libraries.', long_description=__doc__, packages=['flask_mustache'], zip_safe=False, include_package_data=True, # include static assets package_data = { '': ['*.jinja', '*.js'] }, platforms='any', install_requires=[ 'Flask', 'pystache' ], classifiers=[ 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Libraries :: Python Modules' ] )
<commit_before>""" ---------------- Flask-Mustache ---------------- `Mustache`__ integration for Flask. __ http://mustache.github.com/ Flask-Mustache adds template helpers and context processors to assist Flask developers with integrating the Mustache library into their development process. """ from setuptools import setup setup( name='Flask-MustacheJS', version='0.4.2', url='https://github.com/bradleywright/flask-mustache', license='BSD', author='Bradley Wright', author_email='brad@intranation.com', description='Mustache integration in Flask, with Jinja and client-side libraries.', long_description=__doc__, packages=['flask_mustache'], zip_safe=False, include_package_data=True, platforms='any', install_requires=[ 'Flask', 'pystache' ], classifiers=[ 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Libraries :: Python Modules' ] ) <commit_msg>Include *.js and *.jinja files in sdist packages<commit_after>
""" ---------------- Flask-Mustache ---------------- `Mustache`__ integration for Flask. __ http://mustache.github.com/ Flask-Mustache adds template helpers and context processors to assist Flask developers with integrating the Mustache library into their development process. """ from setuptools import setup setup( name='Flask-MustacheJS', version='0.4.3', url='https://github.com/bradleywright/flask-mustache', license='BSD', author='Bradley Wright', author_email='brad@intranation.com', description='Mustache integration in Flask, with Jinja and client-side libraries.', long_description=__doc__, packages=['flask_mustache'], zip_safe=False, include_package_data=True, # include static assets package_data = { '': ['*.jinja', '*.js'] }, platforms='any', install_requires=[ 'Flask', 'pystache' ], classifiers=[ 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Libraries :: Python Modules' ] )
""" ---------------- Flask-Mustache ---------------- `Mustache`__ integration for Flask. __ http://mustache.github.com/ Flask-Mustache adds template helpers and context processors to assist Flask developers with integrating the Mustache library into their development process. """ from setuptools import setup setup( name='Flask-MustacheJS', version='0.4.2', url='https://github.com/bradleywright/flask-mustache', license='BSD', author='Bradley Wright', author_email='brad@intranation.com', description='Mustache integration in Flask, with Jinja and client-side libraries.', long_description=__doc__, packages=['flask_mustache'], zip_safe=False, include_package_data=True, platforms='any', install_requires=[ 'Flask', 'pystache' ], classifiers=[ 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Libraries :: Python Modules' ] ) Include *.js and *.jinja files in sdist packages""" ---------------- Flask-Mustache ---------------- `Mustache`__ integration for Flask. __ http://mustache.github.com/ Flask-Mustache adds template helpers and context processors to assist Flask developers with integrating the Mustache library into their development process. """ from setuptools import setup setup( name='Flask-MustacheJS', version='0.4.3', url='https://github.com/bradleywright/flask-mustache', license='BSD', author='Bradley Wright', author_email='brad@intranation.com', description='Mustache integration in Flask, with Jinja and client-side libraries.', long_description=__doc__, packages=['flask_mustache'], zip_safe=False, include_package_data=True, # include static assets package_data = { '': ['*.jinja', '*.js'] }, platforms='any', install_requires=[ 'Flask', 'pystache' ], classifiers=[ 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Libraries :: Python Modules' ] )
<commit_before>""" ---------------- Flask-Mustache ---------------- `Mustache`__ integration for Flask. __ http://mustache.github.com/ Flask-Mustache adds template helpers and context processors to assist Flask developers with integrating the Mustache library into their development process. """ from setuptools import setup setup( name='Flask-MustacheJS', version='0.4.2', url='https://github.com/bradleywright/flask-mustache', license='BSD', author='Bradley Wright', author_email='brad@intranation.com', description='Mustache integration in Flask, with Jinja and client-side libraries.', long_description=__doc__, packages=['flask_mustache'], zip_safe=False, include_package_data=True, platforms='any', install_requires=[ 'Flask', 'pystache' ], classifiers=[ 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Libraries :: Python Modules' ] ) <commit_msg>Include *.js and *.jinja files in sdist packages<commit_after>""" ---------------- Flask-Mustache ---------------- `Mustache`__ integration for Flask. __ http://mustache.github.com/ Flask-Mustache adds template helpers and context processors to assist Flask developers with integrating the Mustache library into their development process. """ from setuptools import setup setup( name='Flask-MustacheJS', version='0.4.3', url='https://github.com/bradleywright/flask-mustache', license='BSD', author='Bradley Wright', author_email='brad@intranation.com', description='Mustache integration in Flask, with Jinja and client-side libraries.', long_description=__doc__, packages=['flask_mustache'], zip_safe=False, include_package_data=True, # include static assets package_data = { '': ['*.jinja', '*.js'] }, platforms='any', install_requires=[ 'Flask', 'pystache' ], classifiers=[ 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Libraries :: Python Modules' ] )
51d2469b8c1b9465ff5a41a3c057acdfabdc6bc4
setup.py
setup.py
from setuptools import setup from timebook import get_version setup( name='timebook', version=get_version(), url='http://bitbucket.org/trevor/timebook/', description='track what you spend time on', author='Trevor Caira', author_email='trevor@caira.com', classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Utilities', ], packages=['timebook'], install_requires=[ 'docopt==0.6.2', ], entry_points={'console_scripts': [ 't = timebook.cmdline:run_from_cmdline']}, )
from setuptools import setup from timebook import get_version setup( name='timebook', version=get_version(), url='http://bitbucket.org/trevor/timebook/', description='track what you spend time on', author='Trevor Caira', author_email='trevor@caira.com', classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', 'Topic :: Utilities', ], packages=['timebook'], install_requires=[ 'docopt==0.6.2', ], entry_points={'console_scripts': [ 't = timebook.cmdline:run_from_cmdline']}, )
Update language trove classifier list to include Python 3
Update language trove classifier list to include Python 3 The Python 3.x support is now reasonably well-tested (74% coverage), so this closes #5.
Python
mit
imiric/timebook
from setuptools import setup from timebook import get_version setup( name='timebook', version=get_version(), url='http://bitbucket.org/trevor/timebook/', description='track what you spend time on', author='Trevor Caira', author_email='trevor@caira.com', classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Utilities', ], packages=['timebook'], install_requires=[ 'docopt==0.6.2', ], entry_points={'console_scripts': [ 't = timebook.cmdline:run_from_cmdline']}, ) Update language trove classifier list to include Python 3 The Python 3.x support is now reasonably well-tested (74% coverage), so this closes #5.
from setuptools import setup from timebook import get_version setup( name='timebook', version=get_version(), url='http://bitbucket.org/trevor/timebook/', description='track what you spend time on', author='Trevor Caira', author_email='trevor@caira.com', classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', 'Topic :: Utilities', ], packages=['timebook'], install_requires=[ 'docopt==0.6.2', ], entry_points={'console_scripts': [ 't = timebook.cmdline:run_from_cmdline']}, )
<commit_before>from setuptools import setup from timebook import get_version setup( name='timebook', version=get_version(), url='http://bitbucket.org/trevor/timebook/', description='track what you spend time on', author='Trevor Caira', author_email='trevor@caira.com', classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Utilities', ], packages=['timebook'], install_requires=[ 'docopt==0.6.2', ], entry_points={'console_scripts': [ 't = timebook.cmdline:run_from_cmdline']}, ) <commit_msg>Update language trove classifier list to include Python 3 The Python 3.x support is now reasonably well-tested (74% coverage), so this closes #5.<commit_after>
from setuptools import setup from timebook import get_version setup( name='timebook', version=get_version(), url='http://bitbucket.org/trevor/timebook/', description='track what you spend time on', author='Trevor Caira', author_email='trevor@caira.com', classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', 'Topic :: Utilities', ], packages=['timebook'], install_requires=[ 'docopt==0.6.2', ], entry_points={'console_scripts': [ 't = timebook.cmdline:run_from_cmdline']}, )
from setuptools import setup from timebook import get_version setup( name='timebook', version=get_version(), url='http://bitbucket.org/trevor/timebook/', description='track what you spend time on', author='Trevor Caira', author_email='trevor@caira.com', classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Utilities', ], packages=['timebook'], install_requires=[ 'docopt==0.6.2', ], entry_points={'console_scripts': [ 't = timebook.cmdline:run_from_cmdline']}, ) Update language trove classifier list to include Python 3 The Python 3.x support is now reasonably well-tested (74% coverage), so this closes #5.from setuptools import setup from timebook import get_version setup( name='timebook', version=get_version(), url='http://bitbucket.org/trevor/timebook/', description='track what you spend time on', author='Trevor Caira', author_email='trevor@caira.com', classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', 'Topic :: Utilities', ], packages=['timebook'], install_requires=[ 'docopt==0.6.2', ], entry_points={'console_scripts': [ 't = timebook.cmdline:run_from_cmdline']}, )
<commit_before>from setuptools import setup from timebook import get_version setup( name='timebook', version=get_version(), url='http://bitbucket.org/trevor/timebook/', description='track what you spend time on', author='Trevor Caira', author_email='trevor@caira.com', classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Utilities', ], packages=['timebook'], install_requires=[ 'docopt==0.6.2', ], entry_points={'console_scripts': [ 't = timebook.cmdline:run_from_cmdline']}, ) <commit_msg>Update language trove classifier list to include Python 3 The Python 3.x support is now reasonably well-tested (74% coverage), so this closes #5.<commit_after>from setuptools import setup from timebook import get_version setup( name='timebook', version=get_version(), url='http://bitbucket.org/trevor/timebook/', description='track what you spend time on', author='Trevor Caira', author_email='trevor@caira.com', classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', 'Topic :: Utilities', ], packages=['timebook'], install_requires=[ 'docopt==0.6.2', ], entry_points={'console_scripts': [ 't = timebook.cmdline:run_from_cmdline']}, )
e0f4135b90a3f920db3a14b14b70e0e57df3d717
setup.py
setup.py
## # Copyright (c) 2006-2007 Apple Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # DRI: Cyrus Daboo, cdaboo@apple.com ## from distutils.core import setup, Extension import sys import commands setup ( name = "kerberos", version = "1.0", description = "Kerberos high-level interface", ext_modules = [ Extension( "kerberos", extra_link_args = commands.getoutput("krb5-config --libs gssapi").split(), extra_compile_args = commands.getoutput("krb5-config --cflags gssapi").split(), sources = [ "src/kerberos.c", "src/kerberosbasic.c", "src/kerberosgss.c", "src/base64.c" ], ), ], )
## # Copyright (c) 2006-2007 Apple Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # DRI: Cyrus Daboo, cdaboo@apple.com ## from distutils.core import setup, Extension import sys if sys.version_info < (3,0): import commands as subprocess else: import subprocess setup ( name = "kerberos", version = "1.0", description = "Kerberos high-level interface", ext_modules = [ Extension( "kerberos", extra_link_args = subprocess.getoutput("krb5-config --libs gssapi").split(), extra_compile_args = subprocess.getoutput("krb5-config --cflags gssapi").split(), sources = [ "src/kerberos.c", "src/kerberosbasic.c", "src/kerberosgss.c", "src/base64.c" ], ), ], )
Build for either python 2 or python 3
Build for either python 2 or python 3
Python
apache-2.0
admiyo/PyKerberos,admiyo/PyKerberos,admiyo/PyKerberos
## # Copyright (c) 2006-2007 Apple Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # DRI: Cyrus Daboo, cdaboo@apple.com ## from distutils.core import setup, Extension import sys import commands setup ( name = "kerberos", version = "1.0", description = "Kerberos high-level interface", ext_modules = [ Extension( "kerberos", extra_link_args = commands.getoutput("krb5-config --libs gssapi").split(), extra_compile_args = commands.getoutput("krb5-config --cflags gssapi").split(), sources = [ "src/kerberos.c", "src/kerberosbasic.c", "src/kerberosgss.c", "src/base64.c" ], ), ], ) Build for either python 2 or python 3
## # Copyright (c) 2006-2007 Apple Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # DRI: Cyrus Daboo, cdaboo@apple.com ## from distutils.core import setup, Extension import sys if sys.version_info < (3,0): import commands as subprocess else: import subprocess setup ( name = "kerberos", version = "1.0", description = "Kerberos high-level interface", ext_modules = [ Extension( "kerberos", extra_link_args = subprocess.getoutput("krb5-config --libs gssapi").split(), extra_compile_args = subprocess.getoutput("krb5-config --cflags gssapi").split(), sources = [ "src/kerberos.c", "src/kerberosbasic.c", "src/kerberosgss.c", "src/base64.c" ], ), ], )
<commit_before>## # Copyright (c) 2006-2007 Apple Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # DRI: Cyrus Daboo, cdaboo@apple.com ## from distutils.core import setup, Extension import sys import commands setup ( name = "kerberos", version = "1.0", description = "Kerberos high-level interface", ext_modules = [ Extension( "kerberos", extra_link_args = commands.getoutput("krb5-config --libs gssapi").split(), extra_compile_args = commands.getoutput("krb5-config --cflags gssapi").split(), sources = [ "src/kerberos.c", "src/kerberosbasic.c", "src/kerberosgss.c", "src/base64.c" ], ), ], ) <commit_msg>Build for either python 2 or python 3<commit_after>
## # Copyright (c) 2006-2007 Apple Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # DRI: Cyrus Daboo, cdaboo@apple.com ## from distutils.core import setup, Extension import sys if sys.version_info < (3,0): import commands as subprocess else: import subprocess setup ( name = "kerberos", version = "1.0", description = "Kerberos high-level interface", ext_modules = [ Extension( "kerberos", extra_link_args = subprocess.getoutput("krb5-config --libs gssapi").split(), extra_compile_args = subprocess.getoutput("krb5-config --cflags gssapi").split(), sources = [ "src/kerberos.c", "src/kerberosbasic.c", "src/kerberosgss.c", "src/base64.c" ], ), ], )
## # Copyright (c) 2006-2007 Apple Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # DRI: Cyrus Daboo, cdaboo@apple.com ## from distutils.core import setup, Extension import sys import commands setup ( name = "kerberos", version = "1.0", description = "Kerberos high-level interface", ext_modules = [ Extension( "kerberos", extra_link_args = commands.getoutput("krb5-config --libs gssapi").split(), extra_compile_args = commands.getoutput("krb5-config --cflags gssapi").split(), sources = [ "src/kerberos.c", "src/kerberosbasic.c", "src/kerberosgss.c", "src/base64.c" ], ), ], ) Build for either python 2 or python 3## # Copyright (c) 2006-2007 Apple Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # DRI: Cyrus Daboo, cdaboo@apple.com ## from distutils.core import setup, Extension import sys if sys.version_info < (3,0): import commands as subprocess else: import subprocess setup ( name = "kerberos", version = "1.0", description = "Kerberos high-level interface", ext_modules = [ Extension( "kerberos", extra_link_args = subprocess.getoutput("krb5-config --libs gssapi").split(), extra_compile_args = subprocess.getoutput("krb5-config --cflags gssapi").split(), sources = [ "src/kerberos.c", "src/kerberosbasic.c", "src/kerberosgss.c", "src/base64.c" ], ), ], )
<commit_before>## # Copyright (c) 2006-2007 Apple Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # DRI: Cyrus Daboo, cdaboo@apple.com ## from distutils.core import setup, Extension import sys import commands setup ( name = "kerberos", version = "1.0", description = "Kerberos high-level interface", ext_modules = [ Extension( "kerberos", extra_link_args = commands.getoutput("krb5-config --libs gssapi").split(), extra_compile_args = commands.getoutput("krb5-config --cflags gssapi").split(), sources = [ "src/kerberos.c", "src/kerberosbasic.c", "src/kerberosgss.c", "src/base64.c" ], ), ], ) <commit_msg>Build for either python 2 or python 3<commit_after>## # Copyright (c) 2006-2007 Apple Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # DRI: Cyrus Daboo, cdaboo@apple.com ## from distutils.core import setup, Extension import sys if sys.version_info < (3,0): import commands as subprocess else: import subprocess setup ( name = "kerberos", version = "1.0", description = "Kerberos high-level interface", ext_modules = [ Extension( "kerberos", extra_link_args = subprocess.getoutput("krb5-config --libs gssapi").split(), extra_compile_args = subprocess.getoutput("krb5-config --cflags gssapi").split(), sources = [ "src/kerberos.c", "src/kerberosbasic.c", "src/kerberosgss.c", "src/base64.c" ], ), ], )
61fb82beb9fd159fa06bccc9fc0ac55ba3bcaa64
setup.py
setup.py
from setuptools import setup setup( name='qual', version='0.1', description='Calendar stuff', url='http://github.com/jwg4/qual', author='Jack Grahl', author_email='jack.grahl@yahoo.co.uk', license='Apache License 2.0', packages=['qual'], test_suite='nose.collector', tests_require=['nose', 'hypothesis', 'hypothesis[extra]'] )
from setuptools import setup setup( name='qual', version='0.1', description='Calendar stuff', url='http://github.com/jwg4/qual', author='Jack Grahl', author_email='jack.grahl@yahoo.co.uk', license='Apache License 2.0', packages=['qual'], test_suite='nose.collector', tests_require=['nose', 'hypothesis', 'hypothesis[extras]'] )
Correct name of extras package for hypothesis.
Correct name of extras package for hypothesis.
Python
apache-2.0
jwg4/qual,jwg4/calexicon
from setuptools import setup setup( name='qual', version='0.1', description='Calendar stuff', url='http://github.com/jwg4/qual', author='Jack Grahl', author_email='jack.grahl@yahoo.co.uk', license='Apache License 2.0', packages=['qual'], test_suite='nose.collector', tests_require=['nose', 'hypothesis', 'hypothesis[extra]'] ) Correct name of extras package for hypothesis.
from setuptools import setup setup( name='qual', version='0.1', description='Calendar stuff', url='http://github.com/jwg4/qual', author='Jack Grahl', author_email='jack.grahl@yahoo.co.uk', license='Apache License 2.0', packages=['qual'], test_suite='nose.collector', tests_require=['nose', 'hypothesis', 'hypothesis[extras]'] )
<commit_before>from setuptools import setup setup( name='qual', version='0.1', description='Calendar stuff', url='http://github.com/jwg4/qual', author='Jack Grahl', author_email='jack.grahl@yahoo.co.uk', license='Apache License 2.0', packages=['qual'], test_suite='nose.collector', tests_require=['nose', 'hypothesis', 'hypothesis[extra]'] ) <commit_msg>Correct name of extras package for hypothesis.<commit_after>
from setuptools import setup setup( name='qual', version='0.1', description='Calendar stuff', url='http://github.com/jwg4/qual', author='Jack Grahl', author_email='jack.grahl@yahoo.co.uk', license='Apache License 2.0', packages=['qual'], test_suite='nose.collector', tests_require=['nose', 'hypothesis', 'hypothesis[extras]'] )
from setuptools import setup setup( name='qual', version='0.1', description='Calendar stuff', url='http://github.com/jwg4/qual', author='Jack Grahl', author_email='jack.grahl@yahoo.co.uk', license='Apache License 2.0', packages=['qual'], test_suite='nose.collector', tests_require=['nose', 'hypothesis', 'hypothesis[extra]'] ) Correct name of extras package for hypothesis.from setuptools import setup setup( name='qual', version='0.1', description='Calendar stuff', url='http://github.com/jwg4/qual', author='Jack Grahl', author_email='jack.grahl@yahoo.co.uk', license='Apache License 2.0', packages=['qual'], test_suite='nose.collector', tests_require=['nose', 'hypothesis', 'hypothesis[extras]'] )
<commit_before>from setuptools import setup setup( name='qual', version='0.1', description='Calendar stuff', url='http://github.com/jwg4/qual', author='Jack Grahl', author_email='jack.grahl@yahoo.co.uk', license='Apache License 2.0', packages=['qual'], test_suite='nose.collector', tests_require=['nose', 'hypothesis', 'hypothesis[extra]'] ) <commit_msg>Correct name of extras package for hypothesis.<commit_after>from setuptools import setup setup( name='qual', version='0.1', description='Calendar stuff', url='http://github.com/jwg4/qual', author='Jack Grahl', author_email='jack.grahl@yahoo.co.uk', license='Apache License 2.0', packages=['qual'], test_suite='nose.collector', tests_require=['nose', 'hypothesis', 'hypothesis[extras]'] )
3138c70b9f9d8c44d6e80396afcbc5524b98cb58
setup.py
setup.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup setup(name = "MarkdownPP", description = "Markdown preprocessor", version = "1.0", author = "John Reese", author_email = "john@noswap.com", url = "https://github.com/jreese/markdown-pp", classifiers=['License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Topic :: Utilities', 'Development Status :: 4 - Beta', ], license='MIT License', scripts = ['bin/markdown-pp'], packages = ['MarkdownPP', 'MarkdownPP/Modules'], )
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup from os import path import shutil if path.isfile('README.md'): shutil.copyfile('README.md', 'README') setup(name = "MarkdownPP", description = "Markdown preprocessor", version = "1.0", author = "John Reese", author_email = "john@noswap.com", url = "https://github.com/jreese/markdown-pp", classifiers=['License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Topic :: Utilities', 'Development Status :: 4 - Beta', ], license='MIT License', scripts = ['bin/markdown-pp'], packages = ['MarkdownPP', 'MarkdownPP/Modules'], )
Copy readme.md to readme when building
Copy readme.md to readme when building
Python
mit
jreese/markdown-pp
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup setup(name = "MarkdownPP", description = "Markdown preprocessor", version = "1.0", author = "John Reese", author_email = "john@noswap.com", url = "https://github.com/jreese/markdown-pp", classifiers=['License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Topic :: Utilities', 'Development Status :: 4 - Beta', ], license='MIT License', scripts = ['bin/markdown-pp'], packages = ['MarkdownPP', 'MarkdownPP/Modules'], ) Copy readme.md to readme when building
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup from os import path import shutil if path.isfile('README.md'): shutil.copyfile('README.md', 'README') setup(name = "MarkdownPP", description = "Markdown preprocessor", version = "1.0", author = "John Reese", author_email = "john@noswap.com", url = "https://github.com/jreese/markdown-pp", classifiers=['License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Topic :: Utilities', 'Development Status :: 4 - Beta', ], license='MIT License', scripts = ['bin/markdown-pp'], packages = ['MarkdownPP', 'MarkdownPP/Modules'], )
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup setup(name = "MarkdownPP", description = "Markdown preprocessor", version = "1.0", author = "John Reese", author_email = "john@noswap.com", url = "https://github.com/jreese/markdown-pp", classifiers=['License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Topic :: Utilities', 'Development Status :: 4 - Beta', ], license='MIT License', scripts = ['bin/markdown-pp'], packages = ['MarkdownPP', 'MarkdownPP/Modules'], ) <commit_msg>Copy readme.md to readme when building<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup from os import path import shutil if path.isfile('README.md'): shutil.copyfile('README.md', 'README') setup(name = "MarkdownPP", description = "Markdown preprocessor", version = "1.0", author = "John Reese", author_email = "john@noswap.com", url = "https://github.com/jreese/markdown-pp", classifiers=['License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Topic :: Utilities', 'Development Status :: 4 - Beta', ], license='MIT License', scripts = ['bin/markdown-pp'], packages = ['MarkdownPP', 'MarkdownPP/Modules'], )
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup setup(name = "MarkdownPP", description = "Markdown preprocessor", version = "1.0", author = "John Reese", author_email = "john@noswap.com", url = "https://github.com/jreese/markdown-pp", classifiers=['License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Topic :: Utilities', 'Development Status :: 4 - Beta', ], license='MIT License', scripts = ['bin/markdown-pp'], packages = ['MarkdownPP', 'MarkdownPP/Modules'], ) Copy readme.md to readme when building#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup from os import path import shutil if path.isfile('README.md'): shutil.copyfile('README.md', 'README') setup(name = "MarkdownPP", description = "Markdown preprocessor", version = "1.0", author = "John Reese", author_email = "john@noswap.com", url = "https://github.com/jreese/markdown-pp", classifiers=['License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Topic :: Utilities', 'Development Status :: 4 - Beta', ], license='MIT License', scripts = ['bin/markdown-pp'], packages = ['MarkdownPP', 'MarkdownPP/Modules'], )
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup setup(name = "MarkdownPP", description = "Markdown preprocessor", version = "1.0", author = "John Reese", author_email = "john@noswap.com", url = "https://github.com/jreese/markdown-pp", classifiers=['License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Topic :: Utilities', 'Development Status :: 4 - Beta', ], license='MIT License', scripts = ['bin/markdown-pp'], packages = ['MarkdownPP', 'MarkdownPP/Modules'], ) <commit_msg>Copy readme.md to readme when building<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup from os import path import shutil if path.isfile('README.md'): shutil.copyfile('README.md', 'README') setup(name = "MarkdownPP", description = "Markdown preprocessor", version = "1.0", author = "John Reese", author_email = "john@noswap.com", url = "https://github.com/jreese/markdown-pp", classifiers=['License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Topic :: Utilities', 'Development Status :: 4 - Beta', ], license='MIT License', scripts = ['bin/markdown-pp'], packages = ['MarkdownPP', 'MarkdownPP/Modules'], )
aba23cf821489971d9ec13c8fc4cc1dfbaba686d
setup.py
setup.py
from os.path import abspath, dirname, join, normpath from setuptools import setup setup( # Basic package information: name='django-heroku-memcacheify', version='1.0.0', py_modules=('memcacheify',), # Packaging options: zip_safe=False, include_package_data=True, # Package dependencies: install_requires=['django-pylibmc>=0.6.1'], # Metadata for PyPI: author='Randall Degges', author_email='r@rdegges.com', license='UNLICENSE', url='https://github.com/rdegges/django-heroku-memcacheify', keywords='django heroku cloud cache memcache memcached awesome epic', classifiers=[ 'Framework :: Django', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', ], description='Automatic Django memcached configuration on Heroku.', long_description=open(normpath(join(dirname(abspath(__file__)), 'README.md'))).read() )
from os.path import abspath, dirname, join, normpath from setuptools import setup setup( # Basic package information: name='django-heroku-memcacheify', version='1.0.0', py_modules=('memcacheify',), # Packaging options: zip_safe=False, include_package_data=True, # Package dependencies: install_requires=['django-pylibmc>=0.6.1'], # Metadata for PyPI: author='Randall Degges', author_email='r@rdegges.com', license='UNLICENSE', url='https://github.com/rdegges/django-heroku-memcacheify', keywords='django heroku cloud cache memcache memcached awesome epic', classifiers=[ 'Framework :: Django', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', ], description='Automatic Django memcached configuration on Heroku.', long_description=open(normpath(join(dirname(abspath(__file__)), 'README.md'))).read(), long_description_content_type='text/markdown' )
Fix PyPI README.MD showing problem.
Fix PyPI README.MD showing problem. There is a problem in the project's pypi page. To fix this I added the following line in the setup.py file: ```python long_description_content_type='text/markdown' ```
Python
unlicense
rdegges/django-heroku-memcacheify
from os.path import abspath, dirname, join, normpath from setuptools import setup setup( # Basic package information: name='django-heroku-memcacheify', version='1.0.0', py_modules=('memcacheify',), # Packaging options: zip_safe=False, include_package_data=True, # Package dependencies: install_requires=['django-pylibmc>=0.6.1'], # Metadata for PyPI: author='Randall Degges', author_email='r@rdegges.com', license='UNLICENSE', url='https://github.com/rdegges/django-heroku-memcacheify', keywords='django heroku cloud cache memcache memcached awesome epic', classifiers=[ 'Framework :: Django', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', ], description='Automatic Django memcached configuration on Heroku.', long_description=open(normpath(join(dirname(abspath(__file__)), 'README.md'))).read() ) Fix PyPI README.MD showing problem. There is a problem in the project's pypi page. To fix this I added the following line in the setup.py file: ```python long_description_content_type='text/markdown' ```
from os.path import abspath, dirname, join, normpath from setuptools import setup setup( # Basic package information: name='django-heroku-memcacheify', version='1.0.0', py_modules=('memcacheify',), # Packaging options: zip_safe=False, include_package_data=True, # Package dependencies: install_requires=['django-pylibmc>=0.6.1'], # Metadata for PyPI: author='Randall Degges', author_email='r@rdegges.com', license='UNLICENSE', url='https://github.com/rdegges/django-heroku-memcacheify', keywords='django heroku cloud cache memcache memcached awesome epic', classifiers=[ 'Framework :: Django', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', ], description='Automatic Django memcached configuration on Heroku.', long_description=open(normpath(join(dirname(abspath(__file__)), 'README.md'))).read(), long_description_content_type='text/markdown' )
<commit_before>from os.path import abspath, dirname, join, normpath from setuptools import setup setup( # Basic package information: name='django-heroku-memcacheify', version='1.0.0', py_modules=('memcacheify',), # Packaging options: zip_safe=False, include_package_data=True, # Package dependencies: install_requires=['django-pylibmc>=0.6.1'], # Metadata for PyPI: author='Randall Degges', author_email='r@rdegges.com', license='UNLICENSE', url='https://github.com/rdegges/django-heroku-memcacheify', keywords='django heroku cloud cache memcache memcached awesome epic', classifiers=[ 'Framework :: Django', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', ], description='Automatic Django memcached configuration on Heroku.', long_description=open(normpath(join(dirname(abspath(__file__)), 'README.md'))).read() ) <commit_msg>Fix PyPI README.MD showing problem. There is a problem in the project's pypi page. To fix this I added the following line in the setup.py file: ```python long_description_content_type='text/markdown' ```<commit_after>
from os.path import abspath, dirname, join, normpath from setuptools import setup setup( # Basic package information: name='django-heroku-memcacheify', version='1.0.0', py_modules=('memcacheify',), # Packaging options: zip_safe=False, include_package_data=True, # Package dependencies: install_requires=['django-pylibmc>=0.6.1'], # Metadata for PyPI: author='Randall Degges', author_email='r@rdegges.com', license='UNLICENSE', url='https://github.com/rdegges/django-heroku-memcacheify', keywords='django heroku cloud cache memcache memcached awesome epic', classifiers=[ 'Framework :: Django', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', ], description='Automatic Django memcached configuration on Heroku.', long_description=open(normpath(join(dirname(abspath(__file__)), 'README.md'))).read(), long_description_content_type='text/markdown' )
from os.path import abspath, dirname, join, normpath from setuptools import setup setup( # Basic package information: name='django-heroku-memcacheify', version='1.0.0', py_modules=('memcacheify',), # Packaging options: zip_safe=False, include_package_data=True, # Package dependencies: install_requires=['django-pylibmc>=0.6.1'], # Metadata for PyPI: author='Randall Degges', author_email='r@rdegges.com', license='UNLICENSE', url='https://github.com/rdegges/django-heroku-memcacheify', keywords='django heroku cloud cache memcache memcached awesome epic', classifiers=[ 'Framework :: Django', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', ], description='Automatic Django memcached configuration on Heroku.', long_description=open(normpath(join(dirname(abspath(__file__)), 'README.md'))).read() ) Fix PyPI README.MD showing problem. There is a problem in the project's pypi page. To fix this I added the following line in the setup.py file: ```python long_description_content_type='text/markdown' ```from os.path import abspath, dirname, join, normpath from setuptools import setup setup( # Basic package information: name='django-heroku-memcacheify', version='1.0.0', py_modules=('memcacheify',), # Packaging options: zip_safe=False, include_package_data=True, # Package dependencies: install_requires=['django-pylibmc>=0.6.1'], # Metadata for PyPI: author='Randall Degges', author_email='r@rdegges.com', license='UNLICENSE', url='https://github.com/rdegges/django-heroku-memcacheify', keywords='django heroku cloud cache memcache memcached awesome epic', classifiers=[ 'Framework :: Django', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', ], description='Automatic Django memcached configuration on Heroku.', long_description=open(normpath(join(dirname(abspath(__file__)), 'README.md'))).read(), long_description_content_type='text/markdown' )
<commit_before>from os.path import abspath, dirname, join, normpath from setuptools import setup setup( # Basic package information: name='django-heroku-memcacheify', version='1.0.0', py_modules=('memcacheify',), # Packaging options: zip_safe=False, include_package_data=True, # Package dependencies: install_requires=['django-pylibmc>=0.6.1'], # Metadata for PyPI: author='Randall Degges', author_email='r@rdegges.com', license='UNLICENSE', url='https://github.com/rdegges/django-heroku-memcacheify', keywords='django heroku cloud cache memcache memcached awesome epic', classifiers=[ 'Framework :: Django', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', ], description='Automatic Django memcached configuration on Heroku.', long_description=open(normpath(join(dirname(abspath(__file__)), 'README.md'))).read() ) <commit_msg>Fix PyPI README.MD showing problem. There is a problem in the project's pypi page. To fix this I added the following line in the setup.py file: ```python long_description_content_type='text/markdown' ```<commit_after>from os.path import abspath, dirname, join, normpath from setuptools import setup setup( # Basic package information: name='django-heroku-memcacheify', version='1.0.0', py_modules=('memcacheify',), # Packaging options: zip_safe=False, include_package_data=True, # Package dependencies: install_requires=['django-pylibmc>=0.6.1'], # Metadata for PyPI: author='Randall Degges', author_email='r@rdegges.com', license='UNLICENSE', url='https://github.com/rdegges/django-heroku-memcacheify', keywords='django heroku cloud cache memcache memcached awesome epic', classifiers=[ 'Framework :: Django', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', ], description='Automatic Django memcached configuration on Heroku.', long_description=open(normpath(join(dirname(abspath(__file__)), 'README.md'))).read(), long_description_content_type='text/markdown' )
166fd5d9f1e0ee7fc3cb494addb5564452e6aa7b
setup.py
setup.py
import setuptools setuptools.setup( name="Mongothon", version="0.7.13", author="Tom Leach", author_email="tom@gc.io", description="A MongoDB object-document mapping layer for Python", license="BSD", keywords="mongo mongodb database pymongo odm validation", url="http://github.com/gamechanger/mongothon", packages=["mongothon"], long_description="Mongothon is a MongoDB object-document mapping " + "API for Python, loosely based on the awesome " + "mongoose.js library.", install_requires=['pymongo>=2.5.0', 'inflection==0.2.0', 'schemer==0.2.2'], tests_require=['mock', 'nose'] )
import setuptools setuptools.setup( name="Mongothon", version="0.7.14", author="Tom Leach", author_email="tom@gc.io", description="A MongoDB object-document mapping layer for Python", license="BSD", keywords="mongo mongodb database pymongo odm validation", url="http://github.com/gamechanger/mongothon", packages=["mongothon"], long_description="Mongothon is a MongoDB object-document mapping " + "API for Python, loosely based on the awesome " + "mongoose.js library.", install_requires=['pymongo>=2.5.0', 'inflection==0.2.0', 'schemer==0.2.3'], tests_require=['mock', 'nose'] )
Use version 0.2.3 of schemer and bump the version number to 0.7.14 in the process
Use version 0.2.3 of schemer and bump the version number to 0.7.14 in the process
Python
mit
gamechanger/mongothon
import setuptools setuptools.setup( name="Mongothon", version="0.7.13", author="Tom Leach", author_email="tom@gc.io", description="A MongoDB object-document mapping layer for Python", license="BSD", keywords="mongo mongodb database pymongo odm validation", url="http://github.com/gamechanger/mongothon", packages=["mongothon"], long_description="Mongothon is a MongoDB object-document mapping " + "API for Python, loosely based on the awesome " + "mongoose.js library.", install_requires=['pymongo>=2.5.0', 'inflection==0.2.0', 'schemer==0.2.2'], tests_require=['mock', 'nose'] ) Use version 0.2.3 of schemer and bump the version number to 0.7.14 in the process
import setuptools setuptools.setup( name="Mongothon", version="0.7.14", author="Tom Leach", author_email="tom@gc.io", description="A MongoDB object-document mapping layer for Python", license="BSD", keywords="mongo mongodb database pymongo odm validation", url="http://github.com/gamechanger/mongothon", packages=["mongothon"], long_description="Mongothon is a MongoDB object-document mapping " + "API for Python, loosely based on the awesome " + "mongoose.js library.", install_requires=['pymongo>=2.5.0', 'inflection==0.2.0', 'schemer==0.2.3'], tests_require=['mock', 'nose'] )
<commit_before>import setuptools setuptools.setup( name="Mongothon", version="0.7.13", author="Tom Leach", author_email="tom@gc.io", description="A MongoDB object-document mapping layer for Python", license="BSD", keywords="mongo mongodb database pymongo odm validation", url="http://github.com/gamechanger/mongothon", packages=["mongothon"], long_description="Mongothon is a MongoDB object-document mapping " + "API for Python, loosely based on the awesome " + "mongoose.js library.", install_requires=['pymongo>=2.5.0', 'inflection==0.2.0', 'schemer==0.2.2'], tests_require=['mock', 'nose'] ) <commit_msg>Use version 0.2.3 of schemer and bump the version number to 0.7.14 in the process<commit_after>
import setuptools setuptools.setup( name="Mongothon", version="0.7.14", author="Tom Leach", author_email="tom@gc.io", description="A MongoDB object-document mapping layer for Python", license="BSD", keywords="mongo mongodb database pymongo odm validation", url="http://github.com/gamechanger/mongothon", packages=["mongothon"], long_description="Mongothon is a MongoDB object-document mapping " + "API for Python, loosely based on the awesome " + "mongoose.js library.", install_requires=['pymongo>=2.5.0', 'inflection==0.2.0', 'schemer==0.2.3'], tests_require=['mock', 'nose'] )
import setuptools setuptools.setup( name="Mongothon", version="0.7.13", author="Tom Leach", author_email="tom@gc.io", description="A MongoDB object-document mapping layer for Python", license="BSD", keywords="mongo mongodb database pymongo odm validation", url="http://github.com/gamechanger/mongothon", packages=["mongothon"], long_description="Mongothon is a MongoDB object-document mapping " + "API for Python, loosely based on the awesome " + "mongoose.js library.", install_requires=['pymongo>=2.5.0', 'inflection==0.2.0', 'schemer==0.2.2'], tests_require=['mock', 'nose'] ) Use version 0.2.3 of schemer and bump the version number to 0.7.14 in the processimport setuptools setuptools.setup( name="Mongothon", version="0.7.14", author="Tom Leach", author_email="tom@gc.io", description="A MongoDB object-document mapping layer for Python", license="BSD", keywords="mongo mongodb database pymongo odm validation", url="http://github.com/gamechanger/mongothon", packages=["mongothon"], long_description="Mongothon is a MongoDB object-document mapping " + "API for Python, loosely based on the awesome " + "mongoose.js library.", install_requires=['pymongo>=2.5.0', 'inflection==0.2.0', 'schemer==0.2.3'], tests_require=['mock', 'nose'] )
<commit_before>import setuptools setuptools.setup( name="Mongothon", version="0.7.13", author="Tom Leach", author_email="tom@gc.io", description="A MongoDB object-document mapping layer for Python", license="BSD", keywords="mongo mongodb database pymongo odm validation", url="http://github.com/gamechanger/mongothon", packages=["mongothon"], long_description="Mongothon is a MongoDB object-document mapping " + "API for Python, loosely based on the awesome " + "mongoose.js library.", install_requires=['pymongo>=2.5.0', 'inflection==0.2.0', 'schemer==0.2.2'], tests_require=['mock', 'nose'] ) <commit_msg>Use version 0.2.3 of schemer and bump the version number to 0.7.14 in the process<commit_after>import setuptools setuptools.setup( name="Mongothon", version="0.7.14", author="Tom Leach", author_email="tom@gc.io", description="A MongoDB object-document mapping layer for Python", license="BSD", keywords="mongo mongodb database pymongo odm validation", url="http://github.com/gamechanger/mongothon", packages=["mongothon"], long_description="Mongothon is a MongoDB object-document mapping " + "API for Python, loosely based on the awesome " + "mongoose.js library.", install_requires=['pymongo>=2.5.0', 'inflection==0.2.0', 'schemer==0.2.3'], tests_require=['mock', 'nose'] )
0b255fdec2d4763779a8b07bc043320b9b0236d5
setup.py
setup.py
import re import subprocess from setuptools import setup def _get_git_description(): try: return subprocess.check_output(["git", "describe"]).decode("utf-8").strip() except subprocess.CalledProcessError: return None def get_version(): description = _get_git_description() match = re.match(r'(?P<tag>[\d\.]+)-(?P<offset>[\d]+)-(?P<sha>\w{8})', description) if match: version = "{tag}.post{offset}".format(**match.groupdict()) else: version = description return version def main(): setup( name="workwork", url="https://github.com/DoWhileGeek/workwork", description="A flask api for managing aws ec2 instances", license="MIT License", author="Joeseph Rodrigues", author_email="dowhilegeek@gmail.com", version=get_version(), packages=["workwork", ], package_data={"workwork": ["workwork/*"], }, include_package_data=True, install_requires=[ "Flask==0.10.1", "boto3==1.2.3", ], extras_require={ "develop": [ "pytest==2.8.7", ], }, ) if __name__ == "__main__": main()
import re import subprocess from setuptools import setup def _get_git_description(): try: return subprocess.check_output(["git", "describe"]).decode("utf-8").strip() except subprocess.CalledProcessError: return None def get_version(): description = _get_git_description() match = re.match(r'(?P<tag>[\d\.]+)-(?P<offset>[\d]+)-(?P<sha>\w{8})', description) if match: version = "{tag}.post{offset}".format(**match.groupdict()) else: version = description return version def main(): setup( name="workwork", url="https://github.com/DoWhileGeek/workwork", description="A flask api for managing aws ec2 instances", license="MIT License", author="Joeseph Rodrigues", author_email="dowhilegeek@gmail.com", version=get_version(), packages=["workwork", ], package_data={"workwork": ["workwork/*"], }, include_package_data=True, install_requires=[ "boto3==1.2.3", "Flask==0.10.1", "webargs==1.2.0", ], extras_require={ "develop": [ "pytest==2.8.7", ], }, ) if __name__ == "__main__": main()
Add webargs requirement, and sort requirements.
Add webargs requirement, and sort requirements.
Python
mit
DoWhileGeek/workwork
import re import subprocess from setuptools import setup def _get_git_description(): try: return subprocess.check_output(["git", "describe"]).decode("utf-8").strip() except subprocess.CalledProcessError: return None def get_version(): description = _get_git_description() match = re.match(r'(?P<tag>[\d\.]+)-(?P<offset>[\d]+)-(?P<sha>\w{8})', description) if match: version = "{tag}.post{offset}".format(**match.groupdict()) else: version = description return version def main(): setup( name="workwork", url="https://github.com/DoWhileGeek/workwork", description="A flask api for managing aws ec2 instances", license="MIT License", author="Joeseph Rodrigues", author_email="dowhilegeek@gmail.com", version=get_version(), packages=["workwork", ], package_data={"workwork": ["workwork/*"], }, include_package_data=True, install_requires=[ "Flask==0.10.1", "boto3==1.2.3", ], extras_require={ "develop": [ "pytest==2.8.7", ], }, ) if __name__ == "__main__": main() Add webargs requirement, and sort requirements.
import re import subprocess from setuptools import setup def _get_git_description(): try: return subprocess.check_output(["git", "describe"]).decode("utf-8").strip() except subprocess.CalledProcessError: return None def get_version(): description = _get_git_description() match = re.match(r'(?P<tag>[\d\.]+)-(?P<offset>[\d]+)-(?P<sha>\w{8})', description) if match: version = "{tag}.post{offset}".format(**match.groupdict()) else: version = description return version def main(): setup( name="workwork", url="https://github.com/DoWhileGeek/workwork", description="A flask api for managing aws ec2 instances", license="MIT License", author="Joeseph Rodrigues", author_email="dowhilegeek@gmail.com", version=get_version(), packages=["workwork", ], package_data={"workwork": ["workwork/*"], }, include_package_data=True, install_requires=[ "boto3==1.2.3", "Flask==0.10.1", "webargs==1.2.0", ], extras_require={ "develop": [ "pytest==2.8.7", ], }, ) if __name__ == "__main__": main()
<commit_before>import re import subprocess from setuptools import setup def _get_git_description(): try: return subprocess.check_output(["git", "describe"]).decode("utf-8").strip() except subprocess.CalledProcessError: return None def get_version(): description = _get_git_description() match = re.match(r'(?P<tag>[\d\.]+)-(?P<offset>[\d]+)-(?P<sha>\w{8})', description) if match: version = "{tag}.post{offset}".format(**match.groupdict()) else: version = description return version def main(): setup( name="workwork", url="https://github.com/DoWhileGeek/workwork", description="A flask api for managing aws ec2 instances", license="MIT License", author="Joeseph Rodrigues", author_email="dowhilegeek@gmail.com", version=get_version(), packages=["workwork", ], package_data={"workwork": ["workwork/*"], }, include_package_data=True, install_requires=[ "Flask==0.10.1", "boto3==1.2.3", ], extras_require={ "develop": [ "pytest==2.8.7", ], }, ) if __name__ == "__main__": main() <commit_msg>Add webargs requirement, and sort requirements.<commit_after>
import re import subprocess from setuptools import setup def _get_git_description(): try: return subprocess.check_output(["git", "describe"]).decode("utf-8").strip() except subprocess.CalledProcessError: return None def get_version(): description = _get_git_description() match = re.match(r'(?P<tag>[\d\.]+)-(?P<offset>[\d]+)-(?P<sha>\w{8})', description) if match: version = "{tag}.post{offset}".format(**match.groupdict()) else: version = description return version def main(): setup( name="workwork", url="https://github.com/DoWhileGeek/workwork", description="A flask api for managing aws ec2 instances", license="MIT License", author="Joeseph Rodrigues", author_email="dowhilegeek@gmail.com", version=get_version(), packages=["workwork", ], package_data={"workwork": ["workwork/*"], }, include_package_data=True, install_requires=[ "boto3==1.2.3", "Flask==0.10.1", "webargs==1.2.0", ], extras_require={ "develop": [ "pytest==2.8.7", ], }, ) if __name__ == "__main__": main()
import re import subprocess from setuptools import setup def _get_git_description(): try: return subprocess.check_output(["git", "describe"]).decode("utf-8").strip() except subprocess.CalledProcessError: return None def get_version(): description = _get_git_description() match = re.match(r'(?P<tag>[\d\.]+)-(?P<offset>[\d]+)-(?P<sha>\w{8})', description) if match: version = "{tag}.post{offset}".format(**match.groupdict()) else: version = description return version def main(): setup( name="workwork", url="https://github.com/DoWhileGeek/workwork", description="A flask api for managing aws ec2 instances", license="MIT License", author="Joeseph Rodrigues", author_email="dowhilegeek@gmail.com", version=get_version(), packages=["workwork", ], package_data={"workwork": ["workwork/*"], }, include_package_data=True, install_requires=[ "Flask==0.10.1", "boto3==1.2.3", ], extras_require={ "develop": [ "pytest==2.8.7", ], }, ) if __name__ == "__main__": main() Add webargs requirement, and sort requirements.import re import subprocess from setuptools import setup def _get_git_description(): try: return subprocess.check_output(["git", "describe"]).decode("utf-8").strip() except subprocess.CalledProcessError: return None def get_version(): description = _get_git_description() match = re.match(r'(?P<tag>[\d\.]+)-(?P<offset>[\d]+)-(?P<sha>\w{8})', description) if match: version = "{tag}.post{offset}".format(**match.groupdict()) else: version = description return version def main(): setup( name="workwork", url="https://github.com/DoWhileGeek/workwork", description="A flask api for managing aws ec2 instances", license="MIT License", author="Joeseph Rodrigues", author_email="dowhilegeek@gmail.com", version=get_version(), packages=["workwork", ], package_data={"workwork": ["workwork/*"], }, include_package_data=True, install_requires=[ "boto3==1.2.3", "Flask==0.10.1", "webargs==1.2.0", ], extras_require={ "develop": [ "pytest==2.8.7", ], }, ) if __name__ == "__main__": main()
<commit_before>import re import subprocess from setuptools import setup def _get_git_description(): try: return subprocess.check_output(["git", "describe"]).decode("utf-8").strip() except subprocess.CalledProcessError: return None def get_version(): description = _get_git_description() match = re.match(r'(?P<tag>[\d\.]+)-(?P<offset>[\d]+)-(?P<sha>\w{8})', description) if match: version = "{tag}.post{offset}".format(**match.groupdict()) else: version = description return version def main(): setup( name="workwork", url="https://github.com/DoWhileGeek/workwork", description="A flask api for managing aws ec2 instances", license="MIT License", author="Joeseph Rodrigues", author_email="dowhilegeek@gmail.com", version=get_version(), packages=["workwork", ], package_data={"workwork": ["workwork/*"], }, include_package_data=True, install_requires=[ "Flask==0.10.1", "boto3==1.2.3", ], extras_require={ "develop": [ "pytest==2.8.7", ], }, ) if __name__ == "__main__": main() <commit_msg>Add webargs requirement, and sort requirements.<commit_after>import re import subprocess from setuptools import setup def _get_git_description(): try: return subprocess.check_output(["git", "describe"]).decode("utf-8").strip() except subprocess.CalledProcessError: return None def get_version(): description = _get_git_description() match = re.match(r'(?P<tag>[\d\.]+)-(?P<offset>[\d]+)-(?P<sha>\w{8})', description) if match: version = "{tag}.post{offset}".format(**match.groupdict()) else: version = description return version def main(): setup( name="workwork", url="https://github.com/DoWhileGeek/workwork", description="A flask api for managing aws ec2 instances", license="MIT License", author="Joeseph Rodrigues", author_email="dowhilegeek@gmail.com", version=get_version(), packages=["workwork", ], package_data={"workwork": ["workwork/*"], }, include_package_data=True, install_requires=[ "boto3==1.2.3", "Flask==0.10.1", "webargs==1.2.0", ], extras_require={ "develop": [ "pytest==2.8.7", ], }, ) if __name__ == "__main__": main()
b5a5b0ffbe17b859d07df98853263761bc2877e9
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup, find_packages import keyvaluestore setup( name="django-keyvaluestore", version=keyvaluestore.__version__, url='https://github.com/vikingco/django-keyvaluestore', license='BSD', description="A Key-Value store for Django", long_description=open('README.rst', 'r').read(), author='VikingCo NV', packages=['keyvaluestore'], zip_safe=False, # Don't create egg files, Django cannot find templates in egg files. include_package_data=True, classifiers=[ 'Intended Audience :: Developers', 'Programming Language :: Python', 'Operating System :: OS Independent', 'Environment :: Web Environment', 'Framework :: Django', ], )
#!/usr/bin/env python from setuptools import setup, find_packages import keyvaluestore setup( name="django-keyvaluestore", version=keyvaluestore.__version__, url='https://github.com/vikingco/django-keyvaluestore', license='BSD', description="A Key-Value store for Django", long_description=open('README.rst', 'r').read(), author='VikingCo NV', packages=find_packages(), zip_safe=False, # Don't create egg files, Django cannot find templates in egg files. include_package_data=True, classifiers=[ 'Intended Audience :: Developers', 'Programming Language :: Python', 'Operating System :: OS Independent', 'Environment :: Web Environment', 'Framework :: Django', ], )
Use function 'findpackages' to collect packages.
Use function 'findpackages' to collect packages.
Python
bsd-3-clause
vikingco/django-keyvaluestore
#!/usr/bin/env python from setuptools import setup, find_packages import keyvaluestore setup( name="django-keyvaluestore", version=keyvaluestore.__version__, url='https://github.com/vikingco/django-keyvaluestore', license='BSD', description="A Key-Value store for Django", long_description=open('README.rst', 'r').read(), author='VikingCo NV', packages=['keyvaluestore'], zip_safe=False, # Don't create egg files, Django cannot find templates in egg files. include_package_data=True, classifiers=[ 'Intended Audience :: Developers', 'Programming Language :: Python', 'Operating System :: OS Independent', 'Environment :: Web Environment', 'Framework :: Django', ], ) Use function 'findpackages' to collect packages.
#!/usr/bin/env python from setuptools import setup, find_packages import keyvaluestore setup( name="django-keyvaluestore", version=keyvaluestore.__version__, url='https://github.com/vikingco/django-keyvaluestore', license='BSD', description="A Key-Value store for Django", long_description=open('README.rst', 'r').read(), author='VikingCo NV', packages=find_packages(), zip_safe=False, # Don't create egg files, Django cannot find templates in egg files. include_package_data=True, classifiers=[ 'Intended Audience :: Developers', 'Programming Language :: Python', 'Operating System :: OS Independent', 'Environment :: Web Environment', 'Framework :: Django', ], )
<commit_before>#!/usr/bin/env python from setuptools import setup, find_packages import keyvaluestore setup( name="django-keyvaluestore", version=keyvaluestore.__version__, url='https://github.com/vikingco/django-keyvaluestore', license='BSD', description="A Key-Value store for Django", long_description=open('README.rst', 'r').read(), author='VikingCo NV', packages=['keyvaluestore'], zip_safe=False, # Don't create egg files, Django cannot find templates in egg files. include_package_data=True, classifiers=[ 'Intended Audience :: Developers', 'Programming Language :: Python', 'Operating System :: OS Independent', 'Environment :: Web Environment', 'Framework :: Django', ], ) <commit_msg>Use function 'findpackages' to collect packages.<commit_after>
#!/usr/bin/env python from setuptools import setup, find_packages import keyvaluestore setup( name="django-keyvaluestore", version=keyvaluestore.__version__, url='https://github.com/vikingco/django-keyvaluestore', license='BSD', description="A Key-Value store for Django", long_description=open('README.rst', 'r').read(), author='VikingCo NV', packages=find_packages(), zip_safe=False, # Don't create egg files, Django cannot find templates in egg files. include_package_data=True, classifiers=[ 'Intended Audience :: Developers', 'Programming Language :: Python', 'Operating System :: OS Independent', 'Environment :: Web Environment', 'Framework :: Django', ], )
#!/usr/bin/env python from setuptools import setup, find_packages import keyvaluestore setup( name="django-keyvaluestore", version=keyvaluestore.__version__, url='https://github.com/vikingco/django-keyvaluestore', license='BSD', description="A Key-Value store for Django", long_description=open('README.rst', 'r').read(), author='VikingCo NV', packages=['keyvaluestore'], zip_safe=False, # Don't create egg files, Django cannot find templates in egg files. include_package_data=True, classifiers=[ 'Intended Audience :: Developers', 'Programming Language :: Python', 'Operating System :: OS Independent', 'Environment :: Web Environment', 'Framework :: Django', ], ) Use function 'findpackages' to collect packages.#!/usr/bin/env python from setuptools import setup, find_packages import keyvaluestore setup( name="django-keyvaluestore", version=keyvaluestore.__version__, url='https://github.com/vikingco/django-keyvaluestore', license='BSD', description="A Key-Value store for Django", long_description=open('README.rst', 'r').read(), author='VikingCo NV', packages=find_packages(), zip_safe=False, # Don't create egg files, Django cannot find templates in egg files. include_package_data=True, classifiers=[ 'Intended Audience :: Developers', 'Programming Language :: Python', 'Operating System :: OS Independent', 'Environment :: Web Environment', 'Framework :: Django', ], )
<commit_before>#!/usr/bin/env python from setuptools import setup, find_packages import keyvaluestore setup( name="django-keyvaluestore", version=keyvaluestore.__version__, url='https://github.com/vikingco/django-keyvaluestore', license='BSD', description="A Key-Value store for Django", long_description=open('README.rst', 'r').read(), author='VikingCo NV', packages=['keyvaluestore'], zip_safe=False, # Don't create egg files, Django cannot find templates in egg files. include_package_data=True, classifiers=[ 'Intended Audience :: Developers', 'Programming Language :: Python', 'Operating System :: OS Independent', 'Environment :: Web Environment', 'Framework :: Django', ], ) <commit_msg>Use function 'findpackages' to collect packages.<commit_after>#!/usr/bin/env python from setuptools import setup, find_packages import keyvaluestore setup( name="django-keyvaluestore", version=keyvaluestore.__version__, url='https://github.com/vikingco/django-keyvaluestore', license='BSD', description="A Key-Value store for Django", long_description=open('README.rst', 'r').read(), author='VikingCo NV', packages=find_packages(), zip_safe=False, # Don't create egg files, Django cannot find templates in egg files. include_package_data=True, classifiers=[ 'Intended Audience :: Developers', 'Programming Language :: Python', 'Operating System :: OS Independent', 'Environment :: Web Environment', 'Framework :: Django', ], )
5dcec96b7af384f7f753cb2d67d7cbd0c361c504
tests/helpers.py
tests/helpers.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals import json from elasticsearch import ( Elasticsearch, TransportError ) ELASTICSEARCH_URL = "localhost" conn = Elasticsearch(ELASTICSEARCH_URL) def homogeneous(a, b): json.dumps(a).should.equal(json.dumps(b)) def heterogeneous(a, b): json.dumps(a).shouldnt.equal(json.dumps(b)) def add_document(index, document, **kwargs): document_type = "my_doc_type" conn.create(index=index, doc_type=document_type, body=document, refresh=True, **kwargs) def clean_elasticsearch(context): _delete_es_index("foo") def prepare_elasticsearch(context): clean_elasticsearch(context) _create_foo_index() conn.cluster.health(wait_for_status='yellow') def _create_foo_index(): conn.indices.create(index="foo", ignore=400) def _delete_es_index(index): conn.indices.delete(index=index, ignore=[400, 404]) prepare_data = [ prepare_elasticsearch ] cleanup_data = [ clean_elasticsearch ]
# -*- coding: utf-8 -*- from __future__ import unicode_literals import json from elasticsearch import ( Elasticsearch, TransportError ) ELASTICSEARCH_URL = "localhost" conn = Elasticsearch(ELASTICSEARCH_URL) def homogeneous(a, b): json.dumps(a).should.equal(json.dumps(b)) def heterogeneous(a, b): json.dumps(a).shouldnt.equal(json.dumps(b)) def add_document(index, document, **kwargs): if "doc_type" not in kwargs: # Allow overriding doc type defaults doc_type = "my_doc_type" kwargs["doc_type"] = doc_type conn.create(index=index, body=document, refresh=True, **kwargs) def clean_elasticsearch(context): _delete_es_index("foo") def prepare_elasticsearch(context): clean_elasticsearch(context) _create_foo_index() conn.cluster.health(wait_for_status='yellow') def _create_foo_index(): conn.indices.create(index="foo", ignore=400) def _delete_es_index(index): conn.indices.delete(index=index, ignore=[400, 404]) prepare_data = [ prepare_elasticsearch ] cleanup_data = [ clean_elasticsearch ]
Allow overriding doc type defaults
Allow overriding doc type defaults
Python
mit
Yipit/pyeqs
# -*- coding: utf-8 -*- from __future__ import unicode_literals import json from elasticsearch import ( Elasticsearch, TransportError ) ELASTICSEARCH_URL = "localhost" conn = Elasticsearch(ELASTICSEARCH_URL) def homogeneous(a, b): json.dumps(a).should.equal(json.dumps(b)) def heterogeneous(a, b): json.dumps(a).shouldnt.equal(json.dumps(b)) def add_document(index, document, **kwargs): document_type = "my_doc_type" conn.create(index=index, doc_type=document_type, body=document, refresh=True, **kwargs) def clean_elasticsearch(context): _delete_es_index("foo") def prepare_elasticsearch(context): clean_elasticsearch(context) _create_foo_index() conn.cluster.health(wait_for_status='yellow') def _create_foo_index(): conn.indices.create(index="foo", ignore=400) def _delete_es_index(index): conn.indices.delete(index=index, ignore=[400, 404]) prepare_data = [ prepare_elasticsearch ] cleanup_data = [ clean_elasticsearch ] Allow overriding doc type defaults
# -*- coding: utf-8 -*- from __future__ import unicode_literals import json from elasticsearch import ( Elasticsearch, TransportError ) ELASTICSEARCH_URL = "localhost" conn = Elasticsearch(ELASTICSEARCH_URL) def homogeneous(a, b): json.dumps(a).should.equal(json.dumps(b)) def heterogeneous(a, b): json.dumps(a).shouldnt.equal(json.dumps(b)) def add_document(index, document, **kwargs): if "doc_type" not in kwargs: # Allow overriding doc type defaults doc_type = "my_doc_type" kwargs["doc_type"] = doc_type conn.create(index=index, body=document, refresh=True, **kwargs) def clean_elasticsearch(context): _delete_es_index("foo") def prepare_elasticsearch(context): clean_elasticsearch(context) _create_foo_index() conn.cluster.health(wait_for_status='yellow') def _create_foo_index(): conn.indices.create(index="foo", ignore=400) def _delete_es_index(index): conn.indices.delete(index=index, ignore=[400, 404]) prepare_data = [ prepare_elasticsearch ] cleanup_data = [ clean_elasticsearch ]
<commit_before># -*- coding: utf-8 -*- from __future__ import unicode_literals import json from elasticsearch import ( Elasticsearch, TransportError ) ELASTICSEARCH_URL = "localhost" conn = Elasticsearch(ELASTICSEARCH_URL) def homogeneous(a, b): json.dumps(a).should.equal(json.dumps(b)) def heterogeneous(a, b): json.dumps(a).shouldnt.equal(json.dumps(b)) def add_document(index, document, **kwargs): document_type = "my_doc_type" conn.create(index=index, doc_type=document_type, body=document, refresh=True, **kwargs) def clean_elasticsearch(context): _delete_es_index("foo") def prepare_elasticsearch(context): clean_elasticsearch(context) _create_foo_index() conn.cluster.health(wait_for_status='yellow') def _create_foo_index(): conn.indices.create(index="foo", ignore=400) def _delete_es_index(index): conn.indices.delete(index=index, ignore=[400, 404]) prepare_data = [ prepare_elasticsearch ] cleanup_data = [ clean_elasticsearch ] <commit_msg>Allow overriding doc type defaults<commit_after>
# -*- coding: utf-8 -*- from __future__ import unicode_literals import json from elasticsearch import ( Elasticsearch, TransportError ) ELASTICSEARCH_URL = "localhost" conn = Elasticsearch(ELASTICSEARCH_URL) def homogeneous(a, b): json.dumps(a).should.equal(json.dumps(b)) def heterogeneous(a, b): json.dumps(a).shouldnt.equal(json.dumps(b)) def add_document(index, document, **kwargs): if "doc_type" not in kwargs: # Allow overriding doc type defaults doc_type = "my_doc_type" kwargs["doc_type"] = doc_type conn.create(index=index, body=document, refresh=True, **kwargs) def clean_elasticsearch(context): _delete_es_index("foo") def prepare_elasticsearch(context): clean_elasticsearch(context) _create_foo_index() conn.cluster.health(wait_for_status='yellow') def _create_foo_index(): conn.indices.create(index="foo", ignore=400) def _delete_es_index(index): conn.indices.delete(index=index, ignore=[400, 404]) prepare_data = [ prepare_elasticsearch ] cleanup_data = [ clean_elasticsearch ]
# -*- coding: utf-8 -*- from __future__ import unicode_literals import json from elasticsearch import ( Elasticsearch, TransportError ) ELASTICSEARCH_URL = "localhost" conn = Elasticsearch(ELASTICSEARCH_URL) def homogeneous(a, b): json.dumps(a).should.equal(json.dumps(b)) def heterogeneous(a, b): json.dumps(a).shouldnt.equal(json.dumps(b)) def add_document(index, document, **kwargs): document_type = "my_doc_type" conn.create(index=index, doc_type=document_type, body=document, refresh=True, **kwargs) def clean_elasticsearch(context): _delete_es_index("foo") def prepare_elasticsearch(context): clean_elasticsearch(context) _create_foo_index() conn.cluster.health(wait_for_status='yellow') def _create_foo_index(): conn.indices.create(index="foo", ignore=400) def _delete_es_index(index): conn.indices.delete(index=index, ignore=[400, 404]) prepare_data = [ prepare_elasticsearch ] cleanup_data = [ clean_elasticsearch ] Allow overriding doc type defaults# -*- coding: utf-8 -*- from __future__ import unicode_literals import json from elasticsearch import ( Elasticsearch, TransportError ) ELASTICSEARCH_URL = "localhost" conn = Elasticsearch(ELASTICSEARCH_URL) def homogeneous(a, b): json.dumps(a).should.equal(json.dumps(b)) def heterogeneous(a, b): json.dumps(a).shouldnt.equal(json.dumps(b)) def add_document(index, document, **kwargs): if "doc_type" not in kwargs: # Allow overriding doc type defaults doc_type = "my_doc_type" kwargs["doc_type"] = doc_type conn.create(index=index, body=document, refresh=True, **kwargs) def clean_elasticsearch(context): _delete_es_index("foo") def prepare_elasticsearch(context): clean_elasticsearch(context) _create_foo_index() conn.cluster.health(wait_for_status='yellow') def _create_foo_index(): conn.indices.create(index="foo", ignore=400) def _delete_es_index(index): conn.indices.delete(index=index, ignore=[400, 404]) prepare_data = [ prepare_elasticsearch ] cleanup_data = [ clean_elasticsearch ]
<commit_before># -*- coding: utf-8 -*- from __future__ import unicode_literals import json from elasticsearch import ( Elasticsearch, TransportError ) ELASTICSEARCH_URL = "localhost" conn = Elasticsearch(ELASTICSEARCH_URL) def homogeneous(a, b): json.dumps(a).should.equal(json.dumps(b)) def heterogeneous(a, b): json.dumps(a).shouldnt.equal(json.dumps(b)) def add_document(index, document, **kwargs): document_type = "my_doc_type" conn.create(index=index, doc_type=document_type, body=document, refresh=True, **kwargs) def clean_elasticsearch(context): _delete_es_index("foo") def prepare_elasticsearch(context): clean_elasticsearch(context) _create_foo_index() conn.cluster.health(wait_for_status='yellow') def _create_foo_index(): conn.indices.create(index="foo", ignore=400) def _delete_es_index(index): conn.indices.delete(index=index, ignore=[400, 404]) prepare_data = [ prepare_elasticsearch ] cleanup_data = [ clean_elasticsearch ] <commit_msg>Allow overriding doc type defaults<commit_after># -*- coding: utf-8 -*- from __future__ import unicode_literals import json from elasticsearch import ( Elasticsearch, TransportError ) ELASTICSEARCH_URL = "localhost" conn = Elasticsearch(ELASTICSEARCH_URL) def homogeneous(a, b): json.dumps(a).should.equal(json.dumps(b)) def heterogeneous(a, b): json.dumps(a).shouldnt.equal(json.dumps(b)) def add_document(index, document, **kwargs): if "doc_type" not in kwargs: # Allow overriding doc type defaults doc_type = "my_doc_type" kwargs["doc_type"] = doc_type conn.create(index=index, body=document, refresh=True, **kwargs) def clean_elasticsearch(context): _delete_es_index("foo") def prepare_elasticsearch(context): clean_elasticsearch(context) _create_foo_index() conn.cluster.health(wait_for_status='yellow') def _create_foo_index(): conn.indices.create(index="foo", ignore=400) def _delete_es_index(index): conn.indices.delete(index=index, ignore=[400, 404]) prepare_data = [ prepare_elasticsearch ] cleanup_data = [ clean_elasticsearch ]
6b1d3220ef631d8a81504d1c7875d97314eb1826
setup.py
setup.py
from setuptools import setup, find_packages # To use a consistent encoding from codecs import open from os import path here = path.abspath(path.dirname(__file__)) PACKAGES = [ 'lib', 'lib.scripts', 'lib.scripts.biosql', 'lib.scripts.blast', 'lib.scripts.ftp', 'lib.scripts.genbank', 'lib.scripts.manager', 'lib.scripts.multiprocessing', 'lib.scripts.phylogenetic_analyses' ] # Get the long description from the README file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name='orthologs', description="A project that will help to analyze orthologous gense.", version='0.1.0', long_description=long_description, url='https://github.com/robear22890/Orthologs-Project', license='?', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Science/Research', 'Topic :: Scientific/Engineering :: Bio-Informatics', 'Topic :: Scientific/Engineering :: Visualization', 'Programming Language :: Python :: 3', 'Operating System :: Unix', 'Natural Language :: English' ], packages= )
# Used: # https://github.com/pypa/sampleproject/blob/master/setup.py # https://github.com/biopython/biopython/blob/master/setup.py from setuptools import setup, find_packages # To use a consistent encoding from codecs import open from os import path here = path.abspath(path.dirname(__file__)) PACKAGES = [ 'lib', 'lib.scripts', 'lib.scripts.biosql', 'lib.scripts.blast', 'lib.scripts.ftp', 'lib.scripts.genbank', 'lib.scripts.manager', 'lib.scripts.multiprocessing', 'lib.scripts.phylogenetic_analyses' ] # Get the long description from the README file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name='orthologs', description="A project that will help to analyze orthologous gense.", version='0.1.0', long_description=long_description, url='https://github.com/robear22890/Orthologs-Project', license='?', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Science/Research', 'Topic :: Scientific/Engineering :: Bio-Informatics', 'Topic :: Scientific/Engineering :: Visualization', 'Programming Language :: Python :: 3', 'Operating System :: Unix', 'Natural Language :: English' ], packages=PACKAGES, install_requires=[], )
Develop here. Belongs in top level Orthologs Project.
Develop here. Belongs in top level Orthologs Project.
Python
mit
datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts
from setuptools import setup, find_packages # To use a consistent encoding from codecs import open from os import path here = path.abspath(path.dirname(__file__)) PACKAGES = [ 'lib', 'lib.scripts', 'lib.scripts.biosql', 'lib.scripts.blast', 'lib.scripts.ftp', 'lib.scripts.genbank', 'lib.scripts.manager', 'lib.scripts.multiprocessing', 'lib.scripts.phylogenetic_analyses' ] # Get the long description from the README file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name='orthologs', description="A project that will help to analyze orthologous gense.", version='0.1.0', long_description=long_description, url='https://github.com/robear22890/Orthologs-Project', license='?', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Science/Research', 'Topic :: Scientific/Engineering :: Bio-Informatics', 'Topic :: Scientific/Engineering :: Visualization', 'Programming Language :: Python :: 3', 'Operating System :: Unix', 'Natural Language :: English' ], packages= ) Develop here. Belongs in top level Orthologs Project.
# Used: # https://github.com/pypa/sampleproject/blob/master/setup.py # https://github.com/biopython/biopython/blob/master/setup.py from setuptools import setup, find_packages # To use a consistent encoding from codecs import open from os import path here = path.abspath(path.dirname(__file__)) PACKAGES = [ 'lib', 'lib.scripts', 'lib.scripts.biosql', 'lib.scripts.blast', 'lib.scripts.ftp', 'lib.scripts.genbank', 'lib.scripts.manager', 'lib.scripts.multiprocessing', 'lib.scripts.phylogenetic_analyses' ] # Get the long description from the README file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name='orthologs', description="A project that will help to analyze orthologous gense.", version='0.1.0', long_description=long_description, url='https://github.com/robear22890/Orthologs-Project', license='?', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Science/Research', 'Topic :: Scientific/Engineering :: Bio-Informatics', 'Topic :: Scientific/Engineering :: Visualization', 'Programming Language :: Python :: 3', 'Operating System :: Unix', 'Natural Language :: English' ], packages=PACKAGES, install_requires=[], )
<commit_before> from setuptools import setup, find_packages # To use a consistent encoding from codecs import open from os import path here = path.abspath(path.dirname(__file__)) PACKAGES = [ 'lib', 'lib.scripts', 'lib.scripts.biosql', 'lib.scripts.blast', 'lib.scripts.ftp', 'lib.scripts.genbank', 'lib.scripts.manager', 'lib.scripts.multiprocessing', 'lib.scripts.phylogenetic_analyses' ] # Get the long description from the README file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name='orthologs', description="A project that will help to analyze orthologous gense.", version='0.1.0', long_description=long_description, url='https://github.com/robear22890/Orthologs-Project', license='?', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Science/Research', 'Topic :: Scientific/Engineering :: Bio-Informatics', 'Topic :: Scientific/Engineering :: Visualization', 'Programming Language :: Python :: 3', 'Operating System :: Unix', 'Natural Language :: English' ], packages= ) <commit_msg>Develop here. Belongs in top level Orthologs Project.<commit_after>
# Used: # https://github.com/pypa/sampleproject/blob/master/setup.py # https://github.com/biopython/biopython/blob/master/setup.py from setuptools import setup, find_packages # To use a consistent encoding from codecs import open from os import path here = path.abspath(path.dirname(__file__)) PACKAGES = [ 'lib', 'lib.scripts', 'lib.scripts.biosql', 'lib.scripts.blast', 'lib.scripts.ftp', 'lib.scripts.genbank', 'lib.scripts.manager', 'lib.scripts.multiprocessing', 'lib.scripts.phylogenetic_analyses' ] # Get the long description from the README file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name='orthologs', description="A project that will help to analyze orthologous gense.", version='0.1.0', long_description=long_description, url='https://github.com/robear22890/Orthologs-Project', license='?', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Science/Research', 'Topic :: Scientific/Engineering :: Bio-Informatics', 'Topic :: Scientific/Engineering :: Visualization', 'Programming Language :: Python :: 3', 'Operating System :: Unix', 'Natural Language :: English' ], packages=PACKAGES, install_requires=[], )
from setuptools import setup, find_packages # To use a consistent encoding from codecs import open from os import path here = path.abspath(path.dirname(__file__)) PACKAGES = [ 'lib', 'lib.scripts', 'lib.scripts.biosql', 'lib.scripts.blast', 'lib.scripts.ftp', 'lib.scripts.genbank', 'lib.scripts.manager', 'lib.scripts.multiprocessing', 'lib.scripts.phylogenetic_analyses' ] # Get the long description from the README file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name='orthologs', description="A project that will help to analyze orthologous gense.", version='0.1.0', long_description=long_description, url='https://github.com/robear22890/Orthologs-Project', license='?', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Science/Research', 'Topic :: Scientific/Engineering :: Bio-Informatics', 'Topic :: Scientific/Engineering :: Visualization', 'Programming Language :: Python :: 3', 'Operating System :: Unix', 'Natural Language :: English' ], packages= ) Develop here. Belongs in top level Orthologs Project.# Used: # https://github.com/pypa/sampleproject/blob/master/setup.py # https://github.com/biopython/biopython/blob/master/setup.py from setuptools import setup, find_packages # To use a consistent encoding from codecs import open from os import path here = path.abspath(path.dirname(__file__)) PACKAGES = [ 'lib', 'lib.scripts', 'lib.scripts.biosql', 'lib.scripts.blast', 'lib.scripts.ftp', 'lib.scripts.genbank', 'lib.scripts.manager', 'lib.scripts.multiprocessing', 'lib.scripts.phylogenetic_analyses' ] # Get the long description from the README file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name='orthologs', description="A project that will help to analyze orthologous gense.", version='0.1.0', long_description=long_description, url='https://github.com/robear22890/Orthologs-Project', license='?', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Science/Research', 'Topic :: Scientific/Engineering :: Bio-Informatics', 'Topic :: Scientific/Engineering :: Visualization', 'Programming Language :: Python :: 3', 'Operating System :: Unix', 'Natural Language :: English' ], packages=PACKAGES, install_requires=[], )
<commit_before> from setuptools import setup, find_packages # To use a consistent encoding from codecs import open from os import path here = path.abspath(path.dirname(__file__)) PACKAGES = [ 'lib', 'lib.scripts', 'lib.scripts.biosql', 'lib.scripts.blast', 'lib.scripts.ftp', 'lib.scripts.genbank', 'lib.scripts.manager', 'lib.scripts.multiprocessing', 'lib.scripts.phylogenetic_analyses' ] # Get the long description from the README file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name='orthologs', description="A project that will help to analyze orthologous gense.", version='0.1.0', long_description=long_description, url='https://github.com/robear22890/Orthologs-Project', license='?', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Science/Research', 'Topic :: Scientific/Engineering :: Bio-Informatics', 'Topic :: Scientific/Engineering :: Visualization', 'Programming Language :: Python :: 3', 'Operating System :: Unix', 'Natural Language :: English' ], packages= ) <commit_msg>Develop here. Belongs in top level Orthologs Project.<commit_after># Used: # https://github.com/pypa/sampleproject/blob/master/setup.py # https://github.com/biopython/biopython/blob/master/setup.py from setuptools import setup, find_packages # To use a consistent encoding from codecs import open from os import path here = path.abspath(path.dirname(__file__)) PACKAGES = [ 'lib', 'lib.scripts', 'lib.scripts.biosql', 'lib.scripts.blast', 'lib.scripts.ftp', 'lib.scripts.genbank', 'lib.scripts.manager', 'lib.scripts.multiprocessing', 'lib.scripts.phylogenetic_analyses' ] # Get the long description from the README file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name='orthologs', description="A project that will help to analyze orthologous gense.", version='0.1.0', long_description=long_description, url='https://github.com/robear22890/Orthologs-Project', license='?', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Science/Research', 'Topic :: Scientific/Engineering :: Bio-Informatics', 'Topic :: Scientific/Engineering :: Visualization', 'Programming Language :: Python :: 3', 'Operating System :: Unix', 'Natural Language :: English' ], packages=PACKAGES, install_requires=[], )
a10fb75a45bbb647f8071842773d79101c797529
corehq/project_limits/models.py
corehq/project_limits/models.py
from django.db import models class DynamicRateDefinition(models.Model): key = models.CharField(max_length=512, blank=False, null=False, unique=True, db_index=True) per_week = models.FloatField(default=None, blank=True, null=True) per_day = models.FloatField(default=None, blank=True, null=True) per_hour = models.FloatField(default=None, blank=True, null=True) per_minute = models.FloatField(default=None, blank=True, null=True) per_second = models.FloatField(default=None, blank=True, null=True) def save(self, *args, **kwargs): from corehq.project_limits.rate_limiter import get_dynamic_rate_definition get_dynamic_rate_definition.clear(self.key, {}) super().save(*args, **kwargs)
from django.db import models class DynamicRateDefinition(models.Model): key = models.CharField(max_length=512, blank=False, null=False, unique=True, db_index=True) per_week = models.FloatField(default=None, blank=True, null=True) per_day = models.FloatField(default=None, blank=True, null=True) per_hour = models.FloatField(default=None, blank=True, null=True) per_minute = models.FloatField(default=None, blank=True, null=True) per_second = models.FloatField(default=None, blank=True, null=True) def save(self, *args, **kwargs): self._clear_caches() super().save(*args, **kwargs) def delete(self, *args, **kwargs): self._clear_caches() super().delete(*args, **kwargs) def _clear_caches(self): from corehq.project_limits.rate_limiter import get_dynamic_rate_definition get_dynamic_rate_definition.clear(self.key, {})
Clear caches on DynamicRateDefinition deletion for completeness
Clear caches on DynamicRateDefinition deletion for completeness and to help with tests
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
from django.db import models class DynamicRateDefinition(models.Model): key = models.CharField(max_length=512, blank=False, null=False, unique=True, db_index=True) per_week = models.FloatField(default=None, blank=True, null=True) per_day = models.FloatField(default=None, blank=True, null=True) per_hour = models.FloatField(default=None, blank=True, null=True) per_minute = models.FloatField(default=None, blank=True, null=True) per_second = models.FloatField(default=None, blank=True, null=True) def save(self, *args, **kwargs): from corehq.project_limits.rate_limiter import get_dynamic_rate_definition get_dynamic_rate_definition.clear(self.key, {}) super().save(*args, **kwargs) Clear caches on DynamicRateDefinition deletion for completeness and to help with tests
from django.db import models class DynamicRateDefinition(models.Model): key = models.CharField(max_length=512, blank=False, null=False, unique=True, db_index=True) per_week = models.FloatField(default=None, blank=True, null=True) per_day = models.FloatField(default=None, blank=True, null=True) per_hour = models.FloatField(default=None, blank=True, null=True) per_minute = models.FloatField(default=None, blank=True, null=True) per_second = models.FloatField(default=None, blank=True, null=True) def save(self, *args, **kwargs): self._clear_caches() super().save(*args, **kwargs) def delete(self, *args, **kwargs): self._clear_caches() super().delete(*args, **kwargs) def _clear_caches(self): from corehq.project_limits.rate_limiter import get_dynamic_rate_definition get_dynamic_rate_definition.clear(self.key, {})
<commit_before>from django.db import models class DynamicRateDefinition(models.Model): key = models.CharField(max_length=512, blank=False, null=False, unique=True, db_index=True) per_week = models.FloatField(default=None, blank=True, null=True) per_day = models.FloatField(default=None, blank=True, null=True) per_hour = models.FloatField(default=None, blank=True, null=True) per_minute = models.FloatField(default=None, blank=True, null=True) per_second = models.FloatField(default=None, blank=True, null=True) def save(self, *args, **kwargs): from corehq.project_limits.rate_limiter import get_dynamic_rate_definition get_dynamic_rate_definition.clear(self.key, {}) super().save(*args, **kwargs) <commit_msg>Clear caches on DynamicRateDefinition deletion for completeness and to help with tests<commit_after>
from django.db import models class DynamicRateDefinition(models.Model): key = models.CharField(max_length=512, blank=False, null=False, unique=True, db_index=True) per_week = models.FloatField(default=None, blank=True, null=True) per_day = models.FloatField(default=None, blank=True, null=True) per_hour = models.FloatField(default=None, blank=True, null=True) per_minute = models.FloatField(default=None, blank=True, null=True) per_second = models.FloatField(default=None, blank=True, null=True) def save(self, *args, **kwargs): self._clear_caches() super().save(*args, **kwargs) def delete(self, *args, **kwargs): self._clear_caches() super().delete(*args, **kwargs) def _clear_caches(self): from corehq.project_limits.rate_limiter import get_dynamic_rate_definition get_dynamic_rate_definition.clear(self.key, {})
from django.db import models class DynamicRateDefinition(models.Model): key = models.CharField(max_length=512, blank=False, null=False, unique=True, db_index=True) per_week = models.FloatField(default=None, blank=True, null=True) per_day = models.FloatField(default=None, blank=True, null=True) per_hour = models.FloatField(default=None, blank=True, null=True) per_minute = models.FloatField(default=None, blank=True, null=True) per_second = models.FloatField(default=None, blank=True, null=True) def save(self, *args, **kwargs): from corehq.project_limits.rate_limiter import get_dynamic_rate_definition get_dynamic_rate_definition.clear(self.key, {}) super().save(*args, **kwargs) Clear caches on DynamicRateDefinition deletion for completeness and to help with testsfrom django.db import models class DynamicRateDefinition(models.Model): key = models.CharField(max_length=512, blank=False, null=False, unique=True, db_index=True) per_week = models.FloatField(default=None, blank=True, null=True) per_day = models.FloatField(default=None, blank=True, null=True) per_hour = models.FloatField(default=None, blank=True, null=True) per_minute = models.FloatField(default=None, blank=True, null=True) per_second = models.FloatField(default=None, blank=True, null=True) def save(self, *args, **kwargs): self._clear_caches() super().save(*args, **kwargs) def delete(self, *args, **kwargs): self._clear_caches() super().delete(*args, **kwargs) def _clear_caches(self): from corehq.project_limits.rate_limiter import get_dynamic_rate_definition get_dynamic_rate_definition.clear(self.key, {})
<commit_before>from django.db import models class DynamicRateDefinition(models.Model): key = models.CharField(max_length=512, blank=False, null=False, unique=True, db_index=True) per_week = models.FloatField(default=None, blank=True, null=True) per_day = models.FloatField(default=None, blank=True, null=True) per_hour = models.FloatField(default=None, blank=True, null=True) per_minute = models.FloatField(default=None, blank=True, null=True) per_second = models.FloatField(default=None, blank=True, null=True) def save(self, *args, **kwargs): from corehq.project_limits.rate_limiter import get_dynamic_rate_definition get_dynamic_rate_definition.clear(self.key, {}) super().save(*args, **kwargs) <commit_msg>Clear caches on DynamicRateDefinition deletion for completeness and to help with tests<commit_after>from django.db import models class DynamicRateDefinition(models.Model): key = models.CharField(max_length=512, blank=False, null=False, unique=True, db_index=True) per_week = models.FloatField(default=None, blank=True, null=True) per_day = models.FloatField(default=None, blank=True, null=True) per_hour = models.FloatField(default=None, blank=True, null=True) per_minute = models.FloatField(default=None, blank=True, null=True) per_second = models.FloatField(default=None, blank=True, null=True) def save(self, *args, **kwargs): self._clear_caches() super().save(*args, **kwargs) def delete(self, *args, **kwargs): self._clear_caches() super().delete(*args, **kwargs) def _clear_caches(self): from corehq.project_limits.rate_limiter import get_dynamic_rate_definition get_dynamic_rate_definition.clear(self.key, {})
cacc32895850c7f7bf162c749b93b25b32d98429
setup.py
setup.py
# -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.rst') as f: readme = f.read() setup( name='django-session-cleanup', version='2.0.0', description=('A periodic task for removing expired Django sessions ' 'with Celery.'), long_description=readme, author='Elijah Rutschman', author_email='elijahr+django-session-cleanup@gmail.com', maintainer='Martey Dodoo', maintainer_email='martey+django-session-cleanup@mobolic.com', url='https://github.com/mobolic/django-session-cleanup', classifiers=[ 'Framework :: Django :: 1.11', 'Framework :: Django :: 2.0', 'Framework :: Django :: 2.1', 'Framework :: Django :: 2.2', 'Framework :: Django :: 3.0', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ], packages=find_packages(exclude=('tests',)) )
# -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.rst') as f: readme = f.read() setup( name='django-session-cleanup', version='2.0.0', description=('A periodic task for removing expired Django sessions ' 'with Celery.'), long_description=readme, author='Elijah Rutschman', author_email='elijahr+django-session-cleanup@gmail.com', maintainer='Martey Dodoo', maintainer_email='martey+django-session-cleanup@mobolic.com', url='https://github.com/mobolic/django-session-cleanup', classifiers=[ 'Framework :: Django :: 1.11', 'Framework :: Django :: 2.0', 'Framework :: Django :: 2.1', 'Framework :: Django :: 2.2', 'Framework :: Django :: 3.0', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ], packages=find_packages(exclude=('tests',)) )
Remove outdated Python 3.4 classifier.
Remove outdated Python 3.4 classifier.
Python
bsd-2-clause
sandersnewmedia/django-session-cleanup
# -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.rst') as f: readme = f.read() setup( name='django-session-cleanup', version='2.0.0', description=('A periodic task for removing expired Django sessions ' 'with Celery.'), long_description=readme, author='Elijah Rutschman', author_email='elijahr+django-session-cleanup@gmail.com', maintainer='Martey Dodoo', maintainer_email='martey+django-session-cleanup@mobolic.com', url='https://github.com/mobolic/django-session-cleanup', classifiers=[ 'Framework :: Django :: 1.11', 'Framework :: Django :: 2.0', 'Framework :: Django :: 2.1', 'Framework :: Django :: 2.2', 'Framework :: Django :: 3.0', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ], packages=find_packages(exclude=('tests',)) ) Remove outdated Python 3.4 classifier.
# -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.rst') as f: readme = f.read() setup( name='django-session-cleanup', version='2.0.0', description=('A periodic task for removing expired Django sessions ' 'with Celery.'), long_description=readme, author='Elijah Rutschman', author_email='elijahr+django-session-cleanup@gmail.com', maintainer='Martey Dodoo', maintainer_email='martey+django-session-cleanup@mobolic.com', url='https://github.com/mobolic/django-session-cleanup', classifiers=[ 'Framework :: Django :: 1.11', 'Framework :: Django :: 2.0', 'Framework :: Django :: 2.1', 'Framework :: Django :: 2.2', 'Framework :: Django :: 3.0', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ], packages=find_packages(exclude=('tests',)) )
<commit_before># -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.rst') as f: readme = f.read() setup( name='django-session-cleanup', version='2.0.0', description=('A periodic task for removing expired Django sessions ' 'with Celery.'), long_description=readme, author='Elijah Rutschman', author_email='elijahr+django-session-cleanup@gmail.com', maintainer='Martey Dodoo', maintainer_email='martey+django-session-cleanup@mobolic.com', url='https://github.com/mobolic/django-session-cleanup', classifiers=[ 'Framework :: Django :: 1.11', 'Framework :: Django :: 2.0', 'Framework :: Django :: 2.1', 'Framework :: Django :: 2.2', 'Framework :: Django :: 3.0', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ], packages=find_packages(exclude=('tests',)) ) <commit_msg>Remove outdated Python 3.4 classifier.<commit_after>
# -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.rst') as f: readme = f.read() setup( name='django-session-cleanup', version='2.0.0', description=('A periodic task for removing expired Django sessions ' 'with Celery.'), long_description=readme, author='Elijah Rutschman', author_email='elijahr+django-session-cleanup@gmail.com', maintainer='Martey Dodoo', maintainer_email='martey+django-session-cleanup@mobolic.com', url='https://github.com/mobolic/django-session-cleanup', classifiers=[ 'Framework :: Django :: 1.11', 'Framework :: Django :: 2.0', 'Framework :: Django :: 2.1', 'Framework :: Django :: 2.2', 'Framework :: Django :: 3.0', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ], packages=find_packages(exclude=('tests',)) )
# -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.rst') as f: readme = f.read() setup( name='django-session-cleanup', version='2.0.0', description=('A periodic task for removing expired Django sessions ' 'with Celery.'), long_description=readme, author='Elijah Rutschman', author_email='elijahr+django-session-cleanup@gmail.com', maintainer='Martey Dodoo', maintainer_email='martey+django-session-cleanup@mobolic.com', url='https://github.com/mobolic/django-session-cleanup', classifiers=[ 'Framework :: Django :: 1.11', 'Framework :: Django :: 2.0', 'Framework :: Django :: 2.1', 'Framework :: Django :: 2.2', 'Framework :: Django :: 3.0', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ], packages=find_packages(exclude=('tests',)) ) Remove outdated Python 3.4 classifier.# -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.rst') as f: readme = f.read() setup( name='django-session-cleanup', version='2.0.0', description=('A periodic task for removing expired Django sessions ' 'with Celery.'), long_description=readme, author='Elijah Rutschman', author_email='elijahr+django-session-cleanup@gmail.com', maintainer='Martey Dodoo', maintainer_email='martey+django-session-cleanup@mobolic.com', url='https://github.com/mobolic/django-session-cleanup', classifiers=[ 'Framework :: Django :: 1.11', 'Framework :: Django :: 2.0', 'Framework :: Django :: 2.1', 'Framework :: Django :: 2.2', 'Framework :: Django :: 3.0', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ], packages=find_packages(exclude=('tests',)) )
<commit_before># -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.rst') as f: readme = f.read() setup( name='django-session-cleanup', version='2.0.0', description=('A periodic task for removing expired Django sessions ' 'with Celery.'), long_description=readme, author='Elijah Rutschman', author_email='elijahr+django-session-cleanup@gmail.com', maintainer='Martey Dodoo', maintainer_email='martey+django-session-cleanup@mobolic.com', url='https://github.com/mobolic/django-session-cleanup', classifiers=[ 'Framework :: Django :: 1.11', 'Framework :: Django :: 2.0', 'Framework :: Django :: 2.1', 'Framework :: Django :: 2.2', 'Framework :: Django :: 3.0', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ], packages=find_packages(exclude=('tests',)) ) <commit_msg>Remove outdated Python 3.4 classifier.<commit_after># -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.rst') as f: readme = f.read() setup( name='django-session-cleanup', version='2.0.0', description=('A periodic task for removing expired Django sessions ' 'with Celery.'), long_description=readme, author='Elijah Rutschman', author_email='elijahr+django-session-cleanup@gmail.com', maintainer='Martey Dodoo', maintainer_email='martey+django-session-cleanup@mobolic.com', url='https://github.com/mobolic/django-session-cleanup', classifiers=[ 'Framework :: Django :: 1.11', 'Framework :: Django :: 2.0', 'Framework :: Django :: 2.1', 'Framework :: Django :: 2.2', 'Framework :: Django :: 3.0', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ], packages=find_packages(exclude=('tests',)) )
2b8fca2bebd3acc179ac591908256a8173408cec
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup, find_packages install_requires = [] with open("requirements.txt") as fp: for s in fp: install_requires.append(s.strip()) setup( name="luigi-td", version='0.0.0', description="Luigi integration for Treasure Data", author="Treasure Data, Inc.", author_email="support@treasure-data.com", url="http://treasuredata.com/", install_requires=install_requires, packages=find_packages(), license="Apache Software License", platforms="Posix; MacOS X; Windows", classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Topic :: Internet", ], )
#!/usr/bin/env python from setuptools import setup, find_packages setup( name="luigi-td", version='0.0.0', description="Luigi integration for Treasure Data", author="Treasure Data, Inc.", author_email="support@treasure-data.com", url="http://treasuredata.com/", install_requires=open("requirements.txt").read().splitlines(), packages=find_packages(), license="Apache Software License", platforms="Posix; MacOS X; Windows", classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Topic :: Internet", ], )
Simplify the definition of install_requires
Simplify the definition of install_requires
Python
apache-2.0
treasure-data/luigi-td
#!/usr/bin/env python from setuptools import setup, find_packages install_requires = [] with open("requirements.txt") as fp: for s in fp: install_requires.append(s.strip()) setup( name="luigi-td", version='0.0.0', description="Luigi integration for Treasure Data", author="Treasure Data, Inc.", author_email="support@treasure-data.com", url="http://treasuredata.com/", install_requires=install_requires, packages=find_packages(), license="Apache Software License", platforms="Posix; MacOS X; Windows", classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Topic :: Internet", ], ) Simplify the definition of install_requires
#!/usr/bin/env python from setuptools import setup, find_packages setup( name="luigi-td", version='0.0.0', description="Luigi integration for Treasure Data", author="Treasure Data, Inc.", author_email="support@treasure-data.com", url="http://treasuredata.com/", install_requires=open("requirements.txt").read().splitlines(), packages=find_packages(), license="Apache Software License", platforms="Posix; MacOS X; Windows", classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Topic :: Internet", ], )
<commit_before>#!/usr/bin/env python from setuptools import setup, find_packages install_requires = [] with open("requirements.txt") as fp: for s in fp: install_requires.append(s.strip()) setup( name="luigi-td", version='0.0.0', description="Luigi integration for Treasure Data", author="Treasure Data, Inc.", author_email="support@treasure-data.com", url="http://treasuredata.com/", install_requires=install_requires, packages=find_packages(), license="Apache Software License", platforms="Posix; MacOS X; Windows", classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Topic :: Internet", ], ) <commit_msg>Simplify the definition of install_requires<commit_after>
#!/usr/bin/env python from setuptools import setup, find_packages setup( name="luigi-td", version='0.0.0', description="Luigi integration for Treasure Data", author="Treasure Data, Inc.", author_email="support@treasure-data.com", url="http://treasuredata.com/", install_requires=open("requirements.txt").read().splitlines(), packages=find_packages(), license="Apache Software License", platforms="Posix; MacOS X; Windows", classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Topic :: Internet", ], )
#!/usr/bin/env python from setuptools import setup, find_packages install_requires = [] with open("requirements.txt") as fp: for s in fp: install_requires.append(s.strip()) setup( name="luigi-td", version='0.0.0', description="Luigi integration for Treasure Data", author="Treasure Data, Inc.", author_email="support@treasure-data.com", url="http://treasuredata.com/", install_requires=install_requires, packages=find_packages(), license="Apache Software License", platforms="Posix; MacOS X; Windows", classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Topic :: Internet", ], ) Simplify the definition of install_requires#!/usr/bin/env python from setuptools import setup, find_packages setup( name="luigi-td", version='0.0.0', description="Luigi integration for Treasure Data", author="Treasure Data, Inc.", author_email="support@treasure-data.com", url="http://treasuredata.com/", install_requires=open("requirements.txt").read().splitlines(), packages=find_packages(), license="Apache Software License", platforms="Posix; MacOS X; Windows", classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Topic :: Internet", ], )
<commit_before>#!/usr/bin/env python from setuptools import setup, find_packages install_requires = [] with open("requirements.txt") as fp: for s in fp: install_requires.append(s.strip()) setup( name="luigi-td", version='0.0.0', description="Luigi integration for Treasure Data", author="Treasure Data, Inc.", author_email="support@treasure-data.com", url="http://treasuredata.com/", install_requires=install_requires, packages=find_packages(), license="Apache Software License", platforms="Posix; MacOS X; Windows", classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Topic :: Internet", ], ) <commit_msg>Simplify the definition of install_requires<commit_after>#!/usr/bin/env python from setuptools import setup, find_packages setup( name="luigi-td", version='0.0.0', description="Luigi integration for Treasure Data", author="Treasure Data, Inc.", author_email="support@treasure-data.com", url="http://treasuredata.com/", install_requires=open("requirements.txt").read().splitlines(), packages=find_packages(), license="Apache Software License", platforms="Posix; MacOS X; Windows", classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Topic :: Internet", ], )
0759900db9530d2bd2d36f74a5381c48f801b76a
setup.py
setup.py
import os from setuptools import setup from withtool import __version__ def read(fname): path = os.path.join(os.path.dirname(__file__), fname) with open(path, encoding='utf-8') as f: return f.read() setup( name='with', version=__version__, description='A shell context manager', long_description=read('README.rst'), author='Renan Ivo', author_email='renanivom@gmail.com', url='https://github.com/renanivo/with', keywords='context manager shell command line repl', scripts=['bin/with'], install_requires=[ 'appdirs==1.4.0', 'docopt==0.6.2', 'prompt-toolkit==1.0', 'python-slugify==1.2.1', ], packages=['withtool'], classifiers=[ 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3 :: Only', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ] )
import os from setuptools import setup from withtool import __version__ def read(fname): path = os.path.join(os.path.dirname(__file__), fname) with open(path, encoding='utf-8') as f: return f.read() setup( name='with', version=__version__, description='A shell context manager', long_description=read('README.rst'), author='Renan Ivo', author_email='renanivom@gmail.com', url='https://github.com/renanivo/with', keywords='context manager shell command line repl', scripts=['bin/with'], install_requires=[ 'appdirs==1.4.1', 'docopt==0.6.2', 'prompt-toolkit==1.0', 'python-slugify==1.2.1', ], packages=['withtool'], classifiers=[ 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3 :: Only', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ] )
Upgrade dependency appdirs to ==1.4.1
Upgrade dependency appdirs to ==1.4.1
Python
mit
renanivo/with
import os from setuptools import setup from withtool import __version__ def read(fname): path = os.path.join(os.path.dirname(__file__), fname) with open(path, encoding='utf-8') as f: return f.read() setup( name='with', version=__version__, description='A shell context manager', long_description=read('README.rst'), author='Renan Ivo', author_email='renanivom@gmail.com', url='https://github.com/renanivo/with', keywords='context manager shell command line repl', scripts=['bin/with'], install_requires=[ 'appdirs==1.4.0', 'docopt==0.6.2', 'prompt-toolkit==1.0', 'python-slugify==1.2.1', ], packages=['withtool'], classifiers=[ 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3 :: Only', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ] ) Upgrade dependency appdirs to ==1.4.1
import os from setuptools import setup from withtool import __version__ def read(fname): path = os.path.join(os.path.dirname(__file__), fname) with open(path, encoding='utf-8') as f: return f.read() setup( name='with', version=__version__, description='A shell context manager', long_description=read('README.rst'), author='Renan Ivo', author_email='renanivom@gmail.com', url='https://github.com/renanivo/with', keywords='context manager shell command line repl', scripts=['bin/with'], install_requires=[ 'appdirs==1.4.1', 'docopt==0.6.2', 'prompt-toolkit==1.0', 'python-slugify==1.2.1', ], packages=['withtool'], classifiers=[ 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3 :: Only', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ] )
<commit_before>import os from setuptools import setup from withtool import __version__ def read(fname): path = os.path.join(os.path.dirname(__file__), fname) with open(path, encoding='utf-8') as f: return f.read() setup( name='with', version=__version__, description='A shell context manager', long_description=read('README.rst'), author='Renan Ivo', author_email='renanivom@gmail.com', url='https://github.com/renanivo/with', keywords='context manager shell command line repl', scripts=['bin/with'], install_requires=[ 'appdirs==1.4.0', 'docopt==0.6.2', 'prompt-toolkit==1.0', 'python-slugify==1.2.1', ], packages=['withtool'], classifiers=[ 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3 :: Only', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ] ) <commit_msg>Upgrade dependency appdirs to ==1.4.1<commit_after>
import os from setuptools import setup from withtool import __version__ def read(fname): path = os.path.join(os.path.dirname(__file__), fname) with open(path, encoding='utf-8') as f: return f.read() setup( name='with', version=__version__, description='A shell context manager', long_description=read('README.rst'), author='Renan Ivo', author_email='renanivom@gmail.com', url='https://github.com/renanivo/with', keywords='context manager shell command line repl', scripts=['bin/with'], install_requires=[ 'appdirs==1.4.1', 'docopt==0.6.2', 'prompt-toolkit==1.0', 'python-slugify==1.2.1', ], packages=['withtool'], classifiers=[ 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3 :: Only', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ] )
import os from setuptools import setup from withtool import __version__ def read(fname): path = os.path.join(os.path.dirname(__file__), fname) with open(path, encoding='utf-8') as f: return f.read() setup( name='with', version=__version__, description='A shell context manager', long_description=read('README.rst'), author='Renan Ivo', author_email='renanivom@gmail.com', url='https://github.com/renanivo/with', keywords='context manager shell command line repl', scripts=['bin/with'], install_requires=[ 'appdirs==1.4.0', 'docopt==0.6.2', 'prompt-toolkit==1.0', 'python-slugify==1.2.1', ], packages=['withtool'], classifiers=[ 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3 :: Only', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ] ) Upgrade dependency appdirs to ==1.4.1import os from setuptools import setup from withtool import __version__ def read(fname): path = os.path.join(os.path.dirname(__file__), fname) with open(path, encoding='utf-8') as f: return f.read() setup( name='with', version=__version__, description='A shell context manager', long_description=read('README.rst'), author='Renan Ivo', author_email='renanivom@gmail.com', url='https://github.com/renanivo/with', keywords='context manager shell command line repl', scripts=['bin/with'], install_requires=[ 'appdirs==1.4.1', 'docopt==0.6.2', 'prompt-toolkit==1.0', 'python-slugify==1.2.1', ], packages=['withtool'], classifiers=[ 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3 :: Only', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ] )
<commit_before>import os from setuptools import setup from withtool import __version__ def read(fname): path = os.path.join(os.path.dirname(__file__), fname) with open(path, encoding='utf-8') as f: return f.read() setup( name='with', version=__version__, description='A shell context manager', long_description=read('README.rst'), author='Renan Ivo', author_email='renanivom@gmail.com', url='https://github.com/renanivo/with', keywords='context manager shell command line repl', scripts=['bin/with'], install_requires=[ 'appdirs==1.4.0', 'docopt==0.6.2', 'prompt-toolkit==1.0', 'python-slugify==1.2.1', ], packages=['withtool'], classifiers=[ 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3 :: Only', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ] ) <commit_msg>Upgrade dependency appdirs to ==1.4.1<commit_after>import os from setuptools import setup from withtool import __version__ def read(fname): path = os.path.join(os.path.dirname(__file__), fname) with open(path, encoding='utf-8') as f: return f.read() setup( name='with', version=__version__, description='A shell context manager', long_description=read('README.rst'), author='Renan Ivo', author_email='renanivom@gmail.com', url='https://github.com/renanivo/with', keywords='context manager shell command line repl', scripts=['bin/with'], install_requires=[ 'appdirs==1.4.1', 'docopt==0.6.2', 'prompt-toolkit==1.0', 'python-slugify==1.2.1', ], packages=['withtool'], classifiers=[ 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3 :: Only', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ] )
6a71652e3cfdec22307b05539914aa6325cb4d53
setup.py
setup.py
#!/usr/bin/env python from __future__ import with_statement import sys from setuptools import setup, find_packages long_description = """ Pypimirror - A Pypi mirror script that uses threading and requests """ install_requires = [ 'beautifulsoup4==4.4.1', 'requests==2.9.1', ] setup( name='pypimirror', version='0.1.0a', description='pypimirror', long_description=long_description, author='wilypomegranate', author_email='wilypomegranate@users.noreply.github.com>', packages=find_packages(), test_suite='py.test', tests_require=['pytest'], install_requires=install_requires, entry_points={ 'console_scripts': [ 'pypimirror = pypimirror.__main__:main', ] }, classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'Operating System :: Unix', 'Operating System :: POSIX', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Topic :: System :: Systems Administration', ], )
#!/usr/bin/env python from __future__ import with_statement import sys from setuptools import setup, find_packages long_description = """ Pypimirror - A Pypi mirror script that uses threading and requests """ install_requires = [ 'beautifulsoup4==4.4.1', 'requests==2.9.1', ] setup( name='pypimirror-simple', version='0.1.0a0', description='A simple pypimirror', long_description=long_description, author='wilypomegranate', author_email='wilypomegranate@users.noreply.github.com>', packages=find_packages(), test_suite='py.test', tests_require=['pytest'], install_requires=install_requires, entry_points={ 'console_scripts': [ 'pypimirror = pypimirror.__main__:main', ] }, classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'Operating System :: Unix', 'Operating System :: POSIX', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Topic :: System :: Systems Administration', ], )
Change project name to avoid pypi conflict
Change project name to avoid pypi conflict
Python
mit
wilypomegranate/pypimirror
#!/usr/bin/env python from __future__ import with_statement import sys from setuptools import setup, find_packages long_description = """ Pypimirror - A Pypi mirror script that uses threading and requests """ install_requires = [ 'beautifulsoup4==4.4.1', 'requests==2.9.1', ] setup( name='pypimirror', version='0.1.0a', description='pypimirror', long_description=long_description, author='wilypomegranate', author_email='wilypomegranate@users.noreply.github.com>', packages=find_packages(), test_suite='py.test', tests_require=['pytest'], install_requires=install_requires, entry_points={ 'console_scripts': [ 'pypimirror = pypimirror.__main__:main', ] }, classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'Operating System :: Unix', 'Operating System :: POSIX', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Topic :: System :: Systems Administration', ], ) Change project name to avoid pypi conflict
#!/usr/bin/env python from __future__ import with_statement import sys from setuptools import setup, find_packages long_description = """ Pypimirror - A Pypi mirror script that uses threading and requests """ install_requires = [ 'beautifulsoup4==4.4.1', 'requests==2.9.1', ] setup( name='pypimirror-simple', version='0.1.0a0', description='A simple pypimirror', long_description=long_description, author='wilypomegranate', author_email='wilypomegranate@users.noreply.github.com>', packages=find_packages(), test_suite='py.test', tests_require=['pytest'], install_requires=install_requires, entry_points={ 'console_scripts': [ 'pypimirror = pypimirror.__main__:main', ] }, classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'Operating System :: Unix', 'Operating System :: POSIX', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Topic :: System :: Systems Administration', ], )
<commit_before>#!/usr/bin/env python from __future__ import with_statement import sys from setuptools import setup, find_packages long_description = """ Pypimirror - A Pypi mirror script that uses threading and requests """ install_requires = [ 'beautifulsoup4==4.4.1', 'requests==2.9.1', ] setup( name='pypimirror', version='0.1.0a', description='pypimirror', long_description=long_description, author='wilypomegranate', author_email='wilypomegranate@users.noreply.github.com>', packages=find_packages(), test_suite='py.test', tests_require=['pytest'], install_requires=install_requires, entry_points={ 'console_scripts': [ 'pypimirror = pypimirror.__main__:main', ] }, classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'Operating System :: Unix', 'Operating System :: POSIX', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Topic :: System :: Systems Administration', ], ) <commit_msg>Change project name to avoid pypi conflict<commit_after>
#!/usr/bin/env python from __future__ import with_statement import sys from setuptools import setup, find_packages long_description = """ Pypimirror - A Pypi mirror script that uses threading and requests """ install_requires = [ 'beautifulsoup4==4.4.1', 'requests==2.9.1', ] setup( name='pypimirror-simple', version='0.1.0a0', description='A simple pypimirror', long_description=long_description, author='wilypomegranate', author_email='wilypomegranate@users.noreply.github.com>', packages=find_packages(), test_suite='py.test', tests_require=['pytest'], install_requires=install_requires, entry_points={ 'console_scripts': [ 'pypimirror = pypimirror.__main__:main', ] }, classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'Operating System :: Unix', 'Operating System :: POSIX', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Topic :: System :: Systems Administration', ], )
#!/usr/bin/env python from __future__ import with_statement import sys from setuptools import setup, find_packages long_description = """ Pypimirror - A Pypi mirror script that uses threading and requests """ install_requires = [ 'beautifulsoup4==4.4.1', 'requests==2.9.1', ] setup( name='pypimirror', version='0.1.0a', description='pypimirror', long_description=long_description, author='wilypomegranate', author_email='wilypomegranate@users.noreply.github.com>', packages=find_packages(), test_suite='py.test', tests_require=['pytest'], install_requires=install_requires, entry_points={ 'console_scripts': [ 'pypimirror = pypimirror.__main__:main', ] }, classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'Operating System :: Unix', 'Operating System :: POSIX', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Topic :: System :: Systems Administration', ], ) Change project name to avoid pypi conflict#!/usr/bin/env python from __future__ import with_statement import sys from setuptools import setup, find_packages long_description = """ Pypimirror - A Pypi mirror script that uses threading and requests """ install_requires = [ 'beautifulsoup4==4.4.1', 'requests==2.9.1', ] setup( name='pypimirror-simple', version='0.1.0a0', description='A simple pypimirror', long_description=long_description, author='wilypomegranate', author_email='wilypomegranate@users.noreply.github.com>', packages=find_packages(), test_suite='py.test', tests_require=['pytest'], install_requires=install_requires, entry_points={ 'console_scripts': [ 'pypimirror = pypimirror.__main__:main', ] }, classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'Operating System :: Unix', 'Operating System :: POSIX', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Topic :: System :: Systems Administration', ], )
<commit_before>#!/usr/bin/env python from __future__ import with_statement import sys from setuptools import setup, find_packages long_description = """ Pypimirror - A Pypi mirror script that uses threading and requests """ install_requires = [ 'beautifulsoup4==4.4.1', 'requests==2.9.1', ] setup( name='pypimirror', version='0.1.0a', description='pypimirror', long_description=long_description, author='wilypomegranate', author_email='wilypomegranate@users.noreply.github.com>', packages=find_packages(), test_suite='py.test', tests_require=['pytest'], install_requires=install_requires, entry_points={ 'console_scripts': [ 'pypimirror = pypimirror.__main__:main', ] }, classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'Operating System :: Unix', 'Operating System :: POSIX', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Topic :: System :: Systems Administration', ], ) <commit_msg>Change project name to avoid pypi conflict<commit_after>#!/usr/bin/env python from __future__ import with_statement import sys from setuptools import setup, find_packages long_description = """ Pypimirror - A Pypi mirror script that uses threading and requests """ install_requires = [ 'beautifulsoup4==4.4.1', 'requests==2.9.1', ] setup( name='pypimirror-simple', version='0.1.0a0', description='A simple pypimirror', long_description=long_description, author='wilypomegranate', author_email='wilypomegranate@users.noreply.github.com>', packages=find_packages(), test_suite='py.test', tests_require=['pytest'], install_requires=install_requires, entry_points={ 'console_scripts': [ 'pypimirror = pypimirror.__main__:main', ] }, classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'Operating System :: Unix', 'Operating System :: POSIX', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Topic :: System :: Systems Administration', ], )
dd98a76ac16888051e55b98cb26e28c3afae5842
setup.py
setup.py
from setuptools import setup, find_packages from sys import version_info def install_requires(): requires = [ 'traitlets>=4.1', 'six>=1.9.0', 'pyyaml>=3.11', ] if (version_info.major, version_info.minor) < (3, 4): requires.append('singledispatch>=3.4.0') return requires def extras_require(): return { 'test': [ 'tox', 'pytest>=2.8.5', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', ], } def main(): setup( name='straitlets', version='0.1.1', description="Serializable IPython Traitlets", author="Scott Sanderson", author_email="ssanderson@quantopian.com", packages=find_packages(include='straitlets.*'), include_package_data=True, zip_safe=True, url="https://github.com/quantopian/serializable-traitlets", classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: IPython', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python', ], install_requires=install_requires(), extras_require=extras_require() ) if __name__ == '__main__': main()
from setuptools import setup, find_packages from sys import version_info def install_requires(): requires = [ 'traitlets>=4.1', 'six>=1.9.0', 'pyyaml>=3.11', ] if (version_info.major, version_info.minor) < (3, 4): requires.append('singledispatch>=3.4.0') return requires def extras_require(): return { 'test': [ 'tox', 'pytest>=2.8.5', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', ], } def main(): setup( name='straitlets', version='0.2.0', description="Serializable IPython Traitlets", author="Quantopian Team", author_email="opensource@quantopian.com", packages=find_packages(include='straitlets.*'), include_package_data=True, zip_safe=True, url="https://github.com/quantopian/serializable-traitlets", classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: IPython', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python', ], install_requires=install_requires(), extras_require=extras_require() ) if __name__ == '__main__': main()
Tag 0.2 for correct org.
BUILD: Tag 0.2 for correct org.
Python
apache-2.0
quantopian/serializable-traitlets
from setuptools import setup, find_packages from sys import version_info def install_requires(): requires = [ 'traitlets>=4.1', 'six>=1.9.0', 'pyyaml>=3.11', ] if (version_info.major, version_info.minor) < (3, 4): requires.append('singledispatch>=3.4.0') return requires def extras_require(): return { 'test': [ 'tox', 'pytest>=2.8.5', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', ], } def main(): setup( name='straitlets', version='0.1.1', description="Serializable IPython Traitlets", author="Scott Sanderson", author_email="ssanderson@quantopian.com", packages=find_packages(include='straitlets.*'), include_package_data=True, zip_safe=True, url="https://github.com/quantopian/serializable-traitlets", classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: IPython', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python', ], install_requires=install_requires(), extras_require=extras_require() ) if __name__ == '__main__': main() BUILD: Tag 0.2 for correct org.
from setuptools import setup, find_packages from sys import version_info def install_requires(): requires = [ 'traitlets>=4.1', 'six>=1.9.0', 'pyyaml>=3.11', ] if (version_info.major, version_info.minor) < (3, 4): requires.append('singledispatch>=3.4.0') return requires def extras_require(): return { 'test': [ 'tox', 'pytest>=2.8.5', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', ], } def main(): setup( name='straitlets', version='0.2.0', description="Serializable IPython Traitlets", author="Quantopian Team", author_email="opensource@quantopian.com", packages=find_packages(include='straitlets.*'), include_package_data=True, zip_safe=True, url="https://github.com/quantopian/serializable-traitlets", classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: IPython', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python', ], install_requires=install_requires(), extras_require=extras_require() ) if __name__ == '__main__': main()
<commit_before>from setuptools import setup, find_packages from sys import version_info def install_requires(): requires = [ 'traitlets>=4.1', 'six>=1.9.0', 'pyyaml>=3.11', ] if (version_info.major, version_info.minor) < (3, 4): requires.append('singledispatch>=3.4.0') return requires def extras_require(): return { 'test': [ 'tox', 'pytest>=2.8.5', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', ], } def main(): setup( name='straitlets', version='0.1.1', description="Serializable IPython Traitlets", author="Scott Sanderson", author_email="ssanderson@quantopian.com", packages=find_packages(include='straitlets.*'), include_package_data=True, zip_safe=True, url="https://github.com/quantopian/serializable-traitlets", classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: IPython', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python', ], install_requires=install_requires(), extras_require=extras_require() ) if __name__ == '__main__': main() <commit_msg>BUILD: Tag 0.2 for correct org.<commit_after>
from setuptools import setup, find_packages from sys import version_info def install_requires(): requires = [ 'traitlets>=4.1', 'six>=1.9.0', 'pyyaml>=3.11', ] if (version_info.major, version_info.minor) < (3, 4): requires.append('singledispatch>=3.4.0') return requires def extras_require(): return { 'test': [ 'tox', 'pytest>=2.8.5', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', ], } def main(): setup( name='straitlets', version='0.2.0', description="Serializable IPython Traitlets", author="Quantopian Team", author_email="opensource@quantopian.com", packages=find_packages(include='straitlets.*'), include_package_data=True, zip_safe=True, url="https://github.com/quantopian/serializable-traitlets", classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: IPython', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python', ], install_requires=install_requires(), extras_require=extras_require() ) if __name__ == '__main__': main()
from setuptools import setup, find_packages from sys import version_info def install_requires(): requires = [ 'traitlets>=4.1', 'six>=1.9.0', 'pyyaml>=3.11', ] if (version_info.major, version_info.minor) < (3, 4): requires.append('singledispatch>=3.4.0') return requires def extras_require(): return { 'test': [ 'tox', 'pytest>=2.8.5', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', ], } def main(): setup( name='straitlets', version='0.1.1', description="Serializable IPython Traitlets", author="Scott Sanderson", author_email="ssanderson@quantopian.com", packages=find_packages(include='straitlets.*'), include_package_data=True, zip_safe=True, url="https://github.com/quantopian/serializable-traitlets", classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: IPython', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python', ], install_requires=install_requires(), extras_require=extras_require() ) if __name__ == '__main__': main() BUILD: Tag 0.2 for correct org.from setuptools import setup, find_packages from sys import version_info def install_requires(): requires = [ 'traitlets>=4.1', 'six>=1.9.0', 'pyyaml>=3.11', ] if (version_info.major, version_info.minor) < (3, 4): requires.append('singledispatch>=3.4.0') return requires def extras_require(): return { 'test': [ 'tox', 'pytest>=2.8.5', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', ], } def main(): setup( name='straitlets', version='0.2.0', description="Serializable IPython Traitlets", author="Quantopian Team", author_email="opensource@quantopian.com", packages=find_packages(include='straitlets.*'), include_package_data=True, zip_safe=True, url="https://github.com/quantopian/serializable-traitlets", classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: IPython', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python', ], install_requires=install_requires(), extras_require=extras_require() ) if __name__ == '__main__': main()
<commit_before>from setuptools import setup, find_packages from sys import version_info def install_requires(): requires = [ 'traitlets>=4.1', 'six>=1.9.0', 'pyyaml>=3.11', ] if (version_info.major, version_info.minor) < (3, 4): requires.append('singledispatch>=3.4.0') return requires def extras_require(): return { 'test': [ 'tox', 'pytest>=2.8.5', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', ], } def main(): setup( name='straitlets', version='0.1.1', description="Serializable IPython Traitlets", author="Scott Sanderson", author_email="ssanderson@quantopian.com", packages=find_packages(include='straitlets.*'), include_package_data=True, zip_safe=True, url="https://github.com/quantopian/serializable-traitlets", classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: IPython', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python', ], install_requires=install_requires(), extras_require=extras_require() ) if __name__ == '__main__': main() <commit_msg>BUILD: Tag 0.2 for correct org.<commit_after>from setuptools import setup, find_packages from sys import version_info def install_requires(): requires = [ 'traitlets>=4.1', 'six>=1.9.0', 'pyyaml>=3.11', ] if (version_info.major, version_info.minor) < (3, 4): requires.append('singledispatch>=3.4.0') return requires def extras_require(): return { 'test': [ 'tox', 'pytest>=2.8.5', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', ], } def main(): setup( name='straitlets', version='0.2.0', description="Serializable IPython Traitlets", author="Quantopian Team", author_email="opensource@quantopian.com", packages=find_packages(include='straitlets.*'), include_package_data=True, zip_safe=True, url="https://github.com/quantopian/serializable-traitlets", classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: IPython', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python', ], install_requires=install_requires(), extras_require=extras_require() ) if __name__ == '__main__': main()
ab9d6dee8139c5fb5a3d98f41ff404e5e1df774c
setup.py
setup.py
# -*- encoding: utf8 -*- from setuptools import setup, find_packages import os setup( name = "python-redis-lock", version = "0.1.1", url = 'https://github.com/ionelmc/python-redis-lock', download_url = '', license = 'BSD', description = "Lock context manager implemented via redis SETNX/BLPOP.", long_description = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(), author = 'Ionel Cristian Mărieș', author_email = 'contact@ionelmc.ro', packages = find_packages('src'), package_dir = {'':'src'}, include_package_data = True, zip_safe = False, classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: Unix', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Utilities', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ], install_requires=[ 'redis>=2.8.0', ], extras_require={ 'django': [ 'django-redis>=3.3', ] } )
# -*- encoding: utf8 -*- from setuptools import setup, find_packages import os setup( name = "python-redis-lock", version = "0.1.1", url = 'https://github.com/ionelmc/python-redis-lock', download_url = '', license = 'BSD', description = "Lock context manager implemented via redis SETNX/BLPOP.", long_description = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(), author = 'Ionel Cristian Mărieș', author_email = 'contact@ionelmc.ro', packages = find_packages('src'), package_dir = {'':'src'}, include_package_data = True, zip_safe = False, classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: Unix', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Utilities', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ], install_requires=[ 'redis>=2.7.4', ], extras_require={ 'django': [ 'django-redis>=3.3', ] } )
Allow using redis-py 2.7.4 (lowest revision with set keyword arguments).
Allow using redis-py 2.7.4 (lowest revision with set keyword arguments).
Python
bsd-2-clause
buildingenergy/python-redis-lock,ionelmc/python-redis-lock,ByteInternet/python-redis-lock,zoni/python-redis-lock,victor-torres/python-redis-lock
# -*- encoding: utf8 -*- from setuptools import setup, find_packages import os setup( name = "python-redis-lock", version = "0.1.1", url = 'https://github.com/ionelmc/python-redis-lock', download_url = '', license = 'BSD', description = "Lock context manager implemented via redis SETNX/BLPOP.", long_description = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(), author = 'Ionel Cristian Mărieș', author_email = 'contact@ionelmc.ro', packages = find_packages('src'), package_dir = {'':'src'}, include_package_data = True, zip_safe = False, classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: Unix', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Utilities', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ], install_requires=[ 'redis>=2.8.0', ], extras_require={ 'django': [ 'django-redis>=3.3', ] } ) Allow using redis-py 2.7.4 (lowest revision with set keyword arguments).
# -*- encoding: utf8 -*- from setuptools import setup, find_packages import os setup( name = "python-redis-lock", version = "0.1.1", url = 'https://github.com/ionelmc/python-redis-lock', download_url = '', license = 'BSD', description = "Lock context manager implemented via redis SETNX/BLPOP.", long_description = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(), author = 'Ionel Cristian Mărieș', author_email = 'contact@ionelmc.ro', packages = find_packages('src'), package_dir = {'':'src'}, include_package_data = True, zip_safe = False, classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: Unix', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Utilities', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ], install_requires=[ 'redis>=2.7.4', ], extras_require={ 'django': [ 'django-redis>=3.3', ] } )
<commit_before># -*- encoding: utf8 -*- from setuptools import setup, find_packages import os setup( name = "python-redis-lock", version = "0.1.1", url = 'https://github.com/ionelmc/python-redis-lock', download_url = '', license = 'BSD', description = "Lock context manager implemented via redis SETNX/BLPOP.", long_description = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(), author = 'Ionel Cristian Mărieș', author_email = 'contact@ionelmc.ro', packages = find_packages('src'), package_dir = {'':'src'}, include_package_data = True, zip_safe = False, classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: Unix', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Utilities', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ], install_requires=[ 'redis>=2.8.0', ], extras_require={ 'django': [ 'django-redis>=3.3', ] } ) <commit_msg>Allow using redis-py 2.7.4 (lowest revision with set keyword arguments).<commit_after>
# -*- encoding: utf8 -*- from setuptools import setup, find_packages import os setup( name = "python-redis-lock", version = "0.1.1", url = 'https://github.com/ionelmc/python-redis-lock', download_url = '', license = 'BSD', description = "Lock context manager implemented via redis SETNX/BLPOP.", long_description = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(), author = 'Ionel Cristian Mărieș', author_email = 'contact@ionelmc.ro', packages = find_packages('src'), package_dir = {'':'src'}, include_package_data = True, zip_safe = False, classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: Unix', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Utilities', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ], install_requires=[ 'redis>=2.7.4', ], extras_require={ 'django': [ 'django-redis>=3.3', ] } )
# -*- encoding: utf8 -*- from setuptools import setup, find_packages import os setup( name = "python-redis-lock", version = "0.1.1", url = 'https://github.com/ionelmc/python-redis-lock', download_url = '', license = 'BSD', description = "Lock context manager implemented via redis SETNX/BLPOP.", long_description = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(), author = 'Ionel Cristian Mărieș', author_email = 'contact@ionelmc.ro', packages = find_packages('src'), package_dir = {'':'src'}, include_package_data = True, zip_safe = False, classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: Unix', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Utilities', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ], install_requires=[ 'redis>=2.8.0', ], extras_require={ 'django': [ 'django-redis>=3.3', ] } ) Allow using redis-py 2.7.4 (lowest revision with set keyword arguments).# -*- encoding: utf8 -*- from setuptools import setup, find_packages import os setup( name = "python-redis-lock", version = "0.1.1", url = 'https://github.com/ionelmc/python-redis-lock', download_url = '', license = 'BSD', description = "Lock context manager implemented via redis SETNX/BLPOP.", long_description = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(), author = 'Ionel Cristian Mărieș', author_email = 'contact@ionelmc.ro', packages = find_packages('src'), package_dir = {'':'src'}, include_package_data = True, zip_safe = False, classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: Unix', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Utilities', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ], install_requires=[ 'redis>=2.7.4', ], extras_require={ 'django': [ 'django-redis>=3.3', ] } )
<commit_before># -*- encoding: utf8 -*- from setuptools import setup, find_packages import os setup( name = "python-redis-lock", version = "0.1.1", url = 'https://github.com/ionelmc/python-redis-lock', download_url = '', license = 'BSD', description = "Lock context manager implemented via redis SETNX/BLPOP.", long_description = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(), author = 'Ionel Cristian Mărieș', author_email = 'contact@ionelmc.ro', packages = find_packages('src'), package_dir = {'':'src'}, include_package_data = True, zip_safe = False, classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: Unix', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Utilities', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ], install_requires=[ 'redis>=2.8.0', ], extras_require={ 'django': [ 'django-redis>=3.3', ] } ) <commit_msg>Allow using redis-py 2.7.4 (lowest revision with set keyword arguments).<commit_after># -*- encoding: utf8 -*- from setuptools import setup, find_packages import os setup( name = "python-redis-lock", version = "0.1.1", url = 'https://github.com/ionelmc/python-redis-lock', download_url = '', license = 'BSD', description = "Lock context manager implemented via redis SETNX/BLPOP.", long_description = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(), author = 'Ionel Cristian Mărieș', author_email = 'contact@ionelmc.ro', packages = find_packages('src'), package_dir = {'':'src'}, include_package_data = True, zip_safe = False, classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: Unix', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Utilities', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ], install_requires=[ 'redis>=2.7.4', ], extras_require={ 'django': [ 'django-redis>=3.3', ] } )
500f2a5965fc170a142e679b4909478ed3bc3b36
setup.py
setup.py
from setuptools import setup, find_packages

setup(
    name = 'CorrelatedVariants',
    version = '0.1.0',
    author = 'Pacific Biosciences',
    author_email = 'devnet@pacificbiosciences.com',
    license = open('LICENSE.txt').read(),
    packages = find_packages('.'),
    package_dir = {'':'.'},
    zip_safe = False,
    scripts=[
        'bin/correlatedVariants',
        'bin/rareCaller'
    ],
    install_requires = [
        'pbcore >= 0.2',
        'numpy >= 1.6.0',
        'scipy >= 0.9.0',
        'h5py >= 1.3.0'
    ]
)
from setuptools import setup, find_packages

setup(
    name = 'CorrelatedVariants',
    version = '0.1.0',
    author = 'Pacific Biosciences',
    author_email = 'devnet@pacificbiosciences.com',
    license = open('LICENSE.txt').read(),
    packages = find_packages('.'),
    package_dir = {'':'.'},
    zip_safe = False,
    scripts=[
        'bin/correlatedVariants',
        'bin/rareCaller'
    ],
    install_requires = [
        'pbcore >= 0.2',
        'GenomicConsensus',
        'numpy >= 1.6.0',
        'scipy >= 0.9.0',
        'h5py >= 1.3.0'
    ]
)
Add undeclared dependency on GenomicConsensus
Add undeclared dependency on GenomicConsensus
Python
bsd-3-clause
afif-elghraoui/CorrelatedVariants
from setuptools import setup, find_packages setup( name = 'CorrelatedVariants', version = '0.1.0', author = 'Pacific Biosciences', author_email = 'devnet@pacificbiosciences.com', license = open('LICENSE.txt').read(), packages = find_packages('.'), package_dir = {'':'.'}, zip_safe = False, scripts=[ 'bin/correlatedVariants', 'bin/rareCaller' ], install_requires = [ 'pbcore >= 0.2', 'numpy >= 1.6.0', 'scipy >= 0.9.0', 'h5py >= 1.3.0' ] ) Add undeclared dependency on GenomicConsensus
from setuptools import setup, find_packages setup( name = 'CorrelatedVariants', version = '0.1.0', author = 'Pacific Biosciences', author_email = 'devnet@pacificbiosciences.com', license = open('LICENSE.txt').read(), packages = find_packages('.'), package_dir = {'':'.'}, zip_safe = False, scripts=[ 'bin/correlatedVariants', 'bin/rareCaller' ], install_requires = [ 'pbcore >= 0.2', 'GenomicConsensus', 'numpy >= 1.6.0', 'scipy >= 0.9.0', 'h5py >= 1.3.0' ] )
<commit_before>from setuptools import setup, find_packages setup( name = 'CorrelatedVariants', version = '0.1.0', author = 'Pacific Biosciences', author_email = 'devnet@pacificbiosciences.com', license = open('LICENSE.txt').read(), packages = find_packages('.'), package_dir = {'':'.'}, zip_safe = False, scripts=[ 'bin/correlatedVariants', 'bin/rareCaller' ], install_requires = [ 'pbcore >= 0.2', 'numpy >= 1.6.0', 'scipy >= 0.9.0', 'h5py >= 1.3.0' ] ) <commit_msg>Add undeclared dependency on GenomicConsensus<commit_after>
from setuptools import setup, find_packages setup( name = 'CorrelatedVariants', version = '0.1.0', author = 'Pacific Biosciences', author_email = 'devnet@pacificbiosciences.com', license = open('LICENSE.txt').read(), packages = find_packages('.'), package_dir = {'':'.'}, zip_safe = False, scripts=[ 'bin/correlatedVariants', 'bin/rareCaller' ], install_requires = [ 'pbcore >= 0.2', 'GenomicConsensus', 'numpy >= 1.6.0', 'scipy >= 0.9.0', 'h5py >= 1.3.0' ] )
from setuptools import setup, find_packages setup( name = 'CorrelatedVariants', version = '0.1.0', author = 'Pacific Biosciences', author_email = 'devnet@pacificbiosciences.com', license = open('LICENSE.txt').read(), packages = find_packages('.'), package_dir = {'':'.'}, zip_safe = False, scripts=[ 'bin/correlatedVariants', 'bin/rareCaller' ], install_requires = [ 'pbcore >= 0.2', 'numpy >= 1.6.0', 'scipy >= 0.9.0', 'h5py >= 1.3.0' ] ) Add undeclared dependency on GenomicConsensusfrom setuptools import setup, find_packages setup( name = 'CorrelatedVariants', version = '0.1.0', author = 'Pacific Biosciences', author_email = 'devnet@pacificbiosciences.com', license = open('LICENSE.txt').read(), packages = find_packages('.'), package_dir = {'':'.'}, zip_safe = False, scripts=[ 'bin/correlatedVariants', 'bin/rareCaller' ], install_requires = [ 'pbcore >= 0.2', 'GenomicConsensus', 'numpy >= 1.6.0', 'scipy >= 0.9.0', 'h5py >= 1.3.0' ] )
<commit_before>from setuptools import setup, find_packages setup( name = 'CorrelatedVariants', version = '0.1.0', author = 'Pacific Biosciences', author_email = 'devnet@pacificbiosciences.com', license = open('LICENSE.txt').read(), packages = find_packages('.'), package_dir = {'':'.'}, zip_safe = False, scripts=[ 'bin/correlatedVariants', 'bin/rareCaller' ], install_requires = [ 'pbcore >= 0.2', 'numpy >= 1.6.0', 'scipy >= 0.9.0', 'h5py >= 1.3.0' ] ) <commit_msg>Add undeclared dependency on GenomicConsensus<commit_after>from setuptools import setup, find_packages setup( name = 'CorrelatedVariants', version = '0.1.0', author = 'Pacific Biosciences', author_email = 'devnet@pacificbiosciences.com', license = open('LICENSE.txt').read(), packages = find_packages('.'), package_dir = {'':'.'}, zip_safe = False, scripts=[ 'bin/correlatedVariants', 'bin/rareCaller' ], install_requires = [ 'pbcore >= 0.2', 'GenomicConsensus', 'numpy >= 1.6.0', 'scipy >= 0.9.0', 'h5py >= 1.3.0' ] )
27d86d856a1a9b78bcfe4d399f38e2440bb7dccf
setup.py
setup.py
import setuptools


def content_of(fpath):
    with open(fpath, 'r') as fd:
        return fd.read()


setuptools.setup(
    name='tox-battery',
    description='Additional functionality for tox',
    long_description=content_of("README.rst"),
    license='http://opensource.org/licenses/MIT',
    version='0.0.1',
    author='Volodymyr Vitvitskyi',
    author_email='contact.volodymyr@gmail.com',
    packages=setuptools.find_packages(),
    entry_points={'tox': [
        'toxbat-requirements = toxbat.requirements',
    ]},
    install_requires=['tox',],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Operating System :: POSIX',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: MacOS :: MacOS X',
        'Topic :: Software Development :: Testing',
        'Topic :: Software Development :: Libraries',
        'Topic :: Utilities',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3'],
)
import setuptools


def content_of(fpath):
    with open(fpath, 'r') as fd:
        return fd.read()


setuptools.setup(
    name='tox-battery',
    description='Additional functionality for tox',
    long_description=content_of("README.rst"),
    license='http://opensource.org/licenses/MIT',
    version='0.0.1',
    author='Volodymyr Vitvitskyi',
    author_email='contact.volodymyr@gmail.com',
    url='https://github.com/signalpillar/tox-battery',
    packages=setuptools.find_packages(),
    entry_points={'tox': [
        'toxbat-requirements = toxbat.requirements',
    ]},
    install_requires=['tox',],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Operating System :: POSIX',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: MacOS :: MacOS X',
        'Topic :: Software Development :: Testing',
        'Topic :: Software Development :: Libraries',
        'Topic :: Utilities',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3'],
)
Add missing project URL to the project meta
Add missing project URL to the project meta
Python
mit
signalpillar/tox-battery
import setuptools def content_of(fpath): with open(fpath, 'r') as fd: return fd.read() setuptools.setup( name='tox-battery', description='Additional functionality for tox', long_description=content_of("README.rst"), license='http://opensource.org/licenses/MIT', version='0.0.1', author='Volodymyr Vitvitskyi', author_email='contact.volodymyr@gmail.com', packages=setuptools.find_packages(), entry_points={'tox': [ 'toxbat-requirements = toxbat.requirements', ]}, install_requires=['tox',], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Libraries', 'Topic :: Utilities', 'Programming Language :: Python', 'Programming Language :: Python :: 3'], ) Add missing project URL to the project meta
import setuptools def content_of(fpath): with open(fpath, 'r') as fd: return fd.read() setuptools.setup( name='tox-battery', description='Additional functionality for tox', long_description=content_of("README.rst"), license='http://opensource.org/licenses/MIT', version='0.0.1', author='Volodymyr Vitvitskyi', author_email='contact.volodymyr@gmail.com', url='https://github.com/signalpillar/tox-battery', packages=setuptools.find_packages(), entry_points={'tox': [ 'toxbat-requirements = toxbat.requirements', ]}, install_requires=['tox',], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Libraries', 'Topic :: Utilities', 'Programming Language :: Python', 'Programming Language :: Python :: 3'], )
<commit_before>import setuptools def content_of(fpath): with open(fpath, 'r') as fd: return fd.read() setuptools.setup( name='tox-battery', description='Additional functionality for tox', long_description=content_of("README.rst"), license='http://opensource.org/licenses/MIT', version='0.0.1', author='Volodymyr Vitvitskyi', author_email='contact.volodymyr@gmail.com', packages=setuptools.find_packages(), entry_points={'tox': [ 'toxbat-requirements = toxbat.requirements', ]}, install_requires=['tox',], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Libraries', 'Topic :: Utilities', 'Programming Language :: Python', 'Programming Language :: Python :: 3'], ) <commit_msg>Add missing project URL to the project meta<commit_after>
import setuptools def content_of(fpath): with open(fpath, 'r') as fd: return fd.read() setuptools.setup( name='tox-battery', description='Additional functionality for tox', long_description=content_of("README.rst"), license='http://opensource.org/licenses/MIT', version='0.0.1', author='Volodymyr Vitvitskyi', author_email='contact.volodymyr@gmail.com', url='https://github.com/signalpillar/tox-battery', packages=setuptools.find_packages(), entry_points={'tox': [ 'toxbat-requirements = toxbat.requirements', ]}, install_requires=['tox',], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Libraries', 'Topic :: Utilities', 'Programming Language :: Python', 'Programming Language :: Python :: 3'], )
import setuptools def content_of(fpath): with open(fpath, 'r') as fd: return fd.read() setuptools.setup( name='tox-battery', description='Additional functionality for tox', long_description=content_of("README.rst"), license='http://opensource.org/licenses/MIT', version='0.0.1', author='Volodymyr Vitvitskyi', author_email='contact.volodymyr@gmail.com', packages=setuptools.find_packages(), entry_points={'tox': [ 'toxbat-requirements = toxbat.requirements', ]}, install_requires=['tox',], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Libraries', 'Topic :: Utilities', 'Programming Language :: Python', 'Programming Language :: Python :: 3'], ) Add missing project URL to the project metaimport setuptools def content_of(fpath): with open(fpath, 'r') as fd: return fd.read() setuptools.setup( name='tox-battery', description='Additional functionality for tox', long_description=content_of("README.rst"), license='http://opensource.org/licenses/MIT', version='0.0.1', author='Volodymyr Vitvitskyi', author_email='contact.volodymyr@gmail.com', url='https://github.com/signalpillar/tox-battery', packages=setuptools.find_packages(), entry_points={'tox': [ 'toxbat-requirements = toxbat.requirements', ]}, install_requires=['tox',], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Libraries', 'Topic :: Utilities', 'Programming Language :: Python', 'Programming Language :: Python :: 3'], )
<commit_before>import setuptools def content_of(fpath): with open(fpath, 'r') as fd: return fd.read() setuptools.setup( name='tox-battery', description='Additional functionality for tox', long_description=content_of("README.rst"), license='http://opensource.org/licenses/MIT', version='0.0.1', author='Volodymyr Vitvitskyi', author_email='contact.volodymyr@gmail.com', packages=setuptools.find_packages(), entry_points={'tox': [ 'toxbat-requirements = toxbat.requirements', ]}, install_requires=['tox',], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Libraries', 'Topic :: Utilities', 'Programming Language :: Python', 'Programming Language :: Python :: 3'], ) <commit_msg>Add missing project URL to the project meta<commit_after>import setuptools def content_of(fpath): with open(fpath, 'r') as fd: return fd.read() setuptools.setup( name='tox-battery', description='Additional functionality for tox', long_description=content_of("README.rst"), license='http://opensource.org/licenses/MIT', version='0.0.1', author='Volodymyr Vitvitskyi', author_email='contact.volodymyr@gmail.com', url='https://github.com/signalpillar/tox-battery', packages=setuptools.find_packages(), entry_points={'tox': [ 'toxbat-requirements = toxbat.requirements', ]}, install_requires=['tox',], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Libraries', 'Topic :: Utilities', 'Programming Language :: Python', 'Programming Language :: Python :: 3'], )
56a520fce1f60daaa0bc3cda33c1d32c8524865b
setup.py
setup.py
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup, find_packages

requires = ['cornice', 'mozsvc', 'powerhose', 'circus', 'wimms',
            'PyBrowserID', 'metlog-py']

setup(name='tokenserver',
      version='1.2',
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      entry_points="""\
      [paste.app_factory]
      main = tokenserver:main
      """,
      install_requires=requires,
      tests_require=requires,
      test_suite='tokenserver.tests')
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup, find_packages

requires = ['cornice', 'mozsvc', 'powerhose', 'circus', 'wimms',
            'PyBrowserID', 'metlog-py']

setup(name='tokenserver',
      version='1.2.0',
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      entry_points="""\
      [paste.app_factory]
      main = tokenserver:main
      """,
      install_requires=requires,
      tests_require=requires,
      test_suite='tokenserver.tests')
Add third place in version number, why not
Add third place in version number, why not
Python
mpl-2.0
mostlygeek/tokenserver,mozilla-services/tokenserver,mostlygeek/tokenserver,mozilla-services/tokenserver
# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. from setuptools import setup, find_packages requires = ['cornice', 'mozsvc', 'powerhose', 'circus', 'wimms', 'PyBrowserID', 'metlog-py'] setup(name='tokenserver', version='1.2', packages=find_packages(), include_package_data=True, zip_safe=False, entry_points="""\ [paste.app_factory] main = tokenserver:main """, install_requires=requires, tests_require=requires, test_suite='tokenserver.tests') Add third place in version number, why not
# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. from setuptools import setup, find_packages requires = ['cornice', 'mozsvc', 'powerhose', 'circus', 'wimms', 'PyBrowserID', 'metlog-py'] setup(name='tokenserver', version='1.2.0', packages=find_packages(), include_package_data=True, zip_safe=False, entry_points="""\ [paste.app_factory] main = tokenserver:main """, install_requires=requires, tests_require=requires, test_suite='tokenserver.tests')
<commit_before># This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. from setuptools import setup, find_packages requires = ['cornice', 'mozsvc', 'powerhose', 'circus', 'wimms', 'PyBrowserID', 'metlog-py'] setup(name='tokenserver', version='1.2', packages=find_packages(), include_package_data=True, zip_safe=False, entry_points="""\ [paste.app_factory] main = tokenserver:main """, install_requires=requires, tests_require=requires, test_suite='tokenserver.tests') <commit_msg>Add third place in version number, why not<commit_after>
# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. from setuptools import setup, find_packages requires = ['cornice', 'mozsvc', 'powerhose', 'circus', 'wimms', 'PyBrowserID', 'metlog-py'] setup(name='tokenserver', version='1.2.0', packages=find_packages(), include_package_data=True, zip_safe=False, entry_points="""\ [paste.app_factory] main = tokenserver:main """, install_requires=requires, tests_require=requires, test_suite='tokenserver.tests')
# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. from setuptools import setup, find_packages requires = ['cornice', 'mozsvc', 'powerhose', 'circus', 'wimms', 'PyBrowserID', 'metlog-py'] setup(name='tokenserver', version='1.2', packages=find_packages(), include_package_data=True, zip_safe=False, entry_points="""\ [paste.app_factory] main = tokenserver:main """, install_requires=requires, tests_require=requires, test_suite='tokenserver.tests') Add third place in version number, why not# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. from setuptools import setup, find_packages requires = ['cornice', 'mozsvc', 'powerhose', 'circus', 'wimms', 'PyBrowserID', 'metlog-py'] setup(name='tokenserver', version='1.2.0', packages=find_packages(), include_package_data=True, zip_safe=False, entry_points="""\ [paste.app_factory] main = tokenserver:main """, install_requires=requires, tests_require=requires, test_suite='tokenserver.tests')
<commit_before># This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. from setuptools import setup, find_packages requires = ['cornice', 'mozsvc', 'powerhose', 'circus', 'wimms', 'PyBrowserID', 'metlog-py'] setup(name='tokenserver', version='1.2', packages=find_packages(), include_package_data=True, zip_safe=False, entry_points="""\ [paste.app_factory] main = tokenserver:main """, install_requires=requires, tests_require=requires, test_suite='tokenserver.tests') <commit_msg>Add third place in version number, why not<commit_after># This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. from setuptools import setup, find_packages requires = ['cornice', 'mozsvc', 'powerhose', 'circus', 'wimms', 'PyBrowserID', 'metlog-py'] setup(name='tokenserver', version='1.2.0', packages=find_packages(), include_package_data=True, zip_safe=False, entry_points="""\ [paste.app_factory] main = tokenserver:main """, install_requires=requires, tests_require=requires, test_suite='tokenserver.tests')
f235b4effc0baba197b4ab24a23126c40ba27377
todo/__init__.py
todo/__init__.py
""" A multi-user, multi-group task management and assignment system for Django. """ __version__ = '1.6' __author__ = 'Scot Hacker' __email__ = 'shacker@birdhouse.org' __url__ = 'https://github.com/shacker/django-todo' __license__ = 'BSD License'
""" A multi-user, multi-group task management and assignment system for Django. """ __version__ = '1.6.1' __author__ = 'Scot Hacker' __email__ = 'shacker@birdhouse.org' __url__ = 'https://github.com/shacker/django-todo' __license__ = 'BSD License'
Bump version for bug fixes
Bump version for bug fixes
Python
bsd-3-clause
shacker/django-todo,shacker/django-todo,shacker/django-todo
""" A multi-user, multi-group task management and assignment system for Django. """ __version__ = '1.6' __author__ = 'Scot Hacker' __email__ = 'shacker@birdhouse.org' __url__ = 'https://github.com/shacker/django-todo' __license__ = 'BSD License' Bump version for bug fixes
""" A multi-user, multi-group task management and assignment system for Django. """ __version__ = '1.6.1' __author__ = 'Scot Hacker' __email__ = 'shacker@birdhouse.org' __url__ = 'https://github.com/shacker/django-todo' __license__ = 'BSD License'
<commit_before>""" A multi-user, multi-group task management and assignment system for Django. """ __version__ = '1.6' __author__ = 'Scot Hacker' __email__ = 'shacker@birdhouse.org' __url__ = 'https://github.com/shacker/django-todo' __license__ = 'BSD License' <commit_msg>Bump version for bug fixes<commit_after>
""" A multi-user, multi-group task management and assignment system for Django. """ __version__ = '1.6.1' __author__ = 'Scot Hacker' __email__ = 'shacker@birdhouse.org' __url__ = 'https://github.com/shacker/django-todo' __license__ = 'BSD License'
""" A multi-user, multi-group task management and assignment system for Django. """ __version__ = '1.6' __author__ = 'Scot Hacker' __email__ = 'shacker@birdhouse.org' __url__ = 'https://github.com/shacker/django-todo' __license__ = 'BSD License' Bump version for bug fixes""" A multi-user, multi-group task management and assignment system for Django. """ __version__ = '1.6.1' __author__ = 'Scot Hacker' __email__ = 'shacker@birdhouse.org' __url__ = 'https://github.com/shacker/django-todo' __license__ = 'BSD License'
<commit_before>""" A multi-user, multi-group task management and assignment system for Django. """ __version__ = '1.6' __author__ = 'Scot Hacker' __email__ = 'shacker@birdhouse.org' __url__ = 'https://github.com/shacker/django-todo' __license__ = 'BSD License' <commit_msg>Bump version for bug fixes<commit_after>""" A multi-user, multi-group task management and assignment system for Django. """ __version__ = '1.6.1' __author__ = 'Scot Hacker' __email__ = 'shacker@birdhouse.org' __url__ = 'https://github.com/shacker/django-todo' __license__ = 'BSD License'
2a77f5e9a2bcce6b11c21f40574f73cad133c4b8
slack.py
slack.py
import json

from slackipycore import invite, get_team_info
from slackipycore import (AlreadyInTeam, InvalidInviteeEmail,
                          InvalidAuthToken, AlreadyInvited, APIRequestError)
from flask import current_app


def invite_user(email):
    api_token = current_app.config['SLACK_API_TOKEN']
    team_id = current_app.config['SLACK_TEAM_ID']
    try:
        if invite(team_id=team_id, api_token=api_token, invitee_email=email):
            return json.dumps({'status': 'success'})
    except (AlreadyInTeam, InvalidInviteeEmail, InvalidAuthToken,
            AlreadyInvited, APIRequestError) as e:
        return _response_message(message=str(e))


def get_team_name():
    api_token = current_app.config['SLACK_API_TOKEN']
    team_id = current_app.config['SLACK_TEAM_ID']
    team_info = get_team_info(team_id=team_id, api_token=api_token)
    return team_info['name']


def _response_message(message):
    return {'status': 'fail', 'error': message}
import json

from slackipycore import invite, get_team_info
from slackipycore import (AlreadyInTeam, InvalidInviteeEmail,
                          InvalidAuthToken, AlreadyInvited, APIRequestError)
from flask import current_app


def invite_user(email):
    api_token = current_app.config['SLACK_API_TOKEN']
    team_id = current_app.config['SLACK_TEAM_ID']
    try:
        if invite(team_id=team_id, api_token=api_token, invitee_email=email):
            return {'status': 'success'}
    except (AlreadyInTeam, InvalidInviteeEmail, InvalidAuthToken,
            AlreadyInvited, APIRequestError) as e:
        return _response_message(message=str(e))


def get_team_name():
    api_token = current_app.config['SLACK_API_TOKEN']
    team_id = current_app.config['SLACK_TEAM_ID']
    team_info = get_team_info(team_id=team_id, api_token=api_token)
    return team_info['name']


def _response_message(message):
    return {'status': 'fail', 'error': message}
Fix return statement for `invite`
Fix return statement for `invite`
Python
mit
avinassh/slackipy,avinassh/slackipy,avinassh/slackipy
import json from slackipycore import invite, get_team_info from slackipycore import (AlreadyInTeam, InvalidInviteeEmail, InvalidAuthToken, AlreadyInvited, APIRequestError) from flask import current_app def invite_user(email): api_token = current_app.config['SLACK_API_TOKEN'] team_id = current_app.config['SLACK_TEAM_ID'] try: if invite(team_id=team_id, api_token=api_token, invitee_email=email): return json.dumps({'status': 'success'}) except (AlreadyInTeam, InvalidInviteeEmail, InvalidAuthToken, AlreadyInvited, APIRequestError) as e: return _response_message(message=str(e)) def get_team_name(): api_token = current_app.config['SLACK_API_TOKEN'] team_id = current_app.config['SLACK_TEAM_ID'] team_info = get_team_info(team_id=team_id, api_token=api_token) return team_info['name'] def _response_message(message): return {'status': 'fail', 'error': message} Fix return statement for `invite`
import json from slackipycore import invite, get_team_info from slackipycore import (AlreadyInTeam, InvalidInviteeEmail, InvalidAuthToken, AlreadyInvited, APIRequestError) from flask import current_app def invite_user(email): api_token = current_app.config['SLACK_API_TOKEN'] team_id = current_app.config['SLACK_TEAM_ID'] try: if invite(team_id=team_id, api_token=api_token, invitee_email=email): return {'status': 'success'} except (AlreadyInTeam, InvalidInviteeEmail, InvalidAuthToken, AlreadyInvited, APIRequestError) as e: return _response_message(message=str(e)) def get_team_name(): api_token = current_app.config['SLACK_API_TOKEN'] team_id = current_app.config['SLACK_TEAM_ID'] team_info = get_team_info(team_id=team_id, api_token=api_token) return team_info['name'] def _response_message(message): return {'status': 'fail', 'error': message}
<commit_before>import json from slackipycore import invite, get_team_info from slackipycore import (AlreadyInTeam, InvalidInviteeEmail, InvalidAuthToken, AlreadyInvited, APIRequestError) from flask import current_app def invite_user(email): api_token = current_app.config['SLACK_API_TOKEN'] team_id = current_app.config['SLACK_TEAM_ID'] try: if invite(team_id=team_id, api_token=api_token, invitee_email=email): return json.dumps({'status': 'success'}) except (AlreadyInTeam, InvalidInviteeEmail, InvalidAuthToken, AlreadyInvited, APIRequestError) as e: return _response_message(message=str(e)) def get_team_name(): api_token = current_app.config['SLACK_API_TOKEN'] team_id = current_app.config['SLACK_TEAM_ID'] team_info = get_team_info(team_id=team_id, api_token=api_token) return team_info['name'] def _response_message(message): return {'status': 'fail', 'error': message} <commit_msg>Fix return statement for `invite`<commit_after>
import json from slackipycore import invite, get_team_info from slackipycore import (AlreadyInTeam, InvalidInviteeEmail, InvalidAuthToken, AlreadyInvited, APIRequestError) from flask import current_app def invite_user(email): api_token = current_app.config['SLACK_API_TOKEN'] team_id = current_app.config['SLACK_TEAM_ID'] try: if invite(team_id=team_id, api_token=api_token, invitee_email=email): return {'status': 'success'} except (AlreadyInTeam, InvalidInviteeEmail, InvalidAuthToken, AlreadyInvited, APIRequestError) as e: return _response_message(message=str(e)) def get_team_name(): api_token = current_app.config['SLACK_API_TOKEN'] team_id = current_app.config['SLACK_TEAM_ID'] team_info = get_team_info(team_id=team_id, api_token=api_token) return team_info['name'] def _response_message(message): return {'status': 'fail', 'error': message}
import json from slackipycore import invite, get_team_info from slackipycore import (AlreadyInTeam, InvalidInviteeEmail, InvalidAuthToken, AlreadyInvited, APIRequestError) from flask import current_app def invite_user(email): api_token = current_app.config['SLACK_API_TOKEN'] team_id = current_app.config['SLACK_TEAM_ID'] try: if invite(team_id=team_id, api_token=api_token, invitee_email=email): return json.dumps({'status': 'success'}) except (AlreadyInTeam, InvalidInviteeEmail, InvalidAuthToken, AlreadyInvited, APIRequestError) as e: return _response_message(message=str(e)) def get_team_name(): api_token = current_app.config['SLACK_API_TOKEN'] team_id = current_app.config['SLACK_TEAM_ID'] team_info = get_team_info(team_id=team_id, api_token=api_token) return team_info['name'] def _response_message(message): return {'status': 'fail', 'error': message} Fix return statement for `invite`import json from slackipycore import invite, get_team_info from slackipycore import (AlreadyInTeam, InvalidInviteeEmail, InvalidAuthToken, AlreadyInvited, APIRequestError) from flask import current_app def invite_user(email): api_token = current_app.config['SLACK_API_TOKEN'] team_id = current_app.config['SLACK_TEAM_ID'] try: if invite(team_id=team_id, api_token=api_token, invitee_email=email): return {'status': 'success'} except (AlreadyInTeam, InvalidInviteeEmail, InvalidAuthToken, AlreadyInvited, APIRequestError) as e: return _response_message(message=str(e)) def get_team_name(): api_token = current_app.config['SLACK_API_TOKEN'] team_id = current_app.config['SLACK_TEAM_ID'] team_info = get_team_info(team_id=team_id, api_token=api_token) return team_info['name'] def _response_message(message): return {'status': 'fail', 'error': message}
<commit_before>import json from slackipycore import invite, get_team_info from slackipycore import (AlreadyInTeam, InvalidInviteeEmail, InvalidAuthToken, AlreadyInvited, APIRequestError) from flask import current_app def invite_user(email): api_token = current_app.config['SLACK_API_TOKEN'] team_id = current_app.config['SLACK_TEAM_ID'] try: if invite(team_id=team_id, api_token=api_token, invitee_email=email): return json.dumps({'status': 'success'}) except (AlreadyInTeam, InvalidInviteeEmail, InvalidAuthToken, AlreadyInvited, APIRequestError) as e: return _response_message(message=str(e)) def get_team_name(): api_token = current_app.config['SLACK_API_TOKEN'] team_id = current_app.config['SLACK_TEAM_ID'] team_info = get_team_info(team_id=team_id, api_token=api_token) return team_info['name'] def _response_message(message): return {'status': 'fail', 'error': message} <commit_msg>Fix return statement for `invite`<commit_after>import json from slackipycore import invite, get_team_info from slackipycore import (AlreadyInTeam, InvalidInviteeEmail, InvalidAuthToken, AlreadyInvited, APIRequestError) from flask import current_app def invite_user(email): api_token = current_app.config['SLACK_API_TOKEN'] team_id = current_app.config['SLACK_TEAM_ID'] try: if invite(team_id=team_id, api_token=api_token, invitee_email=email): return {'status': 'success'} except (AlreadyInTeam, InvalidInviteeEmail, InvalidAuthToken, AlreadyInvited, APIRequestError) as e: return _response_message(message=str(e)) def get_team_name(): api_token = current_app.config['SLACK_API_TOKEN'] team_id = current_app.config['SLACK_TEAM_ID'] team_info = get_team_info(team_id=team_id, api_token=api_token) return team_info['name'] def _response_message(message): return {'status': 'fail', 'error': message}
603f2204327c5cac8dbae0a567676465e1ab0f70
data/settings.py
data/settings.py
import os

PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(PROJECT_ROOT, 'operations.db'),
    }
}

INSTALLED_APPS = (
    'data',
)

SECRET_KEY = '63cFWu$$lhT3bVP9U1k1Iv@Jo02SuM'

LOG_FILE = os.path.join(PROJECT_ROOT, 'sorter.logs')

SORTER_IGNORE_FILENAME = '.signore'  # Should start with a dot

SORTER_FOLDER_IDENTITY_FILENAME = '.sorter'  # Should start with a dot
import os

PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(PROJECT_ROOT, 'operations.db'),
    }
}

INSTALLED_APPS = (
    'data',
)

SECRET_KEY = '63cFWu$$lhT3bVP9U1k1Iv@Jo02SuM'

LOG_FILE = os.path.join(PROJECT_ROOT, 'sorter.logs')

SORTER_IGNORE_FILENAME = '.signore'  # Should start with a dot

SORTER_FOLDER_IDENTITY_FILENAME = '.sorter'  # Should start with a dot

MIDDLEWARE_CLASSES = []
Set MIDDLEWARE_CLASSES to empty list
Set MIDDLEWARE_CLASSES to empty list
Python
bsd-3-clause
giantas/sorter,giantas/sorter
import os PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(PROJECT_ROOT, 'operations.db'), } } INSTALLED_APPS = ( 'data', ) SECRET_KEY = '63cFWu$$lhT3bVP9U1k1Iv@Jo02SuM' LOG_FILE = os.path.join(PROJECT_ROOT, 'sorter.logs') SORTER_IGNORE_FILENAME = '.signore' # Should start with a dot SORTER_FOLDER_IDENTITY_FILENAME = '.sorter' # Should start with a dot Set MIDDLEWARE_CLASSES to empty list
import os PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(PROJECT_ROOT, 'operations.db'), } } INSTALLED_APPS = ( 'data', ) SECRET_KEY = '63cFWu$$lhT3bVP9U1k1Iv@Jo02SuM' LOG_FILE = os.path.join(PROJECT_ROOT, 'sorter.logs') SORTER_IGNORE_FILENAME = '.signore' # Should start with a dot SORTER_FOLDER_IDENTITY_FILENAME = '.sorter' # Should start with a dot MIDDLEWARE_CLASSES = []
<commit_before>import os PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(PROJECT_ROOT, 'operations.db'), } } INSTALLED_APPS = ( 'data', ) SECRET_KEY = '63cFWu$$lhT3bVP9U1k1Iv@Jo02SuM' LOG_FILE = os.path.join(PROJECT_ROOT, 'sorter.logs') SORTER_IGNORE_FILENAME = '.signore' # Should start with a dot SORTER_FOLDER_IDENTITY_FILENAME = '.sorter' # Should start with a dot <commit_msg>Set MIDDLEWARE_CLASSES to empty list<commit_after>
import os PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(PROJECT_ROOT, 'operations.db'), } } INSTALLED_APPS = ( 'data', ) SECRET_KEY = '63cFWu$$lhT3bVP9U1k1Iv@Jo02SuM' LOG_FILE = os.path.join(PROJECT_ROOT, 'sorter.logs') SORTER_IGNORE_FILENAME = '.signore' # Should start with a dot SORTER_FOLDER_IDENTITY_FILENAME = '.sorter' # Should start with a dot MIDDLEWARE_CLASSES = []
import os PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(PROJECT_ROOT, 'operations.db'), } } INSTALLED_APPS = ( 'data', ) SECRET_KEY = '63cFWu$$lhT3bVP9U1k1Iv@Jo02SuM' LOG_FILE = os.path.join(PROJECT_ROOT, 'sorter.logs') SORTER_IGNORE_FILENAME = '.signore' # Should start with a dot SORTER_FOLDER_IDENTITY_FILENAME = '.sorter' # Should start with a dot Set MIDDLEWARE_CLASSES to empty listimport os PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(PROJECT_ROOT, 'operations.db'), } } INSTALLED_APPS = ( 'data', ) SECRET_KEY = '63cFWu$$lhT3bVP9U1k1Iv@Jo02SuM' LOG_FILE = os.path.join(PROJECT_ROOT, 'sorter.logs') SORTER_IGNORE_FILENAME = '.signore' # Should start with a dot SORTER_FOLDER_IDENTITY_FILENAME = '.sorter' # Should start with a dot MIDDLEWARE_CLASSES = []
<commit_before>import os PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(PROJECT_ROOT, 'operations.db'), } } INSTALLED_APPS = ( 'data', ) SECRET_KEY = '63cFWu$$lhT3bVP9U1k1Iv@Jo02SuM' LOG_FILE = os.path.join(PROJECT_ROOT, 'sorter.logs') SORTER_IGNORE_FILENAME = '.signore' # Should start with a dot SORTER_FOLDER_IDENTITY_FILENAME = '.sorter' # Should start with a dot <commit_msg>Set MIDDLEWARE_CLASSES to empty list<commit_after>import os PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(PROJECT_ROOT, 'operations.db'), } } INSTALLED_APPS = ( 'data', ) SECRET_KEY = '63cFWu$$lhT3bVP9U1k1Iv@Jo02SuM' LOG_FILE = os.path.join(PROJECT_ROOT, 'sorter.logs') SORTER_IGNORE_FILENAME = '.signore' # Should start with a dot SORTER_FOLDER_IDENTITY_FILENAME = '.sorter' # Should start with a dot MIDDLEWARE_CLASSES = []
f04b85d6536cdfcf3d51e237bde7c2e63a5c2946
server/server.py
server/server.py
import SimpleHTTPServer
import SocketServer


class KcaaHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):

    def do_HEAD(self):
        # Note: HTTP request handlers are not new-style classes.
        # super() cannot be used.
        SimpleHTTPServer.SimpleHTTPRequestHandler.do_HEAD(self)

    def do_GET(self):
        SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self)


def setup(args):
    httpd = SocketServer.TCPServer(('', args.server_port),
                                   KcaaHTTPRequestHandler)
    _, port = httpd.server_address
    root_url = 'http://127.0.0.1:{}/web/'.format(port)
    print 'KCAA server ready at {}'.format(root_url)
    return httpd, root_url
import SimpleHTTPServer
import SocketServer


class KcaaHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):

    CLIENT_PREFIX = '/client/'

    def do_HEAD(self):
        # Note: HTTP request handlers are not new-style classes.
        # super() cannot be used.
        if self.rewrite_to_client_path():
            SimpleHTTPServer.SimpleHTTPRequestHandler.do_HEAD(self)

    def do_GET(self):
        if self.rewrite_to_client_path():
            SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self)

    def rewrite_to_client_path(self):
        if self.path.startswith(KcaaHTTPRequestHandler.CLIENT_PREFIX):
            self.path = '/' + self.path[len(
                KcaaHTTPRequestHandler.CLIENT_PREFIX):]
            return True
        else:
            return False


def setup(args):
    httpd = SocketServer.TCPServer(('', args.server_port),
                                   KcaaHTTPRequestHandler)
    _, port = httpd.server_address
    root_url = 'http://127.0.0.1:{}/client/'.format(port)
    print 'KCAA server ready at {}'.format(root_url)
    return httpd, root_url
Handle only /client requests to file serving.
Handle only /client requests to file serving.
Python
apache-2.0
kcaa/kcaa,kcaa/kcaa,kcaa/kcaa,kcaa/kcaa
import SimpleHTTPServer import SocketServer class KcaaHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): def do_HEAD(self): # Note: HTTP request handlers are not new-style classes. # super() cannot be used. SimpleHTTPServer.SimpleHTTPRequestHandler.do_HEAD(self) def do_GET(self): SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self) def setup(args): httpd = SocketServer.TCPServer(('', args.server_port), KcaaHTTPRequestHandler) _, port = httpd.server_address root_url = 'http://127.0.0.1:{}/web/'.format(port) print 'KCAA server ready at {}'.format(root_url) return httpd, root_url Handle only /client requests to file serving.
import SimpleHTTPServer import SocketServer class KcaaHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): CLIENT_PREFIX = '/client/' def do_HEAD(self): # Note: HTTP request handlers are not new-style classes. # super() cannot be used. if self.rewrite_to_client_path(): SimpleHTTPServer.SimpleHTTPRequestHandler.do_HEAD(self) def do_GET(self): if self.rewrite_to_client_path(): SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self) def rewrite_to_client_path(self): if self.path.startswith(KcaaHTTPRequestHandler.CLIENT_PREFIX): self.path = '/' + self.path[len( KcaaHTTPRequestHandler.CLIENT_PREFIX):] return True else: return False def setup(args): httpd = SocketServer.TCPServer(('', args.server_port), KcaaHTTPRequestHandler) _, port = httpd.server_address root_url = 'http://127.0.0.1:{}/client/'.format(port) print 'KCAA server ready at {}'.format(root_url) return httpd, root_url
<commit_before>import SimpleHTTPServer import SocketServer class KcaaHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): def do_HEAD(self): # Note: HTTP request handlers are not new-style classes. # super() cannot be used. SimpleHTTPServer.SimpleHTTPRequestHandler.do_HEAD(self) def do_GET(self): SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self) def setup(args): httpd = SocketServer.TCPServer(('', args.server_port), KcaaHTTPRequestHandler) _, port = httpd.server_address root_url = 'http://127.0.0.1:{}/web/'.format(port) print 'KCAA server ready at {}'.format(root_url) return httpd, root_url <commit_msg>Handle only /client requests to file serving.<commit_after>
import SimpleHTTPServer import SocketServer class KcaaHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): CLIENT_PREFIX = '/client/' def do_HEAD(self): # Note: HTTP request handlers are not new-style classes. # super() cannot be used. if self.rewrite_to_client_path(): SimpleHTTPServer.SimpleHTTPRequestHandler.do_HEAD(self) def do_GET(self): if self.rewrite_to_client_path(): SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self) def rewrite_to_client_path(self): if self.path.startswith(KcaaHTTPRequestHandler.CLIENT_PREFIX): self.path = '/' + self.path[len( KcaaHTTPRequestHandler.CLIENT_PREFIX):] return True else: return False def setup(args): httpd = SocketServer.TCPServer(('', args.server_port), KcaaHTTPRequestHandler) _, port = httpd.server_address root_url = 'http://127.0.0.1:{}/client/'.format(port) print 'KCAA server ready at {}'.format(root_url) return httpd, root_url
import SimpleHTTPServer import SocketServer class KcaaHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): def do_HEAD(self): # Note: HTTP request handlers are not new-style classes. # super() cannot be used. SimpleHTTPServer.SimpleHTTPRequestHandler.do_HEAD(self) def do_GET(self): SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self) def setup(args): httpd = SocketServer.TCPServer(('', args.server_port), KcaaHTTPRequestHandler) _, port = httpd.server_address root_url = 'http://127.0.0.1:{}/web/'.format(port) print 'KCAA server ready at {}'.format(root_url) return httpd, root_url Handle only /client requests to file serving.import SimpleHTTPServer import SocketServer class KcaaHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): CLIENT_PREFIX = '/client/' def do_HEAD(self): # Note: HTTP request handlers are not new-style classes. # super() cannot be used. if self.rewrite_to_client_path(): SimpleHTTPServer.SimpleHTTPRequestHandler.do_HEAD(self) def do_GET(self): if self.rewrite_to_client_path(): SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self) def rewrite_to_client_path(self): if self.path.startswith(KcaaHTTPRequestHandler.CLIENT_PREFIX): self.path = '/' + self.path[len( KcaaHTTPRequestHandler.CLIENT_PREFIX):] return True else: return False def setup(args): httpd = SocketServer.TCPServer(('', args.server_port), KcaaHTTPRequestHandler) _, port = httpd.server_address root_url = 'http://127.0.0.1:{}/client/'.format(port) print 'KCAA server ready at {}'.format(root_url) return httpd, root_url
<commit_before>import SimpleHTTPServer import SocketServer class KcaaHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): def do_HEAD(self): # Note: HTTP request handlers are not new-style classes. # super() cannot be used. SimpleHTTPServer.SimpleHTTPRequestHandler.do_HEAD(self) def do_GET(self): SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self) def setup(args): httpd = SocketServer.TCPServer(('', args.server_port), KcaaHTTPRequestHandler) _, port = httpd.server_address root_url = 'http://127.0.0.1:{}/web/'.format(port) print 'KCAA server ready at {}'.format(root_url) return httpd, root_url <commit_msg>Handle only /client requests to file serving.<commit_after>import SimpleHTTPServer import SocketServer class KcaaHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): CLIENT_PREFIX = '/client/' def do_HEAD(self): # Note: HTTP request handlers are not new-style classes. # super() cannot be used. if self.rewrite_to_client_path(): SimpleHTTPServer.SimpleHTTPRequestHandler.do_HEAD(self) def do_GET(self): if self.rewrite_to_client_path(): SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self) def rewrite_to_client_path(self): if self.path.startswith(KcaaHTTPRequestHandler.CLIENT_PREFIX): self.path = '/' + self.path[len( KcaaHTTPRequestHandler.CLIENT_PREFIX):] return True else: return False def setup(args): httpd = SocketServer.TCPServer(('', args.server_port), KcaaHTTPRequestHandler) _, port = httpd.server_address root_url = 'http://127.0.0.1:{}/client/'.format(port) print 'KCAA server ready at {}'.format(root_url) return httpd, root_url
3735c090702cc8c290dbf8930223ff794c80775a
versionsapp.py
versionsapp.py
from webob import Response

from apiversion import APIVersion
from application import Application
from apiv1app import APIv1App

class VersionsApp(Application):

    version_classes = [ APIv1App ]

    def APIVersionList(self, args):
        return Response(content_type = 'application/json', body = self._resultset_to_json([
            {
                "id": version._version_identifier(),
                "links": [
                    {
                        "href": "/" + version._version_identifier(),
                        "rel": "self"
                    }
                ]
            } for version in self.version_classes
        ]))

    def APIVersion(self, args):
        return Response(content_type = 'application/json', body = self._resultset_to_json({
            'todo': 'Report detail'
        }))

def factory(global_config, **settings):
    return VersionsApp()
from webob import Response
import webob.exc

from apiversion import APIVersion
from application import Application
from apiv1app import APIv1App

class VersionsApp(Application):

    version_classes = [ APIv1App ]

    def _api_version_detail(self, version):
        return {
            "id": version._version_identifier(),
            "links": [
                {
                    "href": "/" + version._version_identifier(),
                    "rel": "self"
                }
            ]
        }

    def APIVersionList(self, args):
        return Response(status = 300, content_type = 'application/json', body = self._resultset_to_json([
            self._api_version_detail(version) for version in self.version_classes
        ]))

    def APIVersion(self, version_identifier):
        versions = [ version for version in self.version_classes if version._version_identifier() == version_identifier ]
        if not versions:
            return webob.exc.HTTPNotFound()
        if len(versions) > 1:
            raise RuntimeError("Multiple API versions with identifier '%s'" % version_identifier)
        return Response(content_type = 'application/json', body = self._resultset_to_json({
            self._api_version_detail(versions[0])
        }))

def factory(global_config, **settings):
    return VersionsApp()
Correct the HTTP status from GET / - it should be 300 (Multiple Choices) not 200. Implement the details of a given version.
Correct the HTTP status from GET / - it should be 300 (Multiple Choices) not 200. Implement the details of a given version.
Python
apache-2.0
NeCTAR-RC/reporting-api,NCI-Cloud/reporting-api,NeCTAR-RC/reporting-api,NCI-Cloud/reporting-api
from webob import Response from apiversion import APIVersion from application import Application from apiv1app import APIv1App class VersionsApp(Application): version_classes = [ APIv1App ] def APIVersionList(self, args): return Response(content_type = 'application/json', body = self._resultset_to_json([ { "id": version._version_identifier(), "links": [ { "href": "/" + version._version_identifier(), "rel": "self" } ] } for version in self.version_classes ])) def APIVersion(self, args): return Response(content_type = 'application/json', body = self._resultset_to_json({ 'todo': 'Report detail' })) def factory(global_config, **settings): return VersionsApp() Correct the HTTP status from GET / - it should be 300 (Multiple Choices) not 200. Implement the details of a given version.
from webob import Response import webob.exc from apiversion import APIVersion from application import Application from apiv1app import APIv1App class VersionsApp(Application): version_classes = [ APIv1App ] def _api_version_detail(self, version): return { "id": version._version_identifier(), "links": [ { "href": "/" + version._version_identifier(), "rel": "self" } ] } def APIVersionList(self, args): return Response(status = 300, content_type = 'application/json', body = self._resultset_to_json([ self._api_version_detail(version) for version in self.version_classes ])) def APIVersion(self, version_identifier): versions = [ version for version in self.version_classes if version._version_identifier() == version_identifier ] if not versions: return webob.exc.HTTPNotFound() if len(versions) > 1: raise RuntimeError("Multiple API versions with identifier '%s'" % version_identifier) return Response(content_type = 'application/json', body = self._resultset_to_json({ self._api_version_detail(versions[0]) })) def factory(global_config, **settings): return VersionsApp()
<commit_before>from webob import Response from apiversion import APIVersion from application import Application from apiv1app import APIv1App class VersionsApp(Application): version_classes = [ APIv1App ] def APIVersionList(self, args): return Response(content_type = 'application/json', body = self._resultset_to_json([ { "id": version._version_identifier(), "links": [ { "href": "/" + version._version_identifier(), "rel": "self" } ] } for version in self.version_classes ])) def APIVersion(self, args): return Response(content_type = 'application/json', body = self._resultset_to_json({ 'todo': 'Report detail' })) def factory(global_config, **settings): return VersionsApp() <commit_msg>Correct the HTTP status from GET / - it should be 300 (Multiple Choices) not 200. Implement the details of a given version.<commit_after>
from webob import Response import webob.exc from apiversion import APIVersion from application import Application from apiv1app import APIv1App class VersionsApp(Application): version_classes = [ APIv1App ] def _api_version_detail(self, version): return { "id": version._version_identifier(), "links": [ { "href": "/" + version._version_identifier(), "rel": "self" } ] } def APIVersionList(self, args): return Response(status = 300, content_type = 'application/json', body = self._resultset_to_json([ self._api_version_detail(version) for version in self.version_classes ])) def APIVersion(self, version_identifier): versions = [ version for version in self.version_classes if version._version_identifier() == version_identifier ] if not versions: return webob.exc.HTTPNotFound() if len(versions) > 1: raise RuntimeError("Multiple API versions with identifier '%s'" % version_identifier) return Response(content_type = 'application/json', body = self._resultset_to_json({ self._api_version_detail(versions[0]) })) def factory(global_config, **settings): return VersionsApp()
from webob import Response from apiversion import APIVersion from application import Application from apiv1app import APIv1App class VersionsApp(Application): version_classes = [ APIv1App ] def APIVersionList(self, args): return Response(content_type = 'application/json', body = self._resultset_to_json([ { "id": version._version_identifier(), "links": [ { "href": "/" + version._version_identifier(), "rel": "self" } ] } for version in self.version_classes ])) def APIVersion(self, args): return Response(content_type = 'application/json', body = self._resultset_to_json({ 'todo': 'Report detail' })) def factory(global_config, **settings): return VersionsApp() Correct the HTTP status from GET / - it should be 300 (Multiple Choices) not 200. Implement the details of a given version.from webob import Response import webob.exc from apiversion import APIVersion from application import Application from apiv1app import APIv1App class VersionsApp(Application): version_classes = [ APIv1App ] def _api_version_detail(self, version): return { "id": version._version_identifier(), "links": [ { "href": "/" + version._version_identifier(), "rel": "self" } ] } def APIVersionList(self, args): return Response(status = 300, content_type = 'application/json', body = self._resultset_to_json([ self._api_version_detail(version) for version in self.version_classes ])) def APIVersion(self, version_identifier): versions = [ version for version in self.version_classes if version._version_identifier() == version_identifier ] if not versions: return webob.exc.HTTPNotFound() if len(versions) > 1: raise RuntimeError("Multiple API versions with identifier '%s'" % version_identifier) return Response(content_type = 'application/json', body = self._resultset_to_json({ self._api_version_detail(versions[0]) })) def factory(global_config, **settings): return VersionsApp()
<commit_before>from webob import Response from apiversion import APIVersion from application import Application from apiv1app import APIv1App class VersionsApp(Application): version_classes = [ APIv1App ] def APIVersionList(self, args): return Response(content_type = 'application/json', body = self._resultset_to_json([ { "id": version._version_identifier(), "links": [ { "href": "/" + version._version_identifier(), "rel": "self" } ] } for version in self.version_classes ])) def APIVersion(self, args): return Response(content_type = 'application/json', body = self._resultset_to_json({ 'todo': 'Report detail' })) def factory(global_config, **settings): return VersionsApp() <commit_msg>Correct the HTTP status from GET / - it should be 300 (Multiple Choices) not 200. Implement the details of a given version.<commit_after>from webob import Response import webob.exc from apiversion import APIVersion from application import Application from apiv1app import APIv1App class VersionsApp(Application): version_classes = [ APIv1App ] def _api_version_detail(self, version): return { "id": version._version_identifier(), "links": [ { "href": "/" + version._version_identifier(), "rel": "self" } ] } def APIVersionList(self, args): return Response(status = 300, content_type = 'application/json', body = self._resultset_to_json([ self._api_version_detail(version) for version in self.version_classes ])) def APIVersion(self, version_identifier): versions = [ version for version in self.version_classes if version._version_identifier() == version_identifier ] if not versions: return webob.exc.HTTPNotFound() if len(versions) > 1: raise RuntimeError("Multiple API versions with identifier '%s'" % version_identifier) return Response(content_type = 'application/json', body = self._resultset_to_json({ self._api_version_detail(versions[0]) })) def factory(global_config, **settings): return VersionsApp()
5690bc4d2be2b0c51fc95fe79fa3c858f70e9181
shortest_path.py
shortest_path.py
from simple_graph.weighted_graph import Wgraph


def dijkstra(weighted_graph, start, end):
    list_of_tuples_node_totalweight = []
    list_of_tuples_node_totalweight.append((start, 0))  # weight_dict[start] = 0  # total weight/distance
    prev = []  # previous node
    # unvisited = []
    for node in weighted_graph.nodes():
        if node is not start:
            list_of_tuples_node_totalweight.append(node, float("inf"))
    unvisited = list_of_tuples_node_totalweight
    while unvisited:
        sorted_list = sorted(unvisited, key=lambda x: x[1])
        temp = sorted_list[0]
        unvisited = sorted_list[1:]
        for neighbor in temp.neighbors():
            alt = temp[1] + weighted_graph[temp[0]][neighbor]
            if alt < list_of_tuples_node_totalweight[neighbor][1]:
                list_of_tuples_node_totalweight[neighbor][1] = alt
                prev.append(neighbor)
        if temp == end:
            break
    return list_of_tuples_node_totalweight, prev

    # already_visited = [start]
    # for node in weighted_graph:
    #     if node is not start:
    #         weight = 100
    #         # previous = undefined
    #         pq = Pq.insert(node, weight)
    # while Pq:
    #     temp = pq.pop()
    #     for neighbor in weighted_graph.neighbors(temp):
    #         alt =
Add Dijkstra implementation of shortest path.
Add Dijkstra implementation of shortest path.
Python
mit
efrainc/data_structures
Add Dijkstra implementation of shortest path.
from simple_graph.weighted_graph import Wgraph def dijkstra(weighted_graph, start, end): list_of_tuples_node_totalweight = [] list_of_tuples_node_totalweight.append((start, 0)) # weight_dict[start] = 0 # total weight/distance prev = [] # previous node # unvisited = [] for node in weighted_graph.nodes(): if node is not start: list_of_tuples_node_totalweight.append(node, float("inf")) unvisited = list_of_tuples_node_totalweight while unvisited: sorted_list = sorted(unvisited, key=lambda x: x[1]) temp = sorted_list[0] unvisited = sorted_list[1:] for neighbor in temp.neighbors(): alt = temp[1] + weighted_graph[temp[0]][neighbor] if alt < list_of_tuples_node_totalweight[neighbor][1]: list_of_tuples_node_totalweight[neighbor][1] = alt prev.append(neighbor) if temp == end: break return list_of_tuples_node_totalweight, prev # already_visited = [start] # for node in weighted_graph: # if node is not start: # weight = 100 # # previous = undefined # pq = Pq.insert(node, weight) # while Pq: # temp = pq.pop() # for neighbor in weighted_graph.neighbors(temp): # alt =
<commit_before><commit_msg>Add Dijkstra implementation of shortest path.<commit_after>
from simple_graph.weighted_graph import Wgraph def dijkstra(weighted_graph, start, end): list_of_tuples_node_totalweight = [] list_of_tuples_node_totalweight.append((start, 0)) # weight_dict[start] = 0 # total weight/distance prev = [] # previous node # unvisited = [] for node in weighted_graph.nodes(): if node is not start: list_of_tuples_node_totalweight.append(node, float("inf")) unvisited = list_of_tuples_node_totalweight while unvisited: sorted_list = sorted(unvisited, key=lambda x: x[1]) temp = sorted_list[0] unvisited = sorted_list[1:] for neighbor in temp.neighbors(): alt = temp[1] + weighted_graph[temp[0]][neighbor] if alt < list_of_tuples_node_totalweight[neighbor][1]: list_of_tuples_node_totalweight[neighbor][1] = alt prev.append(neighbor) if temp == end: break return list_of_tuples_node_totalweight, prev # already_visited = [start] # for node in weighted_graph: # if node is not start: # weight = 100 # # previous = undefined # pq = Pq.insert(node, weight) # while Pq: # temp = pq.pop() # for neighbor in weighted_graph.neighbors(temp): # alt =
Add Dijkstra implementation of shortest path.from simple_graph.weighted_graph import Wgraph def dijkstra(weighted_graph, start, end): list_of_tuples_node_totalweight = [] list_of_tuples_node_totalweight.append((start, 0)) # weight_dict[start] = 0 # total weight/distance prev = [] # previous node # unvisited = [] for node in weighted_graph.nodes(): if node is not start: list_of_tuples_node_totalweight.append(node, float("inf")) unvisited = list_of_tuples_node_totalweight while unvisited: sorted_list = sorted(unvisited, key=lambda x: x[1]) temp = sorted_list[0] unvisited = sorted_list[1:] for neighbor in temp.neighbors(): alt = temp[1] + weighted_graph[temp[0]][neighbor] if alt < list_of_tuples_node_totalweight[neighbor][1]: list_of_tuples_node_totalweight[neighbor][1] = alt prev.append(neighbor) if temp == end: break return list_of_tuples_node_totalweight, prev # already_visited = [start] # for node in weighted_graph: # if node is not start: # weight = 100 # # previous = undefined # pq = Pq.insert(node, weight) # while Pq: # temp = pq.pop() # for neighbor in weighted_graph.neighbors(temp): # alt =
<commit_before><commit_msg>Add Dijkstra implementation of shortest path.<commit_after>from simple_graph.weighted_graph import Wgraph def dijkstra(weighted_graph, start, end): list_of_tuples_node_totalweight = [] list_of_tuples_node_totalweight.append((start, 0)) # weight_dict[start] = 0 # total weight/distance prev = [] # previous node # unvisited = [] for node in weighted_graph.nodes(): if node is not start: list_of_tuples_node_totalweight.append(node, float("inf")) unvisited = list_of_tuples_node_totalweight while unvisited: sorted_list = sorted(unvisited, key=lambda x: x[1]) temp = sorted_list[0] unvisited = sorted_list[1:] for neighbor in temp.neighbors(): alt = temp[1] + weighted_graph[temp[0]][neighbor] if alt < list_of_tuples_node_totalweight[neighbor][1]: list_of_tuples_node_totalweight[neighbor][1] = alt prev.append(neighbor) if temp == end: break return list_of_tuples_node_totalweight, prev # already_visited = [start] # for node in weighted_graph: # if node is not start: # weight = 100 # # previous = undefined # pq = Pq.insert(node, weight) # while Pq: # temp = pq.pop() # for neighbor in weighted_graph.neighbors(temp): # alt =
b836b2c39299cc6dbcbdbc8bcffe046f25909edc
test_portend.py
test_portend.py
import socket

import pytest

import portend


def socket_infos():
    """
    Generate addr infos for connections to localhost
    """
    host = ''
    port = portend.find_available_local_port()
    return socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM)


def id_for_info(info):
    af, = info[:1]
    return str(af)


def build_listening_infos():
    params = list(socket_infos())
    ids = list(map(id_for_info, params))
    return locals()


@pytest.fixture(**build_listening_infos())
def listening_addr(request):
    af, socktype, proto, canonname, sa = request.param
    sock = socket.socket(af, socktype, proto)
    sock.bind(sa)
    sock.listen(5)
    try:
        yield sa
    finally:
        sock.close()


class TestCheckPort:
    def test_check_port_listening(self, listening_addr):
        with pytest.raises(IOError):
            portend._check_port(*listening_addr[:2])
import socket

import pytest

import portend


def socket_infos():
    """
    Generate addr infos for connections to localhost
    """
    host = ''
    port = portend.find_available_local_port()
    return socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM)


def id_for_info(info):
    af, = info[:1]
    return str(af)


def build_addr_infos():
    params = list(socket_infos())
    ids = list(map(id_for_info, params))
    return locals()


@pytest.fixture(**build_addr_infos())
def listening_addr(request):
    af, socktype, proto, canonname, sa = request.param
    sock = socket.socket(af, socktype, proto)
    sock.bind(sa)
    sock.listen(5)
    try:
        yield sa
    finally:
        sock.close()


@pytest.fixture(**build_addr_infos())
def nonlistening_addr(request):
    af, socktype, proto, canonname, sa = request.param
    return sa


class TestCheckPort:
    def test_check_port_listening(self, listening_addr):
        with pytest.raises(IOError):
            portend._check_port(*listening_addr[:2])

    def test_check_port_nonlistening(self, nonlistening_addr):
        portend._check_port(*nonlistening_addr[:2])
Add tests for nonlistening addresses as well.
Add tests for nonlistening addresses as well.
Python
mit
jaraco/portend
import socket import pytest import portend def socket_infos(): """ Generate addr infos for connections to localhost """ host = '' port = portend.find_available_local_port() return socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM) def id_for_info(info): af, = info[:1] return str(af) def build_listening_infos(): params = list(socket_infos()) ids = list(map(id_for_info, params)) return locals() @pytest.fixture(**build_listening_infos()) def listening_addr(request): af, socktype, proto, canonname, sa = request.param sock = socket.socket(af, socktype, proto) sock.bind(sa) sock.listen(5) try: yield sa finally: sock.close() class TestCheckPort: def test_check_port_listening(self, listening_addr): with pytest.raises(IOError): portend._check_port(*listening_addr[:2]) Add tests for nonlistening addresses as well.
import socket import pytest import portend def socket_infos(): """ Generate addr infos for connections to localhost """ host = '' port = portend.find_available_local_port() return socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM) def id_for_info(info): af, = info[:1] return str(af) def build_addr_infos(): params = list(socket_infos()) ids = list(map(id_for_info, params)) return locals() @pytest.fixture(**build_addr_infos()) def listening_addr(request): af, socktype, proto, canonname, sa = request.param sock = socket.socket(af, socktype, proto) sock.bind(sa) sock.listen(5) try: yield sa finally: sock.close() @pytest.fixture(**build_addr_infos()) def nonlistening_addr(request): af, socktype, proto, canonname, sa = request.param return sa class TestCheckPort: def test_check_port_listening(self, listening_addr): with pytest.raises(IOError): portend._check_port(*listening_addr[:2]) def test_check_port_nonlistening(self, nonlistening_addr): portend._check_port(*nonlistening_addr[:2])
<commit_before>import socket import pytest import portend def socket_infos(): """ Generate addr infos for connections to localhost """ host = '' port = portend.find_available_local_port() return socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM) def id_for_info(info): af, = info[:1] return str(af) def build_listening_infos(): params = list(socket_infos()) ids = list(map(id_for_info, params)) return locals() @pytest.fixture(**build_listening_infos()) def listening_addr(request): af, socktype, proto, canonname, sa = request.param sock = socket.socket(af, socktype, proto) sock.bind(sa) sock.listen(5) try: yield sa finally: sock.close() class TestCheckPort: def test_check_port_listening(self, listening_addr): with pytest.raises(IOError): portend._check_port(*listening_addr[:2]) <commit_msg>Add tests for nonlistening addresses as well.<commit_after>
import socket import pytest import portend def socket_infos(): """ Generate addr infos for connections to localhost """ host = '' port = portend.find_available_local_port() return socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM) def id_for_info(info): af, = info[:1] return str(af) def build_addr_infos(): params = list(socket_infos()) ids = list(map(id_for_info, params)) return locals() @pytest.fixture(**build_addr_infos()) def listening_addr(request): af, socktype, proto, canonname, sa = request.param sock = socket.socket(af, socktype, proto) sock.bind(sa) sock.listen(5) try: yield sa finally: sock.close() @pytest.fixture(**build_addr_infos()) def nonlistening_addr(request): af, socktype, proto, canonname, sa = request.param return sa class TestCheckPort: def test_check_port_listening(self, listening_addr): with pytest.raises(IOError): portend._check_port(*listening_addr[:2]) def test_check_port_nonlistening(self, nonlistening_addr): portend._check_port(*nonlistening_addr[:2])
import socket import pytest import portend def socket_infos(): """ Generate addr infos for connections to localhost """ host = '' port = portend.find_available_local_port() return socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM) def id_for_info(info): af, = info[:1] return str(af) def build_listening_infos(): params = list(socket_infos()) ids = list(map(id_for_info, params)) return locals() @pytest.fixture(**build_listening_infos()) def listening_addr(request): af, socktype, proto, canonname, sa = request.param sock = socket.socket(af, socktype, proto) sock.bind(sa) sock.listen(5) try: yield sa finally: sock.close() class TestCheckPort: def test_check_port_listening(self, listening_addr): with pytest.raises(IOError): portend._check_port(*listening_addr[:2]) Add tests for nonlistening addresses as well.import socket import pytest import portend def socket_infos(): """ Generate addr infos for connections to localhost """ host = '' port = portend.find_available_local_port() return socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM) def id_for_info(info): af, = info[:1] return str(af) def build_addr_infos(): params = list(socket_infos()) ids = list(map(id_for_info, params)) return locals() @pytest.fixture(**build_addr_infos()) def listening_addr(request): af, socktype, proto, canonname, sa = request.param sock = socket.socket(af, socktype, proto) sock.bind(sa) sock.listen(5) try: yield sa finally: sock.close() @pytest.fixture(**build_addr_infos()) def nonlistening_addr(request): af, socktype, proto, canonname, sa = request.param return sa class TestCheckPort: def test_check_port_listening(self, listening_addr): with pytest.raises(IOError): portend._check_port(*listening_addr[:2]) def test_check_port_nonlistening(self, nonlistening_addr): portend._check_port(*nonlistening_addr[:2])
<commit_before>import socket import pytest import portend def socket_infos(): """ Generate addr infos for connections to localhost """ host = '' port = portend.find_available_local_port() return socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM) def id_for_info(info): af, = info[:1] return str(af) def build_listening_infos(): params = list(socket_infos()) ids = list(map(id_for_info, params)) return locals() @pytest.fixture(**build_listening_infos()) def listening_addr(request): af, socktype, proto, canonname, sa = request.param sock = socket.socket(af, socktype, proto) sock.bind(sa) sock.listen(5) try: yield sa finally: sock.close() class TestCheckPort: def test_check_port_listening(self, listening_addr): with pytest.raises(IOError): portend._check_port(*listening_addr[:2]) <commit_msg>Add tests for nonlistening addresses as well.<commit_after>import socket import pytest import portend def socket_infos(): """ Generate addr infos for connections to localhost """ host = '' port = portend.find_available_local_port() return socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM) def id_for_info(info): af, = info[:1] return str(af) def build_addr_infos(): params = list(socket_infos()) ids = list(map(id_for_info, params)) return locals() @pytest.fixture(**build_addr_infos()) def listening_addr(request): af, socktype, proto, canonname, sa = request.param sock = socket.socket(af, socktype, proto) sock.bind(sa) sock.listen(5) try: yield sa finally: sock.close() @pytest.fixture(**build_addr_infos()) def nonlistening_addr(request): af, socktype, proto, canonname, sa = request.param return sa class TestCheckPort: def test_check_port_listening(self, listening_addr): with pytest.raises(IOError): portend._check_port(*listening_addr[:2]) def test_check_port_nonlistening(self, nonlistening_addr): portend._check_port(*nonlistening_addr[:2])
21651120925cc3e51aeada4eac4dbfaa5bf98fae
src/header_filter/__init__.py
src/header_filter/__init__.py
from header_filter.matchers import Header # noqa: F401
from header_filter.middleware import HeaderFilterMiddleware # noqa: F401
from header_filter.rules import Enforce, Forbid # noqa: F401
from header_filter.matchers import Header, HeaderRegexp # noqa: F401
from header_filter.middleware import HeaderFilterMiddleware # noqa: F401
from header_filter.rules import Enforce, Forbid # noqa: F401
Allow HeaderRegexp to be imported directly from header_filter package.
Allow HeaderRegexp to be imported directly from header_filter package.
Python
mit
sanjioh/django-header-filter
from header_filter.matchers import Header # noqa: F401 from header_filter.middleware import HeaderFilterMiddleware # noqa: F401 from header_filter.rules import Enforce, Forbid # noqa: F401 Allow HeaderRegexp to be imported directly from header_filter package.
from header_filter.matchers import Header, HeaderRegexp # noqa: F401 from header_filter.middleware import HeaderFilterMiddleware # noqa: F401 from header_filter.rules import Enforce, Forbid # noqa: F401
<commit_before>from header_filter.matchers import Header # noqa: F401 from header_filter.middleware import HeaderFilterMiddleware # noqa: F401 from header_filter.rules import Enforce, Forbid # noqa: F401 <commit_msg>Allow HeaderRegexp to be imported directly from header_filter package.<commit_after>
from header_filter.matchers import Header, HeaderRegexp # noqa: F401 from header_filter.middleware import HeaderFilterMiddleware # noqa: F401 from header_filter.rules import Enforce, Forbid # noqa: F401
from header_filter.matchers import Header # noqa: F401 from header_filter.middleware import HeaderFilterMiddleware # noqa: F401 from header_filter.rules import Enforce, Forbid # noqa: F401 Allow HeaderRegexp to be imported directly from header_filter package.from header_filter.matchers import Header, HeaderRegexp # noqa: F401 from header_filter.middleware import HeaderFilterMiddleware # noqa: F401 from header_filter.rules import Enforce, Forbid # noqa: F401
<commit_before>from header_filter.matchers import Header # noqa: F401 from header_filter.middleware import HeaderFilterMiddleware # noqa: F401 from header_filter.rules import Enforce, Forbid # noqa: F401 <commit_msg>Allow HeaderRegexp to be imported directly from header_filter package.<commit_after>from header_filter.matchers import Header, HeaderRegexp # noqa: F401 from header_filter.middleware import HeaderFilterMiddleware # noqa: F401 from header_filter.rules import Enforce, Forbid # noqa: F401
5ebc53fccd79e479d1a39cf02160c8eb2eab247a
vulk/__init__.py
vulk/__init__.py
"""Vulk 3D engine Cross-plateform 3D engine """ __version__ = "0.2.0"
"""Vulk 3D engine Cross-plateform 3D engine """ from os import path as p __version__ = "0.2.0" PATH_VULK = p.dirname(p.abspath(__file__)) PATH_VULK_ASSET = p.join(PATH_VULK, 'asset') PATH_VULK_SHADER = p.join(PATH_VULK_ASSET, 'shader')
Add Path to Vulk package
Add Path to Vulk package
Python
apache-2.0
Echelon9/vulk,realitix/vulk,realitix/vulk,Echelon9/vulk
"""Vulk 3D engine Cross-plateform 3D engine """ __version__ = "0.2.0" Add Path to Vulk package
"""Vulk 3D engine Cross-plateform 3D engine """ from os import path as p __version__ = "0.2.0" PATH_VULK = p.dirname(p.abspath(__file__)) PATH_VULK_ASSET = p.join(PATH_VULK, 'asset') PATH_VULK_SHADER = p.join(PATH_VULK_ASSET, 'shader')
<commit_before>"""Vulk 3D engine Cross-plateform 3D engine """ __version__ = "0.2.0" <commit_msg>Add Path to Vulk package<commit_after>
"""Vulk 3D engine Cross-plateform 3D engine """ from os import path as p __version__ = "0.2.0" PATH_VULK = p.dirname(p.abspath(__file__)) PATH_VULK_ASSET = p.join(PATH_VULK, 'asset') PATH_VULK_SHADER = p.join(PATH_VULK_ASSET, 'shader')
"""Vulk 3D engine Cross-plateform 3D engine """ __version__ = "0.2.0" Add Path to Vulk package"""Vulk 3D engine Cross-plateform 3D engine """ from os import path as p __version__ = "0.2.0" PATH_VULK = p.dirname(p.abspath(__file__)) PATH_VULK_ASSET = p.join(PATH_VULK, 'asset') PATH_VULK_SHADER = p.join(PATH_VULK_ASSET, 'shader')
<commit_before>"""Vulk 3D engine Cross-plateform 3D engine """ __version__ = "0.2.0" <commit_msg>Add Path to Vulk package<commit_after>"""Vulk 3D engine Cross-plateform 3D engine """ from os import path as p __version__ = "0.2.0" PATH_VULK = p.dirname(p.abspath(__file__)) PATH_VULK_ASSET = p.join(PATH_VULK, 'asset') PATH_VULK_SHADER = p.join(PATH_VULK_ASSET, 'shader')
c3b1fef64b3a383b017ec2e155cbdc5b58a6bf5c
average_pixels/get_images.py
average_pixels/get_images.py
import os
import urllib

import requests

from api_key import API_KEY
from IPython import embed as qq

URL = "https://bingapis.azure-api.net/api/v5/images/search"
NUMBER_OF_IMAGES = 10
DIR = os.path.realpath('img')


def search_images(term):
    params = {"q": term, "count":NUMBER_OF_IMAGES}
    headers = {'ocp-apim-subscription-key': API_KEY}
    response = requests.request("GET", URL, headers=headers, params=params)
    return response.json()['value']


def download_image(url, filename):
    urllib.request.urlretrieve(url, filename)


def save_images(term):
    images = search_images(term)
    filenames = []
    for i, img in enumerate(images):
        name = "{path}/{filename}.{ext}".format(
            path=DIR,
            filename="_".join(term.split()) + str(i),
            ext=img['encodingFormat'])
        try:
            download_image(img['contentUrl'], name)
            filenames.append(name)
        except urllib.error.HTTPError:
            pass
    return filenames
import os
import urllib
import urllib.error

import requests

URL = "https://bingapis.azure-api.net/api/v5/images/search"
NUMBER_OF_IMAGES = 10
DIR = '/tmp/average_images'


def search_images(term, api_key):
    params = {"q": term, "count":NUMBER_OF_IMAGES}
    headers = {'ocp-apim-subscription-key': api_key}
    response = requests.request("GET", URL, headers=headers, params=params)
    return response.json()['value']


def download_image(url, filename):
    urllib.request.urlretrieve(url, filename)


def get_api_key():
    try:
        api_key_file = os.path.join(
            os.path.expanduser('~'), ".average_pixels_api")
        with open(api_key_file, 'r') as f:
            api_key = f.read().replace('\n','')
    except FileNotFoundError:
        api_key = input("Please insert your API key: ")
    return api_key


def save_images(term):
    api_key = get_api_key()
    images = search_images(term, api_key)
    filenames = []
    if not os.path.exists(DIR):
        os.makedirs(DIR)
    for i, img in enumerate(images):
        if img['encodingFormat'] == 'unknown':
            continue
        name = "{path}/{filename}.{ext}".format(
            path=DIR,
            filename="_".join(term.split()) + str(i),
            ext=img['encodingFormat'])
        try:
            download_image(img['contentUrl'], name)
            filenames.append(name)
        except urllib.error.HTTPError:
            pass
    return filenames
Store files in /tmp/ and fetch API key from $HOME
Store files in /tmp/ and fetch API key from $HOME
Python
mit
liviu-/average-pixels
import os import urllib import requests from api_key import API_KEY from IPython import embed as qq URL = "https://bingapis.azure-api.net/api/v5/images/search" NUMBER_OF_IMAGES = 10 DIR = os.path.realpath('img') def search_images(term): params = {"q": term, "count":NUMBER_OF_IMAGES} headers = {'ocp-apim-subscription-key': API_KEY} response = requests.request("GET", URL, headers=headers, params=params) return response.json()['value'] def download_image(url, filename): urllib.request.urlretrieve(url, filename) def save_images(term): images = search_images(term) filenames = [] for i, img in enumerate(images): name = "{path}/{filename}.{ext}".format( path=DIR, filename="_".join(term.split()) + str(i), ext=img['encodingFormat']) try: download_image(img['contentUrl'], name) filenames.append(name) except urllib.error.HTTPError: pass return filenames Store files in /tmp/ and fetch API key from $HOME
import os import urllib import urllib.error import requests URL = "https://bingapis.azure-api.net/api/v5/images/search" NUMBER_OF_IMAGES = 10 DIR = '/tmp/average_images' def search_images(term, api_key): params = {"q": term, "count":NUMBER_OF_IMAGES} headers = {'ocp-apim-subscription-key': api_key} response = requests.request("GET", URL, headers=headers, params=params) return response.json()['value'] def download_image(url, filename): urllib.request.urlretrieve(url, filename) def get_api_key(): try: api_key_file = os.path.join( os.path.expanduser('~'), ".average_pixels_api") with open(api_key_file, 'r') as f: api_key = f.read().replace('\n','') except FileNotFoundError: api_key = input("Please insert your API key: ") return api_key def save_images(term): api_key = get_api_key() images = search_images(term, api_key) filenames = [] if not os.path.exists(DIR): os.makedirs(DIR) for i, img in enumerate(images): if img['encodingFormat'] == 'unknown': continue name = "{path}/{filename}.{ext}".format( path=DIR, filename="_".join(term.split()) + str(i), ext=img['encodingFormat']) try: download_image(img['contentUrl'], name) filenames.append(name) except urllib.error.HTTPError: pass return filenames
<commit_before>import os import urllib import requests from api_key import API_KEY from IPython import embed as qq URL = "https://bingapis.azure-api.net/api/v5/images/search" NUMBER_OF_IMAGES = 10 DIR = os.path.realpath('img') def search_images(term): params = {"q": term, "count":NUMBER_OF_IMAGES} headers = {'ocp-apim-subscription-key': API_KEY} response = requests.request("GET", URL, headers=headers, params=params) return response.json()['value'] def download_image(url, filename): urllib.request.urlretrieve(url, filename) def save_images(term): images = search_images(term) filenames = [] for i, img in enumerate(images): name = "{path}/{filename}.{ext}".format( path=DIR, filename="_".join(term.split()) + str(i), ext=img['encodingFormat']) try: download_image(img['contentUrl'], name) filenames.append(name) except urllib.error.HTTPError: pass return filenames <commit_msg>Store files in /tmp/ and fetch API key from $HOME<commit_after>
import os import urllib import urllib.error import requests URL = "https://bingapis.azure-api.net/api/v5/images/search" NUMBER_OF_IMAGES = 10 DIR = '/tmp/average_images' def search_images(term, api_key): params = {"q": term, "count":NUMBER_OF_IMAGES} headers = {'ocp-apim-subscription-key': api_key} response = requests.request("GET", URL, headers=headers, params=params) return response.json()['value'] def download_image(url, filename): urllib.request.urlretrieve(url, filename) def get_api_key(): try: api_key_file = os.path.join( os.path.expanduser('~'), ".average_pixels_api") with open(api_key_file, 'r') as f: api_key = f.read().replace('\n','') except FileNotFoundError: api_key = input("Please insert your API key: ") return api_key def save_images(term): api_key = get_api_key() images = search_images(term, api_key) filenames = [] if not os.path.exists(DIR): os.makedirs(DIR) for i, img in enumerate(images): if img['encodingFormat'] == 'unknown': continue name = "{path}/{filename}.{ext}".format( path=DIR, filename="_".join(term.split()) + str(i), ext=img['encodingFormat']) try: download_image(img['contentUrl'], name) filenames.append(name) except urllib.error.HTTPError: pass return filenames
import os import urllib import requests from api_key import API_KEY from IPython import embed as qq URL = "https://bingapis.azure-api.net/api/v5/images/search" NUMBER_OF_IMAGES = 10 DIR = os.path.realpath('img') def search_images(term): params = {"q": term, "count":NUMBER_OF_IMAGES} headers = {'ocp-apim-subscription-key': API_KEY} response = requests.request("GET", URL, headers=headers, params=params) return response.json()['value'] def download_image(url, filename): urllib.request.urlretrieve(url, filename) def save_images(term): images = search_images(term) filenames = [] for i, img in enumerate(images): name = "{path}/{filename}.{ext}".format( path=DIR, filename="_".join(term.split()) + str(i), ext=img['encodingFormat']) try: download_image(img['contentUrl'], name) filenames.append(name) except urllib.error.HTTPError: pass return filenames Store files in /tmp/ and fetch API key from $HOMEimport os import urllib import urllib.error import requests URL = "https://bingapis.azure-api.net/api/v5/images/search" NUMBER_OF_IMAGES = 10 DIR = '/tmp/average_images' def search_images(term, api_key): params = {"q": term, "count":NUMBER_OF_IMAGES} headers = {'ocp-apim-subscription-key': api_key} response = requests.request("GET", URL, headers=headers, params=params) return response.json()['value'] def download_image(url, filename): urllib.request.urlretrieve(url, filename) def get_api_key(): try: api_key_file = os.path.join( os.path.expanduser('~'), ".average_pixels_api") with open(api_key_file, 'r') as f: api_key = f.read().replace('\n','') except FileNotFoundError: api_key = input("Please insert your API key: ") return api_key def save_images(term): api_key = get_api_key() images = search_images(term, api_key) filenames = [] if not os.path.exists(DIR): os.makedirs(DIR) for i, img in enumerate(images): if img['encodingFormat'] == 'unknown': continue name = "{path}/{filename}.{ext}".format( path=DIR, filename="_".join(term.split()) + str(i), ext=img['encodingFormat']) try: download_image(img['contentUrl'], name) filenames.append(name) except urllib.error.HTTPError: pass return filenames
<commit_before>import os import urllib import requests from api_key import API_KEY from IPython import embed as qq URL = "https://bingapis.azure-api.net/api/v5/images/search" NUMBER_OF_IMAGES = 10 DIR = os.path.realpath('img') def search_images(term): params = {"q": term, "count":NUMBER_OF_IMAGES} headers = {'ocp-apim-subscription-key': API_KEY} response = requests.request("GET", URL, headers=headers, params=params) return response.json()['value'] def download_image(url, filename): urllib.request.urlretrieve(url, filename) def save_images(term): images = search_images(term) filenames = [] for i, img in enumerate(images): name = "{path}/{filename}.{ext}".format( path=DIR, filename="_".join(term.split()) + str(i), ext=img['encodingFormat']) try: download_image(img['contentUrl'], name) filenames.append(name) except urllib.error.HTTPError: pass return filenames <commit_msg>Store files in /tmp/ and fetch API key from $HOME<commit_after>import os import urllib import urllib.error import requests URL = "https://bingapis.azure-api.net/api/v5/images/search" NUMBER_OF_IMAGES = 10 DIR = '/tmp/average_images' def search_images(term, api_key): params = {"q": term, "count":NUMBER_OF_IMAGES} headers = {'ocp-apim-subscription-key': api_key} response = requests.request("GET", URL, headers=headers, params=params) return response.json()['value'] def download_image(url, filename): urllib.request.urlretrieve(url, filename) def get_api_key(): try: api_key_file = os.path.join( os.path.expanduser('~'), ".average_pixels_api") with open(api_key_file, 'r') as f: api_key = f.read().replace('\n','') except FileNotFoundError: api_key = input("Please insert your API key: ") return api_key def save_images(term): api_key = get_api_key() images = search_images(term, api_key) filenames = [] if not os.path.exists(DIR): os.makedirs(DIR) for i, img in enumerate(images): if img['encodingFormat'] == 'unknown': continue name = "{path}/{filename}.{ext}".format( path=DIR, filename="_".join(term.split()) + str(i), ext=img['encodingFormat']) try: download_image(img['contentUrl'], name) filenames.append(name) except urllib.error.HTTPError: pass return filenames
8dbd58443e908257cee31fa4e00ef4316a660c5b
bot/action/standard/group_admin.py
bot/action/standard/group_admin.py
from bot.action.core.action import IntermediateAction
from bot.api.domain import Message


class GroupAdminAction(IntermediateAction):
    def process(self, event):
        chat = event.message.chat
        if chat.type == "private":
            # lets consider private chat members are admins :)
            self._continue(event)
        else:
            user = event.message.from_
            if user is not None:
                chat_member = self.api.getChatMember(chat_id=chat.id, user_id=user.id)
                if chat_member.status in ("creator", "administrator"):
                    self._continue(event)
                else:
                    error_response = "Sorry, this command is only available to group admins."
                    self.api.send_message(Message.create_reply(event.message, error_response))
from bot.action.core.action import IntermediateAction
from bot.api.domain import Message


class GroupAdminAction(IntermediateAction):
    def process(self, event):
        chat = event.message.chat
        if chat.type == "private":
            # lets consider private chat members are admins :)
            self._continue(event)
        else:
            user = event.message.from_
            if user is not None:
                chat_member = self.api.no_async.getChatMember(chat_id=chat.id, user_id=user.id)
                if chat_member.status in ("creator", "administrator"):
                    self._continue(event)
                else:
                    error_response = "Sorry, this command is only available to group admins."
                    self.api.send_message(Message.create_reply(event.message, error_response))
Use no_async api to query if a chat member is a group admin
Use no_async api to query if a chat member is a group admin
Python
agpl-3.0
alvarogzp/telegram-bot,alvarogzp/telegram-bot
from bot.action.core.action import IntermediateAction from bot.api.domain import Message class GroupAdminAction(IntermediateAction): def process(self, event): chat = event.message.chat if chat.type == "private": # lets consider private chat members are admins :) self._continue(event) else: user = event.message.from_ if user is not None: chat_member = self.api.getChatMember(chat_id=chat.id, user_id=user.id) if chat_member.status in ("creator", "administrator"): self._continue(event) else: error_response = "Sorry, this command is only available to group admins." self.api.send_message(Message.create_reply(event.message, error_response)) Use no_async api to query if a chat member is a group admin
from bot.action.core.action import IntermediateAction from bot.api.domain import Message class GroupAdminAction(IntermediateAction): def process(self, event): chat = event.message.chat if chat.type == "private": # lets consider private chat members are admins :) self._continue(event) else: user = event.message.from_ if user is not None: chat_member = self.api.no_async.getChatMember(chat_id=chat.id, user_id=user.id) if chat_member.status in ("creator", "administrator"): self._continue(event) else: error_response = "Sorry, this command is only available to group admins." self.api.send_message(Message.create_reply(event.message, error_response))
<commit_before>from bot.action.core.action import IntermediateAction from bot.api.domain import Message class GroupAdminAction(IntermediateAction): def process(self, event): chat = event.message.chat if chat.type == "private": # lets consider private chat members are admins :) self._continue(event) else: user = event.message.from_ if user is not None: chat_member = self.api.getChatMember(chat_id=chat.id, user_id=user.id) if chat_member.status in ("creator", "administrator"): self._continue(event) else: error_response = "Sorry, this command is only available to group admins." self.api.send_message(Message.create_reply(event.message, error_response)) <commit_msg>Use no_async api to query if a chat member is a group admin<commit_after>
from bot.action.core.action import IntermediateAction from bot.api.domain import Message class GroupAdminAction(IntermediateAction): def process(self, event): chat = event.message.chat if chat.type == "private": # lets consider private chat members are admins :) self._continue(event) else: user = event.message.from_ if user is not None: chat_member = self.api.no_async.getChatMember(chat_id=chat.id, user_id=user.id) if chat_member.status in ("creator", "administrator"): self._continue(event) else: error_response = "Sorry, this command is only available to group admins." self.api.send_message(Message.create_reply(event.message, error_response))
from bot.action.core.action import IntermediateAction from bot.api.domain import Message class GroupAdminAction(IntermediateAction): def process(self, event): chat = event.message.chat if chat.type == "private": # lets consider private chat members are admins :) self._continue(event) else: user = event.message.from_ if user is not None: chat_member = self.api.getChatMember(chat_id=chat.id, user_id=user.id) if chat_member.status in ("creator", "administrator"): self._continue(event) else: error_response = "Sorry, this command is only available to group admins." self.api.send_message(Message.create_reply(event.message, error_response)) Use no_async api to query if a chat member is a group adminfrom bot.action.core.action import IntermediateAction from bot.api.domain import Message class GroupAdminAction(IntermediateAction): def process(self, event): chat = event.message.chat if chat.type == "private": # lets consider private chat members are admins :) self._continue(event) else: user = event.message.from_ if user is not None: chat_member = self.api.no_async.getChatMember(chat_id=chat.id, user_id=user.id) if chat_member.status in ("creator", "administrator"): self._continue(event) else: error_response = "Sorry, this command is only available to group admins." self.api.send_message(Message.create_reply(event.message, error_response))
<commit_before>from bot.action.core.action import IntermediateAction from bot.api.domain import Message class GroupAdminAction(IntermediateAction): def process(self, event): chat = event.message.chat if chat.type == "private": # lets consider private chat members are admins :) self._continue(event) else: user = event.message.from_ if user is not None: chat_member = self.api.getChatMember(chat_id=chat.id, user_id=user.id) if chat_member.status in ("creator", "administrator"): self._continue(event) else: error_response = "Sorry, this command is only available to group admins." self.api.send_message(Message.create_reply(event.message, error_response)) <commit_msg>Use no_async api to query if a chat member is a group admin<commit_after>from bot.action.core.action import IntermediateAction from bot.api.domain import Message class GroupAdminAction(IntermediateAction): def process(self, event): chat = event.message.chat if chat.type == "private": # lets consider private chat members are admins :) self._continue(event) else: user = event.message.from_ if user is not None: chat_member = self.api.no_async.getChatMember(chat_id=chat.id, user_id=user.id) if chat_member.status in ("creator", "administrator"): self._continue(event) else: error_response = "Sorry, this command is only available to group admins." self.api.send_message(Message.create_reply(event.message, error_response))
5aefffff8a1004bc9a8289bf5907472e3434e6b3
modelreg/registration_view.py
modelreg/registration_view.py
#!/usr/bin/env python3
"""Documentation about the module... may be multi-line"""
from registration.backends.hmac.views import RegistrationView as BaseRegistrationView
from django.contrib.sites.shortcuts import get_current_site


class RegistrationView(BaseRegistrationView):
    def get_email_context(self, activation_key):
        context = super().get_email_context(activation_key)
        import ipdb
        ipdb.set_trace()
        schema = 'https' if self.request.is_secure() else 'http'
        domain = get_current_site(self.request).domain
        context['base_url'] = '%s://%s' % (schema, domain)
        return context
#!/usr/bin/env python3
"""Documentation about the module... may be multi-line"""
from registration.backends.hmac.views import RegistrationView as BaseRegistrationView
from django.contrib.sites.shortcuts import get_current_site


class RegistrationView(BaseRegistrationView):
    def get_email_context(self, activation_key):
        context = super().get_email_context(activation_key)
        schema = 'https' if self.request.is_secure() else 'http'
        domain = get_current_site(self.request).domain
        context['base_url'] = '%s://%s' % (schema, domain)
        return context
Remove debugging code, not needed outside of DEV
Remove debugging code, not needed outside of DEV
Python
agpl-3.0
modelreg/modelreg,modelreg/modelreg,modelreg/modelreg
#!/usr/bin/env python3 """Documentation about the module... may be multi-line""" from registration.backends.hmac.views import RegistrationView as BaseRegistrationView from django.contrib.sites.shortcuts import get_current_site class RegistrationView(BaseRegistrationView): def get_email_context(self, activation_key): context = super().get_email_context(activation_key) import ipdb ipdb.set_trace() schema = 'https' if self.request.is_secure() else 'http' domain = get_current_site(self.request).domain context['base_url'] = '%s://%s' % (schema, domain) return context Remove debugging code, not needed outside of DEV
#!/usr/bin/env python3 """Documentation about the module... may be multi-line""" from registration.backends.hmac.views import RegistrationView as BaseRegistrationView from django.contrib.sites.shortcuts import get_current_site class RegistrationView(BaseRegistrationView): def get_email_context(self, activation_key): context = super().get_email_context(activation_key) schema = 'https' if self.request.is_secure() else 'http' domain = get_current_site(self.request).domain context['base_url'] = '%s://%s' % (schema, domain) return context
<commit_before>#!/usr/bin/env python3 """Documentation about the module... may be multi-line""" from registration.backends.hmac.views import RegistrationView as BaseRegistrationView from django.contrib.sites.shortcuts import get_current_site class RegistrationView(BaseRegistrationView): def get_email_context(self, activation_key): context = super().get_email_context(activation_key) import ipdb ipdb.set_trace() schema = 'https' if self.request.is_secure() else 'http' domain = get_current_site(self.request).domain context['base_url'] = '%s://%s' % (schema, domain) return context <commit_msg>Remove debugging code, not needed outside of DEV<commit_after>
#!/usr/bin/env python3 """Documentation about the module... may be multi-line""" from registration.backends.hmac.views import RegistrationView as BaseRegistrationView from django.contrib.sites.shortcuts import get_current_site class RegistrationView(BaseRegistrationView): def get_email_context(self, activation_key): context = super().get_email_context(activation_key) schema = 'https' if self.request.is_secure() else 'http' domain = get_current_site(self.request).domain context['base_url'] = '%s://%s' % (schema, domain) return context
#!/usr/bin/env python3 """Documentation about the module... may be multi-line""" from registration.backends.hmac.views import RegistrationView as BaseRegistrationView from django.contrib.sites.shortcuts import get_current_site class RegistrationView(BaseRegistrationView): def get_email_context(self, activation_key): context = super().get_email_context(activation_key) import ipdb ipdb.set_trace() schema = 'https' if self.request.is_secure() else 'http' domain = get_current_site(self.request).domain context['base_url'] = '%s://%s' % (schema, domain) return context Remove debugging code, not needed outside of DEV#!/usr/bin/env python3 """Documentation about the module... may be multi-line""" from registration.backends.hmac.views import RegistrationView as BaseRegistrationView from django.contrib.sites.shortcuts import get_current_site class RegistrationView(BaseRegistrationView): def get_email_context(self, activation_key): context = super().get_email_context(activation_key) schema = 'https' if self.request.is_secure() else 'http' domain = get_current_site(self.request).domain context['base_url'] = '%s://%s' % (schema, domain) return context
<commit_before>#!/usr/bin/env python3 """Documentation about the module... may be multi-line""" from registration.backends.hmac.views import RegistrationView as BaseRegistrationView from django.contrib.sites.shortcuts import get_current_site class RegistrationView(BaseRegistrationView): def get_email_context(self, activation_key): context = super().get_email_context(activation_key) import ipdb ipdb.set_trace() schema = 'https' if self.request.is_secure() else 'http' domain = get_current_site(self.request).domain context['base_url'] = '%s://%s' % (schema, domain) return context <commit_msg>Remove debugging code, not needed outside of DEV<commit_after>#!/usr/bin/env python3 """Documentation about the module... may be multi-line""" from registration.backends.hmac.views import RegistrationView as BaseRegistrationView from django.contrib.sites.shortcuts import get_current_site class RegistrationView(BaseRegistrationView): def get_email_context(self, activation_key): context = super().get_email_context(activation_key) schema = 'https' if self.request.is_secure() else 'http' domain = get_current_site(self.request).domain context['base_url'] = '%s://%s' % (schema, domain) return context
5d97b41a7b814b078b0b7b7d930317342d0db3de
yaml_writer.py
yaml_writer.py
import os.path
import yaml
from sphinx.util.osutil import ensuredir


def create_directory(app):
    ''' Creates the yaml directory if necessary '''
    app.env.yaml_dir = os.path.join(app.builder.confdir, '_build', 'yaml')
    ensuredir(app.env.yaml_dir)


def file_path(env, name):
    ''' Creates complete yaml file path for a name '''
    return os.path.join(
        env.yaml_dir,
        name if name.endswith('.yaml') else (name + '.yaml')
    )


def write(file_path, data_dict):
    ''' Writes dictionary into a yaml file '''
    with open(file_path, 'w') as f:
        f.write(yaml.dump(data_dict, default_flow_style=False, allow_unicode=True))


def read(file_path):
    ''' Reads dictionary from a yaml file '''
    with open(file_path, 'r') as f:
        return yaml.load(f.read())
import io
import os.path
import yaml
from sphinx.util.osutil import ensuredir


def create_directory(app):
    ''' Creates the yaml directory if necessary '''
    app.env.yaml_dir = os.path.join(app.builder.confdir, '_build', 'yaml')
    ensuredir(app.env.yaml_dir)


def file_path(env, name):
    ''' Creates complete yaml file path for a name '''
    return os.path.join(
        env.yaml_dir,
        name if name.endswith('.yaml') else (name + '.yaml')
    )


def write(file_path, data_dict):
    ''' Writes dictionary into a yaml file '''
    with io.open(file_path, 'w', encoding='utf-8') as f:
        f.write(yaml.dump(data_dict, default_flow_style=False, allow_unicode=True))


def read(file_path):
    ''' Reads dictionary from a yaml file '''
    with io.open(file_path, 'r', encoding='utf-8') as f:
        return yaml.load(f.read())
Support python 2 with io.open
Support python 2 with io.open
Python
mit
Aalto-LeTech/a-plus-rst-tools,Aalto-LeTech/a-plus-rst-tools,Aalto-LeTech/a-plus-rst-tools
import os.path import yaml from sphinx.util.osutil import ensuredir def create_directory(app): ''' Creates the yaml directory if necessary ''' app.env.yaml_dir = os.path.join(app.builder.confdir, '_build', 'yaml') ensuredir(app.env.yaml_dir) def file_path(env, name): ''' Creates complete yaml file path for a name ''' return os.path.join( env.yaml_dir, name if name.endswith('.yaml') else (name + '.yaml') ) def write(file_path, data_dict): ''' Writes dictionary into a yaml file ''' with open(file_path, 'w') as f: f.write(yaml.dump(data_dict, default_flow_style=False, allow_unicode=True)) def read(file_path): ''' Reads dictionary from a yaml file ''' with open(file_path, 'r') as f: return yaml.load(f.read()) Support python 2 with io.open
import io import os.path import yaml from sphinx.util.osutil import ensuredir def create_directory(app): ''' Creates the yaml directory if necessary ''' app.env.yaml_dir = os.path.join(app.builder.confdir, '_build', 'yaml') ensuredir(app.env.yaml_dir) def file_path(env, name): ''' Creates complete yaml file path for a name ''' return os.path.join( env.yaml_dir, name if name.endswith('.yaml') else (name + '.yaml') ) def write(file_path, data_dict): ''' Writes dictionary into a yaml file ''' with io.open(file_path, 'w', encoding='utf-8') as f: f.write(yaml.dump(data_dict, default_flow_style=False, allow_unicode=True)) def read(file_path): ''' Reads dictionary from a yaml file ''' with io.open(file_path, 'r', encoding='utf-8') as f: return yaml.load(f.read())
<commit_before>import os.path import yaml from sphinx.util.osutil import ensuredir def create_directory(app): ''' Creates the yaml directory if necessary ''' app.env.yaml_dir = os.path.join(app.builder.confdir, '_build', 'yaml') ensuredir(app.env.yaml_dir) def file_path(env, name): ''' Creates complete yaml file path for a name ''' return os.path.join( env.yaml_dir, name if name.endswith('.yaml') else (name + '.yaml') ) def write(file_path, data_dict): ''' Writes dictionary into a yaml file ''' with open(file_path, 'w') as f: f.write(yaml.dump(data_dict, default_flow_style=False, allow_unicode=True)) def read(file_path): ''' Reads dictionary from a yaml file ''' with open(file_path, 'r') as f: return yaml.load(f.read()) <commit_msg>Support python 2 with io.open<commit_after>
import io import os.path import yaml from sphinx.util.osutil import ensuredir def create_directory(app): ''' Creates the yaml directory if necessary ''' app.env.yaml_dir = os.path.join(app.builder.confdir, '_build', 'yaml') ensuredir(app.env.yaml_dir) def file_path(env, name): ''' Creates complete yaml file path for a name ''' return os.path.join( env.yaml_dir, name if name.endswith('.yaml') else (name + '.yaml') ) def write(file_path, data_dict): ''' Writes dictionary into a yaml file ''' with io.open(file_path, 'w', encoding='utf-8') as f: f.write(yaml.dump(data_dict, default_flow_style=False, allow_unicode=True)) def read(file_path): ''' Reads dictionary from a yaml file ''' with io.open(file_path, 'r', encoding='utf-8') as f: return yaml.load(f.read())
import os.path import yaml from sphinx.util.osutil import ensuredir def create_directory(app): ''' Creates the yaml directory if necessary ''' app.env.yaml_dir = os.path.join(app.builder.confdir, '_build', 'yaml') ensuredir(app.env.yaml_dir) def file_path(env, name): ''' Creates complete yaml file path for a name ''' return os.path.join( env.yaml_dir, name if name.endswith('.yaml') else (name + '.yaml') ) def write(file_path, data_dict): ''' Writes dictionary into a yaml file ''' with open(file_path, 'w') as f: f.write(yaml.dump(data_dict, default_flow_style=False, allow_unicode=True)) def read(file_path): ''' Reads dictionary from a yaml file ''' with open(file_path, 'r') as f: return yaml.load(f.read()) Support python 2 with io.openimport io import os.path import yaml from sphinx.util.osutil import ensuredir def create_directory(app): ''' Creates the yaml directory if necessary ''' app.env.yaml_dir = os.path.join(app.builder.confdir, '_build', 'yaml') ensuredir(app.env.yaml_dir) def file_path(env, name): ''' Creates complete yaml file path for a name ''' return os.path.join( env.yaml_dir, name if name.endswith('.yaml') else (name + '.yaml') ) def write(file_path, data_dict): ''' Writes dictionary into a yaml file ''' with io.open(file_path, 'w', encoding='utf-8') as f: f.write(yaml.dump(data_dict, default_flow_style=False, allow_unicode=True)) def read(file_path): ''' Reads dictionary from a yaml file ''' with io.open(file_path, 'r', encoding='utf-8') as f: return yaml.load(f.read())
<commit_before>import os.path import yaml from sphinx.util.osutil import ensuredir def create_directory(app): ''' Creates the yaml directory if necessary ''' app.env.yaml_dir = os.path.join(app.builder.confdir, '_build', 'yaml') ensuredir(app.env.yaml_dir) def file_path(env, name): ''' Creates complete yaml file path for a name ''' return os.path.join( env.yaml_dir, name if name.endswith('.yaml') else (name + '.yaml') ) def write(file_path, data_dict): ''' Writes dictionary into a yaml file ''' with open(file_path, 'w') as f: f.write(yaml.dump(data_dict, default_flow_style=False, allow_unicode=True)) def read(file_path): ''' Reads dictionary from a yaml file ''' with open(file_path, 'r') as f: return yaml.load(f.read()) <commit_msg>Support python 2 with io.open<commit_after>import io import os.path import yaml from sphinx.util.osutil import ensuredir def create_directory(app): ''' Creates the yaml directory if necessary ''' app.env.yaml_dir = os.path.join(app.builder.confdir, '_build', 'yaml') ensuredir(app.env.yaml_dir) def file_path(env, name): ''' Creates complete yaml file path for a name ''' return os.path.join( env.yaml_dir, name if name.endswith('.yaml') else (name + '.yaml') ) def write(file_path, data_dict): ''' Writes dictionary into a yaml file ''' with io.open(file_path, 'w', encoding='utf-8') as f: f.write(yaml.dump(data_dict, default_flow_style=False, allow_unicode=True)) def read(file_path): ''' Reads dictionary from a yaml file ''' with io.open(file_path, 'r', encoding='utf-8') as f: return yaml.load(f.read())
a2fe7d1bb38bedee808c6b1a21cd5e3d93863c6c
winthrop/urls.py
winthrop/urls.py
"""winthrop URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.10/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url from django.contrib import admin urlpatterns = [ url(r'^admin/', admin.site.urls), ]
"""winthrop URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.10/topics/http/urls/ """ from django.conf.urls import url from django.contrib import admin from django.views.generic.base import RedirectView urlpatterns = [ # for now, since there is not yet any public-facing site, # redirect base url to admin index page url(r'^$', RedirectView.as_view(pattern_name='admin:index')), url(r'^admin/', admin.site.urls), ]
Add redirect from site base url to admin index for now
Add redirect from site base url to admin index for now
Python
apache-2.0
Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django
"""winthrop URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.10/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url from django.contrib import admin urlpatterns = [ url(r'^admin/', admin.site.urls), ] Add redirect from site base url to admin index for now
"""winthrop URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.10/topics/http/urls/ """ from django.conf.urls import url from django.contrib import admin from django.views.generic.base import RedirectView urlpatterns = [ # for now, since there is not yet any public-facing site, # redirect base url to admin index page url(r'^$', RedirectView.as_view(pattern_name='admin:index')), url(r'^admin/', admin.site.urls), ]
<commit_before>"""winthrop URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.10/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url from django.contrib import admin urlpatterns = [ url(r'^admin/', admin.site.urls), ] <commit_msg>Add redirect from site base url to admin index for now<commit_after>
"""winthrop URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.10/topics/http/urls/ """ from django.conf.urls import url from django.contrib import admin from django.views.generic.base import RedirectView urlpatterns = [ # for now, since there is not yet any public-facing site, # redirect base url to admin index page url(r'^$', RedirectView.as_view(pattern_name='admin:index')), url(r'^admin/', admin.site.urls), ]
"""winthrop URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.10/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url from django.contrib import admin urlpatterns = [ url(r'^admin/', admin.site.urls), ] Add redirect from site base url to admin index for now"""winthrop URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.10/topics/http/urls/ """ from django.conf.urls import url from django.contrib import admin from django.views.generic.base import RedirectView urlpatterns = [ # for now, since there is not yet any public-facing site, # redirect base url to admin index page url(r'^$', RedirectView.as_view(pattern_name='admin:index')), url(r'^admin/', admin.site.urls), ]
<commit_before>"""winthrop URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.10/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url from django.contrib import admin urlpatterns = [ url(r'^admin/', admin.site.urls), ] <commit_msg>Add redirect from site base url to admin index for now<commit_after>"""winthrop URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.10/topics/http/urls/ """ from django.conf.urls import url from django.contrib import admin from django.views.generic.base import RedirectView urlpatterns = [ # for now, since there is not yet any public-facing site, # redirect base url to admin index page url(r'^$', RedirectView.as_view(pattern_name='admin:index')), url(r'^admin/', admin.site.urls), ]
303bd2c3cd605581bd46410b3680f2ec5d193429
peripydic/util/functions.py
peripydic/util/functions.py
import numpy as np from ..util import linalgebra def w(problem,X,type): if type == "ONE": return 1. if type == "EXP": len = linalgebra.norm(X) return np.exp(- (len*len) / problem.neighbors.horizon) return 1.
import numpy as np from ..util import linalgebra def w(problem,X,type): if type == "ONE": return 1. if type == "EXP": len = linalgebra.norm(X) return np.exp(- (len*len) / problem.neighbors.horizon / problem.neighbors.horizon) if type == "NORM": return 1. / linalgebra.norm(X) return 1.
Add NORM as influence function
Add NORM as influence function
Python
mit
ilyasst/peridynamics_1D,lm2-poly/peridynamics_1D,lm2-poly/peridynamics_1D
import numpy as np from ..util import linalgebra def w(problem,X,type): if type == "ONE": return 1. if type == "EXP": len = linalgebra.norm(X) return np.exp(- (len*len) / problem.neighbors.horizon) return 1.Add NORM as influence function
import numpy as np from ..util import linalgebra def w(problem,X,type): if type == "ONE": return 1. if type == "EXP": len = linalgebra.norm(X) return np.exp(- (len*len) / problem.neighbors.horizon / problem.neighbors.horizon) if type == "NORM": return 1. / linalgebra.norm(X) return 1.
<commit_before>import numpy as np from ..util import linalgebra def w(problem,X,type): if type == "ONE": return 1. if type == "EXP": len = linalgebra.norm(X) return np.exp(- (len*len) / problem.neighbors.horizon) return 1.<commit_msg>Add NORM as influence function<commit_after>
import numpy as np from ..util import linalgebra def w(problem,X,type): if type == "ONE": return 1. if type == "EXP": len = linalgebra.norm(X) return np.exp(- (len*len) / problem.neighbors.horizon / problem.neighbors.horizon) if type == "NORM": return 1. / linalgebra.norm(X) return 1.
import numpy as np from ..util import linalgebra def w(problem,X,type): if type == "ONE": return 1. if type == "EXP": len = linalgebra.norm(X) return np.exp(- (len*len) / problem.neighbors.horizon) return 1.Add NORM as influence functionimport numpy as np from ..util import linalgebra def w(problem,X,type): if type == "ONE": return 1. if type == "EXP": len = linalgebra.norm(X) return np.exp(- (len*len) / problem.neighbors.horizon / problem.neighbors.horizon) if type == "NORM": return 1. / linalgebra.norm(X) return 1.
<commit_before>import numpy as np from ..util import linalgebra def w(problem,X,type): if type == "ONE": return 1. if type == "EXP": len = linalgebra.norm(X) return np.exp(- (len*len) / problem.neighbors.horizon) return 1.<commit_msg>Add NORM as influence function<commit_after>import numpy as np from ..util import linalgebra def w(problem,X,type): if type == "ONE": return 1. if type == "EXP": len = linalgebra.norm(X) return np.exp(- (len*len) / problem.neighbors.horizon / problem.neighbors.horizon) if type == "NORM": return 1. / linalgebra.norm(X) return 1.
5e62db3e6abd19c99fb565c15cdd1527599dbd9d
tools/gyp_dart.py
tools/gyp_dart.py
#!/usr/bin/env python # Copyright (c) 2012 The Dart Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # This script is wrapper for Dart that adds some support for how GYP # is invoked by Dart beyond what can be done in the gclient hooks. import os import subprocess import sys def Execute(args): process = subprocess.Popen(args) process.wait() return process.returncode if __name__ == '__main__': args = ['python', "dart/third_party/gyp/gyp", "--depth=dart", "-Idart/tools/gyp/all.gypi", "dart/dart.gyp"] if sys.platform == 'win32': # Generate Visual Studio 2008 compatible files by default. if not os.environ.get('GYP_MSVS_VERSION'): args.extend(['-G', 'msvs_version=2008']) sys.exit(Execute(args))
#!/usr/bin/env python # Copyright (c) 2012 The Dart Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Invoke gyp to generate build files for building the Dart VM. """ import os import subprocess import sys def execute(args): process = subprocess.Popen(args) process.wait() return process.returncode def main(): args = ['python', 'dart/third_party/gyp/gyp', '--depth=dart', '-Idart/tools/gyp/all.gypi', 'dart/dart.gyp'] if sys.platform == 'win32': # Generate Visual Studio 2008 compatible files by default. if not os.environ.get('GYP_MSVS_VERSION'): args.extend(['-G', 'msvs_version=2008']) sys.exit(execute(args)) if __name__ == '__main__': main()
Make code follow the Python style guidelines
Make code follow the Python style guidelines + Use a doc string for the whole file. + Lower case function names. + Consistently use single-quotes for quoted strings. + align wrapped elements with opening delimiter. + use a main() function Review URL: https://chromiumcodereview.appspot.com//10837127 git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@10307 260f80e4-7a28-3924-810f-c04153c831b5
Python
bsd-3-clause
dart-archive/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dart-lang/sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-lang/sdk,dart-lang/sdk,dartino/dart-sdk,dart-lang/sdk,dart-lang/sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-lang/sdk
#!/usr/bin/env python # Copyright (c) 2012 The Dart Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # This script is wrapper for Dart that adds some support for how GYP # is invoked by Dart beyond what can be done in the gclient hooks. import os import subprocess import sys def Execute(args): process = subprocess.Popen(args) process.wait() return process.returncode if __name__ == '__main__': args = ['python', "dart/third_party/gyp/gyp", "--depth=dart", "-Idart/tools/gyp/all.gypi", "dart/dart.gyp"] if sys.platform == 'win32': # Generate Visual Studio 2008 compatible files by default. if not os.environ.get('GYP_MSVS_VERSION'): args.extend(['-G', 'msvs_version=2008']) sys.exit(Execute(args)) Make code follow the Python style guidelines + Use a doc string for the whole file. + Lower case function names. + Consistently use single-quotes for quoted strings. + align wrapped elements with opening delimiter. + use a main() function Review URL: https://chromiumcodereview.appspot.com//10837127 git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@10307 260f80e4-7a28-3924-810f-c04153c831b5
#!/usr/bin/env python # Copyright (c) 2012 The Dart Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Invoke gyp to generate build files for building the Dart VM. """ import os import subprocess import sys def execute(args): process = subprocess.Popen(args) process.wait() return process.returncode def main(): args = ['python', 'dart/third_party/gyp/gyp', '--depth=dart', '-Idart/tools/gyp/all.gypi', 'dart/dart.gyp'] if sys.platform == 'win32': # Generate Visual Studio 2008 compatible files by default. if not os.environ.get('GYP_MSVS_VERSION'): args.extend(['-G', 'msvs_version=2008']) sys.exit(execute(args)) if __name__ == '__main__': main()
<commit_before>#!/usr/bin/env python # Copyright (c) 2012 The Dart Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # This script is wrapper for Dart that adds some support for how GYP # is invoked by Dart beyond what can be done in the gclient hooks. import os import subprocess import sys def Execute(args): process = subprocess.Popen(args) process.wait() return process.returncode if __name__ == '__main__': args = ['python', "dart/third_party/gyp/gyp", "--depth=dart", "-Idart/tools/gyp/all.gypi", "dart/dart.gyp"] if sys.platform == 'win32': # Generate Visual Studio 2008 compatible files by default. if not os.environ.get('GYP_MSVS_VERSION'): args.extend(['-G', 'msvs_version=2008']) sys.exit(Execute(args)) <commit_msg>Make code follow the Python style guidelines + Use a doc string for the whole file. + Lower case function names. + Consistently use single-quotes for quoted strings. + align wrapped elements with opening delimiter. + use a main() function Review URL: https://chromiumcodereview.appspot.com//10837127 git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@10307 260f80e4-7a28-3924-810f-c04153c831b5<commit_after>
#!/usr/bin/env python # Copyright (c) 2012 The Dart Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Invoke gyp to generate build files for building the Dart VM. """ import os import subprocess import sys def execute(args): process = subprocess.Popen(args) process.wait() return process.returncode def main(): args = ['python', 'dart/third_party/gyp/gyp', '--depth=dart', '-Idart/tools/gyp/all.gypi', 'dart/dart.gyp'] if sys.platform == 'win32': # Generate Visual Studio 2008 compatible files by default. if not os.environ.get('GYP_MSVS_VERSION'): args.extend(['-G', 'msvs_version=2008']) sys.exit(execute(args)) if __name__ == '__main__': main()
#!/usr/bin/env python # Copyright (c) 2012 The Dart Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # This script is wrapper for Dart that adds some support for how GYP # is invoked by Dart beyond what can be done in the gclient hooks. import os import subprocess import sys def Execute(args): process = subprocess.Popen(args) process.wait() return process.returncode if __name__ == '__main__': args = ['python', "dart/third_party/gyp/gyp", "--depth=dart", "-Idart/tools/gyp/all.gypi", "dart/dart.gyp"] if sys.platform == 'win32': # Generate Visual Studio 2008 compatible files by default. if not os.environ.get('GYP_MSVS_VERSION'): args.extend(['-G', 'msvs_version=2008']) sys.exit(Execute(args)) Make code follow the Python style guidelines + Use a doc string for the whole file. + Lower case function names. + Consistently use single-quotes for quoted strings. + align wrapped elements with opening delimiter. + use a main() function Review URL: https://chromiumcodereview.appspot.com//10837127 git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@10307 260f80e4-7a28-3924-810f-c04153c831b5#!/usr/bin/env python # Copyright (c) 2012 The Dart Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Invoke gyp to generate build files for building the Dart VM. """ import os import subprocess import sys def execute(args): process = subprocess.Popen(args) process.wait() return process.returncode def main(): args = ['python', 'dart/third_party/gyp/gyp', '--depth=dart', '-Idart/tools/gyp/all.gypi', 'dart/dart.gyp'] if sys.platform == 'win32': # Generate Visual Studio 2008 compatible files by default. if not os.environ.get('GYP_MSVS_VERSION'): args.extend(['-G', 'msvs_version=2008']) sys.exit(execute(args)) if __name__ == '__main__': main()
<commit_before>#!/usr/bin/env python # Copyright (c) 2012 The Dart Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # This script is wrapper for Dart that adds some support for how GYP # is invoked by Dart beyond what can be done in the gclient hooks. import os import subprocess import sys def Execute(args): process = subprocess.Popen(args) process.wait() return process.returncode if __name__ == '__main__': args = ['python', "dart/third_party/gyp/gyp", "--depth=dart", "-Idart/tools/gyp/all.gypi", "dart/dart.gyp"] if sys.platform == 'win32': # Generate Visual Studio 2008 compatible files by default. if not os.environ.get('GYP_MSVS_VERSION'): args.extend(['-G', 'msvs_version=2008']) sys.exit(Execute(args)) <commit_msg>Make code follow the Python style guidelines + Use a doc string for the whole file. + Lower case function names. + Consistently use single-quotes for quoted strings. + align wrapped elements with opening delimiter. + use a main() function Review URL: https://chromiumcodereview.appspot.com//10837127 git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@10307 260f80e4-7a28-3924-810f-c04153c831b5<commit_after>#!/usr/bin/env python # Copyright (c) 2012 The Dart Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Invoke gyp to generate build files for building the Dart VM. """ import os import subprocess import sys def execute(args): process = subprocess.Popen(args) process.wait() return process.returncode def main(): args = ['python', 'dart/third_party/gyp/gyp', '--depth=dart', '-Idart/tools/gyp/all.gypi', 'dart/dart.gyp'] if sys.platform == 'win32': # Generate Visual Studio 2008 compatible files by default. if not os.environ.get('GYP_MSVS_VERSION'): args.extend(['-G', 'msvs_version=2008']) sys.exit(execute(args)) if __name__ == '__main__': main()
1360df4f50417b472c51b679d64102f3b3d5ebec
property_transformation.py
property_transformation.py
from types import UnicodeType, StringType class PropertyMappingFailedException(Exception): pass def get_transformed_properties(source_properties, prop_map): results = {} for key, value in prop_map.iteritems(): if type(value) in (StringType, UnicodeType): if value in source_properties: results[key] = source_properties[value] else: raise PropertyMappingFailedException("property %s not found in source feature" % (value)) elif type(value) == dict: if "static" in value: results[key] = value["static"] else: raise PropertyMappingFailedException("Unhandled mapping for key:%s value type:%s" % (key, type(value))) return results
from types import UnicodeType, StringType class PropertyMappingFailedException(Exception): pass def get_transformed_properties(source_properties, prop_map): results = {} for key, value in prop_map.iteritems(): if type(value) in (StringType, UnicodeType): if value in source_properties: results[key] = source_properties[value] else: raise PropertyMappingFailedException("property %s not found in source feature" % (value)) elif type(value) == dict: if "static" in value: results[key] = value["static"] else: raise PropertyMappingFailedException( "Failed to find key for mapping in dict for field:%s" % (key,)) else: raise PropertyMappingFailedException("Unhandled mapping for key:%s value type:%s" % (key, type(value))) return results
Raise exception if unable to find a usable key in property mapping dict
Raise exception if unable to find a usable key in property mapping dict
Python
mit
OpenBounds/Processing
from types import UnicodeType, StringType class PropertyMappingFailedException(Exception): pass def get_transformed_properties(source_properties, prop_map): results = {} for key, value in prop_map.iteritems(): if type(value) in (StringType, UnicodeType): if value in source_properties: results[key] = source_properties[value] else: raise PropertyMappingFailedException("property %s not found in source feature" % (value)) elif type(value) == dict: if "static" in value: results[key] = value["static"] else: raise PropertyMappingFailedException("Unhandled mapping for key:%s value type:%s" % (key, type(value))) return results Raise exception if unable to find a usable key in property mapping dict
from types import UnicodeType, StringType class PropertyMappingFailedException(Exception): pass def get_transformed_properties(source_properties, prop_map): results = {} for key, value in prop_map.iteritems(): if type(value) in (StringType, UnicodeType): if value in source_properties: results[key] = source_properties[value] else: raise PropertyMappingFailedException("property %s not found in source feature" % (value)) elif type(value) == dict: if "static" in value: results[key] = value["static"] else: raise PropertyMappingFailedException( "Failed to find key for mapping in dict for field:%s" % (key,)) else: raise PropertyMappingFailedException("Unhandled mapping for key:%s value type:%s" % (key, type(value))) return results
<commit_before>from types import UnicodeType, StringType class PropertyMappingFailedException(Exception): pass def get_transformed_properties(source_properties, prop_map): results = {} for key, value in prop_map.iteritems(): if type(value) in (StringType, UnicodeType): if value in source_properties: results[key] = source_properties[value] else: raise PropertyMappingFailedException("property %s not found in source feature" % (value)) elif type(value) == dict: if "static" in value: results[key] = value["static"] else: raise PropertyMappingFailedException("Unhandled mapping for key:%s value type:%s" % (key, type(value))) return results <commit_msg>Raise exception if unable to find a usable key in property mapping dict<commit_after>
from types import UnicodeType, StringType class PropertyMappingFailedException(Exception): pass def get_transformed_properties(source_properties, prop_map): results = {} for key, value in prop_map.iteritems(): if type(value) in (StringType, UnicodeType): if value in source_properties: results[key] = source_properties[value] else: raise PropertyMappingFailedException("property %s not found in source feature" % (value)) elif type(value) == dict: if "static" in value: results[key] = value["static"] else: raise PropertyMappingFailedException( "Failed to find key for mapping in dict for field:%s" % (key,)) else: raise PropertyMappingFailedException("Unhandled mapping for key:%s value type:%s" % (key, type(value))) return results
from types import UnicodeType, StringType class PropertyMappingFailedException(Exception): pass def get_transformed_properties(source_properties, prop_map): results = {} for key, value in prop_map.iteritems(): if type(value) in (StringType, UnicodeType): if value in source_properties: results[key] = source_properties[value] else: raise PropertyMappingFailedException("property %s not found in source feature" % (value)) elif type(value) == dict: if "static" in value: results[key] = value["static"] else: raise PropertyMappingFailedException("Unhandled mapping for key:%s value type:%s" % (key, type(value))) return results Raise exception if unable to find a usable key in property mapping dictfrom types import UnicodeType, StringType class PropertyMappingFailedException(Exception): pass def get_transformed_properties(source_properties, prop_map): results = {} for key, value in prop_map.iteritems(): if type(value) in (StringType, UnicodeType): if value in source_properties: results[key] = source_properties[value] else: raise PropertyMappingFailedException("property %s not found in source feature" % (value)) elif type(value) == dict: if "static" in value: results[key] = value["static"] else: raise PropertyMappingFailedException( "Failed to find key for mapping in dict for field:%s" % (key,)) else: raise PropertyMappingFailedException("Unhandled mapping for key:%s value type:%s" % (key, type(value))) return results
<commit_before>from types import UnicodeType, StringType class PropertyMappingFailedException(Exception): pass def get_transformed_properties(source_properties, prop_map): results = {} for key, value in prop_map.iteritems(): if type(value) in (StringType, UnicodeType): if value in source_properties: results[key] = source_properties[value] else: raise PropertyMappingFailedException("property %s not found in source feature" % (value)) elif type(value) == dict: if "static" in value: results[key] = value["static"] else: raise PropertyMappingFailedException("Unhandled mapping for key:%s value type:%s" % (key, type(value))) return results <commit_msg>Raise exception if unable to find a usable key in property mapping dict<commit_after>from types import UnicodeType, StringType class PropertyMappingFailedException(Exception): pass def get_transformed_properties(source_properties, prop_map): results = {} for key, value in prop_map.iteritems(): if type(value) in (StringType, UnicodeType): if value in source_properties: results[key] = source_properties[value] else: raise PropertyMappingFailedException("property %s not found in source feature" % (value)) elif type(value) == dict: if "static" in value: results[key] = value["static"] else: raise PropertyMappingFailedException( "Failed to find key for mapping in dict for field:%s" % (key,)) else: raise PropertyMappingFailedException("Unhandled mapping for key:%s value type:%s" % (key, type(value))) return results
05de5f3c951f334cc7a3f6dfbe780942d801e176
feincms/contrib/richtext.py
feincms/contrib/richtext.py
from django import forms from django.db import models class RichTextFormField(forms.fields.CharField): def __init__(self, *args, **kwargs): super(RichTextFormField, self).__init__(*args, **kwargs) css_class = self.widget.attrs.get('class', '') css_class += ' item-richtext' self.widget.attrs['class'] = css_class def clean(self, value): # TODO add cleansing here? return super(RichTextFormField, self).clean(value) class RichTextField(models.TextField): """ Drop-in replacement for Django's ``models.TextField`` which allows editing rich text instead of plain text in the item editor. """ formfield = RichTextFormField try: from south.modelsinspector import add_introspection_rules RichTextField_introspection_rule = ( (RichTextField,), [], {}, ) add_introspection_rules(rules=[RichTextField_introspection_rule], patterns=["^feincms\.contrib\.richtext"]) except ImportError: pass
from django import forms from django.db import models class RichTextFormField(forms.fields.CharField): def __init__(self, *args, **kwargs): super(RichTextFormField, self).__init__(*args, **kwargs) css_class = self.widget.attrs.get('class', '') css_class += ' item-richtext' self.widget.attrs['class'] = css_class def clean(self, value): # TODO add cleansing here? return super(RichTextFormField, self).clean(value) class RichTextField(models.TextField): """ Drop-in replacement for Django's ``models.TextField`` which allows editing rich text instead of plain text in the item editor. """ def formfield(self, form_class=RichTextFormField, **kwargs): return super(RichTextField, self).formfield(form_class=form_class, **kwargs) try: from south.modelsinspector import add_introspection_rules RichTextField_introspection_rule = ( (RichTextField,), [], {}, ) add_introspection_rules(rules=[RichTextField_introspection_rule], patterns=["^feincms\.contrib\.richtext"]) except ImportError: pass
Fix RichTextField form field generation.
Fix RichTextField form field generation. All standard properties that would affect formfield were being ignored (such as blank=True).
Python
bsd-3-clause
michaelkuty/feincms,mjl/feincms,feincms/feincms,nickburlett/feincms,mjl/feincms,matthiask/feincms2-content,matthiask/django-content-editor,pjdelport/feincms,joshuajonah/feincms,matthiask/django-content-editor,joshuajonah/feincms,joshuajonah/feincms,mjl/feincms,pjdelport/feincms,michaelkuty/feincms,nickburlett/feincms,michaelkuty/feincms,nickburlett/feincms,nickburlett/feincms,feincms/feincms,michaelkuty/feincms,matthiask/feincms2-content,matthiask/feincms2-content,joshuajonah/feincms,pjdelport/feincms,matthiask/django-content-editor,feincms/feincms,matthiask/django-content-editor
from django import forms from django.db import models class RichTextFormField(forms.fields.CharField): def __init__(self, *args, **kwargs): super(RichTextFormField, self).__init__(*args, **kwargs) css_class = self.widget.attrs.get('class', '') css_class += ' item-richtext' self.widget.attrs['class'] = css_class def clean(self, value): # TODO add cleansing here? return super(RichTextFormField, self).clean(value) class RichTextField(models.TextField): """ Drop-in replacement for Django's ``models.TextField`` which allows editing rich text instead of plain text in the item editor. """ formfield = RichTextFormField try: from south.modelsinspector import add_introspection_rules RichTextField_introspection_rule = ( (RichTextField,), [], {}, ) add_introspection_rules(rules=[RichTextField_introspection_rule], patterns=["^feincms\.contrib\.richtext"]) except ImportError: pass Fix RichTextField form field generation. All standard properties that would affect formfield were being ignored (such as blank=True).
from django import forms from django.db import models class RichTextFormField(forms.fields.CharField): def __init__(self, *args, **kwargs): super(RichTextFormField, self).__init__(*args, **kwargs) css_class = self.widget.attrs.get('class', '') css_class += ' item-richtext' self.widget.attrs['class'] = css_class def clean(self, value): # TODO add cleansing here? return super(RichTextFormField, self).clean(value) class RichTextField(models.TextField): """ Drop-in replacement for Django's ``models.TextField`` which allows editing rich text instead of plain text in the item editor. """ def formfield(self, form_class=RichTextFormField, **kwargs): return super(RichTextField, self).formfield(form_class=form_class, **kwargs) try: from south.modelsinspector import add_introspection_rules RichTextField_introspection_rule = ( (RichTextField,), [], {}, ) add_introspection_rules(rules=[RichTextField_introspection_rule], patterns=["^feincms\.contrib\.richtext"]) except ImportError: pass
<commit_before>from django import forms from django.db import models class RichTextFormField(forms.fields.CharField): def __init__(self, *args, **kwargs): super(RichTextFormField, self).__init__(*args, **kwargs) css_class = self.widget.attrs.get('class', '') css_class += ' item-richtext' self.widget.attrs['class'] = css_class def clean(self, value): # TODO add cleansing here? return super(RichTextFormField, self).clean(value) class RichTextField(models.TextField): """ Drop-in replacement for Django's ``models.TextField`` which allows editing rich text instead of plain text in the item editor. """ formfield = RichTextFormField try: from south.modelsinspector import add_introspection_rules RichTextField_introspection_rule = ( (RichTextField,), [], {}, ) add_introspection_rules(rules=[RichTextField_introspection_rule], patterns=["^feincms\.contrib\.richtext"]) except ImportError: pass <commit_msg>Fix RichTextField form field generation. All standard properties that would affect formfield were being ignored (such as blank=True).<commit_after>
from django import forms from django.db import models class RichTextFormField(forms.fields.CharField): def __init__(self, *args, **kwargs): super(RichTextFormField, self).__init__(*args, **kwargs) css_class = self.widget.attrs.get('class', '') css_class += ' item-richtext' self.widget.attrs['class'] = css_class def clean(self, value): # TODO add cleansing here? return super(RichTextFormField, self).clean(value) class RichTextField(models.TextField): """ Drop-in replacement for Django's ``models.TextField`` which allows editing rich text instead of plain text in the item editor. """ def formfield(self, form_class=RichTextFormField, **kwargs): return super(RichTextField, self).formfield(form_class=form_class, **kwargs) try: from south.modelsinspector import add_introspection_rules RichTextField_introspection_rule = ( (RichTextField,), [], {}, ) add_introspection_rules(rules=[RichTextField_introspection_rule], patterns=["^feincms\.contrib\.richtext"]) except ImportError: pass
from django import forms from django.db import models class RichTextFormField(forms.fields.CharField): def __init__(self, *args, **kwargs): super(RichTextFormField, self).__init__(*args, **kwargs) css_class = self.widget.attrs.get('class', '') css_class += ' item-richtext' self.widget.attrs['class'] = css_class def clean(self, value): # TODO add cleansing here? return super(RichTextFormField, self).clean(value) class RichTextField(models.TextField): """ Drop-in replacement for Django's ``models.TextField`` which allows editing rich text instead of plain text in the item editor. """ formfield = RichTextFormField try: from south.modelsinspector import add_introspection_rules RichTextField_introspection_rule = ( (RichTextField,), [], {}, ) add_introspection_rules(rules=[RichTextField_introspection_rule], patterns=["^feincms\.contrib\.richtext"]) except ImportError: pass Fix RichTextField form field generation. All standard properties that would affect formfield were being ignored (such as blank=True).from django import forms from django.db import models class RichTextFormField(forms.fields.CharField): def __init__(self, *args, **kwargs): super(RichTextFormField, self).__init__(*args, **kwargs) css_class = self.widget.attrs.get('class', '') css_class += ' item-richtext' self.widget.attrs['class'] = css_class def clean(self, value): # TODO add cleansing here? return super(RichTextFormField, self).clean(value) class RichTextField(models.TextField): """ Drop-in replacement for Django's ``models.TextField`` which allows editing rich text instead of plain text in the item editor. """ def formfield(self, form_class=RichTextFormField, **kwargs): return super(RichTextField, self).formfield(form_class=form_class, **kwargs) try: from south.modelsinspector import add_introspection_rules RichTextField_introspection_rule = ( (RichTextField,), [], {}, ) add_introspection_rules(rules=[RichTextField_introspection_rule], patterns=["^feincms\.contrib\.richtext"]) except ImportError: pass
<commit_before>from django import forms from django.db import models class RichTextFormField(forms.fields.CharField): def __init__(self, *args, **kwargs): super(RichTextFormField, self).__init__(*args, **kwargs) css_class = self.widget.attrs.get('class', '') css_class += ' item-richtext' self.widget.attrs['class'] = css_class def clean(self, value): # TODO add cleansing here? return super(RichTextFormField, self).clean(value) class RichTextField(models.TextField): """ Drop-in replacement for Django's ``models.TextField`` which allows editing rich text instead of plain text in the item editor. """ formfield = RichTextFormField try: from south.modelsinspector import add_introspection_rules RichTextField_introspection_rule = ( (RichTextField,), [], {}, ) add_introspection_rules(rules=[RichTextField_introspection_rule], patterns=["^feincms\.contrib\.richtext"]) except ImportError: pass <commit_msg>Fix RichTextField form field generation. All standard properties that would affect formfield were being ignored (such as blank=True).<commit_after>from django import forms from django.db import models class RichTextFormField(forms.fields.CharField): def __init__(self, *args, **kwargs): super(RichTextFormField, self).__init__(*args, **kwargs) css_class = self.widget.attrs.get('class', '') css_class += ' item-richtext' self.widget.attrs['class'] = css_class def clean(self, value): # TODO add cleansing here? return super(RichTextFormField, self).clean(value) class RichTextField(models.TextField): """ Drop-in replacement for Django's ``models.TextField`` which allows editing rich text instead of plain text in the item editor. """ def formfield(self, form_class=RichTextFormField, **kwargs): return super(RichTextField, self).formfield(form_class=form_class, **kwargs) try: from south.modelsinspector import add_introspection_rules RichTextField_introspection_rule = ( (RichTextField,), [], {}, ) add_introspection_rules(rules=[RichTextField_introspection_rule], patterns=["^feincms\.contrib\.richtext"]) except ImportError: pass
200efbba25130b84da80720329794e4c47806573
NDIR_RasPi_Python/example.py
NDIR_RasPi_Python/example.py
import NDIR import time sensor = NDIR.Sensor(0x4D) sensor.begin() while True: sensor.measure() print("CO2 Concentration: " + str(sensor.ppm) + "ppm") time.sleep(1)
import NDIR import time sensor = NDIR.Sensor(0x4D) if sensor.begin() == False: print("Adaptor initialization FAILED!") exit() while True: if sensor.measure(): print("CO2 Concentration: " + str(sensor.ppm) + "ppm") else: print("Sensor communication ERROR.") time.sleep(1)
Make use of the return value of begin() and measure()
Make use of the return value of begin() and measure()
Python
mit
SandboxElectronics/NDIR,SandboxElectronics/NDIR,SandboxElectronics/NDIR
import NDIR import time sensor = NDIR.Sensor(0x4D) sensor.begin() while True: sensor.measure() print("CO2 Concentration: " + str(sensor.ppm) + "ppm") time.sleep(1) Make use of the return value of begin() and measure()
import NDIR import time sensor = NDIR.Sensor(0x4D) if sensor.begin() == False: print("Adaptor initialization FAILED!") exit() while True: if sensor.measure(): print("CO2 Concentration: " + str(sensor.ppm) + "ppm") else: print("Sensor communication ERROR.") time.sleep(1)
<commit_before>import NDIR import time sensor = NDIR.Sensor(0x4D) sensor.begin() while True: sensor.measure() print("CO2 Concentration: " + str(sensor.ppm) + "ppm") time.sleep(1) <commit_msg>Make use of the return value of begin() and measure()<commit_after>
import NDIR import time sensor = NDIR.Sensor(0x4D) if sensor.begin() == False: print("Adaptor initialization FAILED!") exit() while True: if sensor.measure(): print("CO2 Concentration: " + str(sensor.ppm) + "ppm") else: print("Sensor communication ERROR.") time.sleep(1)
import NDIR import time sensor = NDIR.Sensor(0x4D) sensor.begin() while True: sensor.measure() print("CO2 Concentration: " + str(sensor.ppm) + "ppm") time.sleep(1) Make use of the return value of begin() and measure()import NDIR import time sensor = NDIR.Sensor(0x4D) if sensor.begin() == False: print("Adaptor initialization FAILED!") exit() while True: if sensor.measure(): print("CO2 Concentration: " + str(sensor.ppm) + "ppm") else: print("Sensor communication ERROR.") time.sleep(1)
<commit_before>import NDIR import time sensor = NDIR.Sensor(0x4D) sensor.begin() while True: sensor.measure() print("CO2 Concentration: " + str(sensor.ppm) + "ppm") time.sleep(1) <commit_msg>Make use of the return value of begin() and measure()<commit_after>import NDIR import time sensor = NDIR.Sensor(0x4D) if sensor.begin() == False: print("Adaptor initialization FAILED!") exit() while True: if sensor.measure(): print("CO2 Concentration: " + str(sensor.ppm) + "ppm") else: print("Sensor communication ERROR.") time.sleep(1)
c75071ad2dd8c2e5efdef660f1aa33ffa28f0613
frontends/etiquette_repl.py
frontends/etiquette_repl.py
# Use with # py -i etiquette_easy.py import etiquette import os import sys P = etiquette.photodb.PhotoDB() import traceback def easytagger(): while True: i = input('> ') if i.startswith('?'): i = i.split('?')[1] or None try: etiquette.tag_export.stdout([P.get_tag(i)]) except: traceback.print_exc() else: P.easybake(i) def photag(photoid): photo = P.get_photo_by_id(photoid) print(photo.tags()) while True: photo.add_tag(input('> ')) get=P.get_tag
# Use with # py -i etiquette_easy.py import argparse import os import sys import traceback import etiquette P = etiquette.photodb.PhotoDB() def easytagger(): while True: i = input('> ') if i.startswith('?'): i = i.split('?')[1] or None try: etiquette.tag_export.stdout([P.get_tag(i)]) except: traceback.print_exc() else: P.easybake(i) def photag(photoid): photo = P.get_photo_by_id(photoid) print(photo.tags()) while True: photo.add_tag(input('> ')) get = P.get_tag def erepl_argparse(args): if args.exec_statement: exec(args.exec_statement) def main(argv): parser = argparse.ArgumentParser() parser.add_argument('--exec', dest='exec_statement', default=None) parser.set_defaults(func=erepl_argparse) args = parser.parse_args(argv) args.func(args) if __name__ == '__main__': main(sys.argv[1:])
Clean up the erepl code a little bit.
Clean up the erepl code a little bit.
Python
bsd-3-clause
voussoir/etiquette,voussoir/etiquette,voussoir/etiquette
# Use with # py -i etiquette_easy.py import etiquette import os import sys P = etiquette.photodb.PhotoDB() import traceback def easytagger(): while True: i = input('> ') if i.startswith('?'): i = i.split('?')[1] or None try: etiquette.tag_export.stdout([P.get_tag(i)]) except: traceback.print_exc() else: P.easybake(i) def photag(photoid): photo = P.get_photo_by_id(photoid) print(photo.tags()) while True: photo.add_tag(input('> ')) get=P.get_tag Clean up the erepl code a little bit.
# Use with # py -i etiquette_easy.py import argparse import os import sys import traceback import etiquette P = etiquette.photodb.PhotoDB() def easytagger(): while True: i = input('> ') if i.startswith('?'): i = i.split('?')[1] or None try: etiquette.tag_export.stdout([P.get_tag(i)]) except: traceback.print_exc() else: P.easybake(i) def photag(photoid): photo = P.get_photo_by_id(photoid) print(photo.tags()) while True: photo.add_tag(input('> ')) get = P.get_tag def erepl_argparse(args): if args.exec_statement: exec(args.exec_statement) def main(argv): parser = argparse.ArgumentParser() parser.add_argument('--exec', dest='exec_statement', default=None) parser.set_defaults(func=erepl_argparse) args = parser.parse_args(argv) args.func(args) if __name__ == '__main__': main(sys.argv[1:])
<commit_before># Use with # py -i etiquette_easy.py import etiquette import os import sys P = etiquette.photodb.PhotoDB() import traceback def easytagger(): while True: i = input('> ') if i.startswith('?'): i = i.split('?')[1] or None try: etiquette.tag_export.stdout([P.get_tag(i)]) except: traceback.print_exc() else: P.easybake(i) def photag(photoid): photo = P.get_photo_by_id(photoid) print(photo.tags()) while True: photo.add_tag(input('> ')) get=P.get_tag <commit_msg>Clean up the erepl code a little bit.<commit_after>
# Use with # py -i etiquette_easy.py import argparse import os import sys import traceback import etiquette P = etiquette.photodb.PhotoDB() def easytagger(): while True: i = input('> ') if i.startswith('?'): i = i.split('?')[1] or None try: etiquette.tag_export.stdout([P.get_tag(i)]) except: traceback.print_exc() else: P.easybake(i) def photag(photoid): photo = P.get_photo_by_id(photoid) print(photo.tags()) while True: photo.add_tag(input('> ')) get = P.get_tag def erepl_argparse(args): if args.exec_statement: exec(args.exec_statement) def main(argv): parser = argparse.ArgumentParser() parser.add_argument('--exec', dest='exec_statement', default=None) parser.set_defaults(func=erepl_argparse) args = parser.parse_args(argv) args.func(args) if __name__ == '__main__': main(sys.argv[1:])
# Use with # py -i etiquette_easy.py import etiquette import os import sys P = etiquette.photodb.PhotoDB() import traceback def easytagger(): while True: i = input('> ') if i.startswith('?'): i = i.split('?')[1] or None try: etiquette.tag_export.stdout([P.get_tag(i)]) except: traceback.print_exc() else: P.easybake(i) def photag(photoid): photo = P.get_photo_by_id(photoid) print(photo.tags()) while True: photo.add_tag(input('> ')) get=P.get_tag Clean up the erepl code a little bit.# Use with # py -i etiquette_easy.py import argparse import os import sys import traceback import etiquette P = etiquette.photodb.PhotoDB() def easytagger(): while True: i = input('> ') if i.startswith('?'): i = i.split('?')[1] or None try: etiquette.tag_export.stdout([P.get_tag(i)]) except: traceback.print_exc() else: P.easybake(i) def photag(photoid): photo = P.get_photo_by_id(photoid) print(photo.tags()) while True: photo.add_tag(input('> ')) get = P.get_tag def erepl_argparse(args): if args.exec_statement: exec(args.exec_statement) def main(argv): parser = argparse.ArgumentParser() parser.add_argument('--exec', dest='exec_statement', default=None) parser.set_defaults(func=erepl_argparse) args = parser.parse_args(argv) args.func(args) if __name__ == '__main__': main(sys.argv[1:])
<commit_before># Use with # py -i etiquette_easy.py import etiquette import os import sys P = etiquette.photodb.PhotoDB() import traceback def easytagger(): while True: i = input('> ') if i.startswith('?'): i = i.split('?')[1] or None try: etiquette.tag_export.stdout([P.get_tag(i)]) except: traceback.print_exc() else: P.easybake(i) def photag(photoid): photo = P.get_photo_by_id(photoid) print(photo.tags()) while True: photo.add_tag(input('> ')) get=P.get_tag <commit_msg>Clean up the erepl code a little bit.<commit_after># Use with # py -i etiquette_easy.py import argparse import os import sys import traceback import etiquette P = etiquette.photodb.PhotoDB() def easytagger(): while True: i = input('> ') if i.startswith('?'): i = i.split('?')[1] or None try: etiquette.tag_export.stdout([P.get_tag(i)]) except: traceback.print_exc() else: P.easybake(i) def photag(photoid): photo = P.get_photo_by_id(photoid) print(photo.tags()) while True: photo.add_tag(input('> ')) get = P.get_tag def erepl_argparse(args): if args.exec_statement: exec(args.exec_statement) def main(argv): parser = argparse.ArgumentParser() parser.add_argument('--exec', dest='exec_statement', default=None) parser.set_defaults(func=erepl_argparse) args = parser.parse_args(argv) args.func(args) if __name__ == '__main__': main(sys.argv[1:])
b69db7ff67abe185bcf7e8e7badfa868a9ec882c
script/update-frameworks.py
script/update-frameworks.py
#!/usr/bin/env python import sys import os from lib.util import safe_mkdir, extract_zip, tempdir, download SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) FRAMEWORKS_URL = 'http://atom-alpha.s3.amazonaws.com' def main(): os.chdir(SOURCE_ROOT) safe_mkdir('frameworks') download_and_unzip('Mantle') download_and_unzip('ReactiveCocoa') download_and_unzip('Squirrel') def download_and_unzip(framework): zip_path = download_framework(framework) if zip_path: extract_zip(zip_path, 'frameworks') def download_framework(framework): framework_path = os.path.join('frameworks', framework) + '.framework' if os.path.exists(framework_path): return filename = framework + '.framework.zip' url = FRAMEWORKS_URL + '/' + filename download_dir = tempdir(prefix='atom-shell-') path = os.path.join(download_dir, filename) download('Download ' + framework, url, path) return path if __name__ == '__main__': sys.exit(main())
#!/usr/bin/env python import sys import os from lib.util import safe_mkdir, extract_zip, tempdir, download SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) FRAMEWORKS_URL = 'https://github.com/atom/atom-shell/releases/download/v0.11.10' def main(): os.chdir(SOURCE_ROOT) safe_mkdir('frameworks') download_and_unzip('Mantle') download_and_unzip('ReactiveCocoa') download_and_unzip('Squirrel') def download_and_unzip(framework): zip_path = download_framework(framework) if zip_path: extract_zip(zip_path, 'frameworks') def download_framework(framework): framework_path = os.path.join('frameworks', framework) + '.framework' if os.path.exists(framework_path): return filename = framework + '.framework.zip' url = FRAMEWORKS_URL + '/' + filename download_dir = tempdir(prefix='atom-shell-') path = os.path.join(download_dir, filename) download('Download ' + framework, url, path) return path if __name__ == '__main__': sys.exit(main())
Move framework downloads to github release
Move framework downloads to github release
Python
mit
bpasero/electron,fomojola/electron,simonfork/electron,micalan/electron,neutrous/electron,stevemao/electron,jtburke/electron,medixdev/electron,Andrey-Pavlov/electron,yalexx/electron,synaptek/electron,Neron-X5/electron,stevekinney/electron,christian-bromann/electron,benweissmann/electron,mirrh/electron,leftstick/electron,shockone/electron,aecca/electron,adcentury/electron,cqqccqc/electron,brave/electron,jannishuebl/electron,deed02392/electron,christian-bromann/electron,jlhbaseball15/electron,smczk/electron,evgenyzinoviev/electron,gbn972/electron,fabien-d/electron,gamedevsam/electron,jlhbaseball15/electron,fireball-x/atom-shell,tomashanacek/electron,JussMee15/electron,destan/electron,egoist/electron,jjz/electron,Floato/electron,eric-seekas/electron,mrwizard82d1/electron,micalan/electron,brave/electron,micalan/electron,micalan/electron,digideskio/electron,eriser/electron,mjaniszew/electron,gabrielPeart/electron,benweissmann/electron,ervinb/electron,aichingm/electron,cos2004/electron,trankmichael/electron,kikong/electron,timruffles/electron,shaundunne/electron,ankitaggarwal011/electron,zhakui/electron,Jacobichou/electron,felixrieseberg/electron,lrlna/electron,eriser/electron,adcentury/electron,destan/electron,twolfson/electron,systembugtj/electron,aliib/electron,minggo/electron,davazp/electron,mattdesl/electron,jacksondc/electron,eric-seekas/electron,Ivshti/electron,preco21/electron,miniak/electron,yan-foto/electron,sircharleswatson/electron,anko/electron,greyhwndz/electron,benweissmann/electron,dongjoon-hyun/electron,fritx/electron,baiwyc119/electron,Gerhut/electron,matiasinsaurralde/electron,yan-foto/electron,tomashanacek/electron,nicholasess/electron,darwin/electron,joneit/electron,Rokt33r/electron,twolfson/electron,kazupon/electron,subblue/electron,gabriel/electron,sky7sea/electron,fffej/electron,xfstudio/electron,vipulroxx/electron,sshiting/electron,vHanda/electron,seanchas116/electron,nicholasess/electron,leolujuyi/electron,trigrass2/electron,JussMee15/electron,mrwizard82d1/electron,beni55/electron,Neron-X5/electron,webmechanicx/electron,shockone/electron,icattlecoder/electron,vaginessa/electron,simonfork/electron,saronwei/electron,leftstick/electron,minggo/electron,MaxWhere/electron,Evercoder/electron,leolujuyi/electron,chrisswk/electron,lrlna/electron,farmisen/electron,aaron-goshine/electron,mhkeller/electron,roadev/electron,Zagorakiss/electron,etiktin/electron,thingsinjars/electron,joaomoreno/atom-shell,BionicClick/electron,jannishuebl/electron,xiruibing/electron,John-Lin/electron,lrlna/electron,icattlecoder/electron,meowlab/electron,twolfson/electron,LadyNaggaga/electron,pirafrank/electron,maxogden/atom-shell,webmechanicx/electron,Jonekee/electron,brave/electron,stevemao/electron,iftekeriba/electron,cos2004/electron,gstack/infinium-shell,bitemyapp/electron,BionicClick/electron,setzer777/electron,Jonekee/electron,iftekeriba/electron,xfstudio/electron,cqqccqc/electron,evgenyzinoviev/electron,jsutcodes/electron,RobertJGabriel/electron,bobwol/electron,pandoraui/electron,xfstudio/electron,ankitaggarwal011/electron,systembugtj/electron,simongregory/electron,coderhaoxin/electron,shockone/electron,MaxGraey/electron,iftekeriba/electron,trankmichael/electron,JussMee15/electron,trankmichael/electron,fomojola/electron,pandoraui/electron,dongjoon-hyun/electron,felixrieseberg/electron,dahal/electron,DivyaKMenon/electron,SufianHassan/electron,tonyganch/electron,Gerhut/electron,Ivshti/electron,posix4e/electron,arusakov/electron,jtburke/electron,the-ress/electron,faizalpribadi/electron,pandoraui/electr
on,thomsonreuters/electron,hokein/atom-shell,faizalpribadi/electron,michaelchiche/electron,d-salas/electron,jacksondc/electron,leolujuyi/electron,michaelchiche/electron,faizalpribadi/electron,sky7sea/electron,bright-sparks/electron,deepak1556/atom-shell,IonicaBizauKitchen/electron,pandoraui/electron,ianscrivener/electron,seanchas116/electron,benweissmann/electron,matiasinsaurralde/electron,yalexx/electron,ervinb/electron,systembugtj/electron,medixdev/electron,Evercoder/electron,gabriel/electron,stevemao/electron,John-Lin/electron,rajatsingla28/electron,d-salas/electron,Zagorakiss/electron,wan-qy/electron,fireball-x/atom-shell,icattlecoder/electron,dahal/electron,rprichard/electron,mhkeller/electron,bruce/electron,Andrey-Pavlov/electron,darwin/electron,christian-bromann/electron,robinvandernoord/electron,MaxGraey/electron,renaesop/electron,medixdev/electron,mattdesl/electron,etiktin/electron,matiasinsaurralde/electron,bobwol/electron,deed02392/electron,saronwei/electron,natgolov/electron,jhen0409/electron,stevemao/electron,Faiz7412/electron,tincan24/electron,kikong/electron,Floato/electron,micalan/electron,jannishuebl/electron,takashi/electron,Andrey-Pavlov/electron,aliib/electron,leolujuyi/electron,simonfork/electron,vaginessa/electron,GoooIce/electron,michaelchiche/electron,takashi/electron,destan/electron,michaelchiche/electron,carsonmcdonald/electron,gamedevsam/electron,aecca/electron,SufianHassan/electron,nicobot/electron,coderhaoxin/electron,michaelchiche/electron,abhishekgahlot/electron,gamedevsam/electron,miniak/electron,pombredanne/electron,cos2004/electron,faizalpribadi/electron,adamjgray/electron,pandoraui/electron,mrwizard82d1/electron,meowlab/electron,rhencke/electron,christian-bromann/electron,IonicaBizauKitchen/electron,biblerule/UMCTelnetHub,mrwizard82d1/electron,jaanus/electron,jiaz/electron,yalexx/electron,thomsonreuters/electron,brenca/electron,rreimann/electron,tomashanacek/electron,arturts/electron,nicholasess/electron,evgenyzinoviev/electron,RIAEvangelist/electron,thompsonemerson/electron,Evercoder/electron,vHanda/electron,renaesop/electron,bwiggs/electron,JesselJohn/electron,gamedevsam/electron,vHanda/electron,wan-qy/electron,stevekinney/electron,matiasinsaurralde/electron,MaxWhere/electron,jjz/electron,rsvip/electron,darwin/electron,gabrielPeart/electron,SufianHassan/electron,gamedevsam/electron,beni55/electron,kazupon/electron,ervinb/electron,John-Lin/electron,brave/electron,fireball-x/atom-shell,pirafrank/electron,renaesop/electron,gstack/infinium-shell,eriser/electron,neutrous/electron,lzpfmh/electron,fabien-d/electron,thompsonemerson/electron,minggo/electron,aichingm/electron,eric-seekas/electron,GoooIce/electron,gerhardberger/electron,voidbridge/electron,Zagorakiss/electron,astoilkov/electron,dkfiresky/electron,preco21/electron,carsonmcdonald/electron,eric-seekas/electron,bpasero/electron,thingsinjars/electron,tylergibson/electron,fomojola/electron,noikiy/electron,icattlecoder/electron,pirafrank/electron,trigrass2/electron,Jacobichou/electron,roadev/electron,Evercoder/electron,Jacobichou/electron,deed02392/electron,mirrh/electron,JesselJohn/electron,jannishuebl/electron,farmisen/electron,davazp/electron,IonicaBizauKitchen/electron,bbondy/electron,jlhbaseball15/electron,electron/electron,Jacobichou/electron,gabrielPeart/electron,oiledCode/electron,zhakui/electron,kenmozi/electron,michaelchiche/electron,benweissmann/electron,mjaniszew/electron,simongregory/electron,chriskdon/electron,davazp/electron,maxogden/atom-shell,LadyNaggaga/electron,pombredanne/electron,cars
onmcdonald/electron,meowlab/electron,Jacobichou/electron,rprichard/electron,christian-bromann/electron,electron/electron,pirafrank/electron,seanchas116/electron,trankmichael/electron,kcrt/electron,jiaz/electron,dkfiresky/electron,fabien-d/electron,chriskdon/electron,fritx/electron,mattdesl/electron,rhencke/electron,christian-bromann/electron,Gerhut/electron,howmuchcomputer/electron,JussMee15/electron,kikong/electron,jcblw/electron,tylergibson/electron,JussMee15/electron,neutrous/electron,kazupon/electron,tinydew4/electron,wolfflow/electron,tomashanacek/electron,subblue/electron,renaesop/electron,chrisswk/electron,leftstick/electron,jhen0409/electron,noikiy/electron,shennushi/electron,vipulroxx/electron,sircharleswatson/electron,oiledCode/electron,yan-foto/electron,cos2004/electron,fomojola/electron,jtburke/electron,RobertJGabriel/electron,coderhaoxin/electron,sshiting/electron,kostia/electron,saronwei/electron,biblerule/UMCTelnetHub,shiftkey/electron,RIAEvangelist/electron,bpasero/electron,dongjoon-hyun/electron,arusakov/electron,chriskdon/electron,the-ress/electron,dkfiresky/electron,webmechanicx/electron,astoilkov/electron,fireball-x/atom-shell,leethomas/electron,Floato/electron,Rokt33r/electron,joaomoreno/atom-shell,the-ress/electron,edulan/electron,fritx/electron,RobertJGabriel/electron,jiaz/electron,webmechanicx/electron,d-salas/electron,ianscrivener/electron,jcblw/electron,kazupon/electron,sky7sea/electron,leftstick/electron,the-ress/electron,RIAEvangelist/electron,faizalpribadi/electron,smczk/electron,simonfork/electron,brave/muon,tonyganch/electron,minggo/electron,shennushi/electron,jiaz/electron,leftstick/electron,DivyaKMenon/electron,jlord/electron,kostia/electron,Gerhut/electron,electron/electron,arturts/electron,rprichard/electron,timruffles/electron,lzpfmh/electron,anko/electron,thomsonreuters/electron,tomashanacek/electron,jannishuebl/electron,kostia/electron,pombredanne/electron,rsvip/electron,tinydew4/electron,aaron-goshine/electron,joneit/electron,jacksondc/electron,faizalpribadi/electron,brenca/electron,arturts/electron,zhakui/electron,MaxWhere/electron,adamjgray/electron,edulan/electron,bitemyapp/electron,brave/muon,sircharleswatson/electron,jcblw/electron,gbn972/electron,davazp/electron,astoilkov/electron,aichingm/electron,evgenyzinoviev/electron,jonatasfreitasv/electron,MaxGraey/electron,brave/muon,d-salas/electron,nagyistoce/electron-atom-shell,vipulroxx/electron,Rokt33r/electron,subblue/electron,maxogden/atom-shell,setzer777/electron,jhen0409/electron,jlhbaseball15/electron,rajatsingla28/electron,greyhwndz/electron,hokein/atom-shell,evgenyzinoviev/electron,mubassirhayat/electron,destan/electron,jiaz/electron,edulan/electron,ianscrivener/electron,kokdemo/electron,xfstudio/electron,vaginessa/electron,LadyNaggaga/electron,electron/electron,noikiy/electron,posix4e/electron,the-ress/electron,twolfson/electron,matiasinsaurralde/electron,trankmichael/electron,RobertJGabriel/electron,rsvip/electron,nicobot/electron,wolfflow/electron,aaron-goshine/electron,brenca/electron,mjaniszew/electron,pandoraui/electron,IonicaBizauKitchen/electron,saronwei/electron,jtburke/electron,dongjoon-hyun/electron,mirrh/electron,miniak/electron,kcrt/electron,roadev/electron,arturts/electron,ianscrivener/electron,synaptek/electron,neutrous/electron,kokdemo/electron,howmuchcomputer/electron,micalan/electron,bitemyapp/electron,thompsonemerson/electron,bbondy/electron,bright-sparks/electron,leethomas/electron,miniak/electron,kokdemo/electron,timruffles/electron,howmuchcomputer/electron,joaomoreno/atom
-shell,sircharleswatson/electron,Faiz7412/electron,etiktin/electron,Floato/electron,kenmozi/electron,rreimann/electron,gstack/infinium-shell,nicholasess/electron,MaxWhere/electron,mrwizard82d1/electron,eriser/electron,thingsinjars/electron,shiftkey/electron,aaron-goshine/electron,thompsonemerson/electron,John-Lin/electron,tonyganch/electron,John-Lin/electron,leolujuyi/electron,bpasero/electron,vHanda/electron,jlord/electron,GoooIce/electron,medixdev/electron,gabrielPeart/electron,tinydew4/electron,yan-foto/electron,preco21/electron,jhen0409/electron,biblerule/UMCTelnetHub,LadyNaggaga/electron,jonatasfreitasv/electron,nicobot/electron,Rokt33r/electron,roadev/electron,natgolov/electron,carsonmcdonald/electron,aliib/electron,ankitaggarwal011/electron,adamjgray/electron,brave/muon,jcblw/electron,bobwol/electron,shaundunne/electron,natgolov/electron,BionicClick/electron,digideskio/electron,kcrt/electron,bruce/electron,miniak/electron,noikiy/electron,setzer777/electron,aecca/electron,Neron-X5/electron,iftekeriba/electron,robinvandernoord/electron,bruce/electron,arturts/electron,MaxWhere/electron,tincan24/electron,wan-qy/electron,shennushi/electron,gerhardberger/electron,coderhaoxin/electron,oiledCode/electron,simongregory/electron,nagyistoce/electron-atom-shell,setzer777/electron,fomojola/electron,posix4e/electron,dkfiresky/electron,simongregory/electron,yan-foto/electron,beni55/electron,chrisswk/electron,pombredanne/electron,anko/electron,mubassirhayat/electron,DivyaKMenon/electron,jjz/electron,jaanus/electron,adcentury/electron,tylergibson/electron,jsutcodes/electron,dongjoon-hyun/electron,thompsonemerson/electron,joneit/electron,deepak1556/atom-shell,mrwizard82d1/electron,tomashanacek/electron,aecca/electron,vipulroxx/electron,nagyistoce/electron-atom-shell,Gerhut/electron,sshiting/electron,Andrey-Pavlov/electron,jsutcodes/electron,synaptek/electron,astoilkov/electron,digideskio/electron,eriser/electron,trigrass2/electron,xiruibing/electron,mattotodd/electron,kenmozi/electron,nicobot/electron,yalexx/electron,shaundunne/electron,rhencke/electron,chrisswk/electron,mhkeller/electron,egoist/electron,zhakui/electron,fffej/electron,jsutcodes/electron,soulteary/electron,ervinb/electron,bbondy/electron,brenca/electron,bruce/electron,joneit/electron,Gerhut/electron,meowlab/electron,gabriel/electron,shiftkey/electron,aecca/electron,thingsinjars/electron,xiruibing/electron,rreimann/electron,yalexx/electron,Neron-X5/electron,meowlab/electron,mubassirhayat/electron,chriskdon/electron,RobertJGabriel/electron,rajatsingla28/electron,arturts/electron,maxogden/atom-shell,fritx/electron,nicobot/electron,zhakui/electron,rsvip/electron,Faiz7412/electron,abhishekgahlot/electron,Faiz7412/electron,leethomas/electron,soulteary/electron,renaesop/electron,voidbridge/electron,gerhardberger/electron,kostia/electron,leftstick/electron,tincan24/electron,jonatasfreitasv/electron,dahal/electron,d-salas/electron,rajatsingla28/electron,thomsonreuters/electron,pombredanne/electron,wan-qy/electron,benweissmann/electron,abhishekgahlot/electron,systembugtj/electron,vipulroxx/electron,mirrh/electron,tonyganch/electron,deepak1556/atom-shell,thingsinjars/electron,saronwei/electron,mattdesl/electron,lzpfmh/electron,yan-foto/electron,shockone/electron,fritx/electron,zhakui/electron,aichingm/electron,oiledCode/electron,jonatasfreitasv/electron,rajatsingla28/electron,jaanus/electron,fffej/electron,smczk/electron,bright-sparks/electron,adcentury/electron,BionicClick/electron,John-Lin/electron,rajatsingla28/electron,ervinb/electron,farmisen
/electron,hokein/atom-shell,JesselJohn/electron,vipulroxx/electron,noikiy/electron,bwiggs/electron,Jonekee/electron,seanchas116/electron,tinydew4/electron,bwiggs/electron,lzpfmh/electron,trankmichael/electron,jlord/electron,abhishekgahlot/electron,biblerule/UMCTelnetHub,DivyaKMenon/electron,thompsonemerson/electron,GoooIce/electron,Zagorakiss/electron,jiaz/electron,systembugtj/electron,Neron-X5/electron,shennushi/electron,evgenyzinoviev/electron,pirafrank/electron,aaron-goshine/electron,baiwyc119/electron,jlord/electron,etiktin/electron,tylergibson/electron,stevekinney/electron,kcrt/electron,vaginessa/electron,tinydew4/electron,baiwyc119/electron,electron/electron,bbondy/electron,LadyNaggaga/electron,Zagorakiss/electron,nekuz0r/electron,sshiting/electron,voidbridge/electron,jtburke/electron,jonatasfreitasv/electron,mjaniszew/electron,dkfiresky/electron,electron/electron,bitemyapp/electron,destan/electron,Jacobichou/electron,JesselJohn/electron,synaptek/electron,aaron-goshine/electron,mattotodd/electron,trigrass2/electron,cos2004/electron,medixdev/electron,Floato/electron,dahal/electron,stevemao/electron,arusakov/electron,baiwyc119/electron,gabriel/electron,stevekinney/electron,etiktin/electron,joaomoreno/atom-shell,JesselJohn/electron,posix4e/electron,aecca/electron,fffej/electron,simongregory/electron,felixrieseberg/electron,bbondy/electron,cos2004/electron,rhencke/electron,Ivshti/electron,soulteary/electron,edulan/electron,xiruibing/electron,RIAEvangelist/electron,IonicaBizauKitchen/electron,stevemao/electron,fffej/electron,davazp/electron,ankitaggarwal011/electron,bpasero/electron,farmisen/electron,edulan/electron,deed02392/electron,takashi/electron,howmuchcomputer/electron,digideskio/electron,tincan24/electron,medixdev/electron,deed02392/electron,pombredanne/electron,jcblw/electron,rprichard/electron,howmuchcomputer/electron,kostia/electron,aichingm/electron,greyhwndz/electron,smczk/electron,deepak1556/atom-shell,takashi/electron,meowlab/electron,brenca/electron,thomsonreuters/electron,adcentury/electron,bwiggs/electron,ankitaggarwal011/electron,kokdemo/electron,sky7sea/electron,aichingm/electron,wolfflow/electron,JussMee15/electron,anko/electron,coderhaoxin/electron,natgolov/electron,joaomoreno/atom-shell,gbn972/electron,Neron-X5/electron,jaanus/electron,nicholasess/electron,timruffles/electron,simonfork/electron,lrlna/electron,bwiggs/electron,jsutcodes/electron,nekuz0r/electron,shockone/electron,jcblw/electron,digideskio/electron,dahal/electron,mattdesl/electron,jacksondc/electron,rhencke/electron,shennushi/electron,the-ress/electron,oiledCode/electron,subblue/electron,shiftkey/electron,preco21/electron,rreimann/electron,d-salas/electron,miniak/electron,bpasero/electron,cqqccqc/electron,xfstudio/electron,mhkeller/electron,trigrass2/electron,Evercoder/electron,systembugtj/electron,gstack/infinium-shell,felixrieseberg/electron,electron/electron,kostia/electron,fabien-d/electron,MaxGraey/electron,neutrous/electron,nicobot/electron,rsvip/electron,Ivshti/electron,tylergibson/electron,beni55/electron,xiruibing/electron,simonfork/electron,shiftkey/electron,kenmozi/electron,mhkeller/electron,aliib/electron,astoilkov/electron,fffej/electron,jacksondc/electron,natgolov/electron,vHanda/electron,kokdemo/electron,gabriel/electron,minggo/electron,tincan24/electron,farmisen/electron,kikong/electron,dkfiresky/electron,leethomas/electron,xfstudio/electron,greyhwndz/electron,joaomoreno/atom-shell,bwiggs/electron,SufianHassan/electron,kcrt/electron,Evercoder/electron,lzpfmh/electron,baiwyc119/electron,b
pasero/electron,gerhardberger/electron,seanchas116/electron,subblue/electron,biblerule/UMCTelnetHub,wolfflow/electron,bright-sparks/electron,bbondy/electron,jhen0409/electron,jacksondc/electron,beni55/electron,joneit/electron,SufianHassan/electron,adcentury/electron,ianscrivener/electron,renaesop/electron,carsonmcdonald/electron,jlord/electron,felixrieseberg/electron,DivyaKMenon/electron,eric-seekas/electron,setzer777/electron,tinydew4/electron,SufianHassan/electron,vaginessa/electron,Ivshti/electron,dahal/electron,GoooIce/electron,chriskdon/electron,fritx/electron,roadev/electron,arusakov/electron,gamedevsam/electron,aliib/electron,davazp/electron,egoist/electron,synaptek/electron,vaginessa/electron,anko/electron,tylergibson/electron,shiftkey/electron,Andrey-Pavlov/electron,smczk/electron,astoilkov/electron,egoist/electron,lrlna/electron,Jonekee/electron,farmisen/electron,mattotodd/electron,howmuchcomputer/electron,egoist/electron,posix4e/electron,edulan/electron,fireball-x/atom-shell,greyhwndz/electron,destan/electron,jjz/electron,darwin/electron,robinvandernoord/electron,webmechanicx/electron,etiktin/electron,yalexx/electron,seanchas116/electron,shaundunne/electron,jsutcodes/electron,saronwei/electron,JesselJohn/electron,gabrielPeart/electron,abhishekgahlot/electron,bright-sparks/electron,twolfson/electron,mirrh/electron,Andrey-Pavlov/electron,Floato/electron,iftekeriba/electron,digideskio/electron,thomsonreuters/electron,nekuz0r/electron,tonyganch/electron,leolujuyi/electron,kenmozi/electron,brave/muon,Jonekee/electron,ankitaggarwal011/electron,beni55/electron,voidbridge/electron,ervinb/electron,jannishuebl/electron,BionicClick/electron,matiasinsaurralde/electron,voidbridge/electron,kikong/electron,stevekinney/electron,preco21/electron,shennushi/electron,cqqccqc/electron,baiwyc119/electron,kokdemo/electron,mirrh/electron,soulteary/electron,soulteary/electron,chrisswk/electron,RIAEvangelist/electron,carsonmcdonald/electron,Zagorakiss/electron,Faiz7412/electron,jjz/electron,jhen0409/electron,mattotodd/electron,robinvandernoord/electron,maxogden/atom-shell,lrlna/electron,IonicaBizauKitchen/electron,jaanus/electron,nekuz0r/electron,mubassirhayat/electron,robinvandernoord/electron,wan-qy/electron,cqqccqc/electron,gabriel/electron,mhkeller/electron,soulteary/electron,subblue/electron,gbn972/electron,icattlecoder/electron,smczk/electron,posix4e/electron,oiledCode/electron,synaptek/electron,preco21/electron,deepak1556/atom-shell,sky7sea/electron,bitemyapp/electron,Rokt33r/electron,brenca/electron,gerhardberger/electron,nagyistoce/electron-atom-shell,jonatasfreitasv/electron,thingsinjars/electron,joneit/electron,trigrass2/electron,brave/electron,bobwol/electron,arusakov/electron,hokein/atom-shell,takashi/electron,nagyistoce/electron-atom-shell,gstack/infinium-shell,kenmozi/electron,gabrielPeart/electron,leethomas/electron,wolfflow/electron,brave/electron,lzpfmh/electron,mubassirhayat/electron,DivyaKMenon/electron,mattotodd/electron,RIAEvangelist/electron,timruffles/electron,jlhbaseball15/electron,bruce/electron,RobertJGabriel/electron,gerhardberger/electron,setzer777/electron,biblerule/UMCTelnetHub,wan-qy/electron,sky7sea/electron,GoooIce/electron,gbn972/electron,jjz/electron,bitemyapp/electron,eric-seekas/electron,kazupon/electron,mattdesl/electron,roadev/electron,hokein/atom-shell,jaanus/electron,sshiting/electron,LadyNaggaga/electron,Jonekee/electron,deed02392/electron,pirafrank/electron,kazupon/electron,takashi/electron,coderhaoxin/electron,icattlecoder/electron,mattotodd/electron,Rokt33r/el
ectron,jtburke/electron,arusakov/electron,cqqccqc/electron,wolfflow/electron,sircharleswatson/electron,bobwol/electron,rhencke/electron,brave/muon,nekuz0r/electron,simongregory/electron,darwin/electron,adamjgray/electron,mjaniszew/electron,gerhardberger/electron,stevekinney/electron,abhishekgahlot/electron,bruce/electron,shaundunne/electron,rreimann/electron,natgolov/electron,fomojola/electron,xiruibing/electron,dongjoon-hyun/electron,aliib/electron,shaundunne/electron,chriskdon/electron,rreimann/electron,nekuz0r/electron,MaxGraey/electron,kcrt/electron,BionicClick/electron,sshiting/electron,eriser/electron,nicholasess/electron,tincan24/electron,voidbridge/electron,neutrous/electron,sircharleswatson/electron,MaxWhere/electron,felixrieseberg/electron,adamjgray/electron,minggo/electron,twolfson/electron,adamjgray/electron,anko/electron,shockone/electron,gbn972/electron,robinvandernoord/electron,the-ress/electron,fabien-d/electron,greyhwndz/electron,iftekeriba/electron,egoist/electron,leethomas/electron,jlhbaseball15/electron,vHanda/electron,mjaniszew/electron,tonyganch/electron,bright-sparks/electron,bobwol/electron,webmechanicx/electron,ianscrivener/electron,noikiy/electron
#!/usr/bin/env python import sys import os from lib.util import safe_mkdir, extract_zip, tempdir, download SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) FRAMEWORKS_URL = 'http://atom-alpha.s3.amazonaws.com' def main(): os.chdir(SOURCE_ROOT) safe_mkdir('frameworks') download_and_unzip('Mantle') download_and_unzip('ReactiveCocoa') download_and_unzip('Squirrel') def download_and_unzip(framework): zip_path = download_framework(framework) if zip_path: extract_zip(zip_path, 'frameworks') def download_framework(framework): framework_path = os.path.join('frameworks', framework) + '.framework' if os.path.exists(framework_path): return filename = framework + '.framework.zip' url = FRAMEWORKS_URL + '/' + filename download_dir = tempdir(prefix='atom-shell-') path = os.path.join(download_dir, filename) download('Download ' + framework, url, path) return path if __name__ == '__main__': sys.exit(main()) Move framework downloads to github release
#!/usr/bin/env python import sys import os from lib.util import safe_mkdir, extract_zip, tempdir, download SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) FRAMEWORKS_URL = 'https://github.com/atom/atom-shell/releases/download/v0.11.10' def main(): os.chdir(SOURCE_ROOT) safe_mkdir('frameworks') download_and_unzip('Mantle') download_and_unzip('ReactiveCocoa') download_and_unzip('Squirrel') def download_and_unzip(framework): zip_path = download_framework(framework) if zip_path: extract_zip(zip_path, 'frameworks') def download_framework(framework): framework_path = os.path.join('frameworks', framework) + '.framework' if os.path.exists(framework_path): return filename = framework + '.framework.zip' url = FRAMEWORKS_URL + '/' + filename download_dir = tempdir(prefix='atom-shell-') path = os.path.join(download_dir, filename) download('Download ' + framework, url, path) return path if __name__ == '__main__': sys.exit(main())
<commit_before>#!/usr/bin/env python import sys import os from lib.util import safe_mkdir, extract_zip, tempdir, download SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) FRAMEWORKS_URL = 'http://atom-alpha.s3.amazonaws.com' def main(): os.chdir(SOURCE_ROOT) safe_mkdir('frameworks') download_and_unzip('Mantle') download_and_unzip('ReactiveCocoa') download_and_unzip('Squirrel') def download_and_unzip(framework): zip_path = download_framework(framework) if zip_path: extract_zip(zip_path, 'frameworks') def download_framework(framework): framework_path = os.path.join('frameworks', framework) + '.framework' if os.path.exists(framework_path): return filename = framework + '.framework.zip' url = FRAMEWORKS_URL + '/' + filename download_dir = tempdir(prefix='atom-shell-') path = os.path.join(download_dir, filename) download('Download ' + framework, url, path) return path if __name__ == '__main__': sys.exit(main()) <commit_msg>Move framework downloads to github release<commit_after>
#!/usr/bin/env python import sys import os from lib.util import safe_mkdir, extract_zip, tempdir, download SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) FRAMEWORKS_URL = 'https://github.com/atom/atom-shell/releases/download/v0.11.10' def main(): os.chdir(SOURCE_ROOT) safe_mkdir('frameworks') download_and_unzip('Mantle') download_and_unzip('ReactiveCocoa') download_and_unzip('Squirrel') def download_and_unzip(framework): zip_path = download_framework(framework) if zip_path: extract_zip(zip_path, 'frameworks') def download_framework(framework): framework_path = os.path.join('frameworks', framework) + '.framework' if os.path.exists(framework_path): return filename = framework + '.framework.zip' url = FRAMEWORKS_URL + '/' + filename download_dir = tempdir(prefix='atom-shell-') path = os.path.join(download_dir, filename) download('Download ' + framework, url, path) return path if __name__ == '__main__': sys.exit(main())
#!/usr/bin/env python import sys import os from lib.util import safe_mkdir, extract_zip, tempdir, download SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) FRAMEWORKS_URL = 'http://atom-alpha.s3.amazonaws.com' def main(): os.chdir(SOURCE_ROOT) safe_mkdir('frameworks') download_and_unzip('Mantle') download_and_unzip('ReactiveCocoa') download_and_unzip('Squirrel') def download_and_unzip(framework): zip_path = download_framework(framework) if zip_path: extract_zip(zip_path, 'frameworks') def download_framework(framework): framework_path = os.path.join('frameworks', framework) + '.framework' if os.path.exists(framework_path): return filename = framework + '.framework.zip' url = FRAMEWORKS_URL + '/' + filename download_dir = tempdir(prefix='atom-shell-') path = os.path.join(download_dir, filename) download('Download ' + framework, url, path) return path if __name__ == '__main__': sys.exit(main()) Move framework downloads to github release#!/usr/bin/env python import sys import os from lib.util import safe_mkdir, extract_zip, tempdir, download SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) FRAMEWORKS_URL = 'https://github.com/atom/atom-shell/releases/download/v0.11.10' def main(): os.chdir(SOURCE_ROOT) safe_mkdir('frameworks') download_and_unzip('Mantle') download_and_unzip('ReactiveCocoa') download_and_unzip('Squirrel') def download_and_unzip(framework): zip_path = download_framework(framework) if zip_path: extract_zip(zip_path, 'frameworks') def download_framework(framework): framework_path = os.path.join('frameworks', framework) + '.framework' if os.path.exists(framework_path): return filename = framework + '.framework.zip' url = FRAMEWORKS_URL + '/' + filename download_dir = tempdir(prefix='atom-shell-') path = os.path.join(download_dir, filename) download('Download ' + framework, url, path) return path if __name__ == '__main__': sys.exit(main())
<commit_before>#!/usr/bin/env python import sys import os from lib.util import safe_mkdir, extract_zip, tempdir, download SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) FRAMEWORKS_URL = 'http://atom-alpha.s3.amazonaws.com' def main(): os.chdir(SOURCE_ROOT) safe_mkdir('frameworks') download_and_unzip('Mantle') download_and_unzip('ReactiveCocoa') download_and_unzip('Squirrel') def download_and_unzip(framework): zip_path = download_framework(framework) if zip_path: extract_zip(zip_path, 'frameworks') def download_framework(framework): framework_path = os.path.join('frameworks', framework) + '.framework' if os.path.exists(framework_path): return filename = framework + '.framework.zip' url = FRAMEWORKS_URL + '/' + filename download_dir = tempdir(prefix='atom-shell-') path = os.path.join(download_dir, filename) download('Download ' + framework, url, path) return path if __name__ == '__main__': sys.exit(main()) <commit_msg>Move framework downloads to github release<commit_after>#!/usr/bin/env python import sys import os from lib.util import safe_mkdir, extract_zip, tempdir, download SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) FRAMEWORKS_URL = 'https://github.com/atom/atom-shell/releases/download/v0.11.10' def main(): os.chdir(SOURCE_ROOT) safe_mkdir('frameworks') download_and_unzip('Mantle') download_and_unzip('ReactiveCocoa') download_and_unzip('Squirrel') def download_and_unzip(framework): zip_path = download_framework(framework) if zip_path: extract_zip(zip_path, 'frameworks') def download_framework(framework): framework_path = os.path.join('frameworks', framework) + '.framework' if os.path.exists(framework_path): return filename = framework + '.framework.zip' url = FRAMEWORKS_URL + '/' + filename download_dir = tempdir(prefix='atom-shell-') path = os.path.join(download_dir, filename) download('Download ' + framework, url, path) return path if __name__ == '__main__': sys.exit(main())
04110d34b5f385103a77e0a1459e984d8210fa92
updates/models.py
updates/models.py
from django.db import models from cms.models import CMSPlugin from django.utils.translation import ugettext_lazy as _ class Update(models.Model): """ Defines a date on which updates were made. """ date = models.DateField(_('Update Date')) def __str__(self): return str(self.date) class Meta: verbose_name = 'update' verbose_name_plural = 'updates' app_label = 'updates' class UpdateItem(models.Model): """ Defines one or many items that were updated on a single date. """ update = models.ForeignKey(Update) item = models.CharField(_('Update Item'), max_length=256) description = models.TextField( _('Item Description (Optional)'), help_text="This field is optional and should only be used when the description is too long for the sidebar plugin.", null=True, blank=True ) def __str__(self): return self.item class Meta: verbose_name = 'updateitem' verbose_name_plural = 'updateitems' app_label = 'updates' class UpdatesPlugin(CMSPlugin): NUMBER_CHOICES = ( (5, 5), (10, 10), (25, 25), (100, 100), ) number_to_show = models.IntegerField( max_length=10, choices=NUMBER_CHOICES)
from django.db import models from cms.models import CMSPlugin from django.utils.translation import ugettext_lazy as _ class Update(models.Model): """ Defines a date on which updates were made. """ date = models.DateField(_('Update Date')) def __str__(self): return str(self.date) class Meta: verbose_name = 'update' verbose_name_plural = 'updates' app_label = 'updates' class UpdateItem(models.Model): """ Defines one or many items that were updated on a single date. """ update = models.ForeignKey(Update) item = models.CharField(_('Update Item'), max_length=256) description = models.TextField( _('Item Description (Optional)'), help_text="This field is optional and should only be used when the description is too long for the sidebar plugin.", null=True, blank=True ) def __str__(self): return self.item class Meta: verbose_name = 'updateitem' verbose_name_plural = 'updateitems' app_label = 'updates' class UpdatesPlugin(CMSPlugin): number_to_show = models.IntegerField( max_length=10,)
Remove choices for any number of updates.
Remove choices for any number of updates.
Python
bsd-3-clause
theherk/django-theherk-updates
from django.db import models from cms.models import CMSPlugin from django.utils.translation import ugettext_lazy as _ class Update(models.Model): """ Defines a date on which updates were made. """ date = models.DateField(_('Update Date')) def __str__(self): return str(self.date) class Meta: verbose_name = 'update' verbose_name_plural = 'updates' app_label = 'updates' class UpdateItem(models.Model): """ Defines one or many items that were updated on a single date. """ update = models.ForeignKey(Update) item = models.CharField(_('Update Item'), max_length=256) description = models.TextField( _('Item Description (Optional)'), help_text="This field is optional and should only be used when the description is too long for the sidebar plugin.", null=True, blank=True ) def __str__(self): return self.item class Meta: verbose_name = 'updateitem' verbose_name_plural = 'updateitems' app_label = 'updates' class UpdatesPlugin(CMSPlugin): NUMBER_CHOICES = ( (5, 5), (10, 10), (25, 25), (100, 100), ) number_to_show = models.IntegerField( max_length=10, choices=NUMBER_CHOICES) Remove choices for any number of updates.
from django.db import models from cms.models import CMSPlugin from django.utils.translation import ugettext_lazy as _ class Update(models.Model): """ Defines a date on which updates were made. """ date = models.DateField(_('Update Date')) def __str__(self): return str(self.date) class Meta: verbose_name = 'update' verbose_name_plural = 'updates' app_label = 'updates' class UpdateItem(models.Model): """ Defines one or many items that were updated on a single date. """ update = models.ForeignKey(Update) item = models.CharField(_('Update Item'), max_length=256) description = models.TextField( _('Item Description (Optional)'), help_text="This field is optional and should only be used when the description is too long for the sidebar plugin.", null=True, blank=True ) def __str__(self): return self.item class Meta: verbose_name = 'updateitem' verbose_name_plural = 'updateitems' app_label = 'updates' class UpdatesPlugin(CMSPlugin): number_to_show = models.IntegerField( max_length=10,)
<commit_before>from django.db import models from cms.models import CMSPlugin from django.utils.translation import ugettext_lazy as _ class Update(models.Model): """ Defines a date on which updates were made. """ date = models.DateField(_('Update Date')) def __str__(self): return str(self.date) class Meta: verbose_name = 'update' verbose_name_plural = 'updates' app_label = 'updates' class UpdateItem(models.Model): """ Defines one or many items that were updated on a single date. """ update = models.ForeignKey(Update) item = models.CharField(_('Update Item'), max_length=256) description = models.TextField( _('Item Description (Optional)'), help_text="This field is optional and should only be used when the description is too long for the sidebar plugin.", null=True, blank=True ) def __str__(self): return self.item class Meta: verbose_name = 'updateitem' verbose_name_plural = 'updateitems' app_label = 'updates' class UpdatesPlugin(CMSPlugin): NUMBER_CHOICES = ( (5, 5), (10, 10), (25, 25), (100, 100), ) number_to_show = models.IntegerField( max_length=10, choices=NUMBER_CHOICES) <commit_msg>Remove choices for any number of updates.<commit_after>
from django.db import models from cms.models import CMSPlugin from django.utils.translation import ugettext_lazy as _ class Update(models.Model): """ Defines a date on which updates were made. """ date = models.DateField(_('Update Date')) def __str__(self): return str(self.date) class Meta: verbose_name = 'update' verbose_name_plural = 'updates' app_label = 'updates' class UpdateItem(models.Model): """ Defines one or many items that were updated on a single date. """ update = models.ForeignKey(Update) item = models.CharField(_('Update Item'), max_length=256) description = models.TextField( _('Item Description (Optional)'), help_text="This field is optional and should only be used when the description is too long for the sidebar plugin.", null=True, blank=True ) def __str__(self): return self.item class Meta: verbose_name = 'updateitem' verbose_name_plural = 'updateitems' app_label = 'updates' class UpdatesPlugin(CMSPlugin): number_to_show = models.IntegerField( max_length=10,)
from django.db import models from cms.models import CMSPlugin from django.utils.translation import ugettext_lazy as _ class Update(models.Model): """ Defines a date on which updates were made. """ date = models.DateField(_('Update Date')) def __str__(self): return str(self.date) class Meta: verbose_name = 'update' verbose_name_plural = 'updates' app_label = 'updates' class UpdateItem(models.Model): """ Defines one or many items that were updated on a single date. """ update = models.ForeignKey(Update) item = models.CharField(_('Update Item'), max_length=256) description = models.TextField( _('Item Description (Optional)'), help_text="This field is optional and should only be used when the description is too long for the sidebar plugin.", null=True, blank=True ) def __str__(self): return self.item class Meta: verbose_name = 'updateitem' verbose_name_plural = 'updateitems' app_label = 'updates' class UpdatesPlugin(CMSPlugin): NUMBER_CHOICES = ( (5, 5), (10, 10), (25, 25), (100, 100), ) number_to_show = models.IntegerField( max_length=10, choices=NUMBER_CHOICES) Remove choices for any number of updates.from django.db import models from cms.models import CMSPlugin from django.utils.translation import ugettext_lazy as _ class Update(models.Model): """ Defines a date on which updates were made. """ date = models.DateField(_('Update Date')) def __str__(self): return str(self.date) class Meta: verbose_name = 'update' verbose_name_plural = 'updates' app_label = 'updates' class UpdateItem(models.Model): """ Defines one or many items that were updated on a single date. """ update = models.ForeignKey(Update) item = models.CharField(_('Update Item'), max_length=256) description = models.TextField( _('Item Description (Optional)'), help_text="This field is optional and should only be used when the description is too long for the sidebar plugin.", null=True, blank=True ) def __str__(self): return self.item class Meta: verbose_name = 'updateitem' verbose_name_plural = 'updateitems' app_label = 'updates' class UpdatesPlugin(CMSPlugin): number_to_show = models.IntegerField( max_length=10,)
<commit_before>from django.db import models from cms.models import CMSPlugin from django.utils.translation import ugettext_lazy as _ class Update(models.Model): """ Defines a date on which updates were made. """ date = models.DateField(_('Update Date')) def __str__(self): return str(self.date) class Meta: verbose_name = 'update' verbose_name_plural = 'updates' app_label = 'updates' class UpdateItem(models.Model): """ Defines one or many items that were updated on a single date. """ update = models.ForeignKey(Update) item = models.CharField(_('Update Item'), max_length=256) description = models.TextField( _('Item Description (Optional)'), help_text="This field is optional and should only be used when the description is too long for the sidebar plugin.", null=True, blank=True ) def __str__(self): return self.item class Meta: verbose_name = 'updateitem' verbose_name_plural = 'updateitems' app_label = 'updates' class UpdatesPlugin(CMSPlugin): NUMBER_CHOICES = ( (5, 5), (10, 10), (25, 25), (100, 100), ) number_to_show = models.IntegerField( max_length=10, choices=NUMBER_CHOICES) <commit_msg>Remove choices for any number of updates.<commit_after>from django.db import models from cms.models import CMSPlugin from django.utils.translation import ugettext_lazy as _ class Update(models.Model): """ Defines a date on which updates were made. """ date = models.DateField(_('Update Date')) def __str__(self): return str(self.date) class Meta: verbose_name = 'update' verbose_name_plural = 'updates' app_label = 'updates' class UpdateItem(models.Model): """ Defines one or many items that were updated on a single date. """ update = models.ForeignKey(Update) item = models.CharField(_('Update Item'), max_length=256) description = models.TextField( _('Item Description (Optional)'), help_text="This field is optional and should only be used when the description is too long for the sidebar plugin.", null=True, blank=True ) def __str__(self): return self.item class Meta: verbose_name = 'updateitem' verbose_name_plural = 'updateitems' app_label = 'updates' class UpdatesPlugin(CMSPlugin): number_to_show = models.IntegerField( max_length=10,)
4efd5de76f9f192ab9ceb73254e500c47c46090a
django_git/management/commands/git_pull_utils/multiple_repo_updater.py
django_git/management/commands/git_pull_utils/multiple_repo_updater.py
import os import traceback from django.utils import timezone from django_git.management.commands.git_pull_utils.git_synchronizer import GitSynchronizer def no_action(msg): pass try: from iconizer.gui_client.notification_service_client import NotificationServiceClient notification_method = NotificationServiceClient().notify except: notification_method = no_action def pull_all_in_enumerable(enum_method): for repo in enum_method(): if os.path.exists(repo.full_path): p = GitSynchronizer(repo.full_path, NotificationServiceClient().notify) success = False try: p.pull_all_branches() print "pull and push done", p.sync_msg success = True except: traceback.print_exc() print "Pull error for: %s" % repo.full_path repo.last_checked = timezone.now() repo.is_last_pull_success = success repo.save()
import os import traceback from django.utils import timezone from django_git.management.commands.git_pull_utils.git_synchronizer import GitSynchronizer def no_action(msg): pass try: from iconizer.gui_client.notification_service_client import NotificationServiceClient notification_method = NotificationServiceClient().notify except: notification_method = no_action def pull_all_in_enumerable(enum_method): for repo in enum_method(): if os.path.exists(repo.full_path): p = GitSynchronizer(repo.full_path, notification_method()) success = False try: p.pull_all_branches() print "pull and push done", p.sync_msg success = True except: traceback.print_exc() print "Pull error for: %s" % repo.full_path repo.last_checked = timezone.now() repo.is_last_pull_success = success repo.save()
Fix no notification service client issue.
Fix no notification service client issue.
Python
bsd-3-clause
weijia/django-git,weijia/django-git
import os import traceback from django.utils import timezone from django_git.management.commands.git_pull_utils.git_synchronizer import GitSynchronizer def no_action(msg): pass try: from iconizer.gui_client.notification_service_client import NotificationServiceClient notification_method = NotificationServiceClient().notify except: notification_method = no_action def pull_all_in_enumerable(enum_method): for repo in enum_method(): if os.path.exists(repo.full_path): p = GitSynchronizer(repo.full_path, NotificationServiceClient().notify) success = False try: p.pull_all_branches() print "pull and push done", p.sync_msg success = True except: traceback.print_exc() print "Pull error for: %s" % repo.full_path repo.last_checked = timezone.now() repo.is_last_pull_success = success repo.save() Fix no notification service client issue.
import os import traceback from django.utils import timezone from django_git.management.commands.git_pull_utils.git_synchronizer import GitSynchronizer def no_action(msg): pass try: from iconizer.gui_client.notification_service_client import NotificationServiceClient notification_method = NotificationServiceClient().notify except: notification_method = no_action def pull_all_in_enumerable(enum_method): for repo in enum_method(): if os.path.exists(repo.full_path): p = GitSynchronizer(repo.full_path, notification_method()) success = False try: p.pull_all_branches() print "pull and push done", p.sync_msg success = True except: traceback.print_exc() print "Pull error for: %s" % repo.full_path repo.last_checked = timezone.now() repo.is_last_pull_success = success repo.save()
<commit_before>import os import traceback from django.utils import timezone from django_git.management.commands.git_pull_utils.git_synchronizer import GitSynchronizer def no_action(msg): pass try: from iconizer.gui_client.notification_service_client import NotificationServiceClient notification_method = NotificationServiceClient().notify except: notification_method = no_action def pull_all_in_enumerable(enum_method): for repo in enum_method(): if os.path.exists(repo.full_path): p = GitSynchronizer(repo.full_path, NotificationServiceClient().notify) success = False try: p.pull_all_branches() print "pull and push done", p.sync_msg success = True except: traceback.print_exc() print "Pull error for: %s" % repo.full_path repo.last_checked = timezone.now() repo.is_last_pull_success = success repo.save() <commit_msg>Fix no notification service client issue.<commit_after>
import os import traceback from django.utils import timezone from django_git.management.commands.git_pull_utils.git_synchronizer import GitSynchronizer def no_action(msg): pass try: from iconizer.gui_client.notification_service_client import NotificationServiceClient notification_method = NotificationServiceClient().notify except: notification_method = no_action def pull_all_in_enumerable(enum_method): for repo in enum_method(): if os.path.exists(repo.full_path): p = GitSynchronizer(repo.full_path, notification_method()) success = False try: p.pull_all_branches() print "pull and push done", p.sync_msg success = True except: traceback.print_exc() print "Pull error for: %s" % repo.full_path repo.last_checked = timezone.now() repo.is_last_pull_success = success repo.save()
import os import traceback from django.utils import timezone from django_git.management.commands.git_pull_utils.git_synchronizer import GitSynchronizer def no_action(msg): pass try: from iconizer.gui_client.notification_service_client import NotificationServiceClient notification_method = NotificationServiceClient().notify except: notification_method = no_action def pull_all_in_enumerable(enum_method): for repo in enum_method(): if os.path.exists(repo.full_path): p = GitSynchronizer(repo.full_path, NotificationServiceClient().notify) success = False try: p.pull_all_branches() print "pull and push done", p.sync_msg success = True except: traceback.print_exc() print "Pull error for: %s" % repo.full_path repo.last_checked = timezone.now() repo.is_last_pull_success = success repo.save() Fix no notification service client issue.import os import traceback from django.utils import timezone from django_git.management.commands.git_pull_utils.git_synchronizer import GitSynchronizer def no_action(msg): pass try: from iconizer.gui_client.notification_service_client import NotificationServiceClient notification_method = NotificationServiceClient().notify except: notification_method = no_action def pull_all_in_enumerable(enum_method): for repo in enum_method(): if os.path.exists(repo.full_path): p = GitSynchronizer(repo.full_path, notification_method()) success = False try: p.pull_all_branches() print "pull and push done", p.sync_msg success = True except: traceback.print_exc() print "Pull error for: %s" % repo.full_path repo.last_checked = timezone.now() repo.is_last_pull_success = success repo.save()
<commit_before>import os import traceback from django.utils import timezone from django_git.management.commands.git_pull_utils.git_synchronizer import GitSynchronizer def no_action(msg): pass try: from iconizer.gui_client.notification_service_client import NotificationServiceClient notification_method = NotificationServiceClient().notify except: notification_method = no_action def pull_all_in_enumerable(enum_method): for repo in enum_method(): if os.path.exists(repo.full_path): p = GitSynchronizer(repo.full_path, NotificationServiceClient().notify) success = False try: p.pull_all_branches() print "pull and push done", p.sync_msg success = True except: traceback.print_exc() print "Pull error for: %s" % repo.full_path repo.last_checked = timezone.now() repo.is_last_pull_success = success repo.save() <commit_msg>Fix no notification service client issue.<commit_after>import os import traceback from django.utils import timezone from django_git.management.commands.git_pull_utils.git_synchronizer import GitSynchronizer def no_action(msg): pass try: from iconizer.gui_client.notification_service_client import NotificationServiceClient notification_method = NotificationServiceClient().notify except: notification_method = no_action def pull_all_in_enumerable(enum_method): for repo in enum_method(): if os.path.exists(repo.full_path): p = GitSynchronizer(repo.full_path, notification_method()) success = False try: p.pull_all_branches() print "pull and push done", p.sync_msg success = True except: traceback.print_exc() print "Pull error for: %s" % repo.full_path repo.last_checked = timezone.now() repo.is_last_pull_success = success repo.save()
3d037ed7142ed7b1c7382eded4de6443050543ee
vimiv/__init__.py
vimiv/__init__.py
#!/usr/bin/env python3 # encoding: utf-8 try: import argparse import configparser import mimetypes import os import re import shutil import signal import sys from gi import require_version require_version('Gtk', '3.0') from gi.repository import GLib, Gtk, Gdk, GdkPixbuf, Pango from random import shuffle from subprocess import Popen, PIPE from threading import Thread from PIL import Image, ImageEnhance except ImportError as import_error: print(import_error) print("Are all dependencies installed?") sys.exit(1) from vimiv.main import main
#!/usr/bin/env python3 # encoding: utf-8 try: from gi import require_version require_version('Gtk', '3.0') from gi.repository import GLib, Gtk, Gdk, GdkPixbuf, Pango from PIL import Image, ImageEnhance except ImportError as import_error: print(import_error) print("Are all dependencies installed?") sys.exit(1) from vimiv.main import main
Remove standard imports from check in init
Remove standard imports from check in init
Python
mit
karlch/vimiv,karlch/vimiv,karlch/vimiv
#!/usr/bin/env python3 # encoding: utf-8 try: import argparse import configparser import mimetypes import os import re import shutil import signal import sys from gi import require_version require_version('Gtk', '3.0') from gi.repository import GLib, Gtk, Gdk, GdkPixbuf, Pango from random import shuffle from subprocess import Popen, PIPE from threading import Thread from PIL import Image, ImageEnhance except ImportError as import_error: print(import_error) print("Are all dependencies installed?") sys.exit(1) from vimiv.main import main Remove standard imports from check in init
#!/usr/bin/env python3 # encoding: utf-8 try: from gi import require_version require_version('Gtk', '3.0') from gi.repository import GLib, Gtk, Gdk, GdkPixbuf, Pango from PIL import Image, ImageEnhance except ImportError as import_error: print(import_error) print("Are all dependencies installed?") sys.exit(1) from vimiv.main import main
<commit_before>#!/usr/bin/env python3 # encoding: utf-8 try: import argparse import configparser import mimetypes import os import re import shutil import signal import sys from gi import require_version require_version('Gtk', '3.0') from gi.repository import GLib, Gtk, Gdk, GdkPixbuf, Pango from random import shuffle from subprocess import Popen, PIPE from threading import Thread from PIL import Image, ImageEnhance except ImportError as import_error: print(import_error) print("Are all dependencies installed?") sys.exit(1) from vimiv.main import main <commit_msg>Remove standard imports from check in init<commit_after>
#!/usr/bin/env python3 # encoding: utf-8 try: from gi import require_version require_version('Gtk', '3.0') from gi.repository import GLib, Gtk, Gdk, GdkPixbuf, Pango from PIL import Image, ImageEnhance except ImportError as import_error: print(import_error) print("Are all dependencies installed?") sys.exit(1) from vimiv.main import main
#!/usr/bin/env python3 # encoding: utf-8 try: import argparse import configparser import mimetypes import os import re import shutil import signal import sys from gi import require_version require_version('Gtk', '3.0') from gi.repository import GLib, Gtk, Gdk, GdkPixbuf, Pango from random import shuffle from subprocess import Popen, PIPE from threading import Thread from PIL import Image, ImageEnhance except ImportError as import_error: print(import_error) print("Are all dependencies installed?") sys.exit(1) from vimiv.main import main Remove standard imports from check in init#!/usr/bin/env python3 # encoding: utf-8 try: from gi import require_version require_version('Gtk', '3.0') from gi.repository import GLib, Gtk, Gdk, GdkPixbuf, Pango from PIL import Image, ImageEnhance except ImportError as import_error: print(import_error) print("Are all dependencies installed?") sys.exit(1) from vimiv.main import main
<commit_before>#!/usr/bin/env python3 # encoding: utf-8 try: import argparse import configparser import mimetypes import os import re import shutil import signal import sys from gi import require_version require_version('Gtk', '3.0') from gi.repository import GLib, Gtk, Gdk, GdkPixbuf, Pango from random import shuffle from subprocess import Popen, PIPE from threading import Thread from PIL import Image, ImageEnhance except ImportError as import_error: print(import_error) print("Are all dependencies installed?") sys.exit(1) from vimiv.main import main <commit_msg>Remove standard imports from check in init<commit_after>#!/usr/bin/env python3 # encoding: utf-8 try: from gi import require_version require_version('Gtk', '3.0') from gi.repository import GLib, Gtk, Gdk, GdkPixbuf, Pango from PIL import Image, ImageEnhance except ImportError as import_error: print(import_error) print("Are all dependencies installed?") sys.exit(1) from vimiv.main import main
fee6f9753b1b5209f605b6dd329ac5af00f87174
Lib/__init__.py
Lib/__init__.py
"""\ SciPy --- A scientific computing package for Python =================================================== You can support the development of SciPy by purchasing documentation at http://www.trelgol.com It is being distributed for a fee for a limited time to try and raise money for development. Documentation is also available in the docstrings. Available subpackages --------------------- """ try: import pkg_resources as _pr # activate namespace packages (manipulates __path__) del _pr except ImportError: pass from numpy import show_config as show_numpy_config if show_numpy_config is None: raise ImportError,"Cannot import scipy when running from numpy source directory." from numpy import __version__ as __numpy_version__ from __config__ import show as show_config from version import version as __version__ import numpy._import_tools as _ni pkgload = _ni.PackageLoader() del _ni import os as _os SCIPY_IMPORT_VERBOSE = int(_os.environ.get('SCIPY_IMPORT_VERBOSE','0')) pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True) del _os from numpy.testing import ScipyTest test = ScipyTest('scipy').test __all__.append('test') __doc__ += pkgload.get_pkgdocs()
"""\ SciPy --- A scientific computing package for Python =================================================== You can support the development of SciPy by purchasing documentation at http://www.trelgol.com It is being distributed for a fee for a limited time to try and raise money for development. Documentation is also available in the docstrings. Available subpackages --------------------- """ try: import pkg_resources as _pr # activate namespace packages (manipulates __path__) del _pr except ImportError: pass from numpy import show_config as show_numpy_config if show_numpy_config is None: raise ImportError,"Cannot import scipy when running from numpy source directory." from numpy import __version__ as __numpy_version__ from __config__ import show as show_config from version import version as __version__ import numpy._import_tools as _ni pkgload = _ni.PackageLoader() del _ni import os as _os SCIPY_IMPORT_VERBOSE = int(_os.environ.get('SCIPY_IMPORT_VERBOSE','0')) pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True) del _os from numpy.testing import ScipyTest test = ScipyTest('scipy').test __all__.append('test') import numpy as _num from numpy import * __all__ += _num.__all__ del _num __doc__ += pkgload.get_pkgdocs()
Put numpy namespace in scipy for backward compatibility...
Put numpy namespace in scipy for backward compatibility... git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@1530 d6536bca-fef9-0310-8506-e4c0a848fbcf
Python
bsd-3-clause
scipy/scipy-svn,jasonmccampbell/scipy-refactor,scipy/scipy-svn,scipy/scipy-svn,lesserwhirls/scipy-cwt,lesserwhirls/scipy-cwt,lesserwhirls/scipy-cwt,jasonmccampbell/scipy-refactor,lesserwhirls/scipy-cwt,jasonmccampbell/scipy-refactor,jasonmccampbell/scipy-refactor,scipy/scipy-svn
"""\ SciPy --- A scientific computing package for Python =================================================== You can support the development of SciPy by purchasing documentation at http://www.trelgol.com It is being distributed for a fee for a limited time to try and raise money for development. Documentation is also available in the docstrings. Available subpackages --------------------- """ try: import pkg_resources as _pr # activate namespace packages (manipulates __path__) del _pr except ImportError: pass from numpy import show_config as show_numpy_config if show_numpy_config is None: raise ImportError,"Cannot import scipy when running from numpy source directory." from numpy import __version__ as __numpy_version__ from __config__ import show as show_config from version import version as __version__ import numpy._import_tools as _ni pkgload = _ni.PackageLoader() del _ni import os as _os SCIPY_IMPORT_VERBOSE = int(_os.environ.get('SCIPY_IMPORT_VERBOSE','0')) pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True) del _os from numpy.testing import ScipyTest test = ScipyTest('scipy').test __all__.append('test') __doc__ += pkgload.get_pkgdocs() Put numpy namespace in scipy for backward compatibility... git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@1530 d6536bca-fef9-0310-8506-e4c0a848fbcf
"""\ SciPy --- A scientific computing package for Python =================================================== You can support the development of SciPy by purchasing documentation at http://www.trelgol.com It is being distributed for a fee for a limited time to try and raise money for development. Documentation is also available in the docstrings. Available subpackages --------------------- """ try: import pkg_resources as _pr # activate namespace packages (manipulates __path__) del _pr except ImportError: pass from numpy import show_config as show_numpy_config if show_numpy_config is None: raise ImportError,"Cannot import scipy when running from numpy source directory." from numpy import __version__ as __numpy_version__ from __config__ import show as show_config from version import version as __version__ import numpy._import_tools as _ni pkgload = _ni.PackageLoader() del _ni import os as _os SCIPY_IMPORT_VERBOSE = int(_os.environ.get('SCIPY_IMPORT_VERBOSE','0')) pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True) del _os from numpy.testing import ScipyTest test = ScipyTest('scipy').test __all__.append('test') import numpy as _num from numpy import * __all__ += _num.__all__ del _num __doc__ += pkgload.get_pkgdocs()
<commit_before>"""\ SciPy --- A scientific computing package for Python =================================================== You can support the development of SciPy by purchasing documentation at http://www.trelgol.com It is being distributed for a fee for a limited time to try and raise money for development. Documentation is also available in the docstrings. Available subpackages --------------------- """ try: import pkg_resources as _pr # activate namespace packages (manipulates __path__) del _pr except ImportError: pass from numpy import show_config as show_numpy_config if show_numpy_config is None: raise ImportError,"Cannot import scipy when running from numpy source directory." from numpy import __version__ as __numpy_version__ from __config__ import show as show_config from version import version as __version__ import numpy._import_tools as _ni pkgload = _ni.PackageLoader() del _ni import os as _os SCIPY_IMPORT_VERBOSE = int(_os.environ.get('SCIPY_IMPORT_VERBOSE','0')) pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True) del _os from numpy.testing import ScipyTest test = ScipyTest('scipy').test __all__.append('test') __doc__ += pkgload.get_pkgdocs() <commit_msg>Put numpy namespace in scipy for backward compatibility... git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@1530 d6536bca-fef9-0310-8506-e4c0a848fbcf<commit_after>
"""\ SciPy --- A scientific computing package for Python =================================================== You can support the development of SciPy by purchasing documentation at http://www.trelgol.com It is being distributed for a fee for a limited time to try and raise money for development. Documentation is also available in the docstrings. Available subpackages --------------------- """ try: import pkg_resources as _pr # activate namespace packages (manipulates __path__) del _pr except ImportError: pass from numpy import show_config as show_numpy_config if show_numpy_config is None: raise ImportError,"Cannot import scipy when running from numpy source directory." from numpy import __version__ as __numpy_version__ from __config__ import show as show_config from version import version as __version__ import numpy._import_tools as _ni pkgload = _ni.PackageLoader() del _ni import os as _os SCIPY_IMPORT_VERBOSE = int(_os.environ.get('SCIPY_IMPORT_VERBOSE','0')) pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True) del _os from numpy.testing import ScipyTest test = ScipyTest('scipy').test __all__.append('test') import numpy as _num from numpy import * __all__ += _num.__all__ del _num __doc__ += pkgload.get_pkgdocs()
"""\ SciPy --- A scientific computing package for Python =================================================== You can support the development of SciPy by purchasing documentation at http://www.trelgol.com It is being distributed for a fee for a limited time to try and raise money for development. Documentation is also available in the docstrings. Available subpackages --------------------- """ try: import pkg_resources as _pr # activate namespace packages (manipulates __path__) del _pr except ImportError: pass from numpy import show_config as show_numpy_config if show_numpy_config is None: raise ImportError,"Cannot import scipy when running from numpy source directory." from numpy import __version__ as __numpy_version__ from __config__ import show as show_config from version import version as __version__ import numpy._import_tools as _ni pkgload = _ni.PackageLoader() del _ni import os as _os SCIPY_IMPORT_VERBOSE = int(_os.environ.get('SCIPY_IMPORT_VERBOSE','0')) pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True) del _os from numpy.testing import ScipyTest test = ScipyTest('scipy').test __all__.append('test') __doc__ += pkgload.get_pkgdocs() Put numpy namespace in scipy for backward compatibility... git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@1530 d6536bca-fef9-0310-8506-e4c0a848fbcf"""\ SciPy --- A scientific computing package for Python =================================================== You can support the development of SciPy by purchasing documentation at http://www.trelgol.com It is being distributed for a fee for a limited time to try and raise money for development. Documentation is also available in the docstrings. Available subpackages --------------------- """ try: import pkg_resources as _pr # activate namespace packages (manipulates __path__) del _pr except ImportError: pass from numpy import show_config as show_numpy_config if show_numpy_config is None: raise ImportError,"Cannot import scipy when running from numpy source directory." from numpy import __version__ as __numpy_version__ from __config__ import show as show_config from version import version as __version__ import numpy._import_tools as _ni pkgload = _ni.PackageLoader() del _ni import os as _os SCIPY_IMPORT_VERBOSE = int(_os.environ.get('SCIPY_IMPORT_VERBOSE','0')) pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True) del _os from numpy.testing import ScipyTest test = ScipyTest('scipy').test __all__.append('test') import numpy as _num from numpy import * __all__ += _num.__all__ del _num __doc__ += pkgload.get_pkgdocs()
<commit_before>"""\ SciPy --- A scientific computing package for Python =================================================== You can support the development of SciPy by purchasing documentation at http://www.trelgol.com It is being distributed for a fee for a limited time to try and raise money for development. Documentation is also available in the docstrings. Available subpackages --------------------- """ try: import pkg_resources as _pr # activate namespace packages (manipulates __path__) del _pr except ImportError: pass from numpy import show_config as show_numpy_config if show_numpy_config is None: raise ImportError,"Cannot import scipy when running from numpy source directory." from numpy import __version__ as __numpy_version__ from __config__ import show as show_config from version import version as __version__ import numpy._import_tools as _ni pkgload = _ni.PackageLoader() del _ni import os as _os SCIPY_IMPORT_VERBOSE = int(_os.environ.get('SCIPY_IMPORT_VERBOSE','0')) pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True) del _os from numpy.testing import ScipyTest test = ScipyTest('scipy').test __all__.append('test') __doc__ += pkgload.get_pkgdocs() <commit_msg>Put numpy namespace in scipy for backward compatibility... git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@1530 d6536bca-fef9-0310-8506-e4c0a848fbcf<commit_after>"""\ SciPy --- A scientific computing package for Python =================================================== You can support the development of SciPy by purchasing documentation at http://www.trelgol.com It is being distributed for a fee for a limited time to try and raise money for development. Documentation is also available in the docstrings. Available subpackages --------------------- """ try: import pkg_resources as _pr # activate namespace packages (manipulates __path__) del _pr except ImportError: pass from numpy import show_config as show_numpy_config if show_numpy_config is None: raise ImportError,"Cannot import scipy when running from numpy source directory." from numpy import __version__ as __numpy_version__ from __config__ import show as show_config from version import version as __version__ import numpy._import_tools as _ni pkgload = _ni.PackageLoader() del _ni import os as _os SCIPY_IMPORT_VERBOSE = int(_os.environ.get('SCIPY_IMPORT_VERBOSE','0')) pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True) del _os from numpy.testing import ScipyTest test = ScipyTest('scipy').test __all__.append('test') import numpy as _num from numpy import * __all__ += _num.__all__ del _num __doc__ += pkgload.get_pkgdocs()
19433ab423abdd16dddf3508e8d73f0a0edae83c
bot/utils/attributeobject.py
bot/utils/attributeobject.py
class AttributeObject: def __init__(self, *excluded_keys): self._excluded_keys = excluded_keys def __getattr__(self, item): return self._getattr(item) def __setattr__(self, key, value): if key == "_excluded_keys" or key in self._excluded_keys: super().__setattr__(key, value) else: self._setattr(key, value) def _getattr(self, item): pass def _setattr(self, key, value): pass class DictionaryObject(AttributeObject): def __init__(self, initial_items={}): super().__init__("_dictionary") self._dictionary = dict(initial_items) def _getattr(self, item): return self._dictionary.get(item) def _setattr(self, key, value): self._dictionary[key] = value def _copy(self): return DictionaryObject(self._dictionary)
class AttributeObject: def __init__(self, *excluded_keys): self._excluded_keys = excluded_keys def __getattr__(self, item): return self._getattr(item) def __setattr__(self, key, value): if key == "_excluded_keys" or key in self._excluded_keys: super().__setattr__(key, value) else: self._setattr(key, value) def _getattr(self, item): raise NotImplementedError() def _setattr(self, key, value): raise NotImplementedError() class DictionaryObject(AttributeObject): def __init__(self, initial_items={}): super().__init__("_dictionary") self._dictionary = dict(initial_items) def _getattr(self, item): return self._dictionary.get(item) def _setattr(self, key, value): self._dictionary[key] = value def _copy(self): return DictionaryObject(self._dictionary)
Raise NotImplementedError instead of just passing in AttributeObject
Raise NotImplementedError instead of just passing in AttributeObject That way, if somebody uses it directly, it will fail with a proper error.
Python
agpl-3.0
alvarogzp/telegram-bot,alvarogzp/telegram-bot
class AttributeObject: def __init__(self, *excluded_keys): self._excluded_keys = excluded_keys def __getattr__(self, item): return self._getattr(item) def __setattr__(self, key, value): if key == "_excluded_keys" or key in self._excluded_keys: super().__setattr__(key, value) else: self._setattr(key, value) def _getattr(self, item): pass def _setattr(self, key, value): pass class DictionaryObject(AttributeObject): def __init__(self, initial_items={}): super().__init__("_dictionary") self._dictionary = dict(initial_items) def _getattr(self, item): return self._dictionary.get(item) def _setattr(self, key, value): self._dictionary[key] = value def _copy(self): return DictionaryObject(self._dictionary) Raise NotImplementedError instead of just passing in AttributeObject That way, if somebody uses it directly, it will fail with a proper error.
class AttributeObject: def __init__(self, *excluded_keys): self._excluded_keys = excluded_keys def __getattr__(self, item): return self._getattr(item) def __setattr__(self, key, value): if key == "_excluded_keys" or key in self._excluded_keys: super().__setattr__(key, value) else: self._setattr(key, value) def _getattr(self, item): raise NotImplementedError() def _setattr(self, key, value): raise NotImplementedError() class DictionaryObject(AttributeObject): def __init__(self, initial_items={}): super().__init__("_dictionary") self._dictionary = dict(initial_items) def _getattr(self, item): return self._dictionary.get(item) def _setattr(self, key, value): self._dictionary[key] = value def _copy(self): return DictionaryObject(self._dictionary)
<commit_before>class AttributeObject: def __init__(self, *excluded_keys): self._excluded_keys = excluded_keys def __getattr__(self, item): return self._getattr(item) def __setattr__(self, key, value): if key == "_excluded_keys" or key in self._excluded_keys: super().__setattr__(key, value) else: self._setattr(key, value) def _getattr(self, item): pass def _setattr(self, key, value): pass class DictionaryObject(AttributeObject): def __init__(self, initial_items={}): super().__init__("_dictionary") self._dictionary = dict(initial_items) def _getattr(self, item): return self._dictionary.get(item) def _setattr(self, key, value): self._dictionary[key] = value def _copy(self): return DictionaryObject(self._dictionary) <commit_msg>Raise NotImplementedError instead of just passing in AttributeObject That way, if somebody uses it directly, it will fail with a proper error.<commit_after>
class AttributeObject: def __init__(self, *excluded_keys): self._excluded_keys = excluded_keys def __getattr__(self, item): return self._getattr(item) def __setattr__(self, key, value): if key == "_excluded_keys" or key in self._excluded_keys: super().__setattr__(key, value) else: self._setattr(key, value) def _getattr(self, item): raise NotImplementedError() def _setattr(self, key, value): raise NotImplementedError() class DictionaryObject(AttributeObject): def __init__(self, initial_items={}): super().__init__("_dictionary") self._dictionary = dict(initial_items) def _getattr(self, item): return self._dictionary.get(item) def _setattr(self, key, value): self._dictionary[key] = value def _copy(self): return DictionaryObject(self._dictionary)
class AttributeObject: def __init__(self, *excluded_keys): self._excluded_keys = excluded_keys def __getattr__(self, item): return self._getattr(item) def __setattr__(self, key, value): if key == "_excluded_keys" or key in self._excluded_keys: super().__setattr__(key, value) else: self._setattr(key, value) def _getattr(self, item): pass def _setattr(self, key, value): pass class DictionaryObject(AttributeObject): def __init__(self, initial_items={}): super().__init__("_dictionary") self._dictionary = dict(initial_items) def _getattr(self, item): return self._dictionary.get(item) def _setattr(self, key, value): self._dictionary[key] = value def _copy(self): return DictionaryObject(self._dictionary) Raise NotImplementedError instead of just passing in AttributeObject That way, if somebody uses it directly, it will fail with a proper error.class AttributeObject: def __init__(self, *excluded_keys): self._excluded_keys = excluded_keys def __getattr__(self, item): return self._getattr(item) def __setattr__(self, key, value): if key == "_excluded_keys" or key in self._excluded_keys: super().__setattr__(key, value) else: self._setattr(key, value) def _getattr(self, item): raise NotImplementedError() def _setattr(self, key, value): raise NotImplementedError() class DictionaryObject(AttributeObject): def __init__(self, initial_items={}): super().__init__("_dictionary") self._dictionary = dict(initial_items) def _getattr(self, item): return self._dictionary.get(item) def _setattr(self, key, value): self._dictionary[key] = value def _copy(self): return DictionaryObject(self._dictionary)
<commit_before>class AttributeObject: def __init__(self, *excluded_keys): self._excluded_keys = excluded_keys def __getattr__(self, item): return self._getattr(item) def __setattr__(self, key, value): if key == "_excluded_keys" or key in self._excluded_keys: super().__setattr__(key, value) else: self._setattr(key, value) def _getattr(self, item): pass def _setattr(self, key, value): pass class DictionaryObject(AttributeObject): def __init__(self, initial_items={}): super().__init__("_dictionary") self._dictionary = dict(initial_items) def _getattr(self, item): return self._dictionary.get(item) def _setattr(self, key, value): self._dictionary[key] = value def _copy(self): return DictionaryObject(self._dictionary) <commit_msg>Raise NotImplementedError instead of just passing in AttributeObject That way, if somebody uses it directly, it will fail with a proper error.<commit_after>class AttributeObject: def __init__(self, *excluded_keys): self._excluded_keys = excluded_keys def __getattr__(self, item): return self._getattr(item) def __setattr__(self, key, value): if key == "_excluded_keys" or key in self._excluded_keys: super().__setattr__(key, value) else: self._setattr(key, value) def _getattr(self, item): raise NotImplementedError() def _setattr(self, key, value): raise NotImplementedError() class DictionaryObject(AttributeObject): def __init__(self, initial_items={}): super().__init__("_dictionary") self._dictionary = dict(initial_items) def _getattr(self, item): return self._dictionary.get(item) def _setattr(self, key, value): self._dictionary[key] = value def _copy(self): return DictionaryObject(self._dictionary)
580425162c9c84dee5cb78aa90b0992af4316bd7
web/web_config.py
web/web_config.py
#!/usr/bin/python3
"""
Configuration settings for server.py.
"""

HOSTNAME = "localhost"
PORT = 8080  # integer
OK_RESPONSE = 200
#!/usr/bin/python3
"""
Configuration settings for server.py.
"""

HOSTNAME = "localhost"
PORT = 2424  # integer
OK_RESPONSE = 200
Test version of web package. See cinch.py for usage.
Test version of web package. See cinch.py for usage.
Python
mit
JackieChiles/Cinch,JackieChiles/Cinch,JackieChiles/Cinch
#!/usr/bin/python3 """ Configuration settings for server.py. """ HOSTNAME = "localhost" PORT = 8080 # integer OK_RESPONSE = 200 Test version of web package. See cinch.py for usage.
#!/usr/bin/python3 """ Configuration settings for server.py. """ HOSTNAME = "localhost" PORT = 2424 # integer OK_RESPONSE = 200
<commit_before>#!/usr/bin/python3 """ Configuration settings for server.py. """ HOSTNAME = "localhost" PORT = 8080 # integer OK_RESPONSE = 200 <commit_msg>Test version of web package. See cinch.py for usage.<commit_after>
#!/usr/bin/python3 """ Configuration settings for server.py. """ HOSTNAME = "localhost" PORT = 2424 # integer OK_RESPONSE = 200
#!/usr/bin/python3 """ Configuration settings for server.py. """ HOSTNAME = "localhost" PORT = 8080 # integer OK_RESPONSE = 200 Test version of web package. See cinch.py for usage.#!/usr/bin/python3 """ Configuration settings for server.py. """ HOSTNAME = "localhost" PORT = 2424 # integer OK_RESPONSE = 200
<commit_before>#!/usr/bin/python3 """ Configuration settings for server.py. """ HOSTNAME = "localhost" PORT = 8080 # integer OK_RESPONSE = 200 <commit_msg>Test version of web package. See cinch.py for usage.<commit_after>#!/usr/bin/python3 """ Configuration settings for server.py. """ HOSTNAME = "localhost" PORT = 2424 # integer OK_RESPONSE = 200
365e4abca73d55fe4ba1b51a0057556ff8487c41
changes/listeners/build_revision.py
changes/listeners/build_revision.py
import logging

from flask import current_app
from fnmatch import fnmatch

from changes.api.build_index import BuildIndexAPIView
from changes.config import db
from changes.models import ItemOption, Project

logger = logging.getLogger('build_revision')


def should_build_branch(revision, allowed_branches):
    if not revision.branches:
        return True

    for branch in revision.branches:
        if any(fnmatch(branch, pattern) for pattern in allowed_branches):
            return True

    return False


def revision_created_handler(revision, **kwargs):
    project_list = list(Project.query.filter(
        Project.repository_id == revision.repository_id,
    ))
    if not project_list:
        return

    options = dict(
        db.session.query(
            ItemOption.item_id, ItemOption.value
        ).filter(
            ItemOption.item_id.in_(p.id for p in project_list),
            ItemOption.name.in_([
                'build.branch-names',
            ])
        )
    )

    for project in project_list:
        branch_names = options.get('build.branch-names', '*').split(' ')
        if not should_build_branch(revision, branch_names):
            return

        data = {
            'sha': revision.sha,
            'project': project.slug,
        }

        with current_app.test_request_context('/api/0/builds/', method='POST', data=data):
            response = BuildIndexAPIView().post()

        if isinstance(response, (list, tuple)):
            response, status = response
            if status != 200:
                logger.error('Failed to create builds: %s' % (response,))
import logging

from flask import current_app
from fnmatch import fnmatch

from changes.api.build_index import BuildIndexAPIView
from changes.config import db
from changes.models import ItemOption

logger = logging.getLogger('build_revision')


def should_build_branch(revision, allowed_branches):
    if not revision.branches:
        return True

    for branch in revision.branches:
        if any(fnmatch(branch, pattern) for pattern in allowed_branches):
            return True

    return False


def revision_created_handler(revision, **kwargs):
    options = dict(
        db.session.query(
            ItemOption.name, ItemOption.value
        ).filter(
            ItemOption.item_id == revision.repository_id,
            ItemOption.name.in_([
                'build.branch-names',
            ])
        )
    )

    if not should_build_branch(revision, options.get('build.branch-names', '*').split(' ')):
        return

    data = {
        'sha': revision.sha,
        'repository': revision.repository.url,
    }

    with current_app.test_request_context('/api/0/builds/', method='POST', data=data):
        response = BuildIndexAPIView().post()

    if isinstance(response, (list, tuple)):
        response, status = response
        if status != 200:
            logger.error('Failed to create builds: %s' % (response,))
Revert "Move build.branch-names to project settings"
Revert "Move build.branch-names to project settings" This reverts commit a38fc17616ae160aa41046470964034294eade1a.
Python
apache-2.0
bowlofstew/changes,dropbox/changes,dropbox/changes,bowlofstew/changes,wfxiang08/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,dropbox/changes,bowlofstew/changes,wfxiang08/changes
import logging from flask import current_app from fnmatch import fnmatch from changes.api.build_index import BuildIndexAPIView from changes.config import db from changes.models import ItemOption, Project logger = logging.getLogger('build_revision') def should_build_branch(revision, allowed_branches): if not revision.branches: return True for branch in revision.branches: if any(fnmatch(branch, pattern) for pattern in allowed_branches): return True return False def revision_created_handler(revision, **kwargs): project_list = list(Project.query.filter( Project.repository_id == revision.repository_id, )) if not project_list: return options = dict( db.session.query( ItemOption.item_id, ItemOption.value ).filter( ItemOption.item_id.in_(p.id for p in project_list), ItemOption.name.in_([ 'build.branch-names', ]) ) ) for project in project_list: branch_names = options.get('build.branch-names', '*').split(' ') if not should_build_branch(revision, branch_names): return data = { 'sha': revision.sha, 'project': project.slug, } with current_app.test_request_context('/api/0/builds/', method='POST', data=data): response = BuildIndexAPIView().post() if isinstance(response, (list, tuple)): response, status = response if status != 200: logger.error('Failed to create builds: %s' % (response,)) Revert "Move build.branch-names to project settings" This reverts commit a38fc17616ae160aa41046470964034294eade1a.
import logging from flask import current_app from fnmatch import fnmatch from changes.api.build_index import BuildIndexAPIView from changes.config import db from changes.models import ItemOption logger = logging.getLogger('build_revision') def should_build_branch(revision, allowed_branches): if not revision.branches: return True for branch in revision.branches: if any(fnmatch(branch, pattern) for pattern in allowed_branches): return True return False def revision_created_handler(revision, **kwargs): options = dict( db.session.query( ItemOption.name, ItemOption.value ).filter( ItemOption.item_id == revision.repository_id, ItemOption.name.in_([ 'build.branch-names', ]) ) ) if not should_build_branch(revision, options.get('build.branch-names', '*').split(' ')): return data = { 'sha': revision.sha, 'repository': revision.repository.url, } with current_app.test_request_context('/api/0/builds/', method='POST', data=data): response = BuildIndexAPIView().post() if isinstance(response, (list, tuple)): response, status = response if status != 200: logger.error('Failed to create builds: %s' % (response,))
<commit_before>import logging from flask import current_app from fnmatch import fnmatch from changes.api.build_index import BuildIndexAPIView from changes.config import db from changes.models import ItemOption, Project logger = logging.getLogger('build_revision') def should_build_branch(revision, allowed_branches): if not revision.branches: return True for branch in revision.branches: if any(fnmatch(branch, pattern) for pattern in allowed_branches): return True return False def revision_created_handler(revision, **kwargs): project_list = list(Project.query.filter( Project.repository_id == revision.repository_id, )) if not project_list: return options = dict( db.session.query( ItemOption.item_id, ItemOption.value ).filter( ItemOption.item_id.in_(p.id for p in project_list), ItemOption.name.in_([ 'build.branch-names', ]) ) ) for project in project_list: branch_names = options.get('build.branch-names', '*').split(' ') if not should_build_branch(revision, branch_names): return data = { 'sha': revision.sha, 'project': project.slug, } with current_app.test_request_context('/api/0/builds/', method='POST', data=data): response = BuildIndexAPIView().post() if isinstance(response, (list, tuple)): response, status = response if status != 200: logger.error('Failed to create builds: %s' % (response,)) <commit_msg>Revert "Move build.branch-names to project settings" This reverts commit a38fc17616ae160aa41046470964034294eade1a.<commit_after>
import logging from flask import current_app from fnmatch import fnmatch from changes.api.build_index import BuildIndexAPIView from changes.config import db from changes.models import ItemOption logger = logging.getLogger('build_revision') def should_build_branch(revision, allowed_branches): if not revision.branches: return True for branch in revision.branches: if any(fnmatch(branch, pattern) for pattern in allowed_branches): return True return False def revision_created_handler(revision, **kwargs): options = dict( db.session.query( ItemOption.name, ItemOption.value ).filter( ItemOption.item_id == revision.repository_id, ItemOption.name.in_([ 'build.branch-names', ]) ) ) if not should_build_branch(revision, options.get('build.branch-names', '*').split(' ')): return data = { 'sha': revision.sha, 'repository': revision.repository.url, } with current_app.test_request_context('/api/0/builds/', method='POST', data=data): response = BuildIndexAPIView().post() if isinstance(response, (list, tuple)): response, status = response if status != 200: logger.error('Failed to create builds: %s' % (response,))
import logging from flask import current_app from fnmatch import fnmatch from changes.api.build_index import BuildIndexAPIView from changes.config import db from changes.models import ItemOption, Project logger = logging.getLogger('build_revision') def should_build_branch(revision, allowed_branches): if not revision.branches: return True for branch in revision.branches: if any(fnmatch(branch, pattern) for pattern in allowed_branches): return True return False def revision_created_handler(revision, **kwargs): project_list = list(Project.query.filter( Project.repository_id == revision.repository_id, )) if not project_list: return options = dict( db.session.query( ItemOption.item_id, ItemOption.value ).filter( ItemOption.item_id.in_(p.id for p in project_list), ItemOption.name.in_([ 'build.branch-names', ]) ) ) for project in project_list: branch_names = options.get('build.branch-names', '*').split(' ') if not should_build_branch(revision, branch_names): return data = { 'sha': revision.sha, 'project': project.slug, } with current_app.test_request_context('/api/0/builds/', method='POST', data=data): response = BuildIndexAPIView().post() if isinstance(response, (list, tuple)): response, status = response if status != 200: logger.error('Failed to create builds: %s' % (response,)) Revert "Move build.branch-names to project settings" This reverts commit a38fc17616ae160aa41046470964034294eade1a.import logging from flask import current_app from fnmatch import fnmatch from changes.api.build_index import BuildIndexAPIView from changes.config import db from changes.models import ItemOption logger = logging.getLogger('build_revision') def should_build_branch(revision, allowed_branches): if not revision.branches: return True for branch in revision.branches: if any(fnmatch(branch, pattern) for pattern in allowed_branches): return True return False def revision_created_handler(revision, **kwargs): options = dict( db.session.query( ItemOption.name, ItemOption.value ).filter( ItemOption.item_id == revision.repository_id, ItemOption.name.in_([ 'build.branch-names', ]) ) ) if not should_build_branch(revision, options.get('build.branch-names', '*').split(' ')): return data = { 'sha': revision.sha, 'repository': revision.repository.url, } with current_app.test_request_context('/api/0/builds/', method='POST', data=data): response = BuildIndexAPIView().post() if isinstance(response, (list, tuple)): response, status = response if status != 200: logger.error('Failed to create builds: %s' % (response,))
<commit_before>import logging from flask import current_app from fnmatch import fnmatch from changes.api.build_index import BuildIndexAPIView from changes.config import db from changes.models import ItemOption, Project logger = logging.getLogger('build_revision') def should_build_branch(revision, allowed_branches): if not revision.branches: return True for branch in revision.branches: if any(fnmatch(branch, pattern) for pattern in allowed_branches): return True return False def revision_created_handler(revision, **kwargs): project_list = list(Project.query.filter( Project.repository_id == revision.repository_id, )) if not project_list: return options = dict( db.session.query( ItemOption.item_id, ItemOption.value ).filter( ItemOption.item_id.in_(p.id for p in project_list), ItemOption.name.in_([ 'build.branch-names', ]) ) ) for project in project_list: branch_names = options.get('build.branch-names', '*').split(' ') if not should_build_branch(revision, branch_names): return data = { 'sha': revision.sha, 'project': project.slug, } with current_app.test_request_context('/api/0/builds/', method='POST', data=data): response = BuildIndexAPIView().post() if isinstance(response, (list, tuple)): response, status = response if status != 200: logger.error('Failed to create builds: %s' % (response,)) <commit_msg>Revert "Move build.branch-names to project settings" This reverts commit a38fc17616ae160aa41046470964034294eade1a.<commit_after>import logging from flask import current_app from fnmatch import fnmatch from changes.api.build_index import BuildIndexAPIView from changes.config import db from changes.models import ItemOption logger = logging.getLogger('build_revision') def should_build_branch(revision, allowed_branches): if not revision.branches: return True for branch in revision.branches: if any(fnmatch(branch, pattern) for pattern in allowed_branches): return True return False def revision_created_handler(revision, **kwargs): options = dict( db.session.query( ItemOption.name, ItemOption.value ).filter( ItemOption.item_id == revision.repository_id, ItemOption.name.in_([ 'build.branch-names', ]) ) ) if not should_build_branch(revision, options.get('build.branch-names', '*').split(' ')): return data = { 'sha': revision.sha, 'repository': revision.repository.url, } with current_app.test_request_context('/api/0/builds/', method='POST', data=data): response = BuildIndexAPIView().post() if isinstance(response, (list, tuple)): response, status = response if status != 200: logger.error('Failed to create builds: %s' % (response,))
4325794f6cb3780b8c44fcf4198f141eef225fbf
dnzo/settings.py
dnzo/settings.py
from ragendja.settings_pre import *

import environment

MEDIA_VERSION = environment.MAJOR_VERSION

DEBUG = environment.IS_DEVELOPMENT
TEMPLATE_DEBUG = environment.IS_DEVELOPMENT

DATABASE_ENGINE = 'appengine'

USE_I18N = False

TEMPLATE_LOADERS = (
    # Load basic template files in the normal way
    'django.template.loaders.filesystem.load_template_source',
)

TEMPLATE_CONTEXT_PROCESSORS = (
)

MIDDLEWARE_CLASSES = (
    # does things like APPEND_SLASH for URLs
    'django.middleware.common.CommonMiddleware',
)

ROOT_URLCONF = 'urls'

import os
ROOT_PATH = os.path.dirname(__file__)

TEMPLATE_DIRS = (
    ROOT_PATH + '/resources/templates'
)

INSTALLED_APPS = (
    'appenginepatcher',
    'tasks',
    'public',
    'admin',
)

from ragendja.settings_post import *
from ragendja.settings_pre import *

import environment

MEDIA_VERSION = environment.MAJOR_VERSION

DEBUG = environment.IS_DEVELOPMENT
TEMPLATE_DEBUG = environment.IS_DEVELOPMENT

DATABASE_ENGINE = 'appengine'

USE_I18N = False

TEMPLATE_LOADERS = (
    # Load basic template files in the normal way
    'django.template.loaders.filesystem.load_template_source',
)

TEMPLATE_CONTEXT_PROCESSORS = (
)

MIDDLEWARE_CLASSES = (
    # does things like APPEND_SLASH for URLs
    'django.middleware.common.CommonMiddleware',
)

ROOT_URLCONF = 'urls'

import os
ROOT_PATH = os.path.dirname(__file__)

TEMPLATE_DIRS = (
    ROOT_PATH + '/resources/templates'
)

INSTALLED_APPS = (
    'appenginepatcher',
    'tasks',
    'public',
    'admin',
)

DJANGO_STYLE_MODEL_KIND = False

from ragendja.settings_post import *
Switch off weird AEP-1.0 model-renaming bullshit.
Switch off weird AEP-1.0 model-renaming bullshit. git-svn-id: 062a66634e56759c7c3cc44955c32d2ce0012d25@295 c02d1e6f-6a35-45f2-ab14-3b6f79a691ff
Python
mit
taylorhughes/done-zo,taylorhughes/done-zo,taylorhughes/done-zo,taylorhughes/done-zo
from ragendja.settings_pre import * import environment MEDIA_VERSION = environment.MAJOR_VERSION DEBUG = environment.IS_DEVELOPMENT TEMPLATE_DEBUG = environment.IS_DEVELOPMENT DATABASE_ENGINE = 'appengine' USE_I18N = False TEMPLATE_LOADERS = ( # Load basic template files in the normal way 'django.template.loaders.filesystem.load_template_source', ) TEMPLATE_CONTEXT_PROCESSORS = ( ) MIDDLEWARE_CLASSES = ( # does things like APPEND_SLASH for URLs 'django.middleware.common.CommonMiddleware', ) ROOT_URLCONF = 'urls' import os ROOT_PATH = os.path.dirname(__file__) TEMPLATE_DIRS = ( ROOT_PATH + '/resources/templates' ) INSTALLED_APPS = ( 'appenginepatcher', 'tasks', 'public', 'admin', ) from ragendja.settings_post import * Switch off weird AEP-1.0 model-renaming bullshit. git-svn-id: 062a66634e56759c7c3cc44955c32d2ce0012d25@295 c02d1e6f-6a35-45f2-ab14-3b6f79a691ff
from ragendja.settings_pre import * import environment MEDIA_VERSION = environment.MAJOR_VERSION DEBUG = environment.IS_DEVELOPMENT TEMPLATE_DEBUG = environment.IS_DEVELOPMENT DATABASE_ENGINE = 'appengine' USE_I18N = False TEMPLATE_LOADERS = ( # Load basic template files in the normal way 'django.template.loaders.filesystem.load_template_source', ) TEMPLATE_CONTEXT_PROCESSORS = ( ) MIDDLEWARE_CLASSES = ( # does things like APPEND_SLASH for URLs 'django.middleware.common.CommonMiddleware', ) ROOT_URLCONF = 'urls' import os ROOT_PATH = os.path.dirname(__file__) TEMPLATE_DIRS = ( ROOT_PATH + '/resources/templates' ) INSTALLED_APPS = ( 'appenginepatcher', 'tasks', 'public', 'admin', ) DJANGO_STYLE_MODEL_KIND = False from ragendja.settings_post import *
<commit_before>from ragendja.settings_pre import * import environment MEDIA_VERSION = environment.MAJOR_VERSION DEBUG = environment.IS_DEVELOPMENT TEMPLATE_DEBUG = environment.IS_DEVELOPMENT DATABASE_ENGINE = 'appengine' USE_I18N = False TEMPLATE_LOADERS = ( # Load basic template files in the normal way 'django.template.loaders.filesystem.load_template_source', ) TEMPLATE_CONTEXT_PROCESSORS = ( ) MIDDLEWARE_CLASSES = ( # does things like APPEND_SLASH for URLs 'django.middleware.common.CommonMiddleware', ) ROOT_URLCONF = 'urls' import os ROOT_PATH = os.path.dirname(__file__) TEMPLATE_DIRS = ( ROOT_PATH + '/resources/templates' ) INSTALLED_APPS = ( 'appenginepatcher', 'tasks', 'public', 'admin', ) from ragendja.settings_post import * <commit_msg>Switch off weird AEP-1.0 model-renaming bullshit. git-svn-id: 062a66634e56759c7c3cc44955c32d2ce0012d25@295 c02d1e6f-6a35-45f2-ab14-3b6f79a691ff<commit_after>
from ragendja.settings_pre import * import environment MEDIA_VERSION = environment.MAJOR_VERSION DEBUG = environment.IS_DEVELOPMENT TEMPLATE_DEBUG = environment.IS_DEVELOPMENT DATABASE_ENGINE = 'appengine' USE_I18N = False TEMPLATE_LOADERS = ( # Load basic template files in the normal way 'django.template.loaders.filesystem.load_template_source', ) TEMPLATE_CONTEXT_PROCESSORS = ( ) MIDDLEWARE_CLASSES = ( # does things like APPEND_SLASH for URLs 'django.middleware.common.CommonMiddleware', ) ROOT_URLCONF = 'urls' import os ROOT_PATH = os.path.dirname(__file__) TEMPLATE_DIRS = ( ROOT_PATH + '/resources/templates' ) INSTALLED_APPS = ( 'appenginepatcher', 'tasks', 'public', 'admin', ) DJANGO_STYLE_MODEL_KIND = False from ragendja.settings_post import *
from ragendja.settings_pre import * import environment MEDIA_VERSION = environment.MAJOR_VERSION DEBUG = environment.IS_DEVELOPMENT TEMPLATE_DEBUG = environment.IS_DEVELOPMENT DATABASE_ENGINE = 'appengine' USE_I18N = False TEMPLATE_LOADERS = ( # Load basic template files in the normal way 'django.template.loaders.filesystem.load_template_source', ) TEMPLATE_CONTEXT_PROCESSORS = ( ) MIDDLEWARE_CLASSES = ( # does things like APPEND_SLASH for URLs 'django.middleware.common.CommonMiddleware', ) ROOT_URLCONF = 'urls' import os ROOT_PATH = os.path.dirname(__file__) TEMPLATE_DIRS = ( ROOT_PATH + '/resources/templates' ) INSTALLED_APPS = ( 'appenginepatcher', 'tasks', 'public', 'admin', ) from ragendja.settings_post import * Switch off weird AEP-1.0 model-renaming bullshit. git-svn-id: 062a66634e56759c7c3cc44955c32d2ce0012d25@295 c02d1e6f-6a35-45f2-ab14-3b6f79a691fffrom ragendja.settings_pre import * import environment MEDIA_VERSION = environment.MAJOR_VERSION DEBUG = environment.IS_DEVELOPMENT TEMPLATE_DEBUG = environment.IS_DEVELOPMENT DATABASE_ENGINE = 'appengine' USE_I18N = False TEMPLATE_LOADERS = ( # Load basic template files in the normal way 'django.template.loaders.filesystem.load_template_source', ) TEMPLATE_CONTEXT_PROCESSORS = ( ) MIDDLEWARE_CLASSES = ( # does things like APPEND_SLASH for URLs 'django.middleware.common.CommonMiddleware', ) ROOT_URLCONF = 'urls' import os ROOT_PATH = os.path.dirname(__file__) TEMPLATE_DIRS = ( ROOT_PATH + '/resources/templates' ) INSTALLED_APPS = ( 'appenginepatcher', 'tasks', 'public', 'admin', ) DJANGO_STYLE_MODEL_KIND = False from ragendja.settings_post import *
<commit_before>from ragendja.settings_pre import * import environment MEDIA_VERSION = environment.MAJOR_VERSION DEBUG = environment.IS_DEVELOPMENT TEMPLATE_DEBUG = environment.IS_DEVELOPMENT DATABASE_ENGINE = 'appengine' USE_I18N = False TEMPLATE_LOADERS = ( # Load basic template files in the normal way 'django.template.loaders.filesystem.load_template_source', ) TEMPLATE_CONTEXT_PROCESSORS = ( ) MIDDLEWARE_CLASSES = ( # does things like APPEND_SLASH for URLs 'django.middleware.common.CommonMiddleware', ) ROOT_URLCONF = 'urls' import os ROOT_PATH = os.path.dirname(__file__) TEMPLATE_DIRS = ( ROOT_PATH + '/resources/templates' ) INSTALLED_APPS = ( 'appenginepatcher', 'tasks', 'public', 'admin', ) from ragendja.settings_post import * <commit_msg>Switch off weird AEP-1.0 model-renaming bullshit. git-svn-id: 062a66634e56759c7c3cc44955c32d2ce0012d25@295 c02d1e6f-6a35-45f2-ab14-3b6f79a691ff<commit_after>from ragendja.settings_pre import * import environment MEDIA_VERSION = environment.MAJOR_VERSION DEBUG = environment.IS_DEVELOPMENT TEMPLATE_DEBUG = environment.IS_DEVELOPMENT DATABASE_ENGINE = 'appengine' USE_I18N = False TEMPLATE_LOADERS = ( # Load basic template files in the normal way 'django.template.loaders.filesystem.load_template_source', ) TEMPLATE_CONTEXT_PROCESSORS = ( ) MIDDLEWARE_CLASSES = ( # does things like APPEND_SLASH for URLs 'django.middleware.common.CommonMiddleware', ) ROOT_URLCONF = 'urls' import os ROOT_PATH = os.path.dirname(__file__) TEMPLATE_DIRS = ( ROOT_PATH + '/resources/templates' ) INSTALLED_APPS = ( 'appenginepatcher', 'tasks', 'public', 'admin', ) DJANGO_STYLE_MODEL_KIND = False from ragendja.settings_post import *
2c67fcd8ec55324366087c2f69bcd232592ac312
pirx/base.py
pirx/base.py
class Settings(object):
    def __init__(self):
        self._settings = {}

    def __setattr__(self, name, value):
        if name != '_settings':
            self._settings[name] = value
        else:
            super(Settings, self).__setattr__(name, value)

    def write(self):
        for name, value in self._settings.iteritems():
            print '%s = %s' % (name.upper(), value)
class Settings(object):
    def __init__(self):
        self._settings = {}

    def __setattr__(self, name, value):
        if name != '_settings':
            self._settings[name] = value
        else:
            super(Settings, self).__setattr__(name, value)

    def write(self):
        for name, value in self._settings.iteritems():
            print '%s = %s' % (name.upper(), value.__repr__())
Use __repr__ instead of __str__ to print setting's value
Use __repr__ instead of __str__ to print setting's value
Python
mit
piotrekw/pirx
class Settings(object): def __init__(self): self._settings = {} def __setattr__(self, name, value): if name != '_settings': self._settings[name] = value else: super(Settings, self).__setattr__(name, value) def write(self): for name, value in self._settings.iteritems(): print '%s = %s' % (name.upper(), value) Use __repr__ instead of __str__ to print setting's value
class Settings(object): def __init__(self): self._settings = {} def __setattr__(self, name, value): if name != '_settings': self._settings[name] = value else: super(Settings, self).__setattr__(name, value) def write(self): for name, value in self._settings.iteritems(): print '%s = %s' % (name.upper(), value.__repr__())
<commit_before>class Settings(object): def __init__(self): self._settings = {} def __setattr__(self, name, value): if name != '_settings': self._settings[name] = value else: super(Settings, self).__setattr__(name, value) def write(self): for name, value in self._settings.iteritems(): print '%s = %s' % (name.upper(), value) <commit_msg>Use __repr__ instead of __str__ to print setting's value<commit_after>
class Settings(object): def __init__(self): self._settings = {} def __setattr__(self, name, value): if name != '_settings': self._settings[name] = value else: super(Settings, self).__setattr__(name, value) def write(self): for name, value in self._settings.iteritems(): print '%s = %s' % (name.upper(), value.__repr__())
class Settings(object): def __init__(self): self._settings = {} def __setattr__(self, name, value): if name != '_settings': self._settings[name] = value else: super(Settings, self).__setattr__(name, value) def write(self): for name, value in self._settings.iteritems(): print '%s = %s' % (name.upper(), value) Use __repr__ instead of __str__ to print setting's valueclass Settings(object): def __init__(self): self._settings = {} def __setattr__(self, name, value): if name != '_settings': self._settings[name] = value else: super(Settings, self).__setattr__(name, value) def write(self): for name, value in self._settings.iteritems(): print '%s = %s' % (name.upper(), value.__repr__())
<commit_before>class Settings(object): def __init__(self): self._settings = {} def __setattr__(self, name, value): if name != '_settings': self._settings[name] = value else: super(Settings, self).__setattr__(name, value) def write(self): for name, value in self._settings.iteritems(): print '%s = %s' % (name.upper(), value) <commit_msg>Use __repr__ instead of __str__ to print setting's value<commit_after>class Settings(object): def __init__(self): self._settings = {} def __setattr__(self, name, value): if name != '_settings': self._settings[name] = value else: super(Settings, self).__setattr__(name, value) def write(self): for name, value in self._settings.iteritems(): print '%s = %s' % (name.upper(), value.__repr__())
da6f284cf1ffa1397c32167e1e23189ea29e5b2f
IPython/html/widgets/widget_container.py
IPython/html/widgets/widget_container.py
"""ContainerWidget class. Represents a container that can be used to group other widgets. """ # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. from .widget import DOMWidget from IPython.utils.traitlets import Unicode, Tuple, TraitError class ContainerWidget(DOMWidget): _view_name = Unicode('ContainerView', sync=True) # Child widgets in the container. # Using a tuple here to force reassignment to update the list. # When a proper notifying-list trait exists, that is what should be used here. children = Tuple(sync=True) def __init__(self, **kwargs): super(ContainerWidget, self).__init__(**kwargs) self.on_displayed(ContainerWidget._fire_children_displayed) def _fire_children_displayed(self): for child in self.children: child._handle_displayed() class PopupWidget(ContainerWidget): _view_name = Unicode('PopupView', sync=True) description = Unicode(sync=True) button_text = Unicode(sync=True)
"""ContainerWidget class. Represents a container that can be used to group other widgets. """ # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. from .widget import DOMWidget from IPython.utils.traitlets import Unicode, Tuple, TraitError class ContainerWidget(DOMWidget): _view_name = Unicode('ContainerView', sync=True) # Child widgets in the container. # Using a tuple here to force reassignment to update the list. # When a proper notifying-list trait exists, that is what should be used here. children = Tuple(sync=True) def __init__(self, children = None, **kwargs): kwargs['children'] = children super(ContainerWidget, self).__init__(**kwargs) self.on_displayed(ContainerWidget._fire_children_displayed) def _fire_children_displayed(self): for child in self.children: child._handle_displayed() class PopupWidget(ContainerWidget): _view_name = Unicode('PopupView', sync=True) description = Unicode(sync=True) button_text = Unicode(sync=True)
Make Container widgets take children as the first positional argument
Make Container widgets take children as the first positional argument This makes creating containers less cumbersome: Container([list, of, children]), rather than Container(children=[list, of, children])
Python
bsd-3-clause
ipython/ipython,ipython/ipython
"""ContainerWidget class. Represents a container that can be used to group other widgets. """ # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. from .widget import DOMWidget from IPython.utils.traitlets import Unicode, Tuple, TraitError class ContainerWidget(DOMWidget): _view_name = Unicode('ContainerView', sync=True) # Child widgets in the container. # Using a tuple here to force reassignment to update the list. # When a proper notifying-list trait exists, that is what should be used here. children = Tuple(sync=True) def __init__(self, **kwargs): super(ContainerWidget, self).__init__(**kwargs) self.on_displayed(ContainerWidget._fire_children_displayed) def _fire_children_displayed(self): for child in self.children: child._handle_displayed() class PopupWidget(ContainerWidget): _view_name = Unicode('PopupView', sync=True) description = Unicode(sync=True) button_text = Unicode(sync=True) Make Container widgets take children as the first positional argument This makes creating containers less cumbersome: Container([list, of, children]), rather than Container(children=[list, of, children])
"""ContainerWidget class. Represents a container that can be used to group other widgets. """ # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. from .widget import DOMWidget from IPython.utils.traitlets import Unicode, Tuple, TraitError class ContainerWidget(DOMWidget): _view_name = Unicode('ContainerView', sync=True) # Child widgets in the container. # Using a tuple here to force reassignment to update the list. # When a proper notifying-list trait exists, that is what should be used here. children = Tuple(sync=True) def __init__(self, children = None, **kwargs): kwargs['children'] = children super(ContainerWidget, self).__init__(**kwargs) self.on_displayed(ContainerWidget._fire_children_displayed) def _fire_children_displayed(self): for child in self.children: child._handle_displayed() class PopupWidget(ContainerWidget): _view_name = Unicode('PopupView', sync=True) description = Unicode(sync=True) button_text = Unicode(sync=True)
<commit_before>"""ContainerWidget class. Represents a container that can be used to group other widgets. """ # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. from .widget import DOMWidget from IPython.utils.traitlets import Unicode, Tuple, TraitError class ContainerWidget(DOMWidget): _view_name = Unicode('ContainerView', sync=True) # Child widgets in the container. # Using a tuple here to force reassignment to update the list. # When a proper notifying-list trait exists, that is what should be used here. children = Tuple(sync=True) def __init__(self, **kwargs): super(ContainerWidget, self).__init__(**kwargs) self.on_displayed(ContainerWidget._fire_children_displayed) def _fire_children_displayed(self): for child in self.children: child._handle_displayed() class PopupWidget(ContainerWidget): _view_name = Unicode('PopupView', sync=True) description = Unicode(sync=True) button_text = Unicode(sync=True) <commit_msg>Make Container widgets take children as the first positional argument This makes creating containers less cumbersome: Container([list, of, children]), rather than Container(children=[list, of, children])<commit_after>
"""ContainerWidget class. Represents a container that can be used to group other widgets. """ # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. from .widget import DOMWidget from IPython.utils.traitlets import Unicode, Tuple, TraitError class ContainerWidget(DOMWidget): _view_name = Unicode('ContainerView', sync=True) # Child widgets in the container. # Using a tuple here to force reassignment to update the list. # When a proper notifying-list trait exists, that is what should be used here. children = Tuple(sync=True) def __init__(self, children = None, **kwargs): kwargs['children'] = children super(ContainerWidget, self).__init__(**kwargs) self.on_displayed(ContainerWidget._fire_children_displayed) def _fire_children_displayed(self): for child in self.children: child._handle_displayed() class PopupWidget(ContainerWidget): _view_name = Unicode('PopupView', sync=True) description = Unicode(sync=True) button_text = Unicode(sync=True)
"""ContainerWidget class. Represents a container that can be used to group other widgets. """ # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. from .widget import DOMWidget from IPython.utils.traitlets import Unicode, Tuple, TraitError class ContainerWidget(DOMWidget): _view_name = Unicode('ContainerView', sync=True) # Child widgets in the container. # Using a tuple here to force reassignment to update the list. # When a proper notifying-list trait exists, that is what should be used here. children = Tuple(sync=True) def __init__(self, **kwargs): super(ContainerWidget, self).__init__(**kwargs) self.on_displayed(ContainerWidget._fire_children_displayed) def _fire_children_displayed(self): for child in self.children: child._handle_displayed() class PopupWidget(ContainerWidget): _view_name = Unicode('PopupView', sync=True) description = Unicode(sync=True) button_text = Unicode(sync=True) Make Container widgets take children as the first positional argument This makes creating containers less cumbersome: Container([list, of, children]), rather than Container(children=[list, of, children])"""ContainerWidget class. Represents a container that can be used to group other widgets. """ # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. from .widget import DOMWidget from IPython.utils.traitlets import Unicode, Tuple, TraitError class ContainerWidget(DOMWidget): _view_name = Unicode('ContainerView', sync=True) # Child widgets in the container. # Using a tuple here to force reassignment to update the list. # When a proper notifying-list trait exists, that is what should be used here. children = Tuple(sync=True) def __init__(self, children = None, **kwargs): kwargs['children'] = children super(ContainerWidget, self).__init__(**kwargs) self.on_displayed(ContainerWidget._fire_children_displayed) def _fire_children_displayed(self): for child in self.children: child._handle_displayed() class PopupWidget(ContainerWidget): _view_name = Unicode('PopupView', sync=True) description = Unicode(sync=True) button_text = Unicode(sync=True)
<commit_before>"""ContainerWidget class. Represents a container that can be used to group other widgets. """ # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. from .widget import DOMWidget from IPython.utils.traitlets import Unicode, Tuple, TraitError class ContainerWidget(DOMWidget): _view_name = Unicode('ContainerView', sync=True) # Child widgets in the container. # Using a tuple here to force reassignment to update the list. # When a proper notifying-list trait exists, that is what should be used here. children = Tuple(sync=True) def __init__(self, **kwargs): super(ContainerWidget, self).__init__(**kwargs) self.on_displayed(ContainerWidget._fire_children_displayed) def _fire_children_displayed(self): for child in self.children: child._handle_displayed() class PopupWidget(ContainerWidget): _view_name = Unicode('PopupView', sync=True) description = Unicode(sync=True) button_text = Unicode(sync=True) <commit_msg>Make Container widgets take children as the first positional argument This makes creating containers less cumbersome: Container([list, of, children]), rather than Container(children=[list, of, children])<commit_after>"""ContainerWidget class. Represents a container that can be used to group other widgets. """ # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. from .widget import DOMWidget from IPython.utils.traitlets import Unicode, Tuple, TraitError class ContainerWidget(DOMWidget): _view_name = Unicode('ContainerView', sync=True) # Child widgets in the container. # Using a tuple here to force reassignment to update the list. # When a proper notifying-list trait exists, that is what should be used here. children = Tuple(sync=True) def __init__(self, children = None, **kwargs): kwargs['children'] = children super(ContainerWidget, self).__init__(**kwargs) self.on_displayed(ContainerWidget._fire_children_displayed) def _fire_children_displayed(self): for child in self.children: child._handle_displayed() class PopupWidget(ContainerWidget): _view_name = Unicode('PopupView', sync=True) description = Unicode(sync=True) button_text = Unicode(sync=True)
efb0aa6c68ed9b5ab5c855464dc0b611506326d2
wafer/kv/migrations/0001_initial.py
wafer/kv/migrations/0001_initial.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models
import jsonfield.fields


class Migration(migrations.Migration):

    dependencies = [
        ('auth', '0006_require_contenttypes_0002'),
    ]

    operations = [
        migrations.CreateModel(
            name='KeyValue',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('key', models.CharField(max_length=64, db_index=True)),
                ('value', jsonfield.fields.JSONField()),
                ('group', models.ForeignKey(to='auth.Group')),
            ],
        ),
    ]
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models
from django.conf import settings
import jsonfield.fields


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL)
    ]

    operations = [
        migrations.CreateModel(
            name='KeyValue',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('key', models.CharField(max_length=64, db_index=True)),
                ('value', jsonfield.fields.JSONField()),
                ('group', models.ForeignKey(to='auth.Group')),
            ],
        ),
    ]
Tweak kv migration to improve compatibility across Django versions
Tweak kv migration to improve compatibility across Django versions
Python
isc
CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models import jsonfield.fields class Migration(migrations.Migration): dependencies = [ ('auth', '0006_require_contenttypes_0002'), ] operations = [ migrations.CreateModel( name='KeyValue', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('key', models.CharField(max_length=64, db_index=True)), ('value', jsonfield.fields.JSONField()), ('group', models.ForeignKey(to='auth.Group')), ], ), ] Tweak kv migration to improve compatibility across Django versions
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models from django.conf import settings import jsonfield.fields class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL) ] operations = [ migrations.CreateModel( name='KeyValue', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('key', models.CharField(max_length=64, db_index=True)), ('value', jsonfield.fields.JSONField()), ('group', models.ForeignKey(to='auth.Group')), ], ), ]
<commit_before># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models import jsonfield.fields class Migration(migrations.Migration): dependencies = [ ('auth', '0006_require_contenttypes_0002'), ] operations = [ migrations.CreateModel( name='KeyValue', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('key', models.CharField(max_length=64, db_index=True)), ('value', jsonfield.fields.JSONField()), ('group', models.ForeignKey(to='auth.Group')), ], ), ] <commit_msg>Tweak kv migration to improve compatibility across Django versions<commit_after>
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models from django.conf import settings import jsonfield.fields class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL) ] operations = [ migrations.CreateModel( name='KeyValue', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('key', models.CharField(max_length=64, db_index=True)), ('value', jsonfield.fields.JSONField()), ('group', models.ForeignKey(to='auth.Group')), ], ), ]
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models import jsonfield.fields class Migration(migrations.Migration): dependencies = [ ('auth', '0006_require_contenttypes_0002'), ] operations = [ migrations.CreateModel( name='KeyValue', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('key', models.CharField(max_length=64, db_index=True)), ('value', jsonfield.fields.JSONField()), ('group', models.ForeignKey(to='auth.Group')), ], ), ] Tweak kv migration to improve compatibility across Django versions# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models from django.conf import settings import jsonfield.fields class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL) ] operations = [ migrations.CreateModel( name='KeyValue', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('key', models.CharField(max_length=64, db_index=True)), ('value', jsonfield.fields.JSONField()), ('group', models.ForeignKey(to='auth.Group')), ], ), ]
<commit_before># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models import jsonfield.fields class Migration(migrations.Migration): dependencies = [ ('auth', '0006_require_contenttypes_0002'), ] operations = [ migrations.CreateModel( name='KeyValue', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('key', models.CharField(max_length=64, db_index=True)), ('value', jsonfield.fields.JSONField()), ('group', models.ForeignKey(to='auth.Group')), ], ), ] <commit_msg>Tweak kv migration to improve compatibility across Django versions<commit_after># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models from django.conf import settings import jsonfield.fields class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL) ] operations = [ migrations.CreateModel( name='KeyValue', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('key', models.CharField(max_length=64, db_index=True)), ('value', jsonfield.fields.JSONField()), ('group', models.ForeignKey(to='auth.Group')), ], ), ]
29c3d87881ce9c57478eb821da60d77e9f5eeb48
eventsourcing/application/base.py
eventsourcing/application/base.py
from abc import abstractmethod, ABCMeta

from six import with_metaclass

from eventsourcing.infrastructure.event_store import EventStore
from eventsourcing.infrastructure.persistence_subscriber import PersistenceSubscriber


class EventSourcingApplication(with_metaclass(ABCMeta)):

    def __init__(self, json_encoder_cls=None, json_decoder_cls=None, cipher=None,
                 always_encrypt_stored_events=False):
        self.stored_event_repo = self.create_stored_event_repo(
            json_encoder_cls=json_encoder_cls,
            json_decoder_cls=json_decoder_cls,
            cipher=cipher,
            always_encrypt=always_encrypt_stored_events)
        self.event_store = self.create_event_store()
        self.persistence_subscriber = self.create_persistence_subscriber()

    @abstractmethod
    def create_stored_event_repo(self, **kwargs):
        """Returns an instance of a subclass of StoredEventRepository.

        :rtype: StoredEventRepository
        """

    def create_event_store(self):
        return EventStore(self.stored_event_repo)

    def create_persistence_subscriber(self):
        return PersistenceSubscriber(self.event_store)

    def close(self):
        self.persistence_subscriber.close()
        self.stored_event_repo = None
        self.event_store = None
        self.persistence_subscriber = None

    def __enter__(self):
        return self

    def __exit__(self, *_):
        self.close()
from abc import abstractmethod, ABCMeta

from six import with_metaclass

from eventsourcing.infrastructure.event_store import EventStore
from eventsourcing.infrastructure.persistence_subscriber import PersistenceSubscriber


class EventSourcingApplication(with_metaclass(ABCMeta)):

    persist_events = True

    def __init__(self, json_encoder_cls=None, json_decoder_cls=None, cipher=None,
                 always_encrypt_stored_events=False):
        self.stored_event_repo = self.create_stored_event_repo(
            json_encoder_cls=json_encoder_cls,
            json_decoder_cls=json_decoder_cls,
            cipher=cipher,
            always_encrypt=always_encrypt_stored_events)
        self.event_store = self.create_event_store()
        if self.persist_events:
            self.persistence_subscriber = self.create_persistence_subscriber()
        else:
            self.persistence_subscriber = None

    @abstractmethod
    def create_stored_event_repo(self, **kwargs):
        """Returns an instance of a subclass of StoredEventRepository.

        :rtype: StoredEventRepository
        """

    def create_event_store(self):
        return EventStore(self.stored_event_repo)

    def create_persistence_subscriber(self):
        return PersistenceSubscriber(self.event_store)

    def close(self):
        if self.persistence_subscriber:
            self.persistence_subscriber.close()
        self.stored_event_repo = None
        self.event_store = None
        self.persistence_subscriber = None

    def __enter__(self):
        return self

    def __exit__(self, *_):
        self.close()
Allow to disable events persistence at app class
Allow to disable events persistence at app class
Python
bsd-3-clause
johnbywater/eventsourcing,johnbywater/eventsourcing
from abc import abstractmethod, ABCMeta from six import with_metaclass from eventsourcing.infrastructure.event_store import EventStore from eventsourcing.infrastructure.persistence_subscriber import PersistenceSubscriber class EventSourcingApplication(with_metaclass(ABCMeta)): def __init__(self, json_encoder_cls=None, json_decoder_cls=None, cipher=None, always_encrypt_stored_events=False): self.stored_event_repo = self.create_stored_event_repo(json_encoder_cls=json_encoder_cls, json_decoder_cls=json_decoder_cls, cipher=cipher, always_encrypt=always_encrypt_stored_events) self.event_store = self.create_event_store() self.persistence_subscriber = self.create_persistence_subscriber() @abstractmethod def create_stored_event_repo(self, **kwargs): """Returns an instance of a subclass of StoredEventRepository. :rtype: StoredEventRepository """ def create_event_store(self): return EventStore(self.stored_event_repo) def create_persistence_subscriber(self): return PersistenceSubscriber(self.event_store) def close(self): self.persistence_subscriber.close() self.stored_event_repo = None self.event_store = None self.persistence_subscriber = None def __enter__(self): return self def __exit__(self, *_): self.close() Allow to disable events persistence at app class
from abc import abstractmethod, ABCMeta from six import with_metaclass from eventsourcing.infrastructure.event_store import EventStore from eventsourcing.infrastructure.persistence_subscriber import PersistenceSubscriber class EventSourcingApplication(with_metaclass(ABCMeta)): persist_events = True def __init__(self, json_encoder_cls=None, json_decoder_cls=None, cipher=None, always_encrypt_stored_events=False): self.stored_event_repo = self.create_stored_event_repo(json_encoder_cls=json_encoder_cls, json_decoder_cls=json_decoder_cls, cipher=cipher, always_encrypt=always_encrypt_stored_events) self.event_store = self.create_event_store() if self.persist_events: self.persistence_subscriber = self.create_persistence_subscriber() else: self.persistence_subscriber = None @abstractmethod def create_stored_event_repo(self, **kwargs): """Returns an instance of a subclass of StoredEventRepository. :rtype: StoredEventRepository """ def create_event_store(self): return EventStore(self.stored_event_repo) def create_persistence_subscriber(self): return PersistenceSubscriber(self.event_store) def close(self): if self.persistence_subscriber: self.persistence_subscriber.close() self.stored_event_repo = None self.event_store = None self.persistence_subscriber = None def __enter__(self): return self def __exit__(self, *_): self.close()
<commit_before>from abc import abstractmethod, ABCMeta from six import with_metaclass from eventsourcing.infrastructure.event_store import EventStore from eventsourcing.infrastructure.persistence_subscriber import PersistenceSubscriber class EventSourcingApplication(with_metaclass(ABCMeta)): def __init__(self, json_encoder_cls=None, json_decoder_cls=None, cipher=None, always_encrypt_stored_events=False): self.stored_event_repo = self.create_stored_event_repo(json_encoder_cls=json_encoder_cls, json_decoder_cls=json_decoder_cls, cipher=cipher, always_encrypt=always_encrypt_stored_events) self.event_store = self.create_event_store() self.persistence_subscriber = self.create_persistence_subscriber() @abstractmethod def create_stored_event_repo(self, **kwargs): """Returns an instance of a subclass of StoredEventRepository. :rtype: StoredEventRepository """ def create_event_store(self): return EventStore(self.stored_event_repo) def create_persistence_subscriber(self): return PersistenceSubscriber(self.event_store) def close(self): self.persistence_subscriber.close() self.stored_event_repo = None self.event_store = None self.persistence_subscriber = None def __enter__(self): return self def __exit__(self, *_): self.close() <commit_msg>Allow to disable events persistence at app class<commit_after>
from abc import abstractmethod, ABCMeta from six import with_metaclass from eventsourcing.infrastructure.event_store import EventStore from eventsourcing.infrastructure.persistence_subscriber import PersistenceSubscriber class EventSourcingApplication(with_metaclass(ABCMeta)): persist_events = True def __init__(self, json_encoder_cls=None, json_decoder_cls=None, cipher=None, always_encrypt_stored_events=False): self.stored_event_repo = self.create_stored_event_repo(json_encoder_cls=json_encoder_cls, json_decoder_cls=json_decoder_cls, cipher=cipher, always_encrypt=always_encrypt_stored_events) self.event_store = self.create_event_store() if self.persist_events: self.persistence_subscriber = self.create_persistence_subscriber() else: self.persistence_subscriber = None @abstractmethod def create_stored_event_repo(self, **kwargs): """Returns an instance of a subclass of StoredEventRepository. :rtype: StoredEventRepository """ def create_event_store(self): return EventStore(self.stored_event_repo) def create_persistence_subscriber(self): return PersistenceSubscriber(self.event_store) def close(self): if self.persistence_subscriber: self.persistence_subscriber.close() self.stored_event_repo = None self.event_store = None self.persistence_subscriber = None def __enter__(self): return self def __exit__(self, *_): self.close()
from abc import abstractmethod, ABCMeta from six import with_metaclass from eventsourcing.infrastructure.event_store import EventStore from eventsourcing.infrastructure.persistence_subscriber import PersistenceSubscriber class EventSourcingApplication(with_metaclass(ABCMeta)): def __init__(self, json_encoder_cls=None, json_decoder_cls=None, cipher=None, always_encrypt_stored_events=False): self.stored_event_repo = self.create_stored_event_repo(json_encoder_cls=json_encoder_cls, json_decoder_cls=json_decoder_cls, cipher=cipher, always_encrypt=always_encrypt_stored_events) self.event_store = self.create_event_store() self.persistence_subscriber = self.create_persistence_subscriber() @abstractmethod def create_stored_event_repo(self, **kwargs): """Returns an instance of a subclass of StoredEventRepository. :rtype: StoredEventRepository """ def create_event_store(self): return EventStore(self.stored_event_repo) def create_persistence_subscriber(self): return PersistenceSubscriber(self.event_store) def close(self): self.persistence_subscriber.close() self.stored_event_repo = None self.event_store = None self.persistence_subscriber = None def __enter__(self): return self def __exit__(self, *_): self.close() Allow to disable events persistence at app classfrom abc import abstractmethod, ABCMeta from six import with_metaclass from eventsourcing.infrastructure.event_store import EventStore from eventsourcing.infrastructure.persistence_subscriber import PersistenceSubscriber class EventSourcingApplication(with_metaclass(ABCMeta)): persist_events = True def __init__(self, json_encoder_cls=None, json_decoder_cls=None, cipher=None, always_encrypt_stored_events=False): self.stored_event_repo = self.create_stored_event_repo(json_encoder_cls=json_encoder_cls, json_decoder_cls=json_decoder_cls, cipher=cipher, always_encrypt=always_encrypt_stored_events) self.event_store = self.create_event_store() if self.persist_events: self.persistence_subscriber = self.create_persistence_subscriber() else: self.persistence_subscriber = None @abstractmethod def create_stored_event_repo(self, **kwargs): """Returns an instance of a subclass of StoredEventRepository. :rtype: StoredEventRepository """ def create_event_store(self): return EventStore(self.stored_event_repo) def create_persistence_subscriber(self): return PersistenceSubscriber(self.event_store) def close(self): if self.persistence_subscriber: self.persistence_subscriber.close() self.stored_event_repo = None self.event_store = None self.persistence_subscriber = None def __enter__(self): return self def __exit__(self, *_): self.close()
<commit_before>from abc import abstractmethod, ABCMeta from six import with_metaclass from eventsourcing.infrastructure.event_store import EventStore from eventsourcing.infrastructure.persistence_subscriber import PersistenceSubscriber class EventSourcingApplication(with_metaclass(ABCMeta)): def __init__(self, json_encoder_cls=None, json_decoder_cls=None, cipher=None, always_encrypt_stored_events=False): self.stored_event_repo = self.create_stored_event_repo(json_encoder_cls=json_encoder_cls, json_decoder_cls=json_decoder_cls, cipher=cipher, always_encrypt=always_encrypt_stored_events) self.event_store = self.create_event_store() self.persistence_subscriber = self.create_persistence_subscriber() @abstractmethod def create_stored_event_repo(self, **kwargs): """Returns an instance of a subclass of StoredEventRepository. :rtype: StoredEventRepository """ def create_event_store(self): return EventStore(self.stored_event_repo) def create_persistence_subscriber(self): return PersistenceSubscriber(self.event_store) def close(self): self.persistence_subscriber.close() self.stored_event_repo = None self.event_store = None self.persistence_subscriber = None def __enter__(self): return self def __exit__(self, *_): self.close() <commit_msg>Allow to disable events persistence at app class<commit_after>from abc import abstractmethod, ABCMeta from six import with_metaclass from eventsourcing.infrastructure.event_store import EventStore from eventsourcing.infrastructure.persistence_subscriber import PersistenceSubscriber class EventSourcingApplication(with_metaclass(ABCMeta)): persist_events = True def __init__(self, json_encoder_cls=None, json_decoder_cls=None, cipher=None, always_encrypt_stored_events=False): self.stored_event_repo = self.create_stored_event_repo(json_encoder_cls=json_encoder_cls, json_decoder_cls=json_decoder_cls, cipher=cipher, always_encrypt=always_encrypt_stored_events) self.event_store = self.create_event_store() if self.persist_events: self.persistence_subscriber = self.create_persistence_subscriber() else: self.persistence_subscriber = None @abstractmethod def create_stored_event_repo(self, **kwargs): """Returns an instance of a subclass of StoredEventRepository. :rtype: StoredEventRepository """ def create_event_store(self): return EventStore(self.stored_event_repo) def create_persistence_subscriber(self): return PersistenceSubscriber(self.event_store) def close(self): if self.persistence_subscriber: self.persistence_subscriber.close() self.stored_event_repo = None self.event_store = None self.persistence_subscriber = None def __enter__(self): return self def __exit__(self, *_): self.close()
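A note on the pattern in the record above: the change adds persist_events = True as a class attribute so that subclasses can switch off creation of the PersistenceSubscriber without overriding __init__. The standalone sketch below uses toy class names (not taken from the eventsourcing package) and just shows the same class-level feature-toggle idiom in isolation.

class Subscriber:
    def close(self):
        pass

class Application:
    persist_events = True  # subclasses may override this class attribute

    def __init__(self):
        # The flag is looked up through the instance, so a subclass
        # override takes effect here without any __init__ changes.
        self.persistence_subscriber = Subscriber() if self.persist_events else None

class NonPersistingApplication(Application):
    persist_events = False  # no subscriber is created for this subclass

assert Application().persistence_subscriber is not None
assert NonPersistingApplication().persistence_subscriber is None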
bfaf081b0e3c3cb8a37270ca7c0a16d52795a3de
kozmic/auth/views.py
kozmic/auth/views.py
import github3 from flask import render_template, current_app, redirect, url_for from flask.ext.login import login_user, logout_user, login_required from kozmic import db from kozmic.models import User from . import bp @bp.route('/_auth/auth-callback/') @bp.github_oauth_app.authorized_handler def auth_callback(response): access_token = response['access_token'] gh = github3.login(token=access_token) gh_user = gh.user() user = (User.query.filter_by(gh_login=gh_user.login).first() or User(gh_id=gh_user.id, gh_name=gh_user.name, gh_login=gh_user.login, gh_avatar_url=gh_user.avatar_url, gh_access_token=access_token, email=gh_user.email)) db.session.add(user) db.session.commit() login_user(user, remember=True) return redirect(url_for('projects.index')) @bp.route('/_auth/') def auth(): callback_url = url_for('.auth_callback', _external=True) return bp.github_oauth_app.authorize(callback=callback_url) @bp.route('/login/') def login(): return render_template('auth/login.html') @bp.route('/logout/') @login_required def logout(): logout_user() return redirect(url_for('index'))
import github3 from flask import render_template, current_app, redirect, url_for from flask.ext.login import login_user, logout_user, login_required from kozmic import db from kozmic.models import User from . import bp @bp.route('/_auth/auth-callback/') @bp.github_oauth_app.authorized_handler def auth_callback(response): access_token = response['access_token'] gh = github3.login(token=access_token) gh_user = gh.user() user = (User.query.filter_by(gh_login=gh_user.login).first() or User(gh_id=gh_user.id, gh_name=gh_user.name, gh_login=gh_user.login, gh_avatar_url=gh_user.avatar_url, email=gh_user.email)) user.gh_access_token = access_token db.session.add(user) db.session.commit() login_user(user, remember=True) return redirect(url_for('projects.index')) @bp.route('/_auth/') def auth(): callback_url = url_for('.auth_callback', _external=True) return bp.github_oauth_app.authorize(callback=callback_url) @bp.route('/login/') def login(): return render_template('auth/login.html') @bp.route('/logout/') @login_required def logout(): logout_user() return redirect(url_for('index'))
Update GitHub access token on each login
Update GitHub access token on each login
Python
bsd-3-clause
aromanovich/kozmic-ci,abak-press/kozmic-ci,abak-press/kozmic-ci,abak-press/kozmic-ci,aromanovich/kozmic-ci,aromanovich/kozmic-ci,artofhuman/kozmic-ci,abak-press/kozmic-ci,aromanovich/kozmic-ci,artofhuman/kozmic-ci,artofhuman/kozmic-ci,artofhuman/kozmic-ci
import github3 from flask import render_template, current_app, redirect, url_for from flask.ext.login import login_user, logout_user, login_required from kozmic import db from kozmic.models import User from . import bp @bp.route('/_auth/auth-callback/') @bp.github_oauth_app.authorized_handler def auth_callback(response): access_token = response['access_token'] gh = github3.login(token=access_token) gh_user = gh.user() user = (User.query.filter_by(gh_login=gh_user.login).first() or User(gh_id=gh_user.id, gh_name=gh_user.name, gh_login=gh_user.login, gh_avatar_url=gh_user.avatar_url, gh_access_token=access_token, email=gh_user.email)) db.session.add(user) db.session.commit() login_user(user, remember=True) return redirect(url_for('projects.index')) @bp.route('/_auth/') def auth(): callback_url = url_for('.auth_callback', _external=True) return bp.github_oauth_app.authorize(callback=callback_url) @bp.route('/login/') def login(): return render_template('auth/login.html') @bp.route('/logout/') @login_required def logout(): logout_user() return redirect(url_for('index')) Update GitHub access token on each login
import github3 from flask import render_template, current_app, redirect, url_for from flask.ext.login import login_user, logout_user, login_required from kozmic import db from kozmic.models import User from . import bp @bp.route('/_auth/auth-callback/') @bp.github_oauth_app.authorized_handler def auth_callback(response): access_token = response['access_token'] gh = github3.login(token=access_token) gh_user = gh.user() user = (User.query.filter_by(gh_login=gh_user.login).first() or User(gh_id=gh_user.id, gh_name=gh_user.name, gh_login=gh_user.login, gh_avatar_url=gh_user.avatar_url, email=gh_user.email)) user.gh_access_token = access_token db.session.add(user) db.session.commit() login_user(user, remember=True) return redirect(url_for('projects.index')) @bp.route('/_auth/') def auth(): callback_url = url_for('.auth_callback', _external=True) return bp.github_oauth_app.authorize(callback=callback_url) @bp.route('/login/') def login(): return render_template('auth/login.html') @bp.route('/logout/') @login_required def logout(): logout_user() return redirect(url_for('index'))
<commit_before>import github3 from flask import render_template, current_app, redirect, url_for from flask.ext.login import login_user, logout_user, login_required from kozmic import db from kozmic.models import User from . import bp @bp.route('/_auth/auth-callback/') @bp.github_oauth_app.authorized_handler def auth_callback(response): access_token = response['access_token'] gh = github3.login(token=access_token) gh_user = gh.user() user = (User.query.filter_by(gh_login=gh_user.login).first() or User(gh_id=gh_user.id, gh_name=gh_user.name, gh_login=gh_user.login, gh_avatar_url=gh_user.avatar_url, gh_access_token=access_token, email=gh_user.email)) db.session.add(user) db.session.commit() login_user(user, remember=True) return redirect(url_for('projects.index')) @bp.route('/_auth/') def auth(): callback_url = url_for('.auth_callback', _external=True) return bp.github_oauth_app.authorize(callback=callback_url) @bp.route('/login/') def login(): return render_template('auth/login.html') @bp.route('/logout/') @login_required def logout(): logout_user() return redirect(url_for('index')) <commit_msg>Update GitHub access token on each login<commit_after>
import github3 from flask import render_template, current_app, redirect, url_for from flask.ext.login import login_user, logout_user, login_required from kozmic import db from kozmic.models import User from . import bp @bp.route('/_auth/auth-callback/') @bp.github_oauth_app.authorized_handler def auth_callback(response): access_token = response['access_token'] gh = github3.login(token=access_token) gh_user = gh.user() user = (User.query.filter_by(gh_login=gh_user.login).first() or User(gh_id=gh_user.id, gh_name=gh_user.name, gh_login=gh_user.login, gh_avatar_url=gh_user.avatar_url, email=gh_user.email)) user.gh_access_token = access_token db.session.add(user) db.session.commit() login_user(user, remember=True) return redirect(url_for('projects.index')) @bp.route('/_auth/') def auth(): callback_url = url_for('.auth_callback', _external=True) return bp.github_oauth_app.authorize(callback=callback_url) @bp.route('/login/') def login(): return render_template('auth/login.html') @bp.route('/logout/') @login_required def logout(): logout_user() return redirect(url_for('index'))
import github3 from flask import render_template, current_app, redirect, url_for from flask.ext.login import login_user, logout_user, login_required from kozmic import db from kozmic.models import User from . import bp @bp.route('/_auth/auth-callback/') @bp.github_oauth_app.authorized_handler def auth_callback(response): access_token = response['access_token'] gh = github3.login(token=access_token) gh_user = gh.user() user = (User.query.filter_by(gh_login=gh_user.login).first() or User(gh_id=gh_user.id, gh_name=gh_user.name, gh_login=gh_user.login, gh_avatar_url=gh_user.avatar_url, gh_access_token=access_token, email=gh_user.email)) db.session.add(user) db.session.commit() login_user(user, remember=True) return redirect(url_for('projects.index')) @bp.route('/_auth/') def auth(): callback_url = url_for('.auth_callback', _external=True) return bp.github_oauth_app.authorize(callback=callback_url) @bp.route('/login/') def login(): return render_template('auth/login.html') @bp.route('/logout/') @login_required def logout(): logout_user() return redirect(url_for('index')) Update GitHub access token on each loginimport github3 from flask import render_template, current_app, redirect, url_for from flask.ext.login import login_user, logout_user, login_required from kozmic import db from kozmic.models import User from . import bp @bp.route('/_auth/auth-callback/') @bp.github_oauth_app.authorized_handler def auth_callback(response): access_token = response['access_token'] gh = github3.login(token=access_token) gh_user = gh.user() user = (User.query.filter_by(gh_login=gh_user.login).first() or User(gh_id=gh_user.id, gh_name=gh_user.name, gh_login=gh_user.login, gh_avatar_url=gh_user.avatar_url, email=gh_user.email)) user.gh_access_token = access_token db.session.add(user) db.session.commit() login_user(user, remember=True) return redirect(url_for('projects.index')) @bp.route('/_auth/') def auth(): callback_url = url_for('.auth_callback', _external=True) return bp.github_oauth_app.authorize(callback=callback_url) @bp.route('/login/') def login(): return render_template('auth/login.html') @bp.route('/logout/') @login_required def logout(): logout_user() return redirect(url_for('index'))
<commit_before>import github3 from flask import render_template, current_app, redirect, url_for from flask.ext.login import login_user, logout_user, login_required from kozmic import db from kozmic.models import User from . import bp @bp.route('/_auth/auth-callback/') @bp.github_oauth_app.authorized_handler def auth_callback(response): access_token = response['access_token'] gh = github3.login(token=access_token) gh_user = gh.user() user = (User.query.filter_by(gh_login=gh_user.login).first() or User(gh_id=gh_user.id, gh_name=gh_user.name, gh_login=gh_user.login, gh_avatar_url=gh_user.avatar_url, gh_access_token=access_token, email=gh_user.email)) db.session.add(user) db.session.commit() login_user(user, remember=True) return redirect(url_for('projects.index')) @bp.route('/_auth/') def auth(): callback_url = url_for('.auth_callback', _external=True) return bp.github_oauth_app.authorize(callback=callback_url) @bp.route('/login/') def login(): return render_template('auth/login.html') @bp.route('/logout/') @login_required def logout(): logout_user() return redirect(url_for('index')) <commit_msg>Update GitHub access token on each login<commit_after>import github3 from flask import render_template, current_app, redirect, url_for from flask.ext.login import login_user, logout_user, login_required from kozmic import db from kozmic.models import User from . import bp @bp.route('/_auth/auth-callback/') @bp.github_oauth_app.authorized_handler def auth_callback(response): access_token = response['access_token'] gh = github3.login(token=access_token) gh_user = gh.user() user = (User.query.filter_by(gh_login=gh_user.login).first() or User(gh_id=gh_user.id, gh_name=gh_user.name, gh_login=gh_user.login, gh_avatar_url=gh_user.avatar_url, email=gh_user.email)) user.gh_access_token = access_token db.session.add(user) db.session.commit() login_user(user, remember=True) return redirect(url_for('projects.index')) @bp.route('/_auth/') def auth(): callback_url = url_for('.auth_callback', _external=True) return bp.github_oauth_app.authorize(callback=callback_url) @bp.route('/login/') def login(): return render_template('auth/login.html') @bp.route('/logout/') @login_required def logout(): logout_user() return redirect(url_for('index'))
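Worth spelling out why the kozmic change above works: with the "query ... or User(...)" idiom, the constructor keyword arguments only run when no existing row is found, so a value that must be refreshed on every login has to be assigned after the lookup. A toy illustration with simplified classes (not the actual kozmic models):

class User:
    def __init__(self, login, token=None):
        self.login = login
        self.token = token

users_by_login = {"alice": User("alice", token="stale-token")}

def handle_login(login, fresh_token):
    user = users_by_login.get(login) or User(login)
    user.token = fresh_token            # unconditional update, mirrors the commit
    users_by_login[login] = user
    return user

assert handle_login("alice", "new-token").token == "new-token"    # existing user refreshed
assert handle_login("bob", "first-token").token == "first-token"  # new user still gets one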
0d93a0dff18165c36788a140af40208ec48d505f
prep.py
prep.py
from os import listdir from os.path import join def file_paths(data_path): return [join(data_path, name) for name in listdir(data_path)] def training_data(data_path): paths = file_paths(data_path) raw_text = [ open(path, 'r').read() for path in paths] dataX = [] dataY = [] for text in raw_text: data = split_data(text) dataX.append(data[0]) dataY.append(data[1]) return dataX, dataY # split inputs and outputs from data def split_data(text): lines = text.split('\n') # first line without first character (#) input_text = lines.pop(0)[1:] # the rest of the text output_text = '\n'.join(lines) return input_text, output_text
from os import listdir from os.path import join import re def file_paths(data_path): return [join(data_path, name) for name in listdir(data_path)] def training_data(data_path): paths = file_paths(data_path) raw_text = [open(path, 'r').read() for path in paths] dataX = [] dataY = [] for text in raw_text: data = split_data(text) dataX.append(split_to_words(data[0])) dataY.append(split_to_words(data[1])) return dataX, dataY # split inputs and outputs from data def split_data(text): lines = text.split('\n') # first line without first character (#) input_text = lines.pop(0)[1:] # the rest of the text output_text = '\n'.join(lines) return input_text, output_text def split_to_words(sentence): return re.findall(r"[\w']+|[.,!?;:()/\[\]]/", sentence)
Transform sentences to arrays of words
Transform sentences to arrays of words
Python
mit
vdragan1993/python-coder
from os import listdir from os.path import join def file_paths(data_path): return [join(data_path, name) for name in listdir(data_path)] def training_data(data_path): paths = file_paths(data_path) raw_text = [ open(path, 'r').read() for path in paths] dataX = [] dataY = [] for text in raw_text: data = split_data(text) dataX.append(data[0]) dataY.append(data[1]) return dataX, dataY # split inputs and outputs from data def split_data(text): lines = text.split('\n') # first line without first character (#) input_text = lines.pop(0)[1:] # the rest of the text output_text = '\n'.join(lines) return input_text, output_text Transform sentences to arrays of words
from os import listdir from os.path import join import re def file_paths(data_path): return [join(data_path, name) for name in listdir(data_path)] def training_data(data_path): paths = file_paths(data_path) raw_text = [open(path, 'r').read() for path in paths] dataX = [] dataY = [] for text in raw_text: data = split_data(text) dataX.append(split_to_words(data[0])) dataY.append(split_to_words(data[1])) return dataX, dataY # split inputs and outputs from data def split_data(text): lines = text.split('\n') # first line without first character (#) input_text = lines.pop(0)[1:] # the rest of the text output_text = '\n'.join(lines) return input_text, output_text def split_to_words(sentence): return re.findall(r"[\w']+|[.,!?;:()/\[\]]/", sentence)
<commit_before>from os import listdir from os.path import join def file_paths(data_path): return [join(data_path, name) for name in listdir(data_path)] def training_data(data_path): paths = file_paths(data_path) raw_text = [ open(path, 'r').read() for path in paths] dataX = [] dataY = [] for text in raw_text: data = split_data(text) dataX.append(data[0]) dataY.append(data[1]) return dataX, dataY # split inputs and outputs from data def split_data(text): lines = text.split('\n') # first line without first character (#) input_text = lines.pop(0)[1:] # the rest of the text output_text = '\n'.join(lines) return input_text, output_text <commit_msg>Transform sentences to arrays of words<commit_after>
from os import listdir from os.path import join import re def file_paths(data_path): return [join(data_path, name) for name in listdir(data_path)] def training_data(data_path): paths = file_paths(data_path) raw_text = [open(path, 'r').read() for path in paths] dataX = [] dataY = [] for text in raw_text: data = split_data(text) dataX.append(split_to_words(data[0])) dataY.append(split_to_words(data[1])) return dataX, dataY # split inputs and outputs from data def split_data(text): lines = text.split('\n') # first line without first character (#) input_text = lines.pop(0)[1:] # the rest of the text output_text = '\n'.join(lines) return input_text, output_text def split_to_words(sentence): return re.findall(r"[\w']+|[.,!?;:()/\[\]]/", sentence)
from os import listdir from os.path import join def file_paths(data_path): return [join(data_path, name) for name in listdir(data_path)] def training_data(data_path): paths = file_paths(data_path) raw_text = [ open(path, 'r').read() for path in paths] dataX = [] dataY = [] for text in raw_text: data = split_data(text) dataX.append(data[0]) dataY.append(data[1]) return dataX, dataY # split inputs and outputs from data def split_data(text): lines = text.split('\n') # first line without first character (#) input_text = lines.pop(0)[1:] # the rest of the text output_text = '\n'.join(lines) return input_text, output_text Transform sentences to arrays of wordsfrom os import listdir from os.path import join import re def file_paths(data_path): return [join(data_path, name) for name in listdir(data_path)] def training_data(data_path): paths = file_paths(data_path) raw_text = [open(path, 'r').read() for path in paths] dataX = [] dataY = [] for text in raw_text: data = split_data(text) dataX.append(split_to_words(data[0])) dataY.append(split_to_words(data[1])) return dataX, dataY # split inputs and outputs from data def split_data(text): lines = text.split('\n') # first line without first character (#) input_text = lines.pop(0)[1:] # the rest of the text output_text = '\n'.join(lines) return input_text, output_text def split_to_words(sentence): return re.findall(r"[\w']+|[.,!?;:()/\[\]]/", sentence)
<commit_before>from os import listdir from os.path import join def file_paths(data_path): return [join(data_path, name) for name in listdir(data_path)] def training_data(data_path): paths = file_paths(data_path) raw_text = [ open(path, 'r').read() for path in paths] dataX = [] dataY = [] for text in raw_text: data = split_data(text) dataX.append(data[0]) dataY.append(data[1]) return dataX, dataY # split inputs and outputs from data def split_data(text): lines = text.split('\n') # first line without first character (#) input_text = lines.pop(0)[1:] # the rest of the text output_text = '\n'.join(lines) return input_text, output_text <commit_msg>Transform sentences to arrays of words<commit_after>from os import listdir from os.path import join import re def file_paths(data_path): return [join(data_path, name) for name in listdir(data_path)] def training_data(data_path): paths = file_paths(data_path) raw_text = [open(path, 'r').read() for path in paths] dataX = [] dataY = [] for text in raw_text: data = split_data(text) dataX.append(split_to_words(data[0])) dataY.append(split_to_words(data[1])) return dataX, dataY # split inputs and outputs from data def split_data(text): lines = text.split('\n') # first line without first character (#) input_text = lines.pop(0)[1:] # the rest of the text output_text = '\n'.join(lines) return input_text, output_text def split_to_words(sentence): return re.findall(r"[\w']+|[.,!?;:()/\[\]]/", sentence)
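For context on the tokenizer added above: re.findall returns every non-overlapping match of the pattern, so alternating a "run of word characters and apostrophes" branch with a "single punctuation character" branch yields a flat list of word and punctuation tokens. The pattern below is a deliberately simplified stand-in, not the exact regex from the commit:

import re

simplified = r"[\w']+|[.,!?;:()\[\]]"
tokens = re.findall(simplified, "Don't panic, it works (mostly)!")
print(tokens)
# ["Don't", 'panic', ',', 'it', 'works', '(', 'mostly', ')', '!']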
4d01eb0c1b11680d463d4fcb0888fac4ab6c45c8
panoptes/utils/data.py
panoptes/utils/data.py
import os import argparse from astropy.utils import data from astroplan import download_IERS_A def download_all_files(data_folder="{}/astrometry/data".format(os.getenv('PANDIR'))): download_IERS_A() for i in range(4214, 4219): fn = 'index-{}.fits'.format(i) dest = "{}/{}".format(data_folder, fn) if not os.path.exists(dest): url = "http://data.astrometry.net/4200/{}".format(fn) df = data.download_file(url) try: os.rename(df, dest) except OSError as e: print("Problem saving. (Maybe permissions?): {}".format(e)) if __name__ == '__main__': parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('--folder', help='Folder to place astrometry data') args = parser.parse_args() if not os.path.exists(args.folder): print("{} does not exist.".format(args.folder)) download_all_files(data_folder=args.folder)
import os import shutil import argparse from astropy.utils import data from astroplan import download_IERS_A def download_all_files(data_folder="{}/astrometry/data".format(os.getenv('PANDIR'))): download_IERS_A() for i in range(4214, 4219): fn = 'index-{}.fits'.format(i) dest = "{}/{}".format(data_folder, fn) if not os.path.exists(dest): url = "http://data.astrometry.net/4200/{}".format(fn) df = data.download_file(url) try: shutil.move(df, dest) except OSError as e: print("Problem saving. (Maybe permissions?): {}".format(e)) if __name__ == '__main__': parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('--folder', help='Folder to place astrometry data') args = parser.parse_args() if not os.path.exists(args.folder): print("{} does not exist.".format(args.folder)) download_all_files(data_folder=args.folder)
Use shutil instead of `os.rename`
Use shutil instead of `os.rename`
Python
mit
panoptes/POCS,AstroHuntsman/POCS,joshwalawender/POCS,AstroHuntsman/POCS,joshwalawender/POCS,AstroHuntsman/POCS,panoptes/POCS,AstroHuntsman/POCS,panoptes/POCS,joshwalawender/POCS,panoptes/POCS
import os import argparse from astropy.utils import data from astroplan import download_IERS_A def download_all_files(data_folder="{}/astrometry/data".format(os.getenv('PANDIR'))): download_IERS_A() for i in range(4214, 4219): fn = 'index-{}.fits'.format(i) dest = "{}/{}".format(data_folder, fn) if not os.path.exists(dest): url = "http://data.astrometry.net/4200/{}".format(fn) df = data.download_file(url) try: os.rename(df, dest) except OSError as e: print("Problem saving. (Maybe permissions?): {}".format(e)) if __name__ == '__main__': parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('--folder', help='Folder to place astrometry data') args = parser.parse_args() if not os.path.exists(args.folder): print("{} does not exist.".format(args.folder)) download_all_files(data_folder=args.folder) Use shutil instead of `os.rename`
import os import shutil import argparse from astropy.utils import data from astroplan import download_IERS_A def download_all_files(data_folder="{}/astrometry/data".format(os.getenv('PANDIR'))): download_IERS_A() for i in range(4214, 4219): fn = 'index-{}.fits'.format(i) dest = "{}/{}".format(data_folder, fn) if not os.path.exists(dest): url = "http://data.astrometry.net/4200/{}".format(fn) df = data.download_file(url) try: shutil.move(df, dest) except OSError as e: print("Problem saving. (Maybe permissions?): {}".format(e)) if __name__ == '__main__': parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('--folder', help='Folder to place astrometry data') args = parser.parse_args() if not os.path.exists(args.folder): print("{} does not exist.".format(args.folder)) download_all_files(data_folder=args.folder)
<commit_before>import os import argparse from astropy.utils import data from astroplan import download_IERS_A def download_all_files(data_folder="{}/astrometry/data".format(os.getenv('PANDIR'))): download_IERS_A() for i in range(4214, 4219): fn = 'index-{}.fits'.format(i) dest = "{}/{}".format(data_folder, fn) if not os.path.exists(dest): url = "http://data.astrometry.net/4200/{}".format(fn) df = data.download_file(url) try: os.rename(df, dest) except OSError as e: print("Problem saving. (Maybe permissions?): {}".format(e)) if __name__ == '__main__': parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('--folder', help='Folder to place astrometry data') args = parser.parse_args() if not os.path.exists(args.folder): print("{} does not exist.".format(args.folder)) download_all_files(data_folder=args.folder) <commit_msg>Use shutil instead of `os.rename`<commit_after>
import os import shutil import argparse from astropy.utils import data from astroplan import download_IERS_A def download_all_files(data_folder="{}/astrometry/data".format(os.getenv('PANDIR'))): download_IERS_A() for i in range(4214, 4219): fn = 'index-{}.fits'.format(i) dest = "{}/{}".format(data_folder, fn) if not os.path.exists(dest): url = "http://data.astrometry.net/4200/{}".format(fn) df = data.download_file(url) try: shutil.move(df, dest) except OSError as e: print("Problem saving. (Maybe permissions?): {}".format(e)) if __name__ == '__main__': parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('--folder', help='Folder to place astrometry data') args = parser.parse_args() if not os.path.exists(args.folder): print("{} does not exist.".format(args.folder)) download_all_files(data_folder=args.folder)
import os import argparse from astropy.utils import data from astroplan import download_IERS_A def download_all_files(data_folder="{}/astrometry/data".format(os.getenv('PANDIR'))): download_IERS_A() for i in range(4214, 4219): fn = 'index-{}.fits'.format(i) dest = "{}/{}".format(data_folder, fn) if not os.path.exists(dest): url = "http://data.astrometry.net/4200/{}".format(fn) df = data.download_file(url) try: os.rename(df, dest) except OSError as e: print("Problem saving. (Maybe permissions?): {}".format(e)) if __name__ == '__main__': parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('--folder', help='Folder to place astrometry data') args = parser.parse_args() if not os.path.exists(args.folder): print("{} does not exist.".format(args.folder)) download_all_files(data_folder=args.folder) Use shutil instead of `os.rename`import os import shutil import argparse from astropy.utils import data from astroplan import download_IERS_A def download_all_files(data_folder="{}/astrometry/data".format(os.getenv('PANDIR'))): download_IERS_A() for i in range(4214, 4219): fn = 'index-{}.fits'.format(i) dest = "{}/{}".format(data_folder, fn) if not os.path.exists(dest): url = "http://data.astrometry.net/4200/{}".format(fn) df = data.download_file(url) try: shutil.move(df, dest) except OSError as e: print("Problem saving. (Maybe permissions?): {}".format(e)) if __name__ == '__main__': parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('--folder', help='Folder to place astrometry data') args = parser.parse_args() if not os.path.exists(args.folder): print("{} does not exist.".format(args.folder)) download_all_files(data_folder=args.folder)
<commit_before>import os import argparse from astropy.utils import data from astroplan import download_IERS_A def download_all_files(data_folder="{}/astrometry/data".format(os.getenv('PANDIR'))): download_IERS_A() for i in range(4214, 4219): fn = 'index-{}.fits'.format(i) dest = "{}/{}".format(data_folder, fn) if not os.path.exists(dest): url = "http://data.astrometry.net/4200/{}".format(fn) df = data.download_file(url) try: os.rename(df, dest) except OSError as e: print("Problem saving. (Maybe permissions?): {}".format(e)) if __name__ == '__main__': parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('--folder', help='Folder to place astrometry data') args = parser.parse_args() if not os.path.exists(args.folder): print("{} does not exist.".format(args.folder)) download_all_files(data_folder=args.folder) <commit_msg>Use shutil instead of `os.rename`<commit_after>import os import shutil import argparse from astropy.utils import data from astroplan import download_IERS_A def download_all_files(data_folder="{}/astrometry/data".format(os.getenv('PANDIR'))): download_IERS_A() for i in range(4214, 4219): fn = 'index-{}.fits'.format(i) dest = "{}/{}".format(data_folder, fn) if not os.path.exists(dest): url = "http://data.astrometry.net/4200/{}".format(fn) df = data.download_file(url) try: shutil.move(df, dest) except OSError as e: print("Problem saving. (Maybe permissions?): {}".format(e)) if __name__ == '__main__': parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('--folder', help='Folder to place astrometry data') args = parser.parse_args() if not os.path.exists(args.folder): print("{} does not exist.".format(args.folder)) download_all_files(data_folder=args.folder)
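The rationale behind the swap above is a general Python fact rather than anything specific to POCS: os.rename cannot move a file across filesystems and raises OSError with errno EXDEV (a file downloaded into a temporary directory often lives on a different filesystem from the destination), whereas shutil.move falls back to copy-and-delete in that case. A defensive variant of the same idea:

import errno
import os
import shutil

def move_file(src, dest):
    try:
        os.rename(src, dest)        # cheap when src and dest share a filesystem
    except OSError as e:
        if e.errno != errno.EXDEV:  # only handle the cross-device case
            raise
        shutil.move(src, dest)      # copies the data, then removes the source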
8556437ee02de028ec5de3b867abaab82533cb91
keystone/tests/unit/common/test_manager.py
keystone/tests/unit/common/test_manager.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from keystone import catalog from keystone.common import manager from keystone.tests import unit class TestCreateLegacyDriver(unit.BaseTestCase): @mock.patch('oslo_log.versionutils.report_deprecated_feature') def test_class_is_properly_deprecated(self, mock_reporter): Driver = manager.create_legacy_driver(catalog.CatalogDriverV8) # NOTE(dstanek): I want to subvert the requirement for this # class to implement all of the abstract methods. Driver.__abstractmethods__ = set() impl = Driver() details = { 'as_of': 'Liberty', 'what': 'keystone.catalog.core.Driver', 'in_favor_of': 'keystone.catalog.core.CatalogDriverV8', 'remove_in': 'N', } mock_reporter.assert_called_with(mock.ANY, mock.ANY, details) self.assertIsInstance(impl, catalog.CatalogDriverV8)
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from keystone import catalog from keystone.common import manager from keystone.tests import unit class TestCreateLegacyDriver(unit.BaseTestCase): @mock.patch('oslo_log.versionutils.report_deprecated_feature') def test_class_is_properly_deprecated(self, mock_reporter): Driver = manager.create_legacy_driver(catalog.CatalogDriverV8) # NOTE(dstanek): I want to subvert the requirement for this # class to implement all of the abstract methods. Driver.__abstractmethods__ = set() impl = Driver() details = { 'as_of': 'Liberty', 'what': 'keystone.catalog.core.Driver', 'in_favor_of': 'keystone.catalog.core.CatalogDriverV8', 'remove_in': mock.ANY, } mock_reporter.assert_called_with(mock.ANY, mock.ANY, details) self.assertEqual('N', mock_reporter.call_args[0][2]['remove_in'][0]) self.assertIsInstance(impl, catalog.CatalogDriverV8)
Correct test to support changing N release name
Correct test to support changing N release name oslo.log is going to change to use Newton rather than N so this test should not make an assumption about the way that versionutils.deprecated is calling report_deprecated_feature. Change-Id: I06aa6d085232376811f73597b2d84b5174bc7a8d Closes-Bug: 1561121
Python
apache-2.0
ilay09/keystone,rajalokan/keystone,openstack/keystone,mahak/keystone,klmitch/keystone,openstack/keystone,ilay09/keystone,cernops/keystone,rajalokan/keystone,cernops/keystone,mahak/keystone,ilay09/keystone,rajalokan/keystone,klmitch/keystone,mahak/keystone,openstack/keystone
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from keystone import catalog from keystone.common import manager from keystone.tests import unit class TestCreateLegacyDriver(unit.BaseTestCase): @mock.patch('oslo_log.versionutils.report_deprecated_feature') def test_class_is_properly_deprecated(self, mock_reporter): Driver = manager.create_legacy_driver(catalog.CatalogDriverV8) # NOTE(dstanek): I want to subvert the requirement for this # class to implement all of the abstract methods. Driver.__abstractmethods__ = set() impl = Driver() details = { 'as_of': 'Liberty', 'what': 'keystone.catalog.core.Driver', 'in_favor_of': 'keystone.catalog.core.CatalogDriverV8', 'remove_in': 'N', } mock_reporter.assert_called_with(mock.ANY, mock.ANY, details) self.assertIsInstance(impl, catalog.CatalogDriverV8) Correct test to support changing N release name oslo.log is going to change to use Newton rather than N so this test should not make an assumption about the way that versionutils.deprecated is calling report_deprecated_feature. Change-Id: I06aa6d085232376811f73597b2d84b5174bc7a8d Closes-Bug: 1561121
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from keystone import catalog from keystone.common import manager from keystone.tests import unit class TestCreateLegacyDriver(unit.BaseTestCase): @mock.patch('oslo_log.versionutils.report_deprecated_feature') def test_class_is_properly_deprecated(self, mock_reporter): Driver = manager.create_legacy_driver(catalog.CatalogDriverV8) # NOTE(dstanek): I want to subvert the requirement for this # class to implement all of the abstract methods. Driver.__abstractmethods__ = set() impl = Driver() details = { 'as_of': 'Liberty', 'what': 'keystone.catalog.core.Driver', 'in_favor_of': 'keystone.catalog.core.CatalogDriverV8', 'remove_in': mock.ANY, } mock_reporter.assert_called_with(mock.ANY, mock.ANY, details) self.assertEqual('N', mock_reporter.call_args[0][2]['remove_in'][0]) self.assertIsInstance(impl, catalog.CatalogDriverV8)
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from keystone import catalog from keystone.common import manager from keystone.tests import unit class TestCreateLegacyDriver(unit.BaseTestCase): @mock.patch('oslo_log.versionutils.report_deprecated_feature') def test_class_is_properly_deprecated(self, mock_reporter): Driver = manager.create_legacy_driver(catalog.CatalogDriverV8) # NOTE(dstanek): I want to subvert the requirement for this # class to implement all of the abstract methods. Driver.__abstractmethods__ = set() impl = Driver() details = { 'as_of': 'Liberty', 'what': 'keystone.catalog.core.Driver', 'in_favor_of': 'keystone.catalog.core.CatalogDriverV8', 'remove_in': 'N', } mock_reporter.assert_called_with(mock.ANY, mock.ANY, details) self.assertIsInstance(impl, catalog.CatalogDriverV8) <commit_msg>Correct test to support changing N release name oslo.log is going to change to use Newton rather than N so this test should not make an assumption about the way that versionutils.deprecated is calling report_deprecated_feature. Change-Id: I06aa6d085232376811f73597b2d84b5174bc7a8d Closes-Bug: 1561121<commit_after>
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from keystone import catalog from keystone.common import manager from keystone.tests import unit class TestCreateLegacyDriver(unit.BaseTestCase): @mock.patch('oslo_log.versionutils.report_deprecated_feature') def test_class_is_properly_deprecated(self, mock_reporter): Driver = manager.create_legacy_driver(catalog.CatalogDriverV8) # NOTE(dstanek): I want to subvert the requirement for this # class to implement all of the abstract methods. Driver.__abstractmethods__ = set() impl = Driver() details = { 'as_of': 'Liberty', 'what': 'keystone.catalog.core.Driver', 'in_favor_of': 'keystone.catalog.core.CatalogDriverV8', 'remove_in': mock.ANY, } mock_reporter.assert_called_with(mock.ANY, mock.ANY, details) self.assertEqual('N', mock_reporter.call_args[0][2]['remove_in'][0]) self.assertIsInstance(impl, catalog.CatalogDriverV8)
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from keystone import catalog from keystone.common import manager from keystone.tests import unit class TestCreateLegacyDriver(unit.BaseTestCase): @mock.patch('oslo_log.versionutils.report_deprecated_feature') def test_class_is_properly_deprecated(self, mock_reporter): Driver = manager.create_legacy_driver(catalog.CatalogDriverV8) # NOTE(dstanek): I want to subvert the requirement for this # class to implement all of the abstract methods. Driver.__abstractmethods__ = set() impl = Driver() details = { 'as_of': 'Liberty', 'what': 'keystone.catalog.core.Driver', 'in_favor_of': 'keystone.catalog.core.CatalogDriverV8', 'remove_in': 'N', } mock_reporter.assert_called_with(mock.ANY, mock.ANY, details) self.assertIsInstance(impl, catalog.CatalogDriverV8) Correct test to support changing N release name oslo.log is going to change to use Newton rather than N so this test should not make an assumption about the way that versionutils.deprecated is calling report_deprecated_feature. Change-Id: I06aa6d085232376811f73597b2d84b5174bc7a8d Closes-Bug: 1561121# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from keystone import catalog from keystone.common import manager from keystone.tests import unit class TestCreateLegacyDriver(unit.BaseTestCase): @mock.patch('oslo_log.versionutils.report_deprecated_feature') def test_class_is_properly_deprecated(self, mock_reporter): Driver = manager.create_legacy_driver(catalog.CatalogDriverV8) # NOTE(dstanek): I want to subvert the requirement for this # class to implement all of the abstract methods. Driver.__abstractmethods__ = set() impl = Driver() details = { 'as_of': 'Liberty', 'what': 'keystone.catalog.core.Driver', 'in_favor_of': 'keystone.catalog.core.CatalogDriverV8', 'remove_in': mock.ANY, } mock_reporter.assert_called_with(mock.ANY, mock.ANY, details) self.assertEqual('N', mock_reporter.call_args[0][2]['remove_in'][0]) self.assertIsInstance(impl, catalog.CatalogDriverV8)
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from keystone import catalog from keystone.common import manager from keystone.tests import unit class TestCreateLegacyDriver(unit.BaseTestCase): @mock.patch('oslo_log.versionutils.report_deprecated_feature') def test_class_is_properly_deprecated(self, mock_reporter): Driver = manager.create_legacy_driver(catalog.CatalogDriverV8) # NOTE(dstanek): I want to subvert the requirement for this # class to implement all of the abstract methods. Driver.__abstractmethods__ = set() impl = Driver() details = { 'as_of': 'Liberty', 'what': 'keystone.catalog.core.Driver', 'in_favor_of': 'keystone.catalog.core.CatalogDriverV8', 'remove_in': 'N', } mock_reporter.assert_called_with(mock.ANY, mock.ANY, details) self.assertIsInstance(impl, catalog.CatalogDriverV8) <commit_msg>Correct test to support changing N release name oslo.log is going to change to use Newton rather than N so this test should not make an assumption about the way that versionutils.deprecated is calling report_deprecated_feature. Change-Id: I06aa6d085232376811f73597b2d84b5174bc7a8d Closes-Bug: 1561121<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from keystone import catalog from keystone.common import manager from keystone.tests import unit class TestCreateLegacyDriver(unit.BaseTestCase): @mock.patch('oslo_log.versionutils.report_deprecated_feature') def test_class_is_properly_deprecated(self, mock_reporter): Driver = manager.create_legacy_driver(catalog.CatalogDriverV8) # NOTE(dstanek): I want to subvert the requirement for this # class to implement all of the abstract methods. Driver.__abstractmethods__ = set() impl = Driver() details = { 'as_of': 'Liberty', 'what': 'keystone.catalog.core.Driver', 'in_favor_of': 'keystone.catalog.core.CatalogDriverV8', 'remove_in': mock.ANY, } mock_reporter.assert_called_with(mock.ANY, mock.ANY, details) self.assertEqual('N', mock_reporter.call_args[0][2]['remove_in'][0]) self.assertIsInstance(impl, catalog.CatalogDriverV8)
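A short aside on the unittest.mock idiom the corrected test relies on (standard-library behaviour, not keystone-specific): mock.ANY compares equal to any value, so it loosens a single key of an otherwise exact assert_called_with, and call_args[0] exposes the positional arguments of the last call for finer-grained checks afterwards.

from unittest import mock

reporter = mock.Mock()
reporter("logger", "msg", {"remove_in": "Newton"})

# The dict containing mock.ANY still matches the exact call above.
reporter.assert_called_with(mock.ANY, mock.ANY, {"remove_in": mock.ANY})

details = reporter.call_args[0][2]      # third positional argument of the last call
assert details["remove_in"][0] == "N"   # only the first letter is pinned down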
014f7255ea62c748e0935bbb36e279a35626df38
kokki/cookbooks/ssh/libraries/resources.py
kokki/cookbooks/ssh/libraries/resources.py
__all__ = ["SSHKnownHost", "SSHAuthorizedKey"] import os.path from kokki import * class SSHKnownHost(Resource): provider = "*ssh.SSHKnownHostProvider" action = ForcedListArgument(default="include") host = ResourceArgument(default=lambda obj:obj.name) keytype = ResourceArgument() key = ResourceArgument() hashed = BooleanArgument(default=True) user = ResourceArgument() path = ResourceArgument() actions = Resource.actions + ["include", "exclude"] def validate(self): if not self.path: if not self.user: raise Fail("[%s] Either path or user is required" % self) self.path = os.path.join(ssh_path_for_user(self.user), "known_hosts") class SSHAuthorizedKey(Resource): provider = "*ssh.SSHAuthorizedKeyProvider" action = ForcedListArgument(default="include") keytype = ResourceArgument() key = ResourceArgument() user = ResourceArgument() path = ResourceArgument() actions = Resource.actions + ["include", "exclude"] def validate(self): if not self.path: if not self.user: raise Fail("[%s] Either path or user is required" % self) self.path = os.path.join(ssh_path_for_user(self.user), "authorized_keys")
__all__ = ["SSHKnownHost", "SSHAuthorizedKey"] import os.path from kokki import * class SSHKnownHost(Resource): provider = "*ssh.SSHKnownHostProvider" action = ForcedListArgument(default="include") host = ResourceArgument(default=lambda obj:obj.name) keytype = ResourceArgument() key = ResourceArgument() hashed = BooleanArgument(default=True) user = ResourceArgument() path = ResourceArgument() actions = Resource.actions + ["include", "exclude"] def validate(self): if not self.path: if not self.user: raise Fail("[%s] Either path or user is required" % self) self.path = os.path.join(ssh_path_for_user(self.user), "known_hosts") Directory(os.path.dirname(self.path), owner = self.user, mode = 0700) class SSHAuthorizedKey(Resource): provider = "*ssh.SSHAuthorizedKeyProvider" action = ForcedListArgument(default="include") keytype = ResourceArgument() key = ResourceArgument() user = ResourceArgument() path = ResourceArgument() actions = Resource.actions + ["include", "exclude"] def validate(self): if not self.path: if not self.user: raise Fail("[%s] Either path or user is required" % self) self.path = os.path.join(ssh_path_for_user(self.user), "authorized_keys") Directory(os.path.dirname(self.path), owner = self.user, mode = 0700)
Make sure ssh config directory exists
Make sure ssh config directory exists
Python
bsd-3-clause
samuel/kokki
__all__ = ["SSHKnownHost", "SSHAuthorizedKey"] import os.path from kokki import * class SSHKnownHost(Resource): provider = "*ssh.SSHKnownHostProvider" action = ForcedListArgument(default="include") host = ResourceArgument(default=lambda obj:obj.name) keytype = ResourceArgument() key = ResourceArgument() hashed = BooleanArgument(default=True) user = ResourceArgument() path = ResourceArgument() actions = Resource.actions + ["include", "exclude"] def validate(self): if not self.path: if not self.user: raise Fail("[%s] Either path or user is required" % self) self.path = os.path.join(ssh_path_for_user(self.user), "known_hosts") class SSHAuthorizedKey(Resource): provider = "*ssh.SSHAuthorizedKeyProvider" action = ForcedListArgument(default="include") keytype = ResourceArgument() key = ResourceArgument() user = ResourceArgument() path = ResourceArgument() actions = Resource.actions + ["include", "exclude"] def validate(self): if not self.path: if not self.user: raise Fail("[%s] Either path or user is required" % self) self.path = os.path.join(ssh_path_for_user(self.user), "authorized_keys") Make sure ssh config directory exists
__all__ = ["SSHKnownHost", "SSHAuthorizedKey"] import os.path from kokki import * class SSHKnownHost(Resource): provider = "*ssh.SSHKnownHostProvider" action = ForcedListArgument(default="include") host = ResourceArgument(default=lambda obj:obj.name) keytype = ResourceArgument() key = ResourceArgument() hashed = BooleanArgument(default=True) user = ResourceArgument() path = ResourceArgument() actions = Resource.actions + ["include", "exclude"] def validate(self): if not self.path: if not self.user: raise Fail("[%s] Either path or user is required" % self) self.path = os.path.join(ssh_path_for_user(self.user), "known_hosts") Directory(os.path.dirname(self.path), owner = self.user, mode = 0700) class SSHAuthorizedKey(Resource): provider = "*ssh.SSHAuthorizedKeyProvider" action = ForcedListArgument(default="include") keytype = ResourceArgument() key = ResourceArgument() user = ResourceArgument() path = ResourceArgument() actions = Resource.actions + ["include", "exclude"] def validate(self): if not self.path: if not self.user: raise Fail("[%s] Either path or user is required" % self) self.path = os.path.join(ssh_path_for_user(self.user), "authorized_keys") Directory(os.path.dirname(self.path), owner = self.user, mode = 0700)
<commit_before> __all__ = ["SSHKnownHost", "SSHAuthorizedKey"] import os.path from kokki import * class SSHKnownHost(Resource): provider = "*ssh.SSHKnownHostProvider" action = ForcedListArgument(default="include") host = ResourceArgument(default=lambda obj:obj.name) keytype = ResourceArgument() key = ResourceArgument() hashed = BooleanArgument(default=True) user = ResourceArgument() path = ResourceArgument() actions = Resource.actions + ["include", "exclude"] def validate(self): if not self.path: if not self.user: raise Fail("[%s] Either path or user is required" % self) self.path = os.path.join(ssh_path_for_user(self.user), "known_hosts") class SSHAuthorizedKey(Resource): provider = "*ssh.SSHAuthorizedKeyProvider" action = ForcedListArgument(default="include") keytype = ResourceArgument() key = ResourceArgument() user = ResourceArgument() path = ResourceArgument() actions = Resource.actions + ["include", "exclude"] def validate(self): if not self.path: if not self.user: raise Fail("[%s] Either path or user is required" % self) self.path = os.path.join(ssh_path_for_user(self.user), "authorized_keys") <commit_msg>Make sure ssh config directory exists<commit_after>
__all__ = ["SSHKnownHost", "SSHAuthorizedKey"] import os.path from kokki import * class SSHKnownHost(Resource): provider = "*ssh.SSHKnownHostProvider" action = ForcedListArgument(default="include") host = ResourceArgument(default=lambda obj:obj.name) keytype = ResourceArgument() key = ResourceArgument() hashed = BooleanArgument(default=True) user = ResourceArgument() path = ResourceArgument() actions = Resource.actions + ["include", "exclude"] def validate(self): if not self.path: if not self.user: raise Fail("[%s] Either path or user is required" % self) self.path = os.path.join(ssh_path_for_user(self.user), "known_hosts") Directory(os.path.dirname(self.path), owner = self.user, mode = 0700) class SSHAuthorizedKey(Resource): provider = "*ssh.SSHAuthorizedKeyProvider" action = ForcedListArgument(default="include") keytype = ResourceArgument() key = ResourceArgument() user = ResourceArgument() path = ResourceArgument() actions = Resource.actions + ["include", "exclude"] def validate(self): if not self.path: if not self.user: raise Fail("[%s] Either path or user is required" % self) self.path = os.path.join(ssh_path_for_user(self.user), "authorized_keys") Directory(os.path.dirname(self.path), owner = self.user, mode = 0700)
__all__ = ["SSHKnownHost", "SSHAuthorizedKey"] import os.path from kokki import * class SSHKnownHost(Resource): provider = "*ssh.SSHKnownHostProvider" action = ForcedListArgument(default="include") host = ResourceArgument(default=lambda obj:obj.name) keytype = ResourceArgument() key = ResourceArgument() hashed = BooleanArgument(default=True) user = ResourceArgument() path = ResourceArgument() actions = Resource.actions + ["include", "exclude"] def validate(self): if not self.path: if not self.user: raise Fail("[%s] Either path or user is required" % self) self.path = os.path.join(ssh_path_for_user(self.user), "known_hosts") class SSHAuthorizedKey(Resource): provider = "*ssh.SSHAuthorizedKeyProvider" action = ForcedListArgument(default="include") keytype = ResourceArgument() key = ResourceArgument() user = ResourceArgument() path = ResourceArgument() actions = Resource.actions + ["include", "exclude"] def validate(self): if not self.path: if not self.user: raise Fail("[%s] Either path or user is required" % self) self.path = os.path.join(ssh_path_for_user(self.user), "authorized_keys") Make sure ssh config directory exists __all__ = ["SSHKnownHost", "SSHAuthorizedKey"] import os.path from kokki import * class SSHKnownHost(Resource): provider = "*ssh.SSHKnownHostProvider" action = ForcedListArgument(default="include") host = ResourceArgument(default=lambda obj:obj.name) keytype = ResourceArgument() key = ResourceArgument() hashed = BooleanArgument(default=True) user = ResourceArgument() path = ResourceArgument() actions = Resource.actions + ["include", "exclude"] def validate(self): if not self.path: if not self.user: raise Fail("[%s] Either path or user is required" % self) self.path = os.path.join(ssh_path_for_user(self.user), "known_hosts") Directory(os.path.dirname(self.path), owner = self.user, mode = 0700) class SSHAuthorizedKey(Resource): provider = "*ssh.SSHAuthorizedKeyProvider" action = ForcedListArgument(default="include") keytype = ResourceArgument() key = ResourceArgument() user = ResourceArgument() path = ResourceArgument() actions = Resource.actions + ["include", "exclude"] def validate(self): if not self.path: if not self.user: raise Fail("[%s] Either path or user is required" % self) self.path = os.path.join(ssh_path_for_user(self.user), "authorized_keys") Directory(os.path.dirname(self.path), owner = self.user, mode = 0700)
<commit_before> __all__ = ["SSHKnownHost", "SSHAuthorizedKey"] import os.path from kokki import * class SSHKnownHost(Resource): provider = "*ssh.SSHKnownHostProvider" action = ForcedListArgument(default="include") host = ResourceArgument(default=lambda obj:obj.name) keytype = ResourceArgument() key = ResourceArgument() hashed = BooleanArgument(default=True) user = ResourceArgument() path = ResourceArgument() actions = Resource.actions + ["include", "exclude"] def validate(self): if not self.path: if not self.user: raise Fail("[%s] Either path or user is required" % self) self.path = os.path.join(ssh_path_for_user(self.user), "known_hosts") class SSHAuthorizedKey(Resource): provider = "*ssh.SSHAuthorizedKeyProvider" action = ForcedListArgument(default="include") keytype = ResourceArgument() key = ResourceArgument() user = ResourceArgument() path = ResourceArgument() actions = Resource.actions + ["include", "exclude"] def validate(self): if not self.path: if not self.user: raise Fail("[%s] Either path or user is required" % self) self.path = os.path.join(ssh_path_for_user(self.user), "authorized_keys") <commit_msg>Make sure ssh config directory exists<commit_after> __all__ = ["SSHKnownHost", "SSHAuthorizedKey"] import os.path from kokki import * class SSHKnownHost(Resource): provider = "*ssh.SSHKnownHostProvider" action = ForcedListArgument(default="include") host = ResourceArgument(default=lambda obj:obj.name) keytype = ResourceArgument() key = ResourceArgument() hashed = BooleanArgument(default=True) user = ResourceArgument() path = ResourceArgument() actions = Resource.actions + ["include", "exclude"] def validate(self): if not self.path: if not self.user: raise Fail("[%s] Either path or user is required" % self) self.path = os.path.join(ssh_path_for_user(self.user), "known_hosts") Directory(os.path.dirname(self.path), owner = self.user, mode = 0700) class SSHAuthorizedKey(Resource): provider = "*ssh.SSHAuthorizedKeyProvider" action = ForcedListArgument(default="include") keytype = ResourceArgument() key = ResourceArgument() user = ResourceArgument() path = ResourceArgument() actions = Resource.actions + ["include", "exclude"] def validate(self): if not self.path: if not self.user: raise Fail("[%s] Either path or user is required" % self) self.path = os.path.join(ssh_path_for_user(self.user), "authorized_keys") Directory(os.path.dirname(self.path), owner = self.user, mode = 0700)
70842a821d713525e1fe3c6376a30fcc0a39155c
zoe_lib/predefined_apps/__init__.py
zoe_lib/predefined_apps/__init__.py
# Copyright (c) 2016, Daniele Venzano
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from zoe_lib.predefined_apps.copier import copier_app
from zoe_lib.predefined_frameworks.jupyter_spark import spark_jupyter_notebook_app
from zoe_lib.predefined_apps.eurecom_aml_lab import spark_jupyter_notebook_lab_app
from zoe_lib.predefined_apps.hdfs import hdfs_app
from zoe_lib.predefined_apps.openmpi import openmpi_app
from zoe_lib.predefined_apps.spark_submit import spark_submit_app
from zoe_lib.predefined_apps.test_sleep import sleeper_app

PREDEFINED_APPS = [
    copier_app,
    spark_jupyter_notebook_app,
    spark_jupyter_notebook_lab_app,
    hdfs_app,
    openmpi_app,
    spark_submit_app,
    sleeper_app
]
# Copyright (c) 2016, Daniele Venzano
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from zoe_lib.predefined_apps.copier import copier_app
from zoe_lib.predefined_apps.spark_interactive import spark_jupyter_notebook_app
from zoe_lib.predefined_apps.eurecom_aml_lab import spark_jupyter_notebook_lab_app
from zoe_lib.predefined_apps.hdfs import hdfs_app
from zoe_lib.predefined_apps.openmpi import openmpi_app
from zoe_lib.predefined_apps.spark_submit import spark_submit_app
from zoe_lib.predefined_apps.test_sleep import sleeper_app

PREDEFINED_APPS = [
    copier_app,
    spark_jupyter_notebook_app,
    spark_jupyter_notebook_lab_app,
    hdfs_app,
    openmpi_app,
    spark_submit_app,
    sleeper_app
]
Fix import error due to wrong import line
Fix import error due to wrong import line
Python
apache-2.0
DistributedSystemsGroup/zoe,DistributedSystemsGroup/zoe,DistributedSystemsGroup/zoe,DistributedSystemsGroup/zoe,DistributedSystemsGroup/zoe
# Copyright (c) 2016, Daniele Venzano # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from zoe_lib.predefined_apps.copier import copier_app from zoe_lib.predefined_frameworks.jupyter_spark import spark_jupyter_notebook_app from zoe_lib.predefined_apps.eurecom_aml_lab import spark_jupyter_notebook_lab_app from zoe_lib.predefined_apps.hdfs import hdfs_app from zoe_lib.predefined_apps.openmpi import openmpi_app from zoe_lib.predefined_apps.spark_submit import spark_submit_app from zoe_lib.predefined_apps.test_sleep import sleeper_app PREDEFINED_APPS = [ copier_app, spark_jupyter_notebook_app, spark_jupyter_notebook_lab_app, hdfs_app, openmpi_app, spark_submit_app, sleeper_app ] Fix import error due to wrong import line
# Copyright (c) 2016, Daniele Venzano # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from zoe_lib.predefined_apps.copier import copier_app from zoe_lib.predefined_apps.spark_interactive import spark_jupyter_notebook_app from zoe_lib.predefined_apps.eurecom_aml_lab import spark_jupyter_notebook_lab_app from zoe_lib.predefined_apps.hdfs import hdfs_app from zoe_lib.predefined_apps.openmpi import openmpi_app from zoe_lib.predefined_apps.spark_submit import spark_submit_app from zoe_lib.predefined_apps.test_sleep import sleeper_app PREDEFINED_APPS = [ copier_app, spark_jupyter_notebook_app, spark_jupyter_notebook_lab_app, hdfs_app, openmpi_app, spark_submit_app, sleeper_app ]
<commit_before># Copyright (c) 2016, Daniele Venzano # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from zoe_lib.predefined_apps.copier import copier_app from zoe_lib.predefined_frameworks.jupyter_spark import spark_jupyter_notebook_app from zoe_lib.predefined_apps.eurecom_aml_lab import spark_jupyter_notebook_lab_app from zoe_lib.predefined_apps.hdfs import hdfs_app from zoe_lib.predefined_apps.openmpi import openmpi_app from zoe_lib.predefined_apps.spark_submit import spark_submit_app from zoe_lib.predefined_apps.test_sleep import sleeper_app PREDEFINED_APPS = [ copier_app, spark_jupyter_notebook_app, spark_jupyter_notebook_lab_app, hdfs_app, openmpi_app, spark_submit_app, sleeper_app ] <commit_msg>Fix import error due to wrong import line<commit_after>
# Copyright (c) 2016, Daniele Venzano # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from zoe_lib.predefined_apps.copier import copier_app from zoe_lib.predefined_apps.spark_interactive import spark_jupyter_notebook_app from zoe_lib.predefined_apps.eurecom_aml_lab import spark_jupyter_notebook_lab_app from zoe_lib.predefined_apps.hdfs import hdfs_app from zoe_lib.predefined_apps.openmpi import openmpi_app from zoe_lib.predefined_apps.spark_submit import spark_submit_app from zoe_lib.predefined_apps.test_sleep import sleeper_app PREDEFINED_APPS = [ copier_app, spark_jupyter_notebook_app, spark_jupyter_notebook_lab_app, hdfs_app, openmpi_app, spark_submit_app, sleeper_app ]
# Copyright (c) 2016, Daniele Venzano # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from zoe_lib.predefined_apps.copier import copier_app from zoe_lib.predefined_frameworks.jupyter_spark import spark_jupyter_notebook_app from zoe_lib.predefined_apps.eurecom_aml_lab import spark_jupyter_notebook_lab_app from zoe_lib.predefined_apps.hdfs import hdfs_app from zoe_lib.predefined_apps.openmpi import openmpi_app from zoe_lib.predefined_apps.spark_submit import spark_submit_app from zoe_lib.predefined_apps.test_sleep import sleeper_app PREDEFINED_APPS = [ copier_app, spark_jupyter_notebook_app, spark_jupyter_notebook_lab_app, hdfs_app, openmpi_app, spark_submit_app, sleeper_app ] Fix import error due to wrong import line# Copyright (c) 2016, Daniele Venzano # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from zoe_lib.predefined_apps.copier import copier_app from zoe_lib.predefined_apps.spark_interactive import spark_jupyter_notebook_app from zoe_lib.predefined_apps.eurecom_aml_lab import spark_jupyter_notebook_lab_app from zoe_lib.predefined_apps.hdfs import hdfs_app from zoe_lib.predefined_apps.openmpi import openmpi_app from zoe_lib.predefined_apps.spark_submit import spark_submit_app from zoe_lib.predefined_apps.test_sleep import sleeper_app PREDEFINED_APPS = [ copier_app, spark_jupyter_notebook_app, spark_jupyter_notebook_lab_app, hdfs_app, openmpi_app, spark_submit_app, sleeper_app ]
<commit_before># Copyright (c) 2016, Daniele Venzano # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from zoe_lib.predefined_apps.copier import copier_app from zoe_lib.predefined_frameworks.jupyter_spark import spark_jupyter_notebook_app from zoe_lib.predefined_apps.eurecom_aml_lab import spark_jupyter_notebook_lab_app from zoe_lib.predefined_apps.hdfs import hdfs_app from zoe_lib.predefined_apps.openmpi import openmpi_app from zoe_lib.predefined_apps.spark_submit import spark_submit_app from zoe_lib.predefined_apps.test_sleep import sleeper_app PREDEFINED_APPS = [ copier_app, spark_jupyter_notebook_app, spark_jupyter_notebook_lab_app, hdfs_app, openmpi_app, spark_submit_app, sleeper_app ] <commit_msg>Fix import error due to wrong import line<commit_after># Copyright (c) 2016, Daniele Venzano # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from zoe_lib.predefined_apps.copier import copier_app from zoe_lib.predefined_apps.spark_interactive import spark_jupyter_notebook_app from zoe_lib.predefined_apps.eurecom_aml_lab import spark_jupyter_notebook_lab_app from zoe_lib.predefined_apps.hdfs import hdfs_app from zoe_lib.predefined_apps.openmpi import openmpi_app from zoe_lib.predefined_apps.spark_submit import spark_submit_app from zoe_lib.predefined_apps.test_sleep import sleeper_app PREDEFINED_APPS = [ copier_app, spark_jupyter_notebook_app, spark_jupyter_notebook_lab_app, hdfs_app, openmpi_app, spark_submit_app, sleeper_app ]
fa16e93e4d00db3ef68f9de16f5c1eb28988dc18
apps/local_apps/account/context_processors.py
apps/local_apps/account/context_processors.py
from account.models import Account, AnonymousAccount

def openid(request):
    return {'openid': request.openid}

def account(request):
    if request.user.is_authenticated():
        try:
            account = Account._default_manager.get(user=request.user)
        except (Account.DoesNotExist, Account.MultipleObjectsReturned):
            account = AnonymousAccount(request)
    else:
        account = AnonymousAccount(request)
    return {'account': account}
from account.models import Account, AnonymousAccount

def openid(request):
    return {'openid': request.openid}

def account(request):
    if request.user.is_authenticated():
        try:
            account = Account._default_manager.get(user=request.user)
        except Account.DoesNotExist:
            account = AnonymousAccount(request)
    else:
        account = AnonymousAccount(request)
    return {'account': account}
Throw 500 error on multiple accounts in account context processor
Throw 500 error on multiple accounts in account context processor
Python
mit
ingenieroariel/pinax,ingenieroariel/pinax
from account.models import Account, AnonymousAccount def openid(request): return {'openid': request.openid} def account(request): if request.user.is_authenticated(): try: account = Account._default_manager.get(user=request.user) except (Account.DoesNotExist, Account.MultipleObjectsReturned): account = AnonymousAccount(request) else: account = AnonymousAccount(request) return {'account': account} Throw 500 error on multiple accounts in account context processor
from account.models import Account, AnonymousAccount def openid(request): return {'openid': request.openid} def account(request): if request.user.is_authenticated(): try: account = Account._default_manager.get(user=request.user) except Account.DoesNotExist: account = AnonymousAccount(request) else: account = AnonymousAccount(request) return {'account': account}
<commit_before> from account.models import Account, AnonymousAccount def openid(request): return {'openid': request.openid} def account(request): if request.user.is_authenticated(): try: account = Account._default_manager.get(user=request.user) except (Account.DoesNotExist, Account.MultipleObjectsReturned): account = AnonymousAccount(request) else: account = AnonymousAccount(request) return {'account': account} <commit_msg>Throw 500 error on multiple accounts in account context processor<commit_after>
from account.models import Account, AnonymousAccount def openid(request): return {'openid': request.openid} def account(request): if request.user.is_authenticated(): try: account = Account._default_manager.get(user=request.user) except Account.DoesNotExist: account = AnonymousAccount(request) else: account = AnonymousAccount(request) return {'account': account}
from account.models import Account, AnonymousAccount def openid(request): return {'openid': request.openid} def account(request): if request.user.is_authenticated(): try: account = Account._default_manager.get(user=request.user) except (Account.DoesNotExist, Account.MultipleObjectsReturned): account = AnonymousAccount(request) else: account = AnonymousAccount(request) return {'account': account} Throw 500 error on multiple accounts in account context processor from account.models import Account, AnonymousAccount def openid(request): return {'openid': request.openid} def account(request): if request.user.is_authenticated(): try: account = Account._default_manager.get(user=request.user) except Account.DoesNotExist: account = AnonymousAccount(request) else: account = AnonymousAccount(request) return {'account': account}
<commit_before> from account.models import Account, AnonymousAccount def openid(request): return {'openid': request.openid} def account(request): if request.user.is_authenticated(): try: account = Account._default_manager.get(user=request.user) except (Account.DoesNotExist, Account.MultipleObjectsReturned): account = AnonymousAccount(request) else: account = AnonymousAccount(request) return {'account': account} <commit_msg>Throw 500 error on multiple accounts in account context processor<commit_after> from account.models import Account, AnonymousAccount def openid(request): return {'openid': request.openid} def account(request): if request.user.is_authenticated(): try: account = Account._default_manager.get(user=request.user) except Account.DoesNotExist: account = AnonymousAccount(request) else: account = AnonymousAccount(request) return {'account': account}
52a3a7b2a6aac284b9dd1a7edfb27cdec4d33675
lib/pyfrc/test_support/pyfrc_fake_hooks.py
lib/pyfrc/test_support/pyfrc_fake_hooks.py
from hal_impl.data import hal_data

class PyFrcFakeHooks:
    '''
    Defines hal hooks that use the fake time object
    '''

    def __init__(self, fake_time):
        self.fake_time = fake_time

    #
    # Hook functions
    #

    def getTime(self):
        return self.fake_time.get()

    def getFPGATime(self):
        return int((self.fake_time.get() - hal_data['time']['program_start']) * 1000000)

    def delayMillis(self, ms):
        self.fake_time.increment_time_by(.001 * ms)

    def delaySeconds(self, s):
        self.fake_time.increment_time_by(s)

    def initializeDriverStation(self):
        pass

    @property
    def ds_cond(self):
        return self.fake_time.ds_cond

    @ds_cond.setter
    def ds_cond(self, value):
        pass  # ignored
from hal_impl.sim_hooks import SimHooks

class PyFrcFakeHooks(SimHooks):
    '''
    Defines hal hooks that use the fake time object
    '''

    def __init__(self, fake_time):
        self.fake_time = fake_time
        super().__init__()

    #
    # Time related hooks
    #

    def getTime(self):
        return self.fake_time.get()

    def delayMillis(self, ms):
        self.fake_time.increment_time_by(.001 * ms)

    def delaySeconds(self, s):
        self.fake_time.increment_time_by(s)

    #
    # DriverStation related hooks
    #

    @property
    def ds_cond(self):
        return self.fake_time.ds_cond

    @ds_cond.setter
    def ds_cond(self, value):
        pass  # ignored
Update sim hooks for 2018
Update sim hooks for 2018
Python
mit
robotpy/pyfrc
from hal_impl.data import hal_data class PyFrcFakeHooks: ''' Defines hal hooks that use the fake time object ''' def __init__(self, fake_time): self.fake_time = fake_time # # Hook functions # def getTime(self): return self.fake_time.get() def getFPGATime(self): return int((self.fake_time.get() - hal_data['time']['program_start']) * 1000000) def delayMillis(self, ms): self.fake_time.increment_time_by(.001 * ms) def delaySeconds(self, s): self.fake_time.increment_time_by(s) def initializeDriverStation(self): pass @property def ds_cond(self): return self.fake_time.ds_cond @ds_cond.setter def ds_cond(self, value): pass # ignored Update sim hooks for 2018
from hal_impl.sim_hooks import SimHooks class PyFrcFakeHooks(SimHooks): ''' Defines hal hooks that use the fake time object ''' def __init__(self, fake_time): self.fake_time = fake_time super().__init__() # # Time related hooks # def getTime(self): return self.fake_time.get() def delayMillis(self, ms): self.fake_time.increment_time_by(.001 * ms) def delaySeconds(self, s): self.fake_time.increment_time_by(s) # # DriverStation related hooks # @property def ds_cond(self): return self.fake_time.ds_cond @ds_cond.setter def ds_cond(self, value): pass # ignored
<commit_before>from hal_impl.data import hal_data class PyFrcFakeHooks: ''' Defines hal hooks that use the fake time object ''' def __init__(self, fake_time): self.fake_time = fake_time # # Hook functions # def getTime(self): return self.fake_time.get() def getFPGATime(self): return int((self.fake_time.get() - hal_data['time']['program_start']) * 1000000) def delayMillis(self, ms): self.fake_time.increment_time_by(.001 * ms) def delaySeconds(self, s): self.fake_time.increment_time_by(s) def initializeDriverStation(self): pass @property def ds_cond(self): return self.fake_time.ds_cond @ds_cond.setter def ds_cond(self, value): pass # ignored <commit_msg>Update sim hooks for 2018<commit_after>
from hal_impl.sim_hooks import SimHooks class PyFrcFakeHooks(SimHooks): ''' Defines hal hooks that use the fake time object ''' def __init__(self, fake_time): self.fake_time = fake_time super().__init__() # # Time related hooks # def getTime(self): return self.fake_time.get() def delayMillis(self, ms): self.fake_time.increment_time_by(.001 * ms) def delaySeconds(self, s): self.fake_time.increment_time_by(s) # # DriverStation related hooks # @property def ds_cond(self): return self.fake_time.ds_cond @ds_cond.setter def ds_cond(self, value): pass # ignored
from hal_impl.data import hal_data class PyFrcFakeHooks: ''' Defines hal hooks that use the fake time object ''' def __init__(self, fake_time): self.fake_time = fake_time # # Hook functions # def getTime(self): return self.fake_time.get() def getFPGATime(self): return int((self.fake_time.get() - hal_data['time']['program_start']) * 1000000) def delayMillis(self, ms): self.fake_time.increment_time_by(.001 * ms) def delaySeconds(self, s): self.fake_time.increment_time_by(s) def initializeDriverStation(self): pass @property def ds_cond(self): return self.fake_time.ds_cond @ds_cond.setter def ds_cond(self, value): pass # ignored Update sim hooks for 2018 from hal_impl.sim_hooks import SimHooks class PyFrcFakeHooks(SimHooks): ''' Defines hal hooks that use the fake time object ''' def __init__(self, fake_time): self.fake_time = fake_time super().__init__() # # Time related hooks # def getTime(self): return self.fake_time.get() def delayMillis(self, ms): self.fake_time.increment_time_by(.001 * ms) def delaySeconds(self, s): self.fake_time.increment_time_by(s) # # DriverStation related hooks # @property def ds_cond(self): return self.fake_time.ds_cond @ds_cond.setter def ds_cond(self, value): pass # ignored
<commit_before>from hal_impl.data import hal_data class PyFrcFakeHooks: ''' Defines hal hooks that use the fake time object ''' def __init__(self, fake_time): self.fake_time = fake_time # # Hook functions # def getTime(self): return self.fake_time.get() def getFPGATime(self): return int((self.fake_time.get() - hal_data['time']['program_start']) * 1000000) def delayMillis(self, ms): self.fake_time.increment_time_by(.001 * ms) def delaySeconds(self, s): self.fake_time.increment_time_by(s) def initializeDriverStation(self): pass @property def ds_cond(self): return self.fake_time.ds_cond @ds_cond.setter def ds_cond(self, value): pass # ignored <commit_msg>Update sim hooks for 2018<commit_after> from hal_impl.sim_hooks import SimHooks class PyFrcFakeHooks(SimHooks): ''' Defines hal hooks that use the fake time object ''' def __init__(self, fake_time): self.fake_time = fake_time super().__init__() # # Time related hooks # def getTime(self): return self.fake_time.get() def delayMillis(self, ms): self.fake_time.increment_time_by(.001 * ms) def delaySeconds(self, s): self.fake_time.increment_time_by(s) # # DriverStation related hooks # @property def ds_cond(self): return self.fake_time.ds_cond @ds_cond.setter def ds_cond(self, value): pass # ignored
32671085ddd8362db14e22d98d4fa5910dd0aa62
ui/tcmui/testexecution/views.py
ui/tcmui/testexecution/views.py
from django.template.response import TemplateResponse

from ..products.models import Product
from ..static import testcyclestatus
from ..users.decorators import login_required

from .models import TestCycleList

@login_required
def cycles(request, product_id):
    product = Product.get("products/%s" % product_id, auth=request.auth)

    # @@@ should be auth=request.auth - API permissions broken here?
    from ..core.api import admin
    cycles = TestCycleList.get(auth=admin).filter(
        productId=product_id,
        testCycleStatusId=testcyclestatus.ACTIVE)

    return TemplateResponse(
        request,
        "test/cycles.html",
        {"product": product,
         "cycles": cycles})
from django.template.response import TemplateResponse

from ..products.models import Product
from ..static import testcyclestatus
from ..users.decorators import login_required

from .models import TestCycleList

@login_required
def cycles(request, product_id):
    product = Product.get("products/%s" % product_id, auth=request.auth)

    cycles = TestCycleList.get(auth=request.auth).filter(
        productId=product_id,
        testCycleStatusId=testcyclestatus.ACTIVE)

    return TemplateResponse(
        request,
        "test/cycles.html",
        {"product": product,
         "cycles": cycles})
Use correct auth in cycles view, now that API permissions are fixed.
Use correct auth in cycles view, now that API permissions are fixed.
Python
bsd-2-clause
mccarrmb/moztrap,mccarrmb/moztrap,mccarrmb/moztrap,shinglyu/moztrap,mozilla/moztrap,shinglyu/moztrap,shinglyu/moztrap,mozilla/moztrap,bobsilverberg/moztrap,shinglyu/moztrap,mccarrmb/moztrap,bobsilverberg/moztrap,mozilla/moztrap,shinglyu/moztrap,mccarrmb/moztrap,bobsilverberg/moztrap,mozilla/moztrap,mozilla/moztrap,bobsilverberg/moztrap
from django.template.response import TemplateResponse from ..products.models import Product from ..static import testcyclestatus from ..users.decorators import login_required from .models import TestCycleList @login_required def cycles(request, product_id): product = Product.get("products/%s" % product_id, auth=request.auth) # @@@ should be auth=request.auth - API permissions broken here? from ..core.api import admin cycles = TestCycleList.get(auth=admin).filter( productId=product_id, testCycleStatusId=testcyclestatus.ACTIVE) return TemplateResponse( request, "test/cycles.html", {"product": product, "cycles": cycles}) Use correct auth in cycles view, now that API permissions are fixed.
from django.template.response import TemplateResponse from ..products.models import Product from ..static import testcyclestatus from ..users.decorators import login_required from .models import TestCycleList @login_required def cycles(request, product_id): product = Product.get("products/%s" % product_id, auth=request.auth) cycles = TestCycleList.get(auth=request.auth).filter( productId=product_id, testCycleStatusId=testcyclestatus.ACTIVE) return TemplateResponse( request, "test/cycles.html", {"product": product, "cycles": cycles})
<commit_before>from django.template.response import TemplateResponse from ..products.models import Product from ..static import testcyclestatus from ..users.decorators import login_required from .models import TestCycleList @login_required def cycles(request, product_id): product = Product.get("products/%s" % product_id, auth=request.auth) # @@@ should be auth=request.auth - API permissions broken here? from ..core.api import admin cycles = TestCycleList.get(auth=admin).filter( productId=product_id, testCycleStatusId=testcyclestatus.ACTIVE) return TemplateResponse( request, "test/cycles.html", {"product": product, "cycles": cycles}) <commit_msg>Use correct auth in cycles view, now that API permissions are fixed.<commit_after>
from django.template.response import TemplateResponse from ..products.models import Product from ..static import testcyclestatus from ..users.decorators import login_required from .models import TestCycleList @login_required def cycles(request, product_id): product = Product.get("products/%s" % product_id, auth=request.auth) cycles = TestCycleList.get(auth=request.auth).filter( productId=product_id, testCycleStatusId=testcyclestatus.ACTIVE) return TemplateResponse( request, "test/cycles.html", {"product": product, "cycles": cycles})
from django.template.response import TemplateResponse from ..products.models import Product from ..static import testcyclestatus from ..users.decorators import login_required from .models import TestCycleList @login_required def cycles(request, product_id): product = Product.get("products/%s" % product_id, auth=request.auth) # @@@ should be auth=request.auth - API permissions broken here? from ..core.api import admin cycles = TestCycleList.get(auth=admin).filter( productId=product_id, testCycleStatusId=testcyclestatus.ACTIVE) return TemplateResponse( request, "test/cycles.html", {"product": product, "cycles": cycles}) Use correct auth in cycles view, now that API permissions are fixed.from django.template.response import TemplateResponse from ..products.models import Product from ..static import testcyclestatus from ..users.decorators import login_required from .models import TestCycleList @login_required def cycles(request, product_id): product = Product.get("products/%s" % product_id, auth=request.auth) cycles = TestCycleList.get(auth=request.auth).filter( productId=product_id, testCycleStatusId=testcyclestatus.ACTIVE) return TemplateResponse( request, "test/cycles.html", {"product": product, "cycles": cycles})
<commit_before>from django.template.response import TemplateResponse from ..products.models import Product from ..static import testcyclestatus from ..users.decorators import login_required from .models import TestCycleList @login_required def cycles(request, product_id): product = Product.get("products/%s" % product_id, auth=request.auth) # @@@ should be auth=request.auth - API permissions broken here? from ..core.api import admin cycles = TestCycleList.get(auth=admin).filter( productId=product_id, testCycleStatusId=testcyclestatus.ACTIVE) return TemplateResponse( request, "test/cycles.html", {"product": product, "cycles": cycles}) <commit_msg>Use correct auth in cycles view, now that API permissions are fixed.<commit_after>from django.template.response import TemplateResponse from ..products.models import Product from ..static import testcyclestatus from ..users.decorators import login_required from .models import TestCycleList @login_required def cycles(request, product_id): product = Product.get("products/%s" % product_id, auth=request.auth) cycles = TestCycleList.get(auth=request.auth).filter( productId=product_id, testCycleStatusId=testcyclestatus.ACTIVE) return TemplateResponse( request, "test/cycles.html", {"product": product, "cycles": cycles})
901c482f357ba3a845d40cb126667490472a6bf6
Code/divide.py
Code/divide.py
def divide(a, b):
    return a / b

print divide(20, 2)
num1 = input('Enter first number: ')
num2 = input('Enter second number: ')

if num2==0:
    print 'Denominator cannot be 0'
else:
    Division=float(num1)/float(num2)
    print Division
Divide two numbers using python code
Divide two numbers using python code
Python
mit
HarendraSingh22/Python-Guide-for-Beginners
def divide(a, b): return a / b print divide(20, 2) Divide two numbers using python code
num1 = input('Enter first number: ') num2 = input('Enter second number: ') if num2==0: print 'Denominator cannot be 0' else: Division=float(num1)/float(num2) print Division
<commit_before>def divide(a, b): return a / b print divide(20, 2) <commit_msg> Divide two numbers using python code<commit_after>
num1 = input('Enter first number: ') num2 = input('Enter second number: ') if num2==0: print 'Denominator cannot be 0' else: Division=float(num1)/float(num2) print Division
def divide(a, b): return a / b print divide(20, 2) Divide two numbers using python codenum1 = input('Enter first number: ') num2 = input('Enter second number: ') if num2==0: print 'Denominator cannot be 0' else: Division=float(num1)/float(num2) print Division
<commit_before>def divide(a, b): return a / b print divide(20, 2) <commit_msg> Divide two numbers using python code<commit_after>num1 = input('Enter first number: ') num2 = input('Enter second number: ') if num2==0: print 'Denominator cannot be 0' else: Division=float(num1)/float(num2) print Division