Dataset schema (column name, feature type, min/max string length or class count):

    commit            stringlengths   40 / 40
    old_file          stringlengths   4 / 118
    new_file          stringlengths   4 / 118
    old_contents      stringlengths   0 / 2.94k
    new_contents      stringlengths   1 / 4.43k
    subject           stringlengths   15 / 444
    message           stringlengths   16 / 3.45k
    lang              stringclasses   1 value
    license           stringclasses   13 values
    repos             stringlengths   5 / 43.2k
    prompt            stringlengths   17 / 4.58k
    response          stringlengths   1 / 4.43k
    prompt_tagged     stringlengths   58 / 4.62k
    response_tagged   stringlengths   1 / 4.43k
    text              stringlengths   132 / 7.29k
    text_tagged       stringlengths   173 / 7.33k
37170b156e6a284d5e5df671875070a3fcac9310
commands/join.py
commands/join.py
old_contents:

from CommandTemplate import CommandTemplate
from IrcMessage import IrcMessage


class Command(CommandTemplate):
    triggers = ['join']
    helptext = "Makes me join another channel, if I'm allowed to at least"

    def execute(self, message):
        """
        :type message: IrcMessage
        """
        replytext = ""
        if message.messagePartsLength < 1:
            replytext = "Please provide a channel for me to join"
        else:
            channel = message.messageParts[0]
            if channel.replace('#', '') not in message.bot.factory.settings['allowedChannels'] and not message.bot.factory.isUserAdmin(message.user, message.userNickname, message.userAddress):
                replytext = "I'm sorry, I'm not allowed to go there. Please ask my admin(s) for permission"
            else:
                replytext = "All right, I'll go to {}. See you there!".format(channel)
                message.bot.join(channel)
        message.reply(replytext, "say")

new_contents:

from CommandTemplate import CommandTemplate
from IrcMessage import IrcMessage


class Command(CommandTemplate):
    triggers = ['join']
    helptext = "Makes me join another channel, if I'm allowed to at least"

    def execute(self, message):
        """
        :type message: IrcMessage
        """
        replytext = ""
        if message.messagePartsLength < 1:
            replytext = "Please provide a channel for me to join"
        else:
            channel = message.messageParts[0].lower()
            if channel.startswith('#'):
                channel = channel.lstrip('#')
            if '#' + channel in message.bot.channelsUserList:
                replytext = "I'm already there, waiting for you. You're welcome!"
            elif channel not in message.bot.factory.settings['allowedChannels'] and not message.bot.factory.isUserAdmin(message.user, message.userNickname, message.userAddress):
                replytext = "I'm sorry, I'm not allowed to go there. Please ask my admin(s) for permission"
            else:
                replytext = "All right, I'll go to #{}. See you there!".format(channel)
                message.bot.join(channel)
        message.reply(replytext, "say")
Check if we're already in the channel; Improved parameter parsing
[Join] Check if we're already in the channel; Improved parameter parsing
Python
mit
Didero/DideRobot
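The "improved parameter parsing" in this record hinges on `str.lstrip('#')`, which removes every leading `#`, not just one, so any spelling of a channel name normalizes the same way. A small standalone demonstration with hypothetical channel names, no bot required:

```python
# str.lstrip('#') strips all leading '#' characters, so "#Chan",
# "##chan", and "chan" all normalize to the same bare name.
for raw in ("#Chan", "##chan", "chan"):
    channel = raw.lower()
    if channel.startswith('#'):
        channel = channel.lstrip('#')
    print(raw, "->", '#' + channel)  # all three print "#chan"
```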
29efb23b2edb46aed4be835787d461ee1ada96f6
GitAutoDeploy.py
GitAutoDeploy.py
old_contents:

#!/usr/bin/env python

if __name__ == '__main__':
    import sys
    import os
    import gitautodeploy
    sys.stderr.write("\033[1;33m[WARNING]\033[0;33m GitAutoDeploy.py is deprecated. Please use \033[1;33m'python gitautodeploy%s'\033[0;33m instead.\033[0m\n" % (' ' + ' '.join(sys.argv[1:])).strip())
    gitautodeploy.main()

new_contents:

#!/usr/bin/env python

if __name__ == '__main__':
    import sys
    import os
    import gitautodeploy
    sys.stderr.write("\033[1;33m[WARNING]\033[0;33m GitAutoDeploy.py is deprecated. Please use \033[1;33m'python gitautodeploy%s'\033[0;33m instead.\033[0m\n" % (' ' + ' '.join(sys.argv[1:])).rstrip())
    gitautodeploy.main()
Fix truncated command and options
Fix truncated command and options

Before:

```console
$ python GitAutoDeploy.py --daemon-mode
[WARNING] GitAutoDeploy.py is deprecated. Please use 'python gitautodeploy--daemon-mode' instead.
```

Now:

```console
$ python GitAutoDeploy.py --daemon-mode
[WARNING] GitAutoDeploy.py is deprecated. Please use 'python gitautodeploy --daemon-mode' instead.
```
Python
mit
evoja/docker-Github-Gitlab-Auto-Deploy,evoja/docker-Github-Gitlab-Auto-Deploy
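The one-character fix swaps `.strip()` for `.rstrip()`: the joined argument string is deliberately built with a leading space, and `.strip()` was removing it, fusing the command name and its first option. A standalone reproduction of the behavior difference:

```python
# .strip() removes the intentional leading space; .rstrip() keeps it
# while still trimming the trailing space when there are no arguments.
args = ['--daemon-mode']
suffix = ' ' + ' '.join(args)
print("python gitautodeploy%s" % suffix.strip())   # python gitautodeploy--daemon-mode
print("python gitautodeploy%s" % suffix.rstrip())  # python gitautodeploy --daemon-mode
```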
b282c54ebaaae13aa8b81f2380cdc20acaa9fc69
lab/gendata.py
lab/gendata.py
old_contents:

import random
import time

from coverage.data import CoverageJsonData
from coverage.sqldata import CoverageSqliteData

NUM_FILES = 1000
NUM_LINES = 1000

def gen_data(cdata):
    rnd = random.Random()
    rnd.seed(17)

    def linenos(num_lines, prob):
        return (n for n in range(num_lines) if random.random() < prob)

    start = time.time()
    for i in range(NUM_FILES):
        filename = f"/src/foo/project/file{i}.py"
        line_data = { filename: dict.fromkeys(linenos(NUM_LINES, .6)) }
        cdata.add_lines(line_data)
    cdata.write()
    end = time.time()
    delta = end - start
    return delta

class DummyData:
    def add_lines(self, line_data):
        return
    def write(self):
        return

overhead = gen_data(DummyData())
jtime = gen_data(CoverageJsonData("gendata.json")) - overhead
stime = gen_data(CoverageSqliteData("gendata.db")) - overhead

print(f"Overhead: {overhead:.3f}s")
print(f"JSON: {jtime:.3f}s")
print(f"SQLite: {stime:.3f}s")
print(f"{stime / jtime:.3f}x slower")

new_contents:

# Run some timing tests of JsonData vs SqliteData.

import random
import time

from coverage.data import CoverageJsonData
from coverage.sqldata import CoverageSqliteData

NUM_FILES = 1000
NUM_LINES = 1000

def gen_data(cdata):
    rnd = random.Random()
    rnd.seed(17)

    def linenos(num_lines, prob):
        return (n for n in range(num_lines) if random.random() < prob)

    start = time.time()
    for i in range(NUM_FILES):
        filename = "/src/foo/project/file{i}.py".format(i=i)
        line_data = { filename: dict.fromkeys(linenos(NUM_LINES, .6)) }
        cdata.add_lines(line_data)
    cdata.write()
    end = time.time()
    delta = end - start
    return delta

class DummyData:
    def add_lines(self, line_data):
        return
    def write(self):
        return

overhead = gen_data(DummyData())
jtime = gen_data(CoverageJsonData("gendata.json")) - overhead
stime = gen_data(CoverageSqliteData("gendata.db")) - overhead

print("Overhead: {overhead:.3f}s".format(overhead=overhead))
print("JSON: {jtime:.3f}s".format(jtime=jtime))
print("SQLite: {stime:.3f}s".format(stime=stime))
print("{slower:.3f}x slower".format(slower=stime/jtime))
Make it run on PyPy for time tests there
Make it run on PyPy for time tests there
Python
apache-2.0
hugovk/coveragepy,nedbat/coveragepy,hugovk/coveragepy,nedbat/coveragepy,hugovk/coveragepy,nedbat/coveragepy,hugovk/coveragepy,hugovk/coveragepy,nedbat/coveragepy,nedbat/coveragepy
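The "run on PyPy" change replaces every f-string with `str.format`. f-strings require Python 3.6+, and the PyPy interpreter targeted here presumably implemented an older language level, so `str.format` is the portable spelling; a file containing an f-string is a SyntaxError on older interpreters even if the line never executes. A minimal before/after:

```python
# f-strings (Python 3.6+) versus the equivalent str.format spelling,
# which also parses on Python 2 and older PyPy releases.
i = 7
new_style = f"/src/foo/project/file{i}.py"            # 3.6+ only
portable = "/src/foo/project/file{i}.py".format(i=i)  # parses everywhere
assert new_style == portable
```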
711c35ce82479f0ced86ef7b3dff083c49f7ee09
vidscraper/__init__.py
vidscraper/__init__.py
old_contents:

from vidscraper import errors
from vidscraper.sites import (
    vimeo, google_video, youtube)

AUTOSCRAPE_SUITES = [
    vimeo.SUITE, google_video.SUITE, youtube.SUITE, blip.SUITE]

def scrape_suite(url, suite, fields=None):
    scraped_data = {}
    funcs_map = suite['funcs']
    fields = fields or funcs_map.keys()
    order = suite.get('order')
    if order:
        # remove items in the order that are not in the fields
        for field in set(order).difference(fields):
            order.pop(field)
        # add items that may have been missing from the order but
        # which are in the fields
        order.extend(set(fields).difference(order))
        fields = order
    shortmem = {}
    for field in fields:
        func = funcs_map[field]
        scraped_data[field] = func(url, shortmem=shortmem)
    return scraped_data

def auto_scrape(url, fields=None):
    for suite in AUTOSCRAPE_SUITES:
        if suite['regex'].match(url):
            return scrape_suite(url, suite, fields)

    # If we get here that means that none of the regexes matched, so
    # throw an error
    raise errors.CantIdentifyUrl(
        "No video scraping suite was found that can scrape that url")

new_contents:

from vidscraper import errors
from vidscraper.sites import (
    vimeo, google_video, youtube, blip)

AUTOSCRAPE_SUITES = [
    vimeo.SUITE, google_video.SUITE, youtube.SUITE, blip.SUITE]

def scrape_suite(url, suite, fields=None):
    scraped_data = {}
    funcs_map = suite['funcs']
    fields = fields or funcs_map.keys()
    order = suite.get('order')
    if order:
        # remove items in the order that are not in the fields
        for field in set(order).difference(fields):
            order.pop(field)
        # add items that may have been missing from the order but
        # which are in the fields
        order.extend(set(fields).difference(order))
        fields = order
    shortmem = {}
    for field in fields:
        func = funcs_map[field]
        scraped_data[field] = func(url, shortmem=shortmem)
    return scraped_data

def auto_scrape(url, fields=None):
    for suite in AUTOSCRAPE_SUITES:
        if suite['regex'].match(url):
            return scrape_suite(url, suite, fields)

    # If we get here that means that none of the regexes matched, so
    # throw an error
    raise errors.CantIdentifyUrl(
        "No video scraping suite was found that can scrape that url")
Fix the blip autoscrape. Missed an import.
Fix the blip autoscrape. Missed an import. Stupid stupid stupid. Should have tested before pushing.
Python
bsd-3-clause
pculture/vidscraper,pculture/vidscraper
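The bug here is a module-level `NameError`: `blip.SUITE` is referenced in `AUTOSCRAPE_SUITES` before `blip` is ever imported, so the failure happens the moment the module is imported, not when a URL is scraped. A reduced reproduction (the module name is hypothetical):

```python
# Referencing a never-imported module fails as soon as the list
# literal is evaluated at module import time.
try:
    SUITES = [blip.SUITE]  # NameError: name 'blip' is not defined
except NameError as exc:
    print(exc)
```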
fc3e5201935653228a71d4a52eeffb94de284141
vesper/external_urls.py
vesper/external_urls.py
""" Functions that return external URLs, for example for the Vesper documentation. """ import vesper.version as vesper_version _USE_LATEST_DOCUMENTATION_VERSION = False """Set this `True` during development, `False` for release.""" def _create_documentation_url(): if _USE_LATEST_DOCUMENTATION_VERSION: doc_version = 'latest' else: doc_version = vesper_version.full_version return 'https://vesper.readthedocs.io/en/' + doc_version + '/' def _create_tutorial_url(): return _create_documentation_url() + 'tutorial.html' documentation_url = _create_documentation_url() tutorial_url = _create_tutorial_url() source_code_url = 'https://github.com/HaroldMills/Vesper'
""" Functions that return external URLs, for example for the Vesper documentation. """ import vesper.version as vesper_version _USE_LATEST_DOCUMENTATION_VERSION = True """Set this `True` during development, `False` for release.""" def _create_documentation_url(): if _USE_LATEST_DOCUMENTATION_VERSION: doc_version = 'latest' else: doc_version = vesper_version.full_version return 'https://vesper.readthedocs.io/en/' + doc_version + '/' def _create_tutorial_url(): return _create_documentation_url() + 'tutorial.html' documentation_url = _create_documentation_url() tutorial_url = _create_tutorial_url() source_code_url = 'https://github.com/HaroldMills/Vesper'
Edit external URLs module for development.
Edit external URLs module for development.
Python
mit
HaroldMills/Vesper,HaroldMills/Vesper,HaroldMills/Vesper,HaroldMills/Vesper,HaroldMills/Vesper
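The only change is flipping `_USE_LATEST_DOCUMENTATION_VERSION`, which redirects documentation links from the pinned release docs to the rolling "latest" build on Read the Docs. A sketch of the two resulting URLs; the version string is illustrative, not Vesper's actual release number:

```python
# With the flag True the URL targets the rolling 'latest' docs;
# with it False it pins to the released version string.
full_version = "0.4.9"  # illustrative version string
for use_latest in (True, False):
    doc_version = 'latest' if use_latest else full_version
    print('https://vesper.readthedocs.io/en/' + doc_version + '/')
```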
""" Functions that return external URLs, for example for the Vesper documentation. """ import vesper.version as vesper_version _USE_LATEST_DOCUMENTATION_VERSION = False """Set this `True` during development, `False` for release.""" def _create_documentation_url(): if _USE_LATEST_DOCUMENTATION_VERSION: doc_version = 'latest' else: doc_version = vesper_version.full_version return 'https://vesper.readthedocs.io/en/' + doc_version + '/' def _create_tutorial_url(): return _create_documentation_url() + 'tutorial.html' documentation_url = _create_documentation_url() tutorial_url = _create_tutorial_url() source_code_url = 'https://github.com/HaroldMills/Vesper' Edit external URLs module for development.
""" Functions that return external URLs, for example for the Vesper documentation. """ import vesper.version as vesper_version _USE_LATEST_DOCUMENTATION_VERSION = True """Set this `True` during development, `False` for release.""" def _create_documentation_url(): if _USE_LATEST_DOCUMENTATION_VERSION: doc_version = 'latest' else: doc_version = vesper_version.full_version return 'https://vesper.readthedocs.io/en/' + doc_version + '/' def _create_tutorial_url(): return _create_documentation_url() + 'tutorial.html' documentation_url = _create_documentation_url() tutorial_url = _create_tutorial_url() source_code_url = 'https://github.com/HaroldMills/Vesper'
<commit_before>""" Functions that return external URLs, for example for the Vesper documentation. """ import vesper.version as vesper_version _USE_LATEST_DOCUMENTATION_VERSION = False """Set this `True` during development, `False` for release.""" def _create_documentation_url(): if _USE_LATEST_DOCUMENTATION_VERSION: doc_version = 'latest' else: doc_version = vesper_version.full_version return 'https://vesper.readthedocs.io/en/' + doc_version + '/' def _create_tutorial_url(): return _create_documentation_url() + 'tutorial.html' documentation_url = _create_documentation_url() tutorial_url = _create_tutorial_url() source_code_url = 'https://github.com/HaroldMills/Vesper' <commit_msg>Edit external URLs module for development.<commit_after>
""" Functions that return external URLs, for example for the Vesper documentation. """ import vesper.version as vesper_version _USE_LATEST_DOCUMENTATION_VERSION = True """Set this `True` during development, `False` for release.""" def _create_documentation_url(): if _USE_LATEST_DOCUMENTATION_VERSION: doc_version = 'latest' else: doc_version = vesper_version.full_version return 'https://vesper.readthedocs.io/en/' + doc_version + '/' def _create_tutorial_url(): return _create_documentation_url() + 'tutorial.html' documentation_url = _create_documentation_url() tutorial_url = _create_tutorial_url() source_code_url = 'https://github.com/HaroldMills/Vesper'
""" Functions that return external URLs, for example for the Vesper documentation. """ import vesper.version as vesper_version _USE_LATEST_DOCUMENTATION_VERSION = False """Set this `True` during development, `False` for release.""" def _create_documentation_url(): if _USE_LATEST_DOCUMENTATION_VERSION: doc_version = 'latest' else: doc_version = vesper_version.full_version return 'https://vesper.readthedocs.io/en/' + doc_version + '/' def _create_tutorial_url(): return _create_documentation_url() + 'tutorial.html' documentation_url = _create_documentation_url() tutorial_url = _create_tutorial_url() source_code_url = 'https://github.com/HaroldMills/Vesper' Edit external URLs module for development.""" Functions that return external URLs, for example for the Vesper documentation. """ import vesper.version as vesper_version _USE_LATEST_DOCUMENTATION_VERSION = True """Set this `True` during development, `False` for release.""" def _create_documentation_url(): if _USE_LATEST_DOCUMENTATION_VERSION: doc_version = 'latest' else: doc_version = vesper_version.full_version return 'https://vesper.readthedocs.io/en/' + doc_version + '/' def _create_tutorial_url(): return _create_documentation_url() + 'tutorial.html' documentation_url = _create_documentation_url() tutorial_url = _create_tutorial_url() source_code_url = 'https://github.com/HaroldMills/Vesper'
<commit_before>""" Functions that return external URLs, for example for the Vesper documentation. """ import vesper.version as vesper_version _USE_LATEST_DOCUMENTATION_VERSION = False """Set this `True` during development, `False` for release.""" def _create_documentation_url(): if _USE_LATEST_DOCUMENTATION_VERSION: doc_version = 'latest' else: doc_version = vesper_version.full_version return 'https://vesper.readthedocs.io/en/' + doc_version + '/' def _create_tutorial_url(): return _create_documentation_url() + 'tutorial.html' documentation_url = _create_documentation_url() tutorial_url = _create_tutorial_url() source_code_url = 'https://github.com/HaroldMills/Vesper' <commit_msg>Edit external URLs module for development.<commit_after>""" Functions that return external URLs, for example for the Vesper documentation. """ import vesper.version as vesper_version _USE_LATEST_DOCUMENTATION_VERSION = True """Set this `True` during development, `False` for release.""" def _create_documentation_url(): if _USE_LATEST_DOCUMENTATION_VERSION: doc_version = 'latest' else: doc_version = vesper_version.full_version return 'https://vesper.readthedocs.io/en/' + doc_version + '/' def _create_tutorial_url(): return _create_documentation_url() + 'tutorial.html' documentation_url = _create_documentation_url() tutorial_url = _create_tutorial_url() source_code_url = 'https://github.com/HaroldMills/Vesper'
ba9386fc7c14be6335896e1d888c822db972dfe1
indra/java_vm.py
indra/java_vm.py
"""Handles all imports from jnius to prevent conflicts resulting from attempts to set JVM options while the VM is already running.""" import os import warnings import jnius_config if '-Xmx4g' not in jnius_config.get_options(): if not jnius_config.vm_running: jnius_config.add_options('-Xmx4g') else: warnings.warn("Couldn't set memory limit for Java VM because the VM " "is already running.") path_here = os.path.dirname(os.path.realpath(__file__)) cp = path_here + '/biopax/jars/paxtools.jar' cp_existing = os.environ.get('CLASSPATH') print 'before', os.environ.get('CLASSPATH') if cp_existing is not None: os.environ['CLASSPATH'] = cp + ':' + cp_existing else: os.environ['CLASSPATH'] = cp print 'after', os.environ.get('CLASSPATH') from jnius import autoclass, JavaException, cast
"""Handles all imports from jnius to prevent conflicts resulting from attempts to set JVM options while the VM is already running.""" import os import warnings import jnius_config if '-Xmx4g' not in jnius_config.get_options(): if not jnius_config.vm_running: jnius_config.add_options('-Xmx4g') else: warnings.warn("Couldn't set memory limit for Java VM because the VM " "is already running.") path_here = os.path.dirname(os.path.realpath(__file__)) cp = path_here + '/biopax/jars/paxtools.jar' cp_existing = os.environ.get('CLASSPATH') if cp_existing is not None: os.environ['CLASSPATH'] = cp + ':' + cp_existing else: os.environ['CLASSPATH'] = cp from jnius import autoclass, JavaException, cast
Remove messages from Java VM
Remove messages from Java VM
Python
bsd-2-clause
bgyori/indra,johnbachman/belpy,johnbachman/indra,bgyori/indra,sorgerlab/indra,jmuhlich/indra,pvtodorov/indra,johnbachman/belpy,sorgerlab/indra,pvtodorov/indra,jmuhlich/indra,sorgerlab/belpy,sorgerlab/belpy,pvtodorov/indra,johnbachman/belpy,sorgerlab/indra,jmuhlich/indra,bgyori/indra,johnbachman/indra,pvtodorov/indra,johnbachman/indra,sorgerlab/belpy
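The removed debug lines were Python 2 print statements (`print 'before', ...`), which are also a SyntaxError on Python 3. If similar tracing were ever wanted again, a version-neutral sketch would route it through the logging module so it can be silenced without editing the code; this is an alternative, not what the commit does:

```python
# Version-neutral tracing sketch: logging instead of bare print
# statements, so the output can be turned off in production.
import logging
import os

logging.basicConfig(level=logging.DEBUG)
logging.debug("CLASSPATH before: %s", os.environ.get('CLASSPATH'))
```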
"""Handles all imports from jnius to prevent conflicts resulting from attempts to set JVM options while the VM is already running.""" import os import warnings import jnius_config if '-Xmx4g' not in jnius_config.get_options(): if not jnius_config.vm_running: jnius_config.add_options('-Xmx4g') else: warnings.warn("Couldn't set memory limit for Java VM because the VM " "is already running.") path_here = os.path.dirname(os.path.realpath(__file__)) cp = path_here + '/biopax/jars/paxtools.jar' cp_existing = os.environ.get('CLASSPATH') print 'before', os.environ.get('CLASSPATH') if cp_existing is not None: os.environ['CLASSPATH'] = cp + ':' + cp_existing else: os.environ['CLASSPATH'] = cp print 'after', os.environ.get('CLASSPATH') from jnius import autoclass, JavaException, cast Remove messages from Java VM
"""Handles all imports from jnius to prevent conflicts resulting from attempts to set JVM options while the VM is already running.""" import os import warnings import jnius_config if '-Xmx4g' not in jnius_config.get_options(): if not jnius_config.vm_running: jnius_config.add_options('-Xmx4g') else: warnings.warn("Couldn't set memory limit for Java VM because the VM " "is already running.") path_here = os.path.dirname(os.path.realpath(__file__)) cp = path_here + '/biopax/jars/paxtools.jar' cp_existing = os.environ.get('CLASSPATH') if cp_existing is not None: os.environ['CLASSPATH'] = cp + ':' + cp_existing else: os.environ['CLASSPATH'] = cp from jnius import autoclass, JavaException, cast
<commit_before>"""Handles all imports from jnius to prevent conflicts resulting from attempts to set JVM options while the VM is already running.""" import os import warnings import jnius_config if '-Xmx4g' not in jnius_config.get_options(): if not jnius_config.vm_running: jnius_config.add_options('-Xmx4g') else: warnings.warn("Couldn't set memory limit for Java VM because the VM " "is already running.") path_here = os.path.dirname(os.path.realpath(__file__)) cp = path_here + '/biopax/jars/paxtools.jar' cp_existing = os.environ.get('CLASSPATH') print 'before', os.environ.get('CLASSPATH') if cp_existing is not None: os.environ['CLASSPATH'] = cp + ':' + cp_existing else: os.environ['CLASSPATH'] = cp print 'after', os.environ.get('CLASSPATH') from jnius import autoclass, JavaException, cast <commit_msg>Remove messages from Java VM<commit_after>
"""Handles all imports from jnius to prevent conflicts resulting from attempts to set JVM options while the VM is already running.""" import os import warnings import jnius_config if '-Xmx4g' not in jnius_config.get_options(): if not jnius_config.vm_running: jnius_config.add_options('-Xmx4g') else: warnings.warn("Couldn't set memory limit for Java VM because the VM " "is already running.") path_here = os.path.dirname(os.path.realpath(__file__)) cp = path_here + '/biopax/jars/paxtools.jar' cp_existing = os.environ.get('CLASSPATH') if cp_existing is not None: os.environ['CLASSPATH'] = cp + ':' + cp_existing else: os.environ['CLASSPATH'] = cp from jnius import autoclass, JavaException, cast
"""Handles all imports from jnius to prevent conflicts resulting from attempts to set JVM options while the VM is already running.""" import os import warnings import jnius_config if '-Xmx4g' not in jnius_config.get_options(): if not jnius_config.vm_running: jnius_config.add_options('-Xmx4g') else: warnings.warn("Couldn't set memory limit for Java VM because the VM " "is already running.") path_here = os.path.dirname(os.path.realpath(__file__)) cp = path_here + '/biopax/jars/paxtools.jar' cp_existing = os.environ.get('CLASSPATH') print 'before', os.environ.get('CLASSPATH') if cp_existing is not None: os.environ['CLASSPATH'] = cp + ':' + cp_existing else: os.environ['CLASSPATH'] = cp print 'after', os.environ.get('CLASSPATH') from jnius import autoclass, JavaException, cast Remove messages from Java VM"""Handles all imports from jnius to prevent conflicts resulting from attempts to set JVM options while the VM is already running.""" import os import warnings import jnius_config if '-Xmx4g' not in jnius_config.get_options(): if not jnius_config.vm_running: jnius_config.add_options('-Xmx4g') else: warnings.warn("Couldn't set memory limit for Java VM because the VM " "is already running.") path_here = os.path.dirname(os.path.realpath(__file__)) cp = path_here + '/biopax/jars/paxtools.jar' cp_existing = os.environ.get('CLASSPATH') if cp_existing is not None: os.environ['CLASSPATH'] = cp + ':' + cp_existing else: os.environ['CLASSPATH'] = cp from jnius import autoclass, JavaException, cast
<commit_before>"""Handles all imports from jnius to prevent conflicts resulting from attempts to set JVM options while the VM is already running.""" import os import warnings import jnius_config if '-Xmx4g' not in jnius_config.get_options(): if not jnius_config.vm_running: jnius_config.add_options('-Xmx4g') else: warnings.warn("Couldn't set memory limit for Java VM because the VM " "is already running.") path_here = os.path.dirname(os.path.realpath(__file__)) cp = path_here + '/biopax/jars/paxtools.jar' cp_existing = os.environ.get('CLASSPATH') print 'before', os.environ.get('CLASSPATH') if cp_existing is not None: os.environ['CLASSPATH'] = cp + ':' + cp_existing else: os.environ['CLASSPATH'] = cp print 'after', os.environ.get('CLASSPATH') from jnius import autoclass, JavaException, cast <commit_msg>Remove messages from Java VM<commit_after>"""Handles all imports from jnius to prevent conflicts resulting from attempts to set JVM options while the VM is already running.""" import os import warnings import jnius_config if '-Xmx4g' not in jnius_config.get_options(): if not jnius_config.vm_running: jnius_config.add_options('-Xmx4g') else: warnings.warn("Couldn't set memory limit for Java VM because the VM " "is already running.") path_here = os.path.dirname(os.path.realpath(__file__)) cp = path_here + '/biopax/jars/paxtools.jar' cp_existing = os.environ.get('CLASSPATH') if cp_existing is not None: os.environ['CLASSPATH'] = cp + ':' + cp_existing else: os.environ['CLASSPATH'] = cp from jnius import autoclass, JavaException, cast
8d170381532228ffbef32534ca1217714b5f1594
dataproperty/type/_typecode.py
dataproperty/type/_typecode.py
old_contents:

# encoding: utf-8

"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""

from __future__ import absolute_import


class Typecode(object):
    NONE = 0
    INT = 1 << 0
    FLOAT = 1 << 1
    STRING = 1 << 2
    DATETIME = 1 << 3
    INFINITY = 1 << 4
    NAN = 1 << 5
    BOOL = 1 << 6

    DEFAULT_TYPENAME_TABLE = {
        NONE: "NONE",
        INT: "INT",
        FLOAT: "FLOAT",
        STRING: "STRING",
        DATETIME: "DATETIME",
        INFINITY: "INFINITY",
        NAN: "NAN",
        BOOL: "BOOL",
    }

    TYPENAME_TABLE = DEFAULT_TYPENAME_TABLE

    @classmethod
    def get_typename(cls, typecode):
        type_name = cls.TYPENAME_TABLE.get(typecode)
        if type_name is None:
            raise ValueError("unknown typecode: {}".format(typecode))
        return type_name

new_contents:

# encoding: utf-8

"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""

from __future__ import absolute_import


class Typecode(object):
    NONE = 0
    INT = 1 << 0
    FLOAT = 1 << 1
    STRING = 1 << 2
    DATETIME = 1 << 3
    INFINITY = 1 << 4
    NAN = 1 << 5
    BOOL = 1 << 6

    DEFAULT_TYPENAME_TABLE = {
        NONE: "NONE",
        INT: "INTEGER",
        FLOAT: "FLOAT",
        STRING: "STRING",
        DATETIME: "DATETIME",
        INFINITY: "INFINITY",
        NAN: "NAN",
        BOOL: "BOOL",
    }

    TYPENAME_TABLE = DEFAULT_TYPENAME_TABLE

    @classmethod
    def get_typename(cls, typecode):
        type_name = cls.TYPENAME_TABLE.get(typecode)
        if type_name is None:
            raise ValueError("unknown typecode: {}".format(typecode))
        return type_name
Change integer default type name
Change integer default type name
Python
mit
thombashi/DataProperty
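After this commit the lookup for `Typecode.INT` reads `INTEGER`; every other entry is unchanged. A short usage sketch, assuming the `Typecode` class from the record above is in scope:

```python
# Assumes the Typecode class defined in the record above is in scope.
print(Typecode.get_typename(Typecode.INT))    # "INTEGER" after this commit
print(Typecode.get_typename(Typecode.FLOAT))  # "FLOAT"
```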
# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com> """ from __future__ import absolute_import class Typecode(object): NONE = 0 INT = 1 << 0 FLOAT = 1 << 1 STRING = 1 << 2 DATETIME = 1 << 3 INFINITY = 1 << 4 NAN = 1 << 5 BOOL = 1 << 6 DEFAULT_TYPENAME_TABLE = { NONE: "NONE", INT: "INT", FLOAT: "FLOAT", STRING: "STRING", DATETIME: "DATETIME", INFINITY: "INFINITY", NAN: "NAN", BOOL: "BOOL", } TYPENAME_TABLE = DEFAULT_TYPENAME_TABLE @classmethod def get_typename(cls, typecode): type_name = cls.TYPENAME_TABLE.get(typecode) if type_name is None: raise ValueError("unknown typecode: {}".format(typecode)) return type_name Change integer default type name
# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com> """ from __future__ import absolute_import class Typecode(object): NONE = 0 INT = 1 << 0 FLOAT = 1 << 1 STRING = 1 << 2 DATETIME = 1 << 3 INFINITY = 1 << 4 NAN = 1 << 5 BOOL = 1 << 6 DEFAULT_TYPENAME_TABLE = { NONE: "NONE", INT: "INTEGER", FLOAT: "FLOAT", STRING: "STRING", DATETIME: "DATETIME", INFINITY: "INFINITY", NAN: "NAN", BOOL: "BOOL", } TYPENAME_TABLE = DEFAULT_TYPENAME_TABLE @classmethod def get_typename(cls, typecode): type_name = cls.TYPENAME_TABLE.get(typecode) if type_name is None: raise ValueError("unknown typecode: {}".format(typecode)) return type_name
<commit_before># encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com> """ from __future__ import absolute_import class Typecode(object): NONE = 0 INT = 1 << 0 FLOAT = 1 << 1 STRING = 1 << 2 DATETIME = 1 << 3 INFINITY = 1 << 4 NAN = 1 << 5 BOOL = 1 << 6 DEFAULT_TYPENAME_TABLE = { NONE: "NONE", INT: "INT", FLOAT: "FLOAT", STRING: "STRING", DATETIME: "DATETIME", INFINITY: "INFINITY", NAN: "NAN", BOOL: "BOOL", } TYPENAME_TABLE = DEFAULT_TYPENAME_TABLE @classmethod def get_typename(cls, typecode): type_name = cls.TYPENAME_TABLE.get(typecode) if type_name is None: raise ValueError("unknown typecode: {}".format(typecode)) return type_name <commit_msg>Change integer default type name<commit_after>
# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com> """ from __future__ import absolute_import class Typecode(object): NONE = 0 INT = 1 << 0 FLOAT = 1 << 1 STRING = 1 << 2 DATETIME = 1 << 3 INFINITY = 1 << 4 NAN = 1 << 5 BOOL = 1 << 6 DEFAULT_TYPENAME_TABLE = { NONE: "NONE", INT: "INTEGER", FLOAT: "FLOAT", STRING: "STRING", DATETIME: "DATETIME", INFINITY: "INFINITY", NAN: "NAN", BOOL: "BOOL", } TYPENAME_TABLE = DEFAULT_TYPENAME_TABLE @classmethod def get_typename(cls, typecode): type_name = cls.TYPENAME_TABLE.get(typecode) if type_name is None: raise ValueError("unknown typecode: {}".format(typecode)) return type_name
# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com> """ from __future__ import absolute_import class Typecode(object): NONE = 0 INT = 1 << 0 FLOAT = 1 << 1 STRING = 1 << 2 DATETIME = 1 << 3 INFINITY = 1 << 4 NAN = 1 << 5 BOOL = 1 << 6 DEFAULT_TYPENAME_TABLE = { NONE: "NONE", INT: "INT", FLOAT: "FLOAT", STRING: "STRING", DATETIME: "DATETIME", INFINITY: "INFINITY", NAN: "NAN", BOOL: "BOOL", } TYPENAME_TABLE = DEFAULT_TYPENAME_TABLE @classmethod def get_typename(cls, typecode): type_name = cls.TYPENAME_TABLE.get(typecode) if type_name is None: raise ValueError("unknown typecode: {}".format(typecode)) return type_name Change integer default type name# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com> """ from __future__ import absolute_import class Typecode(object): NONE = 0 INT = 1 << 0 FLOAT = 1 << 1 STRING = 1 << 2 DATETIME = 1 << 3 INFINITY = 1 << 4 NAN = 1 << 5 BOOL = 1 << 6 DEFAULT_TYPENAME_TABLE = { NONE: "NONE", INT: "INTEGER", FLOAT: "FLOAT", STRING: "STRING", DATETIME: "DATETIME", INFINITY: "INFINITY", NAN: "NAN", BOOL: "BOOL", } TYPENAME_TABLE = DEFAULT_TYPENAME_TABLE @classmethod def get_typename(cls, typecode): type_name = cls.TYPENAME_TABLE.get(typecode) if type_name is None: raise ValueError("unknown typecode: {}".format(typecode)) return type_name
<commit_before># encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com> """ from __future__ import absolute_import class Typecode(object): NONE = 0 INT = 1 << 0 FLOAT = 1 << 1 STRING = 1 << 2 DATETIME = 1 << 3 INFINITY = 1 << 4 NAN = 1 << 5 BOOL = 1 << 6 DEFAULT_TYPENAME_TABLE = { NONE: "NONE", INT: "INT", FLOAT: "FLOAT", STRING: "STRING", DATETIME: "DATETIME", INFINITY: "INFINITY", NAN: "NAN", BOOL: "BOOL", } TYPENAME_TABLE = DEFAULT_TYPENAME_TABLE @classmethod def get_typename(cls, typecode): type_name = cls.TYPENAME_TABLE.get(typecode) if type_name is None: raise ValueError("unknown typecode: {}".format(typecode)) return type_name <commit_msg>Change integer default type name<commit_after># encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com> """ from __future__ import absolute_import class Typecode(object): NONE = 0 INT = 1 << 0 FLOAT = 1 << 1 STRING = 1 << 2 DATETIME = 1 << 3 INFINITY = 1 << 4 NAN = 1 << 5 BOOL = 1 << 6 DEFAULT_TYPENAME_TABLE = { NONE: "NONE", INT: "INTEGER", FLOAT: "FLOAT", STRING: "STRING", DATETIME: "DATETIME", INFINITY: "INFINITY", NAN: "NAN", BOOL: "BOOL", } TYPENAME_TABLE = DEFAULT_TYPENAME_TABLE @classmethod def get_typename(cls, typecode): type_name = cls.TYPENAME_TABLE.get(typecode) if type_name is None: raise ValueError("unknown typecode: {}".format(typecode)) return type_name
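A brief usage sketch for the typecode lookup in the record above. The class below is a trimmed copy of the record's Typecode so the snippet runs standalone; the bitmask-combination check at the end is an illustrative assumption (the power-of-two values permit it), not something the record itself demonstrates.

# Sketch: trimmed copy of the record's Typecode, plus an assumed bitmask use.
class Typecode(object):
    NONE = 0
    INT = 1 << 0
    FLOAT = 1 << 1
    TYPENAME_TABLE = {NONE: "NONE", INT: "INTEGER", FLOAT: "FLOAT"}

    @classmethod
    def get_typename(cls, typecode):
        type_name = cls.TYPENAME_TABLE.get(typecode)
        if type_name is None:
            raise ValueError("unknown typecode: {}".format(typecode))
        return type_name


print(Typecode.get_typename(Typecode.INT))  # -> "INTEGER" after this commit
numeric = Typecode.INT | Typecode.FLOAT     # power-of-two codes allow unions
assert numeric & Typecode.INT               # membership test via bitwise AND
try:
    Typecode.get_typename(numeric)          # combined codes have no name
except ValueError as exc:
    print(exc)                              # "unknown typecode: 3"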
d66b4be946785d7b9223a0a3497d8ec4a4cebea9
project/settings/production.py
project/settings/production.py
import os from .common import * DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'singlepoint', } } PUBLIC_ROOT = os.path.join(os.sep, 'var', 'www', 'singlepoint', 'public') STATIC_ROOT = os.path.join(PUBLIC_ROOT, 'static') MEDIA_ROOT = os.path.join(PUBLIC_ROOT, 'media') TEMPLATE_LOADERS = ( ('django.template.loaders.cached.Loader', TEMPLATE_LOADERS), ) PREPEND_WWW = False EMAIL_HOST_PASSWORD = os.environ.get('EMAIL_PASSWORD') EMAIL_HOST = 'mail.trilan.ru' EMAIL_HOST_USER = 'noanswer@trilan.ru' DEFAULT_FROM_EMAIL = 'noanswer@trilan.ru' ALLOWED_HOSTS = ['.singlepointhq.com'] BROKER_URL = 'redis://localhost:6379/0' INSTALLED_APPS += ("djcelery_email",) EMAIL_BACKEND = 'djcelery_email.backends.CeleryEmailBackend' CELERYD_CONCURRENCY = 2 CELERYD_MAX_TASKS_PER_CHILD = 100
import os from .common import * DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'singlepoint', } } PUBLIC_ROOT = os.path.join(os.sep, 'var', 'www', 'singlepoint', 'public') STATIC_ROOT = os.path.join(PUBLIC_ROOT, 'static') MEDIA_ROOT = os.path.join(PUBLIC_ROOT, 'media') TEMPLATE_LOADERS = ( ('django.template.loaders.cached.Loader', TEMPLATE_LOADERS), ) PREPEND_WWW = False EMAIL_HOST_PASSWORD = os.environ.get('EMAIL_PASSWORD') EMAIL_HOST = 'mail.trilan.ru' EMAIL_HOST_USER = 'noanswer@trilan.ru' DEFAULT_FROM_EMAIL = 'noanswer@trilan.ru' ALLOWED_HOSTS = ['.singlepointhq.com'] BROKER_URL = 'redis://localhost:6379/0' INSTALLED_APPS += ("djcelery", "djcelery_email",) EMAIL_BACKEND = 'djcelery_email.backends.CeleryEmailBackend' CELERYD_CONCURRENCY = 2 CELERYD_MAX_TASKS_PER_CHILD = 100
Add django celery to installed apps
Add django celery to installed apps
Python
mit
xobb1t/ddash2013,xobb1t/ddash2013
import os from .common import * DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'singlepoint', } } PUBLIC_ROOT = os.path.join(os.sep, 'var', 'www', 'singlepoint', 'public') STATIC_ROOT = os.path.join(PUBLIC_ROOT, 'static') MEDIA_ROOT = os.path.join(PUBLIC_ROOT, 'media') TEMPLATE_LOADERS = ( ('django.template.loaders.cached.Loader', TEMPLATE_LOADERS), ) PREPEND_WWW = False EMAIL_HOST_PASSWORD = os.environ.get('EMAIL_PASSWORD') EMAIL_HOST = 'mail.trilan.ru' EMAIL_HOST_USER = 'noanswer@trilan.ru' DEFAULT_FROM_EMAIL = 'noanswer@trilan.ru' ALLOWED_HOSTS = ['.singlepointhq.com'] BROKER_URL = 'redis://localhost:6379/0' INSTALLED_APPS += ("djcelery_email",) EMAIL_BACKEND = 'djcelery_email.backends.CeleryEmailBackend' CELERYD_CONCURRENCY = 2 CELERYD_MAX_TASKS_PER_CHILD = 100 Add django celery to installed apps
import os from .common import * DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'singlepoint', } } PUBLIC_ROOT = os.path.join(os.sep, 'var', 'www', 'singlepoint', 'public') STATIC_ROOT = os.path.join(PUBLIC_ROOT, 'static') MEDIA_ROOT = os.path.join(PUBLIC_ROOT, 'media') TEMPLATE_LOADERS = ( ('django.template.loaders.cached.Loader', TEMPLATE_LOADERS), ) PREPEND_WWW = False EMAIL_HOST_PASSWORD = os.environ.get('EMAIL_PASSWORD') EMAIL_HOST = 'mail.trilan.ru' EMAIL_HOST_USER = 'noanswer@trilan.ru' DEFAULT_FROM_EMAIL = 'noanswer@trilan.ru' ALLOWED_HOSTS = ['.singlepointhq.com'] BROKER_URL = 'redis://localhost:6379/0' INSTALLED_APPS += ("djcelery", "djcelery_email",) EMAIL_BACKEND = 'djcelery_email.backends.CeleryEmailBackend' CELERYD_CONCURRENCY = 2 CELERYD_MAX_TASKS_PER_CHILD = 100
<commit_before>import os from .common import * DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'singlepoint', } } PUBLIC_ROOT = os.path.join(os.sep, 'var', 'www', 'singlepoint', 'public') STATIC_ROOT = os.path.join(PUBLIC_ROOT, 'static') MEDIA_ROOT = os.path.join(PUBLIC_ROOT, 'media') TEMPLATE_LOADERS = ( ('django.template.loaders.cached.Loader', TEMPLATE_LOADERS), ) PREPEND_WWW = False EMAIL_HOST_PASSWORD = os.environ.get('EMAIL_PASSWORD') EMAIL_HOST = 'mail.trilan.ru' EMAIL_HOST_USER = 'noanswer@trilan.ru' DEFAULT_FROM_EMAIL = 'noanswer@trilan.ru' ALLOWED_HOSTS = ['.singlepointhq.com'] BROKER_URL = 'redis://localhost:6379/0' INSTALLED_APPS += ("djcelery_email",) EMAIL_BACKEND = 'djcelery_email.backends.CeleryEmailBackend' CELERYD_CONCURRENCY = 2 CELERYD_MAX_TASKS_PER_CHILD = 100 <commit_msg>Add django celery to installed apps<commit_after>
import os from .common import * DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'singlepoint', } } PUBLIC_ROOT = os.path.join(os.sep, 'var', 'www', 'singlepoint', 'public') STATIC_ROOT = os.path.join(PUBLIC_ROOT, 'static') MEDIA_ROOT = os.path.join(PUBLIC_ROOT, 'media') TEMPLATE_LOADERS = ( ('django.template.loaders.cached.Loader', TEMPLATE_LOADERS), ) PREPEND_WWW = False EMAIL_HOST_PASSWORD = os.environ.get('EMAIL_PASSWORD') EMAIL_HOST = 'mail.trilan.ru' EMAIL_HOST_USER = 'noanswer@trilan.ru' DEFAULT_FROM_EMAIL = 'noanswer@trilan.ru' ALLOWED_HOSTS = ['.singlepointhq.com'] BROKER_URL = 'redis://localhost:6379/0' INSTALLED_APPS += ("djcelery", "djcelery_email",) EMAIL_BACKEND = 'djcelery_email.backends.CeleryEmailBackend' CELERYD_CONCURRENCY = 2 CELERYD_MAX_TASKS_PER_CHILD = 100
import os from .common import * DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'singlepoint', } } PUBLIC_ROOT = os.path.join(os.sep, 'var', 'www', 'singlepoint', 'public') STATIC_ROOT = os.path.join(PUBLIC_ROOT, 'static') MEDIA_ROOT = os.path.join(PUBLIC_ROOT, 'media') TEMPLATE_LOADERS = ( ('django.template.loaders.cached.Loader', TEMPLATE_LOADERS), ) PREPEND_WWW = False EMAIL_HOST_PASSWORD = os.environ.get('EMAIL_PASSWORD') EMAIL_HOST = 'mail.trilan.ru' EMAIL_HOST_USER = 'noanswer@trilan.ru' DEFAULT_FROM_EMAIL = 'noanswer@trilan.ru' ALLOWED_HOSTS = ['.singlepointhq.com'] BROKER_URL = 'redis://localhost:6379/0' INSTALLED_APPS += ("djcelery_email",) EMAIL_BACKEND = 'djcelery_email.backends.CeleryEmailBackend' CELERYD_CONCURRENCY = 2 CELERYD_MAX_TASKS_PER_CHILD = 100 Add django celery to installed appsimport os from .common import * DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'singlepoint', } } PUBLIC_ROOT = os.path.join(os.sep, 'var', 'www', 'singlepoint', 'public') STATIC_ROOT = os.path.join(PUBLIC_ROOT, 'static') MEDIA_ROOT = os.path.join(PUBLIC_ROOT, 'media') TEMPLATE_LOADERS = ( ('django.template.loaders.cached.Loader', TEMPLATE_LOADERS), ) PREPEND_WWW = False EMAIL_HOST_PASSWORD = os.environ.get('EMAIL_PASSWORD') EMAIL_HOST = 'mail.trilan.ru' EMAIL_HOST_USER = 'noanswer@trilan.ru' DEFAULT_FROM_EMAIL = 'noanswer@trilan.ru' ALLOWED_HOSTS = ['.singlepointhq.com'] BROKER_URL = 'redis://localhost:6379/0' INSTALLED_APPS += ("djcelery", "djcelery_email",) EMAIL_BACKEND = 'djcelery_email.backends.CeleryEmailBackend' CELERYD_CONCURRENCY = 2 CELERYD_MAX_TASKS_PER_CHILD = 100
<commit_before>import os from .common import * DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'singlepoint', } } PUBLIC_ROOT = os.path.join(os.sep, 'var', 'www', 'singlepoint', 'public') STATIC_ROOT = os.path.join(PUBLIC_ROOT, 'static') MEDIA_ROOT = os.path.join(PUBLIC_ROOT, 'media') TEMPLATE_LOADERS = ( ('django.template.loaders.cached.Loader', TEMPLATE_LOADERS), ) PREPEND_WWW = False EMAIL_HOST_PASSWORD = os.environ.get('EMAIL_PASSWORD') EMAIL_HOST = 'mail.trilan.ru' EMAIL_HOST_USER = 'noanswer@trilan.ru' DEFAULT_FROM_EMAIL = 'noanswer@trilan.ru' ALLOWED_HOSTS = ['.singlepointhq.com'] BROKER_URL = 'redis://localhost:6379/0' INSTALLED_APPS += ("djcelery_email",) EMAIL_BACKEND = 'djcelery_email.backends.CeleryEmailBackend' CELERYD_CONCURRENCY = 2 CELERYD_MAX_TASKS_PER_CHILD = 100 <commit_msg>Add django celery to installed apps<commit_after>import os from .common import * DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'singlepoint', } } PUBLIC_ROOT = os.path.join(os.sep, 'var', 'www', 'singlepoint', 'public') STATIC_ROOT = os.path.join(PUBLIC_ROOT, 'static') MEDIA_ROOT = os.path.join(PUBLIC_ROOT, 'media') TEMPLATE_LOADERS = ( ('django.template.loaders.cached.Loader', TEMPLATE_LOADERS), ) PREPEND_WWW = False EMAIL_HOST_PASSWORD = os.environ.get('EMAIL_PASSWORD') EMAIL_HOST = 'mail.trilan.ru' EMAIL_HOST_USER = 'noanswer@trilan.ru' DEFAULT_FROM_EMAIL = 'noanswer@trilan.ru' ALLOWED_HOSTS = ['.singlepointhq.com'] BROKER_URL = 'redis://localhost:6379/0' INSTALLED_APPS += ("djcelery", "djcelery_email",) EMAIL_BACKEND = 'djcelery_email.backends.CeleryEmailBackend' CELERYD_CONCURRENCY = 2 CELERYD_MAX_TASKS_PER_CHILD = 100
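A small, self-contained illustration of the tuple-append idiom in the settings change above. The app names are taken from the record, the base app is a placeholder; the point is that += on a tuple needs a real tuple on the right, so the trailing comma matters.

# Sketch of the INSTALLED_APPS += (...) idiom from the record above.
INSTALLED_APPS = ("django.contrib.auth",)          # placeholder base app

INSTALLED_APPS += ("djcelery", "djcelery_email",)  # tuple concat, as committed
assert "djcelery" in INSTALLED_APPS

# Pitfall: ("djcelery") is a parenthesized string, not a one-element tuple,
# and tuple += str raises TypeError, so the trailing comma is load-bearing.
try:
    broken = ("django.contrib.auth",)
    broken += ("djcelery")                         # missing trailing comma
except TypeError as exc:
    print(exc)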
88fd32be09bc20ce734f272b7d3a54a71958e6b4
energy/models.py
energy/models.py
from sqlalchemy import create_engine from sqlalchemy.sql import text import arrow def get_energy_chart_data(meterId, start_date="2016-09-01", end_date="2016-10-01"): """ Return json object for flot chart """ engine = create_engine('sqlite:///../data/'+ str(meterId) + '.db', echo=True) conn = engine.connect() query = """SELECT DATE_M, Ch1 FROM INTERVAL_READINGS WHERE DATE_M >= DATE(:x) AND DATE_M < DATE(:y) ORDER BY DATE_M ASC """ s = text(query) data = conn.execute(s, x=start_date, y=end_date).fetchall() chartdata = {} chartdata['label'] = 'Energy Profile' chartdata['consumption'] = [] for row in data: dTime = arrow.get(row[0]) ts = int(dTime.timestamp * 1000) chartdata['consumption'].append([ts, row[1]]) return chartdata
from sqlalchemy import create_engine from sqlalchemy import MetaData, Table, Column, DateTime, Float, between from sqlalchemy.sql import select, text import arrow metadata = MetaData() meter_readings = Table('interval_readings', metadata, Column('reading_date', DateTime, primary_key=True), Column('ch1', Float, nullable=False), ) def get_energy_chart_data(meterId, start_date="2016-09-01", end_date="2016-10-01"): """ Return json object for flot chart """ engine = create_engine('sqlite:///../data/'+ str(meterId) + '.db', echo=True) conn = engine.connect() s = select([meter_readings]).where(between(meter_readings.c.reading_date, start_date, end_date)) data = conn.execute(s).fetchall() chartdata = {} chartdata['label'] = 'Energy Profile' chartdata['consumption'] = [] for row in data: dTime = arrow.get(row[0]) ts = int(dTime.timestamp * 1000) chartdata['consumption'].append([ts, row[1]]) return chartdata
Use sqlalchemy to generate query
Use sqlalchemy to generate query
Python
agpl-3.0
aguinane/energyusage,aguinane/energyusage,aguinane/energyusage,aguinane/energyusage
from sqlalchemy import create_engine from sqlalchemy.sql import text import arrow def get_energy_chart_data(meterId, start_date="2016-09-01", end_date="2016-10-01"): """ Return json object for flot chart """ engine = create_engine('sqlite:///../data/'+ str(meterId) + '.db', echo=True) conn = engine.connect() query = """SELECT DATE_M, Ch1 FROM INTERVAL_READINGS WHERE DATE_M >= DATE(:x) AND DATE_M < DATE(:y) ORDER BY DATE_M ASC """ s = text(query) data = conn.execute(s, x=start_date, y=end_date).fetchall() chartdata = {} chartdata['label'] = 'Energy Profile' chartdata['consumption'] = [] for row in data: dTime = arrow.get(row[0]) ts = int(dTime.timestamp * 1000) chartdata['consumption'].append([ts, row[1]]) return chartdata Use sqlalchemy to generate query
from sqlalchemy import create_engine from sqlalchemy import MetaData, Table, Column, DateTime, Float, between from sqlalchemy.sql import select, text import arrow metadata = MetaData() meter_readings = Table('interval_readings', metadata, Column('reading_date', DateTime, primary_key=True), Column('ch1', Float, nullable=False), ) def get_energy_chart_data(meterId, start_date="2016-09-01", end_date="2016-10-01"): """ Return json object for flot chart """ engine = create_engine('sqlite:///../data/'+ str(meterId) + '.db', echo=True) conn = engine.connect() s = select([meter_readings]).where(between(meter_readings.c.reading_date, start_date, end_date)) data = conn.execute(s).fetchall() chartdata = {} chartdata['label'] = 'Energy Profile' chartdata['consumption'] = [] for row in data: dTime = arrow.get(row[0]) ts = int(dTime.timestamp * 1000) chartdata['consumption'].append([ts, row[1]]) return chartdata
<commit_before>from sqlalchemy import create_engine from sqlalchemy.sql import text import arrow def get_energy_chart_data(meterId, start_date="2016-09-01", end_date="2016-10-01"): """ Return json object for flot chart """ engine = create_engine('sqlite:///../data/'+ str(meterId) + '.db', echo=True) conn = engine.connect() query = """SELECT DATE_M, Ch1 FROM INTERVAL_READINGS WHERE DATE_M >= DATE(:x) AND DATE_M < DATE(:y) ORDER BY DATE_M ASC """ s = text(query) data = conn.execute(s, x=start_date, y=end_date).fetchall() chartdata = {} chartdata['label'] = 'Energy Profile' chartdata['consumption'] = [] for row in data: dTime = arrow.get(row[0]) ts = int(dTime.timestamp * 1000) chartdata['consumption'].append([ts, row[1]]) return chartdata <commit_msg>Use sqlalchemy to generate query<commit_after>
from sqlalchemy import create_engine from sqlalchemy import MetaData, Table, Column, DateTime, Float, between from sqlalchemy.sql import select, text import arrow metadata = MetaData() meter_readings = Table('interval_readings', metadata, Column('reading_date', DateTime, primary_key=True), Column('ch1', Float, nullable=False), ) def get_energy_chart_data(meterId, start_date="2016-09-01", end_date="2016-10-01"): """ Return json object for flot chart """ engine = create_engine('sqlite:///../data/'+ str(meterId) + '.db', echo=True) conn = engine.connect() s = select([meter_readings]).where(between(meter_readings.c.reading_date, start_date, end_date)) data = conn.execute(s).fetchall() chartdata = {} chartdata['label'] = 'Energy Profile' chartdata['consumption'] = [] for row in data: dTime = arrow.get(row[0]) ts = int(dTime.timestamp * 1000) chartdata['consumption'].append([ts, row[1]]) return chartdata
from sqlalchemy import create_engine from sqlalchemy.sql import text import arrow def get_energy_chart_data(meterId, start_date="2016-09-01", end_date="2016-10-01"): """ Return json object for flot chart """ engine = create_engine('sqlite:///../data/'+ str(meterId) + '.db', echo=True) conn = engine.connect() query = """SELECT DATE_M, Ch1 FROM INTERVAL_READINGS WHERE DATE_M >= DATE(:x) AND DATE_M < DATE(:y) ORDER BY DATE_M ASC """ s = text(query) data = conn.execute(s, x=start_date, y=end_date).fetchall() chartdata = {} chartdata['label'] = 'Energy Profile' chartdata['consumption'] = [] for row in data: dTime = arrow.get(row[0]) ts = int(dTime.timestamp * 1000) chartdata['consumption'].append([ts, row[1]]) return chartdata Use sqlalchemy to generate queryfrom sqlalchemy import create_engine from sqlalchemy import MetaData, Table, Column, DateTime, Float, between from sqlalchemy.sql import select, text import arrow metadata = MetaData() meter_readings = Table('interval_readings', metadata, Column('reading_date', DateTime, primary_key=True), Column('ch1', Float, nullable=False), ) def get_energy_chart_data(meterId, start_date="2016-09-01", end_date="2016-10-01"): """ Return json object for flot chart """ engine = create_engine('sqlite:///../data/'+ str(meterId) + '.db', echo=True) conn = engine.connect() s = select([meter_readings]).where(between(meter_readings.c.reading_date, start_date, end_date)) data = conn.execute(s).fetchall() chartdata = {} chartdata['label'] = 'Energy Profile' chartdata['consumption'] = [] for row in data: dTime = arrow.get(row[0]) ts = int(dTime.timestamp * 1000) chartdata['consumption'].append([ts, row[1]]) return chartdata
<commit_before>from sqlalchemy import create_engine from sqlalchemy.sql import text import arrow def get_energy_chart_data(meterId, start_date="2016-09-01", end_date="2016-10-01"): """ Return json object for flot chart """ engine = create_engine('sqlite:///../data/'+ str(meterId) + '.db', echo=True) conn = engine.connect() query = """SELECT DATE_M, Ch1 FROM INTERVAL_READINGS WHERE DATE_M >= DATE(:x) AND DATE_M < DATE(:y) ORDER BY DATE_M ASC """ s = text(query) data = conn.execute(s, x=start_date, y=end_date).fetchall() chartdata = {} chartdata['label'] = 'Energy Profile' chartdata['consumption'] = [] for row in data: dTime = arrow.get(row[0]) ts = int(dTime.timestamp * 1000) chartdata['consumption'].append([ts, row[1]]) return chartdata <commit_msg>Use sqlalchemy to generate query<commit_after>from sqlalchemy import create_engine from sqlalchemy import MetaData, Table, Column, DateTime, Float, between from sqlalchemy.sql import select, text import arrow metadata = MetaData() meter_readings = Table('interval_readings', metadata, Column('reading_date', DateTime, primary_key=True), Column('ch1', Float, nullable=False), ) def get_energy_chart_data(meterId, start_date="2016-09-01", end_date="2016-10-01"): """ Return json object for flot chart """ engine = create_engine('sqlite:///../data/'+ str(meterId) + '.db', echo=True) conn = engine.connect() s = select([meter_readings]).where(between(meter_readings.c.reading_date, start_date, end_date)) data = conn.execute(s).fetchall() chartdata = {} chartdata['label'] = 'Energy Profile' chartdata['consumption'] = [] for row in data: dTime = arrow.get(row[0]) ts = int(dTime.timestamp * 1000) chartdata['consumption'].append([ts, row[1]]) return chartdata
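The change above swaps a hand-written SQL string for SQLAlchemy Core constructs. Below is a minimal runnable sketch of the same Table/select/between pattern against a throwaway in-memory SQLite database; the column names mirror the record, the sample rows are invented, and the select(readings) spelling assumes SQLAlchemy 1.4+ (the record itself uses the older select([table]) list form).

# Minimal sketch of the Core select()/between() pattern from the record above.
from datetime import datetime

from sqlalchemy import (MetaData, Table, Column, DateTime, Float,
                        between, create_engine, insert, select)

metadata = MetaData()
readings = Table("interval_readings", metadata,
                 Column("reading_date", DateTime, primary_key=True),
                 Column("ch1", Float, nullable=False))

engine = create_engine("sqlite://")   # in-memory DB stands in for the meter file
metadata.create_all(engine)

with engine.begin() as conn:
    conn.execute(insert(readings), [
        {"reading_date": datetime(2016, 9, 15), "ch1": 1.5},
        {"reading_date": datetime(2016, 11, 1), "ch1": 2.0},  # outside range
    ])

with engine.connect() as conn:
    stmt = select(readings).where(
        between(readings.c.reading_date,
                datetime(2016, 9, 1), datetime(2016, 10, 1)))
    for reading_date, ch1 in conn.execute(stmt):
        print(reading_date, ch1)      # only the September row matches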
0b132427e5e81d7e502085d62177bc079cb9d6e8
gaphor/misc/tests/test_gidlethread.py
gaphor/misc/tests/test_gidlethread.py
import pytest from gaphor.misc.gidlethread import GIdleThread def counter(count): for x in range(count): yield x @pytest.fixture def gidle_counter(request): # Setup GIdle Thread with 0.01 sec timeout t = GIdleThread(counter(request.param)) t.start() assert t.is_alive() wait_result = t.wait(0.01) yield wait_result # Teardown GIdle Thread t.interrupt() @pytest.mark.parametrize(argnames="gidle_counter", argvalues=[20000], indirect=True) def test_wait_with_timeout(gidle_counter): # GIVEN a long coroutine thread # WHEN waiting short timeout # THEN timeout is True assert gidle_counter @pytest.mark.parametrize(argnames="gidle_counter", argvalues=[2], indirect=True) def test_wait_until_finished(gidle_counter): # GIVEN a short coroutine thread # WHEN wait for coroutine to finish # THEN coroutine finished assert not gidle_counter
import pytest from gaphor.misc.gidlethread import GIdleThread def counter(count): for x in range(count): yield x @pytest.fixture def gidle_counter(request): # Setup GIdle Thread with 0.02 sec timeout t = GIdleThread(counter(request.param)) t.start() assert t.is_alive() wait_result = t.wait(0.02) yield wait_result # Teardown GIdle Thread t.interrupt() @pytest.mark.parametrize(argnames="gidle_counter", argvalues=[20000], indirect=True) def test_wait_with_timeout(gidle_counter): # GIVEN a long coroutine thread # WHEN waiting short timeout # THEN timeout is True assert gidle_counter @pytest.mark.parametrize(argnames="gidle_counter", argvalues=[2], indirect=True) def test_wait_until_finished(gidle_counter): # GIVEN a short coroutine thread # WHEN wait for coroutine to finish # THEN coroutine finished assert not gidle_counter
Fix test failing in macOS due to short GIdleThread timeout
Fix test failing in macOS due to short GIdleThread timeout Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me>
Python
lgpl-2.1
amolenaar/gaphor,amolenaar/gaphor
import pytest from gaphor.misc.gidlethread import GIdleThread def counter(count): for x in range(count): yield x @pytest.fixture def gidle_counter(request): # Setup GIdle Thread with 0.01 sec timeout t = GIdleThread(counter(request.param)) t.start() assert t.is_alive() wait_result = t.wait(0.01) yield wait_result # Teardown GIdle Thread t.interrupt() @pytest.mark.parametrize(argnames="gidle_counter", argvalues=[20000], indirect=True) def test_wait_with_timeout(gidle_counter): # GIVEN a long coroutine thread # WHEN waiting short timeout # THEN timeout is True assert gidle_counter @pytest.mark.parametrize(argnames="gidle_counter", argvalues=[2], indirect=True) def test_wait_until_finished(gidle_counter): # GIVEN a short coroutine thread # WHEN wait for coroutine to finish # THEN coroutine finished assert not gidle_counter Fix test failing in macOS due to short GIdleThread timeout Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me>
import pytest from gaphor.misc.gidlethread import GIdleThread def counter(count): for x in range(count): yield x @pytest.fixture def gidle_counter(request): # Setup GIdle Thread with 0.02 sec timeout t = GIdleThread(counter(request.param)) t.start() assert t.is_alive() wait_result = t.wait(0.02) yield wait_result # Teardown GIdle Thread t.interrupt() @pytest.mark.parametrize(argnames="gidle_counter", argvalues=[20000], indirect=True) def test_wait_with_timeout(gidle_counter): # GIVEN a long coroutine thread # WHEN waiting short timeout # THEN timeout is True assert gidle_counter @pytest.mark.parametrize(argnames="gidle_counter", argvalues=[2], indirect=True) def test_wait_until_finished(gidle_counter): # GIVEN a short coroutine thread # WHEN wait for coroutine to finish # THEN coroutine finished assert not gidle_counter
<commit_before>import pytest from gaphor.misc.gidlethread import GIdleThread def counter(count): for x in range(count): yield x @pytest.fixture def gidle_counter(request): # Setup GIdle Thread with 0.01 sec timeout t = GIdleThread(counter(request.param)) t.start() assert t.is_alive() wait_result = t.wait(0.01) yield wait_result # Teardown GIdle Thread t.interrupt() @pytest.mark.parametrize(argnames="gidle_counter", argvalues=[20000], indirect=True) def test_wait_with_timeout(gidle_counter): # GIVEN a long coroutine thread # WHEN waiting short timeout # THEN timeout is True assert gidle_counter @pytest.mark.parametrize(argnames="gidle_counter", argvalues=[2], indirect=True) def test_wait_until_finished(gidle_counter): # GIVEN a short coroutine thread # WHEN wait for coroutine to finish # THEN coroutine finished assert not gidle_counter <commit_msg>Fix test failing in macOS due to short GIdleThread timeout Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me><commit_after>
import pytest from gaphor.misc.gidlethread import GIdleThread def counter(count): for x in range(count): yield x @pytest.fixture def gidle_counter(request): # Setup GIdle Thread with 0.02 sec timeout t = GIdleThread(counter(request.param)) t.start() assert t.is_alive() wait_result = t.wait(0.02) yield wait_result # Teardown GIdle Thread t.interrupt() @pytest.mark.parametrize(argnames="gidle_counter", argvalues=[20000], indirect=True) def test_wait_with_timeout(gidle_counter): # GIVEN a long coroutine thread # WHEN waiting short timeout # THEN timeout is True assert gidle_counter @pytest.mark.parametrize(argnames="gidle_counter", argvalues=[2], indirect=True) def test_wait_until_finished(gidle_counter): # GIVEN a short coroutine thread # WHEN wait for coroutine to finish # THEN coroutine finished assert not gidle_counter
import pytest from gaphor.misc.gidlethread import GIdleThread def counter(count): for x in range(count): yield x @pytest.fixture def gidle_counter(request): # Setup GIdle Thread with 0.01 sec timeout t = GIdleThread(counter(request.param)) t.start() assert t.is_alive() wait_result = t.wait(0.01) yield wait_result # Teardown GIdle Thread t.interrupt() @pytest.mark.parametrize(argnames="gidle_counter", argvalues=[20000], indirect=True) def test_wait_with_timeout(gidle_counter): # GIVEN a long coroutine thread # WHEN waiting short timeout # THEN timeout is True assert gidle_counter @pytest.mark.parametrize(argnames="gidle_counter", argvalues=[2], indirect=True) def test_wait_until_finished(gidle_counter): # GIVEN a short coroutine thread # WHEN wait for coroutine to finish # THEN coroutine finished assert not gidle_counter Fix test failing in macOS due to short GIdleThread timeout Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me>import pytest from gaphor.misc.gidlethread import GIdleThread def counter(count): for x in range(count): yield x @pytest.fixture def gidle_counter(request): # Setup GIdle Thread with 0.02 sec timeout t = GIdleThread(counter(request.param)) t.start() assert t.is_alive() wait_result = t.wait(0.02) yield wait_result # Teardown GIdle Thread t.interrupt() @pytest.mark.parametrize(argnames="gidle_counter", argvalues=[20000], indirect=True) def test_wait_with_timeout(gidle_counter): # GIVEN a long coroutine thread # WHEN waiting short timeout # THEN timeout is True assert gidle_counter @pytest.mark.parametrize(argnames="gidle_counter", argvalues=[2], indirect=True) def test_wait_until_finished(gidle_counter): # GIVEN a short coroutine thread # WHEN wait for coroutine to finish # THEN coroutine finished assert not gidle_counter
<commit_before>import pytest from gaphor.misc.gidlethread import GIdleThread def counter(count): for x in range(count): yield x @pytest.fixture def gidle_counter(request): # Setup GIdle Thread with 0.01 sec timeout t = GIdleThread(counter(request.param)) t.start() assert t.is_alive() wait_result = t.wait(0.01) yield wait_result # Teardown GIdle Thread t.interrupt() @pytest.mark.parametrize(argnames="gidle_counter", argvalues=[20000], indirect=True) def test_wait_with_timeout(gidle_counter): # GIVEN a long coroutine thread # WHEN waiting short timeout # THEN timeout is True assert gidle_counter @pytest.mark.parametrize(argnames="gidle_counter", argvalues=[2], indirect=True) def test_wait_until_finished(gidle_counter): # GIVEN a short coroutine thread # WHEN wait for coroutine to finish # THEN coroutine finished assert not gidle_counter <commit_msg>Fix test failing in macOS due to short GIdleThread timeout Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me><commit_after>import pytest from gaphor.misc.gidlethread import GIdleThread def counter(count): for x in range(count): yield x @pytest.fixture def gidle_counter(request): # Setup GIdle Thread with 0.02 sec timeout t = GIdleThread(counter(request.param)) t.start() assert t.is_alive() wait_result = t.wait(0.02) yield wait_result # Teardown GIdle Thread t.interrupt() @pytest.mark.parametrize(argnames="gidle_counter", argvalues=[20000], indirect=True) def test_wait_with_timeout(gidle_counter): # GIVEN a long coroutine thread # WHEN waiting short timeout # THEN timeout is True assert gidle_counter @pytest.mark.parametrize(argnames="gidle_counter", argvalues=[2], indirect=True) def test_wait_until_finished(gidle_counter): # GIVEN a short coroutine thread # WHEN wait for coroutine to finish # THEN coroutine finished assert not gidle_counter
ea1389d6dfb0060cda8d194079aacc900bbf56ae
simple_graph.py
simple_graph.py
#!/usr/bin/env python from __future__ import print_function from __future__ import unicode_literals class Graph(object): ''' Create an empty graph. ''' def __init__(self): self.graph = {} return def nodes(): return nodes def edges(): return edges def add_node(self, node): self.graph.setdefault(node, []) return def add_edge(self, node1, node2): return def del_node(self, node): try: del self.graph[node] except KeyError: raise KeyError('node not in graph') def has_node(self, node): return node in self.graph def neighbors(self, node): return self.graph[node] def adjecent(self, node1, node2): if node2 in self.graph[node1] or node1 in self.graph[node2]: return True else: return False
#!/usr/bin/env python from __future__ import print_function from __future__ import unicode_literals class Graph(object): ''' Create an empty graph. ''' def __init__(self): self.graph = {} return def nodes(self): return self.graph.keys() def edges(self): edge_list = [] for key, value in self.graph(): for item in value: edge_list.append((key, item)) return edge_list def add_node(self, node): self.graph.setdefault(node, []) def add_edge(self, node1, node2): if node1 in self.graph: self.graph.append(node2) else: self.graph[node1] = node2 def del_node(self, node): try: del self.graph[node] except KeyError: raise KeyError('node not in graph') def has_node(self, node): return node in self.graph def neighbors(self, node): return self.graph[node] def adjecent(self, node1, node2): if node2 in self.graph[node1] or node1 in self.graph[node2]: return True else: return False
Add functions for adding a node, an edge, and defining a node
Add functions for adding a node, an edge, and defining a node
Python
mit
constanthatz/data-structures
#!/usr/bin/env python from __future__ import print_function from __future__ import unicode_literals class Graph(object): ''' Create an empty graph. ''' def __init__(self): self.graph = {} return def nodes(): return nodes def edges(): return edges def add_node(self, node): self.graph.setdefault(node, []) return def add_edge(self, node1, node2): return def del_node(self, node): try: del self.graph[node] except KeyError: raise KeyError('node not in graph') def has_node(self, node): return node in self.graph def neighbors(self, node): return self.graph[node] def adjecent(self, node1, node2): if node2 in self.graph[node1] or node1 in self.graph[node2]: return True else: return False Add functions for adding a node, an edge, and defining a node
#!/usr/bin/env python from __future__ import print_function from __future__ import unicode_literals class Graph(object): ''' Create an empty graph. ''' def __init__(self): self.graph = {} return def nodes(self): return self.graph.keys() def edges(self): edge_list = [] for key, value in self.graph(): for item in value: edge_list.append((key, item)) return edge_list def add_node(self, node): self.graph.setdefault(node, []) def add_edge(self, node1, node2): if node1 in self.graph: self.graph.append(node2) else: self.graph[node1] = node2 def del_node(self, node): try: del self.graph[node] except KeyError: raise KeyError('node not in graph') def has_node(self, node): return node in self.graph def neighbors(self, node): return self.graph[node] def adjecent(self, node1, node2): if node2 in self.graph[node1] or node1 in self.graph[node2]: return True else: return False
<commit_before>#!/usr/bin/env python from __future__ import print_function from __future__ import unicode_literals class Graph(object): ''' Create an empty graph. ''' def __init__(self): self.graph = {} return def nodes(): return nodes def edges(): return edges def add_node(self, node): self.graph.setdefault(node, []) return def add_edge(self, node1, node2): return def del_node(self, node): try: del self.graph[node] except KeyError: raise KeyError('node not in graph') def has_node(self, node): return node in self.graph def neighbors(self, node): return self.graph[node] def adjecent(self, node1, node2): if node2 in self.graph[node1] or node1 in self.graph[node2]: return True else: return False <commit_msg>Add functions for adding a node, an edge, and defining a node<commit_after>
#!/usr/bin/env python from __future__ import print_function from __future__ import unicode_literals class Graph(object): ''' Create an empty graph. ''' def __init__(self): self.graph = {} return def nodes(self): return self.graph.keys() def edges(self): edge_list = [] for key, value in self.graph(): for item in value: edge_list.append((key, item)) return edge_list def add_node(self, node): self.graph.setdefault(node, []) def add_edge(self, node1, node2): if node1 in self.graph: self.graph.append(node2) else: self.graph[node1] = node2 def del_node(self, node): try: del self.graph[node] except KeyError: raise KeyError('node not in graph') def has_node(self, node): return node in self.graph def neighbors(self, node): return self.graph[node] def adjecent(self, node1, node2): if node2 in self.graph[node1] or node1 in self.graph[node2]: return True else: return False
#!/usr/bin/env python from __future__ import print_function from __future__ import unicode_literals class Graph(object): ''' Create an empty graph. ''' def __init__(self): self.graph = {} return def nodes(): return nodes def edges(): return edges def add_node(self, node): self.graph.setdefault(node, []) return def add_edge(self, node1, node2): return def del_node(self, node): try: del self.graph[node] except KeyError: raise KeyError('node not in graph') def has_node(self, node): return node in self.graph def neighbors(self, node): return self.graph[node] def adjecent(self, node1, node2): if node2 in self.graph[node1] or node1 in self.graph[node2]: return True else: return False Add functions for adding a node, an edge, and defining a node#!/usr/bin/env python from __future__ import print_function from __future__ import unicode_literals class Graph(object): ''' Create an empty graph. ''' def __init__(self): self.graph = {} return def nodes(self): return self.graph.keys() def edges(self): edge_list = [] for key, value in self.graph(): for item in value: edge_list.append((key, item)) return edge_list def add_node(self, node): self.graph.setdefault(node, []) def add_edge(self, node1, node2): if node1 in self.graph: self.graph.append(node2) else: self.graph[node1] = node2 def del_node(self, node): try: del self.graph[node] except KeyError: raise KeyError('node not in graph') def has_node(self, node): return node in self.graph def neighbors(self, node): return self.graph[node] def adjecent(self, node1, node2): if node2 in self.graph[node1] or node1 in self.graph[node2]: return True else: return False
<commit_before>#!/usr/bin/env python from __future__ import print_function from __future__ import unicode_literals class Graph(object): ''' Create an empty graph. ''' def __init__(self): self.graph = {} return def nodes(): return nodes def edges(): return edges def add_node(self, node): self.graph.setdefault(node, []) return def add_edge(self, node1, node2): return def del_node(self, node): try: del self.graph[node] except KeyError: raise KeyError('node not in graph') def has_node(self, node): return node in self.graph def neighbors(self, node): return self.graph[node] def adjecent(self, node1, node2): if node2 in self.graph[node1] or node1 in self.graph[node2]: return True else: return False <commit_msg>Add functions for adding a node, an edge, and defining a node<commit_after>#!/usr/bin/env python from __future__ import print_function from __future__ import unicode_literals class Graph(object): ''' Create an empty graph. ''' def __init__(self): self.graph = {} return def nodes(self): return self.graph.keys() def edges(self): edge_list = [] for key, value in self.graph(): for item in value: edge_list.append((key, item)) return edge_list def add_node(self, node): self.graph.setdefault(node, []) def add_edge(self, node1, node2): if node1 in self.graph: self.graph.append(node2) else: self.graph[node1] = node2 def del_node(self, node): try: del self.graph[node] except KeyError: raise KeyError('node not in graph') def has_node(self, node): return node in self.graph def neighbors(self, node): return self.graph[node] def adjecent(self, node1, node2): if node2 in self.graph[node1] or node1 in self.graph[node2]: return True else: return False
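For comparison, an idiomatic dict-of-lists version of the two methods added in that commit. The committed code calls self.graph.append(...) and iterates self.graph() on a dict, both of which raise at runtime, so this sketch is an assumption about the intended behavior, not the repository's actual later fix.

# Sketch: adjacency-list add_edge/edges as the record's commit appears to intend.
class Graph(object):
    def __init__(self):
        self.graph = {}

    def add_node(self, node):
        self.graph.setdefault(node, [])

    def add_edge(self, node1, node2):
        # Append to node1's neighbor list, creating both entries on demand.
        self.graph.setdefault(node1, []).append(node2)
        self.graph.setdefault(node2, [])

    def edges(self):
        # Iterate dict items; calling self.graph() would try to call the dict.
        return [(node, neighbor)
                for node, neighbors in self.graph.items()
                for neighbor in neighbors]


g = Graph()
g.add_edge("a", "b")
assert g.edges() == [("a", "b")]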
2d50e06c7e55c19e3055d555d78fac699c61104d
tests/integration/test_os_signals.py
tests/integration/test_os_signals.py
import os import signal import diesel state = {'triggered':False} def waiter(): diesel.signal(signal.SIGUSR1) state['triggered'] = True def test_can_wait_on_os_signals(): # Start our Loop that will wait on USR1 diesel.fork(waiter) # Let execution switch to the newly spawned loop diesel.sleep() # We haven't sent the signal, so the state should not be triggered assert not state['triggered'] # Send the USR1 signal os.kill(os.getpid(), signal.SIGUSR1) # Again, force a switch so the waiter can act on the signal diesel.sleep() # Now that we're back, the waiter should have triggered the state assert state['triggered']
import os import signal import diesel from diesel.util.event import Countdown state = {'triggered':False} def waiter(): diesel.signal(signal.SIGUSR1) state['triggered'] = True def test_can_wait_on_os_signals(): # Start our Loop that will wait on USR1 diesel.fork(waiter) # Let execution switch to the newly spawned loop diesel.sleep() # We haven't sent the signal, so the state should not be triggered assert not state['triggered'] # Send the USR1 signal os.kill(os.getpid(), signal.SIGUSR1) # Again, force a switch so the waiter can act on the signal diesel.sleep() # Now that we're back, the waiter should have triggered the state assert state['triggered'] def test_multiple_signal_waiters(): N_WAITERS = 5 c = Countdown(N_WAITERS) def mwaiter(): diesel.signal(signal.SIGUSR1) c.tick() for i in xrange(N_WAITERS): diesel.fork(mwaiter) diesel.sleep() os.kill(os.getpid(), signal.SIGUSR1) evt, data = diesel.first(sleep=1, waits=[c]) assert evt is c, "all waiters were not triggered!"
Test for multiple waiters on a signal
Test for multiple waiters on a signal
Python
bsd-3-clause
dieseldev/diesel
import os import signal import diesel state = {'triggered':False} def waiter(): diesel.signal(signal.SIGUSR1) state['triggered'] = True def test_can_wait_on_os_signals(): # Start our Loop that will wait on USR1 diesel.fork(waiter) # Let execution switch to the newly spawned loop diesel.sleep() # We haven't sent the signal, so the state should not be triggered assert not state['triggered'] # Send the USR1 signal os.kill(os.getpid(), signal.SIGUSR1) # Again, force a switch so the waiter can act on the signal diesel.sleep() # Now that we're back, the waiter should have triggered the state assert state['triggered'] Test for multiple waiters on a signal
import os import signal import diesel from diesel.util.event import Countdown state = {'triggered':False} def waiter(): diesel.signal(signal.SIGUSR1) state['triggered'] = True def test_can_wait_on_os_signals(): # Start our Loop that will wait on USR1 diesel.fork(waiter) # Let execution switch to the newly spawned loop diesel.sleep() # We haven't sent the signal, so the state should not be triggered assert not state['triggered'] # Send the USR1 signal os.kill(os.getpid(), signal.SIGUSR1) # Again, force a switch so the waiter can act on the signal diesel.sleep() # Now that we're back, the waiter should have triggered the state assert state['triggered'] def test_multiple_signal_waiters(): N_WAITERS = 5 c = Countdown(N_WAITERS) def mwaiter(): diesel.signal(signal.SIGUSR1) c.tick() for i in xrange(N_WAITERS): diesel.fork(mwaiter) diesel.sleep() os.kill(os.getpid(), signal.SIGUSR1) evt, data = diesel.first(sleep=1, waits=[c]) assert evt is c, "all waiters were not triggered!"
<commit_before>import os import signal import diesel state = {'triggered':False} def waiter(): diesel.signal(signal.SIGUSR1) state['triggered'] = True def test_can_wait_on_os_signals(): # Start our Loop that will wait on USR1 diesel.fork(waiter) # Let execution switch to the newly spawned loop diesel.sleep() # We haven't sent the signal, so the state should not be triggered assert not state['triggered'] # Send the USR1 signal os.kill(os.getpid(), signal.SIGUSR1) # Again, force a switch so the waiter can act on the signal diesel.sleep() # Now that we're back, the waiter should have triggered the state assert state['triggered'] <commit_msg>Test for multiple waiters on a signal<commit_after>
import os import signal import diesel from diesel.util.event import Countdown state = {'triggered':False} def waiter(): diesel.signal(signal.SIGUSR1) state['triggered'] = True def test_can_wait_on_os_signals(): # Start our Loop that will wait on USR1 diesel.fork(waiter) # Let execution switch to the newly spawned loop diesel.sleep() # We haven't sent the signal, so the state should not be triggered assert not state['triggered'] # Send the USR1 signal os.kill(os.getpid(), signal.SIGUSR1) # Again, force a switch so the waiter can act on the signal diesel.sleep() # Now that we're back, the waiter should have triggered the state assert state['triggered'] def test_multiple_signal_waiters(): N_WAITERS = 5 c = Countdown(N_WAITERS) def mwaiter(): diesel.signal(signal.SIGUSR1) c.tick() for i in xrange(N_WAITERS): diesel.fork(mwaiter) diesel.sleep() os.kill(os.getpid(), signal.SIGUSR1) evt, data = diesel.first(sleep=1, waits=[c]) assert evt is c, "all waiters were not triggered!"
import os import signal import diesel state = {'triggered':False} def waiter(): diesel.signal(signal.SIGUSR1) state['triggered'] = True def test_can_wait_on_os_signals(): # Start our Loop that will wait on USR1 diesel.fork(waiter) # Let execution switch to the newly spawned loop diesel.sleep() # We haven't sent the signal, so the state should not be triggered assert not state['triggered'] # Send the USR1 signal os.kill(os.getpid(), signal.SIGUSR1) # Again, force a switch so the waiter can act on the signal diesel.sleep() # Now that we're back, the waiter should have triggered the state assert state['triggered'] Test for multiple waiters on a signalimport os import signal import diesel from diesel.util.event import Countdown state = {'triggered':False} def waiter(): diesel.signal(signal.SIGUSR1) state['triggered'] = True def test_can_wait_on_os_signals(): # Start our Loop that will wait on USR1 diesel.fork(waiter) # Let execution switch to the newly spawned loop diesel.sleep() # We haven't sent the signal, so the state should not be triggered assert not state['triggered'] # Send the USR1 signal os.kill(os.getpid(), signal.SIGUSR1) # Again, force a switch so the waiter can act on the signal diesel.sleep() # Now that we're back, the waiter should have triggered the state assert state['triggered'] def test_multiple_signal_waiters(): N_WAITERS = 5 c = Countdown(N_WAITERS) def mwaiter(): diesel.signal(signal.SIGUSR1) c.tick() for i in xrange(N_WAITERS): diesel.fork(mwaiter) diesel.sleep() os.kill(os.getpid(), signal.SIGUSR1) evt, data = diesel.first(sleep=1, waits=[c]) assert evt is c, "all waiters were not triggered!"
<commit_before>import os import signal import diesel state = {'triggered':False} def waiter(): diesel.signal(signal.SIGUSR1) state['triggered'] = True def test_can_wait_on_os_signals(): # Start our Loop that will wait on USR1 diesel.fork(waiter) # Let execution switch to the newly spawned loop diesel.sleep() # We haven't sent the signal, so the state should not be triggered assert not state['triggered'] # Send the USR1 signal os.kill(os.getpid(), signal.SIGUSR1) # Again, force a switch so the waiter can act on the signal diesel.sleep() # Now that we're back, the waiter should have triggered the state assert state['triggered'] <commit_msg>Test for multiple waiters on a signal<commit_after>import os import signal import diesel from diesel.util.event import Countdown state = {'triggered':False} def waiter(): diesel.signal(signal.SIGUSR1) state['triggered'] = True def test_can_wait_on_os_signals(): # Start our Loop that will wait on USR1 diesel.fork(waiter) # Let execution switch to the newly spawned loop diesel.sleep() # We haven't sent the signal, so the state should not be triggered assert not state['triggered'] # Send the USR1 signal os.kill(os.getpid(), signal.SIGUSR1) # Again, force a switch so the waiter can act on the signal diesel.sleep() # Now that we're back, the waiter should have triggered the state assert state['triggered'] def test_multiple_signal_waiters(): N_WAITERS = 5 c = Countdown(N_WAITERS) def mwaiter(): diesel.signal(signal.SIGUSR1) c.tick() for i in xrange(N_WAITERS): diesel.fork(mwaiter) diesel.sleep() os.kill(os.getpid(), signal.SIGUSR1) evt, data = diesel.first(sleep=1, waits=[c]) assert evt is c, "all waiters were not triggered!"
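The diesel tests above hinge on delivering SIGUSR1 to the test's own process with os.kill. A standard-library analogue of that self-signalling pattern (POSIX-only, plain signal/os rather than diesel's green-thread API) shows the underlying mechanism the tests rely on:

# Stdlib sketch of the self-signalling pattern used by the diesel tests above.
import os
import signal

state = {"triggered": False}


def on_usr1(signum, frame):
    state["triggered"] = True


signal.signal(signal.SIGUSR1, on_usr1)   # register handler (POSIX only)
assert not state["triggered"]
os.kill(os.getpid(), signal.SIGUSR1)     # send the signal to ourselves
assert state["triggered"]                # handler ran at the next checkpoint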
caa3b8a7c06f3e1fffb7ec6b957216bd6d147f23
numbas_auth.py
numbas_auth.py
from django_auth_ldap.backend import LDAPBackend class NumbasAuthBackend(LDAPBackend): """Authentication backend overriding LDAPBackend. This could be used to override certain functionality within the LDAP authentication backend. The example here overrides get_or_create_user() to alter the LDAP givenName attribute. To use this backend, edit settings.py and replace the LDAPBackend line in AUTHENTICATION_BACKENDS with numbas_auth.NumbasAuthBackend """ def get_or_create_user(self, username, ldap_user): """Alter the LDAP givenName attribute to the familiar first name in displayName.""" ldap_user.attrs['givenname'] = [ldap_user.attrs['displayname'][0].split()[0]] return super(NumbasAuthBackend, self).get_or_create_user(username, ldap_user)
from django_auth_ldap.backend import LDAPBackend class NumbasAuthBackend(LDAPBackend): """Authentication backend overriding LDAPBackend. This could be used to override certain functionality within the LDAP authentication backend. The example here overrides get_or_create_user() to alter the LDAP givenName attribute. To use this backend, edit settings.py and replace the LDAPBackend line in AUTHENTICATION_BACKENDS with numbas_auth.NumbasAuthBackend """ def get_or_create_user(self, username, ldap_user): """Alter the LDAP givenName attribute to the familiar first name in displayName.""" ldap_user.attrs['givenName'] = [ldap_user.attrs['displayName'][0].split()[0]] return super(NumbasAuthBackend, self).get_or_create_user(username, ldap_user)
Fix case sensitive LDAP attributes
Fix case sensitive LDAP attributes
Python
apache-2.0
numbas/editor,numbas/editor,numbas/editor
from django_auth_ldap.backend import LDAPBackend class NumbasAuthBackend(LDAPBackend): """Authentication backend overriding LDAPBackend. This could be used to override certain functionality within the LDAP authentication backend. The example here overrides get_or_create_user() to alter the LDAP givenName attribute. To use this backend, edit settings.py and replace the LDAPBackend line in AUTHENTICATION_BACKENDS with numbas_auth.NumbasAuthBackend """ def get_or_create_user(self, username, ldap_user): """Alter the LDAP givenName attribute to the familiar first name in displayName.""" ldap_user.attrs['givenname'] = [ldap_user.attrs['displayname'][0].split()[0]] return super(NumbasAuthBackend, self).get_or_create_user(username, ldap_user) Fix case sensitive LDAP attributes
from django_auth_ldap.backend import LDAPBackend class NumbasAuthBackend(LDAPBackend): """Authentication backend overriding LDAPBackend. This could be used to override certain functionality within the LDAP authentication backend. The example here overrides get_or_create_user() to alter the LDAP givenName attribute. To use this backend, edit settings.py and replace the LDAPBackend line in AUTHENTICATION_BACKENDS with numbas_auth.NumbasAuthBackend """ def get_or_create_user(self, username, ldap_user): """Alter the LDAP givenName attribute to the familiar first name in displayName.""" ldap_user.attrs['givenName'] = [ldap_user.attrs['displayName'][0].split()[0]] return super(NumbasAuthBackend, self).get_or_create_user(username, ldap_user)
<commit_before>from django_auth_ldap.backend import LDAPBackend class NumbasAuthBackend(LDAPBackend): """Authentication backend overriding LDAPBackend. This could be used to override certain functionality within the LDAP authentication backend. The example here overrides get_or_create_user() to alter the LDAP givenName attribute. To use this backend, edit settings.py and replace the LDAPBackend line in AUTHENTICATION_BACKENDS with numbas_auth.NumbasAuthBackend """ def get_or_create_user(self, username, ldap_user): """Alter the LDAP givenName attribute to the familiar first name in displayName.""" ldap_user.attrs['givenname'] = [ldap_user.attrs['displayname'][0].split()[0]] return super(NumbasAuthBackend, self).get_or_create_user(username, ldap_user) <commit_msg>Fix case sensitive LDAP attributes<commit_after>
from django_auth_ldap.backend import LDAPBackend class NumbasAuthBackend(LDAPBackend): """Authentication backend overriding LDAPBackend. This could be used to override certain functionality within the LDAP authentication backend. The example here overrides get_or_create_user() to alter the LDAP givenName attribute. To use this backend, edit settings.py and replace the LDAPBackend line in AUTHENTICATION_BACKENDS with numbas_auth.NumbasAuthBackend """ def get_or_create_user(self, username, ldap_user): """Alter the LDAP givenName attribute to the familiar first name in displayName.""" ldap_user.attrs['givenName'] = [ldap_user.attrs['displayName'][0].split()[0]] return super(NumbasAuthBackend, self).get_or_create_user(username, ldap_user)
from django_auth_ldap.backend import LDAPBackend class NumbasAuthBackend(LDAPBackend): """Authentication backend overriding LDAPBackend. This could be used to override certain functionality within the LDAP authentication backend. The example here overrides get_or_create_user() to alter the LDAP givenName attribute. To use this backend, edit settings.py and replace the LDAPBackend line in AUTHENTICATION_BACKENDS with numbas_auth.NumbasAuthBackend """ def get_or_create_user(self, username, ldap_user): """Alter the LDAP givenName attribute to the familiar first name in displayName.""" ldap_user.attrs['givenname'] = [ldap_user.attrs['displayname'][0].split()[0]] return super(NumbasAuthBackend, self).get_or_create_user(username, ldap_user) Fix case sensitive LDAP attributesfrom django_auth_ldap.backend import LDAPBackend class NumbasAuthBackend(LDAPBackend): """Authentication backend overriding LDAPBackend. This could be used to override certain functionality within the LDAP authentication backend. The example here overrides get_or_create_user() to alter the LDAP givenName attribute. To use this backend, edit settings.py and replace the LDAPBackend line in AUTHENTICATION_BACKENDS with numbas_auth.NumbasAuthBackend """ def get_or_create_user(self, username, ldap_user): """Alter the LDAP givenName attribute to the familiar first name in displayName.""" ldap_user.attrs['givenName'] = [ldap_user.attrs['displayName'][0].split()[0]] return super(NumbasAuthBackend, self).get_or_create_user(username, ldap_user)
<commit_before>from django_auth_ldap.backend import LDAPBackend class NumbasAuthBackend(LDAPBackend): """Authentication backend overriding LDAPBackend. This could be used to override certain functionality within the LDAP authentication backend. The example here overrides get_or_create_user() to alter the LDAP givenName attribute. To use this backend, edit settings.py and replace the LDAPBackend line in AUTHENTICATION_BACKENDS with numbas_auth.NumbasAuthBackend """ def get_or_create_user(self, username, ldap_user): """Alter the LDAP givenName attribute to the familiar first name in displayName.""" ldap_user.attrs['givenname'] = [ldap_user.attrs['displayname'][0].split()[0]] return super(NumbasAuthBackend, self).get_or_create_user(username, ldap_user) <commit_msg>Fix case sensitive LDAP attributes<commit_after>from django_auth_ldap.backend import LDAPBackend class NumbasAuthBackend(LDAPBackend): """Authentication backend overriding LDAPBackend. This could be used to override certain functionality within the LDAP authentication backend. The example here overrides get_or_create_user() to alter the LDAP givenName attribute. To use this backend, edit settings.py and replace the LDAPBackend line in AUTHENTICATION_BACKENDS with numbas_auth.NumbasAuthBackend """ def get_or_create_user(self, username, ldap_user): """Alter the LDAP givenName attribute to the familiar first name in displayName.""" ldap_user.attrs['givenName'] = [ldap_user.attrs['displayName'][0].split()[0]] return super(NumbasAuthBackend, self).get_or_create_user(username, ldap_user)
c7d03f4f8d4d50ce837cf0df446a52b24e891cee
config/flask_prod.py
config/flask_prod.py
import os from rmc.config.flask_base import * import rmc.shared.secrets as s JS_DIR = 'js_prod' DEBUG = False ENV = 'prod' GA_PROPERTY_ID = 'UA-35073503-1' LOG_DIR = '/home/rmc/logs' LOG_PATH = os.path.join(LOG_DIR, 'server/server.log') FB_APP_ID = '219309734863464' FB_APP_SECRET = s.FB_APP_SECRET_PROD
import os from rmc.config.flask_base import * import rmc.shared.secrets as s JS_DIR = 'js' DEBUG = False ENV = 'prod' GA_PROPERTY_ID = 'UA-35073503-1' LOG_DIR = '/home/rmc/logs' LOG_PATH = os.path.join(LOG_DIR, 'server/server.log') FB_APP_ID = '219309734863464' FB_APP_SECRET = s.FB_APP_SECRET_PROD
Revert "Revert "do not use minified js on prod"" (=don't minify to fix things)
Revert "Revert "do not use minified js on prod"" (=don't minify to fix things) This reverts commit 88f2886393991ac660ac382d48c65088eff56d52.
Python
mit
sachdevs/rmc,MichalKononenko/rmc,sachdevs/rmc,ccqi/rmc,JGulbronson/rmc,MichalKononenko/rmc,JGulbronson/rmc,JGulbronson/rmc,sachdevs/rmc,rageandqq/rmc,MichalKononenko/rmc,JGulbronson/rmc,ccqi/rmc,shakilkanji/rmc,MichalKononenko/rmc,rageandqq/rmc,UWFlow/rmc,UWFlow/rmc,UWFlow/rmc,ccqi/rmc,rageandqq/rmc,UWFlow/rmc,duaayousif/rmc,duaayousif/rmc,duaayousif/rmc,sachdevs/rmc,shakilkanji/rmc,shakilkanji/rmc,JGulbronson/rmc,shakilkanji/rmc,duaayousif/rmc,ccqi/rmc,ccqi/rmc,rageandqq/rmc,sachdevs/rmc,UWFlow/rmc,shakilkanji/rmc,rageandqq/rmc,MichalKononenko/rmc,duaayousif/rmc
import os from rmc.config.flask_base import * import rmc.shared.secrets as s JS_DIR = 'js_prod' DEBUG = False ENV = 'prod' GA_PROPERTY_ID = 'UA-35073503-1' LOG_DIR = '/home/rmc/logs' LOG_PATH = os.path.join(LOG_DIR, 'server/server.log') FB_APP_ID = '219309734863464' FB_APP_SECRET = s.FB_APP_SECRET_PROD Revert "Revert "do not use minified js on prod"" (=don't minify to fix things) This reverts commit 88f2886393991ac660ac382d48c65088eff56d52.
import os from rmc.config.flask_base import * import rmc.shared.secrets as s JS_DIR = 'js' DEBUG = False ENV = 'prod' GA_PROPERTY_ID = 'UA-35073503-1' LOG_DIR = '/home/rmc/logs' LOG_PATH = os.path.join(LOG_DIR, 'server/server.log') FB_APP_ID = '219309734863464' FB_APP_SECRET = s.FB_APP_SECRET_PROD
<commit_before>import os from rmc.config.flask_base import * import rmc.shared.secrets as s JS_DIR = 'js_prod' DEBUG = False ENV = 'prod' GA_PROPERTY_ID = 'UA-35073503-1' LOG_DIR = '/home/rmc/logs' LOG_PATH = os.path.join(LOG_DIR, 'server/server.log') FB_APP_ID = '219309734863464' FB_APP_SECRET = s.FB_APP_SECRET_PROD <commit_msg>Revert "Revert "do not use minified js on prod"" (=don't minify to fix things) This reverts commit 88f2886393991ac660ac382d48c65088eff56d52.<commit_after>
import os from rmc.config.flask_base import * import rmc.shared.secrets as s JS_DIR = 'js' DEBUG = False ENV = 'prod' GA_PROPERTY_ID = 'UA-35073503-1' LOG_DIR = '/home/rmc/logs' LOG_PATH = os.path.join(LOG_DIR, 'server/server.log') FB_APP_ID = '219309734863464' FB_APP_SECRET = s.FB_APP_SECRET_PROD
import os from rmc.config.flask_base import * import rmc.shared.secrets as s JS_DIR = 'js_prod' DEBUG = False ENV = 'prod' GA_PROPERTY_ID = 'UA-35073503-1' LOG_DIR = '/home/rmc/logs' LOG_PATH = os.path.join(LOG_DIR, 'server/server.log') FB_APP_ID = '219309734863464' FB_APP_SECRET = s.FB_APP_SECRET_PROD Revert "Revert "do not use minified js on prod"" (=don't minify to fix things) This reverts commit 88f2886393991ac660ac382d48c65088eff56d52.import os from rmc.config.flask_base import * import rmc.shared.secrets as s JS_DIR = 'js' DEBUG = False ENV = 'prod' GA_PROPERTY_ID = 'UA-35073503-1' LOG_DIR = '/home/rmc/logs' LOG_PATH = os.path.join(LOG_DIR, 'server/server.log') FB_APP_ID = '219309734863464' FB_APP_SECRET = s.FB_APP_SECRET_PROD
<commit_before>import os from rmc.config.flask_base import * import rmc.shared.secrets as s JS_DIR = 'js_prod' DEBUG = False ENV = 'prod' GA_PROPERTY_ID = 'UA-35073503-1' LOG_DIR = '/home/rmc/logs' LOG_PATH = os.path.join(LOG_DIR, 'server/server.log') FB_APP_ID = '219309734863464' FB_APP_SECRET = s.FB_APP_SECRET_PROD <commit_msg>Revert "Revert "do not use minified js on prod"" (=don't minify to fix things) This reverts commit 88f2886393991ac660ac382d48c65088eff56d52.<commit_after>import os from rmc.config.flask_base import * import rmc.shared.secrets as s JS_DIR = 'js' DEBUG = False ENV = 'prod' GA_PROPERTY_ID = 'UA-35073503-1' LOG_DIR = '/home/rmc/logs' LOG_PATH = os.path.join(LOG_DIR, 'server/server.log') FB_APP_ID = '219309734863464' FB_APP_SECRET = s.FB_APP_SECRET_PROD
931ae68671abef1fedde46d585a4057b24ecbb04
reinforcement-learning/play.py
reinforcement-learning/play.py
"""Load the trained q table and make actions based on that. """ import time import env import rl rl.load_q() env.make("pygame") while True: env.reset() for _ in range(15): if env.done: break action = rl.choose_action(rl.table[env.object[0]]) env.action(action) time.sleep(0.03) env.render()
"""Load the trained q table and make actions based on that. """ import time import env import rl rl.load_q() env.make("pygame") while True: env.reset() for _ in range(15): if env.done: break action = rl.choose_action(env.player, "test") env.action(action) time.sleep(0.03) env.render()
Update to newest version of rl.py.
Update to newest version of rl.py.
Python
mit
danieloconell/Louis
"""Load the trained q table and make actions based on that. """ import time import env import rl rl.load_q() env.make("pygame") while True: env.reset() for _ in range(15): if env.done: break action = rl.choose_action(rl.table[env.object[0]]) env.action(action) time.sleep(0.03) env.render() Update to newest version of rl.py.
"""Load the trained q table and make actions based on that. """ import time import env import rl rl.load_q() env.make("pygame") while True: env.reset() for _ in range(15): if env.done: break action = rl.choose_action(env.player, "test") env.action(action) time.sleep(0.03) env.render()
<commit_before>"""Load the trained q table and make actions based on that. """ import time import env import rl rl.load_q() env.make("pygame") while True: env.reset() for _ in range(15): if env.done: break action = rl.choose_action(rl.table[env.object[0]]) env.action(action) time.sleep(0.03) env.render() <commit_msg>Update to newest version of rl.py.<commit_after>
"""Load the trained q table and make actions based on that. """ import time import env import rl rl.load_q() env.make("pygame") while True: env.reset() for _ in range(15): if env.done: break action = rl.choose_action(env.player, "test") env.action(action) time.sleep(0.03) env.render()
"""Load the trained q table and make actions based on that. """ import time import env import rl rl.load_q() env.make("pygame") while True: env.reset() for _ in range(15): if env.done: break action = rl.choose_action(rl.table[env.object[0]]) env.action(action) time.sleep(0.03) env.render() Update to newest version of rl.py."""Load the trained q table and make actions based on that. """ import time import env import rl rl.load_q() env.make("pygame") while True: env.reset() for _ in range(15): if env.done: break action = rl.choose_action(env.player, "test") env.action(action) time.sleep(0.03) env.render()
<commit_before>"""Load the trained q table and make actions based on that. """ import time import env import rl rl.load_q() env.make("pygame") while True: env.reset() for _ in range(15): if env.done: break action = rl.choose_action(rl.table[env.object[0]]) env.action(action) time.sleep(0.03) env.render() <commit_msg>Update to newest version of rl.py.<commit_after>"""Load the trained q table and make actions based on that. """ import time import env import rl rl.load_q() env.make("pygame") while True: env.reset() for _ in range(15): if env.done: break action = rl.choose_action(env.player, "test") env.action(action) time.sleep(0.03) env.render()
bb6b6b46860f6e03abc4ac9c47751fe4309f0e17
md2pdf/core.py
md2pdf/core.py
# -*- coding: utf-8 -*- from markdown2 import markdown, markdown_path from weasyprint import HTML, CSS from .exceptions import ValidationError __title__ = 'md2pdf' __version__ = '0.2.1' __author__ = 'Julien Maupetit' __license__ = 'MIT' __copyright__ = 'Copyright 2013 Julien Maupetit' def md2pdf(pdf_file_path, md_content=None, md_file_path=None, css_file_path=None): """ Convert markdown file to pdf with styles """ # Convert markdown to html raw_html = "" extras = ["cuddled-lists"] if md_file_path: raw_html = markdown_path(md_file_path, extras=extras) elif md_content: raw_html = markdown(md_content, extras=extras) if not len(raw_html): raise ValidationError('Input markdown seems empty') # Weasyprint HTML object html = HTML(string=raw_html) # Get styles css = [] if css_file_path: css.append(CSS(filename=css_file_path)) # Generate PDF html.write_pdf(pdf_file_path, stylesheets=css) return
# -*- coding: utf-8 -*- from markdown2 import markdown, markdown_path from weasyprint import HTML, CSS from .exceptions import ValidationError __title__ = 'md2pdf' __version__ = '0.2.1' __author__ = 'Julien Maupetit' __license__ = 'MIT' __copyright__ = 'Copyright 2013 Julien Maupetit' def md2pdf(pdf_file_path, md_content=None, md_file_path=None, css_file_path=None, base_url=None): """ Convert markdown file to pdf with styles """ # Convert markdown to html raw_html = "" extras = ["cuddled-lists"] if md_file_path: raw_html = markdown_path(md_file_path, extras=extras) elif md_content: raw_html = markdown(md_content, extras=extras) if not len(raw_html): raise ValidationError('Input markdown seems empty') # Weasyprint HTML object html = HTML(string=raw_html, base_url=base_url) # Get styles css = [] if css_file_path: css.append(CSS(filename=css_file_path)) # Generate PDF html.write_pdf(pdf_file_path, stylesheets=css) return
Allow to add a base url to find media
Allow to add a base url to find media
Python
mit
jmaupetit/md2pdf
# -*- coding: utf-8 -*- from markdown2 import markdown, markdown_path from weasyprint import HTML, CSS from .exceptions import ValidationError __title__ = 'md2pdf' __version__ = '0.2.1' __author__ = 'Julien Maupetit' __license__ = 'MIT' __copyright__ = 'Copyright 2013 Julien Maupetit' def md2pdf(pdf_file_path, md_content=None, md_file_path=None, css_file_path=None): """ Convert markdown file to pdf with styles """ # Convert markdown to html raw_html = "" extras = ["cuddled-lists"] if md_file_path: raw_html = markdown_path(md_file_path, extras=extras) elif md_content: raw_html = markdown(md_content, extras=extras) if not len(raw_html): raise ValidationError('Input markdown seems empty') # Weasyprint HTML object html = HTML(string=raw_html) # Get styles css = [] if css_file_path: css.append(CSS(filename=css_file_path)) # Generate PDF html.write_pdf(pdf_file_path, stylesheets=css) return Allow to add a base url to find media
# -*- coding: utf-8 -*- from markdown2 import markdown, markdown_path from weasyprint import HTML, CSS from .exceptions import ValidationError __title__ = 'md2pdf' __version__ = '0.2.1' __author__ = 'Julien Maupetit' __license__ = 'MIT' __copyright__ = 'Copyright 2013 Julien Maupetit' def md2pdf(pdf_file_path, md_content=None, md_file_path=None, css_file_path=None, base_url=None): """ Convert markdown file to pdf with styles """ # Convert markdown to html raw_html = "" extras = ["cuddled-lists"] if md_file_path: raw_html = markdown_path(md_file_path, extras=extras) elif md_content: raw_html = markdown(md_content, extras=extras) if not len(raw_html): raise ValidationError('Input markdown seems empty') # Weasyprint HTML object html = HTML(string=raw_html, base_url=base_url) # Get styles css = [] if css_file_path: css.append(CSS(filename=css_file_path)) # Generate PDF html.write_pdf(pdf_file_path, stylesheets=css) return
<commit_before># -*- coding: utf-8 -*- from markdown2 import markdown, markdown_path from weasyprint import HTML, CSS from .exceptions import ValidationError __title__ = 'md2pdf' __version__ = '0.2.1' __author__ = 'Julien Maupetit' __license__ = 'MIT' __copyright__ = 'Copyright 2013 Julien Maupetit' def md2pdf(pdf_file_path, md_content=None, md_file_path=None, css_file_path=None): """ Convert markdown file to pdf with styles """ # Convert markdown to html raw_html = "" extras = ["cuddled-lists"] if md_file_path: raw_html = markdown_path(md_file_path, extras=extras) elif md_content: raw_html = markdown(md_content, extras=extras) if not len(raw_html): raise ValidationError('Input markdown seems empty') # Weasyprint HTML object html = HTML(string=raw_html) # Get styles css = [] if css_file_path: css.append(CSS(filename=css_file_path)) # Generate PDF html.write_pdf(pdf_file_path, stylesheets=css) return <commit_msg>Allow to add a base url to find media<commit_after>
# -*- coding: utf-8 -*- from markdown2 import markdown, markdown_path from weasyprint import HTML, CSS from .exceptions import ValidationError __title__ = 'md2pdf' __version__ = '0.2.1' __author__ = 'Julien Maupetit' __license__ = 'MIT' __copyright__ = 'Copyright 2013 Julien Maupetit' def md2pdf(pdf_file_path, md_content=None, md_file_path=None, css_file_path=None, base_url=None): """ Convert markdown file to pdf with styles """ # Convert markdown to html raw_html = "" extras = ["cuddled-lists"] if md_file_path: raw_html = markdown_path(md_file_path, extras=extras) elif md_content: raw_html = markdown(md_content, extras=extras) if not len(raw_html): raise ValidationError('Input markdown seems empty') # Weasyprint HTML object html = HTML(string=raw_html, base_url=base_url) # Get styles css = [] if css_file_path: css.append(CSS(filename=css_file_path)) # Generate PDF html.write_pdf(pdf_file_path, stylesheets=css) return
# -*- coding: utf-8 -*- from markdown2 import markdown, markdown_path from weasyprint import HTML, CSS from .exceptions import ValidationError __title__ = 'md2pdf' __version__ = '0.2.1' __author__ = 'Julien Maupetit' __license__ = 'MIT' __copyright__ = 'Copyright 2013 Julien Maupetit' def md2pdf(pdf_file_path, md_content=None, md_file_path=None, css_file_path=None): """ Convert markdown file to pdf with styles """ # Convert markdown to html raw_html = "" extras = ["cuddled-lists"] if md_file_path: raw_html = markdown_path(md_file_path, extras=extras) elif md_content: raw_html = markdown(md_content, extras=extras) if not len(raw_html): raise ValidationError('Input markdown seems empty') # Weasyprint HTML object html = HTML(string=raw_html) # Get styles css = [] if css_file_path: css.append(CSS(filename=css_file_path)) # Generate PDF html.write_pdf(pdf_file_path, stylesheets=css) return Allow to add a base url to find media# -*- coding: utf-8 -*- from markdown2 import markdown, markdown_path from weasyprint import HTML, CSS from .exceptions import ValidationError __title__ = 'md2pdf' __version__ = '0.2.1' __author__ = 'Julien Maupetit' __license__ = 'MIT' __copyright__ = 'Copyright 2013 Julien Maupetit' def md2pdf(pdf_file_path, md_content=None, md_file_path=None, css_file_path=None, base_url=None): """ Convert markdown file to pdf with styles """ # Convert markdown to html raw_html = "" extras = ["cuddled-lists"] if md_file_path: raw_html = markdown_path(md_file_path, extras=extras) elif md_content: raw_html = markdown(md_content, extras=extras) if not len(raw_html): raise ValidationError('Input markdown seems empty') # Weasyprint HTML object html = HTML(string=raw_html, base_url=base_url) # Get styles css = [] if css_file_path: css.append(CSS(filename=css_file_path)) # Generate PDF html.write_pdf(pdf_file_path, stylesheets=css) return
<commit_before># -*- coding: utf-8 -*- from markdown2 import markdown, markdown_path from weasyprint import HTML, CSS from .exceptions import ValidationError __title__ = 'md2pdf' __version__ = '0.2.1' __author__ = 'Julien Maupetit' __license__ = 'MIT' __copyright__ = 'Copyright 2013 Julien Maupetit' def md2pdf(pdf_file_path, md_content=None, md_file_path=None, css_file_path=None): """ Convert markdown file to pdf with styles """ # Convert markdown to html raw_html = "" extras = ["cuddled-lists"] if md_file_path: raw_html = markdown_path(md_file_path, extras=extras) elif md_content: raw_html = markdown(md_content, extras=extras) if not len(raw_html): raise ValidationError('Input markdown seems empty') # Weasyprint HTML object html = HTML(string=raw_html) # Get styles css = [] if css_file_path: css.append(CSS(filename=css_file_path)) # Generate PDF html.write_pdf(pdf_file_path, stylesheets=css) return <commit_msg>Allow to add a base url to find media<commit_after># -*- coding: utf-8 -*- from markdown2 import markdown, markdown_path from weasyprint import HTML, CSS from .exceptions import ValidationError __title__ = 'md2pdf' __version__ = '0.2.1' __author__ = 'Julien Maupetit' __license__ = 'MIT' __copyright__ = 'Copyright 2013 Julien Maupetit' def md2pdf(pdf_file_path, md_content=None, md_file_path=None, css_file_path=None, base_url=None): """ Convert markdown file to pdf with styles """ # Convert markdown to html raw_html = "" extras = ["cuddled-lists"] if md_file_path: raw_html = markdown_path(md_file_path, extras=extras) elif md_content: raw_html = markdown(md_content, extras=extras) if not len(raw_html): raise ValidationError('Input markdown seems empty') # Weasyprint HTML object html = HTML(string=raw_html, base_url=base_url) # Get styles css = [] if css_file_path: css.append(CSS(filename=css_file_path)) # Generate PDF html.write_pdf(pdf_file_path, stylesheets=css) return
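As a usage note on the parameter added above: WeasyPrint resolves relative references (images, linked assets) in the rendered HTML against base_url, so markdown like ![logo](img/logo.png) only renders once the caller points base_url at the right directory. A minimal sketch — the file names below are illustrative, not from the project:

```python
from md2pdf.core import md2pdf

md2pdf(
    "out.pdf",
    md_file_path="README.md",      # illustrative input
    css_file_path="print.css",     # illustrative stylesheet
    base_url="/path/to/project/",  # relative media paths resolve against this
)
```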
0e19f960b2234fcd9711f123526f8de507ed2d99
src/registry.py
src/registry.py
from .formatter import * class FormatterRegistry(): def __init__(self): self.__formatters = [] def populate(self): self.__formatters = [ ClangFormat(), ElmFormat(), GoFormat(), JavaScriptFormat(), JsonFormat(), PythonFormat(), RustFormat(), TerraformFormat() ] @property def all(self): return self.__formatters @property def enabled(self): return [x for x in self.all if x.format_on_save] def find(self, predicate, default=None): return next((x for x in self.all if predicate(x)), default) def by_view(self, view): source = view.scope_name(0).split(' ')[0].split('.')[1] return self.find(lambda x: source in x.sources) def by_name(self, name): return self.find(lambda x: x.name == name)
from .formatter import * class FormatterRegistry(): def __init__(self): self.__formatters = [] self.__formatter_source_map = {} def populate(self): self.__formatters = [ ClangFormat(), ElmFormat(), GoFormat(), JavaScriptFormat(), JsonFormat(), PythonFormat(), RustFormat(), TerraformFormat() ] self.__formatter_source_map = dict((source, formatter) for formatter in self.__formatters for source in formatter.sources) @property def all(self): return self.__formatters @property def enabled(self): return [x for x in self.all if x.format_on_save] def by_view(self, view): source = view.scope_name(0).split(' ')[0].split('.')[1] return self.__formatter_source_map.get(source) def by_name(self, name): return next((x for x in self.all if x.name == name))
Speed up lookup of formatter by source type
Speed up lookup of formatter by source type
Python
mit
Rypac/sublime-format
from .formatter import * class FormatterRegistry(): def __init__(self): self.__formatters = [] def populate(self): self.__formatters = [ ClangFormat(), ElmFormat(), GoFormat(), JavaScriptFormat(), JsonFormat(), PythonFormat(), RustFormat(), TerraformFormat() ] @property def all(self): return self.__formatters @property def enabled(self): return [x for x in self.all if x.format_on_save] def find(self, predicate, default=None): return next((x for x in self.all if predicate(x)), default) def by_view(self, view): source = view.scope_name(0).split(' ')[0].split('.')[1] return self.find(lambda x: source in x.sources) def by_name(self, name): return self.find(lambda x: x.name == name) Speed up lookup of formatter by source type
from .formatter import * class FormatterRegistry(): def __init__(self): self.__formatters = [] self.__formatter_source_map = {} def populate(self): self.__formatters = [ ClangFormat(), ElmFormat(), GoFormat(), JavaScriptFormat(), JsonFormat(), PythonFormat(), RustFormat(), TerraformFormat() ] self.__formatter_source_map = dict((source, formatter) for formatter in self.__formatters for source in formatter.sources) @property def all(self): return self.__formatters @property def enabled(self): return [x for x in self.all if x.format_on_save] def by_view(self, view): source = view.scope_name(0).split(' ')[0].split('.')[1] return self.__formatter_source_map.get(source) def by_name(self, name): return next((x for x in self.all if x.name == name))
<commit_before>from .formatter import * class FormatterRegistry(): def __init__(self): self.__formatters = [] def populate(self): self.__formatters = [ ClangFormat(), ElmFormat(), GoFormat(), JavaScriptFormat(), JsonFormat(), PythonFormat(), RustFormat(), TerraformFormat() ] @property def all(self): return self.__formatters @property def enabled(self): return [x for x in self.all if x.format_on_save] def find(self, predicate, default=None): return next((x for x in self.all if predicate(x)), default) def by_view(self, view): source = view.scope_name(0).split(' ')[0].split('.')[1] return self.find(lambda x: source in x.sources) def by_name(self, name): return self.find(lambda x: x.name == name) <commit_msg>Speed up lookup of formatter by source type<commit_after>
from .formatter import * class FormatterRegistry(): def __init__(self): self.__formatters = [] self.__formatter_source_map = {} def populate(self): self.__formatters = [ ClangFormat(), ElmFormat(), GoFormat(), JavaScriptFormat(), JsonFormat(), PythonFormat(), RustFormat(), TerraformFormat() ] self.__formatter_source_map = dict((source, formatter) for formatter in self.__formatters for source in formatter.sources) @property def all(self): return self.__formatters @property def enabled(self): return [x for x in self.all if x.format_on_save] def by_view(self, view): source = view.scope_name(0).split(' ')[0].split('.')[1] return self.__formatter_source_map.get(source) def by_name(self, name): return next((x for x in self.all if x.name == name))
from .formatter import * class FormatterRegistry(): def __init__(self): self.__formatters = [] def populate(self): self.__formatters = [ ClangFormat(), ElmFormat(), GoFormat(), JavaScriptFormat(), JsonFormat(), PythonFormat(), RustFormat(), TerraformFormat() ] @property def all(self): return self.__formatters @property def enabled(self): return [x for x in self.all if x.format_on_save] def find(self, predicate, default=None): return next((x for x in self.all if predicate(x)), default) def by_view(self, view): source = view.scope_name(0).split(' ')[0].split('.')[1] return self.find(lambda x: source in x.sources) def by_name(self, name): return self.find(lambda x: x.name == name) Speed up lookup of formatter by source typefrom .formatter import * class FormatterRegistry(): def __init__(self): self.__formatters = [] self.__formatter_source_map = {} def populate(self): self.__formatters = [ ClangFormat(), ElmFormat(), GoFormat(), JavaScriptFormat(), JsonFormat(), PythonFormat(), RustFormat(), TerraformFormat() ] self.__formatter_source_map = dict((source, formatter) for formatter in self.__formatters for source in formatter.sources) @property def all(self): return self.__formatters @property def enabled(self): return [x for x in self.all if x.format_on_save] def by_view(self, view): source = view.scope_name(0).split(' ')[0].split('.')[1] return self.__formatter_source_map.get(source) def by_name(self, name): return next((x for x in self.all if x.name == name))
<commit_before>from .formatter import * class FormatterRegistry(): def __init__(self): self.__formatters = [] def populate(self): self.__formatters = [ ClangFormat(), ElmFormat(), GoFormat(), JavaScriptFormat(), JsonFormat(), PythonFormat(), RustFormat(), TerraformFormat() ] @property def all(self): return self.__formatters @property def enabled(self): return [x for x in self.all if x.format_on_save] def find(self, predicate, default=None): return next((x for x in self.all if predicate(x)), default) def by_view(self, view): source = view.scope_name(0).split(' ')[0].split('.')[1] return self.find(lambda x: source in x.sources) def by_name(self, name): return self.find(lambda x: x.name == name) <commit_msg>Speed up lookup of formatter by source type<commit_after>from .formatter import * class FormatterRegistry(): def __init__(self): self.__formatters = [] self.__formatter_source_map = {} def populate(self): self.__formatters = [ ClangFormat(), ElmFormat(), GoFormat(), JavaScriptFormat(), JsonFormat(), PythonFormat(), RustFormat(), TerraformFormat() ] self.__formatter_source_map = dict((source, formatter) for formatter in self.__formatters for source in formatter.sources) @property def all(self): return self.__formatters @property def enabled(self): return [x for x in self.all if x.format_on_save] def by_view(self, view): source = view.scope_name(0).split(' ')[0].split('.')[1] return self.__formatter_source_map.get(source) def by_name(self, name): return next((x for x in self.all if x.name == name))
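The change above is the classic inverted-index trade: populate() pays a one-time pass over every formatter's sources to build a source -> formatter dict, and by_view becomes a single hash lookup instead of a linear scan through predicates. A standalone sketch of the pattern with made-up data:

```python
formatters = {
    "clang-format": ["c", "c++", "objc"],
    "gofmt": ["go"],
}  # made-up formatter -> sources mapping

# Invert once: each source key points straight at its formatter.
source_map = {source: name
              for name, sources in formatters.items()
              for source in sources}

assert source_map["go"] == "gofmt"     # O(1) lookup
assert source_map.get("rust") is None  # unknown sources simply miss
```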
22cb22dfdb5ec4c19e8a90f65483cf372c5731a0
src/examples/command_group.py
src/examples/command_group.py
from cmdtree import group, argument, entry @group("docker") @argument("ip") def docker(): pass # nested command @docker.command("run") @argument("container-name") def run(ip, container_name): print( "container [{name}] on host [{ip}]".format( ip=ip, name=container_name, ) ) # nested command group @docker.group("image") def image(): pass @image.command("create") @argument("name") def image_create(ip, name): print( "iamge {name} on {ip} created.".format( ip=ip, name=name, ) ) if __name__ == "__main__": entry()
from cmdtree import group, argument, entry @group("fake-docker", "fake-docker command binds") def fake_docker(): pass @group("docker", "docker command binds") @argument("ip", help="docker daemon ip addr") def docker(): pass # nested command @docker.command("run", help="run docker command") @argument("container-name") def run(ip, container_name): print( "container [{name}] on host [{ip}]".format( ip=ip, name=container_name, ) ) # nested command group @docker.group("image", help="image operation") def image(): pass @image.command("create") @argument("name") def image_create(ip, name): print( "iamge {name} on {ip} created.".format( ip=ip, name=name, ) ) if __name__ == "__main__": entry()
Update examples for multiple groups
Update: Update examples for multiple groups
Python
mit
winkidney/cmdtree,winkidney/cmdtree
from cmdtree import group, argument, entry @group("docker") @argument("ip") def docker(): pass # nested command @docker.command("run") @argument("container-name") def run(ip, container_name): print( "container [{name}] on host [{ip}]".format( ip=ip, name=container_name, ) ) # nested command group @docker.group("image") def image(): pass @image.command("create") @argument("name") def image_create(ip, name): print( "image {name} on {ip} created.".format( ip=ip, name=name, ) ) if __name__ == "__main__": entry()Update: Update examples for multiple groups
from cmdtree import group, argument, entry @group("fake-docker", "fake-docker command binds") def fake_docker(): pass @group("docker", "docker command binds") @argument("ip", help="docker daemon ip addr") def docker(): pass # nested command @docker.command("run", help="run docker command") @argument("container-name") def run(ip, container_name): print( "container [{name}] on host [{ip}]".format( ip=ip, name=container_name, ) ) # nested command group @docker.group("image", help="image operation") def image(): pass @image.command("create") @argument("name") def image_create(ip, name): print( "image {name} on {ip} created.".format( ip=ip, name=name, ) ) if __name__ == "__main__": entry()
<commit_before>from cmdtree import group, argument, entry @group("docker") @argument("ip") def docker(): pass # nested command @docker.command("run") @argument("container-name") def run(ip, container_name): print( "container [{name}] on host [{ip}]".format( ip=ip, name=container_name, ) ) # nested command group @docker.group("image") def image(): pass @image.command("create") @argument("name") def image_create(ip, name): print( "image {name} on {ip} created.".format( ip=ip, name=name, ) ) if __name__ == "__main__": entry()<commit_msg>Update: Update examples for multiple groups<commit_after>
from cmdtree import group, argument, entry @group("fake-docker", "fake-docker command binds") def fake_docker(): pass @group("docker", "docker command binds") @argument("ip", help="docker daemon ip addr") def docker(): pass # nested command @docker.command("run", help="run docker command") @argument("container-name") def run(ip, container_name): print( "container [{name}] on host [{ip}]".format( ip=ip, name=container_name, ) ) # nested command group @docker.group("image", help="image operation") def image(): pass @image.command("create") @argument("name") def image_create(ip, name): print( "image {name} on {ip} created.".format( ip=ip, name=name, ) ) if __name__ == "__main__": entry()
from cmdtree import group, argument, entry @group("docker") @argument("ip") def docker(): pass # nested command @docker.command("run") @argument("container-name") def run(ip, container_name): print( "container [{name}] on host [{ip}]".format( ip=ip, name=container_name, ) ) # nested command group @docker.group("image") def image(): pass @image.command("create") @argument("name") def image_create(ip, name): print( "image {name} on {ip} created.".format( ip=ip, name=name, ) ) if __name__ == "__main__": entry()Update: Update examples for multiple groupsfrom cmdtree import group, argument, entry @group("fake-docker", "fake-docker command binds") def fake_docker(): pass @group("docker", "docker command binds") @argument("ip", help="docker daemon ip addr") def docker(): pass # nested command @docker.command("run", help="run docker command") @argument("container-name") def run(ip, container_name): print( "container [{name}] on host [{ip}]".format( ip=ip, name=container_name, ) ) # nested command group @docker.group("image", help="image operation") def image(): pass @image.command("create") @argument("name") def image_create(ip, name): print( "image {name} on {ip} created.".format( ip=ip, name=name, ) ) if __name__ == "__main__": entry()
<commit_before>from cmdtree import group, argument, entry @group("docker") @argument("ip") def docker(): pass # nested command @docker.command("run") @argument("container-name") def run(ip, container_name): print( "container [{name}] on host [{ip}]".format( ip=ip, name=container_name, ) ) # nested command group @docker.group("image") def image(): pass @image.command("create") @argument("name") def image_create(ip, name): print( "image {name} on {ip} created.".format( ip=ip, name=name, ) ) if __name__ == "__main__": entry()<commit_msg>Update: Update examples for multiple groups<commit_after>from cmdtree import group, argument, entry @group("fake-docker", "fake-docker command binds") def fake_docker(): pass @group("docker", "docker command binds") @argument("ip", help="docker daemon ip addr") def docker(): pass # nested command @docker.command("run", help="run docker command") @argument("container-name") def run(ip, container_name): print( "container [{name}] on host [{ip}]".format( ip=ip, name=container_name, ) ) # nested command group @docker.group("image", help="image operation") def image(): pass @image.command("create") @argument("name") def image_create(ip, name): print( "image {name} on {ip} created.".format( ip=ip, name=name, ) ) if __name__ == "__main__": entry()
934acb38906b9b6e42620d2e8153dbfd129f01e4
src/damis/models.py
src/damis/models.py
from django.db import models from django.contrib.auth.models import User class DatasetLicence(models.Model): title = models.CharField(max_length=255) short_title = models.CharField(max_length=30) url = models.URLField() summary = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) class FileFormat(models.Model): extension = models.CharField(max_length=10) description = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) def get_dataset_upload_path(self, instance, filename): return '/%s/' % instance.author.username class Dataset(models.Model): title = models.CharField(max_length=255) licence = models.ForeignKey('DatasetLicence') file = models.FileField(upload_to=get_dataset_upload_path) file_format = models.ForeignKey('FileFormat') description = models.TextField() author = models.ForeignKey(User) updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True)
from django.db import models from django.contrib.auth.models import User class DatasetLicence(models.Model): title = models.CharField(max_length=255) short_title = models.CharField(max_length=30) url = models.URLField() summary = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) class FileFormat(models.Model): extension = models.CharField(max_length=10) description = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) def get_dataset_upload_path(self, instance, filename): return '/%s/' % instance.author.username class Dataset(models.Model): title = models.CharField(max_length=255) licence = models.ForeignKey('DatasetLicence') file = models.FileField(upload_to=get_dataset_upload_path) file_format = models.ForeignKey('FileFormat') description = models.TextField() author = models.ForeignKey(User) slug = models.SlugField(max_length=40) updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True)
Add slug field to Dataset model. Using slugs for API looks better than pk.
Add slug field to Dataset model. Using slugs for API looks better than pk.
Python
agpl-3.0
InScience/DAMIS-old,InScience/DAMIS-old
from django.db import models from django.contrib.auth.models import User class DatasetLicence(models.Model): title = models.CharField(max_length=255) short_title = models.CharField(max_length=30) url = models.URLField() summary = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) class FileFormat(models.Model): extension = models.CharField(max_length=10) description = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) def get_dataset_upload_path(self, instance, filename): return '/%s/' % instance.author.username class Dataset(models.Model): title = models.CharField(max_length=255) licence = models.ForeignKey('DatasetLicence') file = models.FileField(upload_to=get_dataset_upload_path) file_format = models.ForeignKey('FileFormat') description = models.TextField() author = models.ForeignKey(User) updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) Add slug field to Dataset model. Using slugs for API looks better than pk.
from django.db import models from django.contrib.auth.models import User class DatasetLicence(models.Model): title = models.CharField(max_length=255) short_title = models.CharField(max_length=30) url = models.URLField() summary = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) class FileFormat(models.Model): extension = models.CharField(max_length=10) description = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) def get_dataset_upload_path(self, instance, filename): return '/%s/' % instance.author.username class Dataset(models.Model): title = models.CharField(max_length=255) licence = models.ForeignKey('DatasetLicence') file = models.FileField(upload_to=get_dataset_upload_path) file_format = models.ForeignKey('FileFormat') description = models.TextField() author = models.ForeignKey(User) slug = models.SlugField(max_length=40) updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True)
<commit_before>from django.db import models from django.contrib.auth.models import User class DatasetLicence(models.Model): title = models.CharField(max_length=255) short_title = models.CharField(max_length=30) url = models.URLField() summary = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) class FileFormat(models.Model): extension = models.CharField(max_length=10) description = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) def get_dataset_upload_path(self, instance, filename): return '/%s/' % instance.author.username class Dataset(models.Model): title = models.CharField(max_length=255) licence = models.ForeignKey('DatasetLicence') file = models.FileField(upload_to=get_dataset_upload_path) file_format = models.ForeignKey('FileFormat') description = models.TextField() author = models.ForeignKey(User) updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) <commit_msg>Add slug field to Dataset model. Using slugs for API looks better than pk.<commit_after>
from django.db import models from django.contrib.auth.models import User class DatasetLicence(models.Model): title = models.CharField(max_length=255) short_title = models.CharField(max_length=30) url = models.URLField() summary = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) class FileFormat(models.Model): extension = models.CharField(max_length=10) description = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) def get_dataset_upload_path(self, instance, filename): return '/%s/' % instance.author.username class Dataset(models.Model): title = models.CharField(max_length=255) licence = models.ForeignKey('DatasetLicence') file = models.FileField(upload_to=get_dataset_upload_path) file_format = models.ForeignKey('FileFormat') description = models.TextField() author = models.ForeignKey(User) slug = models.SlugField(max_length=40) updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True)
from django.db import models from django.contrib.auth.models import User class DatasetLicence(models.Model): title = models.CharField(max_length=255) short_title = models.CharField(max_length=30) url = models.URLField() summary = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) class FileFormat(models.Model): extension = models.CharField(max_length=10) description = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) def get_dataset_upload_path(self, instance, filename): return '/%s/' % instance.author.username class Dataset(models.Model): title = models.CharField(max_length=255) licence = models.ForeignKey('DatasetLicence') file = models.FileField(upload_to=get_dataset_upload_path) file_format = models.ForeignKey('FileFormat') description = models.TextField() author = models.ForeignKey(User) updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) Add slug field to Dataset model. Using slugs for API looks better than pk.from django.db import models from django.contrib.auth.models import User class DatasetLicence(models.Model): title = models.CharField(max_length=255) short_title = models.CharField(max_length=30) url = models.URLField() summary = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) class FileFormat(models.Model): extension = models.CharField(max_length=10) description = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) def get_dataset_upload_path(self, instance, filename): return '/%s/' % instance.author.username class Dataset(models.Model): title = models.CharField(max_length=255) licence = models.ForeignKey('DatasetLicence') file = models.FileField(upload_to=get_dataset_upload_path) file_format = models.ForeignKey('FileFormat') description = models.TextField() author = models.ForeignKey(User) slug = models.SlugField(max_length=40) updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True)
<commit_before>from django.db import models from django.contrib.auth.models import User class DatasetLicence(models.Model): title = models.CharField(max_length=255) short_title = models.CharField(max_length=30) url = models.URLField() summary = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) class FileFormat(models.Model): extension = models.CharField(max_length=10) description = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) def get_dataset_upload_path(self, instance, filename): return '/%s/' % instance.author.username class Dataset(models.Model): title = models.CharField(max_length=255) licence = models.ForeignKey('DatasetLicence') file = models.FileField(upload_to=get_dataset_upload_path) file_format = models.ForeignKey('FileFormat') description = models.TextField() author = models.ForeignKey(User) updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) <commit_msg>Add slug field to Dataset model. Using slugs for API looks better than pk.<commit_after>from django.db import models from django.contrib.auth.models import User class DatasetLicence(models.Model): title = models.CharField(max_length=255) short_title = models.CharField(max_length=30) url = models.URLField() summary = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) class FileFormat(models.Model): extension = models.CharField(max_length=10) description = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) def get_dataset_upload_path(self, instance, filename): return '/%s/' % instance.author.username class Dataset(models.Model): title = models.CharField(max_length=255) licence = models.ForeignKey('DatasetLicence') file = models.FileField(upload_to=get_dataset_upload_path) file_format = models.ForeignKey('FileFormat') description = models.TextField() author = models.ForeignKey(User) slug = models.SlugField(max_length=40) updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True)
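The commit above only adds the column; how slug gets populated is left open. One common Django pattern — purely a sketch, not part of this commit, and pared down to the two relevant fields — derives it from title on save:

```python
from django.db import models
from django.template.defaultfilters import slugify

class Dataset(models.Model):
    title = models.CharField(max_length=255)
    slug = models.SlugField(max_length=40)

    def save(self, *args, **kwargs):
        if not self.slug:
            # Truncate to the SlugField's declared max_length of 40.
            self.slug = slugify(self.title)[:40]
        super(Dataset, self).save(*args, **kwargs)
```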
d82588adaded8943543b9f0300e0d683925496ce
01/server.py
01/server.py
from google.appengine.api import users import webapp2 class MainPage(webapp2.RequestHandler): def get(self): self.response.set_status(200) self.response.headers['Content-Type'] = 'text/html; charset=utf-8' if users.get_current_user(): url = users.create_logout_url(self.request.uri) url_linktext = 'Logout' else: url = users.create_login_url(self.request.uri) url_linktext = 'Login' self.response.out.write('Hello World') application = webapp2.WSGIApplication([ ('/', MainPage), ], debug=True)
import webapp2 class MainPage(webapp2.RequestHandler): def get(self): self.response.set_status(200) self.response.headers['Content-Type'] = 'text/html; charset=utf-8' self.response.out.write('Hello World') application = webapp2.WSGIApplication([ ('/', MainPage), ], debug=True)
Simplify by removing user login.
Simplify by removing user login.
Python
apache-2.0
luisibanez/appengine-python-101
from google.appengine.api import users import webapp2 class MainPage(webapp2.RequestHandler): def get(self): self.response.set_status(200) self.response.headers['Content-Type'] = 'text/html; charset=utf-8' if users.get_current_user(): url = users.create_logout_url(self.request.uri) url_linktext = 'Logout' else: url = users.create_login_url(self.request.uri) url_linktext = 'Login' self.response.out.write('Hello World') application = webapp2.WSGIApplication([ ('/', MainPage), ], debug=True) Simplify by removing user login.
import webapp2 class MainPage(webapp2.RequestHandler): def get(self): self.response.set_status(200) self.response.headers['Content-Type'] = 'text/html; charset=utf-8' self.response.out.write('Hello World') application = webapp2.WSGIApplication([ ('/', MainPage), ], debug=True)
<commit_before> from google.appengine.api import users import webapp2 class MainPage(webapp2.RequestHandler): def get(self): self.response.set_status(200) self.response.headers['Content-Type'] = 'text/html; charset=utf-8' if users.get_current_user(): url = users.create_logout_url(self.request.uri) url_linktext = 'Logout' else: url = users.create_login_url(self.request.uri) url_linktext = 'Login' self.response.out.write('Hello World') application = webapp2.WSGIApplication([ ('/', MainPage), ], debug=True) <commit_msg>Simplify by removing user login.<commit_after>
import webapp2 class MainPage(webapp2.RequestHandler): def get(self): self.response.set_status(200) self.response.headers['Content-Type'] = 'text/html; charset=utf-8' self.response.out.write('Hello World') application = webapp2.WSGIApplication([ ('/', MainPage), ], debug=True)
from google.appengine.api import users import webapp2 class MainPage(webapp2.RequestHandler): def get(self): self.response.set_status(200) self.response.headers['Content-Type'] = 'text/html; charset=utf-8' if users.get_current_user(): url = users.create_logout_url(self.request.uri) url_linktext = 'Logout' else: url = users.create_login_url(self.request.uri) url_linktext = 'Login' self.response.out.write('Hello World') application = webapp2.WSGIApplication([ ('/', MainPage), ], debug=True) Simplify by removing user login. import webapp2 class MainPage(webapp2.RequestHandler): def get(self): self.response.set_status(200) self.response.headers['Content-Type'] = 'text/html; charset=utf-8' self.response.out.write('Hello World') application = webapp2.WSGIApplication([ ('/', MainPage), ], debug=True)
<commit_before> from google.appengine.api import users import webapp2 class MainPage(webapp2.RequestHandler): def get(self): self.response.set_status(200) self.response.headers['Content-Type'] = 'text/html; charset=utf-8' if users.get_current_user(): url = users.create_logout_url(self.request.uri) url_linktext = 'Logout' else: url = users.create_login_url(self.request.uri) url_linktext = 'Login' self.response.out.write('Hello World') application = webapp2.WSGIApplication([ ('/', MainPage), ], debug=True) <commit_msg>Simplify by removing user login.<commit_after> import webapp2 class MainPage(webapp2.RequestHandler): def get(self): self.response.set_status(200) self.response.headers['Content-Type'] = 'text/html; charset=utf-8' self.response.out.write('Hello World') application = webapp2.WSGIApplication([ ('/', MainPage), ], debug=True)
6b1ad76140741fd29d8a0d0a0e057b59cd312587
corehq/sql_db/routers.py
corehq/sql_db/routers.py
from .config import PartitionConfig PROXY_APP = 'sql_proxy_accessors' SQL_ACCESSORS_APP = 'sql_accessors' FORM_PROCESSING_GROUP = 'form_processing' PROXY_GROUP = 'proxy' MAIN_GROUP = 'main' class PartitionRouter(object): def __init__(self): self.config = PartitionConfig() def allow_migrate(self, db, app_label, model=None, **hints): if app_label == PROXY_APP: return (db in self.config.dbs_by_group(PROXY_GROUP) or db in self.config.dbs_by_group(FORM_PROCESSING_GROUP)) elif app_label == SQL_ACCESSORS_APP: return db in self.config.dbs_by_group(FORM_PROCESSING_GROUP) else: return db in self.config.dbs_by_group(MAIN_GROUP) class MonolithRouter(object): def __init__(self): self.config = PartitionConfig() def allow_migrate(self, db, app_label, model=None, **hints): return app_label != PROXY_APP
from .config import PartitionConfig PROXY_APP = 'sql_proxy_accessors' SQL_ACCESSORS_APP = 'sql_accessors' FORM_PROCESSING_GROUP = 'form_processing' PROXY_GROUP = 'proxy' MAIN_GROUP = 'main' class PartitionRouter(object): def __init__(self): self.config = PartitionConfig() def allow_migrate(self, db, app_label, model=None, **hints): if app_label == PROXY_APP: return (db in self.config.dbs_by_group(PROXY_GROUP) or db in self.config.dbs_by_group(FORM_PROCESSING_GROUP)) elif app_label == SQL_ACCESSORS_APP: return db in self.config.dbs_by_group(FORM_PROCESSING_GROUP) else: return db in self.config.dbs_by_group(MAIN_GROUP) class MonolithRouter(object): def allow_migrate(self, db, app_label, model=None, **hints): return app_label != PROXY_APP
Remove unnecessary call to PartitionConfig
Remove unnecessary call to PartitionConfig
Python
bsd-3-clause
dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq
from .config import PartitionConfig PROXY_APP = 'sql_proxy_accessors' SQL_ACCESSORS_APP = 'sql_accessors' FORM_PROCESSING_GROUP = 'form_processing' PROXY_GROUP = 'proxy' MAIN_GROUP = 'main' class PartitionRouter(object): def __init__(self): self.config = PartitionConfig() def allow_migrate(self, db, app_label, model=None, **hints): if app_label == PROXY_APP: return (db in self.config.dbs_by_group(PROXY_GROUP) or db in self.config.dbs_by_group(FORM_PROCESSING_GROUP)) elif app_label == SQL_ACCESSORS_APP: return db in self.config.dbs_by_group(FORM_PROCESSING_GROUP) else: return db in self.config.dbs_by_group(MAIN_GROUP) class MonolithRouter(object): def __init__(self): self.config = PartitionConfig() def allow_migrate(self, db, app_label, model=None, **hints): return app_label != PROXY_APP Remove unnecessary call to PartitionConfig
from .config import PartitionConfig PROXY_APP = 'sql_proxy_accessors' SQL_ACCESSORS_APP = 'sql_accessors' FORM_PROCESSING_GROUP = 'form_processing' PROXY_GROUP = 'proxy' MAIN_GROUP = 'main' class PartitionRouter(object): def __init__(self): self.config = PartitionConfig() def allow_migrate(self, db, app_label, model=None, **hints): if app_label == PROXY_APP: return (db in self.config.dbs_by_group(PROXY_GROUP) or db in self.config.dbs_by_group(FORM_PROCESSING_GROUP)) elif app_label == SQL_ACCESSORS_APP: return db in self.config.dbs_by_group(FORM_PROCESSING_GROUP) else: return db in self.config.dbs_by_group(MAIN_GROUP) class MonolithRouter(object): def allow_migrate(self, db, app_label, model=None, **hints): return app_label != PROXY_APP
<commit_before>from .config import PartitionConfig PROXY_APP = 'sql_proxy_accessors' SQL_ACCESSORS_APP = 'sql_accessors' FORM_PROCESSING_GROUP = 'form_processing' PROXY_GROUP = 'proxy' MAIN_GROUP = 'main' class PartitionRouter(object): def __init__(self): self.config = PartitionConfig() def allow_migrate(self, db, app_label, model=None, **hints): if app_label == PROXY_APP: return (db in self.config.dbs_by_group(PROXY_GROUP) or db in self.config.dbs_by_group(FORM_PROCESSING_GROUP)) elif app_label == SQL_ACCESSORS_APP: return db in self.config.dbs_by_group(FORM_PROCESSING_GROUP) else: return db in self.config.dbs_by_group(MAIN_GROUP) class MonolithRouter(object): def __init__(self): self.config = PartitionConfig() def allow_migrate(self, db, app_label, model=None, **hints): return app_label != PROXY_APP <commit_msg>Remove unnecessary call to PartitionConfig<commit_after>
from .config import PartitionConfig PROXY_APP = 'sql_proxy_accessors' SQL_ACCESSORS_APP = 'sql_accessors' FORM_PROCESSING_GROUP = 'form_processing' PROXY_GROUP = 'proxy' MAIN_GROUP = 'main' class PartitionRouter(object): def __init__(self): self.config = PartitionConfig() def allow_migrate(self, db, app_label, model=None, **hints): if app_label == PROXY_APP: return (db in self.config.dbs_by_group(PROXY_GROUP) or db in self.config.dbs_by_group(FORM_PROCESSING_GROUP)) elif app_label == SQL_ACCESSORS_APP: return db in self.config.dbs_by_group(FORM_PROCESSING_GROUP) else: return db in self.config.dbs_by_group(MAIN_GROUP) class MonolithRouter(object): def allow_migrate(self, db, app_label, model=None, **hints): return app_label != PROXY_APP
from .config import PartitionConfig PROXY_APP = 'sql_proxy_accessors' SQL_ACCESSORS_APP = 'sql_accessors' FORM_PROCESSING_GROUP = 'form_processing' PROXY_GROUP = 'proxy' MAIN_GROUP = 'main' class PartitionRouter(object): def __init__(self): self.config = PartitionConfig() def allow_migrate(self, db, app_label, model=None, **hints): if app_label == PROXY_APP: return (db in self.config.dbs_by_group(PROXY_GROUP) or db in self.config.dbs_by_group(FORM_PROCESSING_GROUP)) elif app_label == SQL_ACCESSORS_APP: return db in self.config.dbs_by_group(FORM_PROCESSING_GROUP) else: return db in self.config.dbs_by_group(MAIN_GROUP) class MonolithRouter(object): def __init__(self): self.config = PartitionConfig() def allow_migrate(self, db, app_label, model=None, **hints): return app_label != PROXY_APP Remove unnecessary call to PartitionConfigfrom .config import PartitionConfig PROXY_APP = 'sql_proxy_accessors' SQL_ACCESSORS_APP = 'sql_accessors' FORM_PROCESSING_GROUP = 'form_processing' PROXY_GROUP = 'proxy' MAIN_GROUP = 'main' class PartitionRouter(object): def __init__(self): self.config = PartitionConfig() def allow_migrate(self, db, app_label, model=None, **hints): if app_label == PROXY_APP: return (db in self.config.dbs_by_group(PROXY_GROUP) or db in self.config.dbs_by_group(FORM_PROCESSING_GROUP)) elif app_label == SQL_ACCESSORS_APP: return db in self.config.dbs_by_group(FORM_PROCESSING_GROUP) else: return db in self.config.dbs_by_group(MAIN_GROUP) class MonolithRouter(object): def allow_migrate(self, db, app_label, model=None, **hints): return app_label != PROXY_APP
<commit_before>from .config import PartitionConfig PROXY_APP = 'sql_proxy_accessors' SQL_ACCESSORS_APP = 'sql_accessors' FORM_PROCESSING_GROUP = 'form_processing' PROXY_GROUP = 'proxy' MAIN_GROUP = 'main' class PartitionRouter(object): def __init__(self): self.config = PartitionConfig() def allow_migrate(self, db, app_label, model=None, **hints): if app_label == PROXY_APP: return (db in self.config.dbs_by_group(PROXY_GROUP) or db in self.config.dbs_by_group(FORM_PROCESSING_GROUP)) elif app_label == SQL_ACCESSORS_APP: return db in self.config.dbs_by_group(FORM_PROCESSING_GROUP) else: return db in self.config.dbs_by_group(MAIN_GROUP) class MonolithRouter(object): def __init__(self): self.config = PartitionConfig() def allow_migrate(self, db, app_label, model=None, **hints): return app_label != PROXY_APP <commit_msg>Remove unnecessary call to PartitionConfig<commit_after>from .config import PartitionConfig PROXY_APP = 'sql_proxy_accessors' SQL_ACCESSORS_APP = 'sql_accessors' FORM_PROCESSING_GROUP = 'form_processing' PROXY_GROUP = 'proxy' MAIN_GROUP = 'main' class PartitionRouter(object): def __init__(self): self.config = PartitionConfig() def allow_migrate(self, db, app_label, model=None, **hints): if app_label == PROXY_APP: return (db in self.config.dbs_by_group(PROXY_GROUP) or db in self.config.dbs_by_group(FORM_PROCESSING_GROUP)) elif app_label == SQL_ACCESSORS_APP: return db in self.config.dbs_by_group(FORM_PROCESSING_GROUP) else: return db in self.config.dbs_by_group(MAIN_GROUP) class MonolithRouter(object): def allow_migrate(self, db, app_label, model=None, **hints): return app_label != PROXY_APP
8a0ce66150bb4e1147f5fb88fdd8fd0d391c7daa
dask_ndmeasure/_utils.py
dask_ndmeasure/_utils.py
# -*- coding: utf-8 -*-
# -*- coding: utf-8 -*- import numpy import dask.array from . import _compat def _norm_input_labels_index(input, labels=None, index=None): """ Normalize arguments to a standard form. """ input = _compat._asarray(input) if labels is None: labels = (input != 0).astype(numpy.int64) index = None if index is None: labels = (labels > 0).astype(numpy.int64) index = dask.array.ones(tuple(), dtype=numpy.int64, chunks=tuple()) labels = _compat._asarray(labels) index = _compat._asarray(index) # SciPy transposes these for some reason. # So we do the same thing here. # This only matters if index is some array. index = index.T if input.shape != labels.shape: raise ValueError("The input and labels arrays must be the same shape.") return (input, labels, index)
Add arg normalizing function for labels and index
Add arg normalizing function for labels and index Refactored from code in `center_of_mass`, the `_norm_input_labels_index` function handles the normalization of `input`, `labels`, and `index` arguments. As these particular arguments will show up repeatedly in the API, it will be very helpful to normalize them in the general case so the API functions can be focused on their particular computations.
Python
bsd-3-clause
dask-image/dask-ndmeasure
# -*- coding: utf-8 -*- Add arg normalizing function for labels and index Refactored from code in `center_of_mass`, the `_norm_input_labels_index` function handles the normalization of `input`, `labels`, and `index` arguments. As these particular arguments will show up repeatedly in the API, it will be very helpful to normalize them in the general case so the API functions can be focused on their particular computations.
# -*- coding: utf-8 -*- import numpy import dask.array from . import _compat def _norm_input_labels_index(input, labels=None, index=None): """ Normalize arguments to a standard form. """ input = _compat._asarray(input) if labels is None: labels = (input != 0).astype(numpy.int64) index = None if index is None: labels = (labels > 0).astype(numpy.int64) index = dask.array.ones(tuple(), dtype=numpy.int64, chunks=tuple()) labels = _compat._asarray(labels) index = _compat._asarray(index) # SciPy transposes these for some reason. # So we do the same thing here. # This only matters if index is some array. index = index.T if input.shape != labels.shape: raise ValueError("The input and labels arrays must be the same shape.") return (input, labels, index)
<commit_before># -*- coding: utf-8 -*- <commit_msg>Add arg normalizing function for labels and index Refactored from code in `center_of_mass`, the `_norm_input_labels_index` function handles the normalization of `input`, `labels`, and `index` arguments. As these particular arguments will show up repeatedly in the API, it will be very helpful to normalize them in the general case so the API functions can be focused on their particular computations.<commit_after>
# -*- coding: utf-8 -*- import numpy import dask.array from . import _compat def _norm_input_labels_index(input, labels=None, index=None): """ Normalize arguments to a standard form. """ input = _compat._asarray(input) if labels is None: labels = (input != 0).astype(numpy.int64) index = None if index is None: labels = (labels > 0).astype(numpy.int64) index = dask.array.ones(tuple(), dtype=numpy.int64, chunks=tuple()) labels = _compat._asarray(labels) index = _compat._asarray(index) # SciPy transposes these for some reason. # So we do the same thing here. # This only matters if index is some array. index = index.T if input.shape != labels.shape: raise ValueError("The input and labels arrays must be the same shape.") return (input, labels, index)
# -*- coding: utf-8 -*- Add arg normalizing function for labels and index Refactored from code in `center_of_mass`, the `_norm_input_labels_index` function handles the normalization of `input`, `labels`, and `index` arguments. As these particular arguments will show up repeatedly in the API, it will be very helpful to normalize them in the general case so the API functions can be focused on their particular computations.# -*- coding: utf-8 -*- import numpy import dask.array from . import _compat def _norm_input_labels_index(input, labels=None, index=None): """ Normalize arguments to a standard form. """ input = _compat._asarray(input) if labels is None: labels = (input != 0).astype(numpy.int64) index = None if index is None: labels = (labels > 0).astype(numpy.int64) index = dask.array.ones(tuple(), dtype=numpy.int64, chunks=tuple()) labels = _compat._asarray(labels) index = _compat._asarray(index) # SciPy transposes these for some reason. # So we do the same thing here. # This only matters if index is some array. index = index.T if input.shape != labels.shape: raise ValueError("The input and labels arrays must be the same shape.") return (input, labels, index)
<commit_before># -*- coding: utf-8 -*- <commit_msg>Add arg normalizing function for labels and index Refactored from code in `center_of_mass`, the `_norm_input_labels_index` function handles the normalization of `input`, `labels`, and `index` arguments. As these particular arguments will show up repeatedly in the API, it will be very helpful to normalize them in the general case so the API functions can be focused on their particular computations.<commit_after># -*- coding: utf-8 -*- import numpy import dask.array from . import _compat def _norm_input_labels_index(input, labels=None, index=None): """ Normalize arguments to a standard form. """ input = _compat._asarray(input) if labels is None: labels = (input != 0).astype(numpy.int64) index = None if index is None: labels = (labels > 0).astype(numpy.int64) index = dask.array.ones(tuple(), dtype=numpy.int64, chunks=tuple()) labels = _compat._asarray(labels) index = _compat._asarray(index) # SciPy transposes these for some reason. # So we do the same thing here. # This only matters if index is some array. index = index.T if input.shape != labels.shape: raise ValueError("The input and labels arrays must be the same shape.") return (input, labels, index)
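The payoff described in the message above is that every measurement function can open with the same call. A sketch of a hypothetical caller — sum_labels is invented for illustration, is not part of the package, and assumes it lives in the same module as the helper:

```python
import dask.array

def sum_labels(input, labels=None, index=None):
    # Hypothetical API function, not part of dask-ndmeasure.
    input, labels, index = _norm_input_labels_index(input, labels, index)
    # After normalization the shapes are guaranteed to match, so the body
    # can focus on its own computation (per-index handling elided here).
    return dask.array.where(labels > 0, input, 0).sum()
```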
b8d50cf4f7431ed617957e7d6e432a1729656524
setuptools/command/__init__.py
setuptools/command/__init__.py
from distutils.command.bdist import bdist
import sys

if 'egg' not in bdist.format_commands:
    bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
    bdist.format_commands.append('egg')

del bdist, sys
from distutils.command.bdist import bdist
import sys

if 'egg' not in bdist.format_commands:
    try:
        bdist.format_commands['egg'] = ('bdist_egg', "Python .egg file")
    except TypeError:
        # For backward compatibility with older distutils (stdlib)
        bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
        bdist.format_commands.append('egg')

del bdist, sys
Update 'bdist' format addition to assume a single 'format_commands' as a dictionary, but fall back to the dual dict/list model for compatibility with stdlib.
Update 'bdist' format addition to assume a single 'format_commands' as a dictionary, but fall back to the dual dict/list model for compatibility with stdlib.
Python
mit
pypa/setuptools,pypa/setuptools,pypa/setuptools
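An aside on the probe pattern in this record (my illustration, not part of the commit): indexing a plain list with a string key raises TypeError, so the assignment inside the try block doubles as a feature test for the newer dict-style format_commands. A minimal, self-contained sketch:

# Hypothetical stand-in registries; only the probe pattern mirrors the commit.
format_commands = ['gztar', 'zip']   # old-style distutils: a plain list
format_command = {}                  # old-style distutils: a parallel dict

try:
    format_commands['egg'] = ('bdist_egg', "Python .egg file")
except TypeError:
    # A string key can't index a list, so this must be the old dual model.
    format_command['egg'] = ('bdist_egg', "Python .egg file")
    format_commands.append('egg')

assert 'egg' in format_commands and 'egg' in format_command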
e392998022ec41b82276464ffecbd859d5e13c63
src/models/facility_monitoring.py
src/models/facility_monitoring.py
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt


class facility(object):
    """ A Facility currently under monitoring """

    def __init__(self , data) :
        self.validated_data = data

    def monitor_new_report(self) :
        out = np.random.choice(['Validate' , 'Supervise - Data' ,
                                'Supervise - Services' ,
                                'Supervise - Data and Quality'])
        return out


u = facility('a')
u.monitor_new_report()
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt

from reports_monitoring import *

store = pd.HDFStore('../../data/processed/orbf_benin.h5')
data_orbf = store['data']
store.close()


class facility(object):
    """ A Facility currently under monitoring """

    def __init__(self , data) :
        self.validated_data = data

    def monitor_new_report(self) :
        out = np.random.choice(['Validate' , 'Supervise - Data' ,
                                'Supervise - Services' ,
                                'Supervise - Data and Quality'])
        return out

    def make_reports(self):
        reports = {}
        for month in list(data.date.unique()) :
            reports[str(month)[:7]] = report(data[data.date == month])
        self.reports = reports
        return reports
Add report making for facility
Add report making for facility
Python
mit
grlurton/orbf_data_validation,grlurton/orbf_data_validation
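One detail in make_reports above that may not be obvious: keying reports by str(month)[:7]. Assuming the date column holds numpy datetime64 values (the data file itself is not shown in this record), the string form begins with 'YYYY-MM', so the slice yields a per-month key:

import numpy as np

# Assumption: data.date holds numpy datetime64 values.
month = np.datetime64('2016-03-15')
print(str(month)[:7])  # '2016-03', the key format used by make_reports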
7c47a2960d644b34ce3ff569042fb5e965270e8c
netsecus/task.py
netsecus/task.py
from __future__ import unicode_literals


class Task(object):
    def __init__(self, taskID, sheetID, name, description, maxPoints,
                 reachedPoints=0):
        self.id = taskID
        self.sheetID = sheetID
        self.name = name
        self.description = description
        self.maxPoints = maxPoints
        self.reachedPoints = reachedPoints
from __future__ import unicode_literals


class Task(object):
    def __init__(self, taskID, sheetID, name, description, maxPoints):
        self.id = taskID
        self.sheetID = sheetID
        self.name = name
        self.description = description
        self.maxPoints = maxPoints
Remove unneeded 'reachedPoints' variable from Task class
Remove unneeded 'reachedPoints' variable from Task class
Python
mit
hhucn/netsec-uebungssystem,hhucn/netsec-uebungssystem,hhucn/netsec-uebungssystem
fac2335ddbd0b3924ab5fe899ce547734b286471
spec/data/fixtures/__init__.py
spec/data/fixtures/__init__.py
from data import anagram_index, crossword, warehouse
from spec.data.fixtures import tries


def _get_unigram_anagram_index():
  return anagram_index.AnagramIndex(warehouse.get('/words/unigram/trie'))

def _get_unigram_trie():
  return tries.kitchen_sink()

def _get_crossword():
  connection = crossword.init(':memory:')
  cursor = connection.cursor()
  crossword.add(cursor, 'query', 1, {'ask': 1, 'question': 1})
  return connection, cursor

def _get_crossword_connection():
  connection, cursor = warehouse.get('/phrases/crossword')
  del cursor
  return connection

def _get_crossword_cursor():
  connection, cursor = warehouse.get('/phrases/crossword')
  del connection
  return cursor

warehouse.init()
warehouse.register('/phrases/crossword', _get_crossword)
warehouse.register('/phrases/crossword/connection', _get_crossword_connection)
warehouse.register('/phrases/crossword/cursor', _get_crossword_cursor)
warehouse.register('/words/unigram/anagram_index', _get_unigram_anagram_index)
warehouse.register('/words/unigram/trie', _get_unigram_trie)
import collections

from data import anagram_index, crossword, warehouse
from spec.data.fixtures import tries


def _get_unigram():
  return collections.OrderedDict(tries.kitchen_sink_data())

def _get_unigram_anagram_index():
  return anagram_index.AnagramIndex(warehouse.get('/words/unigram'))

def _get_unigram_trie():
  return tries.kitchen_sink()

def _get_crossword():
  connection = crossword.init(':memory:')
  cursor = connection.cursor()
  crossword.add(cursor, 'query', 1, {'ask': 1, 'question': 1})
  return connection, cursor

def _get_crossword_connection():
  connection, cursor = warehouse.get('/phrases/crossword')
  del cursor
  return connection

def _get_crossword_cursor():
  connection, cursor = warehouse.get('/phrases/crossword')
  del connection
  return cursor

warehouse.init()
warehouse.register('/phrases/crossword', _get_crossword)
warehouse.register('/phrases/crossword/connection', _get_crossword_connection)
warehouse.register('/phrases/crossword/cursor', _get_crossword_cursor)
warehouse.register('/words/unigram', _get_unigram)
warehouse.register('/words/unigram/anagram_index', _get_unigram_anagram_index)
warehouse.register('/words/unigram/trie', _get_unigram_trie)
Introduce unigram data to warehouse module.
Introduce unigram data to warehouse module.
Python
mit
PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge
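The warehouse.register/warehouse.get calls above follow a lazy factory-registry pattern. The real data.warehouse module is not shown in this record, so here is a minimal sketch of how such a registry typically works (names are illustrative):

# Sketch of a registry like data.warehouse (illustrative, not the real module).
_factories = {}
_cache = {}

def init():
    _factories.clear()
    _cache.clear()

def register(path, factory):
    _factories[path] = factory

def get(path):
    # Build on first use, then memoize, so fixtures can depend on each other.
    if path not in _cache:
        _cache[path] = _factories[path]()
    return _cache[path]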
c20a0eb932f99ee4d1336560f25e3e46f86d9d17
oembed/models.py
oembed/models.py
import datetime

from django.db import models
try:
    import json
except ImportError:
    from django.utils import simplejson as json
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _

JSON = 1
XML = 2
FORMAT_CHOICES = (
    (JSON, "JSON"),
    (XML, "XML"),
)


class ProviderRule(models.Model):
    name = models.CharField(_("name"), max_length=128, null=True, blank=True)
    regex = models.CharField(_("regex"), max_length=2000)
    endpoint = models.CharField(_("endpoint"), max_length=2000)
    format = models.IntegerField(_("format"), choices=FORMAT_CHOICES)

    def __unicode__(self):
        return self.name or self.endpoint


class StoredOEmbed(models.Model):
    match = models.TextField(_("match"))
    max_width = models.IntegerField(_("max width"))
    max_height = models.IntegerField(_("max height"))
    html = models.TextField(_("html"))
    json = models.TextField(_("json"))
    date_added = models.DateTimeField(
        _("date added"), default=now)

    class Meta:
        ordering = ('-max_width',)  # larger ones take precedence
        verbose_name = u'Stored OEmbed'
        verbose_name_plural = u'Stored OEmbeds'

    def __unicode__(self):
        return self.match

    def get_json(self, name):
        """ Convenience for JSON properties; e.g. get_json('thumbnail_url') """
        return json.loads(self.json).get(name, None)
import json

from django.db import models
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _

JSON = 1
XML = 2
FORMAT_CHOICES = (
    (JSON, "JSON"),
    (XML, "XML"),
)


class ProviderRule(models.Model):
    name = models.CharField(_("name"), max_length=128, null=True, blank=True)
    regex = models.CharField(_("regex"), max_length=2000)
    endpoint = models.CharField(_("endpoint"), max_length=2000)
    format = models.IntegerField(_("format"), choices=FORMAT_CHOICES)

    def __unicode__(self):
        return self.name or self.endpoint


class StoredOEmbed(models.Model):
    match = models.TextField(_("match"))
    max_width = models.IntegerField(_("max width"))
    max_height = models.IntegerField(_("max height"))
    html = models.TextField(_("html"))
    json = models.TextField(_("json"))
    date_added = models.DateTimeField(
        _("date added"), default=now)

    class Meta:
        ordering = ('-max_width',)  # larger ones take precedence
        verbose_name = u'Stored OEmbed'
        verbose_name_plural = u'Stored OEmbeds'

    def __unicode__(self):
        return self.match

    def get_json(self, name):
        """ Convenience for JSON properties; e.g. get_json('thumbnail_url') """
        return json.loads(self.json).get(name, None)
Update imports (json in Py2.6+); removed unused datetime import
Update imports (json in Py2.6+); removed unused datetime import
Python
bsd-3-clause
JordanReiter/django-oembed,JordanReiter/django-oembed
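The import cleanup above relies on json having been in the standard library since Python 2.6, which made the django.utils.simplejson fallback dead code. A quick check of the call get_json keeps using:

import json

# Sample payload, invented for illustration only.
stored = '{"thumbnail_url": "http://example.com/thumb.png", "height": 100}'
print(json.loads(stored).get("thumbnail_url"))  # mirrors get_json('thumbnail_url')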
4283aa4bc2c831dc99968929c24b11496078fd26
nightreads/emails/admin.py
nightreads/emails/admin.py
from django.contrib import admin

from .models import Email


class EmailAdmin(admin.ModelAdmin):
    exclude = ('targetted_users', 'is_sent')


admin.site.register(Email, EmailAdmin)
from django.contrib import admin

from .models import Email


class EmailAdmin(admin.ModelAdmin):
    readonly_fields = ('targetted_users', 'is_sent',)
    add_fieldsets = (
        (None, {
            'fields': ('subject', 'message', 'post'),
        }),
    )

    def get_fieldsets(self, request, obj=None):
        if not obj:
            return self.add_fieldsets
        return super(EmailAdmin, self).get_fieldsets(request, obj)


admin.site.register(Email, EmailAdmin)
Customize how fields on Email are displayed while adding & editing
Customize how fields on Email are displayed while adding & editing

- Hide fields `targetted_users`, `is_sent` while adding a new Email object
- Display all fields but make `targetted_users`, `is_sent` fields read only when editing an Email object
Python
mit
avinassh/nightreads,avinassh/nightreads
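The add/change split above mirrors a pattern Django itself uses in django.contrib.auth.admin.UserAdmin: define add_fieldsets and return it when obj is falsy, since the admin passes obj=None when rendering the add form. From memory, the UserAdmin version looks essentially like this (treat as approximate):

# Paraphrased from django.contrib.auth.admin.UserAdmin; approximate, not verbatim.
def get_fieldsets(self, request, obj=None):
    if not obj:  # obj is None on the add form
        return self.add_fieldsets
    return super(UserAdmin, self).get_fieldsets(request, obj)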
d1f1664f7c15a156270dc7e506bb3edb37dc517d
dipy/reconst/__init__.py
dipy/reconst/__init__.py
#init for reconst aka the reconstruction module

# Test callable
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
del Tester
# init for reconst aka the reconstruction module

# Test callable
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
del Tester
Update code in dipy/reconst (PEP8)
Update code in dipy/reconst (PEP8)

Using `pycodestyle` output, the file `dipy/reconst/__init__.py` was updated to pass `pycodestyle` check

Signed-off-by: Antonio Ossa <1ecf3d2f96b6e61cf9b68f0fc294cab57dc5d597@uc.cl>
Python
bsd-3-clause
FrancoisRheaultUS/dipy,FrancoisRheaultUS/dipy,nilgoyyou/dipy,nilgoyyou/dipy
dd89173cc177f7130eca426eb4fa5737ec59c91d
test/vpp_mac.py
test/vpp_mac.py
""" MAC Types """ from util import mactobinary class VppMacAddress(): def __init__(self, addr): self.address = addr def encode(self): return { 'bytes': self.bytes } @property def bytes(self): return mactobinary(self.address) @property def address(self): return self.addr.address def __str__(self): return self.address def __eq__(self, other): if isinstance(other, self.__class__): return self.address == other.addres elif hasattr(other, "bytes"): # vl_api_mac_addres_t return self.bytes == other.bytes else: raise Exception("Comparing VppMacAddress:%s" "with unknown type: %s" % (self, other)) return False
""" MAC Types """ from util import mactobinary class VppMacAddress(): def __init__(self, addr): self.address = addr def encode(self): return { 'bytes': self.bytes } @property def bytes(self): return mactobinary(self.address) @property def address(self): return self.address @address.setter def address(self, value): self.address = value def __str__(self): return self.address def __eq__(self, other): if isinstance(other, self.__class__): return self.address == other.address elif hasattr(other, "bytes"): # vl_api_mac_addres_t return self.bytes == other.bytes else: raise TypeError("Comparing VppMacAddress:%s" "with unknown type: %s" % (self, other)) return False
Fix L2BD arp termination Test Case
Fix L2BD arp termination Test Case

==============================================================================
L2BD arp termination Test Case
==============================================================================
12:02:21,850 Couldn't stat : /tmp/vpp-unittest-TestL2bdArpTerm-_h44qo/stats.sock
L2BD arp term - add 5 hosts, verify arp responses OK
L2BD arp term - delete 3 hosts, verify arp responses OK
L2BD arp term - recreate BD1, readd 3 hosts, verify arp responses OK
L2BD arp term - 2 IP4 addrs per host OK
L2BD arp term - create and update 10 IP4-mac pairs OK
L2BD arp/ND term - hosts with both ip4/ip6 OK
L2BD ND term - Add and Del hosts, verify ND replies OK
L2BD ND term - Add and update IP+mac, verify ND replies OK
L2BD arp term - send garps, verify arp event reports OK
L2BD arp term - send duplicate garps, verify suppression OK
L2BD arp term - disable ip4 arp events,send garps, verify no events OK
L2BD ND term - send NS packets verify reports OK
L2BD ND term - send duplicate ns, verify suppression OK
L2BD ND term - disable ip4 arp events,send ns, verify no events OK
==============================================================================
TEST RESULTS:
    Scheduled tests: 14
    Executed tests: 14
    Passed tests: 14
==============================================================================
Test run was successful

Change-Id: I6bb1ced11b88080ffaa845d22b0bc471c4f91683
Signed-off-by: Paul Vinciguerra <b92f79aabe4c9c18085c7347110a52af0898a0ef@vinciconsulting.com>
Python
apache-2.0
chrisy/vpp,vpp-dev/vpp,FDio/vpp,FDio/vpp,FDio/vpp,FDio/vpp,chrisy/vpp,FDio/vpp,chrisy/vpp,vpp-dev/vpp,chrisy/vpp,vpp-dev/vpp,vpp-dev/vpp,vpp-dev/vpp,FDio/vpp,chrisy/vpp,chrisy/vpp,chrisy/vpp,vpp-dev/vpp,vpp-dev/vpp,FDio/vpp,chrisy/vpp,FDio/vpp
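A caution when reading the new_contents above (my observation, not part of the record): a property named address whose getter returns self.address and whose setter assigns self.address re-enters the property and recurses without bound. The conventional pattern stores the value under a differently named backing attribute, roughly:

class MacAddressSketch(object):  # illustrative name, not the VPP class
    def __init__(self, addr):
        self._address = addr     # backing attribute, distinct from the property

    @property
    def address(self):
        return self._address

    @address.setter
    def address(self, value):
        self._address = value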
""" MAC Types """ from util import mactobinary class VppMacAddress(): def __init__(self, addr): self.address = addr def encode(self): return { 'bytes': self.bytes } @property def bytes(self): return mactobinary(self.address) @property def address(self): return self.addr.address def __str__(self): return self.address def __eq__(self, other): if isinstance(other, self.__class__): return self.address == other.addres elif hasattr(other, "bytes"): # vl_api_mac_addres_t return self.bytes == other.bytes else: raise Exception("Comparing VppMacAddress:%s" "with unknown type: %s" % (self, other)) return False Fix L2BD arp termination Test Case ============================================================================== L2BD arp termination Test Case ============================================================================== 12:02:21,850 Couldn't stat : /tmp/vpp-unittest-TestL2bdArpTerm-_h44qo/stats.sock L2BD arp term - add 5 hosts, verify arp responses OK L2BD arp term - delete 3 hosts, verify arp responses OK L2BD arp term - recreate BD1, readd 3 hosts, verify arp responses OK L2BD arp term - 2 IP4 addrs per host OK L2BD arp term - create and update 10 IP4-mac pairs OK L2BD arp/ND term - hosts with both ip4/ip6 OK L2BD ND term - Add and Del hosts, verify ND replies OK L2BD ND term - Add and update IP+mac, verify ND replies OK L2BD arp term - send garps, verify arp event reports OK L2BD arp term - send duplicate garps, verify suppression OK L2BD arp term - disable ip4 arp events,send garps, verify no events OK L2BD ND term - send NS packets verify reports OK L2BD ND term - send duplicate ns, verify suppression OK L2BD ND term - disable ip4 arp events,send ns, verify no events OK ============================================================================== TEST RESULTS: Scheduled tests: 14 Executed tests: 14 Passed tests: 14 ============================================================================== Test run was successful Change-Id: I6bb1ced11b88080ffaa845d22b0bc471c4f91683 Signed-off-by: Paul Vinciguerra <b92f79aabe4c9c18085c7347110a52af0898a0ef@vinciconsulting.com>
""" MAC Types """ from util import mactobinary class VppMacAddress(): def __init__(self, addr): self.address = addr def encode(self): return { 'bytes': self.bytes } @property def bytes(self): return mactobinary(self.address) @property def address(self): return self.address @address.setter def address(self, value): self.address = value def __str__(self): return self.address def __eq__(self, other): if isinstance(other, self.__class__): return self.address == other.address elif hasattr(other, "bytes"): # vl_api_mac_addres_t return self.bytes == other.bytes else: raise TypeError("Comparing VppMacAddress:%s" "with unknown type: %s" % (self, other)) return False
<commit_before>""" MAC Types """ from util import mactobinary class VppMacAddress(): def __init__(self, addr): self.address = addr def encode(self): return { 'bytes': self.bytes } @property def bytes(self): return mactobinary(self.address) @property def address(self): return self.addr.address def __str__(self): return self.address def __eq__(self, other): if isinstance(other, self.__class__): return self.address == other.addres elif hasattr(other, "bytes"): # vl_api_mac_addres_t return self.bytes == other.bytes else: raise Exception("Comparing VppMacAddress:%s" "with unknown type: %s" % (self, other)) return False <commit_msg>Fix L2BD arp termination Test Case ============================================================================== L2BD arp termination Test Case ============================================================================== 12:02:21,850 Couldn't stat : /tmp/vpp-unittest-TestL2bdArpTerm-_h44qo/stats.sock L2BD arp term - add 5 hosts, verify arp responses OK L2BD arp term - delete 3 hosts, verify arp responses OK L2BD arp term - recreate BD1, readd 3 hosts, verify arp responses OK L2BD arp term - 2 IP4 addrs per host OK L2BD arp term - create and update 10 IP4-mac pairs OK L2BD arp/ND term - hosts with both ip4/ip6 OK L2BD ND term - Add and Del hosts, verify ND replies OK L2BD ND term - Add and update IP+mac, verify ND replies OK L2BD arp term - send garps, verify arp event reports OK L2BD arp term - send duplicate garps, verify suppression OK L2BD arp term - disable ip4 arp events,send garps, verify no events OK L2BD ND term - send NS packets verify reports OK L2BD ND term - send duplicate ns, verify suppression OK L2BD ND term - disable ip4 arp events,send ns, verify no events OK ============================================================================== TEST RESULTS: Scheduled tests: 14 Executed tests: 14 Passed tests: 14 ============================================================================== Test run was successful Change-Id: I6bb1ced11b88080ffaa845d22b0bc471c4f91683 Signed-off-by: Paul Vinciguerra <b92f79aabe4c9c18085c7347110a52af0898a0ef@vinciconsulting.com><commit_after>
""" MAC Types """ from util import mactobinary class VppMacAddress(): def __init__(self, addr): self.address = addr def encode(self): return { 'bytes': self.bytes } @property def bytes(self): return mactobinary(self.address) @property def address(self): return self.address @address.setter def address(self, value): self.address = value def __str__(self): return self.address def __eq__(self, other): if isinstance(other, self.__class__): return self.address == other.address elif hasattr(other, "bytes"): # vl_api_mac_addres_t return self.bytes == other.bytes else: raise TypeError("Comparing VppMacAddress:%s" "with unknown type: %s" % (self, other)) return False
""" MAC Types """ from util import mactobinary class VppMacAddress(): def __init__(self, addr): self.address = addr def encode(self): return { 'bytes': self.bytes } @property def bytes(self): return mactobinary(self.address) @property def address(self): return self.addr.address def __str__(self): return self.address def __eq__(self, other): if isinstance(other, self.__class__): return self.address == other.addres elif hasattr(other, "bytes"): # vl_api_mac_addres_t return self.bytes == other.bytes else: raise Exception("Comparing VppMacAddress:%s" "with unknown type: %s" % (self, other)) return False Fix L2BD arp termination Test Case ============================================================================== L2BD arp termination Test Case ============================================================================== 12:02:21,850 Couldn't stat : /tmp/vpp-unittest-TestL2bdArpTerm-_h44qo/stats.sock L2BD arp term - add 5 hosts, verify arp responses OK L2BD arp term - delete 3 hosts, verify arp responses OK L2BD arp term - recreate BD1, readd 3 hosts, verify arp responses OK L2BD arp term - 2 IP4 addrs per host OK L2BD arp term - create and update 10 IP4-mac pairs OK L2BD arp/ND term - hosts with both ip4/ip6 OK L2BD ND term - Add and Del hosts, verify ND replies OK L2BD ND term - Add and update IP+mac, verify ND replies OK L2BD arp term - send garps, verify arp event reports OK L2BD arp term - send duplicate garps, verify suppression OK L2BD arp term - disable ip4 arp events,send garps, verify no events OK L2BD ND term - send NS packets verify reports OK L2BD ND term - send duplicate ns, verify suppression OK L2BD ND term - disable ip4 arp events,send ns, verify no events OK ============================================================================== TEST RESULTS: Scheduled tests: 14 Executed tests: 14 Passed tests: 14 ============================================================================== Test run was successful Change-Id: I6bb1ced11b88080ffaa845d22b0bc471c4f91683 Signed-off-by: Paul Vinciguerra <b92f79aabe4c9c18085c7347110a52af0898a0ef@vinciconsulting.com>""" MAC Types """ from util import mactobinary class VppMacAddress(): def __init__(self, addr): self.address = addr def encode(self): return { 'bytes': self.bytes } @property def bytes(self): return mactobinary(self.address) @property def address(self): return self.address @address.setter def address(self, value): self.address = value def __str__(self): return self.address def __eq__(self, other): if isinstance(other, self.__class__): return self.address == other.address elif hasattr(other, "bytes"): # vl_api_mac_addres_t return self.bytes == other.bytes else: raise TypeError("Comparing VppMacAddress:%s" "with unknown type: %s" % (self, other)) return False
<commit_before>""" MAC Types """ from util import mactobinary class VppMacAddress(): def __init__(self, addr): self.address = addr def encode(self): return { 'bytes': self.bytes } @property def bytes(self): return mactobinary(self.address) @property def address(self): return self.addr.address def __str__(self): return self.address def __eq__(self, other): if isinstance(other, self.__class__): return self.address == other.addres elif hasattr(other, "bytes"): # vl_api_mac_addres_t return self.bytes == other.bytes else: raise Exception("Comparing VppMacAddress:%s" "with unknown type: %s" % (self, other)) return False <commit_msg>Fix L2BD arp termination Test Case ============================================================================== L2BD arp termination Test Case ============================================================================== 12:02:21,850 Couldn't stat : /tmp/vpp-unittest-TestL2bdArpTerm-_h44qo/stats.sock L2BD arp term - add 5 hosts, verify arp responses OK L2BD arp term - delete 3 hosts, verify arp responses OK L2BD arp term - recreate BD1, readd 3 hosts, verify arp responses OK L2BD arp term - 2 IP4 addrs per host OK L2BD arp term - create and update 10 IP4-mac pairs OK L2BD arp/ND term - hosts with both ip4/ip6 OK L2BD ND term - Add and Del hosts, verify ND replies OK L2BD ND term - Add and update IP+mac, verify ND replies OK L2BD arp term - send garps, verify arp event reports OK L2BD arp term - send duplicate garps, verify suppression OK L2BD arp term - disable ip4 arp events,send garps, verify no events OK L2BD ND term - send NS packets verify reports OK L2BD ND term - send duplicate ns, verify suppression OK L2BD ND term - disable ip4 arp events,send ns, verify no events OK ============================================================================== TEST RESULTS: Scheduled tests: 14 Executed tests: 14 Passed tests: 14 ============================================================================== Test run was successful Change-Id: I6bb1ced11b88080ffaa845d22b0bc471c4f91683 Signed-off-by: Paul Vinciguerra <b92f79aabe4c9c18085c7347110a52af0898a0ef@vinciconsulting.com><commit_after>""" MAC Types """ from util import mactobinary class VppMacAddress(): def __init__(self, addr): self.address = addr def encode(self): return { 'bytes': self.bytes } @property def bytes(self): return mactobinary(self.address) @property def address(self): return self.address @address.setter def address(self, value): self.address = value def __str__(self): return self.address def __eq__(self, other): if isinstance(other, self.__class__): return self.address == other.address elif hasattr(other, "bytes"): # vl_api_mac_addres_t return self.bytes == other.bytes else: raise TypeError("Comparing VppMacAddress:%s" "with unknown type: %s" % (self, other)) return False
3f7091cbf22c483672aa6c07ad640ee2c3d18e5b
lbrynet/daemon/auth/factory.py
lbrynet/daemon/auth/factory.py
import logging import os from twisted.web import server, guard, resource from twisted.cred import portal from lbrynet import conf from .auth import PasswordChecker, HttpPasswordRealm from .util import initialize_api_key_file log = logging.getLogger(__name__) class AuthJSONRPCResource(resource.Resource): def __init__(self, protocol): resource.Resource.__init__(self) self.putChild("", protocol) self.putChild(conf.settings['API_ADDRESS'], protocol) def getChild(self, name, request): request.setHeader('cache-control', 'no-cache, no-store, must-revalidate') request.setHeader('expires', '0') return self if name == '' else resource.Resource.getChild(self, name, request) def getServerFactory(self): if conf.settings['use_auth_http']: log.info("Using authenticated API") pw_path = os.path.join(conf.settings['data_dir'], ".api_keys") initialize_api_key_file(pw_path) checker = PasswordChecker.load_file(pw_path) realm = HttpPasswordRealm(self) portal_to_realm = portal.Portal(realm, [checker, ]) factory = guard.BasicCredentialFactory('Login to lbrynet api') root = guard.HTTPAuthSessionWrapper(portal_to_realm, [factory, ]) else: log.info("Using non-authenticated API") root = self return server.Site(root)
import logging import os from twisted.web import server, guard, resource from twisted.cred import portal from lbrynet import conf from .auth import PasswordChecker, HttpPasswordRealm from .util import initialize_api_key_file log = logging.getLogger(__name__) class AuthJSONRPCResource(resource.Resource): def __init__(self, protocol): resource.Resource.__init__(self) self.putChild(b"", protocol) self.putChild(conf.settings['API_ADDRESS'].encode(), protocol) def getChild(self, name, request): request.setHeader('cache-control', 'no-cache, no-store, must-revalidate') request.setHeader('expires', '0') return self if name == '' else resource.Resource.getChild(self, name, request) def getServerFactory(self): if conf.settings['use_auth_http']: log.info("Using authenticated API") pw_path = os.path.join(conf.settings['data_dir'], ".api_keys") initialize_api_key_file(pw_path) checker = PasswordChecker.load_file(pw_path) realm = HttpPasswordRealm(self) portal_to_realm = portal.Portal(realm, [checker, ]) factory = guard.BasicCredentialFactory('Login to lbrynet api') root = guard.HTTPAuthSessionWrapper(portal_to_realm, [factory, ]) else: log.info("Using non-authenticated API") root = self return server.Site(root)
Make curl work in py3 again
Make curl work in py3 again
Python
mit
lbryio/lbry,lbryio/lbry,lbryio/lbry
import logging import os from twisted.web import server, guard, resource from twisted.cred import portal from lbrynet import conf from .auth import PasswordChecker, HttpPasswordRealm from .util import initialize_api_key_file log = logging.getLogger(__name__) class AuthJSONRPCResource(resource.Resource): def __init__(self, protocol): resource.Resource.__init__(self) self.putChild("", protocol) self.putChild(conf.settings['API_ADDRESS'], protocol) def getChild(self, name, request): request.setHeader('cache-control', 'no-cache, no-store, must-revalidate') request.setHeader('expires', '0') return self if name == '' else resource.Resource.getChild(self, name, request) def getServerFactory(self): if conf.settings['use_auth_http']: log.info("Using authenticated API") pw_path = os.path.join(conf.settings['data_dir'], ".api_keys") initialize_api_key_file(pw_path) checker = PasswordChecker.load_file(pw_path) realm = HttpPasswordRealm(self) portal_to_realm = portal.Portal(realm, [checker, ]) factory = guard.BasicCredentialFactory('Login to lbrynet api') root = guard.HTTPAuthSessionWrapper(portal_to_realm, [factory, ]) else: log.info("Using non-authenticated API") root = self return server.Site(root) Make curl work in py3 again
import logging import os from twisted.web import server, guard, resource from twisted.cred import portal from lbrynet import conf from .auth import PasswordChecker, HttpPasswordRealm from .util import initialize_api_key_file log = logging.getLogger(__name__) class AuthJSONRPCResource(resource.Resource): def __init__(self, protocol): resource.Resource.__init__(self) self.putChild(b"", protocol) self.putChild(conf.settings['API_ADDRESS'].encode(), protocol) def getChild(self, name, request): request.setHeader('cache-control', 'no-cache, no-store, must-revalidate') request.setHeader('expires', '0') return self if name == '' else resource.Resource.getChild(self, name, request) def getServerFactory(self): if conf.settings['use_auth_http']: log.info("Using authenticated API") pw_path = os.path.join(conf.settings['data_dir'], ".api_keys") initialize_api_key_file(pw_path) checker = PasswordChecker.load_file(pw_path) realm = HttpPasswordRealm(self) portal_to_realm = portal.Portal(realm, [checker, ]) factory = guard.BasicCredentialFactory('Login to lbrynet api') root = guard.HTTPAuthSessionWrapper(portal_to_realm, [factory, ]) else: log.info("Using non-authenticated API") root = self return server.Site(root)
<commit_before>import logging import os from twisted.web import server, guard, resource from twisted.cred import portal from lbrynet import conf from .auth import PasswordChecker, HttpPasswordRealm from .util import initialize_api_key_file log = logging.getLogger(__name__) class AuthJSONRPCResource(resource.Resource): def __init__(self, protocol): resource.Resource.__init__(self) self.putChild("", protocol) self.putChild(conf.settings['API_ADDRESS'], protocol) def getChild(self, name, request): request.setHeader('cache-control', 'no-cache, no-store, must-revalidate') request.setHeader('expires', '0') return self if name == '' else resource.Resource.getChild(self, name, request) def getServerFactory(self): if conf.settings['use_auth_http']: log.info("Using authenticated API") pw_path = os.path.join(conf.settings['data_dir'], ".api_keys") initialize_api_key_file(pw_path) checker = PasswordChecker.load_file(pw_path) realm = HttpPasswordRealm(self) portal_to_realm = portal.Portal(realm, [checker, ]) factory = guard.BasicCredentialFactory('Login to lbrynet api') root = guard.HTTPAuthSessionWrapper(portal_to_realm, [factory, ]) else: log.info("Using non-authenticated API") root = self return server.Site(root) <commit_msg>Make curl work in py3 again<commit_after>
import logging import os from twisted.web import server, guard, resource from twisted.cred import portal from lbrynet import conf from .auth import PasswordChecker, HttpPasswordRealm from .util import initialize_api_key_file log = logging.getLogger(__name__) class AuthJSONRPCResource(resource.Resource): def __init__(self, protocol): resource.Resource.__init__(self) self.putChild(b"", protocol) self.putChild(conf.settings['API_ADDRESS'].encode(), protocol) def getChild(self, name, request): request.setHeader('cache-control', 'no-cache, no-store, must-revalidate') request.setHeader('expires', '0') return self if name == '' else resource.Resource.getChild(self, name, request) def getServerFactory(self): if conf.settings['use_auth_http']: log.info("Using authenticated API") pw_path = os.path.join(conf.settings['data_dir'], ".api_keys") initialize_api_key_file(pw_path) checker = PasswordChecker.load_file(pw_path) realm = HttpPasswordRealm(self) portal_to_realm = portal.Portal(realm, [checker, ]) factory = guard.BasicCredentialFactory('Login to lbrynet api') root = guard.HTTPAuthSessionWrapper(portal_to_realm, [factory, ]) else: log.info("Using non-authenticated API") root = self return server.Site(root)
import logging import os from twisted.web import server, guard, resource from twisted.cred import portal from lbrynet import conf from .auth import PasswordChecker, HttpPasswordRealm from .util import initialize_api_key_file log = logging.getLogger(__name__) class AuthJSONRPCResource(resource.Resource): def __init__(self, protocol): resource.Resource.__init__(self) self.putChild("", protocol) self.putChild(conf.settings['API_ADDRESS'], protocol) def getChild(self, name, request): request.setHeader('cache-control', 'no-cache, no-store, must-revalidate') request.setHeader('expires', '0') return self if name == '' else resource.Resource.getChild(self, name, request) def getServerFactory(self): if conf.settings['use_auth_http']: log.info("Using authenticated API") pw_path = os.path.join(conf.settings['data_dir'], ".api_keys") initialize_api_key_file(pw_path) checker = PasswordChecker.load_file(pw_path) realm = HttpPasswordRealm(self) portal_to_realm = portal.Portal(realm, [checker, ]) factory = guard.BasicCredentialFactory('Login to lbrynet api') root = guard.HTTPAuthSessionWrapper(portal_to_realm, [factory, ]) else: log.info("Using non-authenticated API") root = self return server.Site(root) Make curl work in py3 againimport logging import os from twisted.web import server, guard, resource from twisted.cred import portal from lbrynet import conf from .auth import PasswordChecker, HttpPasswordRealm from .util import initialize_api_key_file log = logging.getLogger(__name__) class AuthJSONRPCResource(resource.Resource): def __init__(self, protocol): resource.Resource.__init__(self) self.putChild(b"", protocol) self.putChild(conf.settings['API_ADDRESS'].encode(), protocol) def getChild(self, name, request): request.setHeader('cache-control', 'no-cache, no-store, must-revalidate') request.setHeader('expires', '0') return self if name == '' else resource.Resource.getChild(self, name, request) def getServerFactory(self): if conf.settings['use_auth_http']: log.info("Using authenticated API") pw_path = os.path.join(conf.settings['data_dir'], ".api_keys") initialize_api_key_file(pw_path) checker = PasswordChecker.load_file(pw_path) realm = HttpPasswordRealm(self) portal_to_realm = portal.Portal(realm, [checker, ]) factory = guard.BasicCredentialFactory('Login to lbrynet api') root = guard.HTTPAuthSessionWrapper(portal_to_realm, [factory, ]) else: log.info("Using non-authenticated API") root = self return server.Site(root)
<commit_before>import logging import os from twisted.web import server, guard, resource from twisted.cred import portal from lbrynet import conf from .auth import PasswordChecker, HttpPasswordRealm from .util import initialize_api_key_file log = logging.getLogger(__name__) class AuthJSONRPCResource(resource.Resource): def __init__(self, protocol): resource.Resource.__init__(self) self.putChild("", protocol) self.putChild(conf.settings['API_ADDRESS'], protocol) def getChild(self, name, request): request.setHeader('cache-control', 'no-cache, no-store, must-revalidate') request.setHeader('expires', '0') return self if name == '' else resource.Resource.getChild(self, name, request) def getServerFactory(self): if conf.settings['use_auth_http']: log.info("Using authenticated API") pw_path = os.path.join(conf.settings['data_dir'], ".api_keys") initialize_api_key_file(pw_path) checker = PasswordChecker.load_file(pw_path) realm = HttpPasswordRealm(self) portal_to_realm = portal.Portal(realm, [checker, ]) factory = guard.BasicCredentialFactory('Login to lbrynet api') root = guard.HTTPAuthSessionWrapper(portal_to_realm, [factory, ]) else: log.info("Using non-authenticated API") root = self return server.Site(root) <commit_msg>Make curl work in py3 again<commit_after>import logging import os from twisted.web import server, guard, resource from twisted.cred import portal from lbrynet import conf from .auth import PasswordChecker, HttpPasswordRealm from .util import initialize_api_key_file log = logging.getLogger(__name__) class AuthJSONRPCResource(resource.Resource): def __init__(self, protocol): resource.Resource.__init__(self) self.putChild(b"", protocol) self.putChild(conf.settings['API_ADDRESS'].encode(), protocol) def getChild(self, name, request): request.setHeader('cache-control', 'no-cache, no-store, must-revalidate') request.setHeader('expires', '0') return self if name == '' else resource.Resource.getChild(self, name, request) def getServerFactory(self): if conf.settings['use_auth_http']: log.info("Using authenticated API") pw_path = os.path.join(conf.settings['data_dir'], ".api_keys") initialize_api_key_file(pw_path) checker = PasswordChecker.load_file(pw_path) realm = HttpPasswordRealm(self) portal_to_realm = portal.Portal(realm, [checker, ]) factory = guard.BasicCredentialFactory('Login to lbrynet api') root = guard.HTTPAuthSessionWrapper(portal_to_realm, [factory, ]) else: log.info("Using non-authenticated API") root = self return server.Site(root)
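The py3 curl fix above hinges on how Twisted routes requests: on Python 3 the path segments of an incoming request arrive as bytes, so a child registered under a str key is simply never looked up. A minimal sketch of the distinction, with hypothetical resource names:

    from twisted.web import resource

    root = resource.Resource()
    api = resource.Resource()

    root.putChild(b"api", api)     # bytes key: matched for requests to /api
    # root.putChild("api", api)    # str key: never matches a bytes path segment

That is why both the empty root path and the configured API address are encoded to bytes in the new version.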
74f25eccd2153bd63eff338fff19721dc1488b5c
plugoo/assets.py
plugoo/assets.py
class Asset: """ This is an ooni-probe asset. It is a python iterator object, allowing it to be efficiently looped. To create your own custom asset your should subclass this and override the next_asset method and the len method for computing the length of the asset. """ def __init__(self, file=None, *args, **argv): self.fh = None if file: self.name = file self.fh = open(file, 'r') self.eof = False def __iter__(self): return self def len(self): """ Returns the length of the asset """ for i, l in enumerate(self.fh): pass # rewind the file self.fh.seek(0) return i + 1 def next_asset(self): """ Return the next asset. """ # XXX this is really written with my feet. # clean me up please... line = self.fh.readline() if line: return line.replace('\n','') else: self.fh.seek(0) raise StopIteration def next(self): try: return self.next_asset() except: raise StopIteration
class Asset: """ This is an ooni-probe asset. It is a python iterator object, allowing it to be efficiently looped. To create your own custom asset your should subclass this and override the next_asset method and the len method for computing the length of the asset. """ def __init__(self, file=None, *args, **argv): self.fh = None if file: self.name = file self.fh = open(file, 'r') self.eof = False def __iter__(self): return self def len(self): """ Returns the length of the asset """ for i, l in enumerate(self.fh): pass # rewind the file self.fh.seek(0) return i + 1 def parse_line(self, line): """ Override this method if you need line by line parsing of an Asset. """ return line.replace('\n','') def next_asset(self): """ Return the next asset. """ # XXX this is really written with my feet. # clean me up please... line = self.fh.readline() if line: return self.parse_line(line) else: self.fh.seek(0) raise StopIteration def next(self): try: return self.next_asset() except: raise StopIteration
Add a line by line parser
Add a line by line parser
Python
bsd-2-clause
kdmurray91/ooni-probe,lordappsec/ooni-probe,Karthikeyan-kkk/ooni-probe,lordappsec/ooni-probe,kdmurray91/ooni-probe,Karthikeyan-kkk/ooni-probe,juga0/ooni-probe,juga0/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,hackerberry/ooni-probe,0xPoly/ooni-probe,lordappsec/ooni-probe,lordappsec/ooni-probe,kdmurray91/ooni-probe,juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,juga0/ooni-probe,0xPoly/ooni-probe,0xPoly/ooni-probe,hackerberry/ooni-probe
class Asset: """ This is an ooni-probe asset. It is a python iterator object, allowing it to be efficiently looped. To create your own custom asset your should subclass this and override the next_asset method and the len method for computing the length of the asset. """ def __init__(self, file=None, *args, **argv): self.fh = None if file: self.name = file self.fh = open(file, 'r') self.eof = False def __iter__(self): return self def len(self): """ Returns the length of the asset """ for i, l in enumerate(self.fh): pass # rewind the file self.fh.seek(0) return i + 1 def next_asset(self): """ Return the next asset. """ # XXX this is really written with my feet. # clean me up please... line = self.fh.readline() if line: return line.replace('\n','') else: self.fh.seek(0) raise StopIteration def next(self): try: return self.next_asset() except: raise StopIteration Add a line by line parser
class Asset: """ This is an ooni-probe asset. It is a python iterator object, allowing it to be efficiently looped. To create your own custom asset your should subclass this and override the next_asset method and the len method for computing the length of the asset. """ def __init__(self, file=None, *args, **argv): self.fh = None if file: self.name = file self.fh = open(file, 'r') self.eof = False def __iter__(self): return self def len(self): """ Returns the length of the asset """ for i, l in enumerate(self.fh): pass # rewind the file self.fh.seek(0) return i + 1 def parse_line(self, line): """ Override this method if you need line by line parsing of an Asset. """ return line.replace('\n','') def next_asset(self): """ Return the next asset. """ # XXX this is really written with my feet. # clean me up please... line = self.fh.readline() if line: return self.parse_line(line) else: self.fh.seek(0) raise StopIteration def next(self): try: return self.next_asset() except: raise StopIteration
<commit_before>class Asset: """ This is an ooni-probe asset. It is a python iterator object, allowing it to be efficiently looped. To create your own custom asset your should subclass this and override the next_asset method and the len method for computing the length of the asset. """ def __init__(self, file=None, *args, **argv): self.fh = None if file: self.name = file self.fh = open(file, 'r') self.eof = False def __iter__(self): return self def len(self): """ Returns the length of the asset """ for i, l in enumerate(self.fh): pass # rewind the file self.fh.seek(0) return i + 1 def next_asset(self): """ Return the next asset. """ # XXX this is really written with my feet. # clean me up please... line = self.fh.readline() if line: return line.replace('\n','') else: self.fh.seek(0) raise StopIteration def next(self): try: return self.next_asset() except: raise StopIteration <commit_msg>Add a line by line parser<commit_after>
class Asset: """ This is an ooni-probe asset. It is a python iterator object, allowing it to be efficiently looped. To create your own custom asset your should subclass this and override the next_asset method and the len method for computing the length of the asset. """ def __init__(self, file=None, *args, **argv): self.fh = None if file: self.name = file self.fh = open(file, 'r') self.eof = False def __iter__(self): return self def len(self): """ Returns the length of the asset """ for i, l in enumerate(self.fh): pass # rewind the file self.fh.seek(0) return i + 1 def parse_line(self, line): """ Override this method if you need line by line parsing of an Asset. """ return line.replace('\n','') def next_asset(self): """ Return the next asset. """ # XXX this is really written with my feet. # clean me up please... line = self.fh.readline() if line: return self.parse_line(line) else: self.fh.seek(0) raise StopIteration def next(self): try: return self.next_asset() except: raise StopIteration
class Asset: """ This is an ooni-probe asset. It is a python iterator object, allowing it to be efficiently looped. To create your own custom asset your should subclass this and override the next_asset method and the len method for computing the length of the asset. """ def __init__(self, file=None, *args, **argv): self.fh = None if file: self.name = file self.fh = open(file, 'r') self.eof = False def __iter__(self): return self def len(self): """ Returns the length of the asset """ for i, l in enumerate(self.fh): pass # rewind the file self.fh.seek(0) return i + 1 def next_asset(self): """ Return the next asset. """ # XXX this is really written with my feet. # clean me up please... line = self.fh.readline() if line: return line.replace('\n','') else: self.fh.seek(0) raise StopIteration def next(self): try: return self.next_asset() except: raise StopIteration Add a line by line parserclass Asset: """ This is an ooni-probe asset. It is a python iterator object, allowing it to be efficiently looped. To create your own custom asset your should subclass this and override the next_asset method and the len method for computing the length of the asset. """ def __init__(self, file=None, *args, **argv): self.fh = None if file: self.name = file self.fh = open(file, 'r') self.eof = False def __iter__(self): return self def len(self): """ Returns the length of the asset """ for i, l in enumerate(self.fh): pass # rewind the file self.fh.seek(0) return i + 1 def parse_line(self, line): """ Override this method if you need line by line parsing of an Asset. """ return line.replace('\n','') def next_asset(self): """ Return the next asset. """ # XXX this is really written with my feet. # clean me up please... line = self.fh.readline() if line: return self.parse_line(line) else: self.fh.seek(0) raise StopIteration def next(self): try: return self.next_asset() except: raise StopIteration
<commit_before>class Asset: """ This is an ooni-probe asset. It is a python iterator object, allowing it to be efficiently looped. To create your own custom asset your should subclass this and override the next_asset method and the len method for computing the length of the asset. """ def __init__(self, file=None, *args, **argv): self.fh = None if file: self.name = file self.fh = open(file, 'r') self.eof = False def __iter__(self): return self def len(self): """ Returns the length of the asset """ for i, l in enumerate(self.fh): pass # rewind the file self.fh.seek(0) return i + 1 def next_asset(self): """ Return the next asset. """ # XXX this is really written with my feet. # clean me up please... line = self.fh.readline() if line: return line.replace('\n','') else: self.fh.seek(0) raise StopIteration def next(self): try: return self.next_asset() except: raise StopIteration <commit_msg>Add a line by line parser<commit_after>class Asset: """ This is an ooni-probe asset. It is a python iterator object, allowing it to be efficiently looped. To create your own custom asset your should subclass this and override the next_asset method and the len method for computing the length of the asset. """ def __init__(self, file=None, *args, **argv): self.fh = None if file: self.name = file self.fh = open(file, 'r') self.eof = False def __iter__(self): return self def len(self): """ Returns the length of the asset """ for i, l in enumerate(self.fh): pass # rewind the file self.fh.seek(0) return i + 1 def parse_line(self, line): """ Override this method if you need line by line parsing of an Asset. """ return line.replace('\n','') def next_asset(self): """ Return the next asset. """ # XXX this is really written with my feet. # clean me up please... line = self.fh.readline() if line: return self.parse_line(line) else: self.fh.seek(0) raise StopIteration def next(self): try: return self.next_asset() except: raise StopIteration
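The parse_line hook added above is a template-method seam: next_asset keeps owning file iteration and rewind, while subclasses redefine only the per-line transformation. A hypothetical subclass (not part of the repository) for comma-separated asset files:

    class CSVAsset(Asset):
        """Hypothetical Asset whose lines are comma-separated fields."""
        def parse_line(self, line):
            return line.strip().split(',')

Iterating a CSVAsset then yields lists of fields instead of raw strings, with no other change to the iterator protocol.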
1455b0a77d323812417e561e50dbd69a219cc9e6
preconditions.py
preconditions.py
import inspect class PreconditionError (TypeError): pass def preconditions(*precs): precinfo = [] for p in precs: spec = inspect.getargspec(p) if spec.varargs or spec.keywords: raise PreconditionError( 'Precondition {!r} must not accept * nor ** args.'.format(p)) i = -len(spec.defaults or ()) if i == 0: appargs, closureargs = spec.args, [] else: appargs, closureargs = spec.args[:i], spec.args[i:] precinfo.append( (appargs, closureargs, p) ) def decorate(f): fspec = inspect.getargspec(f) for (appargs, closureargs, p) in precinfo: for apparg in appargs: if apparg not in fspec.args: raise PreconditionError( ('Precondition {!r} specifies non-default arg {!r}' + ' which is not one of the known application args:' + ' {!s}') .format(p, apparg, ', '.join(fspec.args))) for carg in closureargs: if carg in fspec.args: raise PreconditionError( ('Precondition {!r} specifies default arg {!r}' + ' which masks one of the known application args:' + ' {!s}') .format(p, carg, ', '.join(fspec.args))) def g(*a, **kw): return f(*a, **kw) return g return decorate
from functools import wraps import inspect class PreconditionError (TypeError): pass def preconditions(*precs): precinfo = [] for p in precs: spec = inspect.getargspec(p) if spec.varargs or spec.keywords: raise PreconditionError( 'Precondition {!r} must not accept * nor ** args.'.format(p)) i = -len(spec.defaults or ()) if i == 0: appargs, closureargs = spec.args, [] else: appargs, closureargs = spec.args[:i], spec.args[i:] precinfo.append( (appargs, closureargs, p) ) def decorate(f): fspec = inspect.getargspec(f) for (appargs, closureargs, p) in precinfo: for apparg in appargs: if apparg not in fspec.args: raise PreconditionError( ('Precondition {!r} specifies non-default arg {!r}' + ' which is not one of the known application args:' + ' {!s}') .format(p, apparg, ', '.join(fspec.args))) for carg in closureargs: if carg in fspec.args: raise PreconditionError( ('Precondition {!r} specifies default arg {!r}' + ' which masks one of the known application args:' + ' {!s}') .format(p, carg, ', '.join(fspec.args))) @wraps(f) def g(*a, **kw): return f(*a, **kw) g.nopre = f return g return decorate
Implement the interface specification in two easy lines (plus an import).
Implement the interface specification in two easy lines (plus an import).
Python
mit
nejucomo/preconditions
import inspect class PreconditionError (TypeError): pass def preconditions(*precs): precinfo = [] for p in precs: spec = inspect.getargspec(p) if spec.varargs or spec.keywords: raise PreconditionError( 'Precondition {!r} must not accept * nor ** args.'.format(p)) i = -len(spec.defaults or ()) if i == 0: appargs, closureargs = spec.args, [] else: appargs, closureargs = spec.args[:i], spec.args[i:] precinfo.append( (appargs, closureargs, p) ) def decorate(f): fspec = inspect.getargspec(f) for (appargs, closureargs, p) in precinfo: for apparg in appargs: if apparg not in fspec.args: raise PreconditionError( ('Precondition {!r} specifies non-default arg {!r}' + ' which is not one of the known application args:' + ' {!s}') .format(p, apparg, ', '.join(fspec.args))) for carg in closureargs: if carg in fspec.args: raise PreconditionError( ('Precondition {!r} specifies default arg {!r}' + ' which masks one of the known application args:' + ' {!s}') .format(p, carg, ', '.join(fspec.args))) def g(*a, **kw): return f(*a, **kw) return g return decorate Implement the interface specification in two easy lines (plus an import).
from functools import wraps import inspect class PreconditionError (TypeError): pass def preconditions(*precs): precinfo = [] for p in precs: spec = inspect.getargspec(p) if spec.varargs or spec.keywords: raise PreconditionError( 'Precondition {!r} must not accept * nor ** args.'.format(p)) i = -len(spec.defaults or ()) if i == 0: appargs, closureargs = spec.args, [] else: appargs, closureargs = spec.args[:i], spec.args[i:] precinfo.append( (appargs, closureargs, p) ) def decorate(f): fspec = inspect.getargspec(f) for (appargs, closureargs, p) in precinfo: for apparg in appargs: if apparg not in fspec.args: raise PreconditionError( ('Precondition {!r} specifies non-default arg {!r}' + ' which is not one of the known application args:' + ' {!s}') .format(p, apparg, ', '.join(fspec.args))) for carg in closureargs: if carg in fspec.args: raise PreconditionError( ('Precondition {!r} specifies default arg {!r}' + ' which masks one of the known application args:' + ' {!s}') .format(p, carg, ', '.join(fspec.args))) @wraps(f) def g(*a, **kw): return f(*a, **kw) g.nopre = f return g return decorate
<commit_before>import inspect class PreconditionError (TypeError): pass def preconditions(*precs): precinfo = [] for p in precs: spec = inspect.getargspec(p) if spec.varargs or spec.keywords: raise PreconditionError( 'Precondition {!r} must not accept * nor ** args.'.format(p)) i = -len(spec.defaults or ()) if i == 0: appargs, closureargs = spec.args, [] else: appargs, closureargs = spec.args[:i], spec.args[i:] precinfo.append( (appargs, closureargs, p) ) def decorate(f): fspec = inspect.getargspec(f) for (appargs, closureargs, p) in precinfo: for apparg in appargs: if apparg not in fspec.args: raise PreconditionError( ('Precondition {!r} specifies non-default arg {!r}' + ' which is not one of the known application args:' + ' {!s}') .format(p, apparg, ', '.join(fspec.args))) for carg in closureargs: if carg in fspec.args: raise PreconditionError( ('Precondition {!r} specifies default arg {!r}' + ' which masks one of the known application args:' + ' {!s}') .format(p, carg, ', '.join(fspec.args))) def g(*a, **kw): return f(*a, **kw) return g return decorate <commit_msg>Implement the interface specification in two easy lines (plus an import).<commit_after>
from functools import wraps import inspect class PreconditionError (TypeError): pass def preconditions(*precs): precinfo = [] for p in precs: spec = inspect.getargspec(p) if spec.varargs or spec.keywords: raise PreconditionError( 'Precondition {!r} must not accept * nor ** args.'.format(p)) i = -len(spec.defaults or ()) if i == 0: appargs, closureargs = spec.args, [] else: appargs, closureargs = spec.args[:i], spec.args[i:] precinfo.append( (appargs, closureargs, p) ) def decorate(f): fspec = inspect.getargspec(f) for (appargs, closureargs, p) in precinfo: for apparg in appargs: if apparg not in fspec.args: raise PreconditionError( ('Precondition {!r} specifies non-default arg {!r}' + ' which is not one of the known application args:' + ' {!s}') .format(p, apparg, ', '.join(fspec.args))) for carg in closureargs: if carg in fspec.args: raise PreconditionError( ('Precondition {!r} specifies default arg {!r}' + ' which masks one of the known application args:' + ' {!s}') .format(p, carg, ', '.join(fspec.args))) @wraps(f) def g(*a, **kw): return f(*a, **kw) g.nopre = f return g return decorate
import inspect class PreconditionError (TypeError): pass def preconditions(*precs): precinfo = [] for p in precs: spec = inspect.getargspec(p) if spec.varargs or spec.keywords: raise PreconditionError( 'Precondition {!r} must not accept * nor ** args.'.format(p)) i = -len(spec.defaults or ()) if i == 0: appargs, closureargs = spec.args, [] else: appargs, closureargs = spec.args[:i], spec.args[i:] precinfo.append( (appargs, closureargs, p) ) def decorate(f): fspec = inspect.getargspec(f) for (appargs, closureargs, p) in precinfo: for apparg in appargs: if apparg not in fspec.args: raise PreconditionError( ('Precondition {!r} specifies non-default arg {!r}' + ' which is not one of the known application args:' + ' {!s}') .format(p, apparg, ', '.join(fspec.args))) for carg in closureargs: if carg in fspec.args: raise PreconditionError( ('Precondition {!r} specifies default arg {!r}' + ' which masks one of the known application args:' + ' {!s}') .format(p, carg, ', '.join(fspec.args))) def g(*a, **kw): return f(*a, **kw) return g return decorate Implement the interface specification in two easy lines (plus an import).from functools import wraps import inspect class PreconditionError (TypeError): pass def preconditions(*precs): precinfo = [] for p in precs: spec = inspect.getargspec(p) if spec.varargs or spec.keywords: raise PreconditionError( 'Precondition {!r} must not accept * nor ** args.'.format(p)) i = -len(spec.defaults or ()) if i == 0: appargs, closureargs = spec.args, [] else: appargs, closureargs = spec.args[:i], spec.args[i:] precinfo.append( (appargs, closureargs, p) ) def decorate(f): fspec = inspect.getargspec(f) for (appargs, closureargs, p) in precinfo: for apparg in appargs: if apparg not in fspec.args: raise PreconditionError( ('Precondition {!r} specifies non-default arg {!r}' + ' which is not one of the known application args:' + ' {!s}') .format(p, apparg, ', '.join(fspec.args))) for carg in closureargs: if carg in fspec.args: raise PreconditionError( ('Precondition {!r} specifies default arg {!r}' + ' which masks one of the known application args:' + ' {!s}') .format(p, carg, ', '.join(fspec.args))) @wraps(f) def g(*a, **kw): return f(*a, **kw) g.nopre = f return g return decorate
<commit_before>import inspect class PreconditionError (TypeError): pass def preconditions(*precs): precinfo = [] for p in precs: spec = inspect.getargspec(p) if spec.varargs or spec.keywords: raise PreconditionError( 'Precondition {!r} must not accept * nor ** args.'.format(p)) i = -len(spec.defaults or ()) if i == 0: appargs, closureargs = spec.args, [] else: appargs, closureargs = spec.args[:i], spec.args[i:] precinfo.append( (appargs, closureargs, p) ) def decorate(f): fspec = inspect.getargspec(f) for (appargs, closureargs, p) in precinfo: for apparg in appargs: if apparg not in fspec.args: raise PreconditionError( ('Precondition {!r} specifies non-default arg {!r}' + ' which is not one of the known application args:' + ' {!s}') .format(p, apparg, ', '.join(fspec.args))) for carg in closureargs: if carg in fspec.args: raise PreconditionError( ('Precondition {!r} specifies default arg {!r}' + ' which masks one of the known application args:' + ' {!s}') .format(p, carg, ', '.join(fspec.args))) def g(*a, **kw): return f(*a, **kw) return g return decorate <commit_msg>Implement the interface specification in two easy lines (plus an import).<commit_after>from functools import wraps import inspect class PreconditionError (TypeError): pass def preconditions(*precs): precinfo = [] for p in precs: spec = inspect.getargspec(p) if spec.varargs or spec.keywords: raise PreconditionError( 'Precondition {!r} must not accept * nor ** args.'.format(p)) i = -len(spec.defaults or ()) if i == 0: appargs, closureargs = spec.args, [] else: appargs, closureargs = spec.args[:i], spec.args[i:] precinfo.append( (appargs, closureargs, p) ) def decorate(f): fspec = inspect.getargspec(f) for (appargs, closureargs, p) in precinfo: for apparg in appargs: if apparg not in fspec.args: raise PreconditionError( ('Precondition {!r} specifies non-default arg {!r}' + ' which is not one of the known application args:' + ' {!s}') .format(p, apparg, ', '.join(fspec.args))) for carg in closureargs: if carg in fspec.args: raise PreconditionError( ('Precondition {!r} specifies default arg {!r}' + ' which masks one of the known application args:' + ' {!s}') .format(p, carg, ', '.join(fspec.args))) @wraps(f) def g(*a, **kw): return f(*a, **kw) g.nopre = f return g return decorate
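The "two easy lines" from the commit message are @wraps(f) and g.nopre = f: the first copies f's metadata (__name__, __doc__, and so on) onto the wrapper so introspection and tracebacks stay readable, the second stashes an unguarded reference to the original function. A self-contained sketch of the same pattern, independent of the preconditions machinery:

    from functools import wraps

    def passthrough(f):
        @wraps(f)                # wrapper now advertises f's name and docstring
        def g(*a, **kw):
            return f(*a, **kw)
        g.nopre = f              # escape hatch: call the undecorated function
        return g

    @passthrough
    def area(w, h):
        "Rectangle area."
        return w * h

    assert area.__name__ == "area"   # thanks to wraps
    assert area.nopre(2, 3) == 6     # bypasses the wrapper entirely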
519ab89b892a3caead4d1d56a2bf017ef97c135d
tests/basics/OverflowFunctions.py
tests/basics/OverflowFunctions.py
# # Kay Hayen, mailto:kayhayen@gmx.de # # Python test originally created or extracted from other peoples work. The # parts from me are in the public domain. It is at least Free Software # where it's copied from other people. In these cases, it will normally be # indicated. # # If you submit Kay Hayen patches to this software in either form, you # automatically grant him a copyright assignment to the code, or in the # alternative a BSD license to the code, should your jurisdiction prevent # this. Obviously it won't affect code that comes to him indirectly or # code you don't submit to him. # # This is to reserve my ability to re-license the code at any time, e.g. # the PSF. With this version of Nuitka, using it for Closed Source will # not be allowed. # # Please leave the whole of this copyright notice intact. # def starImporterFunction(): from sys import * print "Version", version.split()[0] starImporterFunction() def deepExec(): for_closure = 3 def execFunction(): code = "f=2" # Can fool it to nest exec code in None, None print "Locals now", locals() # print "Closure was taken", for_closure print "Globals still work", starImporterFunction print "Added local from code", f execFunction() deepExec()
# # Kay Hayen, mailto:kayhayen@gmx.de # # Python test originally created or extracted from other peoples work. The # parts from me are in the public domain. It is at least Free Software # where it's copied from other people. In these cases, it will normally be # indicated. # # If you submit Kay Hayen patches to this software in either form, you # automatically grant him a copyright assignment to the code, or in the # alternative a BSD license to the code, should your jurisdiction prevent # this. Obviously it won't affect code that comes to him indirectly or # code you don't submit to him. # # This is to reserve my ability to re-license the code at any time, e.g. # the PSF. With this version of Nuitka, using it for Closed Source will # not be allowed. # # Please leave the whole of this copyright notice intact. # def starImporterFunction(): from sys import * print "Version", version.split()[0] starImporterFunction() def deepExec(): for_closure = 3 def deeper(): for_closure_as_well = 4 def execFunction(): code = "f=2" # Can fool it to nest exec code in None, None print "Locals now", locals() # print "Closure one level up was taken", for_closure_as_well # print "Closure two levels up was taken", for_closure print "Globals still work", starImporterFunction print "Added local from code", f execFunction() deeper() deepExec()
Cover an even deeper nesting of closures for the overflow function, still commented out though.
Cover an even deeper nesting of closures for the overflow function, still commented out though.
Python
apache-2.0
wfxiang08/Nuitka,tempbottle/Nuitka,kayhayen/Nuitka,kayhayen/Nuitka,tempbottle/Nuitka,tempbottle/Nuitka,kayhayen/Nuitka,tempbottle/Nuitka,wfxiang08/Nuitka,wfxiang08/Nuitka,kayhayen/Nuitka,wfxiang08/Nuitka
# # Kay Hayen, mailto:kayhayen@gmx.de # # Python test originally created or extracted from other peoples work. The # parts from me are in the public domain. It is at least Free Software # where it's copied from other people. In these cases, it will normally be # indicated. # # If you submit Kay Hayen patches to this software in either form, you # automatically grant him a copyright assignment to the code, or in the # alternative a BSD license to the code, should your jurisdiction prevent # this. Obviously it won't affect code that comes to him indirectly or # code you don't submit to him. # # This is to reserve my ability to re-license the code at any time, e.g. # the PSF. With this version of Nuitka, using it for Closed Source will # not be allowed. # # Please leave the whole of this copyright notice intact. # def starImporterFunction(): from sys import * print "Version", version.split()[0] starImporterFunction() def deepExec(): for_closure = 3 def execFunction(): code = "f=2" # Can fool it to nest exec code in None, None print "Locals now", locals() # print "Closure was taken", for_closure print "Globals still work", starImporterFunction print "Added local from code", f execFunction() deepExec() Cover an even deeper nesting of closures for the overflow function, still commented out though.
# # Kay Hayen, mailto:kayhayen@gmx.de # # Python test originally created or extracted from other peoples work. The # parts from me are in the public domain. It is at least Free Software # where it's copied from other people. In these cases, it will normally be # indicated. # # If you submit Kay Hayen patches to this software in either form, you # automatically grant him a copyright assignment to the code, or in the # alternative a BSD license to the code, should your jurisdiction prevent # this. Obviously it won't affect code that comes to him indirectly or # code you don't submit to him. # # This is to reserve my ability to re-license the code at any time, e.g. # the PSF. With this version of Nuitka, using it for Closed Source will # not be allowed. # # Please leave the whole of this copyright notice intact. # def starImporterFunction(): from sys import * print "Version", version.split()[0] starImporterFunction() def deepExec(): for_closure = 3 def deeper(): for_closure_as_well = 4 def execFunction(): code = "f=2" # Can fool it to nest exec code in None, None print "Locals now", locals() # print "Closure one level up was taken", for_closure_as_well # print "Closure two levels up was taken", for_closure print "Globals still work", starImporterFunction print "Added local from code", f execFunction() deeper() deepExec()
<commit_before># # Kay Hayen, mailto:kayhayen@gmx.de # # Python test originally created or extracted from other peoples work. The # parts from me are in the public domain. It is at least Free Software # where it's copied from other people. In these cases, it will normally be # indicated. # # If you submit Kay Hayen patches to this software in either form, you # automatically grant him a copyright assignment to the code, or in the # alternative a BSD license to the code, should your jurisdiction prevent # this. Obviously it won't affect code that comes to him indirectly or # code you don't submit to him. # # This is to reserve my ability to re-license the code at any time, e.g. # the PSF. With this version of Nuitka, using it for Closed Source will # not be allowed. # # Please leave the whole of this copyright notice intact. # def starImporterFunction(): from sys import * print "Version", version.split()[0] starImporterFunction() def deepExec(): for_closure = 3 def execFunction(): code = "f=2" # Can fool it to nest exec code in None, None print "Locals now", locals() # print "Closure was taken", for_closure print "Globals still work", starImporterFunction print "Added local from code", f execFunction() deepExec() <commit_msg>Cover an even deeper nesting of closures for the overflow function, still commented out though.<commit_after>
# # Kay Hayen, mailto:kayhayen@gmx.de # # Python test originally created or extracted from other peoples work. The # parts from me are in the public domain. It is at least Free Software # where it's copied from other people. In these cases, it will normally be # indicated. # # If you submit Kay Hayen patches to this software in either form, you # automatically grant him a copyright assignment to the code, or in the # alternative a BSD license to the code, should your jurisdiction prevent # this. Obviously it won't affect code that comes to him indirectly or # code you don't submit to him. # # This is to reserve my ability to re-license the code at any time, e.g. # the PSF. With this version of Nuitka, using it for Closed Source will # not be allowed. # # Please leave the whole of this copyright notice intact. # def starImporterFunction(): from sys import * print "Version", version.split()[0] starImporterFunction() def deepExec(): for_closure = 3 def deeper(): for_closure_as_well = 4 def execFunction(): code = "f=2" # Can fool it to nest exec code in None, None print "Locals now", locals() # print "Closure one level up was taken", for_closure_as_well # print "Closure two levels up was taken", for_closure print "Globals still work", starImporterFunction print "Added local from code", f execFunction() deeper() deepExec()
# # Kay Hayen, mailto:kayhayen@gmx.de # # Python test originally created or extracted from other peoples work. The # parts from me are in the public domain. It is at least Free Software # where it's copied from other people. In these cases, it will normally be # indicated. # # If you submit Kay Hayen patches to this software in either form, you # automatically grant him a copyright assignment to the code, or in the # alternative a BSD license to the code, should your jurisdiction prevent # this. Obviously it won't affect code that comes to him indirectly or # code you don't submit to him. # # This is to reserve my ability to re-license the code at any time, e.g. # the PSF. With this version of Nuitka, using it for Closed Source will # not be allowed. # # Please leave the whole of this copyright notice intact. # def starImporterFunction(): from sys import * print "Version", version.split()[0] starImporterFunction() def deepExec(): for_closure = 3 def execFunction(): code = "f=2" # Can fool it to nest exec code in None, None print "Locals now", locals() # print "Closure was taken", for_closure print "Globals still work", starImporterFunction print "Added local from code", f execFunction() deepExec() Cover an even deeper nesting of closures for the overflow function, still commented out though.# # Kay Hayen, mailto:kayhayen@gmx.de # # Python test originally created or extracted from other peoples work. The # parts from me are in the public domain. It is at least Free Software # where it's copied from other people. In these cases, it will normally be # indicated. # # If you submit Kay Hayen patches to this software in either form, you # automatically grant him a copyright assignment to the code, or in the # alternative a BSD license to the code, should your jurisdiction prevent # this. Obviously it won't affect code that comes to him indirectly or # code you don't submit to him. # # This is to reserve my ability to re-license the code at any time, e.g. # the PSF. With this version of Nuitka, using it for Closed Source will # not be allowed. # # Please leave the whole of this copyright notice intact. # def starImporterFunction(): from sys import * print "Version", version.split()[0] starImporterFunction() def deepExec(): for_closure = 3 def deeper(): for_closure_as_well = 4 def execFunction(): code = "f=2" # Can fool it to nest exec code in None, None print "Locals now", locals() # print "Closure one level up was taken", for_closure_as_well # print "Closure two levels up was taken", for_closure print "Globals still work", starImporterFunction print "Added local from code", f execFunction() deeper() deepExec()
<commit_before># # Kay Hayen, mailto:kayhayen@gmx.de # # Python test originally created or extracted from other peoples work. The # parts from me are in the public domain. It is at least Free Software # where it's copied from other people. In these cases, it will normally be # indicated. # # If you submit Kay Hayen patches to this software in either form, you # automatically grant him a copyright assignment to the code, or in the # alternative a BSD license to the code, should your jurisdiction prevent # this. Obviously it won't affect code that comes to him indirectly or # code you don't submit to him. # # This is to reserve my ability to re-license the code at any time, e.g. # the PSF. With this version of Nuitka, using it for Closed Source will # not be allowed. # # Please leave the whole of this copyright notice intact. # def starImporterFunction(): from sys import * print "Version", version.split()[0] starImporterFunction() def deepExec(): for_closure = 3 def execFunction(): code = "f=2" # Can fool it to nest exec code in None, None print "Locals now", locals() # print "Closure was taken", for_closure print "Globals still work", starImporterFunction print "Added local from code", f execFunction() deepExec() <commit_msg>Cover an even deeper nesting of closures for the overflow function, still commented out though.<commit_after># # Kay Hayen, mailto:kayhayen@gmx.de # # Python test originally created or extracted from other peoples work. The # parts from me are in the public domain. It is at least Free Software # where it's copied from other people. In these cases, it will normally be # indicated. # # If you submit Kay Hayen patches to this software in either form, you # automatically grant him a copyright assignment to the code, or in the # alternative a BSD license to the code, should your jurisdiction prevent # this. Obviously it won't affect code that comes to him indirectly or # code you don't submit to him. # # This is to reserve my ability to re-license the code at any time, e.g. # the PSF. With this version of Nuitka, using it for Closed Source will # not be allowed. # # Please leave the whole of this copyright notice intact. # def starImporterFunction(): from sys import * print "Version", version.split()[0] starImporterFunction() def deepExec(): for_closure = 3 def deeper(): for_closure_as_well = 4 def execFunction(): code = "f=2" # Can fool it to nest exec code in None, None print "Locals now", locals() # print "Closure one level up was taken", for_closure_as_well # print "Closure two levels up was taken", for_closure print "Globals still work", starImporterFunction print "Added local from code", f execFunction() deeper() deepExec()
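The commented-out prints above probe which closure variables exec'd code can see, which is exactly where CPython's scoping rules get subtle. Under Python 3 the safe idiom is to hand exec explicit namespaces and read results back from them, since names assigned by exec never become locals of the enclosing function; a simplified sketch, not the Nuitka test itself:

    def outer():
        captured = 3                  # ordinary closure variable

        def inner():
            ns = {}
            exec("f = 2", {}, ns)     # explicit dicts keep the exec self-contained
            return ns["f"], captured  # f lives in ns; captured comes via the closure

        return inner()

    assert outer() == (2, 3)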
f6cfe079cadd545a27ad62ec416115d3bbcf357f
tests/test_between_correls.py
tests/test_between_correls.py
import pytest import os from SCNIC.general import simulate_correls from SCNIC.between_correls import between_correls @pytest.fixture() def args(): class Arguments(object): def __init__(self): self.table1 = "table1.biom" self.table2 = "table2.biom" self.output = "out_dir" self.correl_method = "spearman" self.p_adjust = "bh" self.min_sample = None self.min_p = None self.min_r = None self.sparcc_filter = False self.force = False self.procs = 1 return Arguments() def test_between_correls(args, tmpdir): table1 = simulate_correls() table2 = simulate_correls() loc = tmpdir.mkdir("with_correls_test") table1.to_json("madebyme", open(str(loc)+"/table1.biom", 'w')) table2.to_json("madebyme", open(str(loc) + "/table2.biom", 'w')) os.chdir(str(loc)) between_correls(args) files = os.listdir(str(loc)+'/out_dir') assert "correls.txt" in files assert "crossnet.gml" in files
import pytest import os from SCNIC.general import simulate_correls from SCNIC.between_correls import between_correls @pytest.fixture() def args(): class Arguments(object): def __init__(self): self.table1 = "table1.biom" self.table2 = "table2.biom" self.output = "out_dir" self.correl_method = "spearman" self.p_adjust = "bh" self.min_sample = None self.min_p = None self.min_r = None self.sparcc_filter = True self.force = False self.procs = 1 return Arguments() def test_between_correls(args, tmpdir): table1 = simulate_correls() table2 = simulate_correls() loc = tmpdir.mkdir("with_correls_test") table1.to_json("madebyme", open(str(loc)+"/table1.biom", 'w')) table2.to_json("madebyme", open(str(loc) + "/table2.biom", 'w')) os.chdir(str(loc)) between_correls(args) files = os.listdir(str(loc)+'/out_dir') assert "correls.txt" in files assert "crossnet.gml" in files
Add module.py test and modify some others for better coverage. Remove files not used.
Add module.py test and modify some others for better coverage. Remove files not used.
Python
bsd-3-clause
shafferm/SCNIC
import pytest import os from SCNIC.general import simulate_correls from SCNIC.between_correls import between_correls @pytest.fixture() def args(): class Arguments(object): def __init__(self): self.table1 = "table1.biom" self.table2 = "table2.biom" self.output = "out_dir" self.correl_method = "spearman" self.p_adjust = "bh" self.min_sample = None self.min_p = None self.min_r = None self.sparcc_filter = False self.force = False self.procs = 1 return Arguments() def test_between_correls(args, tmpdir): table1 = simulate_correls() table2 = simulate_correls() loc = tmpdir.mkdir("with_correls_test") table1.to_json("madebyme", open(str(loc)+"/table1.biom", 'w')) table2.to_json("madebyme", open(str(loc) + "/table2.biom", 'w')) os.chdir(str(loc)) between_correls(args) files = os.listdir(str(loc)+'/out_dir') assert "correls.txt" in files assert "crossnet.gml" in files Add module.py test and modify some others for better coverage. Remove files not used.
import pytest import os from SCNIC.general import simulate_correls from SCNIC.between_correls import between_correls @pytest.fixture() def args(): class Arguments(object): def __init__(self): self.table1 = "table1.biom" self.table2 = "table2.biom" self.output = "out_dir" self.correl_method = "spearman" self.p_adjust = "bh" self.min_sample = None self.min_p = None self.min_r = None self.sparcc_filter = True self.force = False self.procs = 1 return Arguments() def test_between_correls(args, tmpdir): table1 = simulate_correls() table2 = simulate_correls() loc = tmpdir.mkdir("with_correls_test") table1.to_json("madebyme", open(str(loc)+"/table1.biom", 'w')) table2.to_json("madebyme", open(str(loc) + "/table2.biom", 'w')) os.chdir(str(loc)) between_correls(args) files = os.listdir(str(loc)+'/out_dir') assert "correls.txt" in files assert "crossnet.gml" in files
<commit_before>import pytest import os from SCNIC.general import simulate_correls from SCNIC.between_correls import between_correls @pytest.fixture() def args(): class Arguments(object): def __init__(self): self.table1 = "table1.biom" self.table2 = "table2.biom" self.output = "out_dir" self.correl_method = "spearman" self.p_adjust = "bh" self.min_sample = None self.min_p = None self.min_r = None self.sparcc_filter = False self.force = False self.procs = 1 return Arguments() def test_between_correls(args, tmpdir): table1 = simulate_correls() table2 = simulate_correls() loc = tmpdir.mkdir("with_correls_test") table1.to_json("madebyme", open(str(loc)+"/table1.biom", 'w')) table2.to_json("madebyme", open(str(loc) + "/table2.biom", 'w')) os.chdir(str(loc)) between_correls(args) files = os.listdir(str(loc)+'/out_dir') assert "correls.txt" in files assert "crossnet.gml" in files <commit_msg>Add module.py test and modify some others for better coverage. Remove files not used.<commit_after>
import pytest import os from SCNIC.general import simulate_correls from SCNIC.between_correls import between_correls @pytest.fixture() def args(): class Arguments(object): def __init__(self): self.table1 = "table1.biom" self.table2 = "table2.biom" self.output = "out_dir" self.correl_method = "spearman" self.p_adjust = "bh" self.min_sample = None self.min_p = None self.min_r = None self.sparcc_filter = True self.force = False self.procs = 1 return Arguments() def test_between_correls(args, tmpdir): table1 = simulate_correls() table2 = simulate_correls() loc = tmpdir.mkdir("with_correls_test") table1.to_json("madebyme", open(str(loc)+"/table1.biom", 'w')) table2.to_json("madebyme", open(str(loc) + "/table2.biom", 'w')) os.chdir(str(loc)) between_correls(args) files = os.listdir(str(loc)+'/out_dir') assert "correls.txt" in files assert "crossnet.gml" in files
import pytest import os from SCNIC.general import simulate_correls from SCNIC.between_correls import between_correls @pytest.fixture() def args(): class Arguments(object): def __init__(self): self.table1 = "table1.biom" self.table2 = "table2.biom" self.output = "out_dir" self.correl_method = "spearman" self.p_adjust = "bh" self.min_sample = None self.min_p = None self.min_r = None self.sparcc_filter = False self.force = False self.procs = 1 return Arguments() def test_between_correls(args, tmpdir): table1 = simulate_correls() table2 = simulate_correls() loc = tmpdir.mkdir("with_correls_test") table1.to_json("madebyme", open(str(loc)+"/table1.biom", 'w')) table2.to_json("madebyme", open(str(loc) + "/table2.biom", 'w')) os.chdir(str(loc)) between_correls(args) files = os.listdir(str(loc)+'/out_dir') assert "correls.txt" in files assert "crossnet.gml" in files Add module.py test and modify some others for better coverage. Remove files not used.import pytest import os from SCNIC.general import simulate_correls from SCNIC.between_correls import between_correls @pytest.fixture() def args(): class Arguments(object): def __init__(self): self.table1 = "table1.biom" self.table2 = "table2.biom" self.output = "out_dir" self.correl_method = "spearman" self.p_adjust = "bh" self.min_sample = None self.min_p = None self.min_r = None self.sparcc_filter = True self.force = False self.procs = 1 return Arguments() def test_between_correls(args, tmpdir): table1 = simulate_correls() table2 = simulate_correls() loc = tmpdir.mkdir("with_correls_test") table1.to_json("madebyme", open(str(loc)+"/table1.biom", 'w')) table2.to_json("madebyme", open(str(loc) + "/table2.biom", 'w')) os.chdir(str(loc)) between_correls(args) files = os.listdir(str(loc)+'/out_dir') assert "correls.txt" in files assert "crossnet.gml" in files
<commit_before>import pytest import os from SCNIC.general import simulate_correls from SCNIC.between_correls import between_correls @pytest.fixture() def args(): class Arguments(object): def __init__(self): self.table1 = "table1.biom" self.table2 = "table2.biom" self.output = "out_dir" self.correl_method = "spearman" self.p_adjust = "bh" self.min_sample = None self.min_p = None self.min_r = None self.sparcc_filter = False self.force = False self.procs = 1 return Arguments() def test_between_correls(args, tmpdir): table1 = simulate_correls() table2 = simulate_correls() loc = tmpdir.mkdir("with_correls_test") table1.to_json("madebyme", open(str(loc)+"/table1.biom", 'w')) table2.to_json("madebyme", open(str(loc) + "/table2.biom", 'w')) os.chdir(str(loc)) between_correls(args) files = os.listdir(str(loc)+'/out_dir') assert "correls.txt" in files assert "crossnet.gml" in files <commit_msg>Add module.py test and modify some others for better coverage. Remove files not used.<commit_after>import pytest import os from SCNIC.general import simulate_correls from SCNIC.between_correls import between_correls @pytest.fixture() def args(): class Arguments(object): def __init__(self): self.table1 = "table1.biom" self.table2 = "table2.biom" self.output = "out_dir" self.correl_method = "spearman" self.p_adjust = "bh" self.min_sample = None self.min_p = None self.min_r = None self.sparcc_filter = True self.force = False self.procs = 1 return Arguments() def test_between_correls(args, tmpdir): table1 = simulate_correls() table2 = simulate_correls() loc = tmpdir.mkdir("with_correls_test") table1.to_json("madebyme", open(str(loc)+"/table1.biom", 'w')) table2.to_json("madebyme", open(str(loc) + "/table2.biom", 'w')) os.chdir(str(loc)) between_correls(args) files = os.listdir(str(loc)+'/out_dir') assert "correls.txt" in files assert "crossnet.gml" in files
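The args fixture above hand-builds a stand-in for a parsed argparse result, and the change itself only flips sparcc_filter to True. When the code under test needs nothing but attribute access, types.SimpleNamespace expresses the same fixture with less ceremony (a sketch under that assumption):

    import pytest
    from types import SimpleNamespace

    @pytest.fixture()
    def args():
        return SimpleNamespace(
            table1="table1.biom", table2="table2.biom", output="out_dir",
            correl_method="spearman", p_adjust="bh", min_sample=None,
            min_p=None, min_r=None, sparcc_filter=True, force=False,
            procs=1,
        )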
792a4c92c9718e202fb8b180f76c8a08b374e223
lib/sparkperf/mesos_cluster.py
lib/sparkperf/mesos_cluster.py
from sparkperf.cluster import Cluster import json import re import os import sys import time import urllib2 class MesosCluster(Cluster): """ Functionality for interacting with a already running Mesos Spark cluster. All the behavior for starting and stopping the cluster is not supported. """ def __init__(self, spark_home, mesos_master, spark_conf_dir = None, commit_sha="unknown"): self.spark_home = spark_home self.mesos_master = mesos_master self.spark_conf_dir = spark_conf_dir self.commit_sha = commit_sha state_url = "http://" + os.path.join(mesos_master.strip("mesos://"), "state.json") resp = urllib2.urlopen(state_url) if resp.getcode() != 200: raise "Bad status code returned fetching state.json from mesos master" state = json.loads(resp.read()) self.slaves = list(map((lambda slave: slave["hostname"]), state["slaves"])) def sync_spark(self): None def stop(self): None def start(self): None def ensure_spark_stopped_on_slaves(self): None
from sparkperf.cluster import Cluster import json import os import urllib2 class MesosCluster(Cluster): """ Functionality for interacting with a already running Mesos Spark cluster. All the behavior for starting and stopping the cluster is not supported. """ def __init__(self, spark_home, mesos_master, spark_conf_dir = None, commit_sha="unknown"): self.spark_home = spark_home self.mesos_master = mesos_master self.spark_conf_dir = spark_conf_dir self.commit_sha = commit_sha state_url = "http://" + os.path.join(mesos_master.strip("mesos://"), "state.json") resp = urllib2.urlopen(state_url) if resp.getcode() != 200: raise "Bad status code returned fetching state.json from mesos master" state = json.loads(resp.read()) self.slaves = list(map((lambda slave: slave["hostname"]), state["slaves"])) def sync_spark(self): None def stop(self): None def start(self): None def ensure_spark_stopped_on_slaves(self): None
Remove unused imports in mesos cluster
Remove unused imports in mesos cluster
Python
apache-2.0
arijitt/spark-perf,jkbradley/spark-perf,feynmanliang/spark-perf,mengxr/spark-perf,XiaoqingWang/spark-perf,databricks/spark-perf,mengxr/spark-perf,nchammas/spark-perf,zsxwing/spark-perf,XiaoqingWang/spark-perf,zsxwing/spark-perf,arijitt/spark-perf,Altiscale/spark-perf,Altiscale/spark-perf,nchammas/spark-perf,jkbradley/spark-perf,databricks/spark-perf,feynmanliang/spark-perf
from sparkperf.cluster import Cluster import json import re import os import sys import time import urllib2 class MesosCluster(Cluster): """ Functionality for interacting with a already running Mesos Spark cluster. All the behavior for starting and stopping the cluster is not supported. """ def __init__(self, spark_home, mesos_master, spark_conf_dir = None, commit_sha="unknown"): self.spark_home = spark_home self.mesos_master = mesos_master self.spark_conf_dir = spark_conf_dir self.commit_sha = commit_sha state_url = "http://" + os.path.join(mesos_master.strip("mesos://"), "state.json") resp = urllib2.urlopen(state_url) if resp.getcode() != 200: raise "Bad status code returned fetching state.json from mesos master" state = json.loads(resp.read()) self.slaves = list(map((lambda slave: slave["hostname"]), state["slaves"])) def sync_spark(self): None def stop(self): None def start(self): None def ensure_spark_stopped_on_slaves(self): None Remove unused imports in mesos cluster
from sparkperf.cluster import Cluster import json import os import urllib2 class MesosCluster(Cluster): """ Functionality for interacting with a already running Mesos Spark cluster. All the behavior for starting and stopping the cluster is not supported. """ def __init__(self, spark_home, mesos_master, spark_conf_dir = None, commit_sha="unknown"): self.spark_home = spark_home self.mesos_master = mesos_master self.spark_conf_dir = spark_conf_dir self.commit_sha = commit_sha state_url = "http://" + os.path.join(mesos_master.strip("mesos://"), "state.json") resp = urllib2.urlopen(state_url) if resp.getcode() != 200: raise "Bad status code returned fetching state.json from mesos master" state = json.loads(resp.read()) self.slaves = list(map((lambda slave: slave["hostname"]), state["slaves"])) def sync_spark(self): None def stop(self): None def start(self): None def ensure_spark_stopped_on_slaves(self): None
<commit_before>from sparkperf.cluster import Cluster import json import re import os import sys import time import urllib2 class MesosCluster(Cluster): """ Functionality for interacting with a already running Mesos Spark cluster. All the behavior for starting and stopping the cluster is not supported. """ def __init__(self, spark_home, mesos_master, spark_conf_dir = None, commit_sha="unknown"): self.spark_home = spark_home self.mesos_master = mesos_master self.spark_conf_dir = spark_conf_dir self.commit_sha = commit_sha state_url = "http://" + os.path.join(mesos_master.strip("mesos://"), "state.json") resp = urllib2.urlopen(state_url) if resp.getcode() != 200: raise "Bad status code returned fetching state.json from mesos master" state = json.loads(resp.read()) self.slaves = list(map((lambda slave: slave["hostname"]), state["slaves"])) def sync_spark(self): None def stop(self): None def start(self): None def ensure_spark_stopped_on_slaves(self): None <commit_msg>Remove unused imports in mesos cluster<commit_after>
from sparkperf.cluster import Cluster import json import os import urllib2 class MesosCluster(Cluster): """ Functionality for interacting with a already running Mesos Spark cluster. All the behavior for starting and stopping the cluster is not supported. """ def __init__(self, spark_home, mesos_master, spark_conf_dir = None, commit_sha="unknown"): self.spark_home = spark_home self.mesos_master = mesos_master self.spark_conf_dir = spark_conf_dir self.commit_sha = commit_sha state_url = "http://" + os.path.join(mesos_master.strip("mesos://"), "state.json") resp = urllib2.urlopen(state_url) if resp.getcode() != 200: raise "Bad status code returned fetching state.json from mesos master" state = json.loads(resp.read()) self.slaves = list(map((lambda slave: slave["hostname"]), state["slaves"])) def sync_spark(self): None def stop(self): None def start(self): None def ensure_spark_stopped_on_slaves(self): None
from sparkperf.cluster import Cluster import json import re import os import sys import time import urllib2 class MesosCluster(Cluster): """ Functionality for interacting with a already running Mesos Spark cluster. All the behavior for starting and stopping the cluster is not supported. """ def __init__(self, spark_home, mesos_master, spark_conf_dir = None, commit_sha="unknown"): self.spark_home = spark_home self.mesos_master = mesos_master self.spark_conf_dir = spark_conf_dir self.commit_sha = commit_sha state_url = "http://" + os.path.join(mesos_master.strip("mesos://"), "state.json") resp = urllib2.urlopen(state_url) if resp.getcode() != 200: raise "Bad status code returned fetching state.json from mesos master" state = json.loads(resp.read()) self.slaves = list(map((lambda slave: slave["hostname"]), state["slaves"])) def sync_spark(self): None def stop(self): None def start(self): None def ensure_spark_stopped_on_slaves(self): None Remove unused imports in mesos clusterfrom sparkperf.cluster import Cluster import json import os import urllib2 class MesosCluster(Cluster): """ Functionality for interacting with a already running Mesos Spark cluster. All the behavior for starting and stopping the cluster is not supported. """ def __init__(self, spark_home, mesos_master, spark_conf_dir = None, commit_sha="unknown"): self.spark_home = spark_home self.mesos_master = mesos_master self.spark_conf_dir = spark_conf_dir self.commit_sha = commit_sha state_url = "http://" + os.path.join(mesos_master.strip("mesos://"), "state.json") resp = urllib2.urlopen(state_url) if resp.getcode() != 200: raise "Bad status code returned fetching state.json from mesos master" state = json.loads(resp.read()) self.slaves = list(map((lambda slave: slave["hostname"]), state["slaves"])) def sync_spark(self): None def stop(self): None def start(self): None def ensure_spark_stopped_on_slaves(self): None
<commit_before>from sparkperf.cluster import Cluster import json import re import os import sys import time import urllib2 class MesosCluster(Cluster): """ Functionality for interacting with a already running Mesos Spark cluster. All the behavior for starting and stopping the cluster is not supported. """ def __init__(self, spark_home, mesos_master, spark_conf_dir = None, commit_sha="unknown"): self.spark_home = spark_home self.mesos_master = mesos_master self.spark_conf_dir = spark_conf_dir self.commit_sha = commit_sha state_url = "http://" + os.path.join(mesos_master.strip("mesos://"), "state.json") resp = urllib2.urlopen(state_url) if resp.getcode() != 200: raise "Bad status code returned fetching state.json from mesos master" state = json.loads(resp.read()) self.slaves = list(map((lambda slave: slave["hostname"]), state["slaves"])) def sync_spark(self): None def stop(self): None def start(self): None def ensure_spark_stopped_on_slaves(self): None <commit_msg>Remove unused imports in mesos cluster<commit_after>from sparkperf.cluster import Cluster import json import os import urllib2 class MesosCluster(Cluster): """ Functionality for interacting with a already running Mesos Spark cluster. All the behavior for starting and stopping the cluster is not supported. """ def __init__(self, spark_home, mesos_master, spark_conf_dir = None, commit_sha="unknown"): self.spark_home = spark_home self.mesos_master = mesos_master self.spark_conf_dir = spark_conf_dir self.commit_sha = commit_sha state_url = "http://" + os.path.join(mesos_master.strip("mesos://"), "state.json") resp = urllib2.urlopen(state_url) if resp.getcode() != 200: raise "Bad status code returned fetching state.json from mesos master" state = json.loads(resp.read()) self.slaves = list(map((lambda slave: slave["hostname"]), state["slaves"])) def sync_spark(self): None def stop(self): None def start(self): None def ensure_spark_stopped_on_slaves(self): None
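Commits like the one above are easy to automate. Below is a rough sketch that flags module-level imports never referenced again in a file — an approximation of what tools such as pyflakes report, not their implementation, and it ignores names used only in strings, annotations, or `__all__` re-exports:

```python
import ast
import sys


def unused_imports(path):
    with open(path) as f:
        tree = ast.parse(f.read())
    imported = {}  # top-level bound name -> line number of the import
    for node in ast.walk(tree):
        if isinstance(node, ast.Import):
            for alias in node.names:
                name = (alias.asname or alias.name).split('.')[0]
                imported[name] = node.lineno
        elif isinstance(node, ast.ImportFrom):
            for alias in node.names:
                imported[alias.asname or alias.name] = node.lineno
    # Any bare Name reference counts as a use; attribute access like
    # os.path.join still produces a Name node for "os".
    used = {node.id for node in ast.walk(tree) if isinstance(node, ast.Name)}
    return {name: line for name, line in imported.items() if name not in used}


if __name__ == "__main__":
    for name, line in sorted(unused_imports(sys.argv[1]).items(),
                             key=lambda kv: kv[1]):
        print("line %d: %r imported but unused" % (line, name))
```

Run against the old mesos_cluster.py above, a checker along these lines would report `re`, `sys`, and `time` — exactly the imports the commit drops.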
13b9aff7c134d5783987ca578cd8664effbe78ef
codegen/templates/python.ejs.py
codegen/templates/python.ejs.py
<% if (showSetup) { -%> from KalturaClient import * from KalturaClient.Plugins.Core import * <% plugins.forEach(function(p) { -%> from KalturaClient.Plugins.<%- p.charAt(0).toUpperCase() + p.substring(1) %> import * <% }) -%> config = KalturaConfiguration(<%- answers.partnerId %>) config.serviceUrl = "https://www.kaltura.com/" client = KalturaClient(config) <% if (!noSession) { -%> ks = client.session.start( <%- codegen.constant(answers.secret) %>, <%- codegen.constant(answers.userId) %>, <%- answers.sessionType === 0 ? 'KalturaSessionType.USER' : 'KalturaSessionType.ADMIN' %>, <%- codegen.constant(answers.partnerId) || 'YOUR_PARTNER_ID' %>) client.setKs(ks) <% } -%> <% } -%> <%- codegen.assignAllParameters(parameters, answers) -%> result = client.<%- service %>.<%- action %>(<%- parameterNames.join(', ') %>); print(result); <% -%>
<% if (showSetup) { -%> from KalturaClient import * from KalturaClient.Plugins.Core import * <% plugins.forEach(function(p) { -%> from KalturaClient.Plugins.<%- p.charAt(0).toUpperCase() + p.substring(1) %> import * <% }) -%> config = KalturaConfiguration(<%- answers.partnerId %>) config.serviceUrl = "https://www.kaltura.com/" client = KalturaClient(config) <% if (!noSession) { -%> ks = client.session.start( <%- codegen.constant(answers.secret) %>, <%- codegen.constant(answers.userId) %>, <%- answers.sessionType === 0 ? 'KalturaSessionType.USER' : 'KalturaSessionType.ADMIN' %>, <%- codegen.constant(answers.partnerId) || 'YOUR_PARTNER_ID' %>) client.setKs(ks) <% } -%> <% } -%> <%- codegen.assignAllParameters(parameters, answers) -%> result = client.<%- service %>.<%- action %>(<%- parameterNames.join(', ') %>) print(result) <% -%>
Drop the ';' in the Python codegen as it's not necessary.
Drop the ';' in the Python codegen as it's not necessary.
Python
agpl-3.0
kaltura/developer-platform,kaltura/developer-platform,kaltura/developer-platform,kaltura/developer-platform,kaltura/developer-platform,kaltura/developer-platform,kaltura/developer-platform,kaltura/developer-platform,kaltura/developer-platform,kaltura/developer-platform
<% if (showSetup) { -%> from KalturaClient import * from KalturaClient.Plugins.Core import * <% plugins.forEach(function(p) { -%> from KalturaClient.Plugins.<%- p.charAt(0).toUpperCase() + p.substring(1) %> import * <% }) -%> config = KalturaConfiguration(<%- answers.partnerId %>) config.serviceUrl = "https://www.kaltura.com/" client = KalturaClient(config) <% if (!noSession) { -%> ks = client.session.start( <%- codegen.constant(answers.secret) %>, <%- codegen.constant(answers.userId) %>, <%- answers.sessionType === 0 ? 'KalturaSessionType.USER' : 'KalturaSessionType.ADMIN' %>, <%- codegen.constant(answers.partnerId) || 'YOUR_PARTNER_ID' %>) client.setKs(ks) <% } -%> <% } -%> <%- codegen.assignAllParameters(parameters, answers) -%> result = client.<%- service %>.<%- action %>(<%- parameterNames.join(', ') %>); print(result); <% -%> Drop the ';' in the Python codegen as it's not necessary.
<% if (showSetup) { -%> from KalturaClient import * from KalturaClient.Plugins.Core import * <% plugins.forEach(function(p) { -%> from KalturaClient.Plugins.<%- p.charAt(0).toUpperCase() + p.substring(1) %> import * <% }) -%> config = KalturaConfiguration(<%- answers.partnerId %>) config.serviceUrl = "https://www.kaltura.com/" client = KalturaClient(config) <% if (!noSession) { -%> ks = client.session.start( <%- codegen.constant(answers.secret) %>, <%- codegen.constant(answers.userId) %>, <%- answers.sessionType === 0 ? 'KalturaSessionType.USER' : 'KalturaSessionType.ADMIN' %>, <%- codegen.constant(answers.partnerId) || 'YOUR_PARTNER_ID' %>) client.setKs(ks) <% } -%> <% } -%> <%- codegen.assignAllParameters(parameters, answers) -%> result = client.<%- service %>.<%- action %>(<%- parameterNames.join(', ') %>) print(result) <% -%>
<commit_before><% if (showSetup) { -%> from KalturaClient import * from KalturaClient.Plugins.Core import * <% plugins.forEach(function(p) { -%> from KalturaClient.Plugins.<%- p.charAt(0).toUpperCase() + p.substring(1) %> import * <% }) -%> config = KalturaConfiguration(<%- answers.partnerId %>) config.serviceUrl = "https://www.kaltura.com/" client = KalturaClient(config) <% if (!noSession) { -%> ks = client.session.start( <%- codegen.constant(answers.secret) %>, <%- codegen.constant(answers.userId) %>, <%- answers.sessionType === 0 ? 'KalturaSessionType.USER' : 'KalturaSessionType.ADMIN' %>, <%- codegen.constant(answers.partnerId) || 'YOUR_PARTNER_ID' %>) client.setKs(ks) <% } -%> <% } -%> <%- codegen.assignAllParameters(parameters, answers) -%> result = client.<%- service %>.<%- action %>(<%- parameterNames.join(', ') %>); print(result); <% -%> <commit_msg>Drop the ';' in the Python codegen as it's not necessary.<commit_after>
<% if (showSetup) { -%> from KalturaClient import * from KalturaClient.Plugins.Core import * <% plugins.forEach(function(p) { -%> from KalturaClient.Plugins.<%- p.charAt(0).toUpperCase() + p.substring(1) %> import * <% }) -%> config = KalturaConfiguration(<%- answers.partnerId %>) config.serviceUrl = "https://www.kaltura.com/" client = KalturaClient(config) <% if (!noSession) { -%> ks = client.session.start( <%- codegen.constant(answers.secret) %>, <%- codegen.constant(answers.userId) %>, <%- answers.sessionType === 0 ? 'KalturaSessionType.USER' : 'KalturaSessionType.ADMIN' %>, <%- codegen.constant(answers.partnerId) || 'YOUR_PARTNER_ID' %>) client.setKs(ks) <% } -%> <% } -%> <%- codegen.assignAllParameters(parameters, answers) -%> result = client.<%- service %>.<%- action %>(<%- parameterNames.join(', ') %>) print(result) <% -%>
<% if (showSetup) { -%> from KalturaClient import * from KalturaClient.Plugins.Core import * <% plugins.forEach(function(p) { -%> from KalturaClient.Plugins.<%- p.charAt(0).toUpperCase() + p.substring(1) %> import * <% }) -%> config = KalturaConfiguration(<%- answers.partnerId %>) config.serviceUrl = "https://www.kaltura.com/" client = KalturaClient(config) <% if (!noSession) { -%> ks = client.session.start( <%- codegen.constant(answers.secret) %>, <%- codegen.constant(answers.userId) %>, <%- answers.sessionType === 0 ? 'KalturaSessionType.USER' : 'KalturaSessionType.ADMIN' %>, <%- codegen.constant(answers.partnerId) || 'YOUR_PARTNER_ID' %>) client.setKs(ks) <% } -%> <% } -%> <%- codegen.assignAllParameters(parameters, answers) -%> result = client.<%- service %>.<%- action %>(<%- parameterNames.join(', ') %>); print(result); <% -%> Drop the ';' in the Python codegen as it's not necessary.<% if (showSetup) { -%> from KalturaClient import * from KalturaClient.Plugins.Core import * <% plugins.forEach(function(p) { -%> from KalturaClient.Plugins.<%- p.charAt(0).toUpperCase() + p.substring(1) %> import * <% }) -%> config = KalturaConfiguration(<%- answers.partnerId %>) config.serviceUrl = "https://www.kaltura.com/" client = KalturaClient(config) <% if (!noSession) { -%> ks = client.session.start( <%- codegen.constant(answers.secret) %>, <%- codegen.constant(answers.userId) %>, <%- answers.sessionType === 0 ? 'KalturaSessionType.USER' : 'KalturaSessionType.ADMIN' %>, <%- codegen.constant(answers.partnerId) || 'YOUR_PARTNER_ID' %>) client.setKs(ks) <% } -%> <% } -%> <%- codegen.assignAllParameters(parameters, answers) -%> result = client.<%- service %>.<%- action %>(<%- parameterNames.join(', ') %>) print(result) <% -%>
<commit_before><% if (showSetup) { -%> from KalturaClient import * from KalturaClient.Plugins.Core import * <% plugins.forEach(function(p) { -%> from KalturaClient.Plugins.<%- p.charAt(0).toUpperCase() + p.substring(1) %> import * <% }) -%> config = KalturaConfiguration(<%- answers.partnerId %>) config.serviceUrl = "https://www.kaltura.com/" client = KalturaClient(config) <% if (!noSession) { -%> ks = client.session.start( <%- codegen.constant(answers.secret) %>, <%- codegen.constant(answers.userId) %>, <%- answers.sessionType === 0 ? 'KalturaSessionType.USER' : 'KalturaSessionType.ADMIN' %>, <%- codegen.constant(answers.partnerId) || 'YOUR_PARTNER_ID' %>) client.setKs(ks) <% } -%> <% } -%> <%- codegen.assignAllParameters(parameters, answers) -%> result = client.<%- service %>.<%- action %>(<%- parameterNames.join(', ') %>); print(result); <% -%> <commit_msg>Drop the ';' in the Python codegen as it's not necessary.<commit_after><% if (showSetup) { -%> from KalturaClient import * from KalturaClient.Plugins.Core import * <% plugins.forEach(function(p) { -%> from KalturaClient.Plugins.<%- p.charAt(0).toUpperCase() + p.substring(1) %> import * <% }) -%> config = KalturaConfiguration(<%- answers.partnerId %>) config.serviceUrl = "https://www.kaltura.com/" client = KalturaClient(config) <% if (!noSession) { -%> ks = client.session.start( <%- codegen.constant(answers.secret) %>, <%- codegen.constant(answers.userId) %>, <%- answers.sessionType === 0 ? 'KalturaSessionType.USER' : 'KalturaSessionType.ADMIN' %>, <%- codegen.constant(answers.partnerId) || 'YOUR_PARTNER_ID' %>) client.setKs(ks) <% } -%> <% } -%> <%- codegen.assignAllParameters(parameters, answers) -%> result = client.<%- service %>.<%- action %>(<%- parameterNames.join(', ') %>) print(result) <% -%>
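For context, the EJS template renders to plain Python, where the trailing `;` on the last two statements is legal but unidiomatic. A hypothetical rendering after the change — the partner id, secret, user id, service, action, and entry id below are all placeholders, none of them from the dataset:

```python
from KalturaClient import *
from KalturaClient.Plugins.Core import *

config = KalturaConfiguration(12345)            # hypothetical partner id
config.serviceUrl = "https://www.kaltura.com/"
client = KalturaClient(config)

ks = client.session.start(
    "YOUR_KALTURA_SECRET",                      # hypothetical secret
    "user@example.com",                         # hypothetical user id
    KalturaSessionType.ADMIN,
    12345)
client.setKs(ks)

result = client.media.get("0_entry_id")         # hypothetical service/action
print(result)                                   # no semicolons needed
```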
d5597911837967c1f34d1c904282f9464e38e767
flask_controllers/GameModes.py
flask_controllers/GameModes.py
import logging from flask import request from flask.views import MethodView from flask_helpers.build_response import build_response # Import the Game Controller from Game.GameController import GameController class GameModes(MethodView): def get(self): logging.debug("GameModes: GET: Initializing GameObject") game_object = GameController() logging.debug("GameModes: GET: GameObject initialized to {}".format(game_object.save())) logging.debug("GameModes: GET: Checking if textmode flag set") if request.args.get('textmode', None): logging.debug("GameModes: GET: Responding with list of names") response_data = game_object.game_mode_names else: logging.debug("GameModes: GET: Responding with JSON object") response_data = [{"mode": gt.mode, "digits": gt.digits, "digit-type": gt.digit_type, "guesses": gt.guesses_allowed } for gt in game_object.game_modes] logging.debug("GameModes: GET: Return {}".format(response_data)) return build_response( html_status=200, response_data=response_data, response_mimetype="application/json" )
import logging from flask import request from flask.views import MethodView from flask_helpers.build_response import build_response # Import the Game Controller from Game.GameController import GameController class GameModes(MethodView): def get(self): logging.debug("GameModes: GET: Initializing GameObject") game_object = GameController() logging.debug("GameModes: GET: GameObject initialized to {}".format(game_object.save())) logging.debug("GameModes: GET: Checking if textmode flag set") if request.args.get('textmode', None): logging.debug("GameModes: GET: Responding with list of names") response_data = game_object.game_mode_names else: logging.debug("GameModes: GET: Responding with JSON object: {}".format(game_object.game_modes)) response_data = [{"mode": gt.mode, "digits": gt.digits, "digit-type": gt.digit_type, "guesses": gt.guesses_allowed } for gt in game_object.game_modes] logging.debug("GameModes: GET: Return {}".format(response_data)) return build_response( html_status=200, response_data=response_data, response_mimetype="application/json" )
Update logging for message output and consistency.
Update logging for message output and consistency.
Python
apache-2.0
dsandersAzure/python_cowbull_server,dsandersAzure/python_cowbull_server
import logging from flask import request from flask.views import MethodView from flask_helpers.build_response import build_response # Import the Game Controller from Game.GameController import GameController class GameModes(MethodView): def get(self): logging.debug("GameModes: GET: Initializing GameObject") game_object = GameController() logging.debug("GameModes: GET: GameObject initialized to {}".format(game_object.save())) logging.debug("GameModes: GET: Checking if textmode flag set") if request.args.get('textmode', None): logging.debug("GameModes: GET: Responding with list of names") response_data = game_object.game_mode_names else: logging.debug("GameModes: GET: Responding with JSON object") response_data = [{"mode": gt.mode, "digits": gt.digits, "digit-type": gt.digit_type, "guesses": gt.guesses_allowed } for gt in game_object.game_modes] logging.debug("GameModes: GET: Return {}".format(response_data)) return build_response( html_status=200, response_data=response_data, response_mimetype="application/json" ) Update logging for message output and consistency.
import logging from flask import request from flask.views import MethodView from flask_helpers.build_response import build_response # Import the Game Controller from Game.GameController import GameController class GameModes(MethodView): def get(self): logging.debug("GameModes: GET: Initializing GameObject") game_object = GameController() logging.debug("GameModes: GET: GameObject initialized to {}".format(game_object.save())) logging.debug("GameModes: GET: Checking if textmode flag set") if request.args.get('textmode', None): logging.debug("GameModes: GET: Responding with list of names") response_data = game_object.game_mode_names else: logging.debug("GameModes: GET: Responding with JSON object: {}".format(game_object.game_modes)) response_data = [{"mode": gt.mode, "digits": gt.digits, "digit-type": gt.digit_type, "guesses": gt.guesses_allowed } for gt in game_object.game_modes] logging.debug("GameModes: GET: Return {}".format(response_data)) return build_response( html_status=200, response_data=response_data, response_mimetype="application/json" )
<commit_before>import logging from flask import request from flask.views import MethodView from flask_helpers.build_response import build_response # Import the Game Controller from Game.GameController import GameController class GameModes(MethodView): def get(self): logging.debug("GameModes: GET: Initializing GameObject") game_object = GameController() logging.debug("GameModes: GET: GameObject initialized to {}".format(game_object.save())) logging.debug("GameModes: GET: Checking if textmode flag set") if request.args.get('textmode', None): logging.debug("GameModes: GET: Responding with list of names") response_data = game_object.game_mode_names else: logging.debug("GameModes: GET: Responding with JSON object") response_data = [{"mode": gt.mode, "digits": gt.digits, "digit-type": gt.digit_type, "guesses": gt.guesses_allowed } for gt in game_object.game_modes] logging.debug("GameModes: GET: Return {}".format(response_data)) return build_response( html_status=200, response_data=response_data, response_mimetype="application/json" ) <commit_msg>Update logging for message output and consistency.<commit_after>
import logging from flask import request from flask.views import MethodView from flask_helpers.build_response import build_response # Import the Game Controller from Game.GameController import GameController class GameModes(MethodView): def get(self): logging.debug("GameModes: GET: Initializing GameObject") game_object = GameController() logging.debug("GameModes: GET: GameObject initialized to {}".format(game_object.save())) logging.debug("GameModes: GET: Checking if textmode flag set") if request.args.get('textmode', None): logging.debug("GameModes: GET: Responding with list of names") response_data = game_object.game_mode_names else: logging.debug("GameModes: GET: Responding with JSON object: {}".format(game_object.game_modes)) response_data = [{"mode": gt.mode, "digits": gt.digits, "digit-type": gt.digit_type, "guesses": gt.guesses_allowed } for gt in game_object.game_modes] logging.debug("GameModes: GET: Return {}".format(response_data)) return build_response( html_status=200, response_data=response_data, response_mimetype="application/json" )
import logging from flask import request from flask.views import MethodView from flask_helpers.build_response import build_response # Import the Game Controller from Game.GameController import GameController class GameModes(MethodView): def get(self): logging.debug("GameModes: GET: Initializing GameObject") game_object = GameController() logging.debug("GameModes: GET: GameObject initialized to {}".format(game_object.save())) logging.debug("GameModes: GET: Checking if textmode flag set") if request.args.get('textmode', None): logging.debug("GameModes: GET: Responding with list of names") response_data = game_object.game_mode_names else: logging.debug("GameModes: GET: Responding with JSON object") response_data = [{"mode": gt.mode, "digits": gt.digits, "digit-type": gt.digit_type, "guesses": gt.guesses_allowed } for gt in game_object.game_modes] logging.debug("GameModes: GET: Return {}".format(response_data)) return build_response( html_status=200, response_data=response_data, response_mimetype="application/json" ) Update logging for message output and consistency.import logging from flask import request from flask.views import MethodView from flask_helpers.build_response import build_response # Import the Game Controller from Game.GameController import GameController class GameModes(MethodView): def get(self): logging.debug("GameModes: GET: Initializing GameObject") game_object = GameController() logging.debug("GameModes: GET: GameObject initialized to {}".format(game_object.save())) logging.debug("GameModes: GET: Checking if textmode flag set") if request.args.get('textmode', None): logging.debug("GameModes: GET: Responding with list of names") response_data = game_object.game_mode_names else: logging.debug("GameModes: GET: Responding with JSON object: {}".format(game_object.game_modes)) response_data = [{"mode": gt.mode, "digits": gt.digits, "digit-type": gt.digit_type, "guesses": gt.guesses_allowed } for gt in game_object.game_modes] logging.debug("GameModes: GET: Return {}".format(response_data)) return build_response( html_status=200, response_data=response_data, response_mimetype="application/json" )
<commit_before>import logging from flask import request from flask.views import MethodView from flask_helpers.build_response import build_response # Import the Game Controller from Game.GameController import GameController class GameModes(MethodView): def get(self): logging.debug("GameModes: GET: Initializing GameObject") game_object = GameController() logging.debug("GameModes: GET: GameObject initialized to {}".format(game_object.save())) logging.debug("GameModes: GET: Checking if textmode flag set") if request.args.get('textmode', None): logging.debug("GameModes: GET: Responding with list of names") response_data = game_object.game_mode_names else: logging.debug("GameModes: GET: Responding with JSON object") response_data = [{"mode": gt.mode, "digits": gt.digits, "digit-type": gt.digit_type, "guesses": gt.guesses_allowed } for gt in game_object.game_modes] logging.debug("GameModes: GET: Return {}".format(response_data)) return build_response( html_status=200, response_data=response_data, response_mimetype="application/json" ) <commit_msg>Update logging for message output and consistency.<commit_after>import logging from flask import request from flask.views import MethodView from flask_helpers.build_response import build_response # Import the Game Controller from Game.GameController import GameController class GameModes(MethodView): def get(self): logging.debug("GameModes: GET: Initializing GameObject") game_object = GameController() logging.debug("GameModes: GET: GameObject initialized to {}".format(game_object.save())) logging.debug("GameModes: GET: Checking if textmode flag set") if request.args.get('textmode', None): logging.debug("GameModes: GET: Responding with list of names") response_data = game_object.game_mode_names else: logging.debug("GameModes: GET: Responding with JSON object: {}".format(game_object.game_modes)) response_data = [{"mode": gt.mode, "digits": gt.digits, "digit-type": gt.digit_type, "guesses": gt.guesses_allowed } for gt in game_object.game_modes] logging.debug("GameModes: GET: Return {}".format(response_data)) return build_response( html_status=200, response_data=response_data, response_mimetype="application/json" )
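One refinement the commit above stops short of: with `str.format`, the formatting work runs even when DEBUG is disabled, whereas passing the value as a logging argument defers the interpolation until the record is actually emitted. A small standalone sketch with generic names, not the GameModes code:

```python
import logging

logger = logging.getLogger(__name__)


def game_modes():
    return [{"mode": "Normal", "digits": 4}]  # stand-in for a computed value


# Eager: format() always executes, even when the logger level is WARNING.
logger.debug("Responding with JSON object: {}".format(game_modes()))

# Lazy: game_modes() is still evaluated here, but the %-interpolation into
# the message string only happens if a handler actually emits the record.
logger.debug("Responding with JSON object: %s", game_modes())
```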
2c02816c05f3863ef76b3a412ac5bad9eecfafdd
testrepository/tests/test_setup.py
testrepository/tests/test_setup.py
# # Copyright (c) 2009 Testrepository Contributors # # Licensed under either the Apache License, Version 2.0 or the BSD 3-clause # license at the users choice. A copy of both licenses are available in the # project source as Apache-2.0 and BSD. You may not use this file except in # compliance with one of these two licences. # # Unless required by applicable law or agreed to in writing, software # distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # license you chose for the specific language governing permissions and # limitations under that license. """Tests for setup.py.""" import doctest import os import subprocess import sys from testtools import ( TestCase, ) from testtools.matchers import ( DocTestMatches, ) class TestCanSetup(TestCase): def test_bdist(self): # Single smoke test to make sure we can build a package. path = os.path.join(os.path.dirname(__file__), '..', '..', 'setup.py') proc = subprocess.Popen([sys.executable, path, 'bdist'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) output, _ = proc.communicate() self.assertEqual(0, proc.returncode) self.assertThat(output, DocTestMatches("...running bdist...", doctest.ELLIPSIS))
# # Copyright (c) 2009 Testrepository Contributors # # Licensed under either the Apache License, Version 2.0 or the BSD 3-clause # license at the users choice. A copy of both licenses are available in the # project source as Apache-2.0 and BSD. You may not use this file except in # compliance with one of these two licences. # # Unless required by applicable law or agreed to in writing, software # distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # license you chose for the specific language governing permissions and # limitations under that license. """Tests for setup.py.""" import doctest import os import subprocess import sys from testtools import ( TestCase, ) from testtools.matchers import ( DocTestMatches, ) class TestCanSetup(TestCase): def test_bdist(self): # Single smoke test to make sure we can build a package. path = os.path.join(os.path.dirname(__file__), '..', '..', 'setup.py') proc = subprocess.Popen([sys.executable, path, 'bdist'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True) output, _ = proc.communicate() self.assertEqual(0, proc.returncode) self.assertThat(output, DocTestMatches("""... running install_scripts ... adding '...testr' ...""", doctest.ELLIPSIS))
Make setup.py smoke test more specific again as requested in review
Make setup.py smoke test more specific again as requested in review
Python
apache-2.0
masayukig/stestr,masayukig/stestr,mtreinish/stestr,mtreinish/stestr
# # Copyright (c) 2009 Testrepository Contributors # # Licensed under either the Apache License, Version 2.0 or the BSD 3-clause # license at the users choice. A copy of both licenses are available in the # project source as Apache-2.0 and BSD. You may not use this file except in # compliance with one of these two licences. # # Unless required by applicable law or agreed to in writing, software # distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # license you chose for the specific language governing permissions and # limitations under that license. """Tests for setup.py.""" import doctest import os import subprocess import sys from testtools import ( TestCase, ) from testtools.matchers import ( DocTestMatches, ) class TestCanSetup(TestCase): def test_bdist(self): # Single smoke test to make sure we can build a package. path = os.path.join(os.path.dirname(__file__), '..', '..', 'setup.py') proc = subprocess.Popen([sys.executable, path, 'bdist'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) output, _ = proc.communicate() self.assertEqual(0, proc.returncode) self.assertThat(output, DocTestMatches("...running bdist...", doctest.ELLIPSIS)) Make setup.py smoke test more specific again as requested in review
# # Copyright (c) 2009 Testrepository Contributors # # Licensed under either the Apache License, Version 2.0 or the BSD 3-clause # license at the users choice. A copy of both licenses are available in the # project source as Apache-2.0 and BSD. You may not use this file except in # compliance with one of these two licences. # # Unless required by applicable law or agreed to in writing, software # distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # license you chose for the specific language governing permissions and # limitations under that license. """Tests for setup.py.""" import doctest import os import subprocess import sys from testtools import ( TestCase, ) from testtools.matchers import ( DocTestMatches, ) class TestCanSetup(TestCase): def test_bdist(self): # Single smoke test to make sure we can build a package. path = os.path.join(os.path.dirname(__file__), '..', '..', 'setup.py') proc = subprocess.Popen([sys.executable, path, 'bdist'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True) output, _ = proc.communicate() self.assertEqual(0, proc.returncode) self.assertThat(output, DocTestMatches("""... running install_scripts ... adding '...testr' ...""", doctest.ELLIPSIS))
<commit_before># # Copyright (c) 2009 Testrepository Contributors # # Licensed under either the Apache License, Version 2.0 or the BSD 3-clause # license at the users choice. A copy of both licenses are available in the # project source as Apache-2.0 and BSD. You may not use this file except in # compliance with one of these two licences. # # Unless required by applicable law or agreed to in writing, software # distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # license you chose for the specific language governing permissions and # limitations under that license. """Tests for setup.py.""" import doctest import os import subprocess import sys from testtools import ( TestCase, ) from testtools.matchers import ( DocTestMatches, ) class TestCanSetup(TestCase): def test_bdist(self): # Single smoke test to make sure we can build a package. path = os.path.join(os.path.dirname(__file__), '..', '..', 'setup.py') proc = subprocess.Popen([sys.executable, path, 'bdist'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) output, _ = proc.communicate() self.assertEqual(0, proc.returncode) self.assertThat(output, DocTestMatches("...running bdist...", doctest.ELLIPSIS)) <commit_msg>Make setup.py smoke test more specific again as requested in review<commit_after>
# # Copyright (c) 2009 Testrepository Contributors # # Licensed under either the Apache License, Version 2.0 or the BSD 3-clause # license at the users choice. A copy of both licenses are available in the # project source as Apache-2.0 and BSD. You may not use this file except in # compliance with one of these two licences. # # Unless required by applicable law or agreed to in writing, software # distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # license you chose for the specific language governing permissions and # limitations under that license. """Tests for setup.py.""" import doctest import os import subprocess import sys from testtools import ( TestCase, ) from testtools.matchers import ( DocTestMatches, ) class TestCanSetup(TestCase): def test_bdist(self): # Single smoke test to make sure we can build a package. path = os.path.join(os.path.dirname(__file__), '..', '..', 'setup.py') proc = subprocess.Popen([sys.executable, path, 'bdist'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True) output, _ = proc.communicate() self.assertEqual(0, proc.returncode) self.assertThat(output, DocTestMatches("""... running install_scripts ... adding '...testr' ...""", doctest.ELLIPSIS))
# # Copyright (c) 2009 Testrepository Contributors # # Licensed under either the Apache License, Version 2.0 or the BSD 3-clause # license at the users choice. A copy of both licenses are available in the # project source as Apache-2.0 and BSD. You may not use this file except in # compliance with one of these two licences. # # Unless required by applicable law or agreed to in writing, software # distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # license you chose for the specific language governing permissions and # limitations under that license. """Tests for setup.py.""" import doctest import os import subprocess import sys from testtools import ( TestCase, ) from testtools.matchers import ( DocTestMatches, ) class TestCanSetup(TestCase): def test_bdist(self): # Single smoke test to make sure we can build a package. path = os.path.join(os.path.dirname(__file__), '..', '..', 'setup.py') proc = subprocess.Popen([sys.executable, path, 'bdist'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) output, _ = proc.communicate() self.assertEqual(0, proc.returncode) self.assertThat(output, DocTestMatches("...running bdist...", doctest.ELLIPSIS)) Make setup.py smoke test more specific again as requested in review# # Copyright (c) 2009 Testrepository Contributors # # Licensed under either the Apache License, Version 2.0 or the BSD 3-clause # license at the users choice. A copy of both licenses are available in the # project source as Apache-2.0 and BSD. You may not use this file except in # compliance with one of these two licences. # # Unless required by applicable law or agreed to in writing, software # distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # license you chose for the specific language governing permissions and # limitations under that license. """Tests for setup.py.""" import doctest import os import subprocess import sys from testtools import ( TestCase, ) from testtools.matchers import ( DocTestMatches, ) class TestCanSetup(TestCase): def test_bdist(self): # Single smoke test to make sure we can build a package. path = os.path.join(os.path.dirname(__file__), '..', '..', 'setup.py') proc = subprocess.Popen([sys.executable, path, 'bdist'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True) output, _ = proc.communicate() self.assertEqual(0, proc.returncode) self.assertThat(output, DocTestMatches("""... running install_scripts ... adding '...testr' ...""", doctest.ELLIPSIS))
<commit_before># # Copyright (c) 2009 Testrepository Contributors # # Licensed under either the Apache License, Version 2.0 or the BSD 3-clause # license at the users choice. A copy of both licenses are available in the # project source as Apache-2.0 and BSD. You may not use this file except in # compliance with one of these two licences. # # Unless required by applicable law or agreed to in writing, software # distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # license you chose for the specific language governing permissions and # limitations under that license. """Tests for setup.py.""" import doctest import os import subprocess import sys from testtools import ( TestCase, ) from testtools.matchers import ( DocTestMatches, ) class TestCanSetup(TestCase): def test_bdist(self): # Single smoke test to make sure we can build a package. path = os.path.join(os.path.dirname(__file__), '..', '..', 'setup.py') proc = subprocess.Popen([sys.executable, path, 'bdist'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) output, _ = proc.communicate() self.assertEqual(0, proc.returncode) self.assertThat(output, DocTestMatches("...running bdist...", doctest.ELLIPSIS)) <commit_msg>Make setup.py smoke test more specific again as requested in review<commit_after># # Copyright (c) 2009 Testrepository Contributors # # Licensed under either the Apache License, Version 2.0 or the BSD 3-clause # license at the users choice. A copy of both licenses are available in the # project source as Apache-2.0 and BSD. You may not use this file except in # compliance with one of these two licences. # # Unless required by applicable law or agreed to in writing, software # distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # license you chose for the specific language governing permissions and # limitations under that license. """Tests for setup.py.""" import doctest import os import subprocess import sys from testtools import ( TestCase, ) from testtools.matchers import ( DocTestMatches, ) class TestCanSetup(TestCase): def test_bdist(self): # Single smoke test to make sure we can build a package. path = os.path.join(os.path.dirname(__file__), '..', '..', 'setup.py') proc = subprocess.Popen([sys.executable, path, 'bdist'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True) output, _ = proc.communicate() self.assertEqual(0, proc.returncode) self.assertThat(output, DocTestMatches("""... running install_scripts ... adding '...testr' ...""", doctest.ELLIPSIS))
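The behavioural change hiding in the diff above is `universal_newlines=True`: without it, `communicate()` returns `bytes` on Python 3, and matching against a `str` doctest pattern fails. A standalone sketch of the effect:

```python
import subprocess
import sys

proc = subprocess.Popen(
    [sys.executable, "-c", "print('running install_scripts')"],
    stdout=subprocess.PIPE,
    stderr=subprocess.STDOUT,
    universal_newlines=True,  # decode output to str; spelled text=True on 3.7+
)
output, _ = proc.communicate()
assert isinstance(output, str)
assert "install_scripts" in output
```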
6ab508d67d5e178abbf961479cab25beb0b6dc32
tests/integration/aiohttp_utils.py
tests/integration/aiohttp_utils.py
import aiohttp async def aiohttp_request(loop, method, url, output='text', **kwargs): # NOQA: E999 async with aiohttp.ClientSession(loop=loop) as session: # NOQA: E999 response = await session.request(method, url, **kwargs) # NOQA: E999 if output == 'text': content = await response.text() # NOQA: E999 elif output == 'json': content = await response.json() # NOQA: E999 elif output == 'raw': content = await response.read() # NOQA: E999 return response, content
import aiohttp async def aiohttp_request(loop, method, url, output='text', **kwargs): # NOQA: E999 async with aiohttp.ClientSession(loop=loop) as session: # NOQA: E999 async with session.request(method, url, **kwargs) as response: # NOQA: E999 if output == 'text': content = await response.text() # NOQA: E999 elif output == 'json': content = await response.json() # NOQA: E999 elif output == 'raw': content = await response.read() # NOQA: E999 return response, content
Fix aiohttp_request to properly perform aiohttp requests
Fix aiohttp_request to properly perform aiohttp requests
Python
mit
kevin1024/vcrpy,kevin1024/vcrpy,graingert/vcrpy,graingert/vcrpy
import aiohttp async def aiohttp_request(loop, method, url, output='text', **kwargs): # NOQA: E999 async with aiohttp.ClientSession(loop=loop) as session: # NOQA: E999 response = await session.request(method, url, **kwargs) # NOQA: E999 if output == 'text': content = await response.text() # NOQA: E999 elif output == 'json': content = await response.json() # NOQA: E999 elif output == 'raw': content = await response.read() # NOQA: E999 return response, content Fix aiohttp_request to properly perform aiohttp requests
import aiohttp async def aiohttp_request(loop, method, url, output='text', **kwargs): # NOQA: E999 async with aiohttp.ClientSession(loop=loop) as session: # NOQA: E999 async with session.request(method, url, **kwargs) as response: # NOQA: E999 if output == 'text': content = await response.text() # NOQA: E999 elif output == 'json': content = await response.json() # NOQA: E999 elif output == 'raw': content = await response.read() # NOQA: E999 return response, content
<commit_before>import aiohttp async def aiohttp_request(loop, method, url, output='text', **kwargs): # NOQA: E999 async with aiohttp.ClientSession(loop=loop) as session: # NOQA: E999 response = await session.request(method, url, **kwargs) # NOQA: E999 if output == 'text': content = await response.text() # NOQA: E999 elif output == 'json': content = await response.json() # NOQA: E999 elif output == 'raw': content = await response.read() # NOQA: E999 return response, content <commit_msg>Fix aiohttp_request to properly perform aiohttp requests<commit_after>
import aiohttp async def aiohttp_request(loop, method, url, output='text', **kwargs): # NOQA: E999 async with aiohttp.ClientSession(loop=loop) as session: # NOQA: E999 async with session.request(method, url, **kwargs) as response: # NOQA: E999 if output == 'text': content = await response.text() # NOQA: E999 elif output == 'json': content = await response.json() # NOQA: E999 elif output == 'raw': content = await response.read() # NOQA: E999 return response, content
import aiohttp async def aiohttp_request(loop, method, url, output='text', **kwargs): # NOQA: E999 async with aiohttp.ClientSession(loop=loop) as session: # NOQA: E999 response = await session.request(method, url, **kwargs) # NOQA: E999 if output == 'text': content = await response.text() # NOQA: E999 elif output == 'json': content = await response.json() # NOQA: E999 elif output == 'raw': content = await response.read() # NOQA: E999 return response, content Fix aiohttp_request to properly perform aiohttp requestsimport aiohttp async def aiohttp_request(loop, method, url, output='text', **kwargs): # NOQA: E999 async with aiohttp.ClientSession(loop=loop) as session: # NOQA: E999 async with session.request(method, url, **kwargs) as response: # NOQA: E999 if output == 'text': content = await response.text() # NOQA: E999 elif output == 'json': content = await response.json() # NOQA: E999 elif output == 'raw': content = await response.read() # NOQA: E999 return response, content
<commit_before>import aiohttp async def aiohttp_request(loop, method, url, output='text', **kwargs): # NOQA: E999 async with aiohttp.ClientSession(loop=loop) as session: # NOQA: E999 response = await session.request(method, url, **kwargs) # NOQA: E999 if output == 'text': content = await response.text() # NOQA: E999 elif output == 'json': content = await response.json() # NOQA: E999 elif output == 'raw': content = await response.read() # NOQA: E999 return response, content <commit_msg>Fix aiohttp_request to properly perform aiohttp requests<commit_after>import aiohttp async def aiohttp_request(loop, method, url, output='text', **kwargs): # NOQA: E999 async with aiohttp.ClientSession(loop=loop) as session: # NOQA: E999 async with session.request(method, url, **kwargs) as response: # NOQA: E999 if output == 'text': content = await response.text() # NOQA: E999 elif output == 'json': content = await response.json() # NOQA: E999 elif output == 'raw': content = await response.read() # NOQA: E999 return response, content
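The fix above matters because an aiohttp response holds a pooled connection until it is released; `async with session.request(...)` guarantees the release even if reading the body raises, while the bare `await` form can leak the connection on error. A minimal standalone sketch with a hypothetical URL, using the explicit event loop idiom of the same era as the record:

```python
import asyncio

import aiohttp


async def fetch_text(url):
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            # The connection goes back to the pool when this block exits,
            # whether the body read succeeds or raises.
            return await response.text()


if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    print(loop.run_until_complete(fetch_text("https://example.com/")))
```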
81b3e9c6ec89123eedaf53931cfa9c9bc6817d3c
django_q/__init__.py
django_q/__init__.py
import os import sys from django import get_version myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.insert(0, myPath) VERSION = (0, 7, 18) default_app_config = 'django_q.apps.DjangoQConfig' # root imports will slowly be deprecated. # please import from the relevant sub modules split_version = get_version().split('.') if split_version[1][0] != '9' and split_version[1][:2] != '10': from .tasks import async, schedule, result, result_group, fetch, fetch_group, count_group, delete_group, queue_size from .models import Task, Schedule, Success, Failure from .cluster import Cluster from .status import Stat from .brokers import get_broker __all__ = ['conf', 'cluster', 'models', 'tasks']
import os import sys from django import get_version myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.insert(0, myPath) VERSION = (0, 7, 18) default_app_config = 'django_q.apps.DjangoQConfig' # root imports will slowly be deprecated. # please import from the relevant sub modules split_version = get_version().split('.') if split_version[1] not in ('9', '10', '11'): from .tasks import async, schedule, result, result_group, fetch, fetch_group, count_group, delete_group, queue_size from .models import Task, Schedule, Success, Failure from .cluster import Cluster from .status import Stat from .brokers import get_broker __all__ = ['conf', 'cluster', 'models', 'tasks']
Add django 1.11 to the import check
Add django 1.11 to the import check
Python
mit
Koed00/django-q
import os import sys from django import get_version myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.insert(0, myPath) VERSION = (0, 7, 18) default_app_config = 'django_q.apps.DjangoQConfig' # root imports will slowly be deprecated. # please import from the relevant sub modules split_version = get_version().split('.') if split_version[1][0] != '9' and split_version[1][:2] != '10': from .tasks import async, schedule, result, result_group, fetch, fetch_group, count_group, delete_group, queue_size from .models import Task, Schedule, Success, Failure from .cluster import Cluster from .status import Stat from .brokers import get_broker __all__ = ['conf', 'cluster', 'models', 'tasks'] Add django 1.11 to the import check
import os import sys from django import get_version myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.insert(0, myPath) VERSION = (0, 7, 18) default_app_config = 'django_q.apps.DjangoQConfig' # root imports will slowly be deprecated. # please import from the relevant sub modules split_version = get_version().split('.') if split_version[1] not in ('9', '10', '11'): from .tasks import async, schedule, result, result_group, fetch, fetch_group, count_group, delete_group, queue_size from .models import Task, Schedule, Success, Failure from .cluster import Cluster from .status import Stat from .brokers import get_broker __all__ = ['conf', 'cluster', 'models', 'tasks']
<commit_before>import os import sys from django import get_version myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.insert(0, myPath) VERSION = (0, 7, 18) default_app_config = 'django_q.apps.DjangoQConfig' # root imports will slowly be deprecated. # please import from the relevant sub modules split_version = get_version().split('.') if split_version[1][0] != '9' and split_version[1][:2] != '10': from .tasks import async, schedule, result, result_group, fetch, fetch_group, count_group, delete_group, queue_size from .models import Task, Schedule, Success, Failure from .cluster import Cluster from .status import Stat from .brokers import get_broker __all__ = ['conf', 'cluster', 'models', 'tasks'] <commit_msg>Add django 1.11 to the import check<commit_after>
import os import sys from django import get_version myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.insert(0, myPath) VERSION = (0, 7, 18) default_app_config = 'django_q.apps.DjangoQConfig' # root imports will slowly be deprecated. # please import from the relevant sub modules split_version = get_version().split('.') if split_version[1] not in ('9', '10', '11'): from .tasks import async, schedule, result, result_group, fetch, fetch_group, count_group, delete_group, queue_size from .models import Task, Schedule, Success, Failure from .cluster import Cluster from .status import Stat from .brokers import get_broker __all__ = ['conf', 'cluster', 'models', 'tasks']
import os import sys from django import get_version myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.insert(0, myPath) VERSION = (0, 7, 18) default_app_config = 'django_q.apps.DjangoQConfig' # root imports will slowly be deprecated. # please import from the relevant sub modules split_version = get_version().split('.') if split_version[1][0] != '9' and split_version[1][:2] != '10': from .tasks import async, schedule, result, result_group, fetch, fetch_group, count_group, delete_group, queue_size from .models import Task, Schedule, Success, Failure from .cluster import Cluster from .status import Stat from .brokers import get_broker __all__ = ['conf', 'cluster', 'models', 'tasks'] Add django 1.11 to the import checkimport os import sys from django import get_version myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.insert(0, myPath) VERSION = (0, 7, 18) default_app_config = 'django_q.apps.DjangoQConfig' # root imports will slowly be deprecated. # please import from the relevant sub modules split_version = get_version().split('.') if split_version[1] not in ('9', '10', '11'): from .tasks import async, schedule, result, result_group, fetch, fetch_group, count_group, delete_group, queue_size from .models import Task, Schedule, Success, Failure from .cluster import Cluster from .status import Stat from .brokers import get_broker __all__ = ['conf', 'cluster', 'models', 'tasks']
<commit_before>import os import sys from django import get_version myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.insert(0, myPath) VERSION = (0, 7, 18) default_app_config = 'django_q.apps.DjangoQConfig' # root imports will slowly be deprecated. # please import from the relevant sub modules split_version = get_version().split('.') if split_version[1][0] != '9' and split_version[1][:2] != '10': from .tasks import async, schedule, result, result_group, fetch, fetch_group, count_group, delete_group, queue_size from .models import Task, Schedule, Success, Failure from .cluster import Cluster from .status import Stat from .brokers import get_broker __all__ = ['conf', 'cluster', 'models', 'tasks'] <commit_msg>Add django 1.11 to the import check<commit_after>import os import sys from django import get_version myPath = os.path.dirname(os.path.abspath(__file__)) sys.path.insert(0, myPath) VERSION = (0, 7, 18) default_app_config = 'django_q.apps.DjangoQConfig' # root imports will slowly be deprecated. # please import from the relevant sub modules split_version = get_version().split('.') if split_version[1] not in ('9', '10', '11'): from .tasks import async, schedule, result, result_group, fetch, fetch_group, count_group, delete_group, queue_size from .models import Task, Schedule, Success, Failure from .cluster import Cluster from .status import Stat from .brokers import get_broker __all__ = ['conf', 'cluster', 'models', 'tasks']
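String membership tests on version components stay brittle — a hypothetical `'12'` or `'2'` would each need yet another entry in the tuple. A sketch of the numeric comparison that avoids the whack-a-mole; this is not django-q's actual code, but `django.VERSION` is the documented version tuple:

```python
import django

# django.VERSION looks like (1, 11, 2, 'final', 0); comparing the numeric
# prefix handles 1.12, 2.0, and beyond without listing each release by hand.
if django.VERSION < (1, 9):
    # legacy root imports would go here
    pass
```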
b2cbc41f0ba422bfa666022e93be135899441430
tohu/cloning.py
tohu/cloning.py
__all__ = ['CloneableMeta'] class CloneableMeta(type): def __new__(metacls, cg_name, bases, clsdict): new_cls = super(CloneableMeta, metacls).__new__(metacls, cg_name, bases, clsdict) return new_cls
__all__ = ['CloneableMeta'] def attach_new_init_method(cls): """ Replace the existing cls.__init__() method with a new one which also initialises the _clones attribute to an empty list. """ orig_init = cls.__init__ def new_init(self, *args, **kwargs): orig_init(self, *args, **kwargs) self._clones = [] cls.__init__ = new_init class CloneableMeta(type): def __new__(metacls, cg_name, bases, clsdict): new_cls = super(CloneableMeta, metacls).__new__(metacls, cg_name, bases, clsdict) attach_new_init_method(new_cls) return new_cls
Add _clones attribute in new init method
Add _clones attribute in new init method
Python
mit
maxalbert/tohu
__all__ = ['CloneableMeta'] class CloneableMeta(type): def __new__(metacls, cg_name, bases, clsdict): new_cls = super(CloneableMeta, metacls).__new__(metacls, cg_name, bases, clsdict) return new_clsAdd _clones attribute in new init method
__all__ = ['CloneableMeta'] def attach_new_init_method(cls): """ Replace the existing cls.__init__() method with a new one which also initialises the _clones attribute to an empty list. """ orig_init = cls.__init__ def new_init(self, *args, **kwargs): orig_init(self, *args, **kwargs) self._clones = [] cls.__init__ = new_init class CloneableMeta(type): def __new__(metacls, cg_name, bases, clsdict): new_cls = super(CloneableMeta, metacls).__new__(metacls, cg_name, bases, clsdict) attach_new_init_method(new_cls) return new_cls
<commit_before>__all__ = ['CloneableMeta'] class CloneableMeta(type): def __new__(metacls, cg_name, bases, clsdict): new_cls = super(CloneableMeta, metacls).__new__(metacls, cg_name, bases, clsdict) return new_cls<commit_msg>Add _clones attribute in new init method<commit_after>
__all__ = ['CloneableMeta'] def attach_new_init_method(cls): """ Replace the existing cls.__init__() method with a new one which also initialises the _clones attribute to an empty list. """ orig_init = cls.__init__ def new_init(self, *args, **kwargs): orig_init(self, *args, **kwargs) self._clones = [] cls.__init__ = new_init class CloneableMeta(type): def __new__(metacls, cg_name, bases, clsdict): new_cls = super(CloneableMeta, metacls).__new__(metacls, cg_name, bases, clsdict) attach_new_init_method(new_cls) return new_cls
__all__ = ['CloneableMeta'] class CloneableMeta(type): def __new__(metacls, cg_name, bases, clsdict): new_cls = super(CloneableMeta, metacls).__new__(metacls, cg_name, bases, clsdict) return new_clsAdd _clones attribute in new init method__all__ = ['CloneableMeta'] def attach_new_init_method(cls): """ Replace the existing cls.__init__() method with a new one which also initialises the _clones attribute to an empty list. """ orig_init = cls.__init__ def new_init(self, *args, **kwargs): orig_init(self, *args, **kwargs) self._clones = [] cls.__init__ = new_init class CloneableMeta(type): def __new__(metacls, cg_name, bases, clsdict): new_cls = super(CloneableMeta, metacls).__new__(metacls, cg_name, bases, clsdict) attach_new_init_method(new_cls) return new_cls
<commit_before>__all__ = ['CloneableMeta'] class CloneableMeta(type): def __new__(metacls, cg_name, bases, clsdict): new_cls = super(CloneableMeta, metacls).__new__(metacls, cg_name, bases, clsdict) return new_cls<commit_msg>Add _clones attribute in new init method<commit_after>__all__ = ['CloneableMeta'] def attach_new_init_method(cls): """ Replace the existing cls.__init__() method with a new one which also initialises the _clones attribute to an empty list. """ orig_init = cls.__init__ def new_init(self, *args, **kwargs): orig_init(self, *args, **kwargs) self._clones = [] cls.__init__ = new_init class CloneableMeta(type): def __new__(metacls, cg_name, bases, clsdict): new_cls = super(CloneableMeta, metacls).__new__(metacls, cg_name, bases, clsdict) attach_new_init_method(new_cls) return new_cls
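A usage sketch of the metaclass pattern this record introduces — it assumes the module is importable as tohu.cloning, per the file path above, and uses Python 3 metaclass syntax:

# Sketch: classes built through CloneableMeta gain a _clones list
# automatically, because the metaclass wraps their __init__.
from tohu.cloning import CloneableMeta

class Generator(metaclass=CloneableMeta):
    def __init__(self, seed):
        self.seed = seed

g = Generator(seed=42)
assert g.seed == 42      # the original __init__ still ran
assert g._clones == []   # added by the wrapped __init__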
ed658354ebfa068441b974fe61056ed74aa4254d
lmod/__init__.py
lmod/__init__.py
import os # require by lmod output evaluated by exec() from functools import partial from os import environ from subprocess import Popen, PIPE LMOD_SYSTEM_NAME = environ.get('LMOD_SYSTEM_NAME', '') def module(command, *args): cmd = (environ['LMOD_CMD'], 'python', '--terse', command) result = Popen(cmd + args, stdout=PIPE, stderr=PIPE) if command in ('load', 'unload', 'restore', 'save'): exec(result.stdout.read()) return result.stderr.read().decode() def avail(): string = module('avail') modules = [] for entry in string.split(): if not (entry.startswith('/') or entry.endswith('/')): modules.append(entry) return modules def list(hide_hidden=False): string = module('list').strip() if string != "No modules loaded": modules = string.split() if hide_hidden: modules = [m for m in modules if m.rsplit('/', 1)[-1][0] != '.'] return modules return [] def savelist(system=LMOD_SYSTEM_NAME): names = module('savelist').split() if system: suffix = '.{}'.format(system) n = len(suffix) names = [name[:-n] for name in names if name.endswith(suffix)] return names show = partial(module, 'show') load = partial(module, 'load') unload = partial(module, 'unload') restore = partial(module, 'restore') save = partial(module, 'save')
import os # require by lmod output evaluated by exec() from functools import partial from os import environ from subprocess import Popen, PIPE LMOD_SYSTEM_NAME = environ.get('LMOD_SYSTEM_NAME', '') def module(command, *args): cmd = (environ['LMOD_CMD'], 'python', '--terse', command) result = Popen(cmd + args, stdout=PIPE, stderr=PIPE) if command in ('load', 'unload', 'restore', 'save'): exec(result.stdout.read()) return result.stderr.read().decode() def avail(): string = module('avail') modules = [] for entry in string.split(): if not (entry.startswith('/') or entry.endswith('/')): modules.append(entry) return modules def list(hide_hidden=False): string = module('list').strip() if string != "No modules loaded": modules = string.split() if hide_hidden: modules = [m for m in modules if m.rsplit('/', 1)[-1][0] != '.'] return modules return [] def savelist(): return module('savelist').split() show = partial(module, 'show') load = partial(module, 'load') unload = partial(module, 'unload') restore = partial(module, 'restore') save = partial(module, 'save')
Remove system name from savelist in lmod
Remove system name from savelist in lmod
Python
mit
cmd-ntrf/jupyter-lmod,cmd-ntrf/jupyter-lmod,cmd-ntrf/jupyter-lmod
import os # require by lmod output evaluated by exec() from functools import partial from os import environ from subprocess import Popen, PIPE LMOD_SYSTEM_NAME = environ.get('LMOD_SYSTEM_NAME', '') def module(command, *args): cmd = (environ['LMOD_CMD'], 'python', '--terse', command) result = Popen(cmd + args, stdout=PIPE, stderr=PIPE) if command in ('load', 'unload', 'restore', 'save'): exec(result.stdout.read()) return result.stderr.read().decode() def avail(): string = module('avail') modules = [] for entry in string.split(): if not (entry.startswith('/') or entry.endswith('/')): modules.append(entry) return modules def list(hide_hidden=False): string = module('list').strip() if string != "No modules loaded": modules = string.split() if hide_hidden: modules = [m for m in modules if m.rsplit('/', 1)[-1][0] != '.'] return modules return [] def savelist(system=LMOD_SYSTEM_NAME): names = module('savelist').split() if system: suffix = '.{}'.format(system) n = len(suffix) names = [name[:-n] for name in names if name.endswith(suffix)] return names show = partial(module, 'show') load = partial(module, 'load') unload = partial(module, 'unload') restore = partial(module, 'restore') save = partial(module, 'save') Remove system name from savelist in lmod
import os # require by lmod output evaluated by exec() from functools import partial from os import environ from subprocess import Popen, PIPE LMOD_SYSTEM_NAME = environ.get('LMOD_SYSTEM_NAME', '') def module(command, *args): cmd = (environ['LMOD_CMD'], 'python', '--terse', command) result = Popen(cmd + args, stdout=PIPE, stderr=PIPE) if command in ('load', 'unload', 'restore', 'save'): exec(result.stdout.read()) return result.stderr.read().decode() def avail(): string = module('avail') modules = [] for entry in string.split(): if not (entry.startswith('/') or entry.endswith('/')): modules.append(entry) return modules def list(hide_hidden=False): string = module('list').strip() if string != "No modules loaded": modules = string.split() if hide_hidden: modules = [m for m in modules if m.rsplit('/', 1)[-1][0] != '.'] return modules return [] def savelist(): return module('savelist').split() show = partial(module, 'show') load = partial(module, 'load') unload = partial(module, 'unload') restore = partial(module, 'restore') save = partial(module, 'save')
<commit_before>import os # require by lmod output evaluated by exec() from functools import partial from os import environ from subprocess import Popen, PIPE LMOD_SYSTEM_NAME = environ.get('LMOD_SYSTEM_NAME', '') def module(command, *args): cmd = (environ['LMOD_CMD'], 'python', '--terse', command) result = Popen(cmd + args, stdout=PIPE, stderr=PIPE) if command in ('load', 'unload', 'restore', 'save'): exec(result.stdout.read()) return result.stderr.read().decode() def avail(): string = module('avail') modules = [] for entry in string.split(): if not (entry.startswith('/') or entry.endswith('/')): modules.append(entry) return modules def list(hide_hidden=False): string = module('list').strip() if string != "No modules loaded": modules = string.split() if hide_hidden: modules = [m for m in modules if m.rsplit('/', 1)[-1][0] != '.'] return modules return [] def savelist(system=LMOD_SYSTEM_NAME): names = module('savelist').split() if system: suffix = '.{}'.format(system) n = len(suffix) names = [name[:-n] for name in names if name.endswith(suffix)] return names show = partial(module, 'show') load = partial(module, 'load') unload = partial(module, 'unload') restore = partial(module, 'restore') save = partial(module, 'save') <commit_msg>Remove system name from savelist in lmod<commit_after>
import os # require by lmod output evaluated by exec() from functools import partial from os import environ from subprocess import Popen, PIPE LMOD_SYSTEM_NAME = environ.get('LMOD_SYSTEM_NAME', '') def module(command, *args): cmd = (environ['LMOD_CMD'], 'python', '--terse', command) result = Popen(cmd + args, stdout=PIPE, stderr=PIPE) if command in ('load', 'unload', 'restore', 'save'): exec(result.stdout.read()) return result.stderr.read().decode() def avail(): string = module('avail') modules = [] for entry in string.split(): if not (entry.startswith('/') or entry.endswith('/')): modules.append(entry) return modules def list(hide_hidden=False): string = module('list').strip() if string != "No modules loaded": modules = string.split() if hide_hidden: modules = [m for m in modules if m.rsplit('/', 1)[-1][0] != '.'] return modules return [] def savelist(): return module('savelist').split() show = partial(module, 'show') load = partial(module, 'load') unload = partial(module, 'unload') restore = partial(module, 'restore') save = partial(module, 'save')
import os # require by lmod output evaluated by exec() from functools import partial from os import environ from subprocess import Popen, PIPE LMOD_SYSTEM_NAME = environ.get('LMOD_SYSTEM_NAME', '') def module(command, *args): cmd = (environ['LMOD_CMD'], 'python', '--terse', command) result = Popen(cmd + args, stdout=PIPE, stderr=PIPE) if command in ('load', 'unload', 'restore', 'save'): exec(result.stdout.read()) return result.stderr.read().decode() def avail(): string = module('avail') modules = [] for entry in string.split(): if not (entry.startswith('/') or entry.endswith('/')): modules.append(entry) return modules def list(hide_hidden=False): string = module('list').strip() if string != "No modules loaded": modules = string.split() if hide_hidden: modules = [m for m in modules if m.rsplit('/', 1)[-1][0] != '.'] return modules return [] def savelist(system=LMOD_SYSTEM_NAME): names = module('savelist').split() if system: suffix = '.{}'.format(system) n = len(suffix) names = [name[:-n] for name in names if name.endswith(suffix)] return names show = partial(module, 'show') load = partial(module, 'load') unload = partial(module, 'unload') restore = partial(module, 'restore') save = partial(module, 'save') Remove system name from savelist in lmodimport os # require by lmod output evaluated by exec() from functools import partial from os import environ from subprocess import Popen, PIPE LMOD_SYSTEM_NAME = environ.get('LMOD_SYSTEM_NAME', '') def module(command, *args): cmd = (environ['LMOD_CMD'], 'python', '--terse', command) result = Popen(cmd + args, stdout=PIPE, stderr=PIPE) if command in ('load', 'unload', 'restore', 'save'): exec(result.stdout.read()) return result.stderr.read().decode() def avail(): string = module('avail') modules = [] for entry in string.split(): if not (entry.startswith('/') or entry.endswith('/')): modules.append(entry) return modules def list(hide_hidden=False): string = module('list').strip() if string != "No modules loaded": modules = string.split() if hide_hidden: modules = [m for m in modules if m.rsplit('/', 1)[-1][0] != '.'] return modules return [] def savelist(): return module('savelist').split() show = partial(module, 'show') load = partial(module, 'load') unload = partial(module, 'unload') restore = partial(module, 'restore') save = partial(module, 'save')
<commit_before>import os # require by lmod output evaluated by exec() from functools import partial from os import environ from subprocess import Popen, PIPE LMOD_SYSTEM_NAME = environ.get('LMOD_SYSTEM_NAME', '') def module(command, *args): cmd = (environ['LMOD_CMD'], 'python', '--terse', command) result = Popen(cmd + args, stdout=PIPE, stderr=PIPE) if command in ('load', 'unload', 'restore', 'save'): exec(result.stdout.read()) return result.stderr.read().decode() def avail(): string = module('avail') modules = [] for entry in string.split(): if not (entry.startswith('/') or entry.endswith('/')): modules.append(entry) return modules def list(hide_hidden=False): string = module('list').strip() if string != "No modules loaded": modules = string.split() if hide_hidden: modules = [m for m in modules if m.rsplit('/', 1)[-1][0] != '.'] return modules return [] def savelist(system=LMOD_SYSTEM_NAME): names = module('savelist').split() if system: suffix = '.{}'.format(system) n = len(suffix) names = [name[:-n] for name in names if name.endswith(suffix)] return names show = partial(module, 'show') load = partial(module, 'load') unload = partial(module, 'unload') restore = partial(module, 'restore') save = partial(module, 'save') <commit_msg>Remove system name from savelist in lmod<commit_after>import os # require by lmod output evaluated by exec() from functools import partial from os import environ from subprocess import Popen, PIPE LMOD_SYSTEM_NAME = environ.get('LMOD_SYSTEM_NAME', '') def module(command, *args): cmd = (environ['LMOD_CMD'], 'python', '--terse', command) result = Popen(cmd + args, stdout=PIPE, stderr=PIPE) if command in ('load', 'unload', 'restore', 'save'): exec(result.stdout.read()) return result.stderr.read().decode() def avail(): string = module('avail') modules = [] for entry in string.split(): if not (entry.startswith('/') or entry.endswith('/')): modules.append(entry) return modules def list(hide_hidden=False): string = module('list').strip() if string != "No modules loaded": modules = string.split() if hide_hidden: modules = [m for m in modules if m.rsplit('/', 1)[-1][0] != '.'] return modules return [] def savelist(): return module('savelist').split() show = partial(module, 'show') load = partial(module, 'load') unload = partial(module, 'unload') restore = partial(module, 'restore') save = partial(module, 'save')
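The lmod module above builds its public API by freezing the first argument of one generic subprocess runner; a standalone sketch of that functools.partial dispatch idiom, using echo as a harmless stand-in for the Lmod binary:

# Sketch: one generic runner, many named commands via partial.
from functools import partial
from subprocess import PIPE, Popen

def module(command, *args):
    # 'echo' stands in for environ['LMOD_CMD'] in this sketch.
    result = Popen(('echo', command) + args, stdout=PIPE, stderr=PIPE)
    return result.stdout.read().decode()

show = partial(module, 'show')
load = partial(module, 'load')

print(show('gcc'))   # runs: echo show gcc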
9d162a2919a1c9b56ded74d40963fa022fc7943b
src/config/settings/testing.py
src/config/settings/testing.py
"""Django configuration for testing and CI environments.""" from .common import * # Use in-memory file storage DEFAULT_FILE_STORAGE = 'inmemorystorage.InMemoryStorage' # Speed! PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.MD5PasswordHasher', ) # Database DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', 'TEST': {} } } env = get_secret("ENVIRONMENT") import sys if os.path.isdir('/Volumes/RAMDisk') and not env == 'ci' and not 'create-db' in sys.argv: # and this allows you to use --reuse-db to skip re-creating the db, # even faster! # # To create the RAMDisk, use bash: # $ hdiutil attach -nomount ram://$((2 * 1024 * SIZE_IN_MB)) # /dev/disk2 # $ diskutil eraseVolume HFS+ RAMDisk /dev/disk2 DATABASES['default']['TEST']['NAME'] = '/Volumes/RAMDisk/unkenmathe.test.db.sqlite3'
"""Django configuration for testing and CI environments.""" from .common import * # Use in-memory file storage DEFAULT_FILE_STORAGE = 'inmemorystorage.InMemoryStorage' # Speed! PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.MD5PasswordHasher', ) # Database DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', 'TEST': {} } } # Disable logging import logging logging.disable(logging.CRITICAL) env = get_secret("ENVIRONMENT") import sys if os.path.isdir('/Volumes/RAMDisk') and not env == 'ci' and not 'create-db' in sys.argv: # and this allows you to use --reuse-db to skip re-creating the db, # even faster! # # To create the RAMDisk, use bash: # $ hdiutil attach -nomount ram://$((2 * 1024 * SIZE_IN_MB)) # /dev/disk2 # $ diskutil eraseVolume HFS+ RAMDisk /dev/disk2 DATABASES['default']['TEST']['NAME'] = '/Volumes/RAMDisk/unkenmathe.test.db.sqlite3'
Disable logging in test runs
Disable logging in test runs SPEED!
Python
agpl-3.0
FlowFX/unkenmathe.de,FlowFX/unkenmathe.de,FlowFX/unkenmathe.de,FlowFX/unkenmathe.de
"""Django configuration for testing and CI environments.""" from .common import * # Use in-memory file storage DEFAULT_FILE_STORAGE = 'inmemorystorage.InMemoryStorage' # Speed! PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.MD5PasswordHasher', ) # Database DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', 'TEST': {} } } env = get_secret("ENVIRONMENT") import sys if os.path.isdir('/Volumes/RAMDisk') and not env == 'ci' and not 'create-db' in sys.argv: # and this allows you to use --reuse-db to skip re-creating the db, # even faster! # # To create the RAMDisk, use bash: # $ hdiutil attach -nomount ram://$((2 * 1024 * SIZE_IN_MB)) # /dev/disk2 # $ diskutil eraseVolume HFS+ RAMDisk /dev/disk2 DATABASES['default']['TEST']['NAME'] = '/Volumes/RAMDisk/unkenmathe.test.db.sqlite3' Disable logging in test runs SPEED!
"""Django configuration for testing and CI environments.""" from .common import * # Use in-memory file storage DEFAULT_FILE_STORAGE = 'inmemorystorage.InMemoryStorage' # Speed! PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.MD5PasswordHasher', ) # Database DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', 'TEST': {} } } # Disable logging import logging logging.disable(logging.CRITICAL) env = get_secret("ENVIRONMENT") import sys if os.path.isdir('/Volumes/RAMDisk') and not env == 'ci' and not 'create-db' in sys.argv: # and this allows you to use --reuse-db to skip re-creating the db, # even faster! # # To create the RAMDisk, use bash: # $ hdiutil attach -nomount ram://$((2 * 1024 * SIZE_IN_MB)) # /dev/disk2 # $ diskutil eraseVolume HFS+ RAMDisk /dev/disk2 DATABASES['default']['TEST']['NAME'] = '/Volumes/RAMDisk/unkenmathe.test.db.sqlite3'
<commit_before>"""Django configuration for testing and CI environments.""" from .common import * # Use in-memory file storage DEFAULT_FILE_STORAGE = 'inmemorystorage.InMemoryStorage' # Speed! PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.MD5PasswordHasher', ) # Database DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', 'TEST': {} } } env = get_secret("ENVIRONMENT") import sys if os.path.isdir('/Volumes/RAMDisk') and not env == 'ci' and not 'create-db' in sys.argv: # and this allows you to use --reuse-db to skip re-creating the db, # even faster! # # To create the RAMDisk, use bash: # $ hdiutil attach -nomount ram://$((2 * 1024 * SIZE_IN_MB)) # /dev/disk2 # $ diskutil eraseVolume HFS+ RAMDisk /dev/disk2 DATABASES['default']['TEST']['NAME'] = '/Volumes/RAMDisk/unkenmathe.test.db.sqlite3' <commit_msg>Disable logging in test runs SPEED!<commit_after>
"""Django configuration for testing and CI environments.""" from .common import * # Use in-memory file storage DEFAULT_FILE_STORAGE = 'inmemorystorage.InMemoryStorage' # Speed! PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.MD5PasswordHasher', ) # Database DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', 'TEST': {} } } # Disable logging import logging logging.disable(logging.CRITICAL) env = get_secret("ENVIRONMENT") import sys if os.path.isdir('/Volumes/RAMDisk') and not env == 'ci' and not 'create-db' in sys.argv: # and this allows you to use --reuse-db to skip re-creating the db, # even faster! # # To create the RAMDisk, use bash: # $ hdiutil attach -nomount ram://$((2 * 1024 * SIZE_IN_MB)) # /dev/disk2 # $ diskutil eraseVolume HFS+ RAMDisk /dev/disk2 DATABASES['default']['TEST']['NAME'] = '/Volumes/RAMDisk/unkenmathe.test.db.sqlite3'
"""Django configuration for testing and CI environments.""" from .common import * # Use in-memory file storage DEFAULT_FILE_STORAGE = 'inmemorystorage.InMemoryStorage' # Speed! PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.MD5PasswordHasher', ) # Database DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', 'TEST': {} } } env = get_secret("ENVIRONMENT") import sys if os.path.isdir('/Volumes/RAMDisk') and not env == 'ci' and not 'create-db' in sys.argv: # and this allows you to use --reuse-db to skip re-creating the db, # even faster! # # To create the RAMDisk, use bash: # $ hdiutil attach -nomount ram://$((2 * 1024 * SIZE_IN_MB)) # /dev/disk2 # $ diskutil eraseVolume HFS+ RAMDisk /dev/disk2 DATABASES['default']['TEST']['NAME'] = '/Volumes/RAMDisk/unkenmathe.test.db.sqlite3' Disable logging in test runs SPEED!"""Django configuration for testing and CI environments.""" from .common import * # Use in-memory file storage DEFAULT_FILE_STORAGE = 'inmemorystorage.InMemoryStorage' # Speed! PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.MD5PasswordHasher', ) # Database DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', 'TEST': {} } } # Disable logging import logging logging.disable(logging.CRITICAL) env = get_secret("ENVIRONMENT") import sys if os.path.isdir('/Volumes/RAMDisk') and not env == 'ci' and not 'create-db' in sys.argv: # and this allows you to use --reuse-db to skip re-creating the db, # even faster! # # To create the RAMDisk, use bash: # $ hdiutil attach -nomount ram://$((2 * 1024 * SIZE_IN_MB)) # /dev/disk2 # $ diskutil eraseVolume HFS+ RAMDisk /dev/disk2 DATABASES['default']['TEST']['NAME'] = '/Volumes/RAMDisk/unkenmathe.test.db.sqlite3'
<commit_before>"""Django configuration for testing and CI environments.""" from .common import * # Use in-memory file storage DEFAULT_FILE_STORAGE = 'inmemorystorage.InMemoryStorage' # Speed! PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.MD5PasswordHasher', ) # Database DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', 'TEST': {} } } env = get_secret("ENVIRONMENT") import sys if os.path.isdir('/Volumes/RAMDisk') and not env == 'ci' and not 'create-db' in sys.argv: # and this allows you to use --reuse-db to skip re-creating the db, # even faster! # # To create the RAMDisk, use bash: # $ hdiutil attach -nomount ram://$((2 * 1024 * SIZE_IN_MB)) # /dev/disk2 # $ diskutil eraseVolume HFS+ RAMDisk /dev/disk2 DATABASES['default']['TEST']['NAME'] = '/Volumes/RAMDisk/unkenmathe.test.db.sqlite3' <commit_msg>Disable logging in test runs SPEED!<commit_after>"""Django configuration for testing and CI environments.""" from .common import * # Use in-memory file storage DEFAULT_FILE_STORAGE = 'inmemorystorage.InMemoryStorage' # Speed! PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.MD5PasswordHasher', ) # Database DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', 'TEST': {} } } # Disable logging import logging logging.disable(logging.CRITICAL) env = get_secret("ENVIRONMENT") import sys if os.path.isdir('/Volumes/RAMDisk') and not env == 'ci' and not 'create-db' in sys.argv: # and this allows you to use --reuse-db to skip re-creating the db, # even faster! # # To create the RAMDisk, use bash: # $ hdiutil attach -nomount ram://$((2 * 1024 * SIZE_IN_MB)) # /dev/disk2 # $ diskutil eraseVolume HFS+ RAMDisk /dev/disk2 DATABASES['default']['TEST']['NAME'] = '/Volumes/RAMDisk/unkenmathe.test.db.sqlite3'
9e706341fafc8a931a662ea497df39fff6f9408b
wooey/migrations/0028_add_script_subparser.py
wooey/migrations/0028_add_script_subparser.py
# -*- coding: utf-8 -*- # Generated by Django 1.9.4 on 2017-04-25 09:25 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion import wooey.models.mixins class Migration(migrations.Migration): dependencies = [ ('wooey', '0027_parameter_order'), ] operations = [ migrations.CreateModel( name='ScriptParser', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(blank=True, max_length=255, default='')), ('script_version', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='wooey.ScriptVersion')), ], bases=(wooey.models.mixins.WooeyPy2Mixin, models.Model), ), migrations.AddField( model_name='scriptparameter', name='parser', field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='wooey.ScriptParser'), preserve_default=False, ), ]
# -*- coding: utf-8 -*- # Generated by Django 1.9.4 on 2017-04-25 09:25 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion import wooey.models.mixins class Migration(migrations.Migration): dependencies = [ ('wooey', '0027_parameter_order'), ] operations = [ migrations.CreateModel( name='ScriptParser', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(blank=True, max_length=255, default='')), ('script_version', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='wooey.ScriptVersion')), ], bases=(wooey.models.mixins.WooeyPy2Mixin, models.Model), ), migrations.AddField( model_name='scriptparameter', name='parser', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='wooey.ScriptParser'), preserve_default=False, ), ]
Add in shim nullable FK for initial scriptparameter update
Add in shim nullable FK for initial scriptparameter update
Python
bsd-3-clause
wooey/Wooey,wooey/Wooey,wooey/Wooey,wooey/Wooey
# -*- coding: utf-8 -*- # Generated by Django 1.9.4 on 2017-04-25 09:25 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion import wooey.models.mixins class Migration(migrations.Migration): dependencies = [ ('wooey', '0027_parameter_order'), ] operations = [ migrations.CreateModel( name='ScriptParser', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(blank=True, max_length=255, default='')), ('script_version', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='wooey.ScriptVersion')), ], bases=(wooey.models.mixins.WooeyPy2Mixin, models.Model), ), migrations.AddField( model_name='scriptparameter', name='parser', field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='wooey.ScriptParser'), preserve_default=False, ), ] Add in shim nullable FK for initial scriptparameter update
# -*- coding: utf-8 -*- # Generated by Django 1.9.4 on 2017-04-25 09:25 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion import wooey.models.mixins class Migration(migrations.Migration): dependencies = [ ('wooey', '0027_parameter_order'), ] operations = [ migrations.CreateModel( name='ScriptParser', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(blank=True, max_length=255, default='')), ('script_version', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='wooey.ScriptVersion')), ], bases=(wooey.models.mixins.WooeyPy2Mixin, models.Model), ), migrations.AddField( model_name='scriptparameter', name='parser', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='wooey.ScriptParser'), preserve_default=False, ), ]
<commit_before># -*- coding: utf-8 -*- # Generated by Django 1.9.4 on 2017-04-25 09:25 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion import wooey.models.mixins class Migration(migrations.Migration): dependencies = [ ('wooey', '0027_parameter_order'), ] operations = [ migrations.CreateModel( name='ScriptParser', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(blank=True, max_length=255, default='')), ('script_version', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='wooey.ScriptVersion')), ], bases=(wooey.models.mixins.WooeyPy2Mixin, models.Model), ), migrations.AddField( model_name='scriptparameter', name='parser', field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='wooey.ScriptParser'), preserve_default=False, ), ] <commit_msg>Add in shim nullable FK for initial scriptparameter update<commit_after>
# -*- coding: utf-8 -*- # Generated by Django 1.9.4 on 2017-04-25 09:25 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion import wooey.models.mixins class Migration(migrations.Migration): dependencies = [ ('wooey', '0027_parameter_order'), ] operations = [ migrations.CreateModel( name='ScriptParser', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(blank=True, max_length=255, default='')), ('script_version', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='wooey.ScriptVersion')), ], bases=(wooey.models.mixins.WooeyPy2Mixin, models.Model), ), migrations.AddField( model_name='scriptparameter', name='parser', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='wooey.ScriptParser'), preserve_default=False, ), ]
# -*- coding: utf-8 -*- # Generated by Django 1.9.4 on 2017-04-25 09:25 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion import wooey.models.mixins class Migration(migrations.Migration): dependencies = [ ('wooey', '0027_parameter_order'), ] operations = [ migrations.CreateModel( name='ScriptParser', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(blank=True, max_length=255, default='')), ('script_version', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='wooey.ScriptVersion')), ], bases=(wooey.models.mixins.WooeyPy2Mixin, models.Model), ), migrations.AddField( model_name='scriptparameter', name='parser', field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='wooey.ScriptParser'), preserve_default=False, ), ] Add in shim nullable FK for initial scriptparameter update# -*- coding: utf-8 -*- # Generated by Django 1.9.4 on 2017-04-25 09:25 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion import wooey.models.mixins class Migration(migrations.Migration): dependencies = [ ('wooey', '0027_parameter_order'), ] operations = [ migrations.CreateModel( name='ScriptParser', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(blank=True, max_length=255, default='')), ('script_version', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='wooey.ScriptVersion')), ], bases=(wooey.models.mixins.WooeyPy2Mixin, models.Model), ), migrations.AddField( model_name='scriptparameter', name='parser', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='wooey.ScriptParser'), preserve_default=False, ), ]
<commit_before># -*- coding: utf-8 -*- # Generated by Django 1.9.4 on 2017-04-25 09:25 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion import wooey.models.mixins class Migration(migrations.Migration): dependencies = [ ('wooey', '0027_parameter_order'), ] operations = [ migrations.CreateModel( name='ScriptParser', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(blank=True, max_length=255, default='')), ('script_version', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='wooey.ScriptVersion')), ], bases=(wooey.models.mixins.WooeyPy2Mixin, models.Model), ), migrations.AddField( model_name='scriptparameter', name='parser', field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='wooey.ScriptParser'), preserve_default=False, ), ] <commit_msg>Add in shim nullable FK for initial scriptparameter update<commit_after># -*- coding: utf-8 -*- # Generated by Django 1.9.4 on 2017-04-25 09:25 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion import wooey.models.mixins class Migration(migrations.Migration): dependencies = [ ('wooey', '0027_parameter_order'), ] operations = [ migrations.CreateModel( name='ScriptParser', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(blank=True, max_length=255, default='')), ('script_version', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='wooey.ScriptVersion')), ], bases=(wooey.models.mixins.WooeyPy2Mixin, models.Model), ), migrations.AddField( model_name='scriptparameter', name='parser', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='wooey.ScriptParser'), preserve_default=False, ), ]
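Loosening the new FK to null=True, as this record does, is the usual first step when a required column is introduced over existing rows; a schematic sketch of the data-migration step that typically follows — app, model, and field names here are placeholders, not Wooey's actual schema:

# Sketch: backfill that usually follows a nullable-FK shim migration.
from django.db import migrations

def backfill_parser(apps, schema_editor):
    Parser = apps.get_model('myapp', 'Parser')        # placeholder names
    Parameter = apps.get_model('myapp', 'Parameter')
    default = Parser.objects.first()
    Parameter.objects.filter(parser__isnull=True).update(parser=default)

class Migration(migrations.Migration):
    dependencies = [('myapp', '0002_add_nullable_fk')]
    operations = [
        migrations.RunPython(backfill_parser, migrations.RunPython.noop),
    ]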
255d4d551b9eb576c787396f67ca165addf99dbe
cropimg/thumbnail_processors.py
cropimg/thumbnail_processors.py
try: from PIL import Image except ImportError: import Image def crop_ci_box(im, size, ci_box=False, **kwargs): """ Crop image based on very specific pixel values (x,y,width,height) ci_box Crop the source image to exactly match the requested box """ if not ci_box: return im assert isinstance(ci_box, basestring) if ci_box.count(",") != 3: raise ValueError("ci_box must contain exactly 4 values x,y,width,height") x, y, width, height = [int(i) for i in ci_box.split(",")] # Handle one-dimensional targets. im = im.convert("RGBA").crop((x, y, x + width, y + height)) return im
try: from PIL import Image except ImportError: import Image def crop_box(im, size, ci_box=False, **kwargs): """ Crop image based on very specific pixel values (x,y,width,height) ci_box Crop the source image to exactly match the requested box """ if not ci_box: return im assert isinstance(ci_box, basestring) if ci_box.count(",") != 3: raise ValueError("ci_box must contain exactly 4 values x,y,width,height") x, y, width, height = [int(i) for i in ci_box.split(",")] # Handle one-dimensional targets. im = im.convert("RGBA").crop((x, y, x + width, y + height)) return im
Rename the processor back to its original name, crop_box
Rename the processor back to its original name, crop_box
Python
mit
rewardz/cropimg-django,rewardz/cropimg-django,rewardz/cropimg-django
try: from PIL import Image except ImportError: import Image def crop_ci_box(im, size, ci_box=False, **kwargs): """ Crop image based on very specific pixel values (x,y,width,height) ci_box Crop the source image to exactly match the requested box """ if not ci_box: return im assert isinstance(ci_box, basestring) if ci_box.count(",") != 3: raise ValueError("ci_box must contain exactly 4 values x,y,width,height") x, y, width, height = [int(i) for i in ci_box.split(",")] # Handle one-dimensional targets. im = im.convert("RGBA").crop((x, y, x + width, y + height)) return im Rename the processor back to its original name, crop_box
try: from PIL import Image except ImportError: import Image def crop_box(im, size, ci_box=False, **kwargs): """ Crop image based on very specific pixel values (x,y,width,height) ci_box Crop the source image to exactly match the requested box """ if not ci_box: return im assert isinstance(ci_box, basestring) if ci_box.count(",") != 3: raise ValueError("ci_box must contain exactly 4 values x,y,width,height") x, y, width, height = [int(i) for i in ci_box.split(",")] # Handle one-dimensional targets. im = im.convert("RGBA").crop((x, y, x + width, y + height)) return im
<commit_before>try: from PIL import Image except ImportError: import Image def crop_ci_box(im, size, ci_box=False, **kwargs): """ Crop image based on very specific pixel values (x,y,width,height) ci_box Crop the source image to exactly match the requested box """ if not ci_box: return im assert isinstance(ci_box, basestring) if ci_box.count(",") != 3: raise ValueError("ci_box must contain exactly 4 values x,y,width,height") x, y, width, height = [int(i) for i in ci_box.split(",")] # Handle one-dimensional targets. im = im.convert("RGBA").crop((x, y, x + width, y + height)) return im <commit_msg>Rename the processor back to its original name, crop_box<commit_after>
try: from PIL import Image except ImportError: import Image def crop_box(im, size, ci_box=False, **kwargs): """ Crop image based on very specific pixel values (x,y,width,height) ci_box Crop the source image to exactly match the requested box """ if not ci_box: return im assert isinstance(ci_box, basestring) if ci_box.count(",") != 3: raise ValueError("ci_box must contain exactly 4 values x,y,width,height") x, y, width, height = [int(i) for i in ci_box.split(",")] # Handle one-dimensional targets. im = im.convert("RGBA").crop((x, y, x + width, y + height)) return im
try: from PIL import Image except ImportError: import Image def crop_ci_box(im, size, ci_box=False, **kwargs): """ Crop image based on very specific pixel values (x,y,width,height) ci_box Crop the source image to exactly match the requested box """ if not ci_box: return im assert isinstance(ci_box, basestring) if ci_box.count(",") != 3: raise ValueError("ci_box must contain exactly 4 values x,y,width,height") x, y, width, height = [int(i) for i in ci_box.split(",")] # Handle one-dimensional targets. im = im.convert("RGBA").crop((x, y, x + width, y + height)) return im Rename the processor back to its original name, crop_boxtry: from PIL import Image except ImportError: import Image def crop_box(im, size, ci_box=False, **kwargs): """ Crop image based on very specific pixel values (x,y,width,height) ci_box Crop the source image to exactly match the requested box """ if not ci_box: return im assert isinstance(ci_box, basestring) if ci_box.count(",") != 3: raise ValueError("ci_box must contain exactly 4 values x,y,width,height") x, y, width, height = [int(i) for i in ci_box.split(",")] # Handle one-dimensional targets. im = im.convert("RGBA").crop((x, y, x + width, y + height)) return im
<commit_before>try: from PIL import Image except ImportError: import Image def crop_ci_box(im, size, ci_box=False, **kwargs): """ Crop image based on very specific pixel values (x,y,width,height) ci_box Crop the source image to exactly match the requested box """ if not ci_box: return im assert isinstance(ci_box, basestring) if ci_box.count(",") != 3: raise ValueError("ci_box must contain exactly 4 values x,y,width,height") x, y, width, height = [int(i) for i in ci_box.split(",")] # Handle one-dimensional targets. im = im.convert("RGBA").crop((x, y, x + width, y + height)) return im <commit_msg>Rename the processor back to its original name, crop_box<commit_after>try: from PIL import Image except ImportError: import Image def crop_box(im, size, ci_box=False, **kwargs): """ Crop image based on very specific pixel values (x,y,width,height) ci_box Crop the source image to exactly match the requested box """ if not ci_box: return im assert isinstance(ci_box, basestring) if ci_box.count(",") != 3: raise ValueError("ci_box must contain exactly 4 values x,y,width,height") x, y, width, height = [int(i) for i in ci_box.split(",")] # Handle one-dimensional targets. im = im.convert("RGBA").crop((x, y, x + width, y + height)) return im
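The processor above converts an x,y,width,height string into PIL's corner-based crop box; a tiny self-contained sketch of that conversion:

# Sketch: PIL crop boxes are (left, upper, right, lower), so a
# width/height pair must be turned into a second corner point.
from PIL import Image

im = Image.new('RGB', (100, 80))
x, y, width, height = 10, 5, 30, 20
cropped = im.crop((x, y, x + width, y + height))
assert cropped.size == (30, 20)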
2033c71a84f03e7e8d40c567e632afd2e013aad3
url/__init__.py
url/__init__.py
#!/usr/bin/env python # # Copyright (c) 2012-2013 SEOmoz, Inc. # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. '''This is a module for dealing with urls. In particular, sanitizing them.''' import sys from six import text_type if text_type == str: from .url import UnicodeURL as URL else: from .url import StringURL as URL from .url import set_psl def parse(url, encoding='utf-8'): '''Parse the provided url string and return an URL object''' return URL.parse(url, encoding)
#!/usr/bin/env python # # Copyright (c) 2012-2013 SEOmoz, Inc. # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. '''This is a module for dealing with urls. In particular, sanitizing them.''' from six import text_type if text_type == str: from .url import UnicodeURL as URL else: from .url import StringURL as URL from .url import set_psl def parse(url, encoding='utf-8'): '''Parse the provided url string and return an URL object''' return URL.parse(url, encoding)
Drop unused import of sys.
Drop unused import of sys.
Python
mit
seomoz/url-py,seomoz/url-py
#!/usr/bin/env python # # Copyright (c) 2012-2013 SEOmoz, Inc. # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. '''This is a module for dealing with urls. In particular, sanitizing them.''' import sys from six import text_type if text_type == str: from .url import UnicodeURL as URL else: from .url import StringURL as URL from .url import set_psl def parse(url, encoding='utf-8'): '''Parse the provided url string and return an URL object''' return URL.parse(url, encoding) Drop unused import of sys.
#!/usr/bin/env python # # Copyright (c) 2012-2013 SEOmoz, Inc. # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. '''This is a module for dealing with urls. In particular, sanitizing them.''' from six import text_type if text_type == str: from .url import UnicodeURL as URL else: from .url import StringURL as URL from .url import set_psl def parse(url, encoding='utf-8'): '''Parse the provided url string and return an URL object''' return URL.parse(url, encoding)
<commit_before>#!/usr/bin/env python # # Copyright (c) 2012-2013 SEOmoz, Inc. # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. '''This is a module for dealing with urls. In particular, sanitizing them.''' import sys from six import text_type if text_type == str: from .url import UnicodeURL as URL else: from .url import StringURL as URL from .url import set_psl def parse(url, encoding='utf-8'): '''Parse the provided url string and return an URL object''' return URL.parse(url, encoding) <commit_msg>Drop unused import of sys.<commit_after>
#!/usr/bin/env python # # Copyright (c) 2012-2013 SEOmoz, Inc. # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. '''This is a module for dealing with urls. In particular, sanitizing them.''' from six import text_type if text_type == str: from .url import UnicodeURL as URL else: from .url import StringURL as URL from .url import set_psl def parse(url, encoding='utf-8'): '''Parse the provided url string and return an URL object''' return URL.parse(url, encoding)
#!/usr/bin/env python # # Copyright (c) 2012-2013 SEOmoz, Inc. # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. '''This is a module for dealing with urls. In particular, sanitizing them.''' import sys from six import text_type if text_type == str: from .url import UnicodeURL as URL else: from .url import StringURL as URL from .url import set_psl def parse(url, encoding='utf-8'): '''Parse the provided url string and return an URL object''' return URL.parse(url, encoding) Drop unused import of sys.#!/usr/bin/env python # # Copyright (c) 2012-2013 SEOmoz, Inc. # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. '''This is a module for dealing with urls. In particular, sanitizing them.''' from six import text_type if text_type == str: from .url import UnicodeURL as URL else: from .url import StringURL as URL from .url import set_psl def parse(url, encoding='utf-8'): '''Parse the provided url string and return an URL object''' return URL.parse(url, encoding)
<commit_before>#!/usr/bin/env python # # Copyright (c) 2012-2013 SEOmoz, Inc. # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. '''This is a module for dealing with urls. In particular, sanitizing them.''' import sys from six import text_type if text_type == str: from .url import UnicodeURL as URL else: from .url import StringURL as URL from .url import set_psl def parse(url, encoding='utf-8'): '''Parse the provided url string and return an URL object''' return URL.parse(url, encoding) <commit_msg>Drop unused import of sys.<commit_after>#!/usr/bin/env python # # Copyright (c) 2012-2013 SEOmoz, Inc. # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. '''This is a module for dealing with urls. In particular, sanitizing them.''' from six import text_type if text_type == str: from .url import UnicodeURL as URL else: from .url import StringURL as URL from .url import set_psl def parse(url, encoding='utf-8'): '''Parse the provided url string and return an URL object''' return URL.parse(url, encoding)
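The conditional import in this record keys off six.text_type to pick a Python 2 or Python 3 implementation; a condensed sketch of why that comparison works (the strings stand in for the module's UnicodeURL/StringURL classes):

# Sketch: six.text_type is str on Python 3 and unicode on Python 2,
# so comparing it to str selects the implementation at import time.
from six import text_type

if text_type == str:        # Python 3
    chosen = 'UnicodeURL'   # what the module would import as URL
else:                       # Python 2
    chosen = 'StringURL'
print(chosen)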
a10e21a8fe811e896998ba510255592a966f0782
infra/recipes/build_windows.py
infra/recipes/build_windows.py
# Copyright 2022 The ChromiumOS Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from recipe_engine.post_process import Filter PYTHON_VERSION_COMPATIBILITY = "PY3" DEPS = [ "crosvm", "recipe_engine/buildbucket", "recipe_engine/context", "recipe_engine/properties", "recipe_engine/step", ] def RunSteps(api): # Note: The recipe does work on linux as well, if the required dependencies have been installed # on the host via ./tools/install-deps. # This allows the build to be tested via `./recipe.py run build_windows` with api.crosvm.host_build_context(): api.step( "Build crosvm tests", [ "vpython3", "./tools/run_tests", "--verbose", "--build-only", ], ) api.step( "Run crosvm tests", [ "vpython3", "./tools/run_tests", "--verbose", ], ) def GenTests(api): filter_steps = Filter("Build crosvm tests", "Run crosvm tests") yield ( api.test( "build", api.buildbucket.ci_build(project="crosvm/crosvm"), ) + api.post_process(filter_steps) )
# Copyright 2022 The ChromiumOS Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from recipe_engine.post_process import Filter PYTHON_VERSION_COMPATIBILITY = "PY3" DEPS = [ "crosvm", "recipe_engine/buildbucket", "recipe_engine/context", "recipe_engine/properties", "recipe_engine/step", ] def RunSteps(api): # Note: The recipe does work on linux as well, if the required dependencies have been installed # on the host via ./tools/install-deps. # This allows the build to be tested via `./recipe.py run build_windows` with api.crosvm.host_build_context(): api.step( "Build crosvm tests", [ "vpython3", "./tools/run_tests", "--verbose", "--build-only", ], ) api.step( "Run crosvm tests", [ "vpython3", "./tools/run_tests", "--verbose", ], ) api.step( "Clippy windows crosvm", [ "vpython3", "./tools/clippy", ], ) def GenTests(api): filter_steps = Filter("Build crosvm tests", "Run crosvm tests") yield ( api.test( "build", api.buildbucket.ci_build(project="crosvm/crosvm"), ) + api.post_process(filter_steps) )
Enable clippy in windows LUCI
crosvm: Enable clippy in windows LUCI For linux based systems, clippy continues to run in health_check BUG=b:257249038 TEST=CQ Change-Id: I39d3d45a0db72c61e79fd2c51b195b82c067a244 Reviewed-on: https://chromium-review.googlesource.com/c/crosvm/crosvm/+/3993934 Reviewed-by: Dennis Kempin <cd09796fb571bec2782819dbfd33307f65b1c778@google.com> Commit-Queue: Vikram Auradkar <50abc1d85bedfd527fc7dee23f4819d1f0d946ec@google.com>
Python
bsd-3-clause
google/crosvm
# Copyright 2022 The ChromiumOS Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from recipe_engine.post_process import Filter PYTHON_VERSION_COMPATIBILITY = "PY3" DEPS = [ "crosvm", "recipe_engine/buildbucket", "recipe_engine/context", "recipe_engine/properties", "recipe_engine/step", ] def RunSteps(api): # Note: The recipe does work on linux as well, if the required dependencies have been installed # on the host via ./tools/install-deps. # This allows the build to be tested via `./recipe.py run build_windows` with api.crosvm.host_build_context(): api.step( "Build crosvm tests", [ "vpython3", "./tools/run_tests", "--verbose", "--build-only", ], ) api.step( "Run crosvm tests", [ "vpython3", "./tools/run_tests", "--verbose", ], ) def GenTests(api): filter_steps = Filter("Build crosvm tests", "Run crosvm tests") yield ( api.test( "build", api.buildbucket.ci_build(project="crosvm/crosvm"), ) + api.post_process(filter_steps) ) crosvm: Enable clippy in windows LUCI For linux based systems, clippy continues to run in health_check BUG=b:257249038 TEST=CQ Change-Id: I39d3d45a0db72c61e79fd2c51b195b82c067a244 Reviewed-on: https://chromium-review.googlesource.com/c/crosvm/crosvm/+/3993934 Reviewed-by: Dennis Kempin <cd09796fb571bec2782819dbfd33307f65b1c778@google.com> Commit-Queue: Vikram Auradkar <50abc1d85bedfd527fc7dee23f4819d1f0d946ec@google.com>
# Copyright 2022 The ChromiumOS Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from recipe_engine.post_process import Filter PYTHON_VERSION_COMPATIBILITY = "PY3" DEPS = [ "crosvm", "recipe_engine/buildbucket", "recipe_engine/context", "recipe_engine/properties", "recipe_engine/step", ] def RunSteps(api): # Note: The recipe does work on linux as well, if the required dependencies have been installed # on the host via ./tools/install-deps. # This allows the build to be tested via `./recipe.py run build_windows` with api.crosvm.host_build_context(): api.step( "Build crosvm tests", [ "vpython3", "./tools/run_tests", "--verbose", "--build-only", ], ) api.step( "Run crosvm tests", [ "vpython3", "./tools/run_tests", "--verbose", ], ) api.step( "Clippy windows crosvm", [ "vpython3", "./tools/clippy", ], ) def GenTests(api): filter_steps = Filter("Build crosvm tests", "Run crosvm tests") yield ( api.test( "build", api.buildbucket.ci_build(project="crosvm/crosvm"), ) + api.post_process(filter_steps) )
<commit_before># Copyright 2022 The ChromiumOS Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from recipe_engine.post_process import Filter PYTHON_VERSION_COMPATIBILITY = "PY3" DEPS = [ "crosvm", "recipe_engine/buildbucket", "recipe_engine/context", "recipe_engine/properties", "recipe_engine/step", ] def RunSteps(api): # Note: The recipe does work on linux as well, if the required dependencies have been installed # on the host via ./tools/install-deps. # This allows the build to be tested via `./recipe.py run build_windows` with api.crosvm.host_build_context(): api.step( "Build crosvm tests", [ "vpython3", "./tools/run_tests", "--verbose", "--build-only", ], ) api.step( "Run crosvm tests", [ "vpython3", "./tools/run_tests", "--verbose", ], ) def GenTests(api): filter_steps = Filter("Build crosvm tests", "Run crosvm tests") yield ( api.test( "build", api.buildbucket.ci_build(project="crosvm/crosvm"), ) + api.post_process(filter_steps) ) <commit_msg>crosvm: Enable clippy in windows LUCI For linux based systems, clippy continues to run in health_check BUG=b:257249038 TEST=CQ Change-Id: I39d3d45a0db72c61e79fd2c51b195b82c067a244 Reviewed-on: https://chromium-review.googlesource.com/c/crosvm/crosvm/+/3993934 Reviewed-by: Dennis Kempin <cd09796fb571bec2782819dbfd33307f65b1c778@google.com> Commit-Queue: Vikram Auradkar <50abc1d85bedfd527fc7dee23f4819d1f0d946ec@google.com><commit_after>
# Copyright 2022 The ChromiumOS Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from recipe_engine.post_process import Filter PYTHON_VERSION_COMPATIBILITY = "PY3" DEPS = [ "crosvm", "recipe_engine/buildbucket", "recipe_engine/context", "recipe_engine/properties", "recipe_engine/step", ] def RunSteps(api): # Note: The recipe does work on linux as well, if the required dependencies have been installed # on the host via ./tools/install-deps. # This allows the build to be tested via `./recipe.py run build_windows` with api.crosvm.host_build_context(): api.step( "Build crosvm tests", [ "vpython3", "./tools/run_tests", "--verbose", "--build-only", ], ) api.step( "Run crosvm tests", [ "vpython3", "./tools/run_tests", "--verbose", ], ) api.step( "Clippy windows crosvm", [ "vpython3", "./tools/clippy", ], ) def GenTests(api): filter_steps = Filter("Build crosvm tests", "Run crosvm tests") yield ( api.test( "build", api.buildbucket.ci_build(project="crosvm/crosvm"), ) + api.post_process(filter_steps) )
# Copyright 2022 The ChromiumOS Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from recipe_engine.post_process import Filter PYTHON_VERSION_COMPATIBILITY = "PY3" DEPS = [ "crosvm", "recipe_engine/buildbucket", "recipe_engine/context", "recipe_engine/properties", "recipe_engine/step", ] def RunSteps(api): # Note: The recipe does work on linux as well, if the required dependencies have been installed # on the host via ./tools/install-deps. # This allows the build to be tested via `./recipe.py run build_windows` with api.crosvm.host_build_context(): api.step( "Build crosvm tests", [ "vpython3", "./tools/run_tests", "--verbose", "--build-only", ], ) api.step( "Run crosvm tests", [ "vpython3", "./tools/run_tests", "--verbose", ], ) def GenTests(api): filter_steps = Filter("Build crosvm tests", "Run crosvm tests") yield ( api.test( "build", api.buildbucket.ci_build(project="crosvm/crosvm"), ) + api.post_process(filter_steps) ) crosvm: Enable clippy in windows LUCI For linux based systems, clippy continues to run in health_check BUG=b:257249038 TEST=CQ Change-Id: I39d3d45a0db72c61e79fd2c51b195b82c067a244 Reviewed-on: https://chromium-review.googlesource.com/c/crosvm/crosvm/+/3993934 Reviewed-by: Dennis Kempin <cd09796fb571bec2782819dbfd33307f65b1c778@google.com> Commit-Queue: Vikram Auradkar <50abc1d85bedfd527fc7dee23f4819d1f0d946ec@google.com># Copyright 2022 The ChromiumOS Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from recipe_engine.post_process import Filter PYTHON_VERSION_COMPATIBILITY = "PY3" DEPS = [ "crosvm", "recipe_engine/buildbucket", "recipe_engine/context", "recipe_engine/properties", "recipe_engine/step", ] def RunSteps(api): # Note: The recipe does work on linux as well, if the required dependencies have been installed # on the host via ./tools/install-deps. # This allows the build to be tested via `./recipe.py run build_windows` with api.crosvm.host_build_context(): api.step( "Build crosvm tests", [ "vpython3", "./tools/run_tests", "--verbose", "--build-only", ], ) api.step( "Run crosvm tests", [ "vpython3", "./tools/run_tests", "--verbose", ], ) api.step( "Clippy windows crosvm", [ "vpython3", "./tools/clippy", ], ) def GenTests(api): filter_steps = Filter("Build crosvm tests", "Run crosvm tests") yield ( api.test( "build", api.buildbucket.ci_build(project="crosvm/crosvm"), ) + api.post_process(filter_steps) )
<commit_before># Copyright 2022 The ChromiumOS Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from recipe_engine.post_process import Filter PYTHON_VERSION_COMPATIBILITY = "PY3" DEPS = [ "crosvm", "recipe_engine/buildbucket", "recipe_engine/context", "recipe_engine/properties", "recipe_engine/step", ] def RunSteps(api): # Note: The recipe does work on linux as well, if the required dependencies have been installed # on the host via ./tools/install-deps. # This allows the build to be tested via `./recipe.py run build_windows` with api.crosvm.host_build_context(): api.step( "Build crosvm tests", [ "vpython3", "./tools/run_tests", "--verbose", "--build-only", ], ) api.step( "Run crosvm tests", [ "vpython3", "./tools/run_tests", "--verbose", ], ) def GenTests(api): filter_steps = Filter("Build crosvm tests", "Run crosvm tests") yield ( api.test( "build", api.buildbucket.ci_build(project="crosvm/crosvm"), ) + api.post_process(filter_steps) ) <commit_msg>crosvm: Enable clippy in windows LUCI For linux based systems, clippy continues to run in health_check BUG=b:257249038 TEST=CQ Change-Id: I39d3d45a0db72c61e79fd2c51b195b82c067a244 Reviewed-on: https://chromium-review.googlesource.com/c/crosvm/crosvm/+/3993934 Reviewed-by: Dennis Kempin <cd09796fb571bec2782819dbfd33307f65b1c778@google.com> Commit-Queue: Vikram Auradkar <50abc1d85bedfd527fc7dee23f4819d1f0d946ec@google.com><commit_after># Copyright 2022 The ChromiumOS Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from recipe_engine.post_process import Filter PYTHON_VERSION_COMPATIBILITY = "PY3" DEPS = [ "crosvm", "recipe_engine/buildbucket", "recipe_engine/context", "recipe_engine/properties", "recipe_engine/step", ] def RunSteps(api): # Note: The recipe does work on linux as well, if the required dependencies have been installed # on the host via ./tools/install-deps. # This allows the build to be tested via `./recipe.py run build_windows` with api.crosvm.host_build_context(): api.step( "Build crosvm tests", [ "vpython3", "./tools/run_tests", "--verbose", "--build-only", ], ) api.step( "Run crosvm tests", [ "vpython3", "./tools/run_tests", "--verbose", ], ) api.step( "Clippy windows crosvm", [ "vpython3", "./tools/clippy", ], ) def GenTests(api): filter_steps = Filter("Build crosvm tests", "Run crosvm tests") yield ( api.test( "build", api.buildbucket.ci_build(project="crosvm/crosvm"), ) + api.post_process(filter_steps) )
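The new step shells out to `./tools/clippy` through vpython3, mirroring the existing build and test steps. A standalone sketch of the same invocation outside the recipe engine (assuming a crosvm checkout as the working directory):

```python
import subprocess

# Equivalent of the "Clippy windows crosvm" recipe step; raises
# CalledProcessError if clippy reports lint failures.
subprocess.run(["vpython3", "./tools/clippy"], check=True)
```

Note that `GenTests` filters expectations down to the build and run steps, so the added clippy step does not appear in the recorded test expectations.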
fbdf73e3e9fb5f2801ec11637caa6020095acfdf
terrabot/packets/packetE.py
terrabot/packets/packetE.py
from terrabot.util.streamer import Streamer from terrabot.events.events import Events class PacketEParser(object): def parse(self, world, player, data, ev_man): #If player is active if data[2] == 1: #Raise event with player_id ev_man.raise_event(Events.NewPlayer, data[1])
from terrabot.util.streamer import Streamer from terrabot.events.events import Events class PacketEParser(object): def parse(self, world, player, data, ev_man): #If player is active if data[2] == 1 and player.logged_in: #Raise event with player_id ev_man.raise_event(Events.NewPlayer, data[1])
Fix with 'newPlayer' event triggering on load
Fix with 'newPlayer' event triggering on load
Python
mit
flammified/terrabot
from terrabot.util.streamer import Streamer from terrabot.events.events import Events class PacketEParser(object): def parse(self, world, player, data, ev_man): #If player is active if data[2] == 1: #Raise event with player_id ev_man.raise_event(Events.NewPlayer, data[1]) Fix with 'newPlayer' event triggering on load
from terrabot.util.streamer import Streamer from terrabot.events.events import Events class PacketEParser(object): def parse(self, world, player, data, ev_man): #If player is active if data[2] == 1 and player.logged_in: #Raise event with player_id ev_man.raise_event(Events.NewPlayer, data[1])
<commit_before>from terrabot.util.streamer import Streamer from terrabot.events.events import Events class PacketEParser(object): def parse(self, world, player, data, ev_man): #If player is active if data[2] == 1: #Raise event with player_id ev_man.raise_event(Events.NewPlayer, data[1]) <commit_msg>Fix with 'newPlayer' event triggering on load<commit_after>
from terrabot.util.streamer import Streamer from terrabot.events.events import Events class PacketEParser(object): def parse(self, world, player, data, ev_man): #If player is active if data[2] == 1 and player.logged_in: #Raise event with player_id ev_man.raise_event(Events.NewPlayer, data[1])
from terrabot.util.streamer import Streamer from terrabot.events.events import Events class PacketEParser(object): def parse(self, world, player, data, ev_man): #If player is active if data[2] == 1: #Raise event with player_id ev_man.raise_event(Events.NewPlayer, data[1]) Fix with 'newPlayer' event triggering on loadfrom terrabot.util.streamer import Streamer from terrabot.events.events import Events class PacketEParser(object): def parse(self, world, player, data, ev_man): #If player is active if data[2] == 1 and player.logged_in: #Raise event with player_id ev_man.raise_event(Events.NewPlayer, data[1])
<commit_before>from terrabot.util.streamer import Streamer from terrabot.events.events import Events class PacketEParser(object): def parse(self, world, player, data, ev_man): #If player is active if data[2] == 1: #Raise event with player_id ev_man.raise_event(Events.NewPlayer, data[1]) <commit_msg>Fix with 'newPlayer' event triggering on load<commit_after>from terrabot.util.streamer import Streamer from terrabot.events.events import Events class PacketEParser(object): def parse(self, world, player, data, ev_man): #If player is active if data[2] == 1 and player.logged_in: #Raise event with player_id ev_man.raise_event(Events.NewPlayer, data[1])
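The fix adds a second condition so the event only fires once the bot itself has finished logging in, rather than for every active-player flag seen while the world loads. A reduced sketch of the same guard, with hypothetical stand-ins for the player and event manager:

```python
class Player(object):
    def __init__(self):
        self.logged_in = False  # set True once login/world load completes

def parse_packet(data, player, ev_man):
    # Suppress NewPlayer events during the initial player-list flood:
    # only raise once our own login has completed.
    if data[2] == 1 and player.logged_in:
        ev_man.raise_event("NewPlayer", data[1])
```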
088295e7d4d81b5e2fb23564b49ddd08beb3f720
chatterbot/__init__.py
chatterbot/__init__.py
""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '0.5.4' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot'
""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '0.5.5' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot'
Update package version to 0.5.5
Update package version to 0.5.5
Python
bsd-3-clause
Reinaesaya/OUIRL-ChatBot,Gustavo6046/ChatterBot,gunthercox/ChatterBot,davizucon/ChatterBot,maclogan/VirtualPenPal,vkosuri/ChatterBot
""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '0.5.4' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot' Update package version to 0.5.5
""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '0.5.5' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot'
<commit_before>""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '0.5.4' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot' <commit_msg>Update package version to 0.5.5<commit_after>
""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '0.5.5' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot'
""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '0.5.4' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot' Update package version to 0.5.5""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '0.5.5' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot'
<commit_before>""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '0.5.4' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot' <commit_msg>Update package version to 0.5.5<commit_after>""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '0.5.5' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot'
39ec7961bb3c4431bda67956f0208ab524a80213
blo/DBControl.py
blo/DBControl.py
import sqlite3 class DBControl: def __init__(self, db_name : str=":memory:"): self.db_conn = sqlite3.connect(db_name) def create_tables(self): c = self.db_conn.cursor() c.execute("""CREATE TABLE IF NOT EXISTS Articles ( id INTEGER PRIMARY KEY AUTOINCREMENT, text TEXT, digest TEXT UNIQUE, updatedate TEXT)""") c.execute("CREATE VIRTUAL TABLE Articles_fts USING fts4( words TEXT )") self.db_conn.commit() def close_connect(self): self.db_conn.close() self.db_conn = None def _select_all(self, table_name : str) -> list: c = self.db_conn.cursor() c.execute("SELECT * FROM " + table_name) return c.fetchall()
import sqlite3 from .BloArticle import BloArticle class DBControl: def __init__(self, db_name : str=":memory:"): self.db_conn = sqlite3.connect(db_name) def create_tables(self): c = self.db_conn.cursor() c.execute("""CREATE TABLE IF NOT EXISTS Articles ( id INTEGER PRIMARY KEY AUTOINCREMENT, text TEXT, digest TEXT UNIQUE, updatedate TEXT)""") c.execute("CREATE VIRTUAL TABLE Articles_fts USING fts4( words TEXT )") self.db_conn.commit() def close_connect(self): self.db_conn.close() self.db_conn = None def insert_article(self, article: BloArticle): assert(article is not None) # if has not text data then no action on this method. if article.has_text : c = self.db_conn.cursor() # TODO: Implement insert query for Article and Article_fts c.execute("") self.db_conn.commit() def _select_all(self, table_name : str) -> list: c = self.db_conn.cursor() c.execute("SELECT * FROM " + table_name) return c.fetchall()
Implement stub of insert article information method.
Implement stub of insert article information method.
Python
mit
10nin/blo
import sqlite3


class DBControl:
    def __init__(self, db_name : str=":memory:"):
        self.db_conn = sqlite3.connect(db_name)

    def create_tables(self):
        c = self.db_conn.cursor()
        c.execute("""CREATE TABLE IF NOT EXISTS Articles (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        text TEXT,
        digest TEXT UNIQUE,
        updatedate TEXT)""")
        c.execute("CREATE VIRTUAL TABLE Articles_fts USING fts4( words TEXT )")
        self.db_conn.commit()

    def close_connect(self):
        self.db_conn.close()
        self.db_conn = None

    def _select_all(self, table_name : str) -> list:
        c = self.db_conn.cursor()
        c.execute("SELECT * FROM " + table_name)
        return c.fetchall()
Implement stub of insert article information method.
import sqlite3 from .BloArticle import BloArticle class DBControl: def __init__(self, db_name : str=":memory:"): self.db_conn = sqlite3.connect(db_name) def create_tables(self): c = self.db_conn.cursor() c.execute("""CREATE TABLE IF NOT EXISTS Articles ( id INTEGER PRIMARY KEY AUTOINCREMENT, text TEXT, digest TEXT UNIQUE, updatedate TEXT)""") c.execute("CREATE VIRTUAL TABLE Articles_fts USING fts4( words TEXT )") self.db_conn.commit() def close_connect(self): self.db_conn.close() self.db_conn = None def insert_article(self, article: BloArticle): assert(article is not None) # if has not text data then no action on this method. if article.has_text : c = self.db_conn.cursor() # TODO: Implement insert query for Article and Article_fts c.execute("") self.db_conn.commit() def _select_all(self, table_name : str) -> list: c = self.db_conn.cursor() c.execute("SELECT * FROM " + table_name) return c.fetchall()
<commit_before>import sqlite3


class DBControl:
    def __init__(self, db_name : str=":memory:"):
        self.db_conn = sqlite3.connect(db_name)

    def create_tables(self):
        c = self.db_conn.cursor()
        c.execute("""CREATE TABLE IF NOT EXISTS Articles (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        text TEXT,
        digest TEXT UNIQUE,
        updatedate TEXT)""")
        c.execute("CREATE VIRTUAL TABLE Articles_fts USING fts4( words TEXT )")
        self.db_conn.commit()

    def close_connect(self):
        self.db_conn.close()
        self.db_conn = None

    def _select_all(self, table_name : str) -> list:
        c = self.db_conn.cursor()
        c.execute("SELECT * FROM " + table_name)
        return c.fetchall()
<commit_msg>Implement stub of insert article information method.<commit_after>
import sqlite3 from .BloArticle import BloArticle class DBControl: def __init__(self, db_name : str=":memory:"): self.db_conn = sqlite3.connect(db_name) def create_tables(self): c = self.db_conn.cursor() c.execute("""CREATE TABLE IF NOT EXISTS Articles ( id INTEGER PRIMARY KEY AUTOINCREMENT, text TEXT, digest TEXT UNIQUE, updatedate TEXT)""") c.execute("CREATE VIRTUAL TABLE Articles_fts USING fts4( words TEXT )") self.db_conn.commit() def close_connect(self): self.db_conn.close() self.db_conn = None def insert_article(self, article: BloArticle): assert(article is not None) # if has not text data then no action on this method. if article.has_text : c = self.db_conn.cursor() # TODO: Implement insert query for Article and Article_fts c.execute("") self.db_conn.commit() def _select_all(self, table_name : str) -> list: c = self.db_conn.cursor() c.execute("SELECT * FROM " + table_name) return c.fetchall()
import sqlite3


class DBControl:
    def __init__(self, db_name : str=":memory:"):
        self.db_conn = sqlite3.connect(db_name)

    def create_tables(self):
        c = self.db_conn.cursor()
        c.execute("""CREATE TABLE IF NOT EXISTS Articles (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        text TEXT,
        digest TEXT UNIQUE,
        updatedate TEXT)""")
        c.execute("CREATE VIRTUAL TABLE Articles_fts USING fts4( words TEXT )")
        self.db_conn.commit()

    def close_connect(self):
        self.db_conn.close()
        self.db_conn = None

    def _select_all(self, table_name : str) -> list:
        c = self.db_conn.cursor()
        c.execute("SELECT * FROM " + table_name)
        return c.fetchall()
Implement stub of insert article information method.import sqlite3

from .BloArticle import BloArticle


class DBControl:
    def __init__(self, db_name : str=":memory:"):
        self.db_conn = sqlite3.connect(db_name)

    def create_tables(self):
        c = self.db_conn.cursor()
        c.execute("""CREATE TABLE IF NOT EXISTS Articles (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        text TEXT,
        digest TEXT UNIQUE,
        updatedate TEXT)""")
        c.execute("CREATE VIRTUAL TABLE Articles_fts USING fts4( words TEXT )")
        self.db_conn.commit()

    def close_connect(self):
        self.db_conn.close()
        self.db_conn = None

    def insert_article(self, article: BloArticle):
        assert(article is not None)
        # if has not text data then no action on this method.
        if article.has_text :
            c = self.db_conn.cursor()
            # TODO: Implement insert query for Article and Article_fts
            c.execute("")
            self.db_conn.commit()

    def _select_all(self, table_name : str) -> list:
        c = self.db_conn.cursor()
        c.execute("SELECT * FROM " + table_name)
        return c.fetchall()
<commit_before>import sqlite3


class DBControl:
    def __init__(self, db_name : str=":memory:"):
        self.db_conn = sqlite3.connect(db_name)

    def create_tables(self):
        c = self.db_conn.cursor()
        c.execute("""CREATE TABLE IF NOT EXISTS Articles (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        text TEXT,
        digest TEXT UNIQUE,
        updatedate TEXT)""")
        c.execute("CREATE VIRTUAL TABLE Articles_fts USING fts4( words TEXT )")
        self.db_conn.commit()

    def close_connect(self):
        self.db_conn.close()
        self.db_conn = None

    def _select_all(self, table_name : str) -> list:
        c = self.db_conn.cursor()
        c.execute("SELECT * FROM " + table_name)
        return c.fetchall()
<commit_msg>Implement stub of insert article information method.<commit_after>import sqlite3

from .BloArticle import BloArticle


class DBControl:
    def __init__(self, db_name : str=":memory:"):
        self.db_conn = sqlite3.connect(db_name)

    def create_tables(self):
        c = self.db_conn.cursor()
        c.execute("""CREATE TABLE IF NOT EXISTS Articles (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        text TEXT,
        digest TEXT UNIQUE,
        updatedate TEXT)""")
        c.execute("CREATE VIRTUAL TABLE Articles_fts USING fts4( words TEXT )")
        self.db_conn.commit()

    def close_connect(self):
        self.db_conn.close()
        self.db_conn = None

    def insert_article(self, article: BloArticle):
        assert(article is not None)
        # if has not text data then no action on this method.
        if article.has_text :
            c = self.db_conn.cursor()
            # TODO: Implement insert query for Article and Article_fts
            c.execute("")
            self.db_conn.commit()

    def _select_all(self, table_name : str) -> list:
        c = self.db_conn.cursor()
        c.execute("SELECT * FROM " + table_name)
        return c.fetchall()
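The commit intentionally leaves the query as a stub (`c.execute("")` behind a TODO). For illustration only, here is a hypothetical completion consistent with the `Articles` / `Articles_fts` schema created above — the `get_text()` / `get_digest()` accessors are assumptions, not confirmed API:

```python
def insert_article(self, article):
    # Hypothetical completion of the stubbed method on DBControl.
    if article.has_text:
        c = self.db_conn.cursor()
        # Digest is UNIQUE, so OR IGNORE skips already-stored articles.
        c.execute(
            "INSERT OR IGNORE INTO Articles (text, digest, updatedate) "
            "VALUES (?, ?, datetime('now'))",
            (article.get_text(), article.get_digest()),
        )
        # Mirror the text into the full-text-search table.
        c.execute(
            "INSERT INTO Articles_fts (words) VALUES (?)",
            (article.get_text(),),
        )
        self.db_conn.commit()
```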
5dcb2564653e4b38359ca6f3e55195839d32ae67
tests/test_cmd.py
tests/test_cmd.py
import unittest from unittest import mock from click.testing import CliRunner from scuevals_api.cmd import cli class CmdsTestCase(unittest.TestCase): @classmethod def setUpClass(cls): cls.runner = CliRunner() def cli_run(self, *cmds): return self.runner.invoke(cli, cmds) cls.cli_run = cli_run def test_initdb(self): result = self.cli_run('initdb') self.assertEqual(0, result.exit_code, msg=str(result)) @mock.patch('flask.Flask.run', create=True, return_value=True) def test_start(self, new_app_run_func): result = self.cli_run('start') self.assertEqual(0, result.exit_code, msg=str(result))
import unittest from unittest import mock from click.testing import CliRunner from scuevals_api.cmd import cli class CmdsTestCase(unittest.TestCase): @classmethod def setUpClass(cls): cls.runner = CliRunner() def cli_run(self, *cmds): return self.runner.invoke(cli, cmds) cls.cli_run = cli_run def test_initdb(self): result = self.cli_run('initdb') self.assertEqual(0, result.exit_code, msg=str(result)) @mock.patch('flask.Flask.run', create=True, return_value=True) def test_start(self, new_app_run_func): result = self.cli_run('start', '--env', 'test') self.assertEqual(0, result.exit_code, msg=str(result))
Set test for start cmd to test env
Set test for start cmd to test env
Python
agpl-3.0
SCUEvals/scuevals-api
import unittest from unittest import mock from click.testing import CliRunner from scuevals_api.cmd import cli class CmdsTestCase(unittest.TestCase): @classmethod def setUpClass(cls): cls.runner = CliRunner() def cli_run(self, *cmds): return self.runner.invoke(cli, cmds) cls.cli_run = cli_run def test_initdb(self): result = self.cli_run('initdb') self.assertEqual(0, result.exit_code, msg=str(result)) @mock.patch('flask.Flask.run', create=True, return_value=True) def test_start(self, new_app_run_func): result = self.cli_run('start') self.assertEqual(0, result.exit_code, msg=str(result)) Set test for start cmd to test env
import unittest from unittest import mock from click.testing import CliRunner from scuevals_api.cmd import cli class CmdsTestCase(unittest.TestCase): @classmethod def setUpClass(cls): cls.runner = CliRunner() def cli_run(self, *cmds): return self.runner.invoke(cli, cmds) cls.cli_run = cli_run def test_initdb(self): result = self.cli_run('initdb') self.assertEqual(0, result.exit_code, msg=str(result)) @mock.patch('flask.Flask.run', create=True, return_value=True) def test_start(self, new_app_run_func): result = self.cli_run('start', '--env', 'test') self.assertEqual(0, result.exit_code, msg=str(result))
<commit_before>import unittest from unittest import mock from click.testing import CliRunner from scuevals_api.cmd import cli class CmdsTestCase(unittest.TestCase): @classmethod def setUpClass(cls): cls.runner = CliRunner() def cli_run(self, *cmds): return self.runner.invoke(cli, cmds) cls.cli_run = cli_run def test_initdb(self): result = self.cli_run('initdb') self.assertEqual(0, result.exit_code, msg=str(result)) @mock.patch('flask.Flask.run', create=True, return_value=True) def test_start(self, new_app_run_func): result = self.cli_run('start') self.assertEqual(0, result.exit_code, msg=str(result)) <commit_msg>Set test for start cmd to test env<commit_after>
import unittest from unittest import mock from click.testing import CliRunner from scuevals_api.cmd import cli class CmdsTestCase(unittest.TestCase): @classmethod def setUpClass(cls): cls.runner = CliRunner() def cli_run(self, *cmds): return self.runner.invoke(cli, cmds) cls.cli_run = cli_run def test_initdb(self): result = self.cli_run('initdb') self.assertEqual(0, result.exit_code, msg=str(result)) @mock.patch('flask.Flask.run', create=True, return_value=True) def test_start(self, new_app_run_func): result = self.cli_run('start', '--env', 'test') self.assertEqual(0, result.exit_code, msg=str(result))
import unittest from unittest import mock from click.testing import CliRunner from scuevals_api.cmd import cli class CmdsTestCase(unittest.TestCase): @classmethod def setUpClass(cls): cls.runner = CliRunner() def cli_run(self, *cmds): return self.runner.invoke(cli, cmds) cls.cli_run = cli_run def test_initdb(self): result = self.cli_run('initdb') self.assertEqual(0, result.exit_code, msg=str(result)) @mock.patch('flask.Flask.run', create=True, return_value=True) def test_start(self, new_app_run_func): result = self.cli_run('start') self.assertEqual(0, result.exit_code, msg=str(result)) Set test for start cmd to test envimport unittest from unittest import mock from click.testing import CliRunner from scuevals_api.cmd import cli class CmdsTestCase(unittest.TestCase): @classmethod def setUpClass(cls): cls.runner = CliRunner() def cli_run(self, *cmds): return self.runner.invoke(cli, cmds) cls.cli_run = cli_run def test_initdb(self): result = self.cli_run('initdb') self.assertEqual(0, result.exit_code, msg=str(result)) @mock.patch('flask.Flask.run', create=True, return_value=True) def test_start(self, new_app_run_func): result = self.cli_run('start', '--env', 'test') self.assertEqual(0, result.exit_code, msg=str(result))
<commit_before>import unittest from unittest import mock from click.testing import CliRunner from scuevals_api.cmd import cli class CmdsTestCase(unittest.TestCase): @classmethod def setUpClass(cls): cls.runner = CliRunner() def cli_run(self, *cmds): return self.runner.invoke(cli, cmds) cls.cli_run = cli_run def test_initdb(self): result = self.cli_run('initdb') self.assertEqual(0, result.exit_code, msg=str(result)) @mock.patch('flask.Flask.run', create=True, return_value=True) def test_start(self, new_app_run_func): result = self.cli_run('start') self.assertEqual(0, result.exit_code, msg=str(result)) <commit_msg>Set test for start cmd to test env<commit_after>import unittest from unittest import mock from click.testing import CliRunner from scuevals_api.cmd import cli class CmdsTestCase(unittest.TestCase): @classmethod def setUpClass(cls): cls.runner = CliRunner() def cli_run(self, *cmds): return self.runner.invoke(cli, cmds) cls.cli_run = cli_run def test_initdb(self): result = self.cli_run('initdb') self.assertEqual(0, result.exit_code, msg=str(result)) @mock.patch('flask.Flask.run', create=True, return_value=True) def test_start(self, new_app_run_func): result = self.cli_run('start', '--env', 'test') self.assertEqual(0, result.exit_code, msg=str(result))
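The test now passes `--env test`, which implies the `start` command accepts an environment option. A minimal sketch of such a command — the option name comes from the test, everything else is assumed:

```python
import click

@click.command()
@click.option("--env", default="development",
              help="Configuration environment to run under.")
def start(env):
    # Hypothetical: select the matching config so tests can force the
    # lightweight 'test' environment instead of the default.
    click.echo("starting with env=%s" % env)
```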
0ccb85a45e56438a5e4b7c0566634587624f0ce4
tests/test_log.py
tests/test_log.py
# -*- coding: utf-8 -*- from flask import url_for def test_view_build_log(test_client): test_client.get(url_for('log.build_log', sha='123456')) def test_view_lint_log(test_client): test_client.get(url_for('log.lint_log', sha='123456'))
# -*- coding: utf-8 -*- from flask import url_for def test_view_build_log(test_client): test_client.get(url_for('log.build_log', sha='123456'))
Remove lint_log view test case
Remove lint_log view test case
Python
mit
bosondata/badwolf
# -*- coding: utf-8 -*- from flask import url_for def test_view_build_log(test_client): test_client.get(url_for('log.build_log', sha='123456')) def test_view_lint_log(test_client): test_client.get(url_for('log.lint_log', sha='123456')) Remove lint_log view test case
# -*- coding: utf-8 -*- from flask import url_for def test_view_build_log(test_client): test_client.get(url_for('log.build_log', sha='123456'))
<commit_before># -*- coding: utf-8 -*- from flask import url_for def test_view_build_log(test_client): test_client.get(url_for('log.build_log', sha='123456')) def test_view_lint_log(test_client): test_client.get(url_for('log.lint_log', sha='123456')) <commit_msg>Remove lint_log view test case<commit_after>
# -*- coding: utf-8 -*- from flask import url_for def test_view_build_log(test_client): test_client.get(url_for('log.build_log', sha='123456'))
# -*- coding: utf-8 -*- from flask import url_for def test_view_build_log(test_client): test_client.get(url_for('log.build_log', sha='123456')) def test_view_lint_log(test_client): test_client.get(url_for('log.lint_log', sha='123456')) Remove lint_log view test case# -*- coding: utf-8 -*- from flask import url_for def test_view_build_log(test_client): test_client.get(url_for('log.build_log', sha='123456'))
<commit_before># -*- coding: utf-8 -*- from flask import url_for def test_view_build_log(test_client): test_client.get(url_for('log.build_log', sha='123456')) def test_view_lint_log(test_client): test_client.get(url_for('log.lint_log', sha='123456')) <commit_msg>Remove lint_log view test case<commit_after># -*- coding: utf-8 -*- from flask import url_for def test_view_build_log(test_client): test_client.get(url_for('log.build_log', sha='123456'))
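When a view is deleted along with its test, a regression check that the old endpoint is really gone can be worth keeping; a hypothetical example (the URL path is assumed, since the removed route can no longer be resolved with `url_for`):

```python
def test_lint_log_removed(test_client):
    # Hypothetical guard: the retired lint-log endpoint should 404 now.
    response = test_client.get('/log/lint/123456')
    assert response.status_code == 404
```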
aa008a13d9d10107f440dca71085f21d9622cd95
src/pybel/parser/baseparser.py
src/pybel/parser/baseparser.py
# -*- coding: utf-8 -*- import logging import time log = logging.getLogger(__name__) __all__ = ['BaseParser'] class BaseParser: """This abstract class represents a language backed by a PyParsing statement Multiple parsers can be easily chained together when they are all inheriting from this base class """ def __init__(self, language, streamline=False): self.language = language if streamline: self.streamline() def parse_lines(self, lines): """Parses multiple lines in succession :return: An list of the resulting parsed lines' tokens :rtype: list """ return [self.parseString(line) for line in lines] def parseString(self, line): """Parses a string with the language represented by this parser :param line: A string representing an instance of this parser's language :type line: str """ return self.language.parseString(line) def streamline(self): """Streamlines the language represented by this parser to make queries run faster""" t = time.time() self.language.streamline() log.info('Finished streamlining %s in %.02fs', self.__class__.__name__, time.time() - t)
# -*- coding: utf-8 -*- import logging import time log = logging.getLogger(__name__) __all__ = ['BaseParser'] class BaseParser: """This abstract class represents a language backed by a PyParsing statement Multiple parsers can be easily chained together when they are all inheriting from this base class """ def __init__(self, language, streamline=False): self.language = language #: The parser can hold an internal state of the current line self.line_number = None if streamline: self.streamline() def parse_lines(self, lines): """Parses multiple lines in succession :return: An list of the resulting parsed lines' tokens :rtype: list """ return [self.parseString(line) for line in lines] def parseString(self, line, line_number=None): """Parses a string with the language represented by this parser :param str line: A string representing an instance of this parser's language :param int line_number: The current line number of the parser """ if line_number is None: return self.language.parseString(line) self.line_number = line_number result = self.language.parseString(line) self.line_number = None return result def streamline(self): """Streamlines the language represented by this parser to make queries run faster""" t = time.time() self.language.streamline() log.info('Finished streamlining %s in %.02fs', self.__class__.__name__, time.time() - t)
Add line state to base parser
Add line state to base parser References #155
Python
mit
pybel/pybel
# -*- coding: utf-8 -*- import logging import time log = logging.getLogger(__name__) __all__ = ['BaseParser'] class BaseParser: """This abstract class represents a language backed by a PyParsing statement Multiple parsers can be easily chained together when they are all inheriting from this base class """ def __init__(self, language, streamline=False): self.language = language if streamline: self.streamline() def parse_lines(self, lines): """Parses multiple lines in succession :return: An list of the resulting parsed lines' tokens :rtype: list """ return [self.parseString(line) for line in lines] def parseString(self, line): """Parses a string with the language represented by this parser :param line: A string representing an instance of this parser's language :type line: str """ return self.language.parseString(line) def streamline(self): """Streamlines the language represented by this parser to make queries run faster""" t = time.time() self.language.streamline() log.info('Finished streamlining %s in %.02fs', self.__class__.__name__, time.time() - t) Add line state to base parser References #155
# -*- coding: utf-8 -*- import logging import time log = logging.getLogger(__name__) __all__ = ['BaseParser'] class BaseParser: """This abstract class represents a language backed by a PyParsing statement Multiple parsers can be easily chained together when they are all inheriting from this base class """ def __init__(self, language, streamline=False): self.language = language #: The parser can hold an internal state of the current line self.line_number = None if streamline: self.streamline() def parse_lines(self, lines): """Parses multiple lines in succession :return: An list of the resulting parsed lines' tokens :rtype: list """ return [self.parseString(line) for line in lines] def parseString(self, line, line_number=None): """Parses a string with the language represented by this parser :param str line: A string representing an instance of this parser's language :param int line_number: The current line number of the parser """ if line_number is None: return self.language.parseString(line) self.line_number = line_number result = self.language.parseString(line) self.line_number = None return result def streamline(self): """Streamlines the language represented by this parser to make queries run faster""" t = time.time() self.language.streamline() log.info('Finished streamlining %s in %.02fs', self.__class__.__name__, time.time() - t)
<commit_before># -*- coding: utf-8 -*- import logging import time log = logging.getLogger(__name__) __all__ = ['BaseParser'] class BaseParser: """This abstract class represents a language backed by a PyParsing statement Multiple parsers can be easily chained together when they are all inheriting from this base class """ def __init__(self, language, streamline=False): self.language = language if streamline: self.streamline() def parse_lines(self, lines): """Parses multiple lines in succession :return: An list of the resulting parsed lines' tokens :rtype: list """ return [self.parseString(line) for line in lines] def parseString(self, line): """Parses a string with the language represented by this parser :param line: A string representing an instance of this parser's language :type line: str """ return self.language.parseString(line) def streamline(self): """Streamlines the language represented by this parser to make queries run faster""" t = time.time() self.language.streamline() log.info('Finished streamlining %s in %.02fs', self.__class__.__name__, time.time() - t) <commit_msg>Add line state to base parser References #155<commit_after>
# -*- coding: utf-8 -*- import logging import time log = logging.getLogger(__name__) __all__ = ['BaseParser'] class BaseParser: """This abstract class represents a language backed by a PyParsing statement Multiple parsers can be easily chained together when they are all inheriting from this base class """ def __init__(self, language, streamline=False): self.language = language #: The parser can hold an internal state of the current line self.line_number = None if streamline: self.streamline() def parse_lines(self, lines): """Parses multiple lines in succession :return: An list of the resulting parsed lines' tokens :rtype: list """ return [self.parseString(line) for line in lines] def parseString(self, line, line_number=None): """Parses a string with the language represented by this parser :param str line: A string representing an instance of this parser's language :param int line_number: The current line number of the parser """ if line_number is None: return self.language.parseString(line) self.line_number = line_number result = self.language.parseString(line) self.line_number = None return result def streamline(self): """Streamlines the language represented by this parser to make queries run faster""" t = time.time() self.language.streamline() log.info('Finished streamlining %s in %.02fs', self.__class__.__name__, time.time() - t)
# -*- coding: utf-8 -*- import logging import time log = logging.getLogger(__name__) __all__ = ['BaseParser'] class BaseParser: """This abstract class represents a language backed by a PyParsing statement Multiple parsers can be easily chained together when they are all inheriting from this base class """ def __init__(self, language, streamline=False): self.language = language if streamline: self.streamline() def parse_lines(self, lines): """Parses multiple lines in succession :return: An list of the resulting parsed lines' tokens :rtype: list """ return [self.parseString(line) for line in lines] def parseString(self, line): """Parses a string with the language represented by this parser :param line: A string representing an instance of this parser's language :type line: str """ return self.language.parseString(line) def streamline(self): """Streamlines the language represented by this parser to make queries run faster""" t = time.time() self.language.streamline() log.info('Finished streamlining %s in %.02fs', self.__class__.__name__, time.time() - t) Add line state to base parser References #155# -*- coding: utf-8 -*- import logging import time log = logging.getLogger(__name__) __all__ = ['BaseParser'] class BaseParser: """This abstract class represents a language backed by a PyParsing statement Multiple parsers can be easily chained together when they are all inheriting from this base class """ def __init__(self, language, streamline=False): self.language = language #: The parser can hold an internal state of the current line self.line_number = None if streamline: self.streamline() def parse_lines(self, lines): """Parses multiple lines in succession :return: An list of the resulting parsed lines' tokens :rtype: list """ return [self.parseString(line) for line in lines] def parseString(self, line, line_number=None): """Parses a string with the language represented by this parser :param str line: A string representing an instance of this parser's language :param int line_number: The current line number of the parser """ if line_number is None: return self.language.parseString(line) self.line_number = line_number result = self.language.parseString(line) self.line_number = None return result def streamline(self): """Streamlines the language represented by this parser to make queries run faster""" t = time.time() self.language.streamline() log.info('Finished streamlining %s in %.02fs', self.__class__.__name__, time.time() - t)
<commit_before># -*- coding: utf-8 -*- import logging import time log = logging.getLogger(__name__) __all__ = ['BaseParser'] class BaseParser: """This abstract class represents a language backed by a PyParsing statement Multiple parsers can be easily chained together when they are all inheriting from this base class """ def __init__(self, language, streamline=False): self.language = language if streamline: self.streamline() def parse_lines(self, lines): """Parses multiple lines in succession :return: An list of the resulting parsed lines' tokens :rtype: list """ return [self.parseString(line) for line in lines] def parseString(self, line): """Parses a string with the language represented by this parser :param line: A string representing an instance of this parser's language :type line: str """ return self.language.parseString(line) def streamline(self): """Streamlines the language represented by this parser to make queries run faster""" t = time.time() self.language.streamline() log.info('Finished streamlining %s in %.02fs', self.__class__.__name__, time.time() - t) <commit_msg>Add line state to base parser References #155<commit_after># -*- coding: utf-8 -*- import logging import time log = logging.getLogger(__name__) __all__ = ['BaseParser'] class BaseParser: """This abstract class represents a language backed by a PyParsing statement Multiple parsers can be easily chained together when they are all inheriting from this base class """ def __init__(self, language, streamline=False): self.language = language #: The parser can hold an internal state of the current line self.line_number = None if streamline: self.streamline() def parse_lines(self, lines): """Parses multiple lines in succession :return: An list of the resulting parsed lines' tokens :rtype: list """ return [self.parseString(line) for line in lines] def parseString(self, line, line_number=None): """Parses a string with the language represented by this parser :param str line: A string representing an instance of this parser's language :param int line_number: The current line number of the parser """ if line_number is None: return self.language.parseString(line) self.line_number = line_number result = self.language.parseString(line) self.line_number = None return result def streamline(self): """Streamlines the language represented by this parser to make queries run faster""" t = time.time() self.language.streamline() log.info('Finished streamlining %s in %.02fs', self.__class__.__name__, time.time() - t)
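With the new optional argument, a caller can thread the current line number through so that anything consulting `parser.line_number` during a parse knows where it is. A usage sketch, where `parser` is any `BaseParser` subclass and `document_lines` is an assumed iterable of statements:

```python
# Drive the parser line by line, tagging each parse with its position.
for line_number, line in enumerate(document_lines, start=1):
    tokens = parser.parseString(line, line_number=line_number)
```

As written, `parse_lines()` still calls `parseString()` without a number, so batch parsing leaves `line_number` as `None`.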
0edc91468c5f424a57be80675422723f9bac4a89
falmer/auth/admin.py
falmer/auth/admin.py
from django.contrib import admin from django.contrib.admin import register from . import models @register(models.FalmerUser) class FalmerUserModelAdmin(admin.ModelAdmin): list_display = ('name_or_email', 'identifier', 'authority') list_filter = ('authority', ) search_fields = ('name', 'identifier')
from django.contrib import admin from django.contrib.auth.admin import UserAdmin # @register(models.FalmerUser) # class FalmerUserModelAdmin(admin.ModelAdmin): # list_display = ('name_or_email', 'identifier', 'authority') # list_filter = ('authority', ) # search_fields = ('name', 'identifier') from falmer.auth import models class FalmerUserAdmin(UserAdmin): ordering = ('name', 'identifier') list_display = ('name_or_email', 'identifier', 'authority') fieldsets = ( (None, {'fields': ('identifier', 'authority')}), ('Personal info', {'fields': ('name',)}), ('Permissions', { 'fields': ('is_active', 'is_staff', 'is_superuser', 'groups', 'user_permissions'), }) ) admin.site.register(models.FalmerUser, FalmerUserAdmin)
Replace FalmerUserAdmin with extension of base
Replace FalmerUserAdmin with extension of base
Python
mit
sussexstudent/falmer
from django.contrib import admin
from django.contrib.admin import register

from . import models


@register(models.FalmerUser)
class FalmerUserModelAdmin(admin.ModelAdmin):
    list_display = ('name_or_email', 'identifier', 'authority')
    list_filter = ('authority', )
    search_fields = ('name', 'identifier')
Replace FalmerUserAdmin with extension of base
from django.contrib import admin from django.contrib.auth.admin import UserAdmin # @register(models.FalmerUser) # class FalmerUserModelAdmin(admin.ModelAdmin): # list_display = ('name_or_email', 'identifier', 'authority') # list_filter = ('authority', ) # search_fields = ('name', 'identifier') from falmer.auth import models class FalmerUserAdmin(UserAdmin): ordering = ('name', 'identifier') list_display = ('name_or_email', 'identifier', 'authority') fieldsets = ( (None, {'fields': ('identifier', 'authority')}), ('Personal info', {'fields': ('name',)}), ('Permissions', { 'fields': ('is_active', 'is_staff', 'is_superuser', 'groups', 'user_permissions'), }) ) admin.site.register(models.FalmerUser, FalmerUserAdmin)
<commit_before>from django.contrib import admin
from django.contrib.admin import register

from . import models


@register(models.FalmerUser)
class FalmerUserModelAdmin(admin.ModelAdmin):
    list_display = ('name_or_email', 'identifier', 'authority')
    list_filter = ('authority', )
    search_fields = ('name', 'identifier')
<commit_msg>Replace FalmerUserAdmin with extension of base<commit_after>
from django.contrib import admin from django.contrib.auth.admin import UserAdmin # @register(models.FalmerUser) # class FalmerUserModelAdmin(admin.ModelAdmin): # list_display = ('name_or_email', 'identifier', 'authority') # list_filter = ('authority', ) # search_fields = ('name', 'identifier') from falmer.auth import models class FalmerUserAdmin(UserAdmin): ordering = ('name', 'identifier') list_display = ('name_or_email', 'identifier', 'authority') fieldsets = ( (None, {'fields': ('identifier', 'authority')}), ('Personal info', {'fields': ('name',)}), ('Permissions', { 'fields': ('is_active', 'is_staff', 'is_superuser', 'groups', 'user_permissions'), }) ) admin.site.register(models.FalmerUser, FalmerUserAdmin)
from django.contrib import admin
from django.contrib.admin import register

from . import models


@register(models.FalmerUser)
class FalmerUserModelAdmin(admin.ModelAdmin):
    list_display = ('name_or_email', 'identifier', 'authority')
    list_filter = ('authority', )
    search_fields = ('name', 'identifier')
Replace FalmerUserAdmin with extension of basefrom django.contrib import admin
from django.contrib.auth.admin import UserAdmin


# @register(models.FalmerUser)
# class FalmerUserModelAdmin(admin.ModelAdmin):
#     list_display = ('name_or_email', 'identifier', 'authority')
#     list_filter = ('authority', )
#     search_fields = ('name', 'identifier')
from falmer.auth import models


class FalmerUserAdmin(UserAdmin):
    ordering = ('name', 'identifier')
    list_display = ('name_or_email', 'identifier', 'authority')

    fieldsets = (
        (None, {'fields': ('identifier', 'authority')}),
        ('Personal info', {'fields': ('name',)}),
        ('Permissions', {
            'fields': ('is_active', 'is_staff', 'is_superuser', 'groups', 'user_permissions'),
        })
    )


admin.site.register(models.FalmerUser, FalmerUserAdmin)
<commit_before>from django.contrib import admin
from django.contrib.admin import register

from . import models


@register(models.FalmerUser)
class FalmerUserModelAdmin(admin.ModelAdmin):
    list_display = ('name_or_email', 'identifier', 'authority')
    list_filter = ('authority', )
    search_fields = ('name', 'identifier')
<commit_msg>Replace FalmerUserAdmin with extension of base<commit_after>from django.contrib import admin
from django.contrib.auth.admin import UserAdmin


# @register(models.FalmerUser)
# class FalmerUserModelAdmin(admin.ModelAdmin):
#     list_display = ('name_or_email', 'identifier', 'authority')
#     list_filter = ('authority', )
#     search_fields = ('name', 'identifier')
from falmer.auth import models


class FalmerUserAdmin(UserAdmin):
    ordering = ('name', 'identifier')
    list_display = ('name_or_email', 'identifier', 'authority')

    fieldsets = (
        (None, {'fields': ('identifier', 'authority')}),
        ('Personal info', {'fields': ('name',)}),
        ('Permissions', {
            'fields': ('is_active', 'is_staff', 'is_superuser', 'groups', 'user_permissions'),
        })
    )


admin.site.register(models.FalmerUser, FalmerUserAdmin)
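One follow-up this kind of subclass usually needs: Django's stock `UserAdmin` add-form fieldsets reference `username`, which this model replaces with `identifier`, so the "add user" page typically requires its own override. A hedged sketch of that extra piece (not present in the commit; field names are assumptions):

```python
# Hypothetical addition for a user model keyed on 'identifier'.
class FalmerUserAdmin(UserAdmin):
    add_fieldsets = (
        (None, {
            'classes': ('wide',),
            'fields': ('identifier', 'password1', 'password2'),
        }),
    )
```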
ea54f0a306c6defa4edc58c50794da0083ed345d
setup_app.py
setup_app.py
import os from flask_app.database import init_db # Generate new secret key secret_key = os.urandom(24).encode('hex').strip() with open('flask_app/secret_key.py', 'w') as key_file: key_file.write('secret_key = """' + secret_key + '""".decode("hex")') # Initialize database init_db()
import os from flask_app.database import init_db # Generate new secret key key_file_path = 'flask_app/secret_key.py' if not os.path.isfile(key_file_path): secret_key = os.urandom(24).encode('hex').strip() with open(key_file_path, 'w') as key_file: key_file.write('secret_key = """' + secret_key + '""".decode("hex")') # Initialize database init_db()
Check if keyfile exists before generating new key
Check if keyfile exists before generating new key
Python
mit
szeestraten/kidsakoder-minecraft
import os from flask_app.database import init_db # Generate new secret key secret_key = os.urandom(24).encode('hex').strip() with open('flask_app/secret_key.py', 'w') as key_file: key_file.write('secret_key = """' + secret_key + '""".decode("hex")') # Initialize database init_db() Check if keyfile exists before generating new key
import os from flask_app.database import init_db # Generate new secret key key_file_path = 'flask_app/secret_key.py' if not os.path.isfile(key_file_path): secret_key = os.urandom(24).encode('hex').strip() with open(key_file_path, 'w') as key_file: key_file.write('secret_key = """' + secret_key + '""".decode("hex")') # Initialize database init_db()
<commit_before> import os from flask_app.database import init_db # Generate new secret key secret_key = os.urandom(24).encode('hex').strip() with open('flask_app/secret_key.py', 'w') as key_file: key_file.write('secret_key = """' + secret_key + '""".decode("hex")') # Initialize database init_db() <commit_msg>Check if keyfile exists before generating new key<commit_after>
import os from flask_app.database import init_db # Generate new secret key key_file_path = 'flask_app/secret_key.py' if not os.path.isfile(key_file_path): secret_key = os.urandom(24).encode('hex').strip() with open(key_file_path, 'w') as key_file: key_file.write('secret_key = """' + secret_key + '""".decode("hex")') # Initialize database init_db()
import os from flask_app.database import init_db # Generate new secret key secret_key = os.urandom(24).encode('hex').strip() with open('flask_app/secret_key.py', 'w') as key_file: key_file.write('secret_key = """' + secret_key + '""".decode("hex")') # Initialize database init_db() Check if keyfile exists before generating new key import os from flask_app.database import init_db # Generate new secret key key_file_path = 'flask_app/secret_key.py' if not os.path.isfile(key_file_path): secret_key = os.urandom(24).encode('hex').strip() with open(key_file_path, 'w') as key_file: key_file.write('secret_key = """' + secret_key + '""".decode("hex")') # Initialize database init_db()
<commit_before> import os from flask_app.database import init_db # Generate new secret key secret_key = os.urandom(24).encode('hex').strip() with open('flask_app/secret_key.py', 'w') as key_file: key_file.write('secret_key = """' + secret_key + '""".decode("hex")') # Initialize database init_db() <commit_msg>Check if keyfile exists before generating new key<commit_after> import os from flask_app.database import init_db # Generate new secret key key_file_path = 'flask_app/secret_key.py' if not os.path.isfile(key_file_path): secret_key = os.urandom(24).encode('hex').strip() with open(key_file_path, 'w') as key_file: key_file.write('secret_key = """' + secret_key + '""".decode("hex")') # Initialize database init_db()
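Both the hex-encoding (`os.urandom(24).encode('hex')`) and the `.decode("hex")` written into the key file are Python 2 idioms. A Python 3 sketch of the same idempotent key generation (an illustration, not the project's code):

```python
import os
import secrets

key_file_path = 'flask_app/secret_key.py'

if not os.path.isfile(key_file_path):
    # secrets.token_hex(24) yields 48 hex chars, matching the entropy
    # of the hex-encoded os.urandom(24) in the Python 2 version.
    with open(key_file_path, 'w') as key_file:
        key_file.write('secret_key = bytes.fromhex("%s")\n'
                       % secrets.token_hex(24))
```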
0451d8e1ee2ad136e3b0f69c43dca9658fdbb85c
16B/spw_setup.py
16B/spw_setup.py
# Line SPW setup for 16B projects

linespw_dict = {0: ["HI", "1.420405752GHz"],
                3: ["OH1612", "1.612231GHz"],
                5: ["OH1665", "1.6654018GHz"],
                6: ["OH1667", "1.667359GHz"],
                7: ["OH1720", "1.72053GHz"],
                9: ["H152alp", "1.85425GHz"],
                8: ["H153alp", "1.81825GHz"],
                1: ["H166alp", "1.42473GHz"],
                4: ["H158alp", "1.65154GHz"],
                2: ["H164alp", "1.47734GHz"]}

# Line SPW setup for 16B projects

linespw_dict = {0: ["HI", "1.420405752GHz", 4096],
                3: ["OH1612", "1.612231GHz", 256],
                5: ["OH1665", "1.6654018GHz", 256],
                6: ["OH1667", "1.667359GHz", 256],
                7: ["OH1720", "1.72053GHz", 256],
                9: ["H152alp", "1.85425GHz", 128],
                8: ["H153alp", "1.81825GHz", 128],
                1: ["H166alp", "1.42473GHz", 128],
                4: ["H158alp", "1.65154GHz", 128],
                2: ["H164alp", "1.47734GHz", 128]}
Add channel numbers to 16B spectral setup
Add channel numbers to 16B spectral setup
Python
mit
e-koch/VLA_Lband,e-koch/VLA_Lband
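With this commit each spectral-window entry carries a third element, the channel count. A small illustrative loop (not from the repo) showing how the new triples unpack:

for spw_id, (line_name, rest_freq, n_chan) in sorted(linespw_dict.items()):
    print("SPW {0}: {1} at {2}, {3} channels".format(spw_id, line_name, rest_freq, n_chan))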
e4b516f612d60eac8cb278d1c8675b3fdbad8652
windmill/server/__init__.py
windmill/server/__init__.py
# Copyright (c) 2006-2007 Open Source Applications Foundation
# Copyright (c) 2008-2009 Mikeal Rogers <mikeal.rogers@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import wsgi, convergence

forwarding_conditions = [lambda e : 'google.com/safebrowsing/downloads' not in e['reconstructed_url']]

def add_forward_condition(condition):
    forwarding_conditions.append(condition)

def remove_forward_condition(condition):
    while condition in forwarding_conditions:
        forwarding_conditions.remove(condition)

# Copyright (c) 2006-2007 Open Source Applications Foundation
# Copyright (c) 2008-2009 Mikeal Rogers <mikeal.rogers@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import wsgi, convergence

forwarding_conditions = [
    lambda e : 'google.com/safebrowsing/downloads' not in e['reconstructed_url'],
    lambda e : 'mozilla.org/en-US/firefox/livebookmarks.html' not in e['reconstructed_url'],
]

def add_forward_condition(condition):
    forwarding_conditions.append(condition)

def remove_forward_condition(condition):
    while condition in forwarding_conditions:
        forwarding_conditions.remove(condition)
Fix for [ticket:281]. Do not forward livebookmarks request.
Fix for [ticket:281]. Do not forward livebookmarks request.

git-svn-id: 87d19257dd11500985d055ec4730e446075a5f07@1261 78c7df6f-8922-0410-bcd3-9426b1ad491b
Python
apache-2.0
ept/windmill,ept/windmill,ept/windmill
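The change above only appends a second predicate to `forwarding_conditions`; the code that evaluates the list lives elsewhere in windmill and is not shown in this record. Presumably a request is forwarded only when every condition passes, along the lines of this hypothetical helper:

def should_forward(environ):
    # every lambda must return True for the request to be proxied onward
    return all(condition(environ) for condition in forwarding_conditions)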
1a77599911f26cf660e9d11693ab95aef38d44b7
first_django/urls.py
first_django/urls.py
"""first_django URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.8/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include, url from django.contrib import admin from my_app import views as my_app_views urlpatterns = [ url(r'^admin/', include(admin.site.urls)), url(r'^add/$', my_app_views.add, name='add'), url(r'^index/$', my_app_views.index, name='index'), url(r'^$', my_app_views.func) ]
"""first_django URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.8/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include, url from django.contrib import admin from my_app import views as my_app_views urlpatterns = [ url(r'^admin/', include(admin.site.urls)), url(r'^add/$', my_app_views.add, name='add'), url(r'^index/$', my_app_views.index, name='index'), url(r'^$', my_app_views.func) ]
Test use vcs plugin to commit.
Test use vcs plugin to commit.
Python
apache-2.0
wmh-demos/django-first-demo,wmh-demos/django-first-demo
"""first_django URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.8/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include, url from django.contrib import admin from my_app import views as my_app_views urlpatterns = [ url(r'^admin/', include(admin.site.urls)), url(r'^add/$', my_app_views.add, name='add'), url(r'^index/$', my_app_views.index, name='index'), url(r'^$', my_app_views.func) ] Test use vcs plugin to commit.
"""first_django URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.8/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include, url from django.contrib import admin from my_app import views as my_app_views urlpatterns = [ url(r'^admin/', include(admin.site.urls)), url(r'^add/$', my_app_views.add, name='add'), url(r'^index/$', my_app_views.index, name='index'), url(r'^$', my_app_views.func) ]
<commit_before>"""first_django URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.8/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include, url from django.contrib import admin from my_app import views as my_app_views urlpatterns = [ url(r'^admin/', include(admin.site.urls)), url(r'^add/$', my_app_views.add, name='add'), url(r'^index/$', my_app_views.index, name='index'), url(r'^$', my_app_views.func) ] <commit_msg>Test use vcs plugin to commit.<commit_after>
"""first_django URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.8/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include, url from django.contrib import admin from my_app import views as my_app_views urlpatterns = [ url(r'^admin/', include(admin.site.urls)), url(r'^add/$', my_app_views.add, name='add'), url(r'^index/$', my_app_views.index, name='index'), url(r'^$', my_app_views.func) ]
"""first_django URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.8/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include, url from django.contrib import admin from my_app import views as my_app_views urlpatterns = [ url(r'^admin/', include(admin.site.urls)), url(r'^add/$', my_app_views.add, name='add'), url(r'^index/$', my_app_views.index, name='index'), url(r'^$', my_app_views.func) ] Test use vcs plugin to commit."""first_django URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.8/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include, url from django.contrib import admin from my_app import views as my_app_views urlpatterns = [ url(r'^admin/', include(admin.site.urls)), url(r'^add/$', my_app_views.add, name='add'), url(r'^index/$', my_app_views.index, name='index'), url(r'^$', my_app_views.func) ]
<commit_before>"""first_django URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.8/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include, url from django.contrib import admin from my_app import views as my_app_views urlpatterns = [ url(r'^admin/', include(admin.site.urls)), url(r'^add/$', my_app_views.add, name='add'), url(r'^index/$', my_app_views.index, name='index'), url(r'^$', my_app_views.func) ] <commit_msg>Test use vcs plugin to commit.<commit_after>"""first_django URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.8/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include, url from django.contrib import admin from my_app import views as my_app_views urlpatterns = [ url(r'^admin/', include(admin.site.urls)), url(r'^add/$', my_app_views.add, name='add'), url(r'^index/$', my_app_views.index, name='index'), url(r'^$', my_app_views.func) ]
6c891692c5595f4cf9822bee6b42a33f141af5ed
fmn/consumer/util.py
fmn/consumer/util.py
import fedora.client
import logging

log = logging.getLogger("fmn")


def new_packager(topic, msg):
    """ Returns a username if the message is about a new packager in FAS. """
    if '.fas.group.member.sponsor' in topic:
        group = msg['msg']['group']
        if group == 'packager':
            return msg['msg']['user']
    return None


def new_badges_user(topic, msg):
    """ Returns a username if the message is about a new fedbadges user. """
    if '.fedbadges.person.login.first' in topic:
        return msg['msg']['user']['username']
    return None


def get_fas_email(config, username):
    """ Return FAS email associated with a username.

    We use this to try and get the right email for new autocreated users.
    We used to just use $USERNAME@fp.o, but when first created most users
    don't have that alias available yet.
    """
    try:
        fas = fedora.client.AccountSystem(**config['fas_credentials'])
        person = fas.person_by_username(username)
        if person.email:
            return person.email
        raise ValueError("No email found: %r, %r" % (person.email, username))
    except Exception:
        log.exception("Failed to get FAS email for %r" % username)
        return '%s@fedoraproject.org' % username

import fedora.client
import logging

log = logging.getLogger("fmn")


def new_packager(topic, msg):
    """ Returns a username if the message is about a new packager in FAS. """
    if '.fas.group.member.sponsor' in topic:
        group = msg['msg']['group']
        if group == 'packager':
            return msg['msg']['user']
    return None


def new_badges_user(topic, msg):
    """ Returns a username if the message is about a new fedbadges user. """
    if '.fedbadges.person.login.first' in topic:
        return msg['msg']['user']['username']
    return None


def get_fas_email(config, username):
    """ Return FAS email associated with a username.

    We use this to try and get the right email for new autocreated users.
    We used to just use $USERNAME@fp.o, but when first created most users
    don't have that alias available yet.
    """
    try:
        fas = fedora.client.AccountSystem(**config['fas_credentials'])
        person = fas.person_by_username(username)
        if person.get('email'):
            return person['email']
        raise ValueError("No email found: %r, %r" % (person.email, username))
    except Exception:
        log.exception("Failed to get FAS email for %r" % username)
        return '%s@fedoraproject.org' % username
Use dict interface to bunch.
Use dict interface to bunch.

I'm not sure why, but we got this error on the server::

    Traceback (most recent call last):
      File "fmn/consumer/util.py", line 33, in get_fas_email
        if person.email:
    AttributeError: 'dict' object has no attribute 'email'

This should fix that.
Python
lgpl-2.1
jeremycline/fmn,jeremycline/fmn,jeremycline/fmn
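The traceback in the commit message explains the fix: `person_by_username` returned a plain dict rather than an attribute-style bunch, so `person.email` raised `AttributeError`, while `person.get('email')` works and also tolerates a missing key. Note that the `ValueError` line still reads `person.email`; if it were ever reached it would itself raise `AttributeError`, which the enclosing `except Exception` happens to swallow. A toy reproduction:

person = {"username": "alice"}  # no 'email' key
try:
    person.email
except AttributeError as err:
    print(err)              # 'dict' object has no attribute 'email'
print(person.get("email"))  # prints None instead of raising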
ef497d6bac0f60dee04e47a8ec742744c1ab9427
poyo/patterns.py
poyo/patterns.py
# -*- coding: utf-8 -*-

INDENT = r"(?P<indent>^ *)"
VARIABLE = r"(?P<variable>.+):"
VALUE = r"(?P<value>((?P<q2>['\"]).*?(?P=q2))|[^#]+?)"
NEWLINE = r"$\n"
BLANK = r" +"
INLINE_COMMENT = r"( +#.*)?"

COMMENT = r"^ *#.*" + NEWLINE
BLANK_LINE = r"^[ \t]*" + NEWLINE
DASHES = r"^---" + NEWLINE

SECTION = INDENT + VARIABLE + INLINE_COMMENT + NEWLINE
SIMPLE = INDENT + VARIABLE + BLANK + VALUE + INLINE_COMMENT + NEWLINE

NULL = r"\b(null|Null|NULL|~)\b"
TRUE = r"\b(true|True|TRUE)\b"
FALSE = r"\b(false|False|FALSE)\b"
INT = r"[-+]?[0-9]+"
FLOAT = r"([-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?)"
STR = r"(?P<quotes>['\"]?).*(?P=quotes)"

# -*- coding: utf-8 -*-

INDENT = r"(?P<indent>^ *)"
VARIABLE = r"(?P<variable>.+):"
VALUE = r"(?P<value>(?:(?P<q2>['\"]).*?(?P=q2))|[^#]+?)"
NEWLINE = r"$\n"
BLANK = r" +"
INLINE_COMMENT = r"(?: +#.*)?"

COMMENT = r"^ *#.*" + NEWLINE
BLANK_LINE = r"^[ \t]*" + NEWLINE
DASHES = r"^---" + NEWLINE

SECTION = INDENT + VARIABLE + INLINE_COMMENT + NEWLINE
SIMPLE = INDENT + VARIABLE + BLANK + VALUE + INLINE_COMMENT + NEWLINE

NULL = r"\b(null|Null|NULL|~)\b"
TRUE = r"\b(true|True|TRUE)\b"
FALSE = r"\b(false|False|FALSE)\b"
INT = r"[-+]?[0-9]+"
FLOAT = r"([-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?)"
STR = r"(?P<quotes>['\"]?).*(?P=quotes)"
Update to non-capturing groups in VALUE and INLINE_COMMENT
Update to non-capturing groups in VALUE and INLINE_COMMENT
Python
mit
hackebrot/poyo
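Replacing `(...)` with `(?:...)` keeps the grouping needed for alternation and repetition but stops the parentheses from capturing, so only the named groups occupy slots in the match object. A quick demonstration (not from poyo's test suite):

import re

VALUE_OLD = r"(?P<value>((?P<q2>['\"]).*?(?P=q2))|[^#]+?)$"
VALUE_NEW = r"(?P<value>(?:(?P<q2>['\"]).*?(?P=q2))|[^#]+?)$"

print(re.match(VALUE_OLD, "'hi'").groups())  # ("'hi'", "'hi'", "'") -- stray unnamed group
print(re.match(VALUE_NEW, "'hi'").groups())  # ("'hi'", "'")         -- named groups only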
12eea471fb50c229dff5627e1e97b7ddeceedd18
TWLight/ezproxy/urls.py
TWLight/ezproxy/urls.py
from django.conf.urls import url
from django.contrib.auth.decorators import login_required

from . import views

urlpatterns = [
    url(r'^u/(?P<url>http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)$',
        login_required(views.EZProxyAuth.as_view()),
        name='ezproxy_auth_u'
    ),
    url(r'^r/(?P<token>([a-zA-Z]|[0-9]|[$-_@.&+])+)$',
        login_required(views.EZProxyAuth.as_view()),
        name='ezproxy_auth_r'
    ),
]

from django.conf.urls import url
from django.contrib.auth.decorators import login_required

from . import views

urlpatterns = [
    url(r'^u/(?P<url>http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)$',
        login_required(views.EZProxyAuth.as_view()),
        name='ezproxy_auth_u'
    ),
    url(r'^r/(?P<token>(ezp\.[a-zA-Z]|[0-9]|[$-_@.&+])+)$',
        login_required(views.EZProxyAuth.as_view()),
        name='ezproxy_auth_r'
    ),
]
Use a more precise pattern to id ^R ezproxy url tokens.
Use a more precise pattern to id ^R ezproxy url tokens.
Python
mit
WikipediaLibrary/TWLight,WikipediaLibrary/TWLight,WikipediaLibrary/TWLight,WikipediaLibrary/TWLight,WikipediaLibrary/TWLight
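Under the old `^r/` pattern, any run of letters, digits, and characters in the `[$-_@.&+]` class matched, so unrelated paths could be mistaken for tokens. The new pattern admits lowercase letters only when introduced by the literal `ezp.` marker; uppercase letters and digits still match on their own because the `[$-_@.&+]` class spans the ASCII range `$` through `_`. An illustrative comparison with made-up inputs:

import re

OLD = r'^([a-zA-Z]|[0-9]|[$-_@.&+])+$'
NEW = r'^(ezp\.[a-zA-Z]|[0-9]|[$-_@.&+])+$'

for candidate in ('favicon.ico', 'ezp.RBGNQDD8T3NXGPL'):
    print(candidate, bool(re.match(OLD, candidate)), bool(re.match(NEW, candidate)))
    # favicon.ico          True False
    # ezp.RBGNQDD8T3NXGPL  True True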
f1941a96a08545ac83ff15675331939bc2beddcb
server/gauges.py
server/gauges.py
#!/usr/bin/env python3

import os
from flask import Flask, jsonify, send_from_directory, abort, Response, request
import psycopg2

app = Flask(__name__)

conn = psycopg2.connect("dbname='rivers' user='nelson' host='localhost' password='NONE'")

#@app.route('/')
#def index():
#    return send_from_directory('.', 'index.html')

@app.route('/gauges/list/<string:xmin>/<string:ymin>/<string:xmax>/<string:ymax>', methods=['GET'])
def show_gaugelist(xmin,ymin,xmax,ymax):
    print('hi')
    cur = conn.cursor()
    cur.execute("""
        SELECT *, ST_X(the_geom) as lng, ST_Y(the_geom) as lat
        FROM gageloc
        WHERE geom @  -- contained by, gets fewer rows -- ONE YOU NEED!
            ST_MakeEnvelope (
                %(xmin)s, %(ymin)s,  -- bounding
                %(xmax)s, %(ymax)s,  -- box limits
                900913)
        """,
        {"xmin":xmin,"ymin":ymin,"xmax":xmax,"ymax":ymax})
    print(cur.fetchall())
    return Response("hi", mimetype='text')
    #return Response(outcss, mimetype='text/css')

if __name__ == '__main__':
    app.run(debug=True)

#!/usr/bin/env python3

import os
from flask import Flask, jsonify, send_from_directory, abort, Response, request
import psycopg2
import psycopg2.extras

app = Flask(__name__)

conn = psycopg2.connect("dbname='rivers' user='nelson' host='localhost' password='NONE'")

#@app.route('/')
#def index():
#    return send_from_directory('.', 'index.html')

@app.route('/gauges/list/<string:xmin>/<string:ymin>/<string:xmax>/<string:ymax>', methods=['GET'])
def show_gaugelist(xmin,ymin,xmax,ymax):
    cur = conn.cursor(cursor_factory = psycopg2.extras.DictCursor)
    cur.execute("""
        SELECT *, ST_X(the_geom) as lng, ST_Y(the_geom) as lat
        FROM gageloc
        WHERE geom @  -- contained by, gets fewer rows -- ONE YOU NEED!
            ST_MakeEnvelope (
                %(xmin)s, %(ymin)s,  -- bounding
                %(xmax)s, %(ymax)s,  -- box limits
                900913)
        """,
        {"xmin":xmin,"ymin":ymin,"xmax":xmax,"ymax":ymax})
    return jsonify(cur.fetchall())

if __name__ == '__main__':
    app.run(debug=True)
Return data in dict form
Return data in dict form
Python
bsd-3-clause
r-barnes/waterviz,HydroLogic/waterviz,r-barnes/waterviz,HydroLogic/waterviz,r-barnes/waterviz,HydroLogic/waterviz,r-barnes/waterviz,HydroLogic/waterviz
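`psycopg2.extras.DictCursor` yields `DictRow` objects, which subclass `list` but also allow lookup by column name. Because they stay list-like, `jsonify(cur.fetchall())` emits arrays of values; `psycopg2.extras.RealDictCursor` would be the choice if name-keyed JSON objects were wanted instead. A minimal sketch of the key/index duality (the DSN is a placeholder and a reachable Postgres is assumed):

import psycopg2
import psycopg2.extras

conn = psycopg2.connect("dbname='rivers' user='nelson' host='localhost'")  # placeholder DSN
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
cur.execute("SELECT 1 AS one, 2 AS two")
row = cur.fetchone()
print(row['one'], row[1])  # DictRow supports both key and positional access -> 1 2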
349b0ca88a85a8e9f234debe35807d2c9bc93544
src/setup.py
src/setup.py
from distutils.core import setup from distutils.command.install import install import os import shutil class issue(Exception): def __init__(self, errorStr): self.errorStr = errorStr def __str__(self): return repr(self.errorStr) class post_install(install): def copyStuff(self, dataDir, destDir): if os.path.isdir(dataDir) == False: os.makedirs(dataDir) _bsDir = os.path.abspath(destDir) if os.path.exists(_bsDir) == False: raise issue('No files at: ' % (_bsDir,)) _files = [_bsDir+'/'+_file for _file in os.listdir(_bsDir)] for _file in _files: print 'copying %s -> %s' % (_file, dataDir,) shutil.copy2(_file, dataDir) def run(self): install.run(self) setup(cmdclass = {'install': post_install}, name = 'deeptools', description = 'Deep Tools', author = 'Deep Grant', author_email = 'ralph.wiggum@icloud.com', url = 'http://deepis.com', version = '0.1', packages = ['deep', 'deep.tools', ], scripts = [ ] )
from distutils.core import setup from distutils.command.install import install import os import shutil class issue(Exception): def __init__(self, errorStr): self.errorStr = errorStr def __str__(self): return repr(self.errorStr) class post_install(install): def copyStuff(self, dataDir, destDir): if os.path.isdir(dataDir) == False: os.makedirs(dataDir) _bsDir = os.path.abspath(destDir) if os.path.exists(_bsDir) == False: raise issue('No files at: ' % (_bsDir,)) _files = [_bsDir+'/'+_file for _file in os.listdir(_bsDir)] for _file in _files: print 'copying %s -> %s' % (_file, dataDir,) shutil.copy2(_file, dataDir) def run(self): install.run(self) setup(cmdclass = {'install': post_install}, name = 'deeptools', description = 'Deep Tools', author = 'Deep Grant', author_email = 'ralph.wiggum@icloud.com', url = 'http://deepis.com', version = '0.1', packages = ['deep', 'deep.tools', ], scripts = ['scripts/clusterDb.py', ] )
Add clusterDB to python egg
Add clusterDB to python egg
Python
apache-2.0
deepgrant/deep-tools
from distutils.core import setup from distutils.command.install import install import os import shutil class issue(Exception): def __init__(self, errorStr): self.errorStr = errorStr def __str__(self): return repr(self.errorStr) class post_install(install): def copyStuff(self, dataDir, destDir): if os.path.isdir(dataDir) == False: os.makedirs(dataDir) _bsDir = os.path.abspath(destDir) if os.path.exists(_bsDir) == False: raise issue('No files at: ' % (_bsDir,)) _files = [_bsDir+'/'+_file for _file in os.listdir(_bsDir)] for _file in _files: print 'copying %s -> %s' % (_file, dataDir,) shutil.copy2(_file, dataDir) def run(self): install.run(self) setup(cmdclass = {'install': post_install}, name = 'deeptools', description = 'Deep Tools', author = 'Deep Grant', author_email = 'ralph.wiggum@icloud.com', url = 'http://deepis.com', version = '0.1', packages = ['deep', 'deep.tools', ], scripts = [ ] ) Add clusterDB to python egg
from distutils.core import setup from distutils.command.install import install import os import shutil class issue(Exception): def __init__(self, errorStr): self.errorStr = errorStr def __str__(self): return repr(self.errorStr) class post_install(install): def copyStuff(self, dataDir, destDir): if os.path.isdir(dataDir) == False: os.makedirs(dataDir) _bsDir = os.path.abspath(destDir) if os.path.exists(_bsDir) == False: raise issue('No files at: ' % (_bsDir,)) _files = [_bsDir+'/'+_file for _file in os.listdir(_bsDir)] for _file in _files: print 'copying %s -> %s' % (_file, dataDir,) shutil.copy2(_file, dataDir) def run(self): install.run(self) setup(cmdclass = {'install': post_install}, name = 'deeptools', description = 'Deep Tools', author = 'Deep Grant', author_email = 'ralph.wiggum@icloud.com', url = 'http://deepis.com', version = '0.1', packages = ['deep', 'deep.tools', ], scripts = ['scripts/clusterDb.py', ] )
<commit_before>from distutils.core import setup from distutils.command.install import install import os import shutil class issue(Exception): def __init__(self, errorStr): self.errorStr = errorStr def __str__(self): return repr(self.errorStr) class post_install(install): def copyStuff(self, dataDir, destDir): if os.path.isdir(dataDir) == False: os.makedirs(dataDir) _bsDir = os.path.abspath(destDir) if os.path.exists(_bsDir) == False: raise issue('No files at: ' % (_bsDir,)) _files = [_bsDir+'/'+_file for _file in os.listdir(_bsDir)] for _file in _files: print 'copying %s -> %s' % (_file, dataDir,) shutil.copy2(_file, dataDir) def run(self): install.run(self) setup(cmdclass = {'install': post_install}, name = 'deeptools', description = 'Deep Tools', author = 'Deep Grant', author_email = 'ralph.wiggum@icloud.com', url = 'http://deepis.com', version = '0.1', packages = ['deep', 'deep.tools', ], scripts = [ ] ) <commit_msg>Add clusterDB to python egg<commit_after>
from distutils.core import setup from distutils.command.install import install import os import shutil class issue(Exception): def __init__(self, errorStr): self.errorStr = errorStr def __str__(self): return repr(self.errorStr) class post_install(install): def copyStuff(self, dataDir, destDir): if os.path.isdir(dataDir) == False: os.makedirs(dataDir) _bsDir = os.path.abspath(destDir) if os.path.exists(_bsDir) == False: raise issue('No files at: ' % (_bsDir,)) _files = [_bsDir+'/'+_file for _file in os.listdir(_bsDir)] for _file in _files: print 'copying %s -> %s' % (_file, dataDir,) shutil.copy2(_file, dataDir) def run(self): install.run(self) setup(cmdclass = {'install': post_install}, name = 'deeptools', description = 'Deep Tools', author = 'Deep Grant', author_email = 'ralph.wiggum@icloud.com', url = 'http://deepis.com', version = '0.1', packages = ['deep', 'deep.tools', ], scripts = ['scripts/clusterDb.py', ] )
from distutils.core import setup from distutils.command.install import install import os import shutil class issue(Exception): def __init__(self, errorStr): self.errorStr = errorStr def __str__(self): return repr(self.errorStr) class post_install(install): def copyStuff(self, dataDir, destDir): if os.path.isdir(dataDir) == False: os.makedirs(dataDir) _bsDir = os.path.abspath(destDir) if os.path.exists(_bsDir) == False: raise issue('No files at: ' % (_bsDir,)) _files = [_bsDir+'/'+_file for _file in os.listdir(_bsDir)] for _file in _files: print 'copying %s -> %s' % (_file, dataDir,) shutil.copy2(_file, dataDir) def run(self): install.run(self) setup(cmdclass = {'install': post_install}, name = 'deeptools', description = 'Deep Tools', author = 'Deep Grant', author_email = 'ralph.wiggum@icloud.com', url = 'http://deepis.com', version = '0.1', packages = ['deep', 'deep.tools', ], scripts = [ ] ) Add clusterDB to python eggfrom distutils.core import setup from distutils.command.install import install import os import shutil class issue(Exception): def __init__(self, errorStr): self.errorStr = errorStr def __str__(self): return repr(self.errorStr) class post_install(install): def copyStuff(self, dataDir, destDir): if os.path.isdir(dataDir) == False: os.makedirs(dataDir) _bsDir = os.path.abspath(destDir) if os.path.exists(_bsDir) == False: raise issue('No files at: ' % (_bsDir,)) _files = [_bsDir+'/'+_file for _file in os.listdir(_bsDir)] for _file in _files: print 'copying %s -> %s' % (_file, dataDir,) shutil.copy2(_file, dataDir) def run(self): install.run(self) setup(cmdclass = {'install': post_install}, name = 'deeptools', description = 'Deep Tools', author = 'Deep Grant', author_email = 'ralph.wiggum@icloud.com', url = 'http://deepis.com', version = '0.1', packages = ['deep', 'deep.tools', ], scripts = ['scripts/clusterDb.py', ] )
<commit_before>from distutils.core import setup from distutils.command.install import install import os import shutil class issue(Exception): def __init__(self, errorStr): self.errorStr = errorStr def __str__(self): return repr(self.errorStr) class post_install(install): def copyStuff(self, dataDir, destDir): if os.path.isdir(dataDir) == False: os.makedirs(dataDir) _bsDir = os.path.abspath(destDir) if os.path.exists(_bsDir) == False: raise issue('No files at: ' % (_bsDir,)) _files = [_bsDir+'/'+_file for _file in os.listdir(_bsDir)] for _file in _files: print 'copying %s -> %s' % (_file, dataDir,) shutil.copy2(_file, dataDir) def run(self): install.run(self) setup(cmdclass = {'install': post_install}, name = 'deeptools', description = 'Deep Tools', author = 'Deep Grant', author_email = 'ralph.wiggum@icloud.com', url = 'http://deepis.com', version = '0.1', packages = ['deep', 'deep.tools', ], scripts = [ ] ) <commit_msg>Add clusterDB to python egg<commit_after>from distutils.core import setup from distutils.command.install import install import os import shutil class issue(Exception): def __init__(self, errorStr): self.errorStr = errorStr def __str__(self): return repr(self.errorStr) class post_install(install): def copyStuff(self, dataDir, destDir): if os.path.isdir(dataDir) == False: os.makedirs(dataDir) _bsDir = os.path.abspath(destDir) if os.path.exists(_bsDir) == False: raise issue('No files at: ' % (_bsDir,)) _files = [_bsDir+'/'+_file for _file in os.listdir(_bsDir)] for _file in _files: print 'copying %s -> %s' % (_file, dataDir,) shutil.copy2(_file, dataDir) def run(self): install.run(self) setup(cmdclass = {'install': post_install}, name = 'deeptools', description = 'Deep Tools', author = 'Deep Grant', author_email = 'ralph.wiggum@icloud.com', url = 'http://deepis.com', version = '0.1', packages = ['deep', 'deep.tools', ], scripts = ['scripts/clusterDb.py', ] )
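Two notes on the setup.py row above. First, the scripts argument lists files that distutils copies into the interpreter's bin directory at install time, so 'scripts/clusterDb.py' is resolved relative to setup.py. Second, the error path shared by both versions is itself buggy: 'No files at: ' % (_bsDir,) has no %s placeholder, so Python raises TypeError("not all arguments converted during string formatting") instead of the intended message. Keeping the row's Python 2 style, the corrected raise would be:

raise issue('No files at: %s' % (_bsDir,))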
43acbf596e615f68eef119fb828aff55df0586d5
FlaskRequests.py
FlaskRequests.py
from flask import Request, Response class RqRequest(Request): def rq_headers(self): headers = {} if 'Authorization' in self.headers: headers['Authorization'] = self.headers['Authorization'] if self.headers['Accept'] == 'application/xml': headers['Accept'] = 'application/xml' else: headers['Accept'] = 'application/json' return self.headers # request.form is a Werkzeug MultiDict # we want to create a string def rq_data(self): data = "" for k, v in self.form.iteritems(): data += k + "=" + v + "&" return data def rq_params(self): return self.args.to_dict()
from flask import Request, Response class RqRequest(Request): def rq_headers(self): headers = {} if 'Authorization' in self.headers: headers['Authorization'] = self.headers['Authorization'] if self.headers.get('Accept') == 'application/xml': headers['Accept'] = 'application/xml' else: headers['Accept'] = 'application/json' return self.headers # request.form is a Werkzeug MultiDict # we want to create a string def rq_data(self): data = "" for k, v in self.form.iteritems(): data += k + "=" + v + "&" return data def rq_params(self): return self.args.to_dict()
Fix KeyError when accessing non-existing header
Fix KeyError when accessing non-existing header
Python
mit
Timothee/Passeplat
from flask import Request, Response class RqRequest(Request): def rq_headers(self): headers = {} if 'Authorization' in self.headers: headers['Authorization'] = self.headers['Authorization'] if self.headers['Accept'] == 'application/xml': headers['Accept'] = 'application/xml' else: headers['Accept'] = 'application/json' return self.headers # request.form is a Werkzeug MultiDict # we want to create a string def rq_data(self): data = "" for k, v in self.form.iteritems(): data += k + "=" + v + "&" return data def rq_params(self): return self.args.to_dict() Fix KeyError when accessing non-existing header
from flask import Request, Response class RqRequest(Request): def rq_headers(self): headers = {} if 'Authorization' in self.headers: headers['Authorization'] = self.headers['Authorization'] if self.headers.get('Accept') == 'application/xml': headers['Accept'] = 'application/xml' else: headers['Accept'] = 'application/json' return self.headers # request.form is a Werkzeug MultiDict # we want to create a string def rq_data(self): data = "" for k, v in self.form.iteritems(): data += k + "=" + v + "&" return data def rq_params(self): return self.args.to_dict()
<commit_before>from flask import Request, Response class RqRequest(Request): def rq_headers(self): headers = {} if 'Authorization' in self.headers: headers['Authorization'] = self.headers['Authorization'] if self.headers['Accept'] == 'application/xml': headers['Accept'] = 'application/xml' else: headers['Accept'] = 'application/json' return self.headers # request.form is a Werkzeug MultiDict # we want to create a string def rq_data(self): data = "" for k, v in self.form.iteritems(): data += k + "=" + v + "&" return data def rq_params(self): return self.args.to_dict() <commit_msg>Fix KeyError when accessing non-existing header<commit_after>
from flask import Request, Response class RqRequest(Request): def rq_headers(self): headers = {} if 'Authorization' in self.headers: headers['Authorization'] = self.headers['Authorization'] if self.headers.get('Accept') == 'application/xml': headers['Accept'] = 'application/xml' else: headers['Accept'] = 'application/json' return self.headers # request.form is a Werkzeug MultiDict # we want to create a string def rq_data(self): data = "" for k, v in self.form.iteritems(): data += k + "=" + v + "&" return data def rq_params(self): return self.args.to_dict()
from flask import Request, Response class RqRequest(Request): def rq_headers(self): headers = {} if 'Authorization' in self.headers: headers['Authorization'] = self.headers['Authorization'] if self.headers['Accept'] == 'application/xml': headers['Accept'] = 'application/xml' else: headers['Accept'] = 'application/json' return self.headers # request.form is a Werkzeug MultiDict # we want to create a string def rq_data(self): data = "" for k, v in self.form.iteritems(): data += k + "=" + v + "&" return data def rq_params(self): return self.args.to_dict() Fix KeyError when accessing non-existing headerfrom flask import Request, Response class RqRequest(Request): def rq_headers(self): headers = {} if 'Authorization' in self.headers: headers['Authorization'] = self.headers['Authorization'] if self.headers.get('Accept') == 'application/xml': headers['Accept'] = 'application/xml' else: headers['Accept'] = 'application/json' return self.headers # request.form is a Werkzeug MultiDict # we want to create a string def rq_data(self): data = "" for k, v in self.form.iteritems(): data += k + "=" + v + "&" return data def rq_params(self): return self.args.to_dict()
<commit_before>from flask import Request, Response class RqRequest(Request): def rq_headers(self): headers = {} if 'Authorization' in self.headers: headers['Authorization'] = self.headers['Authorization'] if self.headers['Accept'] == 'application/xml': headers['Accept'] = 'application/xml' else: headers['Accept'] = 'application/json' return self.headers # request.form is a Werkzeug MultiDict # we want to create a string def rq_data(self): data = "" for k, v in self.form.iteritems(): data += k + "=" + v + "&" return data def rq_params(self): return self.args.to_dict() <commit_msg>Fix KeyError when accessing non-existing header<commit_after>from flask import Request, Response class RqRequest(Request): def rq_headers(self): headers = {} if 'Authorization' in self.headers: headers['Authorization'] = self.headers['Authorization'] if self.headers.get('Accept') == 'application/xml': headers['Accept'] = 'application/xml' else: headers['Accept'] = 'application/json' return self.headers # request.form is a Werkzeug MultiDict # we want to create a string def rq_data(self): data = "" for k, v in self.form.iteritems(): data += k + "=" + v + "&" return data def rq_params(self): return self.args.to_dict()
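The KeyError fix above swaps indexing for .get(), which returns None when 'Accept' is absent and falls through to the JSON branch. Two quirks survive the commit: rq_headers returns self.headers instead of the filtered dict it just built, and rq_data hand-joins form fields, leaving a trailing '&' and skipping URL escaping. A Python 3 sketch of both methods for the same Request subclass, with the intent behind them taken as an assumption:

from urllib.parse import urlencode

def rq_headers(self):
    headers = {}
    if 'Authorization' in self.headers:
        headers['Authorization'] = self.headers['Authorization']
    if self.headers.get('Accept') == 'application/xml':
        headers['Accept'] = 'application/xml'
    else:
        headers['Accept'] = 'application/json'
    return headers  # the filtered dict, not the raw request headers

def rq_data(self):
    # urlencode escapes values and joins pairs without a trailing '&';
    # items(multi=True) keeps repeated keys from the Werkzeug MultiDict.
    return urlencode(list(self.form.items(multi=True)))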
a2776a5cfcad7e9957eb44ab5882d36878026b1e
property_transformation.py
property_transformation.py
from types import UnicodeType, StringType class PropertyMappingFailedException(Exception): pass def get_transformed_properties(source_properties, prop_map): results = {} for key, value in prop_map.iteritems(): if type(value) in (StringType, UnicodeType): if value in source_properties: results[key] = source_properties[value] else: raise PropertyMappingFailedException("property %s not found in source feature" % (value)) else: raise PropertyMappingFailedException("Unhandled mapping for key:%s value type:%s" % (key, type(value))) return results
from types import UnicodeType, StringType class PropertyMappingFailedException(Exception): pass def get_transformed_properties(source_properties, prop_map): results = {} for key, value in prop_map.iteritems(): if type(value) in (StringType, UnicodeType): if value in source_properties: results[key] = source_properties[value] else: raise PropertyMappingFailedException("property %s not found in source feature" % (value)) elif type(value) == dict: if "static" in value: results[key] = value["static"] else: raise PropertyMappingFailedException("Unhandled mapping for key:%s value type:%s" % (key, type(value))) return results
Add option to include static values in properties
Add option to include static values in properties
Python
mit
OpenBounds/Processing
from types import UnicodeType, StringType class PropertyMappingFailedException(Exception): pass def get_transformed_properties(source_properties, prop_map): results = {} for key, value in prop_map.iteritems(): if type(value) in (StringType, UnicodeType): if value in source_properties: results[key] = source_properties[value] else: raise PropertyMappingFailedException("property %s not found in source feature" % (value)) else: raise PropertyMappingFailedException("Unhandled mapping for key:%s value type:%s" % (key, type(value))) return results Add option to include static values in properties
from types import UnicodeType, StringType class PropertyMappingFailedException(Exception): pass def get_transformed_properties(source_properties, prop_map): results = {} for key, value in prop_map.iteritems(): if type(value) in (StringType, UnicodeType): if value in source_properties: results[key] = source_properties[value] else: raise PropertyMappingFailedException("property %s not found in source feature" % (value)) elif type(value) == dict: if "static" in value: results[key] = value["static"] else: raise PropertyMappingFailedException("Unhandled mapping for key:%s value type:%s" % (key, type(value))) return results
<commit_before>from types import UnicodeType, StringType class PropertyMappingFailedException(Exception): pass def get_transformed_properties(source_properties, prop_map): results = {} for key, value in prop_map.iteritems(): if type(value) in (StringType, UnicodeType): if value in source_properties: results[key] = source_properties[value] else: raise PropertyMappingFailedException("property %s not found in source feature" % (value)) else: raise PropertyMappingFailedException("Unhandled mapping for key:%s value type:%s" % (key, type(value))) return results <commit_msg>Add option to include static values in properties<commit_after>
from types import UnicodeType, StringType class PropertyMappingFailedException(Exception): pass def get_transformed_properties(source_properties, prop_map): results = {} for key, value in prop_map.iteritems(): if type(value) in (StringType, UnicodeType): if value in source_properties: results[key] = source_properties[value] else: raise PropertyMappingFailedException("property %s not found in source feature" % (value)) elif type(value) == dict: if "static" in value: results[key] = value["static"] else: raise PropertyMappingFailedException("Unhandled mapping for key:%s value type:%s" % (key, type(value))) return results
from types import UnicodeType, StringType class PropertyMappingFailedException(Exception): pass def get_transformed_properties(source_properties, prop_map): results = {} for key, value in prop_map.iteritems(): if type(value) in (StringType, UnicodeType): if value in source_properties: results[key] = source_properties[value] else: raise PropertyMappingFailedException("property %s not found in source feature" % (value)) else: raise PropertyMappingFailedException("Unhandled mapping for key:%s value type:%s" % (key, type(value))) return results Add option to include static values in propertiesfrom types import UnicodeType, StringType class PropertyMappingFailedException(Exception): pass def get_transformed_properties(source_properties, prop_map): results = {} for key, value in prop_map.iteritems(): if type(value) in (StringType, UnicodeType): if value in source_properties: results[key] = source_properties[value] else: raise PropertyMappingFailedException("property %s not found in source feature" % (value)) elif type(value) == dict: if "static" in value: results[key] = value["static"] else: raise PropertyMappingFailedException("Unhandled mapping for key:%s value type:%s" % (key, type(value))) return results
<commit_before>from types import UnicodeType, StringType class PropertyMappingFailedException(Exception): pass def get_transformed_properties(source_properties, prop_map): results = {} for key, value in prop_map.iteritems(): if type(value) in (StringType, UnicodeType): if value in source_properties: results[key] = source_properties[value] else: raise PropertyMappingFailedException("property %s not found in source feature" % (value)) else: raise PropertyMappingFailedException("Unhandled mapping for key:%s value type:%s" % (key, type(value))) return results <commit_msg>Add option to include static values in properties<commit_after>from types import UnicodeType, StringType class PropertyMappingFailedException(Exception): pass def get_transformed_properties(source_properties, prop_map): results = {} for key, value in prop_map.iteritems(): if type(value) in (StringType, UnicodeType): if value in source_properties: results[key] = source_properties[value] else: raise PropertyMappingFailedException("property %s not found in source feature" % (value)) elif type(value) == dict: if "static" in value: results[key] = value["static"] else: raise PropertyMappingFailedException("Unhandled mapping for key:%s value type:%s" % (key, type(value))) return results
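After this change a prop_map value is either a source-property name to copy or a {'static': ...} dict that injects a constant. A Python 3 condensation showing both paths side by side, with the error handling simplified and the sample feature invented:

def get_transformed_properties(source_properties, prop_map):
    results = {}
    for key, value in prop_map.items():
        if isinstance(value, str):
            results[key] = source_properties[value]  # copy/rename a field
        elif isinstance(value, dict) and "static" in value:
            results[key] = value["static"]  # same literal for every feature
        else:
            raise ValueError("unhandled mapping for key %r" % key)
    return results

prop_map = {"name": "NAME", "source": {"static": "census"}}
print(get_transformed_properties({"NAME": "Lake County"}, prop_map))
# {'name': 'Lake County', 'source': 'census'}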
eb698848c67a5f2ffe1b47c2b4620946f3133c3f
pyautoupdate/_move_glob.py
pyautoupdate/_move_glob.py
import glob import shutil import os def move_glob(src,dst): """Moves files from src to dest. src may be any glob to recognize files. dst must be a folder.""" for obj in glob.iglob(src): shutil.move(obj,dst) def copy_glob(src,dst): """Copies files from src to dest. src may be any glob to recognize files. dst must be a folder.""" for obj in glob.iglob(src): if os.path.isdir(obj): start_part=os.path.commonpath([src,obj]) end_part=os.path.relpath(obj,start_part) shutil.copytree(obj,os.path.join(dst,end_part)) else: shutil.copy2(obj,dst)
import glob import shutil import os if os.name == "nt": from .ntcommonpath import commonpath else: from .posixcommonpath import commonpath def move_glob(src,dst): """Moves files from src to dest. src may be any glob to recognize files. dst must be a folder.""" for obj in glob.iglob(src): shutil.move(obj,dst) def copy_glob(src,dst): """Copies files from src to dest. src may be any glob to recognize files. dst must be a folder.""" for obj in glob.iglob(src): if os.path.isdir(obj): start_part=commonpath([src,obj]) end_part=os.path.relpath(obj,start_part) shutil.copytree(obj,os.path.join(dst,end_part)) else: shutil.copy2(obj,dst)
Use backported commonpath instead of built in os.path.commonpath
Use backported commonpath instead of built in os.path.commonpath The built in one is only available for Python 3.5
Python
lgpl-2.1
rlee287/pyautoupdate,rlee287/pyautoupdate
import glob import shutil import os def move_glob(src,dst): """Moves files from src to dest. src may be any glob to recognize files. dst must be a folder.""" for obj in glob.iglob(src): shutil.move(obj,dst) def copy_glob(src,dst): """Copies files from src to dest. src may be any glob to recognize files. dst must be a folder.""" for obj in glob.iglob(src): if os.path.isdir(obj): start_part=os.path.commonpath([src,obj]) end_part=os.path.relpath(obj,start_part) shutil.copytree(obj,os.path.join(dst,end_part)) else: shutil.copy2(obj,dst) Use backported commonpath instead of built in os.path.commonpath The built in one is only available for Python 3.5
import glob import shutil import os if os.name == "nt": from .ntcommonpath import commonpath else: from .posixcommonpath import commonpath def move_glob(src,dst): """Moves files from src to dest. src may be any glob to recognize files. dst must be a folder.""" for obj in glob.iglob(src): shutil.move(obj,dst) def copy_glob(src,dst): """Copies files from src to dest. src may be any glob to recognize files. dst must be a folder.""" for obj in glob.iglob(src): if os.path.isdir(obj): start_part=commonpath([src,obj]) end_part=os.path.relpath(obj,start_part) shutil.copytree(obj,os.path.join(dst,end_part)) else: shutil.copy2(obj,dst)
<commit_before>import glob import shutil import os def move_glob(src,dst): """Moves files from src to dest. src may be any glob to recognize files. dst must be a folder.""" for obj in glob.iglob(src): shutil.move(obj,dst) def copy_glob(src,dst): """Copies files from src to dest. src may be any glob to recognize files. dst must be a folder.""" for obj in glob.iglob(src): if os.path.isdir(obj): start_part=os.path.commonpath([src,obj]) end_part=os.path.relpath(obj,start_part) shutil.copytree(obj,os.path.join(dst,end_part)) else: shutil.copy2(obj,dst) <commit_msg>Use backported commonpath instead of built in os.path.commonpath The built in one is only available for Python 3.5<commit_after>
import glob import shutil import os if os.name == "nt": from .ntcommonpath import commonpath else: from .posixcommonpath import commonpath def move_glob(src,dst): """Moves files from src to dest. src may be any glob to recognize files. dst must be a folder.""" for obj in glob.iglob(src): shutil.move(obj,dst) def copy_glob(src,dst): """Copies files from src to dest. src may be any glob to recognize files. dst must be a folder.""" for obj in glob.iglob(src): if os.path.isdir(obj): start_part=commonpath([src,obj]) end_part=os.path.relpath(obj,start_part) shutil.copytree(obj,os.path.join(dst,end_part)) else: shutil.copy2(obj,dst)
import glob import shutil import os def move_glob(src,dst): """Moves files from src to dest. src may be any glob to recognize files. dst must be a folder.""" for obj in glob.iglob(src): shutil.move(obj,dst) def copy_glob(src,dst): """Copies files from src to dest. src may be any glob to recognize files. dst must be a folder.""" for obj in glob.iglob(src): if os.path.isdir(obj): start_part=os.path.commonpath([src,obj]) end_part=os.path.relpath(obj,start_part) shutil.copytree(obj,os.path.join(dst,end_part)) else: shutil.copy2(obj,dst) Use backported commonpath instead of built in os.path.commonpath The built in one is only available for Python 3.5import glob import shutil import os if os.name == "nt": from .ntcommonpath import commonpath else: from .posixcommonpath import commonpath def move_glob(src,dst): """Moves files from src to dest. src may be any glob to recognize files. dst must be a folder.""" for obj in glob.iglob(src): shutil.move(obj,dst) def copy_glob(src,dst): """Copies files from src to dest. src may be any glob to recognize files. dst must be a folder.""" for obj in glob.iglob(src): if os.path.isdir(obj): start_part=commonpath([src,obj]) end_part=os.path.relpath(obj,start_part) shutil.copytree(obj,os.path.join(dst,end_part)) else: shutil.copy2(obj,dst)
<commit_before>import glob import shutil import os def move_glob(src,dst): """Moves files from src to dest. src may be any glob to recognize files. dst must be a folder.""" for obj in glob.iglob(src): shutil.move(obj,dst) def copy_glob(src,dst): """Copies files from src to dest. src may be any glob to recognize files. dst must be a folder.""" for obj in glob.iglob(src): if os.path.isdir(obj): start_part=os.path.commonpath([src,obj]) end_part=os.path.relpath(obj,start_part) shutil.copytree(obj,os.path.join(dst,end_part)) else: shutil.copy2(obj,dst) <commit_msg>Use backported commonpath instead of built in os.path.commonpath The built in one is only available for Python 3.5<commit_after>import glob import shutil import os if os.name == "nt": from .ntcommonpath import commonpath else: from .posixcommonpath import commonpath def move_glob(src,dst): """Moves files from src to dest. src may be any glob to recognize files. dst must be a folder.""" for obj in glob.iglob(src): shutil.move(obj,dst) def copy_glob(src,dst): """Copies files from src to dest. src may be any glob to recognize files. dst must be a folder.""" for obj in glob.iglob(src): if os.path.isdir(obj): start_part=commonpath([src,obj]) end_part=os.path.relpath(obj,start_part) shutil.copytree(obj,os.path.join(dst,end_part)) else: shutil.copy2(obj,dst)
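Context for the backport above: os.path.commonpath entered the standard library in Python 3.5, which is why the row vendors nt/posix variants and selects one via os.name. On 3.5 and later the stdlib call suffices, and it differs from the older commonprefix in exactly the way that matters here:

import os.path

paths = ["/usr/lib", "/usr/lib64"]
print(os.path.commonpath(paths))    # '/usr'     (component-wise)
print(os.path.commonprefix(paths))  # '/usr/lib' (character-wise, not a common dir)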
397e4b3841dde6f82ad7f1d3f6458f99da69d678
px/px_install.py
px/px_install.py
import sys import shutil import os def install(src, dest): """ Copy src (file) into dest (file) and make dest executable. On trouble, prints message and exits with an error code. """ try: _install(src, dest) except Exception as e: sys.stderr.write("Installing {} failed, please retry with sudo\n".format(dest)) sys.stderr.write("Error was: {}\n".format(e.message)) exit(1) print("Created: {}".format(dest)) def _install(src, dest): """ Copy src (file) into dest (file) and make dest executable. Throws exception on trouble. """ parent = os.path.dirname(dest) if not os.path.isdir(parent): raise IOError("Destination parent is not a directory: %s" % parent) if os.path.isdir(dest): raise IOError("Destination is a directory, won't replace that: %s" % dest) # Make sure nothing's in the way try: os.remove(dest) except OSError: pass if os.path.exists(dest): raise IOError("Can't remove existing entry: %s" % dest) shutil.copyfile(src, dest) os.chmod(dest, 0755)
import sys import shutil import os def install(src, dest): """ Copy src (file) into dest (file) and make dest executable. On trouble, prints message and exits with an error code. """ try: _install(src, dest) except Exception as e: sys.stderr.write("Installing {} failed, please retry with sudo\n".format(dest)) sys.stderr.write("Error was: {}\n".format(e.message)) exit(1) print("Created: {}".format(dest)) def _install(src, dest): """ Copy src (file) into dest (file) and make dest executable. Throws exception on trouble. """ if os.path.realpath(src) == os.path.realpath(dest): # Copying a file onto itself is a no-op, never mind return parent = os.path.dirname(dest) if not os.path.isdir(parent): raise IOError("Destination parent is not a directory: %s" % parent) if os.path.isdir(dest): raise IOError("Destination is a directory, won't replace that: %s" % dest) # Make sure nothing's in the way try: os.remove(dest) except OSError: pass if os.path.exists(dest): raise IOError("Can't remove existing entry: %s" % dest) shutil.copyfile(src, dest) os.chmod(dest, 0755)
Handle installing px on top of itself
Handle installing px on top of itself
Python
mit
walles/px,walles/px
import sys import shutil import os def install(src, dest): """ Copy src (file) into dest (file) and make dest executable. On trouble, prints message and exits with an error code. """ try: _install(src, dest) except Exception as e: sys.stderr.write("Installing {} failed, please retry with sudo\n".format(dest)) sys.stderr.write("Error was: {}\n".format(e.message)) exit(1) print("Created: {}".format(dest)) def _install(src, dest): """ Copy src (file) into dest (file) and make dest executable. Throws exception on trouble. """ parent = os.path.dirname(dest) if not os.path.isdir(parent): raise IOError("Destination parent is not a directory: %s" % parent) if os.path.isdir(dest): raise IOError("Destination is a directory, won't replace that: %s" % dest) # Make sure nothing's in the way try: os.remove(dest) except OSError: pass if os.path.exists(dest): raise IOError("Can't remove existing entry: %s" % dest) shutil.copyfile(src, dest) os.chmod(dest, 0755) Handle installing px on top of itself
import sys import shutil import os def install(src, dest): """ Copy src (file) into dest (file) and make dest executable. On trouble, prints message and exits with an error code. """ try: _install(src, dest) except Exception as e: sys.stderr.write("Installing {} failed, please retry with sudo\n".format(dest)) sys.stderr.write("Error was: {}\n".format(e.message)) exit(1) print("Created: {}".format(dest)) def _install(src, dest): """ Copy src (file) into dest (file) and make dest executable. Throws exception on trouble. """ if os.path.realpath(src) == os.path.realpath(dest): # Copying a file onto itself is a no-op, never mind return parent = os.path.dirname(dest) if not os.path.isdir(parent): raise IOError("Destination parent is not a directory: %s" % parent) if os.path.isdir(dest): raise IOError("Destination is a directory, won't replace that: %s" % dest) # Make sure nothing's in the way try: os.remove(dest) except OSError: pass if os.path.exists(dest): raise IOError("Can't remove existing entry: %s" % dest) shutil.copyfile(src, dest) os.chmod(dest, 0755)
<commit_before>import sys import shutil import os def install(src, dest): """ Copy src (file) into dest (file) and make dest executable. On trouble, prints message and exits with an error code. """ try: _install(src, dest) except Exception as e: sys.stderr.write("Installing {} failed, please retry with sudo\n".format(dest)) sys.stderr.write("Error was: {}\n".format(e.message)) exit(1) print("Created: {}".format(dest)) def _install(src, dest): """ Copy src (file) into dest (file) and make dest executable. Throws exception on trouble. """ parent = os.path.dirname(dest) if not os.path.isdir(parent): raise IOError("Destination parent is not a directory: %s" % parent) if os.path.isdir(dest): raise IOError("Destination is a directory, won't replace that: %s" % dest) # Make sure nothing's in the way try: os.remove(dest) except OSError: pass if os.path.exists(dest): raise IOError("Can't remove existing entry: %s" % dest) shutil.copyfile(src, dest) os.chmod(dest, 0755) <commit_msg>Handle installing px on top of itself<commit_after>
import sys import shutil import os def install(src, dest): """ Copy src (file) into dest (file) and make dest executable. On trouble, prints message and exits with an error code. """ try: _install(src, dest) except Exception as e: sys.stderr.write("Installing {} failed, please retry with sudo\n".format(dest)) sys.stderr.write("Error was: {}\n".format(e.message)) exit(1) print("Created: {}".format(dest)) def _install(src, dest): """ Copy src (file) into dest (file) and make dest executable. Throws exception on trouble. """ if os.path.realpath(src) == os.path.realpath(dest): # Copying a file onto itself is a no-op, never mind return parent = os.path.dirname(dest) if not os.path.isdir(parent): raise IOError("Destination parent is not a directory: %s" % parent) if os.path.isdir(dest): raise IOError("Destination is a directory, won't replace that: %s" % dest) # Make sure nothing's in the way try: os.remove(dest) except OSError: pass if os.path.exists(dest): raise IOError("Can't remove existing entry: %s" % dest) shutil.copyfile(src, dest) os.chmod(dest, 0755)
import sys import shutil import os def install(src, dest): """ Copy src (file) into dest (file) and make dest executable. On trouble, prints message and exits with an error code. """ try: _install(src, dest) except Exception as e: sys.stderr.write("Installing {} failed, please retry with sudo\n".format(dest)) sys.stderr.write("Error was: {}\n".format(e.message)) exit(1) print("Created: {}".format(dest)) def _install(src, dest): """ Copy src (file) into dest (file) and make dest executable. Throws exception on trouble. """ parent = os.path.dirname(dest) if not os.path.isdir(parent): raise IOError("Destination parent is not a directory: %s" % parent) if os.path.isdir(dest): raise IOError("Destination is a directory, won't replace that: %s" % dest) # Make sure nothing's in the way try: os.remove(dest) except OSError: pass if os.path.exists(dest): raise IOError("Can't remove existing entry: %s" % dest) shutil.copyfile(src, dest) os.chmod(dest, 0755) Handle installing px on top of itselfimport sys import shutil import os def install(src, dest): """ Copy src (file) into dest (file) and make dest executable. On trouble, prints message and exits with an error code. """ try: _install(src, dest) except Exception as e: sys.stderr.write("Installing {} failed, please retry with sudo\n".format(dest)) sys.stderr.write("Error was: {}\n".format(e.message)) exit(1) print("Created: {}".format(dest)) def _install(src, dest): """ Copy src (file) into dest (file) and make dest executable. Throws exception on trouble. """ if os.path.realpath(src) == os.path.realpath(dest): # Copying a file onto itself is a no-op, never mind return parent = os.path.dirname(dest) if not os.path.isdir(parent): raise IOError("Destination parent is not a directory: %s" % parent) if os.path.isdir(dest): raise IOError("Destination is a directory, won't replace that: %s" % dest) # Make sure nothing's in the way try: os.remove(dest) except OSError: pass if os.path.exists(dest): raise IOError("Can't remove existing entry: %s" % dest) shutil.copyfile(src, dest) os.chmod(dest, 0755)
<commit_before>import sys import shutil import os def install(src, dest): """ Copy src (file) into dest (file) and make dest executable. On trouble, prints message and exits with an error code. """ try: _install(src, dest) except Exception as e: sys.stderr.write("Installing {} failed, please retry with sudo\n".format(dest)) sys.stderr.write("Error was: {}\n".format(e.message)) exit(1) print("Created: {}".format(dest)) def _install(src, dest): """ Copy src (file) into dest (file) and make dest executable. Throws exception on trouble. """ parent = os.path.dirname(dest) if not os.path.isdir(parent): raise IOError("Destination parent is not a directory: %s" % parent) if os.path.isdir(dest): raise IOError("Destination is a directory, won't replace that: %s" % dest) # Make sure nothing's in the way try: os.remove(dest) except OSError: pass if os.path.exists(dest): raise IOError("Can't remove existing entry: %s" % dest) shutil.copyfile(src, dest) os.chmod(dest, 0755) <commit_msg>Handle installing px on top of itself<commit_after>import sys import shutil import os def install(src, dest): """ Copy src (file) into dest (file) and make dest executable. On trouble, prints message and exits with an error code. """ try: _install(src, dest) except Exception as e: sys.stderr.write("Installing {} failed, please retry with sudo\n".format(dest)) sys.stderr.write("Error was: {}\n".format(e.message)) exit(1) print("Created: {}".format(dest)) def _install(src, dest): """ Copy src (file) into dest (file) and make dest executable. Throws exception on trouble. """ if os.path.realpath(src) == os.path.realpath(dest): # Copying a file onto itself is a no-op, never mind return parent = os.path.dirname(dest) if not os.path.isdir(parent): raise IOError("Destination parent is not a directory: %s" % parent) if os.path.isdir(dest): raise IOError("Destination is a directory, won't replace that: %s" % dest) # Make sure nothing's in the way try: os.remove(dest) except OSError: pass if os.path.exists(dest): raise IOError("Can't remove existing entry: %s" % dest) shutil.copyfile(src, dest) os.chmod(dest, 0755)
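The self-install guard added above compares os.path.realpath of both sides, which collapses '.' and '..' and resolves symlinks before the equality test, so installing px over a symlinked or relatively spelled copy of itself is also treated as a no-op. A small demonstration with invented paths:

import os.path

src = "/usr/local/bin/px"
dest = "/usr/local/bin/./px"  # different spelling, same target

print(os.path.realpath(src) == os.path.realpath(dest))  # True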
c181c54ee749408401b47c9528e9ee085b582692
timelapse.py
timelapse.py
import os import datetime import time import picamera from PIL import Image, ImageStat, ImageFont, ImageDraw with picamera.PiCamera() as camera: camera.resolution = (1024, 768) camera.rotation = 180 time.sleep(2) # camera warm-up time for filename in camera.capture_continuous('images/img_{timestamp:%Y%m%d%H%M%S}.png'): image = Image.open(filename) stat = ImageStat.Stat(image) r, g, b, _ = stat.mean if r < 50 and g < 50 and b < 50: print('[!] Lights must be powered off, sleeping...') try: os.unlink(filename) except: pass time.sleep(60 * 5) else: annotate_text = datetime.datetime.now().strftime('%H:%M:%S @ %d/%m/%Y') draw = ImageDraw.Draw(image) font = ImageFont.truetype('/usr/share/fonts/truetype/roboto/Roboto-Regular.ttf', 24) draw.text((10, 700), annotate_text, (255, 255, 0), font=font) image.save(filename) print('[!] Taken: {}'.format(filename)) time.sleep(60 / 2) image.close()
import os import datetime import time import picamera from PIL import Image, ImageStat, ImageFont, ImageDraw with picamera.PiCamera() as camera: camera.resolution = (1024, 768) camera.rotation = 180 time.sleep(2) # camera warm-up time for filename in camera.capture_continuous('images/img_{timestamp:%Y%m%d%H%M%S}.png'): image = Image.open(filename) stat = ImageStat.Stat(image) r, g, b, _ = stat.mean if r < 50 and g < 50 and b < 50: print('[!] Lights must be powered off, sleeping...') try: os.unlink(filename) except: pass time.sleep(60 * 5) else: annotate_text = datetime.datetime.now().strftime('%H:%M:%S @ %d/%m/%Y') draw = ImageDraw.Draw(image) font = ImageFont.truetype('/usr/share/fonts/truetype/roboto/Roboto-Regular.ttf', 24) draw.text((10, 730), annotate_text, (255, 255, 0), font=font) image.save(filename) print('[!] Taken: {}'.format(filename)) time.sleep(60 / 2) image.close()
Revert "E arruma o alinhamento do texto"
Revert "E arruma o alinhamento do texto" This reverts commit a7cb58ef323df97de537bb7cf0571c5ed26e113c.
Python
mit
dvl/raspberry-pi_timelapse,dvl/raspberry-pi_timelapse
import os import datetime import time import picamera from PIL import Image, ImageStat, ImageFont, ImageDraw with picamera.PiCamera() as camera: camera.resolution = (1024, 768) camera.rotation = 180 time.sleep(2) # camera warm-up time for filename in camera.capture_continuous('images/img_{timestamp:%Y%m%d%H%M%S}.png'): image = Image.open(filename) stat = ImageStat.Stat(image) r, g, b, _ = stat.mean if r < 50 and g < 50 and b < 50: print('[!] Lights must be powered off, sleeping...') try: os.unlink(filename) except: pass time.sleep(60 * 5) else: annotate_text = datetime.datetime.now().strftime('%H:%M:%S @ %d/%m/%Y') draw = ImageDraw.Draw(image) font = ImageFont.truetype('/usr/share/fonts/truetype/roboto/Roboto-Regular.ttf', 24) draw.text((10, 700), annotate_text, (255, 255, 0), font=font) image.save(filename) print('[!] Taken: {}'.format(filename)) time.sleep(60 / 2) image.close() Revert "E arruma o alinhamento do texto" This reverts commit a7cb58ef323df97de537bb7cf0571c5ed26e113c.
import os import datetime import time import picamera from PIL import Image, ImageStat, ImageFont, ImageDraw with picamera.PiCamera() as camera: camera.resolution = (1024, 768) camera.rotation = 180 time.sleep(2) # camera warm-up time for filename in camera.capture_continuous('images/img_{timestamp:%Y%m%d%H%M%S}.png'): image = Image.open(filename) stat = ImageStat.Stat(image) r, g, b, _ = stat.mean if r < 50 and g < 50 and b < 50: print('[!] Lights must be powered off, sleeping...') try: os.unlink(filename) except: pass time.sleep(60 * 5) else: annotate_text = datetime.datetime.now().strftime('%H:%M:%S @ %d/%m/%Y') draw = ImageDraw.Draw(image) font = ImageFont.truetype('/usr/share/fonts/truetype/roboto/Roboto-Regular.ttf', 24) draw.text((10, 730), annotate_text, (255, 255, 0), font=font) image.save(filename) print('[!] Taken: {}'.format(filename)) time.sleep(60 / 2) image.close()
<commit_before>import os import datetime import time import picamera from PIL import Image, ImageStat, ImageFont, ImageDraw with picamera.PiCamera() as camera: camera.resolution = (1024, 768) camera.rotation = 180 time.sleep(2) # camera warm-up time for filename in camera.capture_continuous('images/img_{timestamp:%Y%m%d%H%M%S}.png'): image = Image.open(filename) stat = ImageStat.Stat(image) r, g, b, _ = stat.mean if r < 50 and g < 50 and b < 50: print('[!] Lights must be powered off, sleeping...') try: os.unlink(filename) except: pass time.sleep(60 * 5) else: annotate_text = datetime.datetime.now().strftime('%H:%M:%S @ %d/%m/%Y') draw = ImageDraw.Draw(image) font = ImageFont.truetype('/usr/share/fonts/truetype/roboto/Roboto-Regular.ttf', 24) draw.text((10, 700), annotate_text, (255, 255, 0), font=font) image.save(filename) print('[!] Taken: {}'.format(filename)) time.sleep(60 / 2) image.close() <commit_msg>Revert "E arruma o alinhamento do texto" This reverts commit a7cb58ef323df97de537bb7cf0571c5ed26e113c.<commit_after>
import os import datetime import time import picamera from PIL import Image, ImageStat, ImageFont, ImageDraw with picamera.PiCamera() as camera: camera.resolution = (1024, 768) camera.rotation = 180 time.sleep(2) # camera warm-up time for filename in camera.capture_continuous('images/img_{timestamp:%Y%m%d%H%M%S}.png'): image = Image.open(filename) stat = ImageStat.Stat(image) r, g, b, _ = stat.mean if r < 50 and g < 50 and b < 50: print('[!] Lights must be powered off, sleeping...') try: os.unlink(filename) except: pass time.sleep(60 * 5) else: annotate_text = datetime.datetime.now().strftime('%H:%M:%S @ %d/%m/%Y') draw = ImageDraw.Draw(image) font = ImageFont.truetype('/usr/share/fonts/truetype/roboto/Roboto-Regular.ttf', 24) draw.text((10, 730), annotate_text, (255, 255, 0), font=font) image.save(filename) print('[!] Taken: {}'.format(filename)) time.sleep(60 / 2) image.close()
import os import datetime import time import picamera from PIL import Image, ImageStat, ImageFont, ImageDraw with picamera.PiCamera() as camera: camera.resolution = (1024, 768) camera.rotation = 180 time.sleep(2) # camera warm-up time for filename in camera.capture_continuous('images/img_{timestamp:%Y%m%d%H%M%S}.png'): image = Image.open(filename) stat = ImageStat.Stat(image) r, g, b, _ = stat.mean if r < 50 and g < 50 and b < 50: print('[!] Lights must be powered off, sleeping...') try: os.unlink(filename) except: pass time.sleep(60 * 5) else: annotate_text = datetime.datetime.now().strftime('%H:%M:%S @ %d/%m/%Y') draw = ImageDraw.Draw(image) font = ImageFont.truetype('/usr/share/fonts/truetype/roboto/Roboto-Regular.ttf', 24) draw.text((10, 700), annotate_text, (255, 255, 0), font=font) image.save(filename) print('[!] Taken: {}'.format(filename)) time.sleep(60 / 2) image.close() Revert "E arruma o alinhamento do texto" This reverts commit a7cb58ef323df97de537bb7cf0571c5ed26e113c.import os import datetime import time import picamera from PIL import Image, ImageStat, ImageFont, ImageDraw with picamera.PiCamera() as camera: camera.resolution = (1024, 768) camera.rotation = 180 time.sleep(2) # camera warm-up time for filename in camera.capture_continuous('images/img_{timestamp:%Y%m%d%H%M%S}.png'): image = Image.open(filename) stat = ImageStat.Stat(image) r, g, b, _ = stat.mean if r < 50 and g < 50 and b < 50: print('[!] Lights must be powered off, sleeping...') try: os.unlink(filename) except: pass time.sleep(60 * 5) else: annotate_text = datetime.datetime.now().strftime('%H:%M:%S @ %d/%m/%Y') draw = ImageDraw.Draw(image) font = ImageFont.truetype('/usr/share/fonts/truetype/roboto/Roboto-Regular.ttf', 24) draw.text((10, 730), annotate_text, (255, 255, 0), font=font) image.save(filename) print('[!] Taken: {}'.format(filename)) time.sleep(60 / 2) image.close()
<commit_before>import os import datetime import time import picamera from PIL import Image, ImageStat, ImageFont, ImageDraw with picamera.PiCamera() as camera: camera.resolution = (1024, 768) camera.rotation = 180 time.sleep(2) # camera warm-up time for filename in camera.capture_continuous('images/img_{timestamp:%Y%m%d%H%M%S}.png'): image = Image.open(filename) stat = ImageStat.Stat(image) r, g, b, _ = stat.mean if r < 50 and g < 50 and b < 50: print('[!] Lights must be powered off, sleeping...') try: os.unlink(filename) except: pass time.sleep(60 * 5) else: annotate_text = datetime.datetime.now().strftime('%H:%M:%S @ %d/%m/%Y') draw = ImageDraw.Draw(image) font = ImageFont.truetype('/usr/share/fonts/truetype/roboto/Roboto-Regular.ttf', 24) draw.text((10, 700), annotate_text, (255, 255, 0), font=font) image.save(filename) print('[!] Taken: {}'.format(filename)) time.sleep(60 / 2) image.close() <commit_msg>Revert "E arruma o alinhamento do texto" This reverts commit a7cb58ef323df97de537bb7cf0571c5ed26e113c.<commit_after>import os import datetime import time import picamera from PIL import Image, ImageStat, ImageFont, ImageDraw with picamera.PiCamera() as camera: camera.resolution = (1024, 768) camera.rotation = 180 time.sleep(2) # camera warm-up time for filename in camera.capture_continuous('images/img_{timestamp:%Y%m%d%H%M%S}.png'): image = Image.open(filename) stat = ImageStat.Stat(image) r, g, b, _ = stat.mean if r < 50 and g < 50 and b < 50: print('[!] Lights must be powered off, sleeping...') try: os.unlink(filename) except: pass time.sleep(60 * 5) else: annotate_text = datetime.datetime.now().strftime('%H:%M:%S @ %d/%m/%Y') draw = ImageDraw.Draw(image) font = ImageFont.truetype('/usr/share/fonts/truetype/roboto/Roboto-Regular.ttf', 24) draw.text((10, 730), annotate_text, (255, 255, 0), font=font) image.save(filename) print('[!] Taken: {}'.format(filename)) time.sleep(60 / 2) image.close()
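The reverted title above is Portuguese: "E arruma o alinhamento do texto" reads "And fixes the text alignment" in English, and the revert moves the timestamp back from y=700 to y=730 on the 1024x768 frame. When the font or caption length can vary, the offset can be derived from the rendered text box instead of hard-coded. A sketch assuming Pillow 8+ for textbbox, with an invented margin:

from PIL import Image, ImageDraw, ImageFont

image = Image.new("RGB", (1024, 768))
draw = ImageDraw.Draw(image)
font = ImageFont.load_default()

text = "12:34:56 @ 01/01/2024"
left, top, right, bottom = draw.textbbox((0, 0), text, font=font)
margin = 10  # hypothetical bottom/left padding
y = image.height - (bottom - top) - margin
draw.text((margin, y), text, (255, 255, 0), font=font)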
0c3ed07548cd196ceac2641a38f4d20a1e104d11
admin/test/test_acceptance.py
admin/test/test_acceptance.py
# Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Tests for ``admin.acceptance``. """ from zope.interface.verify import verifyObject from twisted.trial.unittest import SynchronousTestCase from ..acceptance import IClusterRunner, ManagedRunner class ManagedRunnerTests(SynchronousTestCase): """ Tests for ``ManagedRunner``. """ def test_interface(self): """ ``ManagedRunner`` provides ``IClusterRunner``. """ runner = ManagedRunner( node_addresses=[b'192.0.2.1'], distribution=b'centos-7' ) self.assertTrue( verifyObject(IClusterRunner, runner) )
# Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Tests for ``admin.acceptance``. """ from zope.interface.verify import verifyObject from twisted.trial.unittest import SynchronousTestCase from ..acceptance import IClusterRunner, ManagedRunner from flocker.provision import PackageSource from flocker.acceptance.testtools import DatasetBackend class ManagedRunnerTests(SynchronousTestCase): """ Tests for ``ManagedRunner``. """ def test_interface(self): """ ``ManagedRunner`` provides ``IClusterRunner``. """ runner = ManagedRunner( node_addresses=[b'192.0.2.1'], package_source=PackageSource( version=b"", os_version=b"", branch=b"", build_serve=b"", ), distribution=b'centos-7', dataset_backend=DatasetBackend.zfs, dataset_backend_configuration={}, ) self.assertTrue( verifyObject(IClusterRunner, runner) )
Create the ManagedRunner with all the new args.
Create the ManagedRunner with all the new args.
Python
apache-2.0
jml/flocker,lukemarsden/flocker,wallnerryan/flocker-profiles,AndyHuu/flocker,adamtheturtle/flocker,mbrukman/flocker,lukemarsden/flocker,hackday-profilers/flocker,achanda/flocker,w4ngyi/flocker,1d4Nf6/flocker,Azulinho/flocker,mbrukman/flocker,LaynePeng/flocker,w4ngyi/flocker,moypray/flocker,w4ngyi/flocker,LaynePeng/flocker,jml/flocker,agonzalezro/flocker,adamtheturtle/flocker,AndyHuu/flocker,achanda/flocker,hackday-profilers/flocker,agonzalezro/flocker,Azulinho/flocker,moypray/flocker,wallnerryan/flocker-profiles,LaynePeng/flocker,Azulinho/flocker,AndyHuu/flocker,agonzalezro/flocker,adamtheturtle/flocker,lukemarsden/flocker,moypray/flocker,mbrukman/flocker,jml/flocker,wallnerryan/flocker-profiles,1d4Nf6/flocker,achanda/flocker,1d4Nf6/flocker,hackday-profilers/flocker
# Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Tests for ``admin.acceptance``. """ from zope.interface.verify import verifyObject from twisted.trial.unittest import SynchronousTestCase from ..acceptance import IClusterRunner, ManagedRunner class ManagedRunnerTests(SynchronousTestCase): """ Tests for ``ManagedRunner``. """ def test_interface(self): """ ``ManagedRunner`` provides ``IClusterRunner``. """ runner = ManagedRunner( node_addresses=[b'192.0.2.1'], distribution=b'centos-7' ) self.assertTrue( verifyObject(IClusterRunner, runner) ) Create the ManagedRunner with all the new args.
# Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Tests for ``admin.acceptance``. """ from zope.interface.verify import verifyObject from twisted.trial.unittest import SynchronousTestCase from ..acceptance import IClusterRunner, ManagedRunner from flocker.provision import PackageSource from flocker.acceptance.testtools import DatasetBackend class ManagedRunnerTests(SynchronousTestCase): """ Tests for ``ManagedRunner``. """ def test_interface(self): """ ``ManagedRunner`` provides ``IClusterRunner``. """ runner = ManagedRunner( node_addresses=[b'192.0.2.1'], package_source=PackageSource( version=b"", os_version=b"", branch=b"", build_serve=b"", ), distribution=b'centos-7', dataset_backend=DatasetBackend.zfs, dataset_backend_configuration={}, ) self.assertTrue( verifyObject(IClusterRunner, runner) )
<commit_before># Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Tests for ``admin.acceptance``. """ from zope.interface.verify import verifyObject from twisted.trial.unittest import SynchronousTestCase from ..acceptance import IClusterRunner, ManagedRunner class ManagedRunnerTests(SynchronousTestCase): """ Tests for ``ManagedRunner``. """ def test_interface(self): """ ``ManagedRunner`` provides ``IClusterRunner``. """ runner = ManagedRunner( node_addresses=[b'192.0.2.1'], distribution=b'centos-7' ) self.assertTrue( verifyObject(IClusterRunner, runner) ) <commit_msg>Create the ManagedRunner with all the new args.<commit_after>
# Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Tests for ``admin.acceptance``. """ from zope.interface.verify import verifyObject from twisted.trial.unittest import SynchronousTestCase from ..acceptance import IClusterRunner, ManagedRunner from flocker.provision import PackageSource from flocker.acceptance.testtools import DatasetBackend class ManagedRunnerTests(SynchronousTestCase): """ Tests for ``ManagedRunner``. """ def test_interface(self): """ ``ManagedRunner`` provides ``IClusterRunner``. """ runner = ManagedRunner( node_addresses=[b'192.0.2.1'], package_source=PackageSource( version=b"", os_version=b"", branch=b"", build_serve=b"", ), distribution=b'centos-7', dataset_backend=DatasetBackend.zfs, dataset_backend_configuration={}, ) self.assertTrue( verifyObject(IClusterRunner, runner) )
# Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Tests for ``admin.acceptance``. """ from zope.interface.verify import verifyObject from twisted.trial.unittest import SynchronousTestCase from ..acceptance import IClusterRunner, ManagedRunner class ManagedRunnerTests(SynchronousTestCase): """ Tests for ``ManagedRunner``. """ def test_interface(self): """ ``ManagedRunner`` provides ``IClusterRunner``. """ runner = ManagedRunner( node_addresses=[b'192.0.2.1'], distribution=b'centos-7' ) self.assertTrue( verifyObject(IClusterRunner, runner) ) Create the ManagedRunner with all the new args.# Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Tests for ``admin.acceptance``. """ from zope.interface.verify import verifyObject from twisted.trial.unittest import SynchronousTestCase from ..acceptance import IClusterRunner, ManagedRunner from flocker.provision import PackageSource from flocker.acceptance.testtools import DatasetBackend class ManagedRunnerTests(SynchronousTestCase): """ Tests for ``ManagedRunner``. """ def test_interface(self): """ ``ManagedRunner`` provides ``IClusterRunner``. """ runner = ManagedRunner( node_addresses=[b'192.0.2.1'], package_source=PackageSource( version=b"", os_version=b"", branch=b"", build_serve=b"", ), distribution=b'centos-7', dataset_backend=DatasetBackend.zfs, dataset_backend_configuration={}, ) self.assertTrue( verifyObject(IClusterRunner, runner) )
<commit_before># Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Tests for ``admin.acceptance``. """ from zope.interface.verify import verifyObject from twisted.trial.unittest import SynchronousTestCase from ..acceptance import IClusterRunner, ManagedRunner class ManagedRunnerTests(SynchronousTestCase): """ Tests for ``ManagedRunner``. """ def test_interface(self): """ ``ManagedRunner`` provides ``IClusterRunner``. """ runner = ManagedRunner( node_addresses=[b'192.0.2.1'], distribution=b'centos-7' ) self.assertTrue( verifyObject(IClusterRunner, runner) ) <commit_msg>Create the ManagedRunner with all the new args.<commit_after># Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Tests for ``admin.acceptance``. """ from zope.interface.verify import verifyObject from twisted.trial.unittest import SynchronousTestCase from ..acceptance import IClusterRunner, ManagedRunner from flocker.provision import PackageSource from flocker.acceptance.testtools import DatasetBackend class ManagedRunnerTests(SynchronousTestCase): """ Tests for ``ManagedRunner``. """ def test_interface(self): """ ``ManagedRunner`` provides ``IClusterRunner``. """ runner = ManagedRunner( node_addresses=[b'192.0.2.1'], package_source=PackageSource( version=b"", os_version=b"", branch=b"", build_serve=b"", ), distribution=b'centos-7', dataset_backend=DatasetBackend.zfs, dataset_backend_configuration={}, ) self.assertTrue( verifyObject(IClusterRunner, runner) )
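The test pattern in the row above, constructing the object with every required argument and then asserting interface compliance, generalizes beyond flocker: verifyObject checks both that the instance declares the interface and that the declared methods exist with compatible signatures. A self-contained sketch with an invented interface, not flocker's:

from zope.interface import Interface, implementer
from zope.interface.verify import verifyObject

class IRunner(Interface):
    def run():
        """Run the cluster."""  # interface methods omit self by convention

@implementer(IRunner)
class StubRunner(object):
    def run(self):
        return "ran"

assert verifyObject(IRunner, StubRunner())  # raises Invalid on any mismatch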
fb82b4f77379ddd1525947cc61f1c46c34674da4
froide/publicbody/admin.py
froide/publicbody/admin.py
from django.contrib import admin from froide.publicbody.models import (PublicBody, FoiLaw, PublicBodyTopic, Jurisdiction) class PublicBodyAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} list_display = ('name', 'email', 'url', 'classification', 'topic', 'depth',) list_filter = ('classification', 'topic',) search_fields = ['name', "description"] exclude = ('confirmed',) class FoiLawAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("jurisdiction", "name",)} list_display = ('name', 'meta',) class JurisdictionAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} class PublicBodyTopicAdmin(admin.ModelAdmin): pass admin.site.register(PublicBody, PublicBodyAdmin) admin.site.register(FoiLaw, FoiLawAdmin) admin.site.register(Jurisdiction, JurisdictionAdmin) admin.site.register(PublicBodyTopic, PublicBodyTopicAdmin)
from django.contrib import admin from froide.publicbody.models import (PublicBody, FoiLaw, PublicBodyTopic, Jurisdiction) class PublicBodyAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} list_display = ('name', 'email', 'url', 'classification', 'topic', 'depth',) list_filter = ('classification', 'topic', 'jurisdiction',) search_fields = ['name', "description"] exclude = ('confirmed',) class FoiLawAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("jurisdiction", "name",)} list_display = ('name', 'meta',) class JurisdictionAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} class PublicBodyTopicAdmin(admin.ModelAdmin): pass admin.site.register(PublicBody, PublicBodyAdmin) admin.site.register(FoiLaw, FoiLawAdmin) admin.site.register(Jurisdiction, JurisdictionAdmin) admin.site.register(PublicBodyTopic, PublicBodyTopicAdmin)
Add list filter by jurisdiction for public bodies
Add list filter by jurisdiction for public bodies
Python
mit
LilithWittmann/froide,fin/froide,ryankanno/froide,CodeforHawaii/froide,okfse/froide,stefanw/froide,ryankanno/froide,okfse/froide,stefanw/froide,catcosmo/froide,fin/froide,ryankanno/froide,CodeforHawaii/froide,stefanw/froide,LilithWittmann/froide,CodeforHawaii/froide,okfse/froide,okfse/froide,LilithWittmann/froide,fin/froide,ryankanno/froide,ryankanno/froide,stefanw/froide,CodeforHawaii/froide,catcosmo/froide,stefanw/froide,catcosmo/froide,catcosmo/froide,LilithWittmann/froide,catcosmo/froide,LilithWittmann/froide,fin/froide,okfse/froide,CodeforHawaii/froide
from django.contrib import admin from froide.publicbody.models import (PublicBody, FoiLaw, PublicBodyTopic, Jurisdiction) class PublicBodyAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} list_display = ('name', 'email', 'url', 'classification', 'topic', 'depth',) list_filter = ('classification', 'topic',) search_fields = ['name', "description"] exclude = ('confirmed',) class FoiLawAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("jurisdiction", "name",)} list_display = ('name', 'meta',) class JurisdictionAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} class PublicBodyTopicAdmin(admin.ModelAdmin): pass admin.site.register(PublicBody, PublicBodyAdmin) admin.site.register(FoiLaw, FoiLawAdmin) admin.site.register(Jurisdiction, JurisdictionAdmin) admin.site.register(PublicBodyTopic, PublicBodyTopicAdmin) Add list filter by jurisdiction for public bodies
from django.contrib import admin from froide.publicbody.models import (PublicBody, FoiLaw, PublicBodyTopic, Jurisdiction) class PublicBodyAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} list_display = ('name', 'email', 'url', 'classification', 'topic', 'depth',) list_filter = ('classification', 'topic', 'jurisdiction',) search_fields = ['name', "description"] exclude = ('confirmed',) class FoiLawAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("jurisdiction", "name",)} list_display = ('name', 'meta',) class JurisdictionAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} class PublicBodyTopicAdmin(admin.ModelAdmin): pass admin.site.register(PublicBody, PublicBodyAdmin) admin.site.register(FoiLaw, FoiLawAdmin) admin.site.register(Jurisdiction, JurisdictionAdmin) admin.site.register(PublicBodyTopic, PublicBodyTopicAdmin)
<commit_before>from django.contrib import admin from froide.publicbody.models import (PublicBody, FoiLaw, PublicBodyTopic, Jurisdiction) class PublicBodyAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} list_display = ('name', 'email', 'url', 'classification', 'topic', 'depth',) list_filter = ('classification', 'topic',) search_fields = ['name', "description"] exclude = ('confirmed',) class FoiLawAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("jurisdiction", "name",)} list_display = ('name', 'meta',) class JurisdictionAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} class PublicBodyTopicAdmin(admin.ModelAdmin): pass admin.site.register(PublicBody, PublicBodyAdmin) admin.site.register(FoiLaw, FoiLawAdmin) admin.site.register(Jurisdiction, JurisdictionAdmin) admin.site.register(PublicBodyTopic, PublicBodyTopicAdmin) <commit_msg>Add list filter by jurisdiction for public bodies<commit_after>
from django.contrib import admin from froide.publicbody.models import (PublicBody, FoiLaw, PublicBodyTopic, Jurisdiction) class PublicBodyAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} list_display = ('name', 'email', 'url', 'classification', 'topic', 'depth',) list_filter = ('classification', 'topic', 'jurisdiction',) search_fields = ['name', "description"] exclude = ('confirmed',) class FoiLawAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("jurisdiction", "name",)} list_display = ('name', 'meta',) class JurisdictionAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} class PublicBodyTopicAdmin(admin.ModelAdmin): pass admin.site.register(PublicBody, PublicBodyAdmin) admin.site.register(FoiLaw, FoiLawAdmin) admin.site.register(Jurisdiction, JurisdictionAdmin) admin.site.register(PublicBodyTopic, PublicBodyTopicAdmin)
from django.contrib import admin from froide.publicbody.models import (PublicBody, FoiLaw, PublicBodyTopic, Jurisdiction) class PublicBodyAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} list_display = ('name', 'email', 'url', 'classification', 'topic', 'depth',) list_filter = ('classification', 'topic',) search_fields = ['name', "description"] exclude = ('confirmed',) class FoiLawAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("jurisdiction", "name",)} list_display = ('name', 'meta',) class JurisdictionAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} class PublicBodyTopicAdmin(admin.ModelAdmin): pass admin.site.register(PublicBody, PublicBodyAdmin) admin.site.register(FoiLaw, FoiLawAdmin) admin.site.register(Jurisdiction, JurisdictionAdmin) admin.site.register(PublicBodyTopic, PublicBodyTopicAdmin) Add list filter by jurisdiction for public bodiesfrom django.contrib import admin from froide.publicbody.models import (PublicBody, FoiLaw, PublicBodyTopic, Jurisdiction) class PublicBodyAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} list_display = ('name', 'email', 'url', 'classification', 'topic', 'depth',) list_filter = ('classification', 'topic', 'jurisdiction',) search_fields = ['name', "description"] exclude = ('confirmed',) class FoiLawAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("jurisdiction", "name",)} list_display = ('name', 'meta',) class JurisdictionAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} class PublicBodyTopicAdmin(admin.ModelAdmin): pass admin.site.register(PublicBody, PublicBodyAdmin) admin.site.register(FoiLaw, FoiLawAdmin) admin.site.register(Jurisdiction, JurisdictionAdmin) admin.site.register(PublicBodyTopic, PublicBodyTopicAdmin)
<commit_before>from django.contrib import admin from froide.publicbody.models import (PublicBody, FoiLaw, PublicBodyTopic, Jurisdiction) class PublicBodyAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} list_display = ('name', 'email', 'url', 'classification', 'topic', 'depth',) list_filter = ('classification', 'topic',) search_fields = ['name', "description"] exclude = ('confirmed',) class FoiLawAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("jurisdiction", "name",)} list_display = ('name', 'meta',) class JurisdictionAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} class PublicBodyTopicAdmin(admin.ModelAdmin): pass admin.site.register(PublicBody, PublicBodyAdmin) admin.site.register(FoiLaw, FoiLawAdmin) admin.site.register(Jurisdiction, JurisdictionAdmin) admin.site.register(PublicBodyTopic, PublicBodyTopicAdmin) <commit_msg>Add list filter by jurisdiction for public bodies<commit_after>from django.contrib import admin from froide.publicbody.models import (PublicBody, FoiLaw, PublicBodyTopic, Jurisdiction) class PublicBodyAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} list_display = ('name', 'email', 'url', 'classification', 'topic', 'depth',) list_filter = ('classification', 'topic', 'jurisdiction',) search_fields = ['name', "description"] exclude = ('confirmed',) class FoiLawAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("jurisdiction", "name",)} list_display = ('name', 'meta',) class JurisdictionAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} class PublicBodyTopicAdmin(admin.ModelAdmin): pass admin.site.register(PublicBody, PublicBodyAdmin) admin.site.register(FoiLaw, FoiLawAdmin) admin.site.register(Jurisdiction, JurisdictionAdmin) admin.site.register(PublicBodyTopic, PublicBodyTopicAdmin)
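Illustrative note (not part of the row above): `prepopulated_fields` only mirrors Django's slug normalisation in the admin UI; the same transformation is available server-side. A minimal sketch, assuming Django is installed; the input string is invented:

from django.utils.text import slugify

# prepopulated_fields = {"slug": ("name",)} produces the same kind of
# value the browser widget would: lowercase, hyphen-separated ASCII.
print(slugify("Ministry of Public Works"))  # -> ministry-of-public-works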
6c49d28370cdcd96917286cf68e6a8218db4b8a5
indra/java_vm.py
indra/java_vm.py
"""Handles all imports from jnius to prevent conflicts resulting from attempts to set JVM options while the VM is already running.""" from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import os import logging import jnius_config logger = logging.getLogger('java_vm') if '-Xmx4g' not in jnius_config.get_options(): if not jnius_config.vm_running: jnius_config.add_options('-Xmx4g') else: logger.warning("Couldn't set memory limit for Java VM because the VM " "is already running.") path_here = os.path.dirname(os.path.realpath(__file__)) cp = os.path.join(path_here, 'sources/biopax/jars/paxtools.jar') cp_existing = os.environ.get('CLASSPATH') if cp_existing is not None: os.environ['CLASSPATH'] = cp + ':' + cp_existing else: os.environ['CLASSPATH'] = cp from jnius import autoclass, JavaException, cast
"""Handles all imports from jnius to prevent conflicts resulting from attempts to set JVM options while the VM is already running.""" from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import os import logging import jnius_config logger = logging.getLogger('java_vm') def _has_xmx(options): for option in options: if option.startswith('-Xmx'): return True return False default_mem_limit = '8g' if not _has_xmx(jnius_config.get_options()): if not jnius_config.vm_running: jnius_config.add_options('-Xmx%s' % default_mem_limit) else: logger.warning("Couldn't set memory limit for Java VM because the VM " "is already running.") path_here = os.path.dirname(os.path.realpath(__file__)) cp = os.path.join(path_here, 'sources/biopax/jars/paxtools.jar') cp_existing = os.environ.get('CLASSPATH') if cp_existing is not None: os.environ['CLASSPATH'] = cp + ':' + cp_existing else: os.environ['CLASSPATH'] = cp from jnius import autoclass, JavaException, cast
Increase default maximum java heap size
Increase default maximum java heap size
Python
bsd-2-clause
johnbachman/indra,johnbachman/belpy,pvtodorov/indra,johnbachman/belpy,sorgerlab/belpy,sorgerlab/indra,pvtodorov/indra,pvtodorov/indra,johnbachman/indra,sorgerlab/indra,johnbachman/belpy,pvtodorov/indra,sorgerlab/belpy,johnbachman/indra,sorgerlab/indra,bgyori/indra,bgyori/indra,bgyori/indra,sorgerlab/belpy
"""Handles all imports from jnius to prevent conflicts resulting from attempts to set JVM options while the VM is already running.""" from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import os import logging import jnius_config logger = logging.getLogger('java_vm') if '-Xmx4g' not in jnius_config.get_options(): if not jnius_config.vm_running: jnius_config.add_options('-Xmx4g') else: logger.warning("Couldn't set memory limit for Java VM because the VM " "is already running.") path_here = os.path.dirname(os.path.realpath(__file__)) cp = os.path.join(path_here, 'sources/biopax/jars/paxtools.jar') cp_existing = os.environ.get('CLASSPATH') if cp_existing is not None: os.environ['CLASSPATH'] = cp + ':' + cp_existing else: os.environ['CLASSPATH'] = cp from jnius import autoclass, JavaException, cast Increase default maximum java heap size
"""Handles all imports from jnius to prevent conflicts resulting from attempts to set JVM options while the VM is already running.""" from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import os import logging import jnius_config logger = logging.getLogger('java_vm') def _has_xmx(options): for option in options: if option.startswith('-Xmx'): return True return False default_mem_limit = '8g' if not _has_xmx(jnius_config.get_options()): if not jnius_config.vm_running: jnius_config.add_options('-Xmx%s' % default_mem_limit) else: logger.warning("Couldn't set memory limit for Java VM because the VM " "is already running.") path_here = os.path.dirname(os.path.realpath(__file__)) cp = os.path.join(path_here, 'sources/biopax/jars/paxtools.jar') cp_existing = os.environ.get('CLASSPATH') if cp_existing is not None: os.environ['CLASSPATH'] = cp + ':' + cp_existing else: os.environ['CLASSPATH'] = cp from jnius import autoclass, JavaException, cast
<commit_before>"""Handles all imports from jnius to prevent conflicts resulting from attempts to set JVM options while the VM is already running.""" from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import os import logging import jnius_config logger = logging.getLogger('java_vm') if '-Xmx4g' not in jnius_config.get_options(): if not jnius_config.vm_running: jnius_config.add_options('-Xmx4g') else: logger.warning("Couldn't set memory limit for Java VM because the VM " "is already running.") path_here = os.path.dirname(os.path.realpath(__file__)) cp = os.path.join(path_here, 'sources/biopax/jars/paxtools.jar') cp_existing = os.environ.get('CLASSPATH') if cp_existing is not None: os.environ['CLASSPATH'] = cp + ':' + cp_existing else: os.environ['CLASSPATH'] = cp from jnius import autoclass, JavaException, cast <commit_msg>Increase default maximum java heap size<commit_after>
"""Handles all imports from jnius to prevent conflicts resulting from attempts to set JVM options while the VM is already running.""" from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import os import logging import jnius_config logger = logging.getLogger('java_vm') def _has_xmx(options): for option in options: if option.startswith('-Xmx'): return True return False default_mem_limit = '8g' if not _has_xmx(jnius_config.get_options()): if not jnius_config.vm_running: jnius_config.add_options('-Xmx%s' % default_mem_limit) else: logger.warning("Couldn't set memory limit for Java VM because the VM " "is already running.") path_here = os.path.dirname(os.path.realpath(__file__)) cp = os.path.join(path_here, 'sources/biopax/jars/paxtools.jar') cp_existing = os.environ.get('CLASSPATH') if cp_existing is not None: os.environ['CLASSPATH'] = cp + ':' + cp_existing else: os.environ['CLASSPATH'] = cp from jnius import autoclass, JavaException, cast
"""Handles all imports from jnius to prevent conflicts resulting from attempts to set JVM options while the VM is already running.""" from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import os import logging import jnius_config logger = logging.getLogger('java_vm') if '-Xmx4g' not in jnius_config.get_options(): if not jnius_config.vm_running: jnius_config.add_options('-Xmx4g') else: logger.warning("Couldn't set memory limit for Java VM because the VM " "is already running.") path_here = os.path.dirname(os.path.realpath(__file__)) cp = os.path.join(path_here, 'sources/biopax/jars/paxtools.jar') cp_existing = os.environ.get('CLASSPATH') if cp_existing is not None: os.environ['CLASSPATH'] = cp + ':' + cp_existing else: os.environ['CLASSPATH'] = cp from jnius import autoclass, JavaException, cast Increase default maximum java heap size"""Handles all imports from jnius to prevent conflicts resulting from attempts to set JVM options while the VM is already running.""" from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import os import logging import jnius_config logger = logging.getLogger('java_vm') def _has_xmx(options): for option in options: if option.startswith('-Xmx'): return True return False default_mem_limit = '8g' if not _has_xmx(jnius_config.get_options()): if not jnius_config.vm_running: jnius_config.add_options('-Xmx%s' % default_mem_limit) else: logger.warning("Couldn't set memory limit for Java VM because the VM " "is already running.") path_here = os.path.dirname(os.path.realpath(__file__)) cp = os.path.join(path_here, 'sources/biopax/jars/paxtools.jar') cp_existing = os.environ.get('CLASSPATH') if cp_existing is not None: os.environ['CLASSPATH'] = cp + ':' + cp_existing else: os.environ['CLASSPATH'] = cp from jnius import autoclass, JavaException, cast
<commit_before>"""Handles all imports from jnius to prevent conflicts resulting from attempts to set JVM options while the VM is already running.""" from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import os import logging import jnius_config logger = logging.getLogger('java_vm') if '-Xmx4g' not in jnius_config.get_options(): if not jnius_config.vm_running: jnius_config.add_options('-Xmx4g') else: logger.warning("Couldn't set memory limit for Java VM because the VM " "is already running.") path_here = os.path.dirname(os.path.realpath(__file__)) cp = os.path.join(path_here, 'sources/biopax/jars/paxtools.jar') cp_existing = os.environ.get('CLASSPATH') if cp_existing is not None: os.environ['CLASSPATH'] = cp + ':' + cp_existing else: os.environ['CLASSPATH'] = cp from jnius import autoclass, JavaException, cast <commit_msg>Increase default maximum java heap size<commit_after>"""Handles all imports from jnius to prevent conflicts resulting from attempts to set JVM options while the VM is already running.""" from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import os import logging import jnius_config logger = logging.getLogger('java_vm') def _has_xmx(options): for option in options: if option.startswith('-Xmx'): return True return False default_mem_limit = '8g' if not _has_xmx(jnius_config.get_options()): if not jnius_config.vm_running: jnius_config.add_options('-Xmx%s' % default_mem_limit) else: logger.warning("Couldn't set memory limit for Java VM because the VM " "is already running.") path_here = os.path.dirname(os.path.realpath(__file__)) cp = os.path.join(path_here, 'sources/biopax/jars/paxtools.jar') cp_existing = os.environ.get('CLASSPATH') if cp_existing is not None: os.environ['CLASSPATH'] = cp + ':' + cp_existing else: os.environ['CLASSPATH'] = cp from jnius import autoclass, JavaException, cast
f1f500488197a40a007af67c2c8dcc0aaef60dd3
src/project/word2vec_corpus.py
src/project/word2vec_corpus.py
import sys from os.path import isdir, isfile from corpus import Corpus from gensim import models class W2VCorpus(Corpus): def __init__(self, dir, dict_loc, vec_loc): Corpus.__init__(self, dir) # Todo: Tweak the default paramaters self.model = models.Word2Vec(self.docs.get_texts(), size=100, window=5, min_count=5, workers=4) return def similarity(self, word1, word2): return self.model.similarity(word1, word2) def main(): if len(sys.argv) > 2 and isdir(sys.argv[1]) and isfile(sys.argv[2]) and isfile(sys.argv[3]): corpus = W2VCorpus(sys.argv[1], sys.argv[2], sys.argv[3]) #print "Sim: column <-> row\t" + str(corpus.similarity("column", "row")) else: print "Corpus requires directory as an argument." if __name__ == "__main__": main()
import sys from os.path import isdir, isfile from corpus import Corpus from gensim import models class W2VCorpus(Corpus): def __init__(self, dict_loc, vec_loc, dir=None): Corpus.__init__(self, dir) self.dict_loc = dict_loc self.vec_loc = vec_loc self.model = None if dir: # Todo: Tweak the default paramaters self.model = models.Word2Vec(self.docs.get_texts(), size=100, window=5, min_count=5, workers=4) return def similarity(self, word1, word2): if self.model: return self.model.similarity(word1, word2) else: # Todo: Raise exception? return None def save(self, file): super(W2VCorpus, self).save(self.dict_loc, self.vec_loc) if self.model: self.model.save(file) def load(self, file): super(W2VCorpus, self).load(self.dict_loc, self.vec_loc) if self.model: self.model.load(file) def main(): w2v = "w2vcorpus.w2v" if len(sys.argv) > 2 and isdir(sys.argv[1]) and isfile(sys.argv[2]) and isfile(sys.argv[3]): if not isfile(w2v): corpus = W2VCorpus(sys.argv[2], sys.argv[3], sys.argv[1]) corpus.save(w2v) else: corpus = W2VCorpus(sys.argv[2], sys.argv[3]) corpus.load(w2v) print "Sim: velocity <-> speed\t" + str(corpus.similarity("velocity", "speed")) else: print "Corpus requires directory as an argument." if __name__ == "__main__": main()
Add functionality to load/save w2v model
Add functionality to load/save w2v model
Python
mit
PinPinIre/Final-Year-Project,PinPinIre/Final-Year-Project,PinPinIre/Final-Year-Project
import sys from os.path import isdir, isfile from corpus import Corpus from gensim import models class W2VCorpus(Corpus): def __init__(self, dir, dict_loc, vec_loc): Corpus.__init__(self, dir) # Todo: Tweak the default paramaters self.model = models.Word2Vec(self.docs.get_texts(), size=100, window=5, min_count=5, workers=4) return def similarity(self, word1, word2): return self.model.similarity(word1, word2) def main(): if len(sys.argv) > 2 and isdir(sys.argv[1]) and isfile(sys.argv[2]) and isfile(sys.argv[3]): corpus = W2VCorpus(sys.argv[1], sys.argv[2], sys.argv[3]) #print "Sim: column <-> row\t" + str(corpus.similarity("column", "row")) else: print "Corpus requires directory as an argument." if __name__ == "__main__": main()Add functionality to load/save w2v model
import sys from os.path import isdir, isfile from corpus import Corpus from gensim import models class W2VCorpus(Corpus): def __init__(self, dict_loc, vec_loc, dir=None): Corpus.__init__(self, dir) self.dict_loc = dict_loc self.vec_loc = vec_loc self.model = None if dir: # Todo: Tweak the default paramaters self.model = models.Word2Vec(self.docs.get_texts(), size=100, window=5, min_count=5, workers=4) return def similarity(self, word1, word2): if self.model: return self.model.similarity(word1, word2) else: # Todo: Raise exception? return None def save(self, file): super(W2VCorpus, self).save(self.dict_loc, self.vec_loc) if self.model: self.model.save(file) def load(self, file): super(W2VCorpus, self).load(self.dict_loc, self.vec_loc) if self.model: self.model.load(file) def main(): w2v = "w2vcorpus.w2v" if len(sys.argv) > 2 and isdir(sys.argv[1]) and isfile(sys.argv[2]) and isfile(sys.argv[3]): if not isfile(w2v): corpus = W2VCorpus(sys.argv[2], sys.argv[3], sys.argv[1]) corpus.save(w2v) else: corpus = W2VCorpus(sys.argv[2], sys.argv[3]) corpus.load(w2v) print "Sim: velocity <-> speed\t" + str(corpus.similarity("velocity", "speed")) else: print "Corpus requires directory as an argument." if __name__ == "__main__": main()
<commit_before>import sys from os.path import isdir, isfile from corpus import Corpus from gensim import models class W2VCorpus(Corpus): def __init__(self, dir, dict_loc, vec_loc): Corpus.__init__(self, dir) # Todo: Tweak the default paramaters self.model = models.Word2Vec(self.docs.get_texts(), size=100, window=5, min_count=5, workers=4) return def similarity(self, word1, word2): return self.model.similarity(word1, word2) def main(): if len(sys.argv) > 2 and isdir(sys.argv[1]) and isfile(sys.argv[2]) and isfile(sys.argv[3]): corpus = W2VCorpus(sys.argv[1], sys.argv[2], sys.argv[3]) #print "Sim: column <-> row\t" + str(corpus.similarity("column", "row")) else: print "Corpus requires directory as an argument." if __name__ == "__main__": main()<commit_msg>Add functionality to load/save w2v model<commit_after>
import sys from os.path import isdir, isfile from corpus import Corpus from gensim import models class W2VCorpus(Corpus): def __init__(self, dict_loc, vec_loc, dir=None): Corpus.__init__(self, dir) self.dict_loc = dict_loc self.vec_loc = vec_loc self.model = None if dir: # Todo: Tweak the default paramaters self.model = models.Word2Vec(self.docs.get_texts(), size=100, window=5, min_count=5, workers=4) return def similarity(self, word1, word2): if self.model: return self.model.similarity(word1, word2) else: # Todo: Raise exception? return None def save(self, file): super(W2VCorpus, self).save(self.dict_loc, self.vec_loc) if self.model: self.model.save(file) def load(self, file): super(W2VCorpus, self).load(self.dict_loc, self.vec_loc) if self.model: self.model.load(file) def main(): w2v = "w2vcorpus.w2v" if len(sys.argv) > 2 and isdir(sys.argv[1]) and isfile(sys.argv[2]) and isfile(sys.argv[3]): if not isfile(w2v): corpus = W2VCorpus(sys.argv[2], sys.argv[3], sys.argv[1]) corpus.save(w2v) else: corpus = W2VCorpus(sys.argv[2], sys.argv[3]) corpus.load(w2v) print "Sim: velocity <-> speed\t" + str(corpus.similarity("velocity", "speed")) else: print "Corpus requires directory as an argument." if __name__ == "__main__": main()
import sys from os.path import isdir, isfile from corpus import Corpus from gensim import models class W2VCorpus(Corpus): def __init__(self, dir, dict_loc, vec_loc): Corpus.__init__(self, dir) # Todo: Tweak the default paramaters self.model = models.Word2Vec(self.docs.get_texts(), size=100, window=5, min_count=5, workers=4) return def similarity(self, word1, word2): return self.model.similarity(word1, word2) def main(): if len(sys.argv) > 2 and isdir(sys.argv[1]) and isfile(sys.argv[2]) and isfile(sys.argv[3]): corpus = W2VCorpus(sys.argv[1], sys.argv[2], sys.argv[3]) #print "Sim: column <-> row\t" + str(corpus.similarity("column", "row")) else: print "Corpus requires directory as an argument." if __name__ == "__main__": main()Add functionality to load/save w2v modelimport sys from os.path import isdir, isfile from corpus import Corpus from gensim import models class W2VCorpus(Corpus): def __init__(self, dict_loc, vec_loc, dir=None): Corpus.__init__(self, dir) self.dict_loc = dict_loc self.vec_loc = vec_loc self.model = None if dir: # Todo: Tweak the default paramaters self.model = models.Word2Vec(self.docs.get_texts(), size=100, window=5, min_count=5, workers=4) return def similarity(self, word1, word2): if self.model: return self.model.similarity(word1, word2) else: # Todo: Raise exception? return None def save(self, file): super(W2VCorpus, self).save(self.dict_loc, self.vec_loc) if self.model: self.model.save(file) def load(self, file): super(W2VCorpus, self).load(self.dict_loc, self.vec_loc) if self.model: self.model.load(file) def main(): w2v = "w2vcorpus.w2v" if len(sys.argv) > 2 and isdir(sys.argv[1]) and isfile(sys.argv[2]) and isfile(sys.argv[3]): if not isfile(w2v): corpus = W2VCorpus(sys.argv[2], sys.argv[3], sys.argv[1]) corpus.save(w2v) else: corpus = W2VCorpus(sys.argv[2], sys.argv[3]) corpus.load(w2v) print "Sim: velocity <-> speed\t" + str(corpus.similarity("velocity", "speed")) else: print "Corpus requires directory as an argument." if __name__ == "__main__": main()
<commit_before>import sys from os.path import isdir, isfile from corpus import Corpus from gensim import models class W2VCorpus(Corpus): def __init__(self, dir, dict_loc, vec_loc): Corpus.__init__(self, dir) # Todo: Tweak the default paramaters self.model = models.Word2Vec(self.docs.get_texts(), size=100, window=5, min_count=5, workers=4) return def similarity(self, word1, word2): return self.model.similarity(word1, word2) def main(): if len(sys.argv) > 2 and isdir(sys.argv[1]) and isfile(sys.argv[2]) and isfile(sys.argv[3]): corpus = W2VCorpus(sys.argv[1], sys.argv[2], sys.argv[3]) #print "Sim: column <-> row\t" + str(corpus.similarity("column", "row")) else: print "Corpus requires directory as an argument." if __name__ == "__main__": main()<commit_msg>Add functionality to load/save w2v model<commit_after>import sys from os.path import isdir, isfile from corpus import Corpus from gensim import models class W2VCorpus(Corpus): def __init__(self, dict_loc, vec_loc, dir=None): Corpus.__init__(self, dir) self.dict_loc = dict_loc self.vec_loc = vec_loc self.model = None if dir: # Todo: Tweak the default paramaters self.model = models.Word2Vec(self.docs.get_texts(), size=100, window=5, min_count=5, workers=4) return def similarity(self, word1, word2): if self.model: return self.model.similarity(word1, word2) else: # Todo: Raise exception? return None def save(self, file): super(W2VCorpus, self).save(self.dict_loc, self.vec_loc) if self.model: self.model.save(file) def load(self, file): super(W2VCorpus, self).load(self.dict_loc, self.vec_loc) if self.model: self.model.load(file) def main(): w2v = "w2vcorpus.w2v" if len(sys.argv) > 2 and isdir(sys.argv[1]) and isfile(sys.argv[2]) and isfile(sys.argv[3]): if not isfile(w2v): corpus = W2VCorpus(sys.argv[2], sys.argv[3], sys.argv[1]) corpus.save(w2v) else: corpus = W2VCorpus(sys.argv[2], sys.argv[3]) corpus.load(w2v) print "Sim: velocity <-> speed\t" + str(corpus.similarity("velocity", "speed")) else: print "Corpus requires directory as an argument." if __name__ == "__main__": main()
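Illustrative note (not part of the row above): a sketch of the gensim save/load round trip the class wraps, written against the older gensim 3.x-era API the row itself uses (`size=`, `model.similarity`); the sentences and file name are invented. Note that `Word2Vec.load` is a classmethod that returns a new model rather than mutating one in place:

from gensim.models import Word2Vec

sentences = [["velocity", "speed"], ["velocity", "fast"]] * 50
model = Word2Vec(sentences, size=10, window=2, min_count=1, workers=1)
model.save("example.w2v")

restored = Word2Vec.load("example.w2v")  # classmethod: returns a new model
print(restored.similarity("velocity", "speed"))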
c9c3a81187fa3fe21b08fca9b37c6a608e7b03b2
sweettooth/extensions/feeds.py
sweettooth/extensions/feeds.py
from django.contrib.syndication.views import Feed from django.core.urlresolvers import reverse from extensions.models import Extension class LatestExtensionsFeed(Feed): title = "Latest extensions in GNOME Shell Extensions" link = "/" description = "The latest extensions in GNOME Shell Extensions" def items(self): return Extension.objects.visible().order_by('pk')[-10:] def item_title(self, item): return item.name def item_description(self, item): return item.description def item_link(self, item): return reverse('extensions-detail', kwargs=dict(pk=item.pk, slug=item.slug))
from django.contrib.syndication.views import Feed from django.core.urlresolvers import reverse from extensions.models import Extension class LatestExtensionsFeed(Feed): title = "Latest extensions in GNOME Shell Extensions" link = "/" description = "The latest extensions in GNOME Shell Extensions" def items(self): return Extension.objects.visible().order_by('-pk')[:10] def item_title(self, item): return item.name def item_description(self, item): return item.description def item_link(self, item): return reverse('extensions-detail', kwargs=dict(pk=item.pk, slug=item.slug))
Fix RSS feed some more.
Fix RSS feed some more.
Python
agpl-3.0
GNOME/extensions-web,GNOME/extensions-web,GNOME/extensions-web,GNOME/extensions-web,magcius/sweettooth,magcius/sweettooth
from django.contrib.syndication.views import Feed from django.core.urlresolvers import reverse from extensions.models import Extension class LatestExtensionsFeed(Feed): title = "Latest extensions in GNOME Shell Extensions" link = "/" description = "The latest extensions in GNOME Shell Extensions" def items(self): return Extension.objects.visible().order_by('pk')[-10:] def item_title(self, item): return item.name def item_description(self, item): return item.description def item_link(self, item): return reverse('extensions-detail', kwargs=dict(pk=item.pk, slug=item.slug)) Fix RSS feed some more.
from django.contrib.syndication.views import Feed from django.core.urlresolvers import reverse from extensions.models import Extension class LatestExtensionsFeed(Feed): title = "Latest extensions in GNOME Shell Extensions" link = "/" description = "The latest extensions in GNOME Shell Extensions" def items(self): return Extension.objects.visible().order_by('-pk')[:10] def item_title(self, item): return item.name def item_description(self, item): return item.description def item_link(self, item): return reverse('extensions-detail', kwargs=dict(pk=item.pk, slug=item.slug))
<commit_before> from django.contrib.syndication.views import Feed from django.core.urlresolvers import reverse from extensions.models import Extension class LatestExtensionsFeed(Feed): title = "Latest extensions in GNOME Shell Extensions" link = "/" description = "The latest extensions in GNOME Shell Extensions" def items(self): return Extension.objects.visible().order_by('pk')[-10:] def item_title(self, item): return item.name def item_description(self, item): return item.description def item_link(self, item): return reverse('extensions-detail', kwargs=dict(pk=item.pk, slug=item.slug)) <commit_msg>Fix RSS feed some more.<commit_after>
from django.contrib.syndication.views import Feed from django.core.urlresolvers import reverse from extensions.models import Extension class LatestExtensionsFeed(Feed): title = "Latest extensions in GNOME Shell Extensions" link = "/" description = "The latest extensions in GNOME Shell Extensions" def items(self): return Extension.objects.visible().order_by('-pk')[:10] def item_title(self, item): return item.name def item_description(self, item): return item.description def item_link(self, item): return reverse('extensions-detail', kwargs=dict(pk=item.pk, slug=item.slug))
from django.contrib.syndication.views import Feed from django.core.urlresolvers import reverse from extensions.models import Extension class LatestExtensionsFeed(Feed): title = "Latest extensions in GNOME Shell Extensions" link = "/" description = "The latest extensions in GNOME Shell Extensions" def items(self): return Extension.objects.visible().order_by('pk')[-10:] def item_title(self, item): return item.name def item_description(self, item): return item.description def item_link(self, item): return reverse('extensions-detail', kwargs=dict(pk=item.pk, slug=item.slug)) Fix RSS feed some more. from django.contrib.syndication.views import Feed from django.core.urlresolvers import reverse from extensions.models import Extension class LatestExtensionsFeed(Feed): title = "Latest extensions in GNOME Shell Extensions" link = "/" description = "The latest extensions in GNOME Shell Extensions" def items(self): return Extension.objects.visible().order_by('-pk')[:10] def item_title(self, item): return item.name def item_description(self, item): return item.description def item_link(self, item): return reverse('extensions-detail', kwargs=dict(pk=item.pk, slug=item.slug))
<commit_before> from django.contrib.syndication.views import Feed from django.core.urlresolvers import reverse from extensions.models import Extension class LatestExtensionsFeed(Feed): title = "Latest extensions in GNOME Shell Extensions" link = "/" description = "The latest extensions in GNOME Shell Extensions" def items(self): return Extension.objects.visible().order_by('pk')[-10:] def item_title(self, item): return item.name def item_description(self, item): return item.description def item_link(self, item): return reverse('extensions-detail', kwargs=dict(pk=item.pk, slug=item.slug)) <commit_msg>Fix RSS feed some more.<commit_after> from django.contrib.syndication.views import Feed from django.core.urlresolvers import reverse from extensions.models import Extension class LatestExtensionsFeed(Feed): title = "Latest extensions in GNOME Shell Extensions" link = "/" description = "The latest extensions in GNOME Shell Extensions" def items(self): return Extension.objects.visible().order_by('-pk')[:10] def item_title(self, item): return item.name def item_description(self, item): return item.description def item_link(self, item): return reverse('extensions-detail', kwargs=dict(pk=item.pk, slug=item.slug))
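Illustrative note (not part of the row above): Django QuerySets forbid negative slicing, so the old `order_by('pk')[-10:]` raised an error; ordering descending and taking the first ten returns the same rows. A pure-Python sketch of that equivalence:

pks = list(range(1, 25))

last_ten_ascending = pks[-10:]                         # what the old code meant
first_ten_descending = sorted(pks, reverse=True)[:10]  # what the fix does

assert set(last_ten_ascending) == set(first_ten_descending)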
87c5f39d5cb072a778bb145e6e5fc49c8d4b350d
core/urls.py
core/urls.py
from django.conf.urls import url from django.views.generic import RedirectView from django.core.urlresolvers import reverse_lazy from .views import AppView, reports_export # Instead of using a wildcard for our app views we insert them one at a time # for naming purposes. This is so that users can change urls if they want and # the change will propagate throughout the site. urlpatterns = [ url(r'^reports/export/$', reports_export, name='reports-export'), url(r'^timesheet/$', AppView.as_view(), name='timesheet'), url(r'^clients/$', AppView.as_view(), name='clients'), url(r'^tasks/$', AppView.as_view(), name='tasks'), url(r'^reports/$', AppView.as_view(), name='reports'), url(r'^invoices/$', AppView.as_view(), name='invoices'), url( r'^$', RedirectView.as_view(url=reverse_lazy('timesheet'), permanent=False), name='dashboard' ), ]
from django.conf.urls import url from django.views.generic import RedirectView from django.core.urlresolvers import reverse_lazy from .views import AppView, reports_export # Instead of using a wildcard for our app views we insert them one at a time # for naming purposes. This is so that users can change urls if they want and # the change will propagate throughout the site. urlpatterns = [ url(r'^reports/export/$', reports_export, name='reports-export'), url(r'^timesheet/$', AppView.as_view(), name='timesheet'), url(r'^clients/$', AppView.as_view(), name='clients'), url(r'^tasks/$', AppView.as_view(), name='tasks'), url(r'^reports/$', AppView.as_view(), name='reports'), url(r'^invoices/$', AppView.as_view(), name='invoices'), url(r'^invoices/([0-9]+)/$', AppView.as_view(), name='invoices'), url( r'^$', RedirectView.as_view(url=reverse_lazy('timesheet'), permanent=False), name='dashboard' ), ]
Add ability to go to individual invoice page
Add ability to go to individual invoice page
Python
bsd-2-clause
cdubz/timestrap,overshard/timestrap,overshard/timestrap,cdubz/timestrap,cdubz/timestrap,overshard/timestrap
from django.conf.urls import url from django.views.generic import RedirectView from django.core.urlresolvers import reverse_lazy from .views import AppView, reports_export # Instead of using a wildcard for our app views we insert them one at a time # for naming purposes. This is so that users can change urls if they want and # the change will propagate throughout the site. urlpatterns = [ url(r'^reports/export/$', reports_export, name='reports-export'), url(r'^timesheet/$', AppView.as_view(), name='timesheet'), url(r'^clients/$', AppView.as_view(), name='clients'), url(r'^tasks/$', AppView.as_view(), name='tasks'), url(r'^reports/$', AppView.as_view(), name='reports'), url(r'^invoices/$', AppView.as_view(), name='invoices'), url( r'^$', RedirectView.as_view(url=reverse_lazy('timesheet'), permanent=False), name='dashboard' ), ] Add ability to go to individual invoice page
from django.conf.urls import url from django.views.generic import RedirectView from django.core.urlresolvers import reverse_lazy from .views import AppView, reports_export # Instead of using a wildcard for our app views we insert them one at a time # for naming purposes. This is so that users can change urls if they want and # the change will propagate throughout the site. urlpatterns = [ url(r'^reports/export/$', reports_export, name='reports-export'), url(r'^timesheet/$', AppView.as_view(), name='timesheet'), url(r'^clients/$', AppView.as_view(), name='clients'), url(r'^tasks/$', AppView.as_view(), name='tasks'), url(r'^reports/$', AppView.as_view(), name='reports'), url(r'^invoices/$', AppView.as_view(), name='invoices'), url(r'^invoices/([0-9]+)/$', AppView.as_view(), name='invoices'), url( r'^$', RedirectView.as_view(url=reverse_lazy('timesheet'), permanent=False), name='dashboard' ), ]
<commit_before>from django.conf.urls import url from django.views.generic import RedirectView from django.core.urlresolvers import reverse_lazy from .views import AppView, reports_export # Instead of using a wildcard for our app views we insert them one at a time # for naming purposes. This is so that users can change urls if they want and # the change will propagate throughout the site. urlpatterns = [ url(r'^reports/export/$', reports_export, name='reports-export'), url(r'^timesheet/$', AppView.as_view(), name='timesheet'), url(r'^clients/$', AppView.as_view(), name='clients'), url(r'^tasks/$', AppView.as_view(), name='tasks'), url(r'^reports/$', AppView.as_view(), name='reports'), url(r'^invoices/$', AppView.as_view(), name='invoices'), url( r'^$', RedirectView.as_view(url=reverse_lazy('timesheet'), permanent=False), name='dashboard' ), ] <commit_msg>Add ability to go to individual invoice page<commit_after>
from django.conf.urls import url from django.views.generic import RedirectView from django.core.urlresolvers import reverse_lazy from .views import AppView, reports_export # Instead of using a wildcard for our app views we insert them one at a time # for naming purposes. This is so that users can change urls if they want and # the change will propagate throughout the site. urlpatterns = [ url(r'^reports/export/$', reports_export, name='reports-export'), url(r'^timesheet/$', AppView.as_view(), name='timesheet'), url(r'^clients/$', AppView.as_view(), name='clients'), url(r'^tasks/$', AppView.as_view(), name='tasks'), url(r'^reports/$', AppView.as_view(), name='reports'), url(r'^invoices/$', AppView.as_view(), name='invoices'), url(r'^invoices/([0-9]+)/$', AppView.as_view(), name='invoices'), url( r'^$', RedirectView.as_view(url=reverse_lazy('timesheet'), permanent=False), name='dashboard' ), ]
from django.conf.urls import url from django.views.generic import RedirectView from django.core.urlresolvers import reverse_lazy from .views import AppView, reports_export # Instead of using a wildcard for our app views we insert them one at a time # for naming purposes. This is so that users can change urls if they want and # the change will propagate throughout the site. urlpatterns = [ url(r'^reports/export/$', reports_export, name='reports-export'), url(r'^timesheet/$', AppView.as_view(), name='timesheet'), url(r'^clients/$', AppView.as_view(), name='clients'), url(r'^tasks/$', AppView.as_view(), name='tasks'), url(r'^reports/$', AppView.as_view(), name='reports'), url(r'^invoices/$', AppView.as_view(), name='invoices'), url( r'^$', RedirectView.as_view(url=reverse_lazy('timesheet'), permanent=False), name='dashboard' ), ] Add ability to go to individual invoice pagefrom django.conf.urls import url from django.views.generic import RedirectView from django.core.urlresolvers import reverse_lazy from .views import AppView, reports_export # Instead of using a wildcard for our app views we insert them one at a time # for naming purposes. This is so that users can change urls if they want and # the change will propagate throughout the site. urlpatterns = [ url(r'^reports/export/$', reports_export, name='reports-export'), url(r'^timesheet/$', AppView.as_view(), name='timesheet'), url(r'^clients/$', AppView.as_view(), name='clients'), url(r'^tasks/$', AppView.as_view(), name='tasks'), url(r'^reports/$', AppView.as_view(), name='reports'), url(r'^invoices/$', AppView.as_view(), name='invoices'), url(r'^invoices/([0-9]+)/$', AppView.as_view(), name='invoices'), url( r'^$', RedirectView.as_view(url=reverse_lazy('timesheet'), permanent=False), name='dashboard' ), ]
<commit_before>from django.conf.urls import url from django.views.generic import RedirectView from django.core.urlresolvers import reverse_lazy from .views import AppView, reports_export # Instead of using a wildcard for our app views we insert them one at a time # for naming purposes. This is so that users can change urls if they want and # the change will propagate throughout the site. urlpatterns = [ url(r'^reports/export/$', reports_export, name='reports-export'), url(r'^timesheet/$', AppView.as_view(), name='timesheet'), url(r'^clients/$', AppView.as_view(), name='clients'), url(r'^tasks/$', AppView.as_view(), name='tasks'), url(r'^reports/$', AppView.as_view(), name='reports'), url(r'^invoices/$', AppView.as_view(), name='invoices'), url( r'^$', RedirectView.as_view(url=reverse_lazy('timesheet'), permanent=False), name='dashboard' ), ] <commit_msg>Add ability to go to individual invoice page<commit_after>from django.conf.urls import url from django.views.generic import RedirectView from django.core.urlresolvers import reverse_lazy from .views import AppView, reports_export # Instead of using a wildcard for our app views we insert them one at a time # for naming purposes. This is so that users can change urls if they want and # the change will propagate throughout the site. urlpatterns = [ url(r'^reports/export/$', reports_export, name='reports-export'), url(r'^timesheet/$', AppView.as_view(), name='timesheet'), url(r'^clients/$', AppView.as_view(), name='clients'), url(r'^tasks/$', AppView.as_view(), name='tasks'), url(r'^reports/$', AppView.as_view(), name='reports'), url(r'^invoices/$', AppView.as_view(), name='invoices'), url(r'^invoices/([0-9]+)/$', AppView.as_view(), name='invoices'), url( r'^$', RedirectView.as_view(url=reverse_lazy('timesheet'), permanent=False), name='dashboard' ), ]
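Illustrative note (not part of the row above): the unnamed group in `r'^invoices/([0-9]+)/$'` is what Django captures and passes to the view as a positional argument. A stdlib-only sketch of the match itself:

import re

pattern = re.compile(r'^invoices/([0-9]+)/$')
match = pattern.match('invoices/42/')
assert match is not None and match.group(1) == '42'  # '42' becomes the view arg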
3b7e9d42db8ba0f4f3330544d3789427e7e3858c
python/04-1.py
python/04-1.py
#!/usr/bin/env python import hashlib prefix = '' number = 1 with open('../inputs/04.txt') as f: prefix = f.readlines() prefix = prefix[0].rstrip() while True: md5 = hashlib.md5() md5.update('{0}{1}'.format(prefix, number)) if md5.hexdigest()[:5] == '00000': #print md5.hexdigest() print number break number += 1
#!/usr/bin/env python import hashlib prefix = '' number = 1 with open('../inputs/04.txt') as f: prefix = f.readlines() prefix = prefix[0].rstrip() md5 = hashlib.md5() md5.update(prefix) while True: m = md5.copy() m.update(str(number)) if m.hexdigest()[:5] == '00000': print number break number += 1
Use md5.copy() to be more efficient.
Use md5.copy() to be more efficient. The hash.copy() documentation says this is more efficient given a common initial substring.
Python
mit
opello/adventofcode
#!/usr/bin/env python import hashlib prefix = '' number = 1 with open('../inputs/04.txt') as f: prefix = f.readlines() prefix = prefix[0].rstrip() while True: md5 = hashlib.md5() md5.update('{0}{1}'.format(prefix, number)) if md5.hexdigest()[:5] == '00000': #print md5.hexdigest() print number break number += 1 Use md5.copy() to be more efficient. The hash.copy() documentation says this is more efficient given a common initial substring.
#!/usr/bin/env python import hashlib prefix = '' number = 1 with open('../inputs/04.txt') as f: prefix = f.readlines() prefix = prefix[0].rstrip() md5 = hashlib.md5() md5.update(prefix) while True: m = md5.copy() m.update(str(number)) if m.hexdigest()[:5] == '00000': print number break number += 1
<commit_before>#!/usr/bin/env python import hashlib prefix = '' number = 1 with open('../inputs/04.txt') as f: prefix = f.readlines() prefix = prefix[0].rstrip() while True: md5 = hashlib.md5() md5.update('{0}{1}'.format(prefix, number)) if md5.hexdigest()[:5] == '00000': #print md5.hexdigest() print number break number += 1 <commit_msg>Use md5.copy() to be more efficient. The hash.copy() documentation says this is more efficient given a common initial substring.<commit_after>
#!/usr/bin/env python import hashlib prefix = '' number = 1 with open('../inputs/04.txt') as f: prefix = f.readlines() prefix = prefix[0].rstrip() md5 = hashlib.md5() md5.update(prefix) while True: m = md5.copy() m.update(str(number)) if m.hexdigest()[:5] == '00000': print number break number += 1
#!/usr/bin/env python import hashlib prefix = '' number = 1 with open('../inputs/04.txt') as f: prefix = f.readlines() prefix = prefix[0].rstrip() while True: md5 = hashlib.md5() md5.update('{0}{1}'.format(prefix, number)) if md5.hexdigest()[:5] == '00000': #print md5.hexdigest() print number break number += 1 Use md5.copy() to be more efficient. The hash.copy() documentation says this is more efficient given a common initial substring.#!/usr/bin/env python import hashlib prefix = '' number = 1 with open('../inputs/04.txt') as f: prefix = f.readlines() prefix = prefix[0].rstrip() md5 = hashlib.md5() md5.update(prefix) while True: m = md5.copy() m.update(str(number)) if m.hexdigest()[:5] == '00000': print number break number += 1
<commit_before>#!/usr/bin/env python import hashlib prefix = '' number = 1 with open('../inputs/04.txt') as f: prefix = f.readlines() prefix = prefix[0].rstrip() while True: md5 = hashlib.md5() md5.update('{0}{1}'.format(prefix, number)) if md5.hexdigest()[:5] == '00000': #print md5.hexdigest() print number break number += 1 <commit_msg>Use md5.copy() to be more efficient. The hash.copy() documentation says this is more efficient given a common initial substring.<commit_after>#!/usr/bin/env python import hashlib prefix = '' number = 1 with open('../inputs/04.txt') as f: prefix = f.readlines() prefix = prefix[0].rstrip() md5 = hashlib.md5() md5.update(prefix) while True: m = md5.copy() m.update(str(number)) if m.hexdigest()[:5] == '00000': print number break number += 1
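Illustrative note (not part of the row above): the `hash.copy()` trick hashes the shared prefix once and clones the partial digest state per candidate, giving the same result as rehashing the full string. A Python 3 sketch (the row itself is Python 2); the prefix/number pair is the well-known Advent of Code example:

import hashlib

prefix, number = 'abcdef', 609043

base = hashlib.md5()
base.update(prefix.encode())         # hash the common prefix once

clone = base.copy()                  # cheap: copies internal digest state
clone.update(str(number).encode())

full = hashlib.md5((prefix + str(number)).encode())
assert clone.hexdigest() == full.hexdigest()
assert clone.hexdigest().startswith('00000')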
b179423a7678aef0a8e286977055b25b9b0aac99
plugin/main.py
plugin/main.py
#!/usr/bin/env python """ Deploy builds to a Rancher orchestrated stack using rancher-compose """ import os import drone import subprocess def main(): """The main entrypoint for the plugin.""" payload = drone.plugin.get_input() vargs = payload["vargs"] # Change directory to deploy path deploy_path = payload["workspace"]["path"] os.chdir(deploy_path) # Optional fields compose_file = vargs.get('compose_file', 'docker-compose.yml') stack = vargs.get('stack', payload['repo']['name']) services = vargs.get('services', '') # Set Required fields for rancher-compose to work # Should raise an error if they are not declared os.environ["RANCHER_URL"] = vargs['url'] os.environ["RANCHER_ACCESS_KEY"] = vargs['access_key'] os.environ["RANCHER_SECRET_KEY"] = vargs['secret_key'] try: rc_args = [ "rancher-compose", "-f", compose_file, "-p", stack, "up", "-d", ] if services: rc_args.append(services) subprocess.check_call(rc_args) finally: # Unset environmental variables, no point in them hanging about del os.environ['RANCHER_URL'] del os.environ['RANCHER_ACCESS_KEY'] del os.environ['RANCHER_SECRET_KEY'] if __name__ == "__main__": main()
#!/usr/bin/env python
"""
Deploy builds to a Rancher orchestrated stack using rancher-compose
"""
import os
import drone
import subprocess


def main():
    """The main entrypoint for the plugin."""
    payload = drone.plugin.get_input()
    vargs = payload["vargs"]

    # Change directory to deploy path
    deploy_path = payload["workspace"]["path"]
    os.chdir(deploy_path)

    # Optional fields
    compose_file = vargs.get('compose_file', 'docker-compose.yml')
    stack = vargs.get('stack', payload['repo']['name'])
    services = vargs.get('services', '')

    # Set Required fields for rancher-compose to work
    # Should raise an error if they are not declared
    os.environ["RANCHER_URL"] = vargs['url']
    os.environ["RANCHER_ACCESS_KEY"] = vargs['access_key']
    os.environ["RANCHER_SECRET_KEY"] = vargs['secret_key']

    try:
        rancher_compose_command = [
            "rancher-compose",
            "-f", compose_file,
            "-p", stack,
            "up", "-d", "--force-upgrade",
        ]
        if services:
            rancher_compose_command.append(services)
        print(' '.join(rancher_compose_command))
        subprocess.check_call(rancher_compose_command)
    finally:
        # Unset environmental variables, no point in them hanging about
        del os.environ['RANCHER_URL']
        del os.environ['RANCHER_ACCESS_KEY']
        del os.environ['RANCHER_SECRET_KEY']


if __name__ == "__main__":
    main()
Print rancher-compose command to help debug/confirmation
Print rancher-compose command to help debug/confirmation
Python
apache-2.0
dangerfarms/drone-rancher
#!/usr/bin/env python """ Deploy builds to a Rancher orchestrated stack using rancher-compose """ import os import drone import subprocess def main(): """The main entrypoint for the plugin.""" payload = drone.plugin.get_input() vargs = payload["vargs"] # Change directory to deploy path deploy_path = payload["workspace"]["path"] os.chdir(deploy_path) # Optional fields compose_file = vargs.get('compose_file', 'docker-compose.yml') stack = vargs.get('stack', payload['repo']['name']) services = vargs.get('services', '') # Set Required fields for rancher-compose to work # Should raise an error if they are not declared os.environ["RANCHER_URL"] = vargs['url'] os.environ["RANCHER_ACCESS_KEY"] = vargs['access_key'] os.environ["RANCHER_SECRET_KEY"] = vargs['secret_key'] try: rc_args = [ "rancher-compose", "-f", compose_file, "-p", stack, "up", "-d", ] if services: rc_args.append(services) subprocess.check_call(rc_args) finally: # Unset environmental variables, no point in them hanging about del os.environ['RANCHER_URL'] del os.environ['RANCHER_ACCESS_KEY'] del os.environ['RANCHER_SECRET_KEY'] if __name__ == "__main__": main() Print rancher-compose command to help debug/confirmation
#!/usr/bin/env python
"""
Deploy builds to a Rancher orchestrated stack using rancher-compose
"""
import os
import drone
import subprocess


def main():
    """The main entrypoint for the plugin."""
    payload = drone.plugin.get_input()
    vargs = payload["vargs"]

    # Change directory to deploy path
    deploy_path = payload["workspace"]["path"]
    os.chdir(deploy_path)

    # Optional fields
    compose_file = vargs.get('compose_file', 'docker-compose.yml')
    stack = vargs.get('stack', payload['repo']['name'])
    services = vargs.get('services', '')

    # Set Required fields for rancher-compose to work
    # Should raise an error if they are not declared
    os.environ["RANCHER_URL"] = vargs['url']
    os.environ["RANCHER_ACCESS_KEY"] = vargs['access_key']
    os.environ["RANCHER_SECRET_KEY"] = vargs['secret_key']

    try:
        rancher_compose_command = [
            "rancher-compose",
            "-f", compose_file,
            "-p", stack,
            "up", "-d", "--force-upgrade",
        ]
        if services:
            rancher_compose_command.append(services)
        print(' '.join(rancher_compose_command))
        subprocess.check_call(rancher_compose_command)
    finally:
        # Unset environmental variables, no point in them hanging about
        del os.environ['RANCHER_URL']
        del os.environ['RANCHER_ACCESS_KEY']
        del os.environ['RANCHER_SECRET_KEY']


if __name__ == "__main__":
    main()
<commit_before>#!/usr/bin/env python """ Deploy builds to a Rancher orchestrated stack using rancher-compose """ import os import drone import subprocess def main(): """The main entrypoint for the plugin.""" payload = drone.plugin.get_input() vargs = payload["vargs"] # Change directory to deploy path deploy_path = payload["workspace"]["path"] os.chdir(deploy_path) # Optional fields compose_file = vargs.get('compose_file', 'docker-compose.yml') stack = vargs.get('stack', payload['repo']['name']) services = vargs.get('services', '') # Set Required fields for rancher-compose to work # Should raise an error if they are not declared os.environ["RANCHER_URL"] = vargs['url'] os.environ["RANCHER_ACCESS_KEY"] = vargs['access_key'] os.environ["RANCHER_SECRET_KEY"] = vargs['secret_key'] try: rc_args = [ "rancher-compose", "-f", compose_file, "-p", stack, "up", "-d", ] if services: rc_args.append(services) subprocess.check_call(rc_args) finally: # Unset environmental variables, no point in them hanging about del os.environ['RANCHER_URL'] del os.environ['RANCHER_ACCESS_KEY'] del os.environ['RANCHER_SECRET_KEY'] if __name__ == "__main__": main() <commit_msg>Print rancher-compose command to help debug/confirmation<commit_after>
#!/usr/bin/env python
"""
Deploy builds to a Rancher orchestrated stack using rancher-compose
"""
import os
import drone
import subprocess


def main():
    """The main entrypoint for the plugin."""
    payload = drone.plugin.get_input()
    vargs = payload["vargs"]

    # Change directory to deploy path
    deploy_path = payload["workspace"]["path"]
    os.chdir(deploy_path)

    # Optional fields
    compose_file = vargs.get('compose_file', 'docker-compose.yml')
    stack = vargs.get('stack', payload['repo']['name'])
    services = vargs.get('services', '')

    # Set Required fields for rancher-compose to work
    # Should raise an error if they are not declared
    os.environ["RANCHER_URL"] = vargs['url']
    os.environ["RANCHER_ACCESS_KEY"] = vargs['access_key']
    os.environ["RANCHER_SECRET_KEY"] = vargs['secret_key']

    try:
        rancher_compose_command = [
            "rancher-compose",
            "-f", compose_file,
            "-p", stack,
            "up", "-d", "--force-upgrade",
        ]
        if services:
            rancher_compose_command.append(services)
        print(' '.join(rancher_compose_command))
        subprocess.check_call(rancher_compose_command)
    finally:
        # Unset environmental variables, no point in them hanging about
        del os.environ['RANCHER_URL']
        del os.environ['RANCHER_ACCESS_KEY']
        del os.environ['RANCHER_SECRET_KEY']


if __name__ == "__main__":
    main()
#!/usr/bin/env python
"""
Deploy builds to a Rancher orchestrated stack using rancher-compose
"""

import os

import drone
import subprocess


def main():
    """The main entrypoint for the plugin."""
    payload = drone.plugin.get_input()
    vargs = payload["vargs"]

    # Change directory to deploy path
    deploy_path = payload["workspace"]["path"]
    os.chdir(deploy_path)

    # Optional fields
    compose_file = vargs.get('compose_file', 'docker-compose.yml')
    stack = vargs.get('stack', payload['repo']['name'])
    services = vargs.get('services', '')

    # Set Required fields for rancher-compose to work
    # Should raise an error if they are not declared
    os.environ["RANCHER_URL"] = vargs['url']
    os.environ["RANCHER_ACCESS_KEY"] = vargs['access_key']
    os.environ["RANCHER_SECRET_KEY"] = vargs['secret_key']

    try:
        rc_args = [
            "rancher-compose",
            "-f", compose_file,
            "-p", stack,
            "up", "-d",
        ]
        if services:
            rc_args.append(services)
        subprocess.check_call(rc_args)
    finally:
        # Unset environmental variables, no point in them hanging about
        del os.environ['RANCHER_URL']
        del os.environ['RANCHER_ACCESS_KEY']
        del os.environ['RANCHER_SECRET_KEY']

if __name__ == "__main__":
    main()
Print rancher-compose command to help debug/confirmation#!/usr/bin/env python
"""
Deploy builds to a Rancher orchestrated stack using rancher-compose
"""

import os

import drone
import subprocess


def main():
    """The main entrypoint for the plugin."""
    payload = drone.plugin.get_input()
    vargs = payload["vargs"]

    # Change directory to deploy path
    deploy_path = payload["workspace"]["path"]
    os.chdir(deploy_path)

    # Optional fields
    compose_file = vargs.get('compose_file', 'docker-compose.yml')
    stack = vargs.get('stack', payload['repo']['name'])
    services = vargs.get('services', '')

    # Set Required fields for rancher-compose to work
    # Should raise an error if they are not declared
    os.environ["RANCHER_URL"] = vargs['url']
    os.environ["RANCHER_ACCESS_KEY"] = vargs['access_key']
    os.environ["RANCHER_SECRET_KEY"] = vargs['secret_key']

    try:
        rancher_compose_command = [
            "rancher-compose",
            "-f", compose_file,
            "-p", stack,
            "up", "-d", "--force-upgrade",
        ]
        if services:
            rancher_compose_command.append(services)
        print(' '.join(rancher_compose_command))
        subprocess.check_call(rancher_compose_command)
    finally:
        # Unset environmental variables, no point in them hanging about
        del os.environ['RANCHER_URL']
        del os.environ['RANCHER_ACCESS_KEY']
        del os.environ['RANCHER_SECRET_KEY']

if __name__ == "__main__":
    main()
<commit_before>#!/usr/bin/env python
"""
Deploy builds to a Rancher orchestrated stack using rancher-compose
"""

import os

import drone
import subprocess


def main():
    """The main entrypoint for the plugin."""
    payload = drone.plugin.get_input()
    vargs = payload["vargs"]

    # Change directory to deploy path
    deploy_path = payload["workspace"]["path"]
    os.chdir(deploy_path)

    # Optional fields
    compose_file = vargs.get('compose_file', 'docker-compose.yml')
    stack = vargs.get('stack', payload['repo']['name'])
    services = vargs.get('services', '')

    # Set Required fields for rancher-compose to work
    # Should raise an error if they are not declared
    os.environ["RANCHER_URL"] = vargs['url']
    os.environ["RANCHER_ACCESS_KEY"] = vargs['access_key']
    os.environ["RANCHER_SECRET_KEY"] = vargs['secret_key']

    try:
        rc_args = [
            "rancher-compose",
            "-f", compose_file,
            "-p", stack,
            "up", "-d",
        ]
        if services:
            rc_args.append(services)
        subprocess.check_call(rc_args)
    finally:
        # Unset environmental variables, no point in them hanging about
        del os.environ['RANCHER_URL']
        del os.environ['RANCHER_ACCESS_KEY']
        del os.environ['RANCHER_SECRET_KEY']

if __name__ == "__main__":
    main()
<commit_msg>Print rancher-compose command to help debug/confirmation<commit_after>#!/usr/bin/env python
"""
Deploy builds to a Rancher orchestrated stack using rancher-compose
"""

import os

import drone
import subprocess


def main():
    """The main entrypoint for the plugin."""
    payload = drone.plugin.get_input()
    vargs = payload["vargs"]

    # Change directory to deploy path
    deploy_path = payload["workspace"]["path"]
    os.chdir(deploy_path)

    # Optional fields
    compose_file = vargs.get('compose_file', 'docker-compose.yml')
    stack = vargs.get('stack', payload['repo']['name'])
    services = vargs.get('services', '')

    # Set Required fields for rancher-compose to work
    # Should raise an error if they are not declared
    os.environ["RANCHER_URL"] = vargs['url']
    os.environ["RANCHER_ACCESS_KEY"] = vargs['access_key']
    os.environ["RANCHER_SECRET_KEY"] = vargs['secret_key']

    try:
        rancher_compose_command = [
            "rancher-compose",
            "-f", compose_file,
            "-p", stack,
            "up", "-d", "--force-upgrade",
        ]
        if services:
            rancher_compose_command.append(services)
        print(' '.join(rancher_compose_command))
        subprocess.check_call(rancher_compose_command)
    finally:
        # Unset environmental variables, no point in them hanging about
        del os.environ['RANCHER_URL']
        del os.environ['RANCHER_ACCESS_KEY']
        del os.environ['RANCHER_SECRET_KEY']

if __name__ == "__main__":
    main()
39ed9fecd03f837c1ca7436b4695734b1602a356
create_sample.py
create_sample.py
# importing modules/ libraries import pandas as pd import random import numpy as np # create a sample of prior orders orders_df = pd.read_csv("Data/orders.csv") s = round(3214874 * 0.1) i = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="prior"].index), s)) orders_df.loc[i,:].to_csv("Data/orders_prior_sample.csv", index = False) # create a sample of train orders s = round(131209 * 0.1) j = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="train"].index), s)) orders_df.loc[j,:].to_csv("Data/orders_train_sample.csv", index = False) # create a sample of prior order products order_products_prior_df = pd.read_csv('Data/order_products__prior.csv', index_col = 'order_id') order_products_prior_df.loc[orders_df.loc[i,:]['order_id'],:].to_csv("Data/order_products_prior_sample.csv", index = False) # create a sample of train order products order_products_train_df = pd.read_csv('Data/order_products__train.csv', index_col = 'order_id') order_products_train_df.loc[orders_df.loc[j,:]['order_id'],:].to_csv("Data/order_products_train_sample.csv", index = False)
# importing modules/ libraries import pandas as pd import random import numpy as np # create a sample of prior orders orders_df = pd.read_csv("Data/orders.csv") s = round(3214874 * 0.1) i = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="prior"].index), s)) orders_df.loc[i,:].to_csv("Data/orders_prior_sample.csv", index = False) # create a sample of train orders s = round(131209 * 0.1) j = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="train"].index), s)) orders_df.loc[j,:].to_csv("Data/orders_train_sample.csv", index = False) # create a sample of prior order products order_products_prior_df = pd.read_csv('Data/order_products__prior.csv', index_col = 'order_id') order_products_prior_df.loc[orders_df.loc[i,:]['order_id'],:].to_csv("Data/order_products_prior_sample.csv") # create a sample of train order products order_products_train_df = pd.read_csv('Data/order_products__train.csv', index_col = 'order_id') order_products_train_df.loc[orders_df.loc[j,:]['order_id'],:].to_csv("Data/order_products_train_sample.csv")
Remove index argument while creating order products sample
fix: Remove index argument while creating order products sample
Python
mit
rjegankumar/instacart_prediction_model
# importing modules/ libraries import pandas as pd import random import numpy as np # create a sample of prior orders orders_df = pd.read_csv("Data/orders.csv") s = round(3214874 * 0.1) i = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="prior"].index), s)) orders_df.loc[i,:].to_csv("Data/orders_prior_sample.csv", index = False) # create a sample of train orders s = round(131209 * 0.1) j = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="train"].index), s)) orders_df.loc[j,:].to_csv("Data/orders_train_sample.csv", index = False) # create a sample of prior order products order_products_prior_df = pd.read_csv('Data/order_products__prior.csv', index_col = 'order_id') order_products_prior_df.loc[orders_df.loc[i,:]['order_id'],:].to_csv("Data/order_products_prior_sample.csv", index = False) # create a sample of train order products order_products_train_df = pd.read_csv('Data/order_products__train.csv', index_col = 'order_id') order_products_train_df.loc[orders_df.loc[j,:]['order_id'],:].to_csv("Data/order_products_train_sample.csv", index = False) fix: Remove index argument while creating order products sample
# importing modules/ libraries import pandas as pd import random import numpy as np # create a sample of prior orders orders_df = pd.read_csv("Data/orders.csv") s = round(3214874 * 0.1) i = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="prior"].index), s)) orders_df.loc[i,:].to_csv("Data/orders_prior_sample.csv", index = False) # create a sample of train orders s = round(131209 * 0.1) j = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="train"].index), s)) orders_df.loc[j,:].to_csv("Data/orders_train_sample.csv", index = False) # create a sample of prior order products order_products_prior_df = pd.read_csv('Data/order_products__prior.csv', index_col = 'order_id') order_products_prior_df.loc[orders_df.loc[i,:]['order_id'],:].to_csv("Data/order_products_prior_sample.csv") # create a sample of train order products order_products_train_df = pd.read_csv('Data/order_products__train.csv', index_col = 'order_id') order_products_train_df.loc[orders_df.loc[j,:]['order_id'],:].to_csv("Data/order_products_train_sample.csv")
<commit_before># importing modules/ libraries import pandas as pd import random import numpy as np # create a sample of prior orders orders_df = pd.read_csv("Data/orders.csv") s = round(3214874 * 0.1) i = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="prior"].index), s)) orders_df.loc[i,:].to_csv("Data/orders_prior_sample.csv", index = False) # create a sample of train orders s = round(131209 * 0.1) j = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="train"].index), s)) orders_df.loc[j,:].to_csv("Data/orders_train_sample.csv", index = False) # create a sample of prior order products order_products_prior_df = pd.read_csv('Data/order_products__prior.csv', index_col = 'order_id') order_products_prior_df.loc[orders_df.loc[i,:]['order_id'],:].to_csv("Data/order_products_prior_sample.csv", index = False) # create a sample of train order products order_products_train_df = pd.read_csv('Data/order_products__train.csv', index_col = 'order_id') order_products_train_df.loc[orders_df.loc[j,:]['order_id'],:].to_csv("Data/order_products_train_sample.csv", index = False) <commit_msg>fix: Remove index argument while creating order products sample<commit_after>
# importing modules/ libraries import pandas as pd import random import numpy as np # create a sample of prior orders orders_df = pd.read_csv("Data/orders.csv") s = round(3214874 * 0.1) i = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="prior"].index), s)) orders_df.loc[i,:].to_csv("Data/orders_prior_sample.csv", index = False) # create a sample of train orders s = round(131209 * 0.1) j = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="train"].index), s)) orders_df.loc[j,:].to_csv("Data/orders_train_sample.csv", index = False) # create a sample of prior order products order_products_prior_df = pd.read_csv('Data/order_products__prior.csv', index_col = 'order_id') order_products_prior_df.loc[orders_df.loc[i,:]['order_id'],:].to_csv("Data/order_products_prior_sample.csv") # create a sample of train order products order_products_train_df = pd.read_csv('Data/order_products__train.csv', index_col = 'order_id') order_products_train_df.loc[orders_df.loc[j,:]['order_id'],:].to_csv("Data/order_products_train_sample.csv")
# importing modules/ libraries import pandas as pd import random import numpy as np # create a sample of prior orders orders_df = pd.read_csv("Data/orders.csv") s = round(3214874 * 0.1) i = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="prior"].index), s)) orders_df.loc[i,:].to_csv("Data/orders_prior_sample.csv", index = False) # create a sample of train orders s = round(131209 * 0.1) j = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="train"].index), s)) orders_df.loc[j,:].to_csv("Data/orders_train_sample.csv", index = False) # create a sample of prior order products order_products_prior_df = pd.read_csv('Data/order_products__prior.csv', index_col = 'order_id') order_products_prior_df.loc[orders_df.loc[i,:]['order_id'],:].to_csv("Data/order_products_prior_sample.csv", index = False) # create a sample of train order products order_products_train_df = pd.read_csv('Data/order_products__train.csv', index_col = 'order_id') order_products_train_df.loc[orders_df.loc[j,:]['order_id'],:].to_csv("Data/order_products_train_sample.csv", index = False) fix: Remove index argument while creating order products sample# importing modules/ libraries import pandas as pd import random import numpy as np # create a sample of prior orders orders_df = pd.read_csv("Data/orders.csv") s = round(3214874 * 0.1) i = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="prior"].index), s)) orders_df.loc[i,:].to_csv("Data/orders_prior_sample.csv", index = False) # create a sample of train orders s = round(131209 * 0.1) j = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="train"].index), s)) orders_df.loc[j,:].to_csv("Data/orders_train_sample.csv", index = False) # create a sample of prior order products order_products_prior_df = pd.read_csv('Data/order_products__prior.csv', index_col = 'order_id') order_products_prior_df.loc[orders_df.loc[i,:]['order_id'],:].to_csv("Data/order_products_prior_sample.csv") # create a sample of train order products order_products_train_df = pd.read_csv('Data/order_products__train.csv', index_col = 'order_id') order_products_train_df.loc[orders_df.loc[j,:]['order_id'],:].to_csv("Data/order_products_train_sample.csv")
<commit_before># importing modules/ libraries import pandas as pd import random import numpy as np # create a sample of prior orders orders_df = pd.read_csv("Data/orders.csv") s = round(3214874 * 0.1) i = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="prior"].index), s)) orders_df.loc[i,:].to_csv("Data/orders_prior_sample.csv", index = False) # create a sample of train orders s = round(131209 * 0.1) j = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="train"].index), s)) orders_df.loc[j,:].to_csv("Data/orders_train_sample.csv", index = False) # create a sample of prior order products order_products_prior_df = pd.read_csv('Data/order_products__prior.csv', index_col = 'order_id') order_products_prior_df.loc[orders_df.loc[i,:]['order_id'],:].to_csv("Data/order_products_prior_sample.csv", index = False) # create a sample of train order products order_products_train_df = pd.read_csv('Data/order_products__train.csv', index_col = 'order_id') order_products_train_df.loc[orders_df.loc[j,:]['order_id'],:].to_csv("Data/order_products_train_sample.csv", index = False) <commit_msg>fix: Remove index argument while creating order products sample<commit_after># importing modules/ libraries import pandas as pd import random import numpy as np # create a sample of prior orders orders_df = pd.read_csv("Data/orders.csv") s = round(3214874 * 0.1) i = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="prior"].index), s)) orders_df.loc[i,:].to_csv("Data/orders_prior_sample.csv", index = False) # create a sample of train orders s = round(131209 * 0.1) j = sorted(random.sample(list(orders_df[orders_df["eval_set"]=="train"].index), s)) orders_df.loc[j,:].to_csv("Data/orders_train_sample.csv", index = False) # create a sample of prior order products order_products_prior_df = pd.read_csv('Data/order_products__prior.csv', index_col = 'order_id') order_products_prior_df.loc[orders_df.loc[i,:]['order_id'],:].to_csv("Data/order_products_prior_sample.csv") # create a sample of train order products order_products_train_df = pd.read_csv('Data/order_products__train.csv', index_col = 'order_id') order_products_train_df.loc[orders_df.loc[j,:]['order_id'],:].to_csv("Data/order_products_train_sample.csv")
6eed42d2abbc458b7df4d06faf55a70e33404030
docs/conf.py
docs/conf.py
# -*- coding: utf-8 -*- AUTHOR = u'Adrian Sampson' # General configuration extensions = ['sphinx.ext.autodoc', 'sphinx.ext.extlinks'] exclude_patterns = ['_build'] source_suffix = '.rst' master_doc = 'index' project = u'beets' copyright = u'2012, Adrian Sampson' version = '1.3' release = '1.3.17' pygments_style = 'sphinx' # External links to the bug tracker. extlinks = { 'bug': ('https://github.com/beetbox/beets/issues/%s', '#'), 'user': ('https://github.com/%s', ''), } # Options for HTML output htmlhelp_basename = 'beetsdoc' # Options for LaTeX output latex_documents = [ ('index', 'beets.tex', u'beets Documentation', AUTHOR, 'manual'), ] # Options for manual page output man_pages = [ ('reference/cli', 'beet', u'music tagger and library organizer', [AUTHOR], 1), ('reference/config', 'beetsconfig', u'beets configuration file', [AUTHOR], 5), ]
# -*- coding: utf-8 -*- AUTHOR = u'Adrian Sampson' # General configuration extensions = ['sphinx.ext.autodoc', 'sphinx.ext.extlinks'] exclude_patterns = ['_build'] source_suffix = '.rst' master_doc = 'index' project = u'beets' copyright = u'2016, Adrian Sampson' version = '1.3' release = '1.3.17' pygments_style = 'sphinx' # External links to the bug tracker. extlinks = { 'bug': ('https://github.com/beetbox/beets/issues/%s', '#'), 'user': ('https://github.com/%s', ''), } # Options for HTML output htmlhelp_basename = 'beetsdoc' # Options for LaTeX output latex_documents = [ ('index', 'beets.tex', u'beets Documentation', AUTHOR, 'manual'), ] # Options for manual page output man_pages = [ ('reference/cli', 'beet', u'music tagger and library organizer', [AUTHOR], 1), ('reference/config', 'beetsconfig', u'beets configuration file', [AUTHOR], 5), ]
Update documentation copyright year to 2016
Update documentation copyright year to 2016
Python
mit
mried/beets,SusannaMaria/beets,Freso/beets,madmouser1/beets,Freso/beets,ibmibmibm/beets,MyTunesFreeMusic/privacy-policy,jcoady9/beets,parapente/beets,ibmibmibm/beets,parapente/beets,beetbox/beets,Freso/beets,madmouser1/beets,SusannaMaria/beets,MyTunesFreeMusic/privacy-policy,beetbox/beets,mried/beets,xsteadfastx/beets,pkess/beets,jcoady9/beets,shamangeorge/beets,SusannaMaria/beets,MyTunesFreeMusic/privacy-policy,xsteadfastx/beets,Kraymer/beets,swt30/beets,SusannaMaria/beets,mosesfistos1/beetbox,swt30/beets,swt30/beets,beetbox/beets,madmouser1/beets,artemutin/beets,parapente/beets,lengtche/beets,mosesfistos1/beetbox,mried/beets,jackwilsdon/beets,xsteadfastx/beets,mried/beets,ibmibmibm/beets,artemutin/beets,jackwilsdon/beets,jcoady9/beets,artemutin/beets,lengtche/beets,sampsyo/beets,diego-plan9/beets,artemutin/beets,lengtche/beets,diego-plan9/beets,pkess/beets,shamangeorge/beets,xsteadfastx/beets,jackwilsdon/beets,madmouser1/beets,MyTunesFreeMusic/privacy-policy,pkess/beets,parapente/beets,ibmibmibm/beets,Kraymer/beets,mosesfistos1/beetbox,jackwilsdon/beets,Kraymer/beets,sampsyo/beets,diego-plan9/beets,swt30/beets,Kraymer/beets,beetbox/beets,shamangeorge/beets,sampsyo/beets,sampsyo/beets,diego-plan9/beets,jcoady9/beets,lengtche/beets,pkess/beets,Freso/beets,shamangeorge/beets,mosesfistos1/beetbox
# -*- coding: utf-8 -*- AUTHOR = u'Adrian Sampson' # General configuration extensions = ['sphinx.ext.autodoc', 'sphinx.ext.extlinks'] exclude_patterns = ['_build'] source_suffix = '.rst' master_doc = 'index' project = u'beets' copyright = u'2012, Adrian Sampson' version = '1.3' release = '1.3.17' pygments_style = 'sphinx' # External links to the bug tracker. extlinks = { 'bug': ('https://github.com/beetbox/beets/issues/%s', '#'), 'user': ('https://github.com/%s', ''), } # Options for HTML output htmlhelp_basename = 'beetsdoc' # Options for LaTeX output latex_documents = [ ('index', 'beets.tex', u'beets Documentation', AUTHOR, 'manual'), ] # Options for manual page output man_pages = [ ('reference/cli', 'beet', u'music tagger and library organizer', [AUTHOR], 1), ('reference/config', 'beetsconfig', u'beets configuration file', [AUTHOR], 5), ] Update documentation copyright year to 2016
# -*- coding: utf-8 -*- AUTHOR = u'Adrian Sampson' # General configuration extensions = ['sphinx.ext.autodoc', 'sphinx.ext.extlinks'] exclude_patterns = ['_build'] source_suffix = '.rst' master_doc = 'index' project = u'beets' copyright = u'2016, Adrian Sampson' version = '1.3' release = '1.3.17' pygments_style = 'sphinx' # External links to the bug tracker. extlinks = { 'bug': ('https://github.com/beetbox/beets/issues/%s', '#'), 'user': ('https://github.com/%s', ''), } # Options for HTML output htmlhelp_basename = 'beetsdoc' # Options for LaTeX output latex_documents = [ ('index', 'beets.tex', u'beets Documentation', AUTHOR, 'manual'), ] # Options for manual page output man_pages = [ ('reference/cli', 'beet', u'music tagger and library organizer', [AUTHOR], 1), ('reference/config', 'beetsconfig', u'beets configuration file', [AUTHOR], 5), ]
<commit_before># -*- coding: utf-8 -*- AUTHOR = u'Adrian Sampson' # General configuration extensions = ['sphinx.ext.autodoc', 'sphinx.ext.extlinks'] exclude_patterns = ['_build'] source_suffix = '.rst' master_doc = 'index' project = u'beets' copyright = u'2012, Adrian Sampson' version = '1.3' release = '1.3.17' pygments_style = 'sphinx' # External links to the bug tracker. extlinks = { 'bug': ('https://github.com/beetbox/beets/issues/%s', '#'), 'user': ('https://github.com/%s', ''), } # Options for HTML output htmlhelp_basename = 'beetsdoc' # Options for LaTeX output latex_documents = [ ('index', 'beets.tex', u'beets Documentation', AUTHOR, 'manual'), ] # Options for manual page output man_pages = [ ('reference/cli', 'beet', u'music tagger and library organizer', [AUTHOR], 1), ('reference/config', 'beetsconfig', u'beets configuration file', [AUTHOR], 5), ] <commit_msg>Update documentation copyright year to 2016<commit_after>
# -*- coding: utf-8 -*- AUTHOR = u'Adrian Sampson' # General configuration extensions = ['sphinx.ext.autodoc', 'sphinx.ext.extlinks'] exclude_patterns = ['_build'] source_suffix = '.rst' master_doc = 'index' project = u'beets' copyright = u'2016, Adrian Sampson' version = '1.3' release = '1.3.17' pygments_style = 'sphinx' # External links to the bug tracker. extlinks = { 'bug': ('https://github.com/beetbox/beets/issues/%s', '#'), 'user': ('https://github.com/%s', ''), } # Options for HTML output htmlhelp_basename = 'beetsdoc' # Options for LaTeX output latex_documents = [ ('index', 'beets.tex', u'beets Documentation', AUTHOR, 'manual'), ] # Options for manual page output man_pages = [ ('reference/cli', 'beet', u'music tagger and library organizer', [AUTHOR], 1), ('reference/config', 'beetsconfig', u'beets configuration file', [AUTHOR], 5), ]
# -*- coding: utf-8 -*- AUTHOR = u'Adrian Sampson' # General configuration extensions = ['sphinx.ext.autodoc', 'sphinx.ext.extlinks'] exclude_patterns = ['_build'] source_suffix = '.rst' master_doc = 'index' project = u'beets' copyright = u'2012, Adrian Sampson' version = '1.3' release = '1.3.17' pygments_style = 'sphinx' # External links to the bug tracker. extlinks = { 'bug': ('https://github.com/beetbox/beets/issues/%s', '#'), 'user': ('https://github.com/%s', ''), } # Options for HTML output htmlhelp_basename = 'beetsdoc' # Options for LaTeX output latex_documents = [ ('index', 'beets.tex', u'beets Documentation', AUTHOR, 'manual'), ] # Options for manual page output man_pages = [ ('reference/cli', 'beet', u'music tagger and library organizer', [AUTHOR], 1), ('reference/config', 'beetsconfig', u'beets configuration file', [AUTHOR], 5), ] Update documentation copyright year to 2016# -*- coding: utf-8 -*- AUTHOR = u'Adrian Sampson' # General configuration extensions = ['sphinx.ext.autodoc', 'sphinx.ext.extlinks'] exclude_patterns = ['_build'] source_suffix = '.rst' master_doc = 'index' project = u'beets' copyright = u'2016, Adrian Sampson' version = '1.3' release = '1.3.17' pygments_style = 'sphinx' # External links to the bug tracker. extlinks = { 'bug': ('https://github.com/beetbox/beets/issues/%s', '#'), 'user': ('https://github.com/%s', ''), } # Options for HTML output htmlhelp_basename = 'beetsdoc' # Options for LaTeX output latex_documents = [ ('index', 'beets.tex', u'beets Documentation', AUTHOR, 'manual'), ] # Options for manual page output man_pages = [ ('reference/cli', 'beet', u'music tagger and library organizer', [AUTHOR], 1), ('reference/config', 'beetsconfig', u'beets configuration file', [AUTHOR], 5), ]
<commit_before># -*- coding: utf-8 -*- AUTHOR = u'Adrian Sampson' # General configuration extensions = ['sphinx.ext.autodoc', 'sphinx.ext.extlinks'] exclude_patterns = ['_build'] source_suffix = '.rst' master_doc = 'index' project = u'beets' copyright = u'2012, Adrian Sampson' version = '1.3' release = '1.3.17' pygments_style = 'sphinx' # External links to the bug tracker. extlinks = { 'bug': ('https://github.com/beetbox/beets/issues/%s', '#'), 'user': ('https://github.com/%s', ''), } # Options for HTML output htmlhelp_basename = 'beetsdoc' # Options for LaTeX output latex_documents = [ ('index', 'beets.tex', u'beets Documentation', AUTHOR, 'manual'), ] # Options for manual page output man_pages = [ ('reference/cli', 'beet', u'music tagger and library organizer', [AUTHOR], 1), ('reference/config', 'beetsconfig', u'beets configuration file', [AUTHOR], 5), ] <commit_msg>Update documentation copyright year to 2016<commit_after># -*- coding: utf-8 -*- AUTHOR = u'Adrian Sampson' # General configuration extensions = ['sphinx.ext.autodoc', 'sphinx.ext.extlinks'] exclude_patterns = ['_build'] source_suffix = '.rst' master_doc = 'index' project = u'beets' copyright = u'2016, Adrian Sampson' version = '1.3' release = '1.3.17' pygments_style = 'sphinx' # External links to the bug tracker. extlinks = { 'bug': ('https://github.com/beetbox/beets/issues/%s', '#'), 'user': ('https://github.com/%s', ''), } # Options for HTML output htmlhelp_basename = 'beetsdoc' # Options for LaTeX output latex_documents = [ ('index', 'beets.tex', u'beets Documentation', AUTHOR, 'manual'), ] # Options for manual page output man_pages = [ ('reference/cli', 'beet', u'music tagger and library organizer', [AUTHOR], 1), ('reference/config', 'beetsconfig', u'beets configuration file', [AUTHOR], 5), ]
21f74472d8e229d6e662eff39f90886f4357d8c3
been/source/markdown.py
been/source/markdown.py
from been.core import DirectorySource, source_registry class MarkdownDirectory(DirectorySource): kind = 'markdown' def process_event(self, event): lines = event['content'].splitlines() event['title'] = lines[0] event['content'] = "\n".join(lines[1:]) event['summary'] = event['content'] return event source_registry.add(MarkdownDirectory)
from been.core import DirectorySource, source_registry import re import unicodedata def slugify(value): value = unicodedata.normalize('NFKD', unicode(value)).encode('ascii', 'ignore') value = unicode(re.sub('[^\w\s-]', '', value).strip().lower()) return re.sub('[-\s]+', '-', value) class MarkdownDirectory(DirectorySource): kind = 'markdown' def process_event(self, event): lines = event['content'].splitlines() event['title'] = lines[0] event['slug'] = slugify(event['title']) event['content'] = "\n".join(lines[1:]) event['summary'] = event['content'] return event source_registry.add(MarkdownDirectory)
Add slug generation to Markdown source.
Add slug generation to Markdown source.
Python
bsd-3-clause
chromakode/been
from been.core import DirectorySource, source_registry class MarkdownDirectory(DirectorySource): kind = 'markdown' def process_event(self, event): lines = event['content'].splitlines() event['title'] = lines[0] event['content'] = "\n".join(lines[1:]) event['summary'] = event['content'] return event source_registry.add(MarkdownDirectory) Add slug generation to Markdown source.
from been.core import DirectorySource, source_registry import re import unicodedata def slugify(value): value = unicodedata.normalize('NFKD', unicode(value)).encode('ascii', 'ignore') value = unicode(re.sub('[^\w\s-]', '', value).strip().lower()) return re.sub('[-\s]+', '-', value) class MarkdownDirectory(DirectorySource): kind = 'markdown' def process_event(self, event): lines = event['content'].splitlines() event['title'] = lines[0] event['slug'] = slugify(event['title']) event['content'] = "\n".join(lines[1:]) event['summary'] = event['content'] return event source_registry.add(MarkdownDirectory)
<commit_before>from been.core import DirectorySource, source_registry class MarkdownDirectory(DirectorySource): kind = 'markdown' def process_event(self, event): lines = event['content'].splitlines() event['title'] = lines[0] event['content'] = "\n".join(lines[1:]) event['summary'] = event['content'] return event source_registry.add(MarkdownDirectory) <commit_msg>Add slug generation to Markdown source.<commit_after>
from been.core import DirectorySource, source_registry import re import unicodedata def slugify(value): value = unicodedata.normalize('NFKD', unicode(value)).encode('ascii', 'ignore') value = unicode(re.sub('[^\w\s-]', '', value).strip().lower()) return re.sub('[-\s]+', '-', value) class MarkdownDirectory(DirectorySource): kind = 'markdown' def process_event(self, event): lines = event['content'].splitlines() event['title'] = lines[0] event['slug'] = slugify(event['title']) event['content'] = "\n".join(lines[1:]) event['summary'] = event['content'] return event source_registry.add(MarkdownDirectory)
from been.core import DirectorySource, source_registry class MarkdownDirectory(DirectorySource): kind = 'markdown' def process_event(self, event): lines = event['content'].splitlines() event['title'] = lines[0] event['content'] = "\n".join(lines[1:]) event['summary'] = event['content'] return event source_registry.add(MarkdownDirectory) Add slug generation to Markdown source.from been.core import DirectorySource, source_registry import re import unicodedata def slugify(value): value = unicodedata.normalize('NFKD', unicode(value)).encode('ascii', 'ignore') value = unicode(re.sub('[^\w\s-]', '', value).strip().lower()) return re.sub('[-\s]+', '-', value) class MarkdownDirectory(DirectorySource): kind = 'markdown' def process_event(self, event): lines = event['content'].splitlines() event['title'] = lines[0] event['slug'] = slugify(event['title']) event['content'] = "\n".join(lines[1:]) event['summary'] = event['content'] return event source_registry.add(MarkdownDirectory)
<commit_before>from been.core import DirectorySource, source_registry class MarkdownDirectory(DirectorySource): kind = 'markdown' def process_event(self, event): lines = event['content'].splitlines() event['title'] = lines[0] event['content'] = "\n".join(lines[1:]) event['summary'] = event['content'] return event source_registry.add(MarkdownDirectory) <commit_msg>Add slug generation to Markdown source.<commit_after>from been.core import DirectorySource, source_registry import re import unicodedata def slugify(value): value = unicodedata.normalize('NFKD', unicode(value)).encode('ascii', 'ignore') value = unicode(re.sub('[^\w\s-]', '', value).strip().lower()) return re.sub('[-\s]+', '-', value) class MarkdownDirectory(DirectorySource): kind = 'markdown' def process_event(self, event): lines = event['content'].splitlines() event['title'] = lines[0] event['slug'] = slugify(event['title']) event['content'] = "\n".join(lines[1:]) event['summary'] = event['content'] return event source_registry.add(MarkdownDirectory)
54d55ada152338cc038a4249e03ee25c4739c68f
python/sum-of-multiples/sum_of_multiples.py
python/sum-of-multiples/sum_of_multiples.py
def sum_of_multiples(limit, factors): return sum(all_multiples(limit, factors)) def all_multiples(limit, factors): multiples = set() for factor in factors: multiples = multiples.union(get_multiples(limit, factor)) return multiples def get_multiples(limit, factor): if factor == 0: return [] multiples = set() for i in range(0, limit): if i % factor == 0: multiples.add(i) return multiples
def sum_of_multiples(limit, factors): return sum(all_multiples(limit, factors)) def all_multiples(limit, factors): multiples = set() for factor in factors: multiples = multiples.union(get_multiples(limit, factor)) return multiples def get_multiples(limit, factor): if factor == 0: return [] return [multiple for multiple in range(limit) if multiple % factor == 0]
Refactor to use list comprehension
Refactor to use list comprehension
Python
mit
rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism
def sum_of_multiples(limit, factors): return sum(all_multiples(limit, factors)) def all_multiples(limit, factors): multiples = set() for factor in factors: multiples = multiples.union(get_multiples(limit, factor)) return multiples def get_multiples(limit, factor): if factor == 0: return [] multiples = set() for i in range(0, limit): if i % factor == 0: multiples.add(i) return multiples Refactor to use list comprehension
def sum_of_multiples(limit, factors): return sum(all_multiples(limit, factors)) def all_multiples(limit, factors): multiples = set() for factor in factors: multiples = multiples.union(get_multiples(limit, factor)) return multiples def get_multiples(limit, factor): if factor == 0: return [] return [multiple for multiple in range(limit) if multiple % factor == 0]
<commit_before>def sum_of_multiples(limit, factors): return sum(all_multiples(limit, factors)) def all_multiples(limit, factors): multiples = set() for factor in factors: multiples = multiples.union(get_multiples(limit, factor)) return multiples def get_multiples(limit, factor): if factor == 0: return [] multiples = set() for i in range(0, limit): if i % factor == 0: multiples.add(i) return multiples <commit_msg>Refactor to use list comprehension<commit_after>
def sum_of_multiples(limit, factors): return sum(all_multiples(limit, factors)) def all_multiples(limit, factors): multiples = set() for factor in factors: multiples = multiples.union(get_multiples(limit, factor)) return multiples def get_multiples(limit, factor): if factor == 0: return [] return [multiple for multiple in range(limit) if multiple % factor == 0]
def sum_of_multiples(limit, factors): return sum(all_multiples(limit, factors)) def all_multiples(limit, factors): multiples = set() for factor in factors: multiples = multiples.union(get_multiples(limit, factor)) return multiples def get_multiples(limit, factor): if factor == 0: return [] multiples = set() for i in range(0, limit): if i % factor == 0: multiples.add(i) return multiples Refactor to use list comprehensiondef sum_of_multiples(limit, factors): return sum(all_multiples(limit, factors)) def all_multiples(limit, factors): multiples = set() for factor in factors: multiples = multiples.union(get_multiples(limit, factor)) return multiples def get_multiples(limit, factor): if factor == 0: return [] return [multiple for multiple in range(limit) if multiple % factor == 0]
<commit_before>def sum_of_multiples(limit, factors): return sum(all_multiples(limit, factors)) def all_multiples(limit, factors): multiples = set() for factor in factors: multiples = multiples.union(get_multiples(limit, factor)) return multiples def get_multiples(limit, factor): if factor == 0: return [] multiples = set() for i in range(0, limit): if i % factor == 0: multiples.add(i) return multiples <commit_msg>Refactor to use list comprehension<commit_after>def sum_of_multiples(limit, factors): return sum(all_multiples(limit, factors)) def all_multiples(limit, factors): multiples = set() for factor in factors: multiples = multiples.union(get_multiples(limit, factor)) return multiples def get_multiples(limit, factor): if factor == 0: return [] return [multiple for multiple in range(limit) if multiple % factor == 0]
4ca388ca6ef21d8e93de71e783ea5175a223980e
seleniumbase/console_scripts/rich_helper.py
seleniumbase/console_scripts/rich_helper.py
from rich.console import Console from rich.markdown import Markdown from rich.syntax import Syntax def process_syntax(code, lang, theme, line_numbers, code_width, word_wrap): syntax = Syntax( code, lang, theme=theme, line_numbers=line_numbers, code_width=code_width, word_wrap=word_wrap, ) return syntax def display_markdown(code): try: markdown = Markdown(code) console = Console() console.print(markdown) # noqa return True # Success except Exception: return False # Failure def display_code(code): try: console = Console() console.print(code) # noqa return True # Success except Exception: return False # Failure def fix_emoji_spacing(code): try: # Fix the display width of certain emojis that take up two spaces double_width_emojis = [ "🗺️", "🖼️", "🗄️", "⏺️", "♻️", "🗂️", "🖥️", "🕹️", "🎞️", ] for emoji in double_width_emojis: if emoji in code: code = code.replace(emoji, emoji + " ") except Exception: pass return code
from rich.console import Console from rich.markdown import Markdown from rich.syntax import Syntax def process_syntax(code, lang, theme, line_numbers, code_width, word_wrap): syntax = Syntax( code, lang, theme=theme, line_numbers=line_numbers, code_width=code_width, word_wrap=word_wrap, ) return syntax def display_markdown(code): try: markdown = Markdown(code) console = Console() console.print(markdown) # noqa return True # Success except Exception: return False # Failure def display_code(code): try: console = Console() console.print(code) # noqa return True # Success except Exception: return False # Failure def fix_emoji_spacing(code): try: # Fix the display width of certain emojis that take up two spaces double_width_emojis = [ "🗺️", "🖼️", "🗄️", "⏺️", "♻️", "🗂️", "🖥️", "🕹️", "🎞️", "🎛️", "🎖️", "↘️", "⬇️", "↙️", "⬅️", "↖️", "⬆️", "↗️", "➡️", ] for emoji in double_width_emojis: if emoji in code: code = code.replace(emoji, emoji + " ") except Exception: pass return code
Update double-width emoji list to improve "sbase print FILE"
Update double-width emoji list to improve "sbase print FILE"
Python
mit
seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase
from rich.console import Console from rich.markdown import Markdown from rich.syntax import Syntax def process_syntax(code, lang, theme, line_numbers, code_width, word_wrap): syntax = Syntax( code, lang, theme=theme, line_numbers=line_numbers, code_width=code_width, word_wrap=word_wrap, ) return syntax def display_markdown(code): try: markdown = Markdown(code) console = Console() console.print(markdown) # noqa return True # Success except Exception: return False # Failure def display_code(code): try: console = Console() console.print(code) # noqa return True # Success except Exception: return False # Failure def fix_emoji_spacing(code): try: # Fix the display width of certain emojis that take up two spaces double_width_emojis = [ "🗺️", "🖼️", "🗄️", "⏺️", "♻️", "🗂️", "🖥️", "🕹️", "🎞️", ] for emoji in double_width_emojis: if emoji in code: code = code.replace(emoji, emoji + " ") except Exception: pass return code Update double-width emoji list to improve "sbase print FILE"
from rich.console import Console from rich.markdown import Markdown from rich.syntax import Syntax def process_syntax(code, lang, theme, line_numbers, code_width, word_wrap): syntax = Syntax( code, lang, theme=theme, line_numbers=line_numbers, code_width=code_width, word_wrap=word_wrap, ) return syntax def display_markdown(code): try: markdown = Markdown(code) console = Console() console.print(markdown) # noqa return True # Success except Exception: return False # Failure def display_code(code): try: console = Console() console.print(code) # noqa return True # Success except Exception: return False # Failure def fix_emoji_spacing(code): try: # Fix the display width of certain emojis that take up two spaces double_width_emojis = [ "🗺️", "🖼️", "🗄️", "⏺️", "♻️", "🗂️", "🖥️", "🕹️", "🎞️", "🎛️", "🎖️", "↘️", "⬇️", "↙️", "⬅️", "↖️", "⬆️", "↗️", "➡️", ] for emoji in double_width_emojis: if emoji in code: code = code.replace(emoji, emoji + " ") except Exception: pass return code
<commit_before>from rich.console import Console from rich.markdown import Markdown from rich.syntax import Syntax def process_syntax(code, lang, theme, line_numbers, code_width, word_wrap): syntax = Syntax( code, lang, theme=theme, line_numbers=line_numbers, code_width=code_width, word_wrap=word_wrap, ) return syntax def display_markdown(code): try: markdown = Markdown(code) console = Console() console.print(markdown) # noqa return True # Success except Exception: return False # Failure def display_code(code): try: console = Console() console.print(code) # noqa return True # Success except Exception: return False # Failure def fix_emoji_spacing(code): try: # Fix the display width of certain emojis that take up two spaces double_width_emojis = [ "🗺️", "🖼️", "🗄️", "⏺️", "♻️", "🗂️", "🖥️", "🕹️", "🎞️", ] for emoji in double_width_emojis: if emoji in code: code = code.replace(emoji, emoji + " ") except Exception: pass return code <commit_msg>Update double-width emoji list to improve "sbase print FILE"<commit_after>
from rich.console import Console from rich.markdown import Markdown from rich.syntax import Syntax def process_syntax(code, lang, theme, line_numbers, code_width, word_wrap): syntax = Syntax( code, lang, theme=theme, line_numbers=line_numbers, code_width=code_width, word_wrap=word_wrap, ) return syntax def display_markdown(code): try: markdown = Markdown(code) console = Console() console.print(markdown) # noqa return True # Success except Exception: return False # Failure def display_code(code): try: console = Console() console.print(code) # noqa return True # Success except Exception: return False # Failure def fix_emoji_spacing(code): try: # Fix the display width of certain emojis that take up two spaces double_width_emojis = [ "🗺️", "🖼️", "🗄️", "⏺️", "♻️", "🗂️", "🖥️", "🕹️", "🎞️", "🎛️", "🎖️", "↘️", "⬇️", "↙️", "⬅️", "↖️", "⬆️", "↗️", "➡️", ] for emoji in double_width_emojis: if emoji in code: code = code.replace(emoji, emoji + " ") except Exception: pass return code
from rich.console import Console from rich.markdown import Markdown from rich.syntax import Syntax def process_syntax(code, lang, theme, line_numbers, code_width, word_wrap): syntax = Syntax( code, lang, theme=theme, line_numbers=line_numbers, code_width=code_width, word_wrap=word_wrap, ) return syntax def display_markdown(code): try: markdown = Markdown(code) console = Console() console.print(markdown) # noqa return True # Success except Exception: return False # Failure def display_code(code): try: console = Console() console.print(code) # noqa return True # Success except Exception: return False # Failure def fix_emoji_spacing(code): try: # Fix the display width of certain emojis that take up two spaces double_width_emojis = [ "🗺️", "🖼️", "🗄️", "⏺️", "♻️", "🗂️", "🖥️", "🕹️", "🎞️", ] for emoji in double_width_emojis: if emoji in code: code = code.replace(emoji, emoji + " ") except Exception: pass return code Update double-width emoji list to improve "sbase print FILE"from rich.console import Console from rich.markdown import Markdown from rich.syntax import Syntax def process_syntax(code, lang, theme, line_numbers, code_width, word_wrap): syntax = Syntax( code, lang, theme=theme, line_numbers=line_numbers, code_width=code_width, word_wrap=word_wrap, ) return syntax def display_markdown(code): try: markdown = Markdown(code) console = Console() console.print(markdown) # noqa return True # Success except Exception: return False # Failure def display_code(code): try: console = Console() console.print(code) # noqa return True # Success except Exception: return False # Failure def fix_emoji_spacing(code): try: # Fix the display width of certain emojis that take up two spaces double_width_emojis = [ "🗺️", "🖼️", "🗄️", "⏺️", "♻️", "🗂️", "🖥️", "🕹️", "🎞️", "🎛️", "🎖️", "↘️", "⬇️", "↙️", "⬅️", "↖️", "⬆️", "↗️", "➡️", ] for emoji in double_width_emojis: if emoji in code: code = code.replace(emoji, emoji + " ") except Exception: pass return code
<commit_before>from rich.console import Console from rich.markdown import Markdown from rich.syntax import Syntax def process_syntax(code, lang, theme, line_numbers, code_width, word_wrap): syntax = Syntax( code, lang, theme=theme, line_numbers=line_numbers, code_width=code_width, word_wrap=word_wrap, ) return syntax def display_markdown(code): try: markdown = Markdown(code) console = Console() console.print(markdown) # noqa return True # Success except Exception: return False # Failure def display_code(code): try: console = Console() console.print(code) # noqa return True # Success except Exception: return False # Failure def fix_emoji_spacing(code): try: # Fix the display width of certain emojis that take up two spaces double_width_emojis = [ "🗺️", "🖼️", "🗄️", "⏺️", "♻️", "🗂️", "🖥️", "🕹️", "🎞️", ] for emoji in double_width_emojis: if emoji in code: code = code.replace(emoji, emoji + " ") except Exception: pass return code <commit_msg>Update double-width emoji list to improve "sbase print FILE"<commit_after>from rich.console import Console from rich.markdown import Markdown from rich.syntax import Syntax def process_syntax(code, lang, theme, line_numbers, code_width, word_wrap): syntax = Syntax( code, lang, theme=theme, line_numbers=line_numbers, code_width=code_width, word_wrap=word_wrap, ) return syntax def display_markdown(code): try: markdown = Markdown(code) console = Console() console.print(markdown) # noqa return True # Success except Exception: return False # Failure def display_code(code): try: console = Console() console.print(code) # noqa return True # Success except Exception: return False # Failure def fix_emoji_spacing(code): try: # Fix the display width of certain emojis that take up two spaces double_width_emojis = [ "🗺️", "🖼️", "🗄️", "⏺️", "♻️", "🗂️", "🖥️", "🕹️", "🎞️", "🎛️", "🎖️", "↘️", "⬇️", "↙️", "⬅️", "↖️", "⬆️", "↗️", "➡️", ] for emoji in double_width_emojis: if emoji in code: code = code.replace(emoji, emoji + " ") except Exception: pass return code
8de4fc98785f130c9b0fdb7d7f022aa2b0c4f389
uchicagohvz/game/middleware.py
uchicagohvz/game/middleware.py
from django.core.urlresolvers import resolve, reverse, Resolver404 from django.http import HttpResponseRedirect from django.utils import timezone from datetime import datetime class Feb262015Middleware(object): self.target_url = 'feb-26-2015-charlie-hebdo' def process_request(self, request): try: rm = resolve(request.path) if rm.url_name == self.target_url: return None except Resolver404: pass start_dt = datetime(2015, 2, 26, 6, tzinfo=timezone.get_default_timezone()) end_dt = datetime(2015, 2, 26, 23, 59, 59, tzinfo=timezone.get_default_timezone()) if start_dt <= timezone.now() <= end_dt: return HttpResponseRedirect(reverse(self.target_url)) return None
from django.core.urlresolvers import resolve, reverse, Resolver404 from django.http import HttpResponseRedirect from django.utils import timezone from datetime import datetime class Feb262015Middleware(object): target_url = 'feb-26-2015-charlie-hebdo' def process_request(self, request): try: rm = resolve(request.path) if rm.url_name == self.target_url: return None except Resolver404: pass start_dt = datetime(2015, 2, 26, 6, tzinfo=timezone.get_default_timezone()) end_dt = datetime(2015, 2, 26, 23, 59, 59, tzinfo=timezone.get_default_timezone()) if start_dt <= timezone.now() <= end_dt: return HttpResponseRedirect(reverse(self.target_url)) return None
Remove erroneous reference to self
Remove erroneous reference to self
Python
mit
kz26/uchicago-hvz,kz26/uchicago-hvz,kz26/uchicago-hvz
from django.core.urlresolvers import resolve, reverse, Resolver404 from django.http import HttpResponseRedirect from django.utils import timezone from datetime import datetime class Feb262015Middleware(object): self.target_url = 'feb-26-2015-charlie-hebdo' def process_request(self, request): try: rm = resolve(request.path) if rm.url_name == self.target_url: return None except Resolver404: pass start_dt = datetime(2015, 2, 26, 6, tzinfo=timezone.get_default_timezone()) end_dt = datetime(2015, 2, 26, 23, 59, 59, tzinfo=timezone.get_default_timezone()) if start_dt <= timezone.now() <= end_dt: return HttpResponseRedirect(reverse(self.target_url)) return NoneRemove erroneous reference to self
from django.core.urlresolvers import resolve, reverse, Resolver404 from django.http import HttpResponseRedirect from django.utils import timezone from datetime import datetime class Feb262015Middleware(object): target_url = 'feb-26-2015-charlie-hebdo' def process_request(self, request): try: rm = resolve(request.path) if rm.url_name == self.target_url: return None except Resolver404: pass start_dt = datetime(2015, 2, 26, 6, tzinfo=timezone.get_default_timezone()) end_dt = datetime(2015, 2, 26, 23, 59, 59, tzinfo=timezone.get_default_timezone()) if start_dt <= timezone.now() <= end_dt: return HttpResponseRedirect(reverse(self.target_url)) return None
<commit_before>from django.core.urlresolvers import resolve, reverse, Resolver404 from django.http import HttpResponseRedirect from django.utils import timezone from datetime import datetime class Feb262015Middleware(object): self.target_url = 'feb-26-2015-charlie-hebdo' def process_request(self, request): try: rm = resolve(request.path) if rm.url_name == self.target_url: return None except Resolver404: pass start_dt = datetime(2015, 2, 26, 6, tzinfo=timezone.get_default_timezone()) end_dt = datetime(2015, 2, 26, 23, 59, 59, tzinfo=timezone.get_default_timezone()) if start_dt <= timezone.now() <= end_dt: return HttpResponseRedirect(reverse(self.target_url)) return None<commit_msg>Remove erroneous reference to self<commit_after>
from django.core.urlresolvers import resolve, reverse, Resolver404 from django.http import HttpResponseRedirect from django.utils import timezone from datetime import datetime class Feb262015Middleware(object): target_url = 'feb-26-2015-charlie-hebdo' def process_request(self, request): try: rm = resolve(request.path) if rm.url_name == self.target_url: return None except Resolver404: pass start_dt = datetime(2015, 2, 26, 6, tzinfo=timezone.get_default_timezone()) end_dt = datetime(2015, 2, 26, 23, 59, 59, tzinfo=timezone.get_default_timezone()) if start_dt <= timezone.now() <= end_dt: return HttpResponseRedirect(reverse(self.target_url)) return None
from django.core.urlresolvers import resolve, reverse, Resolver404 from django.http import HttpResponseRedirect from django.utils import timezone from datetime import datetime class Feb262015Middleware(object): self.target_url = 'feb-26-2015-charlie-hebdo' def process_request(self, request): try: rm = resolve(request.path) if rm.url_name == self.target_url: return None except Resolver404: pass start_dt = datetime(2015, 2, 26, 6, tzinfo=timezone.get_default_timezone()) end_dt = datetime(2015, 2, 26, 23, 59, 59, tzinfo=timezone.get_default_timezone()) if start_dt <= timezone.now() <= end_dt: return HttpResponseRedirect(reverse(self.target_url)) return NoneRemove erroneous reference to selffrom django.core.urlresolvers import resolve, reverse, Resolver404 from django.http import HttpResponseRedirect from django.utils import timezone from datetime import datetime class Feb262015Middleware(object): target_url = 'feb-26-2015-charlie-hebdo' def process_request(self, request): try: rm = resolve(request.path) if rm.url_name == self.target_url: return None except Resolver404: pass start_dt = datetime(2015, 2, 26, 6, tzinfo=timezone.get_default_timezone()) end_dt = datetime(2015, 2, 26, 23, 59, 59, tzinfo=timezone.get_default_timezone()) if start_dt <= timezone.now() <= end_dt: return HttpResponseRedirect(reverse(self.target_url)) return None
<commit_before>from django.core.urlresolvers import resolve, reverse, Resolver404 from django.http import HttpResponseRedirect from django.utils import timezone from datetime import datetime class Feb262015Middleware(object): self.target_url = 'feb-26-2015-charlie-hebdo' def process_request(self, request): try: rm = resolve(request.path) if rm.url_name == self.target_url: return None except Resolver404: pass start_dt = datetime(2015, 2, 26, 6, tzinfo=timezone.get_default_timezone()) end_dt = datetime(2015, 2, 26, 23, 59, 59, tzinfo=timezone.get_default_timezone()) if start_dt <= timezone.now() <= end_dt: return HttpResponseRedirect(reverse(self.target_url)) return None<commit_msg>Remove erroneous reference to self<commit_after>from django.core.urlresolvers import resolve, reverse, Resolver404 from django.http import HttpResponseRedirect from django.utils import timezone from datetime import datetime class Feb262015Middleware(object): target_url = 'feb-26-2015-charlie-hebdo' def process_request(self, request): try: rm = resolve(request.path) if rm.url_name == self.target_url: return None except Resolver404: pass start_dt = datetime(2015, 2, 26, 6, tzinfo=timezone.get_default_timezone()) end_dt = datetime(2015, 2, 26, 23, 59, 59, tzinfo=timezone.get_default_timezone()) if start_dt <= timezone.now() <= end_dt: return HttpResponseRedirect(reverse(self.target_url)) return None
195138143ed9cb374175710369b2a77089cac593
px/px_pager.py
px/px_pager.py
import os import sys import threading import subprocess from . import px_processinfo if sys.version_info.major >= 3: # For mypy PEP-484 static typing validation from . import px_process # NOQA from typing import List # NOQA def _pump_info_to_fd(fileno, process, processes): try: px_processinfo.print_process_info(fileno, process, processes) os.close(fileno) except Exception: # Ignore exceptions; we can get those if the pager hangs / goes away # unexpectedly, and we really don't care about those. # FIXME: Should we report this to the user? How and where in that case? pass def page_process_info(process, processes): # type: (px_process.PxProcess, List[px_process.PxProcess]) -> None # FIXME: Get a suitable pager + command line options based on the $PAGER variable pager = subprocess.Popen(['moar'], stdin=subprocess.PIPE) pager_stdin = pager.stdin assert pager_stdin is not None # Do this in a thread to avoid problems if the pager hangs / goes away # unexpectedly info_thread = threading.Thread( target=_pump_info_to_fd, args=(pager_stdin.fileno(), process, processes)) info_thread.start() # FIXME: If this returns an error code, what do we do? pager.wait()
import os import sys import threading import subprocess from . import px_processinfo if sys.version_info.major >= 3: # For mypy PEP-484 static typing validation from . import px_process # NOQA from typing import List # NOQA def _pump_info_to_fd(fileno, process, processes): # type: (int, px_process.PxProcess, List[px_process.PxProcess]) -> None try: px_processinfo.print_process_info(fileno, process, processes) os.close(fileno) except Exception: # Ignore exceptions; we can get those if the pager hangs / goes away # unexpectedly, and we really don't care about those. # FIXME: Should we report this to the user? How and where in that case? pass def launch_pager(): # FIXME: Get a suitable pager + command line options based on the $PAGER # variable return subprocess.Popen(['moar'], stdin=subprocess.PIPE) def page_process_info(process, processes): # type: (px_process.PxProcess, List[px_process.PxProcess]) -> None pager = launch_pager() pager_stdin = pager.stdin assert pager_stdin is not None # Do this in a thread to avoid problems if the pager hangs / goes away # unexpectedly info_thread = threading.Thread( target=_pump_info_to_fd, args=(pager_stdin.fileno(), process, processes)) info_thread.start() # FIXME: If this returns an error code, what do we do? pager.wait()
Move pager selection into its own function
Move pager selection into its own function
Python
mit
walles/px,walles/px
import os import sys import threading import subprocess from . import px_processinfo if sys.version_info.major >= 3: # For mypy PEP-484 static typing validation from . import px_process # NOQA from typing import List # NOQA def _pump_info_to_fd(fileno, process, processes): try: px_processinfo.print_process_info(fileno, process, processes) os.close(fileno) except Exception: # Ignore exceptions; we can get those if the pager hangs / goes away # unexpectedly, and we really don't care about those. # FIXME: Should we report this to the user? How and where in that case? pass def page_process_info(process, processes): # type: (px_process.PxProcess, List[px_process.PxProcess]) -> None # FIXME: Get a suitable pager + command line options based on the $PAGER variable pager = subprocess.Popen(['moar'], stdin=subprocess.PIPE) pager_stdin = pager.stdin assert pager_stdin is not None # Do this in a thread to avoid problems if the pager hangs / goes away # unexpectedly info_thread = threading.Thread( target=_pump_info_to_fd, args=(pager_stdin.fileno(), process, processes)) info_thread.start() # FIXME: If this returns an error code, what do we do? pager.wait() Move pager selection into its own function
import os import sys import threading import subprocess from . import px_processinfo if sys.version_info.major >= 3: # For mypy PEP-484 static typing validation from . import px_process # NOQA from typing import List # NOQA def _pump_info_to_fd(fileno, process, processes): # type: (int, px_process.PxProcess, List[px_process.PxProcess]) -> None try: px_processinfo.print_process_info(fileno, process, processes) os.close(fileno) except Exception: # Ignore exceptions; we can get those if the pager hangs / goes away # unexpectedly, and we really don't care about those. # FIXME: Should we report this to the user? How and where in that case? pass def launch_pager(): # FIXME: Get a suitable pager + command line options based on the $PAGER # variable return subprocess.Popen(['moar'], stdin=subprocess.PIPE) def page_process_info(process, processes): # type: (px_process.PxProcess, List[px_process.PxProcess]) -> None pager = launch_pager() pager_stdin = pager.stdin assert pager_stdin is not None # Do this in a thread to avoid problems if the pager hangs / goes away # unexpectedly info_thread = threading.Thread( target=_pump_info_to_fd, args=(pager_stdin.fileno(), process, processes)) info_thread.start() # FIXME: If this returns an error code, what do we do? pager.wait()
<commit_before>import os import sys import threading import subprocess from . import px_processinfo if sys.version_info.major >= 3: # For mypy PEP-484 static typing validation from . import px_process # NOQA from typing import List # NOQA def _pump_info_to_fd(fileno, process, processes): try: px_processinfo.print_process_info(fileno, process, processes) os.close(fileno) except Exception: # Ignore exceptions; we can get those if the pager hangs / goes away # unexpectedly, and we really don't care about those. # FIXME: Should we report this to the user? How and where in that case? pass def page_process_info(process, processes): # type: (px_process.PxProcess, List[px_process.PxProcess]) -> None # FIXME: Get a suitable pager + command line options based on the $PAGER variable pager = subprocess.Popen(['moar'], stdin=subprocess.PIPE) pager_stdin = pager.stdin assert pager_stdin is not None # Do this in a thread to avoid problems if the pager hangs / goes away # unexpectedly info_thread = threading.Thread( target=_pump_info_to_fd, args=(pager_stdin.fileno(), process, processes)) info_thread.start() # FIXME: If this returns an error code, what do we do? pager.wait() <commit_msg>Move pager selection into its own function<commit_after>
import os import sys import threading import subprocess from . import px_processinfo if sys.version_info.major >= 3: # For mypy PEP-484 static typing validation from . import px_process # NOQA from typing import List # NOQA def _pump_info_to_fd(fileno, process, processes): # type: (int, px_process.PxProcess, List[px_process.PxProcess]) -> None try: px_processinfo.print_process_info(fileno, process, processes) os.close(fileno) except Exception: # Ignore exceptions; we can get those if the pager hangs / goes away # unexpectedly, and we really don't care about those. # FIXME: Should we report this to the user? How and where in that case? pass def launch_pager(): # FIXME: Get a suitable pager + command line options based on the $PAGER # variable return subprocess.Popen(['moar'], stdin=subprocess.PIPE) def page_process_info(process, processes): # type: (px_process.PxProcess, List[px_process.PxProcess]) -> None pager = launch_pager() pager_stdin = pager.stdin assert pager_stdin is not None # Do this in a thread to avoid problems if the pager hangs / goes away # unexpectedly info_thread = threading.Thread( target=_pump_info_to_fd, args=(pager_stdin.fileno(), process, processes)) info_thread.start() # FIXME: If this returns an error code, what do we do? pager.wait()
import os import sys import threading import subprocess from . import px_processinfo if sys.version_info.major >= 3: # For mypy PEP-484 static typing validation from . import px_process # NOQA from typing import List # NOQA def _pump_info_to_fd(fileno, process, processes): try: px_processinfo.print_process_info(fileno, process, processes) os.close(fileno) except Exception: # Ignore exceptions; we can get those if the pager hangs / goes away # unexpectedly, and we really don't care about those. # FIXME: Should we report this to the user? How and where in that case? pass def page_process_info(process, processes): # type: (px_process.PxProcess, List[px_process.PxProcess]) -> None # FIXME: Get a suitable pager + command line options based on the $PAGER variable pager = subprocess.Popen(['moar'], stdin=subprocess.PIPE) pager_stdin = pager.stdin assert pager_stdin is not None # Do this in a thread to avoid problems if the pager hangs / goes away # unexpectedly info_thread = threading.Thread( target=_pump_info_to_fd, args=(pager_stdin.fileno(), process, processes)) info_thread.start() # FIXME: If this returns an error code, what do we do? pager.wait() Move pager selection into its own functionimport os import sys import threading import subprocess from . import px_processinfo if sys.version_info.major >= 3: # For mypy PEP-484 static typing validation from . import px_process # NOQA from typing import List # NOQA def _pump_info_to_fd(fileno, process, processes): # type: (int, px_process.PxProcess, List[px_process.PxProcess]) -> None try: px_processinfo.print_process_info(fileno, process, processes) os.close(fileno) except Exception: # Ignore exceptions; we can get those if the pager hangs / goes away # unexpectedly, and we really don't care about those. # FIXME: Should we report this to the user? How and where in that case? pass def launch_pager(): # FIXME: Get a suitable pager + command line options based on the $PAGER # variable return subprocess.Popen(['moar'], stdin=subprocess.PIPE) def page_process_info(process, processes): # type: (px_process.PxProcess, List[px_process.PxProcess]) -> None pager = launch_pager() pager_stdin = pager.stdin assert pager_stdin is not None # Do this in a thread to avoid problems if the pager hangs / goes away # unexpectedly info_thread = threading.Thread( target=_pump_info_to_fd, args=(pager_stdin.fileno(), process, processes)) info_thread.start() # FIXME: If this returns an error code, what do we do? pager.wait()
<commit_before>import os import sys import threading import subprocess from . import px_processinfo if sys.version_info.major >= 3: # For mypy PEP-484 static typing validation from . import px_process # NOQA from typing import List # NOQA def _pump_info_to_fd(fileno, process, processes): try: px_processinfo.print_process_info(fileno, process, processes) os.close(fileno) except Exception: # Ignore exceptions; we can get those if the pager hangs / goes away # unexpectedly, and we really don't care about those. # FIXME: Should we report this to the user? How and where in that case? pass def page_process_info(process, processes): # type: (px_process.PxProcess, List[px_process.PxProcess]) -> None # FIXME: Get a suitable pager + command line options based on the $PAGER variable pager = subprocess.Popen(['moar'], stdin=subprocess.PIPE) pager_stdin = pager.stdin assert pager_stdin is not None # Do this in a thread to avoid problems if the pager hangs / goes away # unexpectedly info_thread = threading.Thread( target=_pump_info_to_fd, args=(pager_stdin.fileno(), process, processes)) info_thread.start() # FIXME: If this returns an error code, what do we do? pager.wait() <commit_msg>Move pager selection into its own function<commit_after>import os import sys import threading import subprocess from . import px_processinfo if sys.version_info.major >= 3: # For mypy PEP-484 static typing validation from . import px_process # NOQA from typing import List # NOQA def _pump_info_to_fd(fileno, process, processes): # type: (int, px_process.PxProcess, List[px_process.PxProcess]) -> None try: px_processinfo.print_process_info(fileno, process, processes) os.close(fileno) except Exception: # Ignore exceptions; we can get those if the pager hangs / goes away # unexpectedly, and we really don't care about those. # FIXME: Should we report this to the user? How and where in that case? pass def launch_pager(): # FIXME: Get a suitable pager + command line options based on the $PAGER # variable return subprocess.Popen(['moar'], stdin=subprocess.PIPE) def page_process_info(process, processes): # type: (px_process.PxProcess, List[px_process.PxProcess]) -> None pager = launch_pager() pager_stdin = pager.stdin assert pager_stdin is not None # Do this in a thread to avoid problems if the pager hangs / goes away # unexpectedly info_thread = threading.Thread( target=_pump_info_to_fd, args=(pager_stdin.fileno(), process, processes)) info_thread.start() # FIXME: If this returns an error code, what do we do? pager.wait()
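The px record above leaves a FIXME in launch_pager() about honoring the $PAGER environment variable. As a minimal sketch only, and not part of the recorded commit, the helper could be extended like this, assuming shlex-style splitting of $PAGER and keeping 'moar' as the fallback:

import os
import shlex
import subprocess


def launch_pager():
    # Hypothetical variant: honor $PAGER when it is set, fall back to 'moar'.
    # shlex.split() copes with pagers configured with options, e.g. "less -R".
    command = shlex.split(os.environ.get('PAGER', '').strip() or 'moar')
    return subprocess.Popen(command, stdin=subprocess.PIPE)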
63fbbd36bc9123fc8fe9ed41481dd36373959549
Helper/Helper/Helper.py
Helper/Helper/Helper.py
def main(): print ("Hello world!") if __name__ == '__main__': main()
def main(): try: fileName = "MengZi_Traditional.md" filePath = "../../source/" + fileName with open(filePath, 'r') as file: for line in file: print line except IOError: print ("The file (" + filePath + ") does not exist.") if __name__ == '__main__': main()
Read a file from Python
Read a file from Python
Python
mit
fan-jiang/Dujing
def main(): print ("Hello world!") if __name__ == '__main__': main()Read a file from Python
def main(): try: fileName = "MengZi_Traditional.md" filePath = "../../source/" + fileName with open(filePath, 'r') as file: for line in file: print line except IOError: print ("The file (" + filePath + ") does not exist.") if __name__ == '__main__': main()
<commit_before>def main(): print ("Hello world!") if __name__ == '__main__': main()<commit_msg>Read a file from Python<commit_after>
def main(): try: fileName = "MengZi_Traditional.md" filePath = "../../source/" + fileName with open(filePath, 'r') as file: for line in file: print line except IOError: print ("The file (" + filePath + ") does not exist.") if __name__ == '__main__': main()
def main(): print ("Hello world!") if __name__ == '__main__': main()Read a file from Pythondef main(): try: fileName = "MengZi_Traditional.md" filePath = "../../source/" + fileName with open(filePath, 'r') as file: for line in file: print line except IOError: print ("The file (" + filePath + ") does not exist.") if __name__ == '__main__': main()
<commit_before>def main(): print ("Hello world!") if __name__ == '__main__': main()<commit_msg>Read a file from Python<commit_after>def main(): try: fileName = "MengZi_Traditional.md" filePath = "../../source/" + fileName with open(filePath, 'r') as file: for line in file: print line except IOError: print ("The file (" + filePath + ") does not exist.") if __name__ == '__main__': main()
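The new_contents in the record above are Python 2 (the bare print line statement). For comparison, a minimal Python 3 sketch of the same logic, reusing the record's MengZi_Traditional.md path; the encoding choice is an assumption:

import os


def main():
    file_path = os.path.join("..", "..", "source", "MengZi_Traditional.md")
    try:
        with open(file_path, encoding="utf-8") as file:
            for line in file:
                print(line, end="")  # each line already carries its newline
    except IOError:  # alias of OSError in Python 3
        print("The file (" + file_path + ") does not exist.")


if __name__ == '__main__':
    main()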
eac89e401d64079f4a3ef05ce7078cbefea271df
tests/system/shared/mainwin.py
tests/system/shared/mainwin.py
def invokeMenuItem(menu, item): menuObject = waitForObjectItem("{type='QMenuBar' visible='true'}", menu) activateItem(menuObject) activateItem(waitForObjectItem(menuObject, item)) def openQmakeProject(projectPath): invokeMenuItem("File", "Open File or Project...") waitForObject("{name='QFileDialog' type='QFileDialog' visible='1' windowTitle='Open File'}") type(findObject("{name='fileNameEdit' type='QLineEdit'}"), projectPath) clickButton(findObject("{text='Open' type='QPushButton'}")) waitForObject("{type='Qt4ProjectManager::Internal::ProjectLoadWizard' visible='1' windowTitle='Project Setup'}") selectFromCombo(":scrollArea.Create Build Configurations:_QComboBox", "per Qt Version a Debug and Release") clickButton(findObject("{text='Finish' type='QPushButton'}")) def openCmakeProject(projectPath): invokeMenuItem("File", "Open File or Project...") waitForObject("{name='QFileDialog' type='QFileDialog' visible='1' windowTitle='Open File'}") type(findObject("{name='fileNameEdit' type='QLineEdit'}"), projectPath) clickButton(findObject("{text='Open' type='QPushButton'}")) clickButton(waitForObject(":CMake Wizard.Next_QPushButton", 20000)) clickButton(waitForObject(":CMake Wizard.Run CMake_QPushButton", 20000)) clickButton(waitForObject(":CMake Wizard.Finish_QPushButton", 60000))
def invokeMenuItem(menu, item): menuObject = waitForObjectItem("{type='QMenuBar' visible='true'}", menu) activateItem(menuObject) activateItem(waitForObjectItem(menuObject, item)) def openQmakeProject(projectPath): invokeMenuItem("File", "Open File or Project...") waitForObject("{name='QFileDialog' type='QFileDialog' visible='1' windowTitle='Open File'}") type(findObject("{name='fileNameEdit' type='QLineEdit'}"), projectPath) clickButton(findObject("{text='Open' type='QPushButton'}")) waitForObject("{type='Qt4ProjectManager::Internal::ProjectLoadWizard' visible='1' windowTitle='Project Setup'}") selectFromCombo(":scrollArea.Create Build Configurations:_QComboBox", "For Each Qt Version One Debug And One Release") clickButton(findObject("{text='Finish' type='QPushButton'}")) def openCmakeProject(projectPath): invokeMenuItem("File", "Open File or Project...") waitForObject("{name='QFileDialog' type='QFileDialog' visible='1' windowTitle='Open File'}") type(findObject("{name='fileNameEdit' type='QLineEdit'}"), projectPath) clickButton(findObject("{text='Open' type='QPushButton'}")) clickButton(waitForObject(":CMake Wizard.Next_QPushButton", 20000)) clickButton(waitForObject(":CMake Wizard.Run CMake_QPushButton", 20000)) clickButton(waitForObject(":CMake Wizard.Finish_QPushButton", 60000))
Fix openQmakeProject to match new combo value.
Fix openQmakeProject to match new combo value. Change-Id: Ice0050bf1bb7af59eb57c1e9218d96b3114f4c08 Reviewed-on: http://codereview.qt.nokia.com/4129 Reviewed-by: Qt Sanity Bot <5581206bb7e0307f0d99eb71898ae6b694149ca5@ovi.com> Reviewed-by: Christian Stenger <accbb51712d7b9c4fb108439d01e716148e5f9e6@nokia.com>
Python
lgpl-2.1
omniacreator/qtcreator,xianian/qt-creator,bakaiadam/collaborative_qt_creator,amyvmiwei/qt-creator,KDAB/KDAB-Creator,maui-packages/qt-creator,KDE/android-qt-creator,xianian/qt-creator,colede/qtcreator,darksylinc/qt-creator,Distrotech/qtcreator,azat/qtcreator,xianian/qt-creator,malikcjm/qtcreator,kuba1/qtcreator,kuba1/qtcreator,colede/qtcreator,KDAB/KDAB-Creator,xianian/qt-creator,xianian/qt-creator,jonnor/qt-creator,amyvmiwei/qt-creator,ostash/qt-creator-i18n-uk,duythanhphan/qt-creator,danimo/qt-creator,jonnor/qt-creator,malikcjm/qtcreator,martyone/sailfish-qtcreator,martyone/sailfish-qtcreator,danimo/qt-creator,martyone/sailfish-qtcreator,xianian/qt-creator,farseerri/git_code,omniacreator/qtcreator,bakaiadam/collaborative_qt_creator,Distrotech/qtcreator,colede/qtcreator,martyone/sailfish-qtcreator,martyone/sailfish-qtcreator,AltarBeastiful/qt-creator,martyone/sailfish-qtcreator,darksylinc/qt-creator,hdweiss/qt-creator-visualizer,azat/qtcreator,KDE/android-qt-creator,KDAB/KDAB-Creator,maui-packages/qt-creator,richardmg/qtcreator,hdweiss/qt-creator-visualizer,amyvmiwei/qt-creator,AltarBeastiful/qt-creator,ostash/qt-creator-i18n-uk,duythanhphan/qt-creator,Distrotech/qtcreator,bakaiadam/collaborative_qt_creator,farseerri/git_code,kuba1/qtcreator,ostash/qt-creator-i18n-uk,martyone/sailfish-qtcreator,malikcjm/qtcreator,omniacreator/qtcreator,azat/qtcreator,richardmg/qtcreator,malikcjm/qtcreator,bakaiadam/collaborative_qt_creator,omniacreator/qtcreator,syntheticpp/qt-creator,richardmg/qtcreator,bakaiadam/collaborative_qt_creator,jonnor/qt-creator,KDAB/KDAB-Creator,malikcjm/qtcreator,danimo/qt-creator,martyone/sailfish-qtcreator,KDE/android-qt-creator,syntheticpp/qt-creator,darksylinc/qt-creator,richardmg/qtcreator,danimo/qt-creator,amyvmiwei/qt-creator,maui-packages/qt-creator,ostash/qt-creator-i18n-uk,omniacreator/qtcreator,danimo/qt-creator,maui-packages/qt-creator,duythanhphan/qt-creator,syntheticpp/qt-creator,martyone/sailfish-qtcreator,KDE/android-qt-creator,maui-packages/qt-creator,KDE/android-qt-creator,azat/qtcreator,AltarBeastiful/qt-creator,AltarBeastiful/qt-creator,farseerri/git_code,KDAB/KDAB-Creator,xianian/qt-creator,duythanhphan/qt-creator,kuba1/qtcreator,darksylinc/qt-creator,omniacreator/qtcreator,bakaiadam/collaborative_qt_creator,hdweiss/qt-creator-visualizer,syntheticpp/qt-creator,danimo/qt-creator,colede/qtcreator,richardmg/qtcreator,darksylinc/qt-creator,KDE/android-qt-creator,duythanhphan/qt-creator,danimo/qt-creator,darksylinc/qt-creator,jonnor/qt-creator,ostash/qt-creator-i18n-uk,ostash/qt-creator-i18n-uk,AltarBeastiful/qt-creator,Distrotech/qtcreator,colede/qtcreator,darksylinc/qt-creator,hdweiss/qt-creator-visualizer,jonnor/qt-creator,jonnor/qt-creator,kuba1/qtcreator,farseerri/git_code,KDE/android-qt-creator,Distrotech/qtcreator,xianian/qt-creator,xianian/qt-creator,danimo/qt-creator,kuba1/qtcreator,duythanhphan/qt-creator,farseerri/git_code,KDE/android-qt-creator,azat/qtcreator,hdweiss/qt-creator-visualizer,azat/qtcreator,syntheticpp/qt-creator,malikcjm/qtcreator,farseerri/git_code,amyvmiwei/qt-creator,kuba1/qtcreator,farseerri/git_code,maui-packages/qt-creator,amyvmiwei/qt-creator,malikcjm/qtcreator,KDAB/KDAB-Creator,syntheticpp/qt-creator,amyvmiwei/qt-creator,duythanhphan/qt-creator,Distrotech/qtcreator,ostash/qt-creator-i18n-uk,colede/qtcreator,kuba1/qtcreator,AltarBeastiful/qt-creator,maui-packages/qt-creator,colede/qtcreator,Distrotech/qtcreator
def invokeMenuItem(menu, item): menuObject = waitForObjectItem("{type='QMenuBar' visible='true'}", menu) activateItem(menuObject) activateItem(waitForObjectItem(menuObject, item)) def openQmakeProject(projectPath): invokeMenuItem("File", "Open File or Project...") waitForObject("{name='QFileDialog' type='QFileDialog' visible='1' windowTitle='Open File'}") type(findObject("{name='fileNameEdit' type='QLineEdit'}"), projectPath) clickButton(findObject("{text='Open' type='QPushButton'}")) waitForObject("{type='Qt4ProjectManager::Internal::ProjectLoadWizard' visible='1' windowTitle='Project Setup'}") selectFromCombo(":scrollArea.Create Build Configurations:_QComboBox", "per Qt Version a Debug and Release") clickButton(findObject("{text='Finish' type='QPushButton'}")) def openCmakeProject(projectPath): invokeMenuItem("File", "Open File or Project...") waitForObject("{name='QFileDialog' type='QFileDialog' visible='1' windowTitle='Open File'}") type(findObject("{name='fileNameEdit' type='QLineEdit'}"), projectPath) clickButton(findObject("{text='Open' type='QPushButton'}")) clickButton(waitForObject(":CMake Wizard.Next_QPushButton", 20000)) clickButton(waitForObject(":CMake Wizard.Run CMake_QPushButton", 20000)) clickButton(waitForObject(":CMake Wizard.Finish_QPushButton", 60000)) Fix openQmakeProject to match new combo value. Change-Id: Ice0050bf1bb7af59eb57c1e9218d96b3114f4c08 Reviewed-on: http://codereview.qt.nokia.com/4129 Reviewed-by: Qt Sanity Bot <5581206bb7e0307f0d99eb71898ae6b694149ca5@ovi.com> Reviewed-by: Christian Stenger <accbb51712d7b9c4fb108439d01e716148e5f9e6@nokia.com>
def invokeMenuItem(menu, item): menuObject = waitForObjectItem("{type='QMenuBar' visible='true'}", menu) activateItem(menuObject) activateItem(waitForObjectItem(menuObject, item)) def openQmakeProject(projectPath): invokeMenuItem("File", "Open File or Project...") waitForObject("{name='QFileDialog' type='QFileDialog' visible='1' windowTitle='Open File'}") type(findObject("{name='fileNameEdit' type='QLineEdit'}"), projectPath) clickButton(findObject("{text='Open' type='QPushButton'}")) waitForObject("{type='Qt4ProjectManager::Internal::ProjectLoadWizard' visible='1' windowTitle='Project Setup'}") selectFromCombo(":scrollArea.Create Build Configurations:_QComboBox", "For Each Qt Version One Debug And One Release") clickButton(findObject("{text='Finish' type='QPushButton'}")) def openCmakeProject(projectPath): invokeMenuItem("File", "Open File or Project...") waitForObject("{name='QFileDialog' type='QFileDialog' visible='1' windowTitle='Open File'}") type(findObject("{name='fileNameEdit' type='QLineEdit'}"), projectPath) clickButton(findObject("{text='Open' type='QPushButton'}")) clickButton(waitForObject(":CMake Wizard.Next_QPushButton", 20000)) clickButton(waitForObject(":CMake Wizard.Run CMake_QPushButton", 20000)) clickButton(waitForObject(":CMake Wizard.Finish_QPushButton", 60000))
<commit_before> def invokeMenuItem(menu, item): menuObject = waitForObjectItem("{type='QMenuBar' visible='true'}", menu) activateItem(menuObject) activateItem(waitForObjectItem(menuObject, item)) def openQmakeProject(projectPath): invokeMenuItem("File", "Open File or Project...") waitForObject("{name='QFileDialog' type='QFileDialog' visible='1' windowTitle='Open File'}") type(findObject("{name='fileNameEdit' type='QLineEdit'}"), projectPath) clickButton(findObject("{text='Open' type='QPushButton'}")) waitForObject("{type='Qt4ProjectManager::Internal::ProjectLoadWizard' visible='1' windowTitle='Project Setup'}") selectFromCombo(":scrollArea.Create Build Configurations:_QComboBox", "per Qt Version a Debug and Release") clickButton(findObject("{text='Finish' type='QPushButton'}")) def openCmakeProject(projectPath): invokeMenuItem("File", "Open File or Project...") waitForObject("{name='QFileDialog' type='QFileDialog' visible='1' windowTitle='Open File'}") type(findObject("{name='fileNameEdit' type='QLineEdit'}"), projectPath) clickButton(findObject("{text='Open' type='QPushButton'}")) clickButton(waitForObject(":CMake Wizard.Next_QPushButton", 20000)) clickButton(waitForObject(":CMake Wizard.Run CMake_QPushButton", 20000)) clickButton(waitForObject(":CMake Wizard.Finish_QPushButton", 60000)) <commit_msg>Fix openQmakeProject to match new combo value. Change-Id: Ice0050bf1bb7af59eb57c1e9218d96b3114f4c08 Reviewed-on: http://codereview.qt.nokia.com/4129 Reviewed-by: Qt Sanity Bot <5581206bb7e0307f0d99eb71898ae6b694149ca5@ovi.com> Reviewed-by: Christian Stenger <accbb51712d7b9c4fb108439d01e716148e5f9e6@nokia.com><commit_after>
def invokeMenuItem(menu, item): menuObject = waitForObjectItem("{type='QMenuBar' visible='true'}", menu) activateItem(menuObject) activateItem(waitForObjectItem(menuObject, item)) def openQmakeProject(projectPath): invokeMenuItem("File", "Open File or Project...") waitForObject("{name='QFileDialog' type='QFileDialog' visible='1' windowTitle='Open File'}") type(findObject("{name='fileNameEdit' type='QLineEdit'}"), projectPath) clickButton(findObject("{text='Open' type='QPushButton'}")) waitForObject("{type='Qt4ProjectManager::Internal::ProjectLoadWizard' visible='1' windowTitle='Project Setup'}") selectFromCombo(":scrollArea.Create Build Configurations:_QComboBox", "For Each Qt Version One Debug And One Release") clickButton(findObject("{text='Finish' type='QPushButton'}")) def openCmakeProject(projectPath): invokeMenuItem("File", "Open File or Project...") waitForObject("{name='QFileDialog' type='QFileDialog' visible='1' windowTitle='Open File'}") type(findObject("{name='fileNameEdit' type='QLineEdit'}"), projectPath) clickButton(findObject("{text='Open' type='QPushButton'}")) clickButton(waitForObject(":CMake Wizard.Next_QPushButton", 20000)) clickButton(waitForObject(":CMake Wizard.Run CMake_QPushButton", 20000)) clickButton(waitForObject(":CMake Wizard.Finish_QPushButton", 60000))
def invokeMenuItem(menu, item): menuObject = waitForObjectItem("{type='QMenuBar' visible='true'}", menu) activateItem(menuObject) activateItem(waitForObjectItem(menuObject, item)) def openQmakeProject(projectPath): invokeMenuItem("File", "Open File or Project...") waitForObject("{name='QFileDialog' type='QFileDialog' visible='1' windowTitle='Open File'}") type(findObject("{name='fileNameEdit' type='QLineEdit'}"), projectPath) clickButton(findObject("{text='Open' type='QPushButton'}")) waitForObject("{type='Qt4ProjectManager::Internal::ProjectLoadWizard' visible='1' windowTitle='Project Setup'}") selectFromCombo(":scrollArea.Create Build Configurations:_QComboBox", "per Qt Version a Debug and Release") clickButton(findObject("{text='Finish' type='QPushButton'}")) def openCmakeProject(projectPath): invokeMenuItem("File", "Open File or Project...") waitForObject("{name='QFileDialog' type='QFileDialog' visible='1' windowTitle='Open File'}") type(findObject("{name='fileNameEdit' type='QLineEdit'}"), projectPath) clickButton(findObject("{text='Open' type='QPushButton'}")) clickButton(waitForObject(":CMake Wizard.Next_QPushButton", 20000)) clickButton(waitForObject(":CMake Wizard.Run CMake_QPushButton", 20000)) clickButton(waitForObject(":CMake Wizard.Finish_QPushButton", 60000)) Fix openQmakeProject to match new combo value. Change-Id: Ice0050bf1bb7af59eb57c1e9218d96b3114f4c08 Reviewed-on: http://codereview.qt.nokia.com/4129 Reviewed-by: Qt Sanity Bot <5581206bb7e0307f0d99eb71898ae6b694149ca5@ovi.com> Reviewed-by: Christian Stenger <accbb51712d7b9c4fb108439d01e716148e5f9e6@nokia.com> def invokeMenuItem(menu, item): menuObject = waitForObjectItem("{type='QMenuBar' visible='true'}", menu) activateItem(menuObject) activateItem(waitForObjectItem(menuObject, item)) def openQmakeProject(projectPath): invokeMenuItem("File", "Open File or Project...") waitForObject("{name='QFileDialog' type='QFileDialog' visible='1' windowTitle='Open File'}") type(findObject("{name='fileNameEdit' type='QLineEdit'}"), projectPath) clickButton(findObject("{text='Open' type='QPushButton'}")) waitForObject("{type='Qt4ProjectManager::Internal::ProjectLoadWizard' visible='1' windowTitle='Project Setup'}") selectFromCombo(":scrollArea.Create Build Configurations:_QComboBox", "For Each Qt Version One Debug And One Release") clickButton(findObject("{text='Finish' type='QPushButton'}")) def openCmakeProject(projectPath): invokeMenuItem("File", "Open File or Project...") waitForObject("{name='QFileDialog' type='QFileDialog' visible='1' windowTitle='Open File'}") type(findObject("{name='fileNameEdit' type='QLineEdit'}"), projectPath) clickButton(findObject("{text='Open' type='QPushButton'}")) clickButton(waitForObject(":CMake Wizard.Next_QPushButton", 20000)) clickButton(waitForObject(":CMake Wizard.Run CMake_QPushButton", 20000)) clickButton(waitForObject(":CMake Wizard.Finish_QPushButton", 60000))
<commit_before> def invokeMenuItem(menu, item): menuObject = waitForObjectItem("{type='QMenuBar' visible='true'}", menu) activateItem(menuObject) activateItem(waitForObjectItem(menuObject, item)) def openQmakeProject(projectPath): invokeMenuItem("File", "Open File or Project...") waitForObject("{name='QFileDialog' type='QFileDialog' visible='1' windowTitle='Open File'}") type(findObject("{name='fileNameEdit' type='QLineEdit'}"), projectPath) clickButton(findObject("{text='Open' type='QPushButton'}")) waitForObject("{type='Qt4ProjectManager::Internal::ProjectLoadWizard' visible='1' windowTitle='Project Setup'}") selectFromCombo(":scrollArea.Create Build Configurations:_QComboBox", "per Qt Version a Debug and Release") clickButton(findObject("{text='Finish' type='QPushButton'}")) def openCmakeProject(projectPath): invokeMenuItem("File", "Open File or Project...") waitForObject("{name='QFileDialog' type='QFileDialog' visible='1' windowTitle='Open File'}") type(findObject("{name='fileNameEdit' type='QLineEdit'}"), projectPath) clickButton(findObject("{text='Open' type='QPushButton'}")) clickButton(waitForObject(":CMake Wizard.Next_QPushButton", 20000)) clickButton(waitForObject(":CMake Wizard.Run CMake_QPushButton", 20000)) clickButton(waitForObject(":CMake Wizard.Finish_QPushButton", 60000)) <commit_msg>Fix openQmakeProject to match new combo value. Change-Id: Ice0050bf1bb7af59eb57c1e9218d96b3114f4c08 Reviewed-on: http://codereview.qt.nokia.com/4129 Reviewed-by: Qt Sanity Bot <5581206bb7e0307f0d99eb71898ae6b694149ca5@ovi.com> Reviewed-by: Christian Stenger <accbb51712d7b9c4fb108439d01e716148e5f9e6@nokia.com><commit_after> def invokeMenuItem(menu, item): menuObject = waitForObjectItem("{type='QMenuBar' visible='true'}", menu) activateItem(menuObject) activateItem(waitForObjectItem(menuObject, item)) def openQmakeProject(projectPath): invokeMenuItem("File", "Open File or Project...") waitForObject("{name='QFileDialog' type='QFileDialog' visible='1' windowTitle='Open File'}") type(findObject("{name='fileNameEdit' type='QLineEdit'}"), projectPath) clickButton(findObject("{text='Open' type='QPushButton'}")) waitForObject("{type='Qt4ProjectManager::Internal::ProjectLoadWizard' visible='1' windowTitle='Project Setup'}") selectFromCombo(":scrollArea.Create Build Configurations:_QComboBox", "For Each Qt Version One Debug And One Release") clickButton(findObject("{text='Finish' type='QPushButton'}")) def openCmakeProject(projectPath): invokeMenuItem("File", "Open File or Project...") waitForObject("{name='QFileDialog' type='QFileDialog' visible='1' windowTitle='Open File'}") type(findObject("{name='fileNameEdit' type='QLineEdit'}"), projectPath) clickButton(findObject("{text='Open' type='QPushButton'}")) clickButton(waitForObject(":CMake Wizard.Next_QPushButton", 20000)) clickButton(waitForObject(":CMake Wizard.Run CMake_QPushButton", 20000)) clickButton(waitForObject(":CMake Wizard.Finish_QPushButton", 60000))
07bec7db879aee92316570770316857417636207
addons/hr_payroll_account/wizard/hr_payroll_payslips_by_employees.py
addons/hr_payroll_account/wizard/hr_payroll_payslips_by_employees.py
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import api, models class HrPayslipEmployees(models.TransientModel): _inherit = 'hr.payslip.employees' @api.multi def compute_sheet(self): journal_id = False if self.env.context.get('active_id'): journal_id = self.env['hr.payslip.run'].browse(self.env.context.get('active_id')).journal_id.id return super(HrPayslipEmployees, self.with_context(journal_id=journal_id)).compute_sheet()
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import api, models class HrPayslipEmployees(models.TransientModel): _inherit = 'hr.payslip.employees' @api.multi def compute_sheet(self): if self.env.context.get('active_id'): journal_id = self.env['hr.payslip.run'].browse(self.env.context.get('active_id')).journal_id.id return super(HrPayslipEmployees, self.with_context(journal_id=journal_id)).compute_sheet() return super(HrPayslipEmployees, self).compute_sheet()
Remove journal_id: False in context
[FIX] hr_payroll_account: Remove journal_id: False in context If the wizard to generate payslip was launched without an ´active_id´ in the context. A ´journal_id´ entry was set to False in the context later leading to a crash at payslip creation. The crash happens because, at creation time, the journal_id of a ´hr.payslip´ is set to the journal_id in the context if its present. Since this context entry is False and the journal_id field is required, creation crashes.
Python
agpl-3.0
ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import api, models class HrPayslipEmployees(models.TransientModel): _inherit = 'hr.payslip.employees' @api.multi def compute_sheet(self): journal_id = False if self.env.context.get('active_id'): journal_id = self.env['hr.payslip.run'].browse(self.env.context.get('active_id')).journal_id.id return super(HrPayslipEmployees, self.with_context(journal_id=journal_id)).compute_sheet() [FIX] hr_payroll_account: Remove journal_id: False in context If the wizard to generate payslip was launched without an ´active_id´ in the context. A ´journal_id´ entry was set to False in the context later leading to a crash at payslip creation. The crash happens because, at creation time, the journal_id of a ´hr.payslip´ is set to the journal_id in the context if its present. Since this context entry is False and the journal_id field is required, creation crashes.
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import api, models class HrPayslipEmployees(models.TransientModel): _inherit = 'hr.payslip.employees' @api.multi def compute_sheet(self): if self.env.context.get('active_id'): journal_id = self.env['hr.payslip.run'].browse(self.env.context.get('active_id')).journal_id.id return super(HrPayslipEmployees, self.with_context(journal_id=journal_id)).compute_sheet() return super(HrPayslipEmployees, self).compute_sheet()
<commit_before># -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import api, models class HrPayslipEmployees(models.TransientModel): _inherit = 'hr.payslip.employees' @api.multi def compute_sheet(self): journal_id = False if self.env.context.get('active_id'): journal_id = self.env['hr.payslip.run'].browse(self.env.context.get('active_id')).journal_id.id return super(HrPayslipEmployees, self.with_context(journal_id=journal_id)).compute_sheet() <commit_msg>[FIX] hr_payroll_account: Remove journal_id: False in context If the wizard to generate payslip was launched without an ´active_id´ in the context. A ´journal_id´ entry was set to False in the context later leading to a crash at payslip creation. The crash happens because, at creation time, the journal_id of a ´hr.payslip´ is set to the journal_id in the context if its present. Since this context entry is False and the journal_id field is required, creation crashes.<commit_after>
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import api, models class HrPayslipEmployees(models.TransientModel): _inherit = 'hr.payslip.employees' @api.multi def compute_sheet(self): if self.env.context.get('active_id'): journal_id = self.env['hr.payslip.run'].browse(self.env.context.get('active_id')).journal_id.id return super(HrPayslipEmployees, self.with_context(journal_id=journal_id)).compute_sheet() return super(HrPayslipEmployees, self).compute_sheet()
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import api, models class HrPayslipEmployees(models.TransientModel): _inherit = 'hr.payslip.employees' @api.multi def compute_sheet(self): journal_id = False if self.env.context.get('active_id'): journal_id = self.env['hr.payslip.run'].browse(self.env.context.get('active_id')).journal_id.id return super(HrPayslipEmployees, self.with_context(journal_id=journal_id)).compute_sheet() [FIX] hr_payroll_account: Remove journal_id: False in context If the wizard to generate payslip was launched without an ´active_id´ in the context. A ´journal_id´ entry was set to False in the context later leading to a crash at payslip creation. The crash happens because, at creation time, the journal_id of a ´hr.payslip´ is set to the journal_id in the context if its present. Since this context entry is False and the journal_id field is required, creation crashes.# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import api, models class HrPayslipEmployees(models.TransientModel): _inherit = 'hr.payslip.employees' @api.multi def compute_sheet(self): if self.env.context.get('active_id'): journal_id = self.env['hr.payslip.run'].browse(self.env.context.get('active_id')).journal_id.id return super(HrPayslipEmployees, self.with_context(journal_id=journal_id)).compute_sheet() return super(HrPayslipEmployees, self).compute_sheet()
<commit_before># -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import api, models class HrPayslipEmployees(models.TransientModel): _inherit = 'hr.payslip.employees' @api.multi def compute_sheet(self): journal_id = False if self.env.context.get('active_id'): journal_id = self.env['hr.payslip.run'].browse(self.env.context.get('active_id')).journal_id.id return super(HrPayslipEmployees, self.with_context(journal_id=journal_id)).compute_sheet() <commit_msg>[FIX] hr_payroll_account: Remove journal_id: False in context If the wizard to generate payslip was launched without an ´active_id´ in the context. A ´journal_id´ entry was set to False in the context later leading to a crash at payslip creation. The crash happens because, at creation time, the journal_id of a ´hr.payslip´ is set to the journal_id in the context if its present. Since this context entry is False and the journal_id field is required, creation crashes.<commit_after># -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import api, models class HrPayslipEmployees(models.TransientModel): _inherit = 'hr.payslip.employees' @api.multi def compute_sheet(self): if self.env.context.get('active_id'): journal_id = self.env['hr.payslip.run'].browse(self.env.context.get('active_id')).journal_id.id return super(HrPayslipEmployees, self.with_context(journal_id=journal_id)).compute_sheet() return super(HrPayslipEmployees, self).compute_sheet()
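The commit message above describes a classic context pitfall: a placeholder journal_id of False satisfies a "key present" check downstream while failing the required-field constraint. A plain-Python sketch of the pattern, with hypothetical names and no Odoo dependency, only to illustrate why the fix extends the context solely when a real value exists:

RUNS = {7: "bank-journal"}  # hypothetical payslip-run -> journal mapping


def create_payslip(context):
    # Downstream code treats any 'journal_id' key as authoritative, so a
    # placeholder value of False trips this required-field check.
    if 'journal_id' in context and not context['journal_id']:
        raise ValueError("journal_id is required but was False")
    return {'journal_id': context.get('journal_id', 'default-journal')}


def compute_sheet(context):
    # Fixed pattern: only add the key when there is a real value behind it.
    if context.get('active_id'):
        return create_payslip({**context, 'journal_id': RUNS[context['active_id']]})
    return create_payslip(context)


assert compute_sheet({'active_id': 7})['journal_id'] == 'bank-journal'
assert compute_sheet({})['journal_id'] == 'default-journal'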
6487c04c85f890a8d767216efac24bf42fb9e387
spare5/client.py
spare5/client.py
import requests from .resources.batches import Batches from .resources.jobs import Jobs DEFAULT_API_ROOT = 'http://app.spare5.com/partner/v2' class Spare5Client(object): def __init__(self, username, token, api_root=DEFAULT_API_ROOT): super(Spare5Client, self).__init__() self.api_root = api_root self.username = username self.token = token self.batches = Batches(self) def _make_request(self, verb, *args, **kwargs): kwargs.update({ 'auth': (self.username, self.token) }) response = requests.request(verb, *args, **kwargs) return response.json() def _get(self, url, **kwargs): return self._make_request('get', url, **kwargs) def _post(self, url, data, **kwargs): return self._make_request('post', url, data=data, **kwargs) def _put(self, url, data, **kwargs): return self._make_request('put', url, data=data, **kwargs) def _delete(self, url, **kwargs): return self._make_request('delete', url, **kwargs)
import requests from .resources.batches import Batches from .resources.jobs import Jobs DEFAULT_API_ROOT = 'http://app.spare5.com/partner/v2' class Spare5Client(object): def __init__(self, username, token, api_root=DEFAULT_API_ROOT): super(Spare5Client, self).__init__() self.api_root = api_root self.username = username self.token = token self.batches = Batches(self) def _make_request(self, verb, *args, **kwargs): kwargs.update({ 'auth': (self.username, self.token), 'headers': { 'content-type': 'application/json', }, }) response = requests.request(verb, *args, **kwargs) return response.json() def _get(self, url, **kwargs): return self._make_request('get', url, **kwargs) def _post(self, url, data, **kwargs): return self._make_request('post', url, data=data, **kwargs) def _put(self, url, data, **kwargs): return self._make_request('put', url, data=data, **kwargs) def _delete(self, url, **kwargs): return self._make_request('delete', url, **kwargs)
Update to specify content-type header
Update to specify content-type header
Python
mit
roverdotcom/spare5-python
import requests from .resources.batches import Batches from .resources.jobs import Jobs DEFAULT_API_ROOT = 'http://app.spare5.com/partner/v2' class Spare5Client(object): def __init__(self, username, token, api_root=DEFAULT_API_ROOT): super(Spare5Client, self).__init__() self.api_root = api_root self.username = username self.token = token self.batches = Batches(self) def _make_request(self, verb, *args, **kwargs): kwargs.update({ 'auth': (self.username, self.token) }) response = requests.request(verb, *args, **kwargs) return response.json() def _get(self, url, **kwargs): return self._make_request('get', url, **kwargs) def _post(self, url, data, **kwargs): return self._make_request('post', url, data=data, **kwargs) def _put(self, url, data, **kwargs): return self._make_request('put', url, data=data, **kwargs) def _delete(self, url, **kwargs): return self._make_request('delete', url, **kwargs) Update to specify content-type header
import requests from .resources.batches import Batches from .resources.jobs import Jobs DEFAULT_API_ROOT = 'http://app.spare5.com/partner/v2' class Spare5Client(object): def __init__(self, username, token, api_root=DEFAULT_API_ROOT): super(Spare5Client, self).__init__() self.api_root = api_root self.username = username self.token = token self.batches = Batches(self) def _make_request(self, verb, *args, **kwargs): kwargs.update({ 'auth': (self.username, self.token), 'headers': { 'content-type': 'application/json', }, }) response = requests.request(verb, *args, **kwargs) return response.json() def _get(self, url, **kwargs): return self._make_request('get', url, **kwargs) def _post(self, url, data, **kwargs): return self._make_request('post', url, data=data, **kwargs) def _put(self, url, data, **kwargs): return self._make_request('put', url, data=data, **kwargs) def _delete(self, url, **kwargs): return self._make_request('delete', url, **kwargs)
<commit_before>import requests from .resources.batches import Batches from .resources.jobs import Jobs DEFAULT_API_ROOT = 'http://app.spare5.com/partner/v2' class Spare5Client(object): def __init__(self, username, token, api_root=DEFAULT_API_ROOT): super(Spare5Client, self).__init__() self.api_root = api_root self.username = username self.token = token self.batches = Batches(self) def _make_request(self, verb, *args, **kwargs): kwargs.update({ 'auth': (self.username, self.token) }) response = requests.request(verb, *args, **kwargs) return response.json() def _get(self, url, **kwargs): return self._make_request('get', url, **kwargs) def _post(self, url, data, **kwargs): return self._make_request('post', url, data=data, **kwargs) def _put(self, url, data, **kwargs): return self._make_request('put', url, data=data, **kwargs) def _delete(self, url, **kwargs): return self._make_request('delete', url, **kwargs) <commit_msg>Update to specify content-type header<commit_after>
import requests from .resources.batches import Batches from .resources.jobs import Jobs DEFAULT_API_ROOT = 'http://app.spare5.com/partner/v2' class Spare5Client(object): def __init__(self, username, token, api_root=DEFAULT_API_ROOT): super(Spare5Client, self).__init__() self.api_root = api_root self.username = username self.token = token self.batches = Batches(self) def _make_request(self, verb, *args, **kwargs): kwargs.update({ 'auth': (self.username, self.token), 'headers': { 'content-type': 'application/json', }, }) response = requests.request(verb, *args, **kwargs) return response.json() def _get(self, url, **kwargs): return self._make_request('get', url, **kwargs) def _post(self, url, data, **kwargs): return self._make_request('post', url, data=data, **kwargs) def _put(self, url, data, **kwargs): return self._make_request('put', url, data=data, **kwargs) def _delete(self, url, **kwargs): return self._make_request('delete', url, **kwargs)
import requests from .resources.batches import Batches from .resources.jobs import Jobs DEFAULT_API_ROOT = 'http://app.spare5.com/partner/v2' class Spare5Client(object): def __init__(self, username, token, api_root=DEFAULT_API_ROOT): super(Spare5Client, self).__init__() self.api_root = api_root self.username = username self.token = token self.batches = Batches(self) def _make_request(self, verb, *args, **kwargs): kwargs.update({ 'auth': (self.username, self.token) }) response = requests.request(verb, *args, **kwargs) return response.json() def _get(self, url, **kwargs): return self._make_request('get', url, **kwargs) def _post(self, url, data, **kwargs): return self._make_request('post', url, data=data, **kwargs) def _put(self, url, data, **kwargs): return self._make_request('put', url, data=data, **kwargs) def _delete(self, url, **kwargs): return self._make_request('delete', url, **kwargs) Update to specify content-type headerimport requests from .resources.batches import Batches from .resources.jobs import Jobs DEFAULT_API_ROOT = 'http://app.spare5.com/partner/v2' class Spare5Client(object): def __init__(self, username, token, api_root=DEFAULT_API_ROOT): super(Spare5Client, self).__init__() self.api_root = api_root self.username = username self.token = token self.batches = Batches(self) def _make_request(self, verb, *args, **kwargs): kwargs.update({ 'auth': (self.username, self.token), 'headers': { 'content-type': 'application/json', }, }) response = requests.request(verb, *args, **kwargs) return response.json() def _get(self, url, **kwargs): return self._make_request('get', url, **kwargs) def _post(self, url, data, **kwargs): return self._make_request('post', url, data=data, **kwargs) def _put(self, url, data, **kwargs): return self._make_request('put', url, data=data, **kwargs) def _delete(self, url, **kwargs): return self._make_request('delete', url, **kwargs)
<commit_before>import requests from .resources.batches import Batches from .resources.jobs import Jobs DEFAULT_API_ROOT = 'http://app.spare5.com/partner/v2' class Spare5Client(object): def __init__(self, username, token, api_root=DEFAULT_API_ROOT): super(Spare5Client, self).__init__() self.api_root = api_root self.username = username self.token = token self.batches = Batches(self) def _make_request(self, verb, *args, **kwargs): kwargs.update({ 'auth': (self.username, self.token) }) response = requests.request(verb, *args, **kwargs) return response.json() def _get(self, url, **kwargs): return self._make_request('get', url, **kwargs) def _post(self, url, data, **kwargs): return self._make_request('post', url, data=data, **kwargs) def _put(self, url, data, **kwargs): return self._make_request('put', url, data=data, **kwargs) def _delete(self, url, **kwargs): return self._make_request('delete', url, **kwargs) <commit_msg>Update to specify content-type header<commit_after>import requests from .resources.batches import Batches from .resources.jobs import Jobs DEFAULT_API_ROOT = 'http://app.spare5.com/partner/v2' class Spare5Client(object): def __init__(self, username, token, api_root=DEFAULT_API_ROOT): super(Spare5Client, self).__init__() self.api_root = api_root self.username = username self.token = token self.batches = Batches(self) def _make_request(self, verb, *args, **kwargs): kwargs.update({ 'auth': (self.username, self.token), 'headers': { 'content-type': 'application/json', }, }) response = requests.request(verb, *args, **kwargs) return response.json() def _get(self, url, **kwargs): return self._make_request('get', url, **kwargs) def _post(self, url, data, **kwargs): return self._make_request('post', url, data=data, **kwargs) def _put(self, url, data, **kwargs): return self._make_request('put', url, data=data, **kwargs) def _delete(self, url, **kwargs): return self._make_request('delete', url, **kwargs)
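One caveat worth noting next to the Spare5Client change above: requests form-encodes a dict passed via data=, whatever the declared content-type, so the body only matches the new application/json header if the caller serializes it. A hedged sketch of that caller side (my assumption, not part of the commit):

import json

import requests


def post_json(url, payload, auth):
    # Serialize explicitly so the body really is JSON, matching the header.
    return requests.post(
        url,
        data=json.dumps(payload),
        auth=auth,
        headers={'content-type': 'application/json'},
    )

# requests >= 2.4.2 can do both steps at once via the json= keyword:
#   requests.post(url, json=payload, auth=auth)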
3d5093b46763acca9e3b3309073f73a7ca8daf73
src/clients/lib/python/xmmsclient/consts.py
src/clients/lib/python/xmmsclient/consts.py
from xmmsapi import VALUE_TYPE_NONE from xmmsapi import VALUE_TYPE_ERROR from xmmsapi import VALUE_TYPE_UINT32 from xmmsapi import VALUE_TYPE_INT32 from xmmsapi import VALUE_TYPE_STRING from xmmsapi import VALUE_TYPE_COLL from xmmsapi import VALUE_TYPE_BIN from xmmsapi import VALUE_TYPE_LIST from xmmsapi import VALUE_TYPE_DICT from xmmsapi import PLAYBACK_STATUS_STOP from xmmsapi import PLAYBACK_STATUS_PLAY from xmmsapi import PLAYBACK_STATUS_PAUSE from xmmsapi import PLAYLIST_CHANGED_ADD from xmmsapi import PLAYLIST_CHANGED_INSERT from xmmsapi import PLAYLIST_CHANGED_SHUFFLE from xmmsapi import PLAYLIST_CHANGED_REMOVE from xmmsapi import PLAYLIST_CHANGED_CLEAR from xmmsapi import PLAYLIST_CHANGED_MOVE from xmmsapi import PLAYLIST_CHANGED_SORT from xmmsapi import PLAYLIST_CHANGED_UPDATE from xmmsapi import PLUGIN_TYPE_ALL from xmmsapi import PLUGIN_TYPE_XFORM from xmmsapi import PLUGIN_TYPE_OUTPUT from xmmsapi import COLLECTION_CHANGED_ADD from xmmsapi import COLLECTION_CHANGED_UPDATE from xmmsapi import COLLECTION_CHANGED_RENAME from xmmsapi import COLLECTION_CHANGED_REMOVE
from xmmsapi import VALUE_TYPE_NONE from xmmsapi import VALUE_TYPE_ERROR from xmmsapi import VALUE_TYPE_INT32 from xmmsapi import VALUE_TYPE_STRING from xmmsapi import VALUE_TYPE_COLL from xmmsapi import VALUE_TYPE_BIN from xmmsapi import VALUE_TYPE_LIST from xmmsapi import VALUE_TYPE_DICT from xmmsapi import PLAYBACK_STATUS_STOP from xmmsapi import PLAYBACK_STATUS_PLAY from xmmsapi import PLAYBACK_STATUS_PAUSE from xmmsapi import PLAYLIST_CHANGED_ADD from xmmsapi import PLAYLIST_CHANGED_INSERT from xmmsapi import PLAYLIST_CHANGED_SHUFFLE from xmmsapi import PLAYLIST_CHANGED_REMOVE from xmmsapi import PLAYLIST_CHANGED_CLEAR from xmmsapi import PLAYLIST_CHANGED_MOVE from xmmsapi import PLAYLIST_CHANGED_SORT from xmmsapi import PLAYLIST_CHANGED_UPDATE from xmmsapi import PLUGIN_TYPE_ALL from xmmsapi import PLUGIN_TYPE_XFORM from xmmsapi import PLUGIN_TYPE_OUTPUT from xmmsapi import COLLECTION_CHANGED_ADD from xmmsapi import COLLECTION_CHANGED_UPDATE from xmmsapi import COLLECTION_CHANGED_RENAME from xmmsapi import COLLECTION_CHANGED_REMOVE
Remove import of nonexistant UINT32 type in python bindings
BUG(2151): Remove import of nonexistant UINT32 type in python bindings
Python
lgpl-2.1
mantaraya36/xmms2-mantaraya36,theeternalsw0rd/xmms2,oneman/xmms2-oneman-old,mantaraya36/xmms2-mantaraya36,xmms2/xmms2-stable,theefer/xmms2,theeternalsw0rd/xmms2,six600110/xmms2,chrippa/xmms2,oneman/xmms2-oneman,chrippa/xmms2,xmms2/xmms2-stable,xmms2/xmms2-stable,theeternalsw0rd/xmms2,mantaraya36/xmms2-mantaraya36,theefer/xmms2,krad-radio/xmms2-krad,dreamerc/xmms2,theeternalsw0rd/xmms2,krad-radio/xmms2-krad,chrippa/xmms2,six600110/xmms2,xmms2/xmms2-stable,theefer/xmms2,mantaraya36/xmms2-mantaraya36,krad-radio/xmms2-krad,oneman/xmms2-oneman,oneman/xmms2-oneman,dreamerc/xmms2,xmms2/xmms2-stable,krad-radio/xmms2-krad,theefer/xmms2,mantaraya36/xmms2-mantaraya36,xmms2/xmms2-stable,dreamerc/xmms2,dreamerc/xmms2,six600110/xmms2,oneman/xmms2-oneman-old,theefer/xmms2,oneman/xmms2-oneman,six600110/xmms2,krad-radio/xmms2-krad,krad-radio/xmms2-krad,chrippa/xmms2,theeternalsw0rd/xmms2,oneman/xmms2-oneman,oneman/xmms2-oneman,dreamerc/xmms2,chrippa/xmms2,oneman/xmms2-oneman-old,mantaraya36/xmms2-mantaraya36,theefer/xmms2,oneman/xmms2-oneman,six600110/xmms2,chrippa/xmms2,oneman/xmms2-oneman-old,theefer/xmms2,six600110/xmms2,oneman/xmms2-oneman-old,theeternalsw0rd/xmms2,mantaraya36/xmms2-mantaraya36
from xmmsapi import VALUE_TYPE_NONE from xmmsapi import VALUE_TYPE_ERROR from xmmsapi import VALUE_TYPE_UINT32 from xmmsapi import VALUE_TYPE_INT32 from xmmsapi import VALUE_TYPE_STRING from xmmsapi import VALUE_TYPE_COLL from xmmsapi import VALUE_TYPE_BIN from xmmsapi import VALUE_TYPE_LIST from xmmsapi import VALUE_TYPE_DICT from xmmsapi import PLAYBACK_STATUS_STOP from xmmsapi import PLAYBACK_STATUS_PLAY from xmmsapi import PLAYBACK_STATUS_PAUSE from xmmsapi import PLAYLIST_CHANGED_ADD from xmmsapi import PLAYLIST_CHANGED_INSERT from xmmsapi import PLAYLIST_CHANGED_SHUFFLE from xmmsapi import PLAYLIST_CHANGED_REMOVE from xmmsapi import PLAYLIST_CHANGED_CLEAR from xmmsapi import PLAYLIST_CHANGED_MOVE from xmmsapi import PLAYLIST_CHANGED_SORT from xmmsapi import PLAYLIST_CHANGED_UPDATE from xmmsapi import PLUGIN_TYPE_ALL from xmmsapi import PLUGIN_TYPE_XFORM from xmmsapi import PLUGIN_TYPE_OUTPUT from xmmsapi import COLLECTION_CHANGED_ADD from xmmsapi import COLLECTION_CHANGED_UPDATE from xmmsapi import COLLECTION_CHANGED_RENAME from xmmsapi import COLLECTION_CHANGED_REMOVE BUG(2151): Remove import of nonexistant UINT32 type in python bindings
from xmmsapi import VALUE_TYPE_NONE from xmmsapi import VALUE_TYPE_ERROR from xmmsapi import VALUE_TYPE_INT32 from xmmsapi import VALUE_TYPE_STRING from xmmsapi import VALUE_TYPE_COLL from xmmsapi import VALUE_TYPE_BIN from xmmsapi import VALUE_TYPE_LIST from xmmsapi import VALUE_TYPE_DICT from xmmsapi import PLAYBACK_STATUS_STOP from xmmsapi import PLAYBACK_STATUS_PLAY from xmmsapi import PLAYBACK_STATUS_PAUSE from xmmsapi import PLAYLIST_CHANGED_ADD from xmmsapi import PLAYLIST_CHANGED_INSERT from xmmsapi import PLAYLIST_CHANGED_SHUFFLE from xmmsapi import PLAYLIST_CHANGED_REMOVE from xmmsapi import PLAYLIST_CHANGED_CLEAR from xmmsapi import PLAYLIST_CHANGED_MOVE from xmmsapi import PLAYLIST_CHANGED_SORT from xmmsapi import PLAYLIST_CHANGED_UPDATE from xmmsapi import PLUGIN_TYPE_ALL from xmmsapi import PLUGIN_TYPE_XFORM from xmmsapi import PLUGIN_TYPE_OUTPUT from xmmsapi import COLLECTION_CHANGED_ADD from xmmsapi import COLLECTION_CHANGED_UPDATE from xmmsapi import COLLECTION_CHANGED_RENAME from xmmsapi import COLLECTION_CHANGED_REMOVE
<commit_before> from xmmsapi import VALUE_TYPE_NONE from xmmsapi import VALUE_TYPE_ERROR from xmmsapi import VALUE_TYPE_UINT32 from xmmsapi import VALUE_TYPE_INT32 from xmmsapi import VALUE_TYPE_STRING from xmmsapi import VALUE_TYPE_COLL from xmmsapi import VALUE_TYPE_BIN from xmmsapi import VALUE_TYPE_LIST from xmmsapi import VALUE_TYPE_DICT from xmmsapi import PLAYBACK_STATUS_STOP from xmmsapi import PLAYBACK_STATUS_PLAY from xmmsapi import PLAYBACK_STATUS_PAUSE from xmmsapi import PLAYLIST_CHANGED_ADD from xmmsapi import PLAYLIST_CHANGED_INSERT from xmmsapi import PLAYLIST_CHANGED_SHUFFLE from xmmsapi import PLAYLIST_CHANGED_REMOVE from xmmsapi import PLAYLIST_CHANGED_CLEAR from xmmsapi import PLAYLIST_CHANGED_MOVE from xmmsapi import PLAYLIST_CHANGED_SORT from xmmsapi import PLAYLIST_CHANGED_UPDATE from xmmsapi import PLUGIN_TYPE_ALL from xmmsapi import PLUGIN_TYPE_XFORM from xmmsapi import PLUGIN_TYPE_OUTPUT from xmmsapi import COLLECTION_CHANGED_ADD from xmmsapi import COLLECTION_CHANGED_UPDATE from xmmsapi import COLLECTION_CHANGED_RENAME from xmmsapi import COLLECTION_CHANGED_REMOVE <commit_msg>BUG(2151): Remove import of nonexistant UINT32 type in python bindings<commit_after>
from xmmsapi import VALUE_TYPE_NONE from xmmsapi import VALUE_TYPE_ERROR from xmmsapi import VALUE_TYPE_INT32 from xmmsapi import VALUE_TYPE_STRING from xmmsapi import VALUE_TYPE_COLL from xmmsapi import VALUE_TYPE_BIN from xmmsapi import VALUE_TYPE_LIST from xmmsapi import VALUE_TYPE_DICT from xmmsapi import PLAYBACK_STATUS_STOP from xmmsapi import PLAYBACK_STATUS_PLAY from xmmsapi import PLAYBACK_STATUS_PAUSE from xmmsapi import PLAYLIST_CHANGED_ADD from xmmsapi import PLAYLIST_CHANGED_INSERT from xmmsapi import PLAYLIST_CHANGED_SHUFFLE from xmmsapi import PLAYLIST_CHANGED_REMOVE from xmmsapi import PLAYLIST_CHANGED_CLEAR from xmmsapi import PLAYLIST_CHANGED_MOVE from xmmsapi import PLAYLIST_CHANGED_SORT from xmmsapi import PLAYLIST_CHANGED_UPDATE from xmmsapi import PLUGIN_TYPE_ALL from xmmsapi import PLUGIN_TYPE_XFORM from xmmsapi import PLUGIN_TYPE_OUTPUT from xmmsapi import COLLECTION_CHANGED_ADD from xmmsapi import COLLECTION_CHANGED_UPDATE from xmmsapi import COLLECTION_CHANGED_RENAME from xmmsapi import COLLECTION_CHANGED_REMOVE
from xmmsapi import VALUE_TYPE_NONE from xmmsapi import VALUE_TYPE_ERROR from xmmsapi import VALUE_TYPE_UINT32 from xmmsapi import VALUE_TYPE_INT32 from xmmsapi import VALUE_TYPE_STRING from xmmsapi import VALUE_TYPE_COLL from xmmsapi import VALUE_TYPE_BIN from xmmsapi import VALUE_TYPE_LIST from xmmsapi import VALUE_TYPE_DICT from xmmsapi import PLAYBACK_STATUS_STOP from xmmsapi import PLAYBACK_STATUS_PLAY from xmmsapi import PLAYBACK_STATUS_PAUSE from xmmsapi import PLAYLIST_CHANGED_ADD from xmmsapi import PLAYLIST_CHANGED_INSERT from xmmsapi import PLAYLIST_CHANGED_SHUFFLE from xmmsapi import PLAYLIST_CHANGED_REMOVE from xmmsapi import PLAYLIST_CHANGED_CLEAR from xmmsapi import PLAYLIST_CHANGED_MOVE from xmmsapi import PLAYLIST_CHANGED_SORT from xmmsapi import PLAYLIST_CHANGED_UPDATE from xmmsapi import PLUGIN_TYPE_ALL from xmmsapi import PLUGIN_TYPE_XFORM from xmmsapi import PLUGIN_TYPE_OUTPUT from xmmsapi import COLLECTION_CHANGED_ADD from xmmsapi import COLLECTION_CHANGED_UPDATE from xmmsapi import COLLECTION_CHANGED_RENAME from xmmsapi import COLLECTION_CHANGED_REMOVE BUG(2151): Remove import of nonexistant UINT32 type in python bindings from xmmsapi import VALUE_TYPE_NONE from xmmsapi import VALUE_TYPE_ERROR from xmmsapi import VALUE_TYPE_INT32 from xmmsapi import VALUE_TYPE_STRING from xmmsapi import VALUE_TYPE_COLL from xmmsapi import VALUE_TYPE_BIN from xmmsapi import VALUE_TYPE_LIST from xmmsapi import VALUE_TYPE_DICT from xmmsapi import PLAYBACK_STATUS_STOP from xmmsapi import PLAYBACK_STATUS_PLAY from xmmsapi import PLAYBACK_STATUS_PAUSE from xmmsapi import PLAYLIST_CHANGED_ADD from xmmsapi import PLAYLIST_CHANGED_INSERT from xmmsapi import PLAYLIST_CHANGED_SHUFFLE from xmmsapi import PLAYLIST_CHANGED_REMOVE from xmmsapi import PLAYLIST_CHANGED_CLEAR from xmmsapi import PLAYLIST_CHANGED_MOVE from xmmsapi import PLAYLIST_CHANGED_SORT from xmmsapi import PLAYLIST_CHANGED_UPDATE from xmmsapi import PLUGIN_TYPE_ALL from xmmsapi import PLUGIN_TYPE_XFORM from xmmsapi import PLUGIN_TYPE_OUTPUT from xmmsapi import COLLECTION_CHANGED_ADD from xmmsapi import COLLECTION_CHANGED_UPDATE from xmmsapi import COLLECTION_CHANGED_RENAME from xmmsapi import COLLECTION_CHANGED_REMOVE
<commit_before> from xmmsapi import VALUE_TYPE_NONE from xmmsapi import VALUE_TYPE_ERROR from xmmsapi import VALUE_TYPE_UINT32 from xmmsapi import VALUE_TYPE_INT32 from xmmsapi import VALUE_TYPE_STRING from xmmsapi import VALUE_TYPE_COLL from xmmsapi import VALUE_TYPE_BIN from xmmsapi import VALUE_TYPE_LIST from xmmsapi import VALUE_TYPE_DICT from xmmsapi import PLAYBACK_STATUS_STOP from xmmsapi import PLAYBACK_STATUS_PLAY from xmmsapi import PLAYBACK_STATUS_PAUSE from xmmsapi import PLAYLIST_CHANGED_ADD from xmmsapi import PLAYLIST_CHANGED_INSERT from xmmsapi import PLAYLIST_CHANGED_SHUFFLE from xmmsapi import PLAYLIST_CHANGED_REMOVE from xmmsapi import PLAYLIST_CHANGED_CLEAR from xmmsapi import PLAYLIST_CHANGED_MOVE from xmmsapi import PLAYLIST_CHANGED_SORT from xmmsapi import PLAYLIST_CHANGED_UPDATE from xmmsapi import PLUGIN_TYPE_ALL from xmmsapi import PLUGIN_TYPE_XFORM from xmmsapi import PLUGIN_TYPE_OUTPUT from xmmsapi import COLLECTION_CHANGED_ADD from xmmsapi import COLLECTION_CHANGED_UPDATE from xmmsapi import COLLECTION_CHANGED_RENAME from xmmsapi import COLLECTION_CHANGED_REMOVE <commit_msg>BUG(2151): Remove import of nonexistent UINT32 type in python bindings<commit_after> from xmmsapi import VALUE_TYPE_NONE from xmmsapi import VALUE_TYPE_ERROR from xmmsapi import VALUE_TYPE_INT32 from xmmsapi import VALUE_TYPE_STRING from xmmsapi import VALUE_TYPE_COLL from xmmsapi import VALUE_TYPE_BIN from xmmsapi import VALUE_TYPE_LIST from xmmsapi import VALUE_TYPE_DICT from xmmsapi import PLAYBACK_STATUS_STOP from xmmsapi import PLAYBACK_STATUS_PLAY from xmmsapi import PLAYBACK_STATUS_PAUSE from xmmsapi import PLAYLIST_CHANGED_ADD from xmmsapi import PLAYLIST_CHANGED_INSERT from xmmsapi import PLAYLIST_CHANGED_SHUFFLE from xmmsapi import PLAYLIST_CHANGED_REMOVE from xmmsapi import PLAYLIST_CHANGED_CLEAR from xmmsapi import PLAYLIST_CHANGED_MOVE from xmmsapi import PLAYLIST_CHANGED_SORT from xmmsapi import PLAYLIST_CHANGED_UPDATE from xmmsapi import PLUGIN_TYPE_ALL from xmmsapi import PLUGIN_TYPE_XFORM from xmmsapi import PLUGIN_TYPE_OUTPUT from xmmsapi import COLLECTION_CHANGED_ADD from xmmsapi import COLLECTION_CHANGED_UPDATE from xmmsapi import COLLECTION_CHANGED_RENAME from xmmsapi import COLLECTION_CHANGED_REMOVE
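A note on the failure mode in the record above: importing a name a module does not define raises ImportError as soon as the importing module loads, which is why the nonexistent UINT32 constant had to be dropped rather than guarded. A minimal sketch of probing for optional names instead of importing them outright; only the standard library is assumed, and xmmsapi itself need not be installed:

import importlib

def available_names(module_name, candidates):
    # Return the subset of candidate attribute names the module actually defines,
    # or an empty list if the module cannot be imported at all.
    try:
        module = importlib.import_module(module_name)
    except ImportError:
        return []
    return [name for name in candidates if hasattr(module, name)]

# e.g. available_names("xmmsapi", ["VALUE_TYPE_INT32", "VALUE_TYPE_UINT32"])
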
99ab527550b91d17342ef3112e35f3cdb1be9867
src/binsearch.py
src/binsearch.py
""" Binary search """ def binary_search0(xs, x): """ Perform binary search for a specific value in the given sorted list :param xs: a sorted list :param x: the target value :return: an index if the value was found, or None if not """ lft, rgt = 0, len(xs) - 1 while lft <= rgt: mid = (lft + rgt) // 2 if xs[mid] == x: return mid elif x < xs[mid]: rgt = mid - 1 elif x > xs[mid]: lft = mid + 1 return None def binary_search1(xs, x): """ Perform binary search for a specific value in the given sorted list :param xs: a sorted list :param x: the target value :return: an index if the value was found, or None if not """ lft, rgt = 0, len(xs) while lft < rgt: mid = (lft + rgt) // 2 if xs[mid] == x: return mid elif x < xs[mid]: rgt = mid elif x > xs[mid]: lft = mid + 1 return None
""" Binary search """ def binary_search0(xs, x): """ Perform binary search for a specific value in the given sorted list :param xs: a sorted list :param x: the target value :return: an index if the value was found, or None if not """ lft, rgt = 0, len(xs) - 1 while lft <= rgt: mid = (lft + rgt) // 2 if xs[mid] == x: return mid if xs[mid] < x: lft = mid + 1 else: rgt = mid - 1 return None def binary_search1(xs, x): """ Perform binary search for a specific value in the given sorted list :param xs: a sorted list :param x: the target value :return: an index if the value was found, or None if not """ lft, rgt = 0, len(xs) while lft < rgt: mid = (lft + rgt) // 2 if xs[mid] == x: return mid if xs[mid] < x: lft = mid + 1 else: rgt = mid return None
Fix the "no else after return" lint
Fix the "no else after return" lint
Python
mit
all3fox/algos-py
""" Binary search """ def binary_search0(xs, x): """ Perform binary search for a specific value in the given sorted list :param xs: a sorted list :param x: the target value :return: an index if the value was found, or None if not """ lft, rgt = 0, len(xs) - 1 while lft <= rgt: mid = (lft + rgt) // 2 if xs[mid] == x: return mid elif x < xs[mid]: rgt = mid - 1 elif x > xs[mid]: lft = mid + 1 return None def binary_search1(xs, x): """ Perform binary search for a specific value in the given sorted list :param xs: a sorted list :param x: the target value :return: an index if the value was found, or None if not """ lft, rgt = 0, len(xs) while lft < rgt: mid = (lft + rgt) // 2 if xs[mid] == x: return mid elif x < xs[mid]: rgt = mid elif x > xs[mid]: lft = mid + 1 return None Fix the "no else after return" lint
""" Binary search """ def binary_search0(xs, x): """ Perform binary search for a specific value in the given sorted list :param xs: a sorted list :param x: the target value :return: an index if the value was found, or None if not """ lft, rgt = 0, len(xs) - 1 while lft <= rgt: mid = (lft + rgt) // 2 if xs[mid] == x: return mid if xs[mid] < x: lft = mid + 1 else: rgt = mid - 1 return None def binary_search1(xs, x): """ Perform binary search for a specific value in the given sorted list :param xs: a sorted list :param x: the target value :return: an index if the value was found, or None if not """ lft, rgt = 0, len(xs) while lft < rgt: mid = (lft + rgt) // 2 if xs[mid] == x: return mid if xs[mid] < x: lft = mid + 1 else: rgt = mid return None
<commit_before>""" Binary search """ def binary_search0(xs, x): """ Perform binary search for a specific value in the given sorted list :param xs: a sorted list :param x: the target value :return: an index if the value was found, or None if not """ lft, rgt = 0, len(xs) - 1 while lft <= rgt: mid = (lft + rgt) // 2 if xs[mid] == x: return mid elif x < xs[mid]: rgt = mid - 1 elif x > xs[mid]: lft = mid + 1 return None def binary_search1(xs, x): """ Perform binary search for a specific value in the given sorted list :param xs: a sorted list :param x: the target value :return: an index if the value was found, or None if not """ lft, rgt = 0, len(xs) while lft < rgt: mid = (lft + rgt) // 2 if xs[mid] == x: return mid elif x < xs[mid]: rgt = mid elif x > xs[mid]: lft = mid + 1 return None <commit_msg>Fix the "no else after return" lint<commit_after>
""" Binary search """ def binary_search0(xs, x): """ Perform binary search for a specific value in the given sorted list :param xs: a sorted list :param x: the target value :return: an index if the value was found, or None if not """ lft, rgt = 0, len(xs) - 1 while lft <= rgt: mid = (lft + rgt) // 2 if xs[mid] == x: return mid if xs[mid] < x: lft = mid + 1 else: rgt = mid - 1 return None def binary_search1(xs, x): """ Perform binary search for a specific value in the given sorted list :param xs: a sorted list :param x: the target value :return: an index if the value was found, or None if not """ lft, rgt = 0, len(xs) while lft < rgt: mid = (lft + rgt) // 2 if xs[mid] == x: return mid if xs[mid] < x: lft = mid + 1 else: rgt = mid return None
""" Binary search """ def binary_search0(xs, x): """ Perform binary search for a specific value in the given sorted list :param xs: a sorted list :param x: the target value :return: an index if the value was found, or None if not """ lft, rgt = 0, len(xs) - 1 while lft <= rgt: mid = (lft + rgt) // 2 if xs[mid] == x: return mid elif x < xs[mid]: rgt = mid - 1 elif x > xs[mid]: lft = mid + 1 return None def binary_search1(xs, x): """ Perform binary search for a specific value in the given sorted list :param xs: a sorted list :param x: the target value :return: an index if the value was found, or None if not """ lft, rgt = 0, len(xs) while lft < rgt: mid = (lft + rgt) // 2 if xs[mid] == x: return mid elif x < xs[mid]: rgt = mid elif x > xs[mid]: lft = mid + 1 return None Fix the "no else after return" lint""" Binary search """ def binary_search0(xs, x): """ Perform binary search for a specific value in the given sorted list :param xs: a sorted list :param x: the target value :return: an index if the value was found, or None if not """ lft, rgt = 0, len(xs) - 1 while lft <= rgt: mid = (lft + rgt) // 2 if xs[mid] == x: return mid if xs[mid] < x: lft = mid + 1 else: rgt = mid - 1 return None def binary_search1(xs, x): """ Perform binary search for a specific value in the given sorted list :param xs: a sorted list :param x: the target value :return: an index if the value was found, or None if not """ lft, rgt = 0, len(xs) while lft < rgt: mid = (lft + rgt) // 2 if xs[mid] == x: return mid if xs[mid] < x: lft = mid + 1 else: rgt = mid return None
<commit_before>""" Binary search """ def binary_search0(xs, x): """ Perform binary search for a specific value in the given sorted list :param xs: a sorted list :param x: the target value :return: an index if the value was found, or None if not """ lft, rgt = 0, len(xs) - 1 while lft <= rgt: mid = (lft + rgt) // 2 if xs[mid] == x: return mid elif x < xs[mid]: rgt = mid - 1 elif x > xs[mid]: lft = mid + 1 return None def binary_search1(xs, x): """ Perform binary search for a specific value in the given sorted list :param xs: a sorted list :param x: the target value :return: an index if the value was found, or None if not """ lft, rgt = 0, len(xs) while lft < rgt: mid = (lft + rgt) // 2 if xs[mid] == x: return mid elif x < xs[mid]: rgt = mid elif x > xs[mid]: lft = mid + 1 return None <commit_msg>Fix the "no else after return" lint<commit_after>""" Binary search """ def binary_search0(xs, x): """ Perform binary search for a specific value in the given sorted list :param xs: a sorted list :param x: the target value :return: an index if the value was found, or None if not """ lft, rgt = 0, len(xs) - 1 while lft <= rgt: mid = (lft + rgt) // 2 if xs[mid] == x: return mid if xs[mid] < x: lft = mid + 1 else: rgt = mid - 1 return None def binary_search1(xs, x): """ Perform binary search for a specific value in the given sorted list :param xs: a sorted list :param x: the target value :return: an index if the value was found, or None if not """ lft, rgt = 0, len(xs) while lft < rgt: mid = (lft + rgt) // 2 if xs[mid] == x: return mid if xs[mid] < x: lft = mid + 1 else: rgt = mid return None
f9926da62fc50c8602797cb12ac80264140c8028
mfh.py
mfh.py
import os import sys import time from multiprocessing import Process, Event import mfhclient import update from arguments import parse from settings import HONEYPORT def main(): update_event = Event() mfhclient_process = Process( args=(args, update_event,), name="mfhclient_process", target=mfhclient.main, ) if args.client is not None: mfhclient_process.start() trigger_process = Process( args=(update_event,), name="trigger_process", target=update.trigger, ) trigger_process.start() trigger_process.join() while mfhclient_process.is_alive(): time.sleep(5) else: update.pull("origin", "master") sys.stdout.flush() os.execl(sys.executable, sys.executable, *sys.argv) if __name__ == '__main__': # Parse arguments args = parse() if args.c: args.client = HONEYPORT main()
import os import sys import time from multiprocessing import Process, Event import mfhclient import update from arguments import parse from settings import HONEYPORT def main(): update_event = Event() mfhclient_process = Process( args=(args, update_event,), name="mfhclient_process", target=mfhclient.main, ) if args.client is not None: mfhclient_process.start() if args.updater: trigger_process = Process( args=(update_event,), name="trigger_process", target=update.trigger, ) trigger_process.start() trigger_process.join() while mfhclient_process.is_alive(): time.sleep(5) else: if args.updater: update.pull("origin", "master") sys.stdout.flush() os.execl(sys.executable, sys.executable, *sys.argv) if __name__ == '__main__': # Parse arguments args = parse() if args.c: args.client = HONEYPORT main()
Add condition to only launch updater if -u or --updater is specified
Add condition to only launch updater if -u or --updater is specified
Python
mit
Zloool/manyfaced-honeypot
import os import sys import time from multiprocessing import Process, Event import mfhclient import update from arguments import parse from settings import HONEYPORT def main(): update_event = Event() mfhclient_process = Process( args=(args, update_event,), name="mfhclient_process", target=mfhclient.main, ) if args.client is not None: mfhclient_process.start() trigger_process = Process( args=(update_event,), name="trigger_process", target=update.trigger, ) trigger_process.start() trigger_process.join() while mfhclient_process.is_alive(): time.sleep(5) else: update.pull("origin", "master") sys.stdout.flush() os.execl(sys.executable, sys.executable, *sys.argv) if __name__ == '__main__': # Parse arguments args = parse() if args.c: args.client = HONEYPORT main() Add condition to only launch updater if -u or --updater is specified
import os import sys import time from multiprocessing import Process, Event import mfhclient import update from arguments import parse from settings import HONEYPORT def main(): update_event = Event() mfhclient_process = Process( args=(args, update_event,), name="mfhclient_process", target=mfhclient.main, ) if args.client is not None: mfhclient_process.start() if args.updater: trigger_process = Process( args=(update_event,), name="trigger_process", target=update.trigger, ) trigger_process.start() trigger_process.join() while mfhclient_process.is_alive(): time.sleep(5) else: if args.updater: update.pull("origin", "master") sys.stdout.flush() os.execl(sys.executable, sys.executable, *sys.argv) if __name__ == '__main__': # Parse arguments args = parse() if args.c: args.client = HONEYPORT main()
<commit_before>import os import sys import time from multiprocessing import Process, Event import mfhclient import update from arguments import parse from settings import HONEYPORT def main(): update_event = Event() mfhclient_process = Process( args=(args, update_event,), name="mfhclient_process", target=mfhclient.main, ) if args.client is not None: mfhclient_process.start() trigger_process = Process( args=(update_event,), name="trigger_process", target=update.trigger, ) trigger_process.start() trigger_process.join() while mfhclient_process.is_alive(): time.sleep(5) else: update.pull("origin", "master") sys.stdout.flush() os.execl(sys.executable, sys.executable, *sys.argv) if __name__ == '__main__': # Parse arguments args = parse() if args.c: args.client = HONEYPORT main() <commit_msg>Add condition to only launch updater if -u or --updater is specified<commit_after>
import os import sys import time from multiprocessing import Process, Event import mfhclient import update from arguments import parse from settings import HONEYPORT def main(): update_event = Event() mfhclient_process = Process( args=(args, update_event,), name="mfhclient_process", target=mfhclient.main, ) if args.client is not None: mfhclient_process.start() if args.updater: trigger_process = Process( args=(update_event,), name="trigger_process", target=update.trigger, ) trigger_process.start() trigger_process.join() while mfhclient_process.is_alive(): time.sleep(5) else: if args.updater: update.pull("origin", "master") sys.stdout.flush() os.execl(sys.executable, sys.executable, *sys.argv) if __name__ == '__main__': # Parse arguments args = parse() if args.c: args.client = HONEYPORT main()
import os import sys import time from multiprocessing import Process, Event import mfhclient import update from arguments import parse from settings import HONEYPORT def main(): update_event = Event() mfhclient_process = Process( args=(args, update_event,), name="mfhclient_process", target=mfhclient.main, ) if args.client is not None: mfhclient_process.start() trigger_process = Process( args=(update_event,), name="trigger_process", target=update.trigger, ) trigger_process.start() trigger_process.join() while mfhclient_process.is_alive(): time.sleep(5) else: update.pull("origin", "master") sys.stdout.flush() os.execl(sys.executable, sys.executable, *sys.argv) if __name__ == '__main__': # Parse arguments args = parse() if args.c: args.client = HONEYPORT main() Add condition to only launch updater if -u or --updater is specifiedimport os import sys import time from multiprocessing import Process, Event import mfhclient import update from arguments import parse from settings import HONEYPORT def main(): update_event = Event() mfhclient_process = Process( args=(args, update_event,), name="mfhclient_process", target=mfhclient.main, ) if args.client is not None: mfhclient_process.start() if args.updater: trigger_process = Process( args=(update_event,), name="trigger_process", target=update.trigger, ) trigger_process.start() trigger_process.join() while mfhclient_process.is_alive(): time.sleep(5) else: if args.updater: update.pull("origin", "master") sys.stdout.flush() os.execl(sys.executable, sys.executable, *sys.argv) if __name__ == '__main__': # Parse arguments args = parse() if args.c: args.client = HONEYPORT main()
<commit_before>import os import sys import time from multiprocessing import Process, Event import mfhclient import update from arguments import parse from settings import HONEYPORT def main(): update_event = Event() mfhclient_process = Process( args=(args, update_event,), name="mfhclient_process", target=mfhclient.main, ) if args.client is not None: mfhclient_process.start() trigger_process = Process( args=(update_event,), name="trigger_process", target=update.trigger, ) trigger_process.start() trigger_process.join() while mfhclient_process.is_alive(): time.sleep(5) else: update.pull("origin", "master") sys.stdout.flush() os.execl(sys.executable, sys.executable, *sys.argv) if __name__ == '__main__': # Parse arguments args = parse() if args.c: args.client = HONEYPORT main() <commit_msg>Add condition to only launch updater if -u or --updater is specified<commit_after>import os import sys import time from multiprocessing import Process, Event import mfhclient import update from arguments import parse from settings import HONEYPORT def main(): update_event = Event() mfhclient_process = Process( args=(args, update_event,), name="mfhclient_process", target=mfhclient.main, ) if args.client is not None: mfhclient_process.start() if args.updater: trigger_process = Process( args=(update_event,), name="trigger_process", target=update.trigger, ) trigger_process.start() trigger_process.join() while mfhclient_process.is_alive(): time.sleep(5) else: if args.updater: update.pull("origin", "master") sys.stdout.flush() os.execl(sys.executable, sys.executable, *sys.argv) if __name__ == '__main__': # Parse arguments args = parse() if args.c: args.client = HONEYPORT main()
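The arguments.parse helper used above is not shown in the record, so the exact flag definitions are an assumption; the code only requires an args object carrying c, client, and updater attributes. A minimal argparse sketch consistent with that usage (flag names and types inferred from the commit message, not confirmed by the source):

import argparse

def parse():
    parser = argparse.ArgumentParser()
    parser.add_argument("-c", action="store_true",
                        help="run the client on the default honeyport")
    parser.add_argument("--client", default=None,
                        help="explicit client port; overridden to HONEYPORT when -c is set")
    parser.add_argument("-u", "--updater", action="store_true",
                        help="launch the self-update trigger process")
    return parser.parse_args()
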
8276c2548bd84c4318a2fff45625a08d2ab38fcd
tests/integration/shell/key.py
tests/integration/shell/key.py
# Import salt libs import integration class KeyTest(integration.CliCase): ''' Test salt-key script ''' def test_list(self): ''' test salt-key -L ''' data = self.run_key('-L') expect = [ '\x1b[1;31mUnaccepted Keys:\x1b[0m', '\x1b[1;32mAccepted Keys:\x1b[0m', '\x1b[0;32mminion\x1b[0m', '\x1b[1;34mRejected:\x1b[0m', ''] self.assertEqual(data, expect) def test_list_acc(self): ''' test salt-key -l ''' data = self.run_key('-l acc') self.assertEqual( data, ['\x1b[0;32mminion\x1b[0m', ''] ) def test_list_un(self): ''' test salt-key -l ''' data = self.run_key('-l un') self.assertEqual( data, [''] )
# Import salt libs import integration class KeyTest(integration.ShellCase): ''' Test salt-key script ''' def test_list(self): ''' test salt-key -L ''' data = self.run_key('-L') expect = [ '\x1b[1;31mUnaccepted Keys:\x1b[0m', '\x1b[1;32mAccepted Keys:\x1b[0m', '\x1b[0;32mminion\x1b[0m', '\x1b[1;34mRejected:\x1b[0m', ''] self.assertEqual(data, expect) def test_list_acc(self): ''' test salt-key -l ''' data = self.run_key('-l acc') self.assertEqual( data, ['\x1b[0;32mminion\x1b[0m', ''] ) def test_list_un(self): ''' test salt-key -l ''' data = self.run_key('-l un') self.assertEqual( data, [''] )
Change name of clicase to shellcase
Change name of clicase to shellcase
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
# Import salt libs import integration class KeyTest(integration.CliCase): ''' Test salt-key script ''' def test_list(self): ''' test salt-key -L ''' data = self.run_key('-L') expect = [ '\x1b[1;31mUnaccepted Keys:\x1b[0m', '\x1b[1;32mAccepted Keys:\x1b[0m', '\x1b[0;32mminion\x1b[0m', '\x1b[1;34mRejected:\x1b[0m', ''] self.assertEqual(data, expect) def test_list_acc(self): ''' test salt-key -l ''' data = self.run_key('-l acc') self.assertEqual( data, ['\x1b[0;32mminion\x1b[0m', ''] ) def test_list_un(self): ''' test salt-key -l ''' data = self.run_key('-l un') self.assertEqual( data, [''] ) Change name of clicase to shellcase
# Import salt libs import integration class KeyTest(integration.ShellCase): ''' Test salt-key script ''' def test_list(self): ''' test salt-key -L ''' data = self.run_key('-L') expect = [ '\x1b[1;31mUnaccepted Keys:\x1b[0m', '\x1b[1;32mAccepted Keys:\x1b[0m', '\x1b[0;32mminion\x1b[0m', '\x1b[1;34mRejected:\x1b[0m', ''] self.assertEqual(data, expect) def test_list_acc(self): ''' test salt-key -l ''' data = self.run_key('-l acc') self.assertEqual( data, ['\x1b[0;32mminion\x1b[0m', ''] ) def test_list_un(self): ''' test salt-key -l ''' data = self.run_key('-l un') self.assertEqual( data, [''] )
<commit_before># Import salt libs import integration class KeyTest(integration.CliCase): ''' Test salt-key script ''' def test_list(self): ''' test salt-key -L ''' data = self.run_key('-L') expect = [ '\x1b[1;31mUnaccepted Keys:\x1b[0m', '\x1b[1;32mAccepted Keys:\x1b[0m', '\x1b[0;32mminion\x1b[0m', '\x1b[1;34mRejected:\x1b[0m', ''] self.assertEqual(data, expect) def test_list_acc(self): ''' test salt-key -l ''' data = self.run_key('-l acc') self.assertEqual( data, ['\x1b[0;32mminion\x1b[0m', ''] ) def test_list_un(self): ''' test salt-key -l ''' data = self.run_key('-l un') self.assertEqual( data, [''] ) <commit_msg>Change name of clicase to shellcase<commit_after>
# Import salt libs import integration class KeyTest(integration.ShellCase): ''' Test salt-key script ''' def test_list(self): ''' test salt-key -L ''' data = self.run_key('-L') expect = [ '\x1b[1;31mUnaccepted Keys:\x1b[0m', '\x1b[1;32mAccepted Keys:\x1b[0m', '\x1b[0;32mminion\x1b[0m', '\x1b[1;34mRejected:\x1b[0m', ''] self.assertEqual(data, expect) def test_list_acc(self): ''' test salt-key -l ''' data = self.run_key('-l acc') self.assertEqual( data, ['\x1b[0;32mminion\x1b[0m', ''] ) def test_list_un(self): ''' test salt-key -l ''' data = self.run_key('-l un') self.assertEqual( data, [''] )
# Import salt libs import integration class KeyTest(integration.CliCase): ''' Test salt-key script ''' def test_list(self): ''' test salt-key -L ''' data = self.run_key('-L') expect = [ '\x1b[1;31mUnaccepted Keys:\x1b[0m', '\x1b[1;32mAccepted Keys:\x1b[0m', '\x1b[0;32mminion\x1b[0m', '\x1b[1;34mRejected:\x1b[0m', ''] self.assertEqual(data, expect) def test_list_acc(self): ''' test salt-key -l ''' data = self.run_key('-l acc') self.assertEqual( data, ['\x1b[0;32mminion\x1b[0m', ''] ) def test_list_un(self): ''' test salt-key -l ''' data = self.run_key('-l un') self.assertEqual( data, [''] ) Change name of clicase to shellcase# Import salt libs import integration class KeyTest(integration.ShellCase): ''' Test salt-key script ''' def test_list(self): ''' test salt-key -L ''' data = self.run_key('-L') expect = [ '\x1b[1;31mUnaccepted Keys:\x1b[0m', '\x1b[1;32mAccepted Keys:\x1b[0m', '\x1b[0;32mminion\x1b[0m', '\x1b[1;34mRejected:\x1b[0m', ''] self.assertEqual(data, expect) def test_list_acc(self): ''' test salt-key -l ''' data = self.run_key('-l acc') self.assertEqual( data, ['\x1b[0;32mminion\x1b[0m', ''] ) def test_list_un(self): ''' test salt-key -l ''' data = self.run_key('-l un') self.assertEqual( data, [''] )
<commit_before># Import salt libs import integration class KeyTest(integration.CliCase): ''' Test salt-key script ''' def test_list(self): ''' test salt-key -L ''' data = self.run_key('-L') expect = [ '\x1b[1;31mUnaccepted Keys:\x1b[0m', '\x1b[1;32mAccepted Keys:\x1b[0m', '\x1b[0;32mminion\x1b[0m', '\x1b[1;34mRejected:\x1b[0m', ''] self.assertEqual(data, expect) def test_list_acc(self): ''' test salt-key -l ''' data = self.run_key('-l acc') self.assertEqual( data, ['\x1b[0;32mminion\x1b[0m', ''] ) def test_list_un(self): ''' test salt-key -l ''' data = self.run_key('-l un') self.assertEqual( data, [''] ) <commit_msg>Change name of clicase to shellcase<commit_after># Import salt libs import integration class KeyTest(integration.ShellCase): ''' Test salt-key script ''' def test_list(self): ''' test salt-key -L ''' data = self.run_key('-L') expect = [ '\x1b[1;31mUnaccepted Keys:\x1b[0m', '\x1b[1;32mAccepted Keys:\x1b[0m', '\x1b[0;32mminion\x1b[0m', '\x1b[1;34mRejected:\x1b[0m', ''] self.assertEqual(data, expect) def test_list_acc(self): ''' test salt-key -l ''' data = self.run_key('-l acc') self.assertEqual( data, ['\x1b[0;32mminion\x1b[0m', ''] ) def test_list_un(self): ''' test salt-key -l ''' data = self.run_key('-l un') self.assertEqual( data, [''] )
c99652b3992b1a4aa6f1ca44051cbfd66410505a
examples/generate-manager-file.py
examples/generate-manager-file.py
#!/usr/bin/python import sys import telepathy from telepathy.interfaces import CONN_MGR_INTERFACE if len(sys.argv) >= 2: manager_name = sys.argv[1] else: manager_name = "haze" service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name object = telepathy.client.ConnectionManager(service_name, object_path) manager = object[CONN_MGR_INTERFACE] print "[ConnectionManager]" print "BusName=%s" % service_name print "ObjectPath=%s" % object_path print for protocol in manager.ListProtocols(): print "[Protocol %s]" % protocol for param in manager.GetParameters(protocol): print "param-%s=%s" % (param[0], param[2]), # FIXME: deal with the "register" flag if param[1] == 1L: print "required", print print
#!/usr/bin/python import sys import telepathy from telepathy.interfaces import CONN_MGR_INTERFACE if len(sys.argv) >= 2: manager_name = sys.argv[1] else: manager_name = "haze" service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name object = telepathy.client.ConnectionManager(service_name, object_path) manager = object[CONN_MGR_INTERFACE] print "[ConnectionManager]" print "BusName=%s" % service_name print "ObjectPath=%s" % object_path print protocols = manager.ListProtocols() protocols.sort() for protocol in protocols: print "[Protocol %s]" % protocol for param in manager.GetParameters(protocol): print "param-%s=%s" % (param[0], param[2]), # FIXME: deal with the "register" flag if param[1] == 1L: print "required", print print
Sort the generated manager file by protocol name
Sort the generated manager file by protocol name 20070831193701-4210b-ede9decef118aba3937b0291512956927333dbe6.gz
Python
lgpl-2.1
detrout/telepathy-python,PabloCastellano/telepathy-python,max-posedon/telepathy-python,freedesktop-unofficial-mirror/telepathy__telepathy-python,detrout/telepathy-python,epage/telepathy-python,freedesktop-unofficial-mirror/telepathy__telepathy-python,epage/telepathy-python,PabloCastellano/telepathy-python,max-posedon/telepathy-python
#!/usr/bin/python import sys import telepathy from telepathy.interfaces import CONN_MGR_INTERFACE if len(sys.argv) >= 2: manager_name = sys.argv[1] else: manager_name = "haze" service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name object = telepathy.client.ConnectionManager(service_name, object_path) manager = object[CONN_MGR_INTERFACE] print "[ConnectionManager]" print "BusName=%s" % service_name print "ObjectPath=%s" % object_path print for protocol in manager.ListProtocols(): print "[Protocol %s]" % protocol for param in manager.GetParameters(protocol): print "param-%s=%s" % (param[0], param[2]), # FIXME: deal with the "register" flag if param[1] == 1L: print "required", print print Sort the generated manager file by protocol name 20070831193701-4210b-ede9decef118aba3937b0291512956927333dbe6.gz
#!/usr/bin/python import sys import telepathy from telepathy.interfaces import CONN_MGR_INTERFACE if len(sys.argv) >= 2: manager_name = sys.argv[1] else: manager_name = "haze" service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name object = telepathy.client.ConnectionManager(service_name, object_path) manager = object[CONN_MGR_INTERFACE] print "[ConnectionManager]" print "BusName=%s" % service_name print "ObjectPath=%s" % object_path print protocols = manager.ListProtocols() protocols.sort() for protocol in protocols: print "[Protocol %s]" % protocol for param in manager.GetParameters(protocol): print "param-%s=%s" % (param[0], param[2]), # FIXME: deal with the "register" flag if param[1] == 1L: print "required", print print
<commit_before>#!/usr/bin/python import sys import telepathy from telepathy.interfaces import CONN_MGR_INTERFACE if len(sys.argv) >= 2: manager_name = sys.argv[1] else: manager_name = "haze" service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name object = telepathy.client.ConnectionManager(service_name, object_path) manager = object[CONN_MGR_INTERFACE] print "[ConnectionManager]" print "BusName=%s" % service_name print "ObjectPath=%s" % object_path print for protocol in manager.ListProtocols(): print "[Protocol %s]" % protocol for param in manager.GetParameters(protocol): print "param-%s=%s" % (param[0], param[2]), # FIXME: deal with the "register" flag if param[1] == 1L: print "required", print print <commit_msg>Sort the generated manager file by protocol name 20070831193701-4210b-ede9decef118aba3937b0291512956927333dbe6.gz<commit_after>
#!/usr/bin/python import sys import telepathy from telepathy.interfaces import CONN_MGR_INTERFACE if len(sys.argv) >= 2: manager_name = sys.argv[1] else: manager_name = "haze" service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name object = telepathy.client.ConnectionManager(service_name, object_path) manager = object[CONN_MGR_INTERFACE] print "[ConnectionManager]" print "BusName=%s" % service_name print "ObjectPath=%s" % object_path print protocols = manager.ListProtocols() protocols.sort() for protocol in protocols: print "[Protocol %s]" % protocol for param in manager.GetParameters(protocol): print "param-%s=%s" % (param[0], param[2]), # FIXME: deal with the "register" flag if param[1] == 1L: print "required", print print
#!/usr/bin/python import sys import telepathy from telepathy.interfaces import CONN_MGR_INTERFACE if len(sys.argv) >= 2: manager_name = sys.argv[1] else: manager_name = "haze" service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name object = telepathy.client.ConnectionManager(service_name, object_path) manager = object[CONN_MGR_INTERFACE] print "[ConnectionManager]" print "BusName=%s" % service_name print "ObjectPath=%s" % object_path print for protocol in manager.ListProtocols(): print "[Protocol %s]" % protocol for param in manager.GetParameters(protocol): print "param-%s=%s" % (param[0], param[2]), # FIXME: deal with the "register" flag if param[1] == 1L: print "required", print print Sort the generated manager file by protocol name 20070831193701-4210b-ede9decef118aba3937b0291512956927333dbe6.gz#!/usr/bin/python import sys import telepathy from telepathy.interfaces import CONN_MGR_INTERFACE if len(sys.argv) >= 2: manager_name = sys.argv[1] else: manager_name = "haze" service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name object = telepathy.client.ConnectionManager(service_name, object_path) manager = object[CONN_MGR_INTERFACE] print "[ConnectionManager]" print "BusName=%s" % service_name print "ObjectPath=%s" % object_path print protocols = manager.ListProtocols() protocols.sort() for protocol in protocols: print "[Protocol %s]" % protocol for param in manager.GetParameters(protocol): print "param-%s=%s" % (param[0], param[2]), # FIXME: deal with the "register" flag if param[1] == 1L: print "required", print print
<commit_before>#!/usr/bin/python import sys import telepathy from telepathy.interfaces import CONN_MGR_INTERFACE if len(sys.argv) >= 2: manager_name = sys.argv[1] else: manager_name = "haze" service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name object = telepathy.client.ConnectionManager(service_name, object_path) manager = object[CONN_MGR_INTERFACE] print "[ConnectionManager]" print "BusName=%s" % service_name print "ObjectPath=%s" % object_path print for protocol in manager.ListProtocols(): print "[Protocol %s]" % protocol for param in manager.GetParameters(protocol): print "param-%s=%s" % (param[0], param[2]), # FIXME: deal with the "register" flag if param[1] == 1L: print "required", print print <commit_msg>Sort the generated manager file by protocol name 20070831193701-4210b-ede9decef118aba3937b0291512956927333dbe6.gz<commit_after>#!/usr/bin/python import sys import telepathy from telepathy.interfaces import CONN_MGR_INTERFACE if len(sys.argv) >= 2: manager_name = sys.argv[1] else: manager_name = "haze" service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name object = telepathy.client.ConnectionManager(service_name, object_path) manager = object[CONN_MGR_INTERFACE] print "[ConnectionManager]" print "BusName=%s" % service_name print "ObjectPath=%s" % object_path print protocols = manager.ListProtocols() protocols.sort() for protocol in protocols: print "[Protocol %s]" % protocol for param in manager.GetParameters(protocol): print "param-%s=%s" % (param[0], param[2]), # FIXME: deal with the "register" flag if param[1] == 1L: print "required", print print
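The two added lines exist only to make the generated file deterministic; in-place list.sort() works, and sorted() expresses the same intent in one step without mutating the list the manager returned:

protocols = ["yahoo", "aim", "jabber"]
for protocol in sorted(protocols):  # stable alphabetical order on every run
    print("[Protocol %s]" % protocol)
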
967b8cd4d11e8619a8da2b6f9935846559df7347
bluesky/callbacks/__init__.py
bluesky/callbacks/__init__.py
from .core import (CallbackBase, CallbackCounter, print_metadata, collector, LiveMesh, LivePlot, LiveRaster, LiveTable, CollectThenCompute, _get_obj_fields)
from .core import (CallbackBase, CallbackCounter, print_metadata, collector, LiveMesh, LivePlot, LiveRaster, LiveTable, CollectThenCompute, LiveSpecFile, _get_obj_fields)
Add LiveSpecFile to callbacks API.
API: Add LiveSpecFile to callbacks API.
Python
bsd-3-clause
ericdill/bluesky,ericdill/bluesky
from .core import (CallbackBase, CallbackCounter, print_metadata, collector, LiveMesh, LivePlot, LiveRaster, LiveTable, CollectThenCompute, _get_obj_fields) API: Add LiveSpecFile to callbacks API.
from .core import (CallbackBase, CallbackCounter, print_metadata, collector, LiveMesh, LivePlot, LiveRaster, LiveTable, CollectThenCompute, LiveSpecFile, _get_obj_fields)
<commit_before>from .core import (CallbackBase, CallbackCounter, print_metadata, collector, LiveMesh, LivePlot, LiveRaster, LiveTable, CollectThenCompute, _get_obj_fields) <commit_msg>API: Add LiveSpecFile to callbacks API.<commit_after>
from .core import (CallbackBase, CallbackCounter, print_metadata, collector, LiveMesh, LivePlot, LiveRaster, LiveTable, CollectThenCompute, LiveSpecFile, _get_obj_fields)
from .core import (CallbackBase, CallbackCounter, print_metadata, collector, LiveMesh, LivePlot, LiveRaster, LiveTable, CollectThenCompute, _get_obj_fields) API: Add LiveSpecFile to callbacks API.from .core import (CallbackBase, CallbackCounter, print_metadata, collector, LiveMesh, LivePlot, LiveRaster, LiveTable, CollectThenCompute, LiveSpecFile, _get_obj_fields)
<commit_before>from .core import (CallbackBase, CallbackCounter, print_metadata, collector, LiveMesh, LivePlot, LiveRaster, LiveTable, CollectThenCompute, _get_obj_fields) <commit_msg>API: Add LiveSpecFile to callbacks API.<commit_after>from .core import (CallbackBase, CallbackCounter, print_metadata, collector, LiveMesh, LivePlot, LiveRaster, LiveTable, CollectThenCompute, LiveSpecFile, _get_obj_fields)
788f9a920fdafe6d341432f46295bb737c57cccd
moteconnection/serial_ports.py
moteconnection/serial_ports.py
__author__ = "Raido Pahtma" __license__ = "MIT" import glob import sys import os def _list_windows_serial_ports(): raise NotImplementedError("windows support") def _list_unix_serial_ports(additional=None): ports = [] port_list = glob.glob('/dev/ttyUSB*') + glob.glob('/dev/ttyAMA*') + glob.glob('/dev/ttyMI*') if additional is not None: for location in additional: port_list += glob.glob(location) for port in port_list: if os.path.exists(port): ports.append(port) return ports def list_serial_ports(additional=None): if sys.platform == "win32": return _list_windows_serial_ports() return _list_unix_serial_ports(additional)
__author__ = "Raido Pahtma" __license__ = "MIT" import re import os import sys import glob import serial def _list_windows_serial_ports(): ports = [] for i in range(256): try: s = serial.Serial(i) ports.append(s.portstr) s.close() except serial.SerialException as e: msg = e.message.lower() if msg.find("could not open port") != -1 and msg.find("access is denied") != -1: match = re.match("could not open port '(\w+)'", msg) if match is not None: ports.append(match.group(1).upper()) return ports def _list_unix_serial_ports(additional=None): ports = [] port_list = glob.glob('/dev/ttyUSB*') + glob.glob('/dev/ttyAMA*') + glob.glob('/dev/ttyMI*') if additional is not None: for location in additional: port_list += glob.glob(location) for port in port_list: if os.path.exists(port): ports.append(port) return ports def list_serial_ports(additional=None): if sys.platform == "win32": return _list_windows_serial_ports() return _list_unix_serial_ports(additional) if __name__ == "__main__": print list_serial_ports()
Support for Windows serial ports.
Support for Windows serial ports.
Python
mit
proactivity-lab/python-moteconnection,proactivity-lab/py-moteconnection
__author__ = "Raido Pahtma" __license__ = "MIT" import glob import sys import os def _list_windows_serial_ports(): raise NotImplementedError("windows support") def _list_unix_serial_ports(additional=None): ports = [] port_list = glob.glob('/dev/ttyUSB*') + glob.glob('/dev/ttyAMA*') + glob.glob('/dev/ttyMI*') if additional is not None: for location in additional: port_list += glob.glob(location) for port in port_list: if os.path.exists(port): ports.append(port) return ports def list_serial_ports(additional=None): if sys.platform == "win32": return _list_windows_serial_ports() return _list_unix_serial_ports(additional) Support for Windows serial ports.
__author__ = "Raido Pahtma" __license__ = "MIT" import re import os import sys import glob import serial def _list_windows_serial_ports(): ports = [] for i in range(256): try: s = serial.Serial(i) ports.append(s.portstr) s.close() except serial.SerialException as e: msg = e.message.lower() if msg.find("could not open port") != -1 and msg.find("access is denied") != -1: match = re.match("could not open port '(\w+)'", msg) if match is not None: ports.append(match.group(1).upper()) return ports def _list_unix_serial_ports(additional=None): ports = [] port_list = glob.glob('/dev/ttyUSB*') + glob.glob('/dev/ttyAMA*') + glob.glob('/dev/ttyMI*') if additional is not None: for location in additional: port_list += glob.glob(location) for port in port_list: if os.path.exists(port): ports.append(port) return ports def list_serial_ports(additional=None): if sys.platform == "win32": return _list_windows_serial_ports() return _list_unix_serial_ports(additional) if __name__ == "__main__": print list_serial_ports()
<commit_before>__author__ = "Raido Pahtma" __license__ = "MIT" import glob import sys import os def _list_windows_serial_ports(): raise NotImplementedError("windows support") def _list_unix_serial_ports(additional=None): ports = [] port_list = glob.glob('/dev/ttyUSB*') + glob.glob('/dev/ttyAMA*') + glob.glob('/dev/ttyMI*') if additional is not None: for location in additional: port_list += glob.glob(location) for port in port_list: if os.path.exists(port): ports.append(port) return ports def list_serial_ports(additional=None): if sys.platform == "win32": return _list_windows_serial_ports() return _list_unix_serial_ports(additional) <commit_msg>Support for Windows serial ports.<commit_after>
__author__ = "Raido Pahtma" __license__ = "MIT" import re import os import sys import glob import serial def _list_windows_serial_ports(): ports = [] for i in range(256): try: s = serial.Serial(i) ports.append(s.portstr) s.close() except serial.SerialException as e: msg = e.message.lower() if msg.find("could not open port") != -1 and msg.find("access is denied") != -1: match = re.match("could not open port '(\w+)'", msg) if match is not None: ports.append(match.group(1).upper()) return ports def _list_unix_serial_ports(additional=None): ports = [] port_list = glob.glob('/dev/ttyUSB*') + glob.glob('/dev/ttyAMA*') + glob.glob('/dev/ttyMI*') if additional is not None: for location in additional: port_list += glob.glob(location) for port in port_list: if os.path.exists(port): ports.append(port) return ports def list_serial_ports(additional=None): if sys.platform == "win32": return _list_windows_serial_ports() return _list_unix_serial_ports(additional) if __name__ == "__main__": print list_serial_ports()
__author__ = "Raido Pahtma" __license__ = "MIT" import glob import sys import os def _list_windows_serial_ports(): raise NotImplementedError("windows support") def _list_unix_serial_ports(additional=None): ports = [] port_list = glob.glob('/dev/ttyUSB*') + glob.glob('/dev/ttyAMA*') + glob.glob('/dev/ttyMI*') if additional is not None: for location in additional: port_list += glob.glob(location) for port in port_list: if os.path.exists(port): ports.append(port) return ports def list_serial_ports(additional=None): if sys.platform == "win32": return _list_windows_serial_ports() return _list_unix_serial_ports(additional) Support for Windows serial ports.__author__ = "Raido Pahtma" __license__ = "MIT" import re import os import sys import glob import serial def _list_windows_serial_ports(): ports = [] for i in range(256): try: s = serial.Serial(i) ports.append(s.portstr) s.close() except serial.SerialException as e: msg = e.message.lower() if msg.find("could not open port") != -1 and msg.find("access is denied") != -1: match = re.match("could not open port '(\w+)'", msg) if match is not None: ports.append(match.group(1).upper()) return ports def _list_unix_serial_ports(additional=None): ports = [] port_list = glob.glob('/dev/ttyUSB*') + glob.glob('/dev/ttyAMA*') + glob.glob('/dev/ttyMI*') if additional is not None: for location in additional: port_list += glob.glob(location) for port in port_list: if os.path.exists(port): ports.append(port) return ports def list_serial_ports(additional=None): if sys.platform == "win32": return _list_windows_serial_ports() return _list_unix_serial_ports(additional) if __name__ == "__main__": print list_serial_ports()
<commit_before>__author__ = "Raido Pahtma" __license__ = "MIT" import glob import sys import os def _list_windows_serial_ports(): raise NotImplementedError("windows support") def _list_unix_serial_ports(additional=None): ports = [] port_list = glob.glob('/dev/ttyUSB*') + glob.glob('/dev/ttyAMA*') + glob.glob('/dev/ttyMI*') if additional is not None: for location in additional: port_list += glob.glob(location) for port in port_list: if os.path.exists(port): ports.append(port) return ports def list_serial_ports(additional=None): if sys.platform == "win32": return _list_windows_serial_ports() return _list_unix_serial_ports(additional) <commit_msg>Support for Windows serial ports.<commit_after>__author__ = "Raido Pahtma" __license__ = "MIT" import re import os import sys import glob import serial def _list_windows_serial_ports(): ports = [] for i in range(256): try: s = serial.Serial(i) ports.append(s.portstr) s.close() except serial.SerialException as e: msg = e.message.lower() if msg.find("could not open port") != -1 and msg.find("access is denied") != -1: match = re.match("could not open port '(\w+)'", msg) if match is not None: ports.append(match.group(1).upper()) return ports def _list_unix_serial_ports(additional=None): ports = [] port_list = glob.glob('/dev/ttyUSB*') + glob.glob('/dev/ttyAMA*') + glob.glob('/dev/ttyMI*') if additional is not None: for location in additional: port_list += glob.glob(location) for port in port_list: if os.path.exists(port): ports.append(port) return ports def list_serial_ports(additional=None): if sys.platform == "win32": return _list_windows_serial_ports() return _list_unix_serial_ports(additional) if __name__ == "__main__": print list_serial_ports()
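The Windows branch above enumerates ports by attempting to open each index and parsing the access-denied message, which is fragile across pyserial versions and locales. pyserial 3.x ships an enumeration helper that sidesteps the probing entirely; a sketch assuming that version is installed:

from serial.tools import list_ports

def list_serial_ports():
    # Each ListPortInfo exposes .device, e.g. "COM3" on Windows or "/dev/ttyUSB0" on Linux.
    return [port.device for port in list_ports.comports()]
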
8fb958821cd58016c56b5eee2c6531827e4c57b8
modules/juliet_module.py
modules/juliet_module.py
class module: mod_name = "unnamed_module"; mod_id = -1; mod_rect = None; mod_surface = None; mod_attribs = []; def __init__(self, _id): print("Initializing generic module (This shouldn't happen...)");
from pygame import Rect class module: mod_name = "unnamed_module" mod_id = -1 mod_size = Rect(0,0,0,0) def __init__(self, _id = -1): print("Initializing generic module (This shouldn't happen...)") def draw(self, surf): "Takes a surface object and blits its data onto it" print("Draw call of generic module (This shouldn't happen...)") def update(self): "Update this module's internal state (do things like time updates, get weather, etc." print("Update call of generic module (This shouldn't happen...)") def new_module(_id = -1): return module(_id) test_module = new_module()
Change module class to use Rect for size and take a surface as an argument to draw()
Change module class to use Rect for size and take a surface as an argument to draw()
Python
bsd-2-clause
halfbro/juliet
class module: mod_name = "unnamed_module"; mod_id = -1; mod_rect = None; mod_surface = None; mod_attribs = []; def __init__(self, _id): print("Initializing generic module (This shouldn't happen...)"); Change module class to use Rect for size and take a surface as an argument to draw()
from pygame import Rect class module: mod_name = "unnamed_module" mod_id = -1 mod_size = Rect(0,0,0,0) def __init__(self, _id = -1): print("Initializing generic module (This shouldn't happen...)") def draw(self, surf): "Takes a surface object and blits its data onto it" print("Draw call of generic module (This shouldn't happen...)") def update(self): "Update this module's internal state (do things like time updates, get weather, etc." print("Update call of generic module (This shouldn't happen...)") def new_module(_id = -1): return module(_id) test_module = new_module()
<commit_before>class module: mod_name = "unnamed_module"; mod_id = -1; mod_rect = None; mod_surface = None; mod_attribs = []; def __init__(self, _id): print("Initializing generic module (This shouldn't happen...)"); <commit_msg>Change module class to use Rect for size and take a surface as an argument to draw()<commit_after>
from pygame import Rect class module: mod_name = "unnamed_module" mod_id = -1 mod_size = Rect(0,0,0,0) def __init__(self, _id = -1): print("Initializing generic module (This shouldn't happen...)") def draw(self, surf): "Takes a surface object and blits its data onto it" print("Draw call of generic module (This shouldn't happen...)") def update(self): "Update this module's internal state (do things like time updates, get weather, etc." print("Update call of generic module (This shouldn't happen...)") def new_module(_id = -1): return module(_id) test_module = new_module()
class module: mod_name = "unnamed_module"; mod_id = -1; mod_rect = None; mod_surface = None; mod_attribs = []; def __init__(self, _id): print("Initializing generic module (This shouldn't happen...)"); Change module class to use Rect for size and take a surface as an argument to draw()from pygame import Rect class module: mod_name = "unnamed_module" mod_id = -1 mod_size = Rect(0,0,0,0) def __init__(self, _id = -1): print("Initializing generic module (This shouldn't happen...)") def draw(self, surf): "Takes a surface object and blits its data onto it" print("Draw call of generic module (This shouldn't happen...)") def update(self): "Update this module's internal state (do things like time updates, get weather, etc." print("Update call of generic module (This shouldn't happen...)") def new_module(_id = -1): return module(_id) test_module = new_module()
<commit_before>class module: mod_name = "unnamed_module"; mod_id = -1; mod_rect = None; mod_surface = None; mod_attribs = []; def __init__(self, _id): print("Initializing generic module (This shouldn't happen...)"); <commit_msg>Change module class to use Rect for size and take a surface as an argument to draw()<commit_after>from pygame import Rect class module: mod_name = "unnamed_module" mod_id = -1 mod_size = Rect(0,0,0,0) def __init__(self, _id = -1): print("Initializing generic module (This shouldn't happen...)") def draw(self, surf): "Takes a surface object and blits its data onto it" print("Draw call of generic module (This shouldn't happen...)") def update(self): "Update this module's internal state (do things like time updates, get weather, etc." print("Update call of generic module (This shouldn't happen...)") def new_module(_id = -1): return module(_id) test_module = new_module()
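Under the new contract, draw(self, surf) blits onto a caller-supplied surface instead of owning one. A minimal subclass sketch, assuming the record's module base class is in scope, pygame is installed, and pygame.font.init() (or pygame.init()) has already run; the clock content is illustrative only:

import time
import pygame
from pygame import Rect

class clock_module(module):
    mod_name = "clock_module"
    mod_size = Rect(0, 0, 120, 32)
    text = "--:--:--"  # placeholder shown until the first update()

    def update(self):
        self.text = time.strftime("%H:%M:%S")

    def draw(self, surf):
        font = pygame.font.SysFont(None, 24)  # default system font
        surf.blit(font.render(self.text, True, (255, 255, 255)), self.mod_size.topleft)
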
56b84d3271ab2312a0bdbd27c6ee3ad60c139920
search/urls.py
search/urls.py
__author__ = 'Nick' from django.conf.urls import url from search import views urlpatterns = [ url(r'^$', views.index, name='index'), url(r'^query$', views.query, name='query') ]
__author__ = 'Nick' from django.conf.urls import url from search import views urlpatterns = [ url(r'^$', views.index, name='index'), url(r'^query/$', views.query, name='query') ]
Make sure django doesn't mangle route
Make sure django doesn't mangle route
Python
mit
nh0815/PySearch,nh0815/PySearch
__author__ = 'Nick' from django.conf.urls import url from search import views urlpatterns = [ url(r'^$', views.index, name='index'), url(r'^query$', views.query, name='query') ]Make sure django doesn't mangle route
__author__ = 'Nick' from django.conf.urls import url from search import views urlpatterns = [ url(r'^$', views.index, name='index'), url(r'^query/$', views.query, name='query') ]
<commit_before>__author__ = 'Nick' from django.conf.urls import url from search import views urlpatterns = [ url(r'^$', views.index, name='index'), url(r'^query$', views.query, name='query') ]<commit_msg>Make sure django doesn't mangle route<commit_after>
__author__ = 'Nick' from django.conf.urls import url from search import views urlpatterns = [ url(r'^$', views.index, name='index'), url(r'^query/$', views.query, name='query') ]
__author__ = 'Nick' from django.conf.urls import url from search import views urlpatterns = [ url(r'^$', views.index, name='index'), url(r'^query$', views.query, name='query') ]Make sure django doesn't mangle route__author__ = 'Nick' from django.conf.urls import url from search import views urlpatterns = [ url(r'^$', views.index, name='index'), url(r'^query/$', views.query, name='query') ]
<commit_before>__author__ = 'Nick' from django.conf.urls import url from search import views urlpatterns = [ url(r'^$', views.index, name='index'), url(r'^query$', views.query, name='query') ]<commit_msg>Make sure django doesn't mangle route<commit_after>__author__ = 'Nick' from django.conf.urls import url from search import views urlpatterns = [ url(r'^$', views.index, name='index'), url(r'^query/$', views.query, name='query') ]
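The trailing slash matters because of Django's APPEND_SLASH behaviour: with the default CommonMiddleware, a GET to the bare /query is 301-redirected to /query/, while a POST cannot be transparently redirected because the body would be dropped, so the pattern should match the canonical slashed form directly:

from django.conf.urls import url
from search import views

urlpatterns = [
    # r'^query$' would match only the bare /query and leave /query/ unmatched;
    # with the slash, CommonMiddleware redirects bare GETs here, and clients
    # that POST target /query/ directly.
    url(r'^query/$', views.query, name='query'),
]
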
c035ee8273b3582133c78353259de026ce81d0b5
app/assets.py
app/assets.py
from flask.ext.assets import Bundle app_css = Bundle( 'app.scss', filters='scss', output='styles/app.css' ) app_js = Bundle( 'app.js', filters='jsmin', output='scripts/app.js' ) vendor_css = Bundle( 'vendor/semantic.min.css', output='styles/vendor.css' ) vendor_js = Bundle( 'vendor/jquery.min.js', 'vendor/semantic.min.js', 'vendor/tablesort.min.js', filters='jsmin', output='scripts/vendor.js' )
from flask.ext.assets import Bundle app_css = Bundle( '*.scss', filters='scss', output='styles/app.css' ) app_js = Bundle( 'app.js', filters='jsmin', output='scripts/app.js' ) vendor_css = Bundle( 'vendor/semantic.min.css', output='styles/vendor.css' ) vendor_js = Bundle( 'vendor/jquery.min.js', 'vendor/semantic.min.js', 'vendor/tablesort.min.js', filters='jsmin', output='scripts/vendor.js' )
Generalize scss bundle to track all scss files
Generalize scss bundle to track all scss files
Python
mit
hack4impact/flask-base,ColinHaley/Konsole,ColinHaley/Konsole,aharelick/netter-center,AsylumConnect/asylum-connect-catalog,AsylumConnect/asylum-connect-catalog,tobymccann/flask-base,AsylumConnect/asylum-connect-catalog,AsylumConnect/asylum-connect-catalog,hack4impact/flask-base,hack4impact/asylum-connect-catalog,tobymccann/flask-base,hack4impact/asylum-connect-catalog,tobymccann/flask-base,ColinHaley/Konsole,hack4impact/asylum-connect-catalog,aharelick/netter-center,hack4impact/asylum-connect-catalog,hack4impact/flask-base,aharelick/netter-center
from flask.ext.assets import Bundle app_css = Bundle( 'app.scss', filters='scss', output='styles/app.css' ) app_js = Bundle( 'app.js', filters='jsmin', output='scripts/app.js' ) vendor_css = Bundle( 'vendor/semantic.min.css', output='styles/vendor.css' ) vendor_js = Bundle( 'vendor/jquery.min.js', 'vendor/semantic.min.js', 'vendor/tablesort.min.js', filters='jsmin', output='scripts/vendor.js' ) Generalize scss bundle to track all scss files
from flask.ext.assets import Bundle app_css = Bundle( '*.scss', filters='scss', output='styles/app.css' ) app_js = Bundle( 'app.js', filters='jsmin', output='scripts/app.js' ) vendor_css = Bundle( 'vendor/semantic.min.css', output='styles/vendor.css' ) vendor_js = Bundle( 'vendor/jquery.min.js', 'vendor/semantic.min.js', 'vendor/tablesort.min.js', filters='jsmin', output='scripts/vendor.js' )
<commit_before>from flask.ext.assets import Bundle app_css = Bundle( 'app.scss', filters='scss', output='styles/app.css' ) app_js = Bundle( 'app.js', filters='jsmin', output='scripts/app.js' ) vendor_css = Bundle( 'vendor/semantic.min.css', output='styles/vendor.css' ) vendor_js = Bundle( 'vendor/jquery.min.js', 'vendor/semantic.min.js', 'vendor/tablesort.min.js', filters='jsmin', output='scripts/vendor.js' ) <commit_msg>Generalize scss bundle to track all scss files<commit_after>
from flask.ext.assets import Bundle app_css = Bundle( '*.scss', filters='scss', output='styles/app.css' ) app_js = Bundle( 'app.js', filters='jsmin', output='scripts/app.js' ) vendor_css = Bundle( 'vendor/semantic.min.css', output='styles/vendor.css' ) vendor_js = Bundle( 'vendor/jquery.min.js', 'vendor/semantic.min.js', 'vendor/tablesort.min.js', filters='jsmin', output='scripts/vendor.js' )
from flask.ext.assets import Bundle app_css = Bundle( 'app.scss', filters='scss', output='styles/app.css' ) app_js = Bundle( 'app.js', filters='jsmin', output='scripts/app.js' ) vendor_css = Bundle( 'vendor/semantic.min.css', output='styles/vendor.css' ) vendor_js = Bundle( 'vendor/jquery.min.js', 'vendor/semantic.min.js', 'vendor/tablesort.min.js', filters='jsmin', output='scripts/vendor.js' ) Generalize scss bundle to track all scss filesfrom flask.ext.assets import Bundle app_css = Bundle( '*.scss', filters='scss', output='styles/app.css' ) app_js = Bundle( 'app.js', filters='jsmin', output='scripts/app.js' ) vendor_css = Bundle( 'vendor/semantic.min.css', output='styles/vendor.css' ) vendor_js = Bundle( 'vendor/jquery.min.js', 'vendor/semantic.min.js', 'vendor/tablesort.min.js', filters='jsmin', output='scripts/vendor.js' )
<commit_before>from flask.ext.assets import Bundle app_css = Bundle( 'app.scss', filters='scss', output='styles/app.css' ) app_js = Bundle( 'app.js', filters='jsmin', output='scripts/app.js' ) vendor_css = Bundle( 'vendor/semantic.min.css', output='styles/vendor.css' ) vendor_js = Bundle( 'vendor/jquery.min.js', 'vendor/semantic.min.js', 'vendor/tablesort.min.js', filters='jsmin', output='scripts/vendor.js' ) <commit_msg>Generalize scss bundle to track all scss files<commit_after>from flask.ext.assets import Bundle app_css = Bundle( '*.scss', filters='scss', output='styles/app.css' ) app_js = Bundle( 'app.js', filters='jsmin', output='scripts/app.js' ) vendor_css = Bundle( 'vendor/semantic.min.css', output='styles/vendor.css' ) vendor_js = Bundle( 'vendor/jquery.min.js', 'vendor/semantic.min.js', 'vendor/tablesort.min.js', filters='jsmin', output='scripts/vendor.js' )
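With the '*.scss' glob, webassets picks up every top-level SCSS file in the static folder, so adding a stylesheet no longer requires editing the bundle definition. The bundles still have to be registered on the Flask app; a wiring sketch using the modern flask_assets import path (the record itself uses the older flask.ext.assets form):

from flask import Flask
from flask_assets import Environment
from app.assets import app_css, app_js, vendor_css, vendor_js

app = Flask(__name__)
assets = Environment(app)
assets.register('app_css', app_css)
assets.register('app_js', app_js)
assets.register('vendor_css', vendor_css)
assets.register('vendor_js', vendor_js)
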
1cd5126c90c87df1bfe10434aa850913aed560b4
splice/default_settings.py
splice/default_settings.py
import os


class DefaultConfig(object):
    """
    Configuration suitable for use for development
    """
    DEBUG = True
    APPLICATION_ROOT = None
    JSONIFY_PRETTYPRINT_REGULAR = True
    STATIC_ENABLED_ENVS = {"dev", "test"}
    ENVIRONMENT = "dev"
    SECRET_KEY = "moz-splice-development-key"
    TEMPLATE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "templates")
    STATIC_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "static")
    FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures")
    COUNTRY_FIXTURE_PATH = os.path.join(FIXTURES_DIR, "iso3166.csv")
    LOCALE_FIXTURE_PATH = os.path.join(FIXTURES_DIR, "all-locales.mozilla-aurora")

    SQLALCHEMY_DATABASE_URI = "postgres://postgres:p@ssw0rd66@localhost/mozsplice"
    SQLALCHEMY_ECHO = False
    SQLALCHEMY_POOL_SIZE = 5
    SQLALCHEMY_POOL_TIMEOUT = 10
import os


class DefaultConfig(object):
    """
    Configuration suitable for use for development
    """
    DEBUG = True
    APPLICATION_ROOT = None
    JSONIFY_PRETTYPRINT_REGULAR = True
    STATIC_ENABLED_ENVS = {"dev", "test"}
    ENVIRONMENT = "dev"
    SECRET_KEY = "moz-splice-development-key"
    TEMPLATE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "templates")
    STATIC_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "static")
    FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures")
    COUNTRY_FIXTURE_PATH = os.path.join(FIXTURES_DIR, "iso3166.csv")
    LOCALE_FIXTURE_PATH = os.path.join(FIXTURES_DIR, "all-locales.mozilla-aurora")

    SQLALCHEMY_DATABASE_URI = "postgres://localhost/mozsplice"
    SQLALCHEMY_ECHO = False
    SQLALCHEMY_POOL_SIZE = 5
    SQLALCHEMY_POOL_TIMEOUT = 10
Revert "adding slot_index to impression_stats_daily"
Revert "adding slot_index to impression_stats_daily" This reverts commit 71202d2f9e2cafa5bf8ef9e0c6a355a8d65d5c7a.
Python
mpl-2.0
tkiethanom/splice,mostlygeek/splice,rlr/splice,mozilla/splice,ncloudioj/splice,oyiptong/splice
import os


class DefaultConfig(object):
    """
    Configuration suitable for use for development
    """
    DEBUG = True
    APPLICATION_ROOT = None
    JSONIFY_PRETTYPRINT_REGULAR = True
    STATIC_ENABLED_ENVS = {"dev", "test"}
    ENVIRONMENT = "dev"
    SECRET_KEY = "moz-splice-development-key"
    TEMPLATE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "templates")
    STATIC_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "static")
    FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures")
    COUNTRY_FIXTURE_PATH = os.path.join(FIXTURES_DIR, "iso3166.csv")
    LOCALE_FIXTURE_PATH = os.path.join(FIXTURES_DIR, "all-locales.mozilla-aurora")

    SQLALCHEMY_DATABASE_URI = "postgres://postgres:p@ssw0rd66@localhost/mozsplice"
    SQLALCHEMY_ECHO = False
    SQLALCHEMY_POOL_SIZE = 5
    SQLALCHEMY_POOL_TIMEOUT = 10
Revert "adding slot_index to impression_stats_daily"

This reverts commit 71202d2f9e2cafa5bf8ef9e0c6a355a8d65d5c7a.
import os


class DefaultConfig(object):
    """
    Configuration suitable for use for development
    """
    DEBUG = True
    APPLICATION_ROOT = None
    JSONIFY_PRETTYPRINT_REGULAR = True
    STATIC_ENABLED_ENVS = {"dev", "test"}
    ENVIRONMENT = "dev"
    SECRET_KEY = "moz-splice-development-key"
    TEMPLATE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "templates")
    STATIC_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "static")
    FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures")
    COUNTRY_FIXTURE_PATH = os.path.join(FIXTURES_DIR, "iso3166.csv")
    LOCALE_FIXTURE_PATH = os.path.join(FIXTURES_DIR, "all-locales.mozilla-aurora")

    SQLALCHEMY_DATABASE_URI = "postgres://localhost/mozsplice"
    SQLALCHEMY_ECHO = False
    SQLALCHEMY_POOL_SIZE = 5
    SQLALCHEMY_POOL_TIMEOUT = 10
<commit_before>import os


class DefaultConfig(object):
    """
    Configuration suitable for use for development
    """
    DEBUG = True
    APPLICATION_ROOT = None
    JSONIFY_PRETTYPRINT_REGULAR = True
    STATIC_ENABLED_ENVS = {"dev", "test"}
    ENVIRONMENT = "dev"
    SECRET_KEY = "moz-splice-development-key"
    TEMPLATE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "templates")
    STATIC_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "static")
    FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures")
    COUNTRY_FIXTURE_PATH = os.path.join(FIXTURES_DIR, "iso3166.csv")
    LOCALE_FIXTURE_PATH = os.path.join(FIXTURES_DIR, "all-locales.mozilla-aurora")

    SQLALCHEMY_DATABASE_URI = "postgres://postgres:p@ssw0rd66@localhost/mozsplice"
    SQLALCHEMY_ECHO = False
    SQLALCHEMY_POOL_SIZE = 5
    SQLALCHEMY_POOL_TIMEOUT = 10
<commit_msg>Revert "adding slot_index to impression_stats_daily"

This reverts commit 71202d2f9e2cafa5bf8ef9e0c6a355a8d65d5c7a.<commit_after>
import os


class DefaultConfig(object):
    """
    Configuration suitable for use for development
    """
    DEBUG = True
    APPLICATION_ROOT = None
    JSONIFY_PRETTYPRINT_REGULAR = True
    STATIC_ENABLED_ENVS = {"dev", "test"}
    ENVIRONMENT = "dev"
    SECRET_KEY = "moz-splice-development-key"
    TEMPLATE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "templates")
    STATIC_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "static")
    FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures")
    COUNTRY_FIXTURE_PATH = os.path.join(FIXTURES_DIR, "iso3166.csv")
    LOCALE_FIXTURE_PATH = os.path.join(FIXTURES_DIR, "all-locales.mozilla-aurora")

    SQLALCHEMY_DATABASE_URI = "postgres://localhost/mozsplice"
    SQLALCHEMY_ECHO = False
    SQLALCHEMY_POOL_SIZE = 5
    SQLALCHEMY_POOL_TIMEOUT = 10
import os


class DefaultConfig(object):
    """
    Configuration suitable for use for development
    """
    DEBUG = True
    APPLICATION_ROOT = None
    JSONIFY_PRETTYPRINT_REGULAR = True
    STATIC_ENABLED_ENVS = {"dev", "test"}
    ENVIRONMENT = "dev"
    SECRET_KEY = "moz-splice-development-key"
    TEMPLATE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "templates")
    STATIC_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "static")
    FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures")
    COUNTRY_FIXTURE_PATH = os.path.join(FIXTURES_DIR, "iso3166.csv")
    LOCALE_FIXTURE_PATH = os.path.join(FIXTURES_DIR, "all-locales.mozilla-aurora")

    SQLALCHEMY_DATABASE_URI = "postgres://postgres:p@ssw0rd66@localhost/mozsplice"
    SQLALCHEMY_ECHO = False
    SQLALCHEMY_POOL_SIZE = 5
    SQLALCHEMY_POOL_TIMEOUT = 10
Revert "adding slot_index to impression_stats_daily"

This reverts commit 71202d2f9e2cafa5bf8ef9e0c6a355a8d65d5c7a.
import os


class DefaultConfig(object):
    """
    Configuration suitable for use for development
    """
    DEBUG = True
    APPLICATION_ROOT = None
    JSONIFY_PRETTYPRINT_REGULAR = True
    STATIC_ENABLED_ENVS = {"dev", "test"}
    ENVIRONMENT = "dev"
    SECRET_KEY = "moz-splice-development-key"
    TEMPLATE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "templates")
    STATIC_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "static")
    FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures")
    COUNTRY_FIXTURE_PATH = os.path.join(FIXTURES_DIR, "iso3166.csv")
    LOCALE_FIXTURE_PATH = os.path.join(FIXTURES_DIR, "all-locales.mozilla-aurora")

    SQLALCHEMY_DATABASE_URI = "postgres://localhost/mozsplice"
    SQLALCHEMY_ECHO = False
    SQLALCHEMY_POOL_SIZE = 5
    SQLALCHEMY_POOL_TIMEOUT = 10
<commit_before>import os


class DefaultConfig(object):
    """
    Configuration suitable for use for development
    """
    DEBUG = True
    APPLICATION_ROOT = None
    JSONIFY_PRETTYPRINT_REGULAR = True
    STATIC_ENABLED_ENVS = {"dev", "test"}
    ENVIRONMENT = "dev"
    SECRET_KEY = "moz-splice-development-key"
    TEMPLATE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "templates")
    STATIC_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "static")
    FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures")
    COUNTRY_FIXTURE_PATH = os.path.join(FIXTURES_DIR, "iso3166.csv")
    LOCALE_FIXTURE_PATH = os.path.join(FIXTURES_DIR, "all-locales.mozilla-aurora")

    SQLALCHEMY_DATABASE_URI = "postgres://postgres:p@ssw0rd66@localhost/mozsplice"
    SQLALCHEMY_ECHO = False
    SQLALCHEMY_POOL_SIZE = 5
    SQLALCHEMY_POOL_TIMEOUT = 10
<commit_msg>Revert "adding slot_index to impression_stats_daily"

This reverts commit 71202d2f9e2cafa5bf8ef9e0c6a355a8d65d5c7a.<commit_after>import os


class DefaultConfig(object):
    """
    Configuration suitable for use for development
    """
    DEBUG = True
    APPLICATION_ROOT = None
    JSONIFY_PRETTYPRINT_REGULAR = True
    STATIC_ENABLED_ENVS = {"dev", "test"}
    ENVIRONMENT = "dev"
    SECRET_KEY = "moz-splice-development-key"
    TEMPLATE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "templates")
    STATIC_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "static")
    FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures")
    COUNTRY_FIXTURE_PATH = os.path.join(FIXTURES_DIR, "iso3166.csv")
    LOCALE_FIXTURE_PATH = os.path.join(FIXTURES_DIR, "all-locales.mozilla-aurora")

    SQLALCHEMY_DATABASE_URI = "postgres://localhost/mozsplice"
    SQLALCHEMY_ECHO = False
    SQLALCHEMY_POOL_SIZE = 5
    SQLALCHEMY_POOL_TIMEOUT = 10
a5b750b9800b60242e72d9d066a46f98b8a0325e
test/test_recordings.py
test/test_recordings.py
import pytest
import json


class TestRecordings:
    def test_unprocessed_recording_doesnt_return_processed_jwt(self, helper):
        print("If a new user uploads a recording")
        bob = helper.given_new_user(self, "bob_limit")
        bobsGroup = helper.make_unique_group_name(self, "bobs_group")
        bob.create_group(bobsGroup)
        bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
        recording = bobsDevice.upload_recording()

        print("And then fetches it before it has been processed")
        response = bob.get_recording_response(recording)

        print(" The response should have a JWT for the raw file")
        assert "downloadRawJWT" in response

        print(" But the response should not have a JWT for the processed file")
        assert "downloadFileJWT" not in response
import pytest
import json


class TestRecordings:
    def test_unprocessed_recording_doesnt_return_processed_jwt(self, helper):
        print("If a new user uploads a recording")
        bob = helper.given_new_user(self, "bob_limit")
        bobsGroup = helper.make_unique_group_name(self, "bobs_group")
        bob.create_group(bobsGroup)
        bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
        recording = bobsDevice.upload_recording()

        print("And then fetches it before it has been processed")
        response = bob.get_recording_response(recording)

        print(" The response should have a JWT for the raw file")
        assert "downloadRawJWT" in response

        print(" But the response should not have a JWT for the processed file")
        assert "downloadFileJWT" not in response

    def test_recording_doesnt_include_file_key(self, helper):
        print("If a new user uploads a recording")
        bob = helper.given_new_user(self, "bob_limit")
        bobsGroup = helper.make_unique_group_name(self, "bobs_group")
        bob.create_group(bobsGroup)
        bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
        recording = bobsDevice.upload_recording()

        print("And then fetches it")
        recording_response = bob.get_recording(recording)

        print(" The recording response should not contain the rawFileKey")
        assert "rawFileKey" not in recording_response
Add test to make sure fileKey(s) aren't returned
Add test to make sure fileKey(s) aren't returned
Python
agpl-3.0
TheCacophonyProject/Full_Noise
import pytest
import json


class TestRecordings:
    def test_unprocessed_recording_doesnt_return_processed_jwt(self, helper):
        print("If a new user uploads a recording")
        bob = helper.given_new_user(self, "bob_limit")
        bobsGroup = helper.make_unique_group_name(self, "bobs_group")
        bob.create_group(bobsGroup)
        bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
        recording = bobsDevice.upload_recording()

        print("And then fetches it before it has been processed")
        response = bob.get_recording_response(recording)

        print(" The response should have a JWT for the raw file")
        assert "downloadRawJWT" in response

        print(" But the response should not have a JWT for the processed file")
        assert "downloadFileJWT" not in response
Add test to make sure fileKey(s) aren't returned
import pytest
import json


class TestRecordings:
    def test_unprocessed_recording_doesnt_return_processed_jwt(self, helper):
        print("If a new user uploads a recording")
        bob = helper.given_new_user(self, "bob_limit")
        bobsGroup = helper.make_unique_group_name(self, "bobs_group")
        bob.create_group(bobsGroup)
        bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
        recording = bobsDevice.upload_recording()

        print("And then fetches it before it has been processed")
        response = bob.get_recording_response(recording)

        print(" The response should have a JWT for the raw file")
        assert "downloadRawJWT" in response

        print(" But the response should not have a JWT for the processed file")
        assert "downloadFileJWT" not in response

    def test_recording_doesnt_include_file_key(self, helper):
        print("If a new user uploads a recording")
        bob = helper.given_new_user(self, "bob_limit")
        bobsGroup = helper.make_unique_group_name(self, "bobs_group")
        bob.create_group(bobsGroup)
        bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
        recording = bobsDevice.upload_recording()

        print("And then fetches it")
        recording_response = bob.get_recording(recording)

        print(" The recording response should not contain the rawFileKey")
        assert "rawFileKey" not in recording_response
<commit_before>import pytest
import json


class TestRecordings:
    def test_unprocessed_recording_doesnt_return_processed_jwt(self, helper):
        print("If a new user uploads a recording")
        bob = helper.given_new_user(self, "bob_limit")
        bobsGroup = helper.make_unique_group_name(self, "bobs_group")
        bob.create_group(bobsGroup)
        bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
        recording = bobsDevice.upload_recording()

        print("And then fetches it before it has been processed")
        response = bob.get_recording_response(recording)

        print(" The response should have a JWT for the raw file")
        assert "downloadRawJWT" in response

        print(" But the response should not have a JWT for the processed file")
        assert "downloadFileJWT" not in response
<commit_msg>Add test to make sure fileKey(s) aren't returned<commit_after>
import pytest
import json


class TestRecordings:
    def test_unprocessed_recording_doesnt_return_processed_jwt(self, helper):
        print("If a new user uploads a recording")
        bob = helper.given_new_user(self, "bob_limit")
        bobsGroup = helper.make_unique_group_name(self, "bobs_group")
        bob.create_group(bobsGroup)
        bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
        recording = bobsDevice.upload_recording()

        print("And then fetches it before it has been processed")
        response = bob.get_recording_response(recording)

        print(" The response should have a JWT for the raw file")
        assert "downloadRawJWT" in response

        print(" But the response should not have a JWT for the processed file")
        assert "downloadFileJWT" not in response

    def test_recording_doesnt_include_file_key(self, helper):
        print("If a new user uploads a recording")
        bob = helper.given_new_user(self, "bob_limit")
        bobsGroup = helper.make_unique_group_name(self, "bobs_group")
        bob.create_group(bobsGroup)
        bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
        recording = bobsDevice.upload_recording()

        print("And then fetches it")
        recording_response = bob.get_recording(recording)

        print(" The recording response should not contain the rawFileKey")
        assert "rawFileKey" not in recording_response
import pytest
import json


class TestRecordings:
    def test_unprocessed_recording_doesnt_return_processed_jwt(self, helper):
        print("If a new user uploads a recording")
        bob = helper.given_new_user(self, "bob_limit")
        bobsGroup = helper.make_unique_group_name(self, "bobs_group")
        bob.create_group(bobsGroup)
        bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
        recording = bobsDevice.upload_recording()

        print("And then fetches it before it has been processed")
        response = bob.get_recording_response(recording)

        print(" The response should have a JWT for the raw file")
        assert "downloadRawJWT" in response

        print(" But the response should not have a JWT for the processed file")
        assert "downloadFileJWT" not in response
Add test to make sure fileKey(s) aren't returned
import pytest
import json


class TestRecordings:
    def test_unprocessed_recording_doesnt_return_processed_jwt(self, helper):
        print("If a new user uploads a recording")
        bob = helper.given_new_user(self, "bob_limit")
        bobsGroup = helper.make_unique_group_name(self, "bobs_group")
        bob.create_group(bobsGroup)
        bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
        recording = bobsDevice.upload_recording()

        print("And then fetches it before it has been processed")
        response = bob.get_recording_response(recording)

        print(" The response should have a JWT for the raw file")
        assert "downloadRawJWT" in response

        print(" But the response should not have a JWT for the processed file")
        assert "downloadFileJWT" not in response

    def test_recording_doesnt_include_file_key(self, helper):
        print("If a new user uploads a recording")
        bob = helper.given_new_user(self, "bob_limit")
        bobsGroup = helper.make_unique_group_name(self, "bobs_group")
        bob.create_group(bobsGroup)
        bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
        recording = bobsDevice.upload_recording()

        print("And then fetches it")
        recording_response = bob.get_recording(recording)

        print(" The recording response should not contain the rawFileKey")
        assert "rawFileKey" not in recording_response
<commit_before>import pytest
import json


class TestRecordings:
    def test_unprocessed_recording_doesnt_return_processed_jwt(self, helper):
        print("If a new user uploads a recording")
        bob = helper.given_new_user(self, "bob_limit")
        bobsGroup = helper.make_unique_group_name(self, "bobs_group")
        bob.create_group(bobsGroup)
        bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
        recording = bobsDevice.upload_recording()

        print("And then fetches it before it has been processed")
        response = bob.get_recording_response(recording)

        print(" The response should have a JWT for the raw file")
        assert "downloadRawJWT" in response

        print(" But the response should not have a JWT for the processed file")
        assert "downloadFileJWT" not in response
<commit_msg>Add test to make sure fileKey(s) aren't returned<commit_after>import pytest
import json


class TestRecordings:
    def test_unprocessed_recording_doesnt_return_processed_jwt(self, helper):
        print("If a new user uploads a recording")
        bob = helper.given_new_user(self, "bob_limit")
        bobsGroup = helper.make_unique_group_name(self, "bobs_group")
        bob.create_group(bobsGroup)
        bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
        recording = bobsDevice.upload_recording()

        print("And then fetches it before it has been processed")
        response = bob.get_recording_response(recording)

        print(" The response should have a JWT for the raw file")
        assert "downloadRawJWT" in response

        print(" But the response should not have a JWT for the processed file")
        assert "downloadFileJWT" not in response

    def test_recording_doesnt_include_file_key(self, helper):
        print("If a new user uploads a recording")
        bob = helper.given_new_user(self, "bob_limit")
        bobsGroup = helper.make_unique_group_name(self, "bobs_group")
        bob.create_group(bobsGroup)
        bobsDevice = helper.given_new_device(self, "bobs_device", bobsGroup)
        recording = bobsDevice.upload_recording()

        print("And then fetches it")
        recording_response = bob.get_recording(recording)

        print(" The recording response should not contain the rawFileKey")
        assert "rawFileKey" not in recording_response