commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
45bbd9c64fb19d22a04b19c3bc61081e38e154a3
|
mail_corpus.py
|
mail_corpus.py
|
import sys
import mailbox
import mail_parser
import nltk
import itertools
try:
import cPickle as pickle
except ImportError:
import pickle
def main():
"""Extract the texts of emails from a specified mailbox and
from a specified set of senders and write corpus.txt.
Usage: python mail_corpus.py output mboxfile email1@example.com email2@example.com
"""
mbox = mailbox.mbox(sys.argv[2])
addresses = set(sys.argv[3:])
f = open(sys.argv[1], "w")
for text in mail_parser.mail_texts(mbox, addresses):
print >> f, text
f.close()
if __name__ == '__main__':
main()
|
import sys
import mailbox
import mail_parser
import nltk
import itertools
try:
import cPickle as pickle
except ImportError:
import pickle
def main():
"""Extract the texts of emails from a specified mailbox and
from a specified set of senders and write corpus.txt.
Usage: python mail_corpus.py mboxfile email1@example.com email2@example.com
"""
mbox = mailbox.mbox(sys.argv[1])
addresses = set(sys.argv[2:])
f = open("corpus.txt", "w")
for text in mail_parser.mail_texts(mbox, addresses):
print >> f, text
f.close()
if __name__ == '__main__':
main()
|
Switch back to always write corpus.txt
|
Switch back to always write corpus.txt
|
Python
|
mit
|
RawPlutonium/BaymaxKE
|
import sys
import mailbox
import mail_parser
import nltk
import itertools
try:
import cPickle as pickle
except ImportError:
import pickle
def main():
"""Extract the texts of emails from a specified mailbox and
from a specified set of senders and write corpus.txt.
Usage: python mail_corpus.py output mboxfile email1@example.com email2@example.com
"""
mbox = mailbox.mbox(sys.argv[2])
addresses = set(sys.argv[3:])
f = open(sys.argv[1], "w")
for text in mail_parser.mail_texts(mbox, addresses):
print >> f, text
f.close()
if __name__ == '__main__':
main()
Switch back to always write corpus.txt
|
import sys
import mailbox
import mail_parser
import nltk
import itertools
try:
import cPickle as pickle
except ImportError:
import pickle
def main():
"""Extract the texts of emails from a specified mailbox and
from a specified set of senders and write corpus.txt.
Usage: python mail_corpus.py mboxfile email1@example.com email2@example.com
"""
mbox = mailbox.mbox(sys.argv[1])
addresses = set(sys.argv[2:])
f = open("corpus.txt", "w")
for text in mail_parser.mail_texts(mbox, addresses):
print >> f, text
f.close()
if __name__ == '__main__':
main()
|
<commit_before>import sys
import mailbox
import mail_parser
import nltk
import itertools
try:
import cPickle as pickle
except ImportError:
import pickle
def main():
"""Extract the texts of emails from a specified mailbox and
from a specified set of senders and write corpus.txt.
Usage: python mail_corpus.py output mboxfile email1@example.com email2@example.com
"""
mbox = mailbox.mbox(sys.argv[2])
addresses = set(sys.argv[3:])
f = open(sys.argv[1], "w")
for text in mail_parser.mail_texts(mbox, addresses):
print >> f, text
f.close()
if __name__ == '__main__':
main()
<commit_msg>Switch back to always write corpus.txt<commit_after>
|
import sys
import mailbox
import mail_parser
import nltk
import itertools
try:
import cPickle as pickle
except ImportError:
import pickle
def main():
"""Extract the texts of emails from a specified mailbox and
from a specified set of senders and write corpus.txt.
Usage: python mail_corpus.py mboxfile email1@example.com email2@example.com
"""
mbox = mailbox.mbox(sys.argv[1])
addresses = set(sys.argv[2:])
f = open("corpus.txt", "w")
for text in mail_parser.mail_texts(mbox, addresses):
print >> f, text
f.close()
if __name__ == '__main__':
main()
|
import sys
import mailbox
import mail_parser
import nltk
import itertools
try:
import cPickle as pickle
except ImportError:
import pickle
def main():
"""Extract the texts of emails from a specified mailbox and
from a specified set of senders and write corpus.txt.
Usage: python mail_corpus.py output mboxfile email1@example.com email2@example.com
"""
mbox = mailbox.mbox(sys.argv[2])
addresses = set(sys.argv[3:])
f = open(sys.argv[1], "w")
for text in mail_parser.mail_texts(mbox, addresses):
print >> f, text
f.close()
if __name__ == '__main__':
main()
Switch back to always write corpus.txtimport sys
import mailbox
import mail_parser
import nltk
import itertools
try:
import cPickle as pickle
except ImportError:
import pickle
def main():
"""Extract the texts of emails from a specified mailbox and
from a specified set of senders and write corpus.txt.
Usage: python mail_corpus.py mboxfile email1@example.com email2@example.com
"""
mbox = mailbox.mbox(sys.argv[1])
addresses = set(sys.argv[2:])
f = open("corpus.txt", "w")
for text in mail_parser.mail_texts(mbox, addresses):
print >> f, text
f.close()
if __name__ == '__main__':
main()
|
<commit_before>import sys
import mailbox
import mail_parser
import nltk
import itertools
try:
import cPickle as pickle
except ImportError:
import pickle
def main():
"""Extract the texts of emails from a specified mailbox and
from a specified set of senders and write corpus.txt.
Usage: python mail_corpus.py output mboxfile email1@example.com email2@example.com
"""
mbox = mailbox.mbox(sys.argv[2])
addresses = set(sys.argv[3:])
f = open(sys.argv[1], "w")
for text in mail_parser.mail_texts(mbox, addresses):
print >> f, text
f.close()
if __name__ == '__main__':
main()
<commit_msg>Switch back to always write corpus.txt<commit_after>import sys
import mailbox
import mail_parser
import nltk
import itertools
try:
import cPickle as pickle
except ImportError:
import pickle
def main():
"""Extract the texts of emails from a specified mailbox and
from a specified set of senders and write corpus.txt.
Usage: python mail_corpus.py mboxfile email1@example.com email2@example.com
"""
mbox = mailbox.mbox(sys.argv[1])
addresses = set(sys.argv[2:])
f = open("corpus.txt", "w")
for text in mail_parser.mail_texts(mbox, addresses):
print >> f, text
f.close()
if __name__ == '__main__':
main()
|
458f402c1aafdccfeba287933b05baaa2bae3eb3
|
model/oauth.py
|
model/oauth.py
|
# -*- encoding:utf8 -*-
import os
from oauth2client.client import OAuth2WebServerFlow
class OAuth:
def __init__(self):
pass
def get_flow(self):
scope = 'https://www.googleapis.com/auth/drive'
try:
client_id = os.environ['GOOGLE_CLIENT_ID']
client_secret = os.environ['GOOGLE_CLIENT_SECRET']
base_url = os.environ['BOTNYAN_BASE_URL']
separator = "/"
if base_url.endswith("/"):
separator = ""
redirect_url = "{0}{1}drive/callback".format(base_url, separator)
flow = OAuth2WebServerFlow(client_id=client_id,
client_secret=client_secret,
scope=scope,
redirect_uri=redirect_url)
return flow
except:
return None
|
# -*- encoding:utf8 -*-
import os
from oauth2client.client import OAuth2WebServerFlow
class OAuth:
def __init__(self):
pass
def get_flow(self):
scope = 'https://www.googleapis.com/auth/drive'
try:
client_id = os.environ['GOOGLE_CLIENT_ID']
client_secret = os.environ['GOOGLE_CLIENT_SECRET']
base_url = os.environ['BOTNYAN_BASE_URL']
separator = "/"
if base_url.endswith("/"):
separator = ""
redirect_url = "{0}{1}api/drive/callback".format(base_url, separator)
flow = OAuth2WebServerFlow(client_id=client_id,
client_secret=client_secret,
scope=scope,
redirect_uri=redirect_url)
return flow
except:
return None
|
Fix GoogleDrive OAuth callback URL in OAuth module.
|
Fix GoogleDrive OAuth callback URL in OAuth module.
|
Python
|
mit
|
supistar/Botnyan
|
# -*- encoding:utf8 -*-
import os
from oauth2client.client import OAuth2WebServerFlow
class OAuth:
def __init__(self):
pass
def get_flow(self):
scope = 'https://www.googleapis.com/auth/drive'
try:
client_id = os.environ['GOOGLE_CLIENT_ID']
client_secret = os.environ['GOOGLE_CLIENT_SECRET']
base_url = os.environ['BOTNYAN_BASE_URL']
separator = "/"
if base_url.endswith("/"):
separator = ""
redirect_url = "{0}{1}drive/callback".format(base_url, separator)
flow = OAuth2WebServerFlow(client_id=client_id,
client_secret=client_secret,
scope=scope,
redirect_uri=redirect_url)
return flow
except:
return None
Fix GoogleDrive OAuth callback URL in OAuth module.
|
# -*- encoding:utf8 -*-
import os
from oauth2client.client import OAuth2WebServerFlow
class OAuth:
def __init__(self):
pass
def get_flow(self):
scope = 'https://www.googleapis.com/auth/drive'
try:
client_id = os.environ['GOOGLE_CLIENT_ID']
client_secret = os.environ['GOOGLE_CLIENT_SECRET']
base_url = os.environ['BOTNYAN_BASE_URL']
separator = "/"
if base_url.endswith("/"):
separator = ""
redirect_url = "{0}{1}api/drive/callback".format(base_url, separator)
flow = OAuth2WebServerFlow(client_id=client_id,
client_secret=client_secret,
scope=scope,
redirect_uri=redirect_url)
return flow
except:
return None
|
<commit_before># -*- encoding:utf8 -*-
import os
from oauth2client.client import OAuth2WebServerFlow
class OAuth:
def __init__(self):
pass
def get_flow(self):
scope = 'https://www.googleapis.com/auth/drive'
try:
client_id = os.environ['GOOGLE_CLIENT_ID']
client_secret = os.environ['GOOGLE_CLIENT_SECRET']
base_url = os.environ['BOTNYAN_BASE_URL']
separator = "/"
if base_url.endswith("/"):
separator = ""
redirect_url = "{0}{1}drive/callback".format(base_url, separator)
flow = OAuth2WebServerFlow(client_id=client_id,
client_secret=client_secret,
scope=scope,
redirect_uri=redirect_url)
return flow
except:
return None
<commit_msg>Fix GoogleDrive OAuth callback URL in OAuth module.<commit_after>
|
# -*- encoding:utf8 -*-
import os
from oauth2client.client import OAuth2WebServerFlow
class OAuth:
def __init__(self):
pass
def get_flow(self):
scope = 'https://www.googleapis.com/auth/drive'
try:
client_id = os.environ['GOOGLE_CLIENT_ID']
client_secret = os.environ['GOOGLE_CLIENT_SECRET']
base_url = os.environ['BOTNYAN_BASE_URL']
separator = "/"
if base_url.endswith("/"):
separator = ""
redirect_url = "{0}{1}api/drive/callback".format(base_url, separator)
flow = OAuth2WebServerFlow(client_id=client_id,
client_secret=client_secret,
scope=scope,
redirect_uri=redirect_url)
return flow
except:
return None
|
# -*- encoding:utf8 -*-
import os
from oauth2client.client import OAuth2WebServerFlow
class OAuth:
def __init__(self):
pass
def get_flow(self):
scope = 'https://www.googleapis.com/auth/drive'
try:
client_id = os.environ['GOOGLE_CLIENT_ID']
client_secret = os.environ['GOOGLE_CLIENT_SECRET']
base_url = os.environ['BOTNYAN_BASE_URL']
separator = "/"
if base_url.endswith("/"):
separator = ""
redirect_url = "{0}{1}drive/callback".format(base_url, separator)
flow = OAuth2WebServerFlow(client_id=client_id,
client_secret=client_secret,
scope=scope,
redirect_uri=redirect_url)
return flow
except:
return None
Fix GoogleDrive OAuth callback URL in OAuth module.# -*- encoding:utf8 -*-
import os
from oauth2client.client import OAuth2WebServerFlow
class OAuth:
def __init__(self):
pass
def get_flow(self):
scope = 'https://www.googleapis.com/auth/drive'
try:
client_id = os.environ['GOOGLE_CLIENT_ID']
client_secret = os.environ['GOOGLE_CLIENT_SECRET']
base_url = os.environ['BOTNYAN_BASE_URL']
separator = "/"
if base_url.endswith("/"):
separator = ""
redirect_url = "{0}{1}api/drive/callback".format(base_url, separator)
flow = OAuth2WebServerFlow(client_id=client_id,
client_secret=client_secret,
scope=scope,
redirect_uri=redirect_url)
return flow
except:
return None
|
<commit_before># -*- encoding:utf8 -*-
import os
from oauth2client.client import OAuth2WebServerFlow
class OAuth:
def __init__(self):
pass
def get_flow(self):
scope = 'https://www.googleapis.com/auth/drive'
try:
client_id = os.environ['GOOGLE_CLIENT_ID']
client_secret = os.environ['GOOGLE_CLIENT_SECRET']
base_url = os.environ['BOTNYAN_BASE_URL']
separator = "/"
if base_url.endswith("/"):
separator = ""
redirect_url = "{0}{1}drive/callback".format(base_url, separator)
flow = OAuth2WebServerFlow(client_id=client_id,
client_secret=client_secret,
scope=scope,
redirect_uri=redirect_url)
return flow
except:
return None
<commit_msg>Fix GoogleDrive OAuth callback URL in OAuth module.<commit_after># -*- encoding:utf8 -*-
import os
from oauth2client.client import OAuth2WebServerFlow
class OAuth:
def __init__(self):
pass
def get_flow(self):
scope = 'https://www.googleapis.com/auth/drive'
try:
client_id = os.environ['GOOGLE_CLIENT_ID']
client_secret = os.environ['GOOGLE_CLIENT_SECRET']
base_url = os.environ['BOTNYAN_BASE_URL']
separator = "/"
if base_url.endswith("/"):
separator = ""
redirect_url = "{0}{1}api/drive/callback".format(base_url, separator)
flow = OAuth2WebServerFlow(client_id=client_id,
client_secret=client_secret,
scope=scope,
redirect_uri=redirect_url)
return flow
except:
return None
|
8eb9360e575d57b2414468c8ec6c895baf239d63
|
mwikiircbot.py
|
mwikiircbot.py
|
import ircbotframe
import sys
class Handler:
def __init__(self, host, port=6667, name="MediaWiki", description="MediaWiki recent changes bot", channels=[]):
self.channels = channels
self.bot = ircbotframe.ircBot(host, port, name, description)
self.bot.bind("376", self.endMOTD)
self.bot.start()
def endMOTD(self, sender, headers, message):
for chan in self.channels:
self.bot.joinchan(chan)
def main(cmd, args):
args = args[:]
parsemode = ["host"]
host = None
name = "MediaWiki"
channels = []
while len(args) > 0:
if len(parsemode) < 1:
if args[0] == "-n":
parsemode.insert(0, "name")
else:
channels.append(args[0])
else:
if parsemode[0] == "name":
name = args[0]
elif parsemode[0] == "host":
host = args[0]
parsemode = parsemode[1:]
args = args[1:]
if host == None:
print("Usage: " + cmd + " [-n <name>] <host> <channel> [<channel> ...]")
return
elif len(args) > 1:
Handler(host=host, name=name channels=channels)
if __name__ == "__main__":
if __name__ == '__main__':
main(sys.argv[0], sys.argv[1:] if len(sys.argv) > 1 else [])
|
import ircbotframe
import sys
class Handler:
def __init__(self, host, port=6667, name="MediaWiki", description="MediaWiki recent changes bot", channels=[]):
self.channels = channels
self.bot = ircbotframe.ircBot(host, port, name, description)
self.bot.bind("376", self.endMOTD)
self.bot.start()
def endMOTD(self, sender, headers, message):
for chan in self.channels:
self.bot.joinchan(chan)
def main(cmd, args):
args = args[:]
parsemode = ["host"]
host = None
name = "MediaWiki"
channels = []
while len(args) > 0:
if len(parsemode) < 1:
if args[0] == "-n":
parsemode.insert(0, "name")
else:
channels.append(args[0])
else:
if parsemode[0] == "name":
name = args[0]
elif parsemode[0] == "host":
host = args[0]
parsemode = parsemode[1:]
args = args[1:]
if host == None:
print("Usage: " + cmd + " [-n <name>] <host> <channel> [<channel> ...]")
return
else:
Handler(host=host, name=name channels=channels)
if __name__ == "__main__":
if __name__ == '__main__':
main(sys.argv[0], sys.argv[1:] if len(sys.argv) > 1 else [])
|
Remove unnecessary conditional in argument parsing
|
Remove unnecessary conditional in argument parsing
|
Python
|
mit
|
fenhl/mwikiircbot
|
import ircbotframe
import sys
class Handler:
def __init__(self, host, port=6667, name="MediaWiki", description="MediaWiki recent changes bot", channels=[]):
self.channels = channels
self.bot = ircbotframe.ircBot(host, port, name, description)
self.bot.bind("376", self.endMOTD)
self.bot.start()
def endMOTD(self, sender, headers, message):
for chan in self.channels:
self.bot.joinchan(chan)
def main(cmd, args):
args = args[:]
parsemode = ["host"]
host = None
name = "MediaWiki"
channels = []
while len(args) > 0:
if len(parsemode) < 1:
if args[0] == "-n":
parsemode.insert(0, "name")
else:
channels.append(args[0])
else:
if parsemode[0] == "name":
name = args[0]
elif parsemode[0] == "host":
host = args[0]
parsemode = parsemode[1:]
args = args[1:]
if host == None:
print("Usage: " + cmd + " [-n <name>] <host> <channel> [<channel> ...]")
return
elif len(args) > 1:
Handler(host=host, name=name channels=channels)
if __name__ == "__main__":
if __name__ == '__main__':
main(sys.argv[0], sys.argv[1:] if len(sys.argv) > 1 else [])
Remove unnecessary conditional in argument parsing
|
import ircbotframe
import sys
class Handler:
def __init__(self, host, port=6667, name="MediaWiki", description="MediaWiki recent changes bot", channels=[]):
self.channels = channels
self.bot = ircbotframe.ircBot(host, port, name, description)
self.bot.bind("376", self.endMOTD)
self.bot.start()
def endMOTD(self, sender, headers, message):
for chan in self.channels:
self.bot.joinchan(chan)
def main(cmd, args):
args = args[:]
parsemode = ["host"]
host = None
name = "MediaWiki"
channels = []
while len(args) > 0:
if len(parsemode) < 1:
if args[0] == "-n":
parsemode.insert(0, "name")
else:
channels.append(args[0])
else:
if parsemode[0] == "name":
name = args[0]
elif parsemode[0] == "host":
host = args[0]
parsemode = parsemode[1:]
args = args[1:]
if host == None:
print("Usage: " + cmd + " [-n <name>] <host> <channel> [<channel> ...]")
return
else:
Handler(host=host, name=name channels=channels)
if __name__ == "__main__":
if __name__ == '__main__':
main(sys.argv[0], sys.argv[1:] if len(sys.argv) > 1 else [])
|
<commit_before>import ircbotframe
import sys
class Handler:
def __init__(self, host, port=6667, name="MediaWiki", description="MediaWiki recent changes bot", channels=[]):
self.channels = channels
self.bot = ircbotframe.ircBot(host, port, name, description)
self.bot.bind("376", self.endMOTD)
self.bot.start()
def endMOTD(self, sender, headers, message):
for chan in self.channels:
self.bot.joinchan(chan)
def main(cmd, args):
args = args[:]
parsemode = ["host"]
host = None
name = "MediaWiki"
channels = []
while len(args) > 0:
if len(parsemode) < 1:
if args[0] == "-n":
parsemode.insert(0, "name")
else:
channels.append(args[0])
else:
if parsemode[0] == "name":
name = args[0]
elif parsemode[0] == "host":
host = args[0]
parsemode = parsemode[1:]
args = args[1:]
if host == None:
print("Usage: " + cmd + " [-n <name>] <host> <channel> [<channel> ...]")
return
elif len(args) > 1:
Handler(host=host, name=name channels=channels)
if __name__ == "__main__":
if __name__ == '__main__':
main(sys.argv[0], sys.argv[1:] if len(sys.argv) > 1 else [])
<commit_msg>Remove unnecessary conditional in argument parsing<commit_after>
|
import ircbotframe
import sys
class Handler:
def __init__(self, host, port=6667, name="MediaWiki", description="MediaWiki recent changes bot", channels=[]):
self.channels = channels
self.bot = ircbotframe.ircBot(host, port, name, description)
self.bot.bind("376", self.endMOTD)
self.bot.start()
def endMOTD(self, sender, headers, message):
for chan in self.channels:
self.bot.joinchan(chan)
def main(cmd, args):
args = args[:]
parsemode = ["host"]
host = None
name = "MediaWiki"
channels = []
while len(args) > 0:
if len(parsemode) < 1:
if args[0] == "-n":
parsemode.insert(0, "name")
else:
channels.append(args[0])
else:
if parsemode[0] == "name":
name = args[0]
elif parsemode[0] == "host":
host = args[0]
parsemode = parsemode[1:]
args = args[1:]
if host == None:
print("Usage: " + cmd + " [-n <name>] <host> <channel> [<channel> ...]")
return
else:
Handler(host=host, name=name channels=channels)
if __name__ == "__main__":
if __name__ == '__main__':
main(sys.argv[0], sys.argv[1:] if len(sys.argv) > 1 else [])
|
import ircbotframe
import sys
class Handler:
def __init__(self, host, port=6667, name="MediaWiki", description="MediaWiki recent changes bot", channels=[]):
self.channels = channels
self.bot = ircbotframe.ircBot(host, port, name, description)
self.bot.bind("376", self.endMOTD)
self.bot.start()
def endMOTD(self, sender, headers, message):
for chan in self.channels:
self.bot.joinchan(chan)
def main(cmd, args):
args = args[:]
parsemode = ["host"]
host = None
name = "MediaWiki"
channels = []
while len(args) > 0:
if len(parsemode) < 1:
if args[0] == "-n":
parsemode.insert(0, "name")
else:
channels.append(args[0])
else:
if parsemode[0] == "name":
name = args[0]
elif parsemode[0] == "host":
host = args[0]
parsemode = parsemode[1:]
args = args[1:]
if host == None:
print("Usage: " + cmd + " [-n <name>] <host> <channel> [<channel> ...]")
return
elif len(args) > 1:
Handler(host=host, name=name channels=channels)
if __name__ == "__main__":
if __name__ == '__main__':
main(sys.argv[0], sys.argv[1:] if len(sys.argv) > 1 else [])
Remove unnecessary conditional in argument parsingimport ircbotframe
import sys
class Handler:
def __init__(self, host, port=6667, name="MediaWiki", description="MediaWiki recent changes bot", channels=[]):
self.channels = channels
self.bot = ircbotframe.ircBot(host, port, name, description)
self.bot.bind("376", self.endMOTD)
self.bot.start()
def endMOTD(self, sender, headers, message):
for chan in self.channels:
self.bot.joinchan(chan)
def main(cmd, args):
args = args[:]
parsemode = ["host"]
host = None
name = "MediaWiki"
channels = []
while len(args) > 0:
if len(parsemode) < 1:
if args[0] == "-n":
parsemode.insert(0, "name")
else:
channels.append(args[0])
else:
if parsemode[0] == "name":
name = args[0]
elif parsemode[0] == "host":
host = args[0]
parsemode = parsemode[1:]
args = args[1:]
if host == None:
print("Usage: " + cmd + " [-n <name>] <host> <channel> [<channel> ...]")
return
else:
Handler(host=host, name=name channels=channels)
if __name__ == "__main__":
if __name__ == '__main__':
main(sys.argv[0], sys.argv[1:] if len(sys.argv) > 1 else [])
|
<commit_before>import ircbotframe
import sys
class Handler:
def __init__(self, host, port=6667, name="MediaWiki", description="MediaWiki recent changes bot", channels=[]):
self.channels = channels
self.bot = ircbotframe.ircBot(host, port, name, description)
self.bot.bind("376", self.endMOTD)
self.bot.start()
def endMOTD(self, sender, headers, message):
for chan in self.channels:
self.bot.joinchan(chan)
def main(cmd, args):
args = args[:]
parsemode = ["host"]
host = None
name = "MediaWiki"
channels = []
while len(args) > 0:
if len(parsemode) < 1:
if args[0] == "-n":
parsemode.insert(0, "name")
else:
channels.append(args[0])
else:
if parsemode[0] == "name":
name = args[0]
elif parsemode[0] == "host":
host = args[0]
parsemode = parsemode[1:]
args = args[1:]
if host == None:
print("Usage: " + cmd + " [-n <name>] <host> <channel> [<channel> ...]")
return
elif len(args) > 1:
Handler(host=host, name=name channels=channels)
if __name__ == "__main__":
if __name__ == '__main__':
main(sys.argv[0], sys.argv[1:] if len(sys.argv) > 1 else [])
<commit_msg>Remove unnecessary conditional in argument parsing<commit_after>import ircbotframe
import sys
class Handler:
def __init__(self, host, port=6667, name="MediaWiki", description="MediaWiki recent changes bot", channels=[]):
self.channels = channels
self.bot = ircbotframe.ircBot(host, port, name, description)
self.bot.bind("376", self.endMOTD)
self.bot.start()
def endMOTD(self, sender, headers, message):
for chan in self.channels:
self.bot.joinchan(chan)
def main(cmd, args):
args = args[:]
parsemode = ["host"]
host = None
name = "MediaWiki"
channels = []
while len(args) > 0:
if len(parsemode) < 1:
if args[0] == "-n":
parsemode.insert(0, "name")
else:
channels.append(args[0])
else:
if parsemode[0] == "name":
name = args[0]
elif parsemode[0] == "host":
host = args[0]
parsemode = parsemode[1:]
args = args[1:]
if host == None:
print("Usage: " + cmd + " [-n <name>] <host> <channel> [<channel> ...]")
return
else:
Handler(host=host, name=name channels=channels)
if __name__ == "__main__":
if __name__ == '__main__':
main(sys.argv[0], sys.argv[1:] if len(sys.argv) > 1 else [])
|
3fd7c331273f9fadacae1fcb0ff51b9817b009e3
|
telethon/network/connection/tcpfull.py
|
telethon/network/connection/tcpfull.py
|
import struct
from zlib import crc32
from .connection import Connection
from ...errors import InvalidChecksumError
class ConnectionTcpFull(Connection):
"""
Default Telegram mode. Sends 12 additional bytes and
needs to calculate the CRC value of the packet itself.
"""
def __init__(self, ip, port, *, loop):
super().__init__(ip, port, loop=loop)
self._send_counter = 0
def _send(self, data):
# https://core.telegram.org/mtproto#tcp-transport
# total length, sequence number, packet and checksum (CRC32)
length = len(data) + 12
data = struct.pack('<ii', length, self._send_counter) + data
crc = struct.pack('<I', crc32(data))
self._send_counter += 1
self._writer.write(data + crc)
async def _recv(self):
packet_len_seq = await self._reader.readexactly(8) # 4 and 4
packet_len, seq = struct.unpack('<ii', packet_len_seq)
body = await self._reader.readexactly(packet_len - 8)
checksum = struct.unpack('<I', body[-4:])[0]
body = body[:-4]
valid_checksum = crc32(packet_len_seq + body)
if checksum != valid_checksum:
raise InvalidChecksumError(checksum, valid_checksum)
return body
|
import struct
from zlib import crc32
from .connection import Connection
from ...errors import InvalidChecksumError
class ConnectionTcpFull(Connection):
"""
Default Telegram mode. Sends 12 additional bytes and
needs to calculate the CRC value of the packet itself.
"""
def __init__(self, ip, port, *, loop):
super().__init__(ip, port, loop=loop)
self._send_counter = 0
async def connect(self):
await super().connect()
self._send_counter = 0 # Important or Telegram won't reply
def _send(self, data):
# https://core.telegram.org/mtproto#tcp-transport
# total length, sequence number, packet and checksum (CRC32)
length = len(data) + 12
data = struct.pack('<ii', length, self._send_counter) + data
crc = struct.pack('<I', crc32(data))
self._send_counter += 1
self._writer.write(data + crc)
async def _recv(self):
packet_len_seq = await self._reader.readexactly(8) # 4 and 4
packet_len, seq = struct.unpack('<ii', packet_len_seq)
body = await self._reader.readexactly(packet_len - 8)
checksum = struct.unpack('<I', body[-4:])[0]
body = body[:-4]
valid_checksum = crc32(packet_len_seq + body)
if checksum != valid_checksum:
raise InvalidChecksumError(checksum, valid_checksum)
return body
|
Fix automatic reconnect (e.g. on bad auth key)
|
Fix automatic reconnect (e.g. on bad auth key)
This took more time than it should have to debug.
|
Python
|
mit
|
LonamiWebs/Telethon,expectocode/Telethon,LonamiWebs/Telethon,LonamiWebs/Telethon,LonamiWebs/Telethon
|
import struct
from zlib import crc32
from .connection import Connection
from ...errors import InvalidChecksumError
class ConnectionTcpFull(Connection):
"""
Default Telegram mode. Sends 12 additional bytes and
needs to calculate the CRC value of the packet itself.
"""
def __init__(self, ip, port, *, loop):
super().__init__(ip, port, loop=loop)
self._send_counter = 0
def _send(self, data):
# https://core.telegram.org/mtproto#tcp-transport
# total length, sequence number, packet and checksum (CRC32)
length = len(data) + 12
data = struct.pack('<ii', length, self._send_counter) + data
crc = struct.pack('<I', crc32(data))
self._send_counter += 1
self._writer.write(data + crc)
async def _recv(self):
packet_len_seq = await self._reader.readexactly(8) # 4 and 4
packet_len, seq = struct.unpack('<ii', packet_len_seq)
body = await self._reader.readexactly(packet_len - 8)
checksum = struct.unpack('<I', body[-4:])[0]
body = body[:-4]
valid_checksum = crc32(packet_len_seq + body)
if checksum != valid_checksum:
raise InvalidChecksumError(checksum, valid_checksum)
return body
Fix automatic reconnect (e.g. on bad auth key)
This took more time than it should have to debug.
|
import struct
from zlib import crc32
from .connection import Connection
from ...errors import InvalidChecksumError
class ConnectionTcpFull(Connection):
"""
Default Telegram mode. Sends 12 additional bytes and
needs to calculate the CRC value of the packet itself.
"""
def __init__(self, ip, port, *, loop):
super().__init__(ip, port, loop=loop)
self._send_counter = 0
async def connect(self):
await super().connect()
self._send_counter = 0 # Important or Telegram won't reply
def _send(self, data):
# https://core.telegram.org/mtproto#tcp-transport
# total length, sequence number, packet and checksum (CRC32)
length = len(data) + 12
data = struct.pack('<ii', length, self._send_counter) + data
crc = struct.pack('<I', crc32(data))
self._send_counter += 1
self._writer.write(data + crc)
async def _recv(self):
packet_len_seq = await self._reader.readexactly(8) # 4 and 4
packet_len, seq = struct.unpack('<ii', packet_len_seq)
body = await self._reader.readexactly(packet_len - 8)
checksum = struct.unpack('<I', body[-4:])[0]
body = body[:-4]
valid_checksum = crc32(packet_len_seq + body)
if checksum != valid_checksum:
raise InvalidChecksumError(checksum, valid_checksum)
return body
|
<commit_before>import struct
from zlib import crc32
from .connection import Connection
from ...errors import InvalidChecksumError
class ConnectionTcpFull(Connection):
"""
Default Telegram mode. Sends 12 additional bytes and
needs to calculate the CRC value of the packet itself.
"""
def __init__(self, ip, port, *, loop):
super().__init__(ip, port, loop=loop)
self._send_counter = 0
def _send(self, data):
# https://core.telegram.org/mtproto#tcp-transport
# total length, sequence number, packet and checksum (CRC32)
length = len(data) + 12
data = struct.pack('<ii', length, self._send_counter) + data
crc = struct.pack('<I', crc32(data))
self._send_counter += 1
self._writer.write(data + crc)
async def _recv(self):
packet_len_seq = await self._reader.readexactly(8) # 4 and 4
packet_len, seq = struct.unpack('<ii', packet_len_seq)
body = await self._reader.readexactly(packet_len - 8)
checksum = struct.unpack('<I', body[-4:])[0]
body = body[:-4]
valid_checksum = crc32(packet_len_seq + body)
if checksum != valid_checksum:
raise InvalidChecksumError(checksum, valid_checksum)
return body
<commit_msg>Fix automatic reconnect (e.g. on bad auth key)
This took more time than it should have to debug.<commit_after>
|
import struct
from zlib import crc32
from .connection import Connection
from ...errors import InvalidChecksumError
class ConnectionTcpFull(Connection):
"""
Default Telegram mode. Sends 12 additional bytes and
needs to calculate the CRC value of the packet itself.
"""
def __init__(self, ip, port, *, loop):
super().__init__(ip, port, loop=loop)
self._send_counter = 0
async def connect(self):
await super().connect()
self._send_counter = 0 # Important or Telegram won't reply
def _send(self, data):
# https://core.telegram.org/mtproto#tcp-transport
# total length, sequence number, packet and checksum (CRC32)
length = len(data) + 12
data = struct.pack('<ii', length, self._send_counter) + data
crc = struct.pack('<I', crc32(data))
self._send_counter += 1
self._writer.write(data + crc)
async def _recv(self):
packet_len_seq = await self._reader.readexactly(8) # 4 and 4
packet_len, seq = struct.unpack('<ii', packet_len_seq)
body = await self._reader.readexactly(packet_len - 8)
checksum = struct.unpack('<I', body[-4:])[0]
body = body[:-4]
valid_checksum = crc32(packet_len_seq + body)
if checksum != valid_checksum:
raise InvalidChecksumError(checksum, valid_checksum)
return body
|
import struct
from zlib import crc32
from .connection import Connection
from ...errors import InvalidChecksumError
class ConnectionTcpFull(Connection):
"""
Default Telegram mode. Sends 12 additional bytes and
needs to calculate the CRC value of the packet itself.
"""
def __init__(self, ip, port, *, loop):
super().__init__(ip, port, loop=loop)
self._send_counter = 0
def _send(self, data):
# https://core.telegram.org/mtproto#tcp-transport
# total length, sequence number, packet and checksum (CRC32)
length = len(data) + 12
data = struct.pack('<ii', length, self._send_counter) + data
crc = struct.pack('<I', crc32(data))
self._send_counter += 1
self._writer.write(data + crc)
async def _recv(self):
packet_len_seq = await self._reader.readexactly(8) # 4 and 4
packet_len, seq = struct.unpack('<ii', packet_len_seq)
body = await self._reader.readexactly(packet_len - 8)
checksum = struct.unpack('<I', body[-4:])[0]
body = body[:-4]
valid_checksum = crc32(packet_len_seq + body)
if checksum != valid_checksum:
raise InvalidChecksumError(checksum, valid_checksum)
return body
Fix automatic reconnect (e.g. on bad auth key)
This took more time than it should have to debug.import struct
from zlib import crc32
from .connection import Connection
from ...errors import InvalidChecksumError
class ConnectionTcpFull(Connection):
"""
Default Telegram mode. Sends 12 additional bytes and
needs to calculate the CRC value of the packet itself.
"""
def __init__(self, ip, port, *, loop):
super().__init__(ip, port, loop=loop)
self._send_counter = 0
async def connect(self):
await super().connect()
self._send_counter = 0 # Important or Telegram won't reply
def _send(self, data):
# https://core.telegram.org/mtproto#tcp-transport
# total length, sequence number, packet and checksum (CRC32)
length = len(data) + 12
data = struct.pack('<ii', length, self._send_counter) + data
crc = struct.pack('<I', crc32(data))
self._send_counter += 1
self._writer.write(data + crc)
async def _recv(self):
packet_len_seq = await self._reader.readexactly(8) # 4 and 4
packet_len, seq = struct.unpack('<ii', packet_len_seq)
body = await self._reader.readexactly(packet_len - 8)
checksum = struct.unpack('<I', body[-4:])[0]
body = body[:-4]
valid_checksum = crc32(packet_len_seq + body)
if checksum != valid_checksum:
raise InvalidChecksumError(checksum, valid_checksum)
return body
|
<commit_before>import struct
from zlib import crc32
from .connection import Connection
from ...errors import InvalidChecksumError
class ConnectionTcpFull(Connection):
"""
Default Telegram mode. Sends 12 additional bytes and
needs to calculate the CRC value of the packet itself.
"""
def __init__(self, ip, port, *, loop):
super().__init__(ip, port, loop=loop)
self._send_counter = 0
def _send(self, data):
# https://core.telegram.org/mtproto#tcp-transport
# total length, sequence number, packet and checksum (CRC32)
length = len(data) + 12
data = struct.pack('<ii', length, self._send_counter) + data
crc = struct.pack('<I', crc32(data))
self._send_counter += 1
self._writer.write(data + crc)
async def _recv(self):
packet_len_seq = await self._reader.readexactly(8) # 4 and 4
packet_len, seq = struct.unpack('<ii', packet_len_seq)
body = await self._reader.readexactly(packet_len - 8)
checksum = struct.unpack('<I', body[-4:])[0]
body = body[:-4]
valid_checksum = crc32(packet_len_seq + body)
if checksum != valid_checksum:
raise InvalidChecksumError(checksum, valid_checksum)
return body
<commit_msg>Fix automatic reconnect (e.g. on bad auth key)
This took more time than it should have to debug.<commit_after>import struct
from zlib import crc32
from .connection import Connection
from ...errors import InvalidChecksumError
class ConnectionTcpFull(Connection):
"""
Default Telegram mode. Sends 12 additional bytes and
needs to calculate the CRC value of the packet itself.
"""
def __init__(self, ip, port, *, loop):
super().__init__(ip, port, loop=loop)
self._send_counter = 0
async def connect(self):
await super().connect()
self._send_counter = 0 # Important or Telegram won't reply
def _send(self, data):
# https://core.telegram.org/mtproto#tcp-transport
# total length, sequence number, packet and checksum (CRC32)
length = len(data) + 12
data = struct.pack('<ii', length, self._send_counter) + data
crc = struct.pack('<I', crc32(data))
self._send_counter += 1
self._writer.write(data + crc)
async def _recv(self):
packet_len_seq = await self._reader.readexactly(8) # 4 and 4
packet_len, seq = struct.unpack('<ii', packet_len_seq)
body = await self._reader.readexactly(packet_len - 8)
checksum = struct.unpack('<I', body[-4:])[0]
body = body[:-4]
valid_checksum = crc32(packet_len_seq + body)
if checksum != valid_checksum:
raise InvalidChecksumError(checksum, valid_checksum)
return body
|
7435d508ae95c69dcb596e74f62bfb030011201f
|
tests/general/test_required_folders.py
|
tests/general/test_required_folders.py
|
""" Test that the All Mail folder is enabled for Gmail. """
import pytest
from inbox.auth.gmail import GmailAuthHandler
from inbox.basicauth import GmailSettingError
from inbox.crispin import GmailCrispinClient
class AccountStub(object):
id = 0
email_address = 'bob@bob.com'
access_token = None
imap_endpoint = None
def new_token(self):
return ('foo', 22)
def validate_token(self, new_token):
return True
class ConnectionStub(object):
def logout(self):
pass
def get_auth_handler(monkeypatch, folders):
g = GmailAuthHandler('gmail')
def mock_connect(a):
return ConnectionStub()
g.connect_account = mock_connect
monkeypatch.setattr(GmailCrispinClient, 'folder_names',
lambda x: folders)
return g
def test_all_mail_missing(monkeypatch):
"""
Test that validate_folders throws a GmailSettingError if All Mail
is not in the list of folders.
"""
g = get_auth_handler(monkeypatch, {'inbox': 'INBOX'})
with pytest.raises(GmailSettingError):
g.verify_account(AccountStub())
def test_all_mail_present(monkeypatch):
"""
Test that the validate_folders passes if All Mail is present.
"""
g = get_auth_handler(monkeypatch, {'all': 'ALL', 'inbox': 'INBOX',
'trash': 'TRASH'})
assert g.verify_account(AccountStub())
|
""" Test that the All Mail folder is enabled for Gmail. """
import pytest
from inbox.auth.gmail import GmailAuthHandler
from inbox.basicauth import GmailSettingError
from inbox.crispin import GmailCrispinClient
class AccountStub(object):
id = 0
email_address = 'bob@bob.com'
access_token = None
imap_endpoint = None
sync_state = 'running'
def new_token(self):
return ('foo', 22)
def validate_token(self, new_token):
return True
class ConnectionStub(object):
def logout(self):
pass
def get_auth_handler(monkeypatch, folders):
g = GmailAuthHandler('gmail')
def mock_connect(a):
return ConnectionStub()
g.connect_account = mock_connect
monkeypatch.setattr(GmailCrispinClient, 'folder_names',
lambda x: folders)
return g
def test_all_mail_missing(monkeypatch):
"""
Test that validate_folders throws a GmailSettingError if All Mail
is not in the list of folders.
"""
g = get_auth_handler(monkeypatch, {'inbox': 'INBOX'})
with pytest.raises(GmailSettingError):
g.verify_account(AccountStub())
def test_all_mail_present(monkeypatch):
"""
Test that the validate_folders passes if All Mail is present.
"""
g = get_auth_handler(monkeypatch, {'all': 'ALL', 'inbox': 'INBOX',
'trash': 'TRASH'})
assert g.verify_account(AccountStub())
|
Update mock Account in tests.
|
Update mock Account in tests.
|
Python
|
agpl-3.0
|
jobscore/sync-engine,jobscore/sync-engine,nylas/sync-engine,closeio/nylas,jobscore/sync-engine,nylas/sync-engine,closeio/nylas,jobscore/sync-engine,closeio/nylas,nylas/sync-engine,nylas/sync-engine,closeio/nylas
|
""" Test that the All Mail folder is enabled for Gmail. """
import pytest
from inbox.auth.gmail import GmailAuthHandler
from inbox.basicauth import GmailSettingError
from inbox.crispin import GmailCrispinClient
class AccountStub(object):
id = 0
email_address = 'bob@bob.com'
access_token = None
imap_endpoint = None
def new_token(self):
return ('foo', 22)
def validate_token(self, new_token):
return True
class ConnectionStub(object):
def logout(self):
pass
def get_auth_handler(monkeypatch, folders):
g = GmailAuthHandler('gmail')
def mock_connect(a):
return ConnectionStub()
g.connect_account = mock_connect
monkeypatch.setattr(GmailCrispinClient, 'folder_names',
lambda x: folders)
return g
def test_all_mail_missing(monkeypatch):
"""
Test that validate_folders throws a GmailSettingError if All Mail
is not in the list of folders.
"""
g = get_auth_handler(monkeypatch, {'inbox': 'INBOX'})
with pytest.raises(GmailSettingError):
g.verify_account(AccountStub())
def test_all_mail_present(monkeypatch):
"""
Test that the validate_folders passes if All Mail is present.
"""
g = get_auth_handler(monkeypatch, {'all': 'ALL', 'inbox': 'INBOX',
'trash': 'TRASH'})
assert g.verify_account(AccountStub())
Update mock Account in tests.
|
""" Test that the All Mail folder is enabled for Gmail. """
import pytest
from inbox.auth.gmail import GmailAuthHandler
from inbox.basicauth import GmailSettingError
from inbox.crispin import GmailCrispinClient
class AccountStub(object):
id = 0
email_address = 'bob@bob.com'
access_token = None
imap_endpoint = None
sync_state = 'running'
def new_token(self):
return ('foo', 22)
def validate_token(self, new_token):
return True
class ConnectionStub(object):
def logout(self):
pass
def get_auth_handler(monkeypatch, folders):
g = GmailAuthHandler('gmail')
def mock_connect(a):
return ConnectionStub()
g.connect_account = mock_connect
monkeypatch.setattr(GmailCrispinClient, 'folder_names',
lambda x: folders)
return g
def test_all_mail_missing(monkeypatch):
"""
Test that validate_folders throws a GmailSettingError if All Mail
is not in the list of folders.
"""
g = get_auth_handler(monkeypatch, {'inbox': 'INBOX'})
with pytest.raises(GmailSettingError):
g.verify_account(AccountStub())
def test_all_mail_present(monkeypatch):
"""
Test that the validate_folders passes if All Mail is present.
"""
g = get_auth_handler(monkeypatch, {'all': 'ALL', 'inbox': 'INBOX',
'trash': 'TRASH'})
assert g.verify_account(AccountStub())
|
<commit_before>""" Test that the All Mail folder is enabled for Gmail. """
import pytest
from inbox.auth.gmail import GmailAuthHandler
from inbox.basicauth import GmailSettingError
from inbox.crispin import GmailCrispinClient
class AccountStub(object):
id = 0
email_address = 'bob@bob.com'
access_token = None
imap_endpoint = None
def new_token(self):
return ('foo', 22)
def validate_token(self, new_token):
return True
class ConnectionStub(object):
def logout(self):
pass
def get_auth_handler(monkeypatch, folders):
g = GmailAuthHandler('gmail')
def mock_connect(a):
return ConnectionStub()
g.connect_account = mock_connect
monkeypatch.setattr(GmailCrispinClient, 'folder_names',
lambda x: folders)
return g
def test_all_mail_missing(monkeypatch):
"""
Test that validate_folders throws a GmailSettingError if All Mail
is not in the list of folders.
"""
g = get_auth_handler(monkeypatch, {'inbox': 'INBOX'})
with pytest.raises(GmailSettingError):
g.verify_account(AccountStub())
def test_all_mail_present(monkeypatch):
"""
Test that the validate_folders passes if All Mail is present.
"""
g = get_auth_handler(monkeypatch, {'all': 'ALL', 'inbox': 'INBOX',
'trash': 'TRASH'})
assert g.verify_account(AccountStub())
<commit_msg>Update mock Account in tests.<commit_after>
|
""" Test that the All Mail folder is enabled for Gmail. """
import pytest
from inbox.auth.gmail import GmailAuthHandler
from inbox.basicauth import GmailSettingError
from inbox.crispin import GmailCrispinClient
class AccountStub(object):
id = 0
email_address = 'bob@bob.com'
access_token = None
imap_endpoint = None
sync_state = 'running'
def new_token(self):
return ('foo', 22)
def validate_token(self, new_token):
return True
class ConnectionStub(object):
def logout(self):
pass
def get_auth_handler(monkeypatch, folders):
g = GmailAuthHandler('gmail')
def mock_connect(a):
return ConnectionStub()
g.connect_account = mock_connect
monkeypatch.setattr(GmailCrispinClient, 'folder_names',
lambda x: folders)
return g
def test_all_mail_missing(monkeypatch):
"""
Test that validate_folders throws a GmailSettingError if All Mail
is not in the list of folders.
"""
g = get_auth_handler(monkeypatch, {'inbox': 'INBOX'})
with pytest.raises(GmailSettingError):
g.verify_account(AccountStub())
def test_all_mail_present(monkeypatch):
"""
Test that the validate_folders passes if All Mail is present.
"""
g = get_auth_handler(monkeypatch, {'all': 'ALL', 'inbox': 'INBOX',
'trash': 'TRASH'})
assert g.verify_account(AccountStub())
|
""" Test that the All Mail folder is enabled for Gmail. """
import pytest
from inbox.auth.gmail import GmailAuthHandler
from inbox.basicauth import GmailSettingError
from inbox.crispin import GmailCrispinClient
class AccountStub(object):
id = 0
email_address = 'bob@bob.com'
access_token = None
imap_endpoint = None
def new_token(self):
return ('foo', 22)
def validate_token(self, new_token):
return True
class ConnectionStub(object):
def logout(self):
pass
def get_auth_handler(monkeypatch, folders):
g = GmailAuthHandler('gmail')
def mock_connect(a):
return ConnectionStub()
g.connect_account = mock_connect
monkeypatch.setattr(GmailCrispinClient, 'folder_names',
lambda x: folders)
return g
def test_all_mail_missing(monkeypatch):
"""
Test that validate_folders throws a GmailSettingError if All Mail
is not in the list of folders.
"""
g = get_auth_handler(monkeypatch, {'inbox': 'INBOX'})
with pytest.raises(GmailSettingError):
g.verify_account(AccountStub())
def test_all_mail_present(monkeypatch):
"""
Test that the validate_folders passes if All Mail is present.
"""
g = get_auth_handler(monkeypatch, {'all': 'ALL', 'inbox': 'INBOX',
'trash': 'TRASH'})
assert g.verify_account(AccountStub())
Update mock Account in tests.""" Test that the All Mail folder is enabled for Gmail. """
import pytest
from inbox.auth.gmail import GmailAuthHandler
from inbox.basicauth import GmailSettingError
from inbox.crispin import GmailCrispinClient
class AccountStub(object):
id = 0
email_address = 'bob@bob.com'
access_token = None
imap_endpoint = None
sync_state = 'running'
def new_token(self):
return ('foo', 22)
def validate_token(self, new_token):
return True
class ConnectionStub(object):
def logout(self):
pass
def get_auth_handler(monkeypatch, folders):
g = GmailAuthHandler('gmail')
def mock_connect(a):
return ConnectionStub()
g.connect_account = mock_connect
monkeypatch.setattr(GmailCrispinClient, 'folder_names',
lambda x: folders)
return g
def test_all_mail_missing(monkeypatch):
"""
Test that validate_folders throws a GmailSettingError if All Mail
is not in the list of folders.
"""
g = get_auth_handler(monkeypatch, {'inbox': 'INBOX'})
with pytest.raises(GmailSettingError):
g.verify_account(AccountStub())
def test_all_mail_present(monkeypatch):
"""
Test that the validate_folders passes if All Mail is present.
"""
g = get_auth_handler(monkeypatch, {'all': 'ALL', 'inbox': 'INBOX',
'trash': 'TRASH'})
assert g.verify_account(AccountStub())
|
<commit_before>""" Test that the All Mail folder is enabled for Gmail. """
import pytest
from inbox.auth.gmail import GmailAuthHandler
from inbox.basicauth import GmailSettingError
from inbox.crispin import GmailCrispinClient
class AccountStub(object):
id = 0
email_address = 'bob@bob.com'
access_token = None
imap_endpoint = None
def new_token(self):
return ('foo', 22)
def validate_token(self, new_token):
return True
class ConnectionStub(object):
def logout(self):
pass
def get_auth_handler(monkeypatch, folders):
g = GmailAuthHandler('gmail')
def mock_connect(a):
return ConnectionStub()
g.connect_account = mock_connect
monkeypatch.setattr(GmailCrispinClient, 'folder_names',
lambda x: folders)
return g
def test_all_mail_missing(monkeypatch):
"""
Test that validate_folders throws a GmailSettingError if All Mail
is not in the list of folders.
"""
g = get_auth_handler(monkeypatch, {'inbox': 'INBOX'})
with pytest.raises(GmailSettingError):
g.verify_account(AccountStub())
def test_all_mail_present(monkeypatch):
"""
Test that the validate_folders passes if All Mail is present.
"""
g = get_auth_handler(monkeypatch, {'all': 'ALL', 'inbox': 'INBOX',
'trash': 'TRASH'})
assert g.verify_account(AccountStub())
<commit_msg>Update mock Account in tests.<commit_after>""" Test that the All Mail folder is enabled for Gmail. """
import pytest
from inbox.auth.gmail import GmailAuthHandler
from inbox.basicauth import GmailSettingError
from inbox.crispin import GmailCrispinClient
class AccountStub(object):
id = 0
email_address = 'bob@bob.com'
access_token = None
imap_endpoint = None
sync_state = 'running'
def new_token(self):
return ('foo', 22)
def validate_token(self, new_token):
return True
class ConnectionStub(object):
def logout(self):
pass
def get_auth_handler(monkeypatch, folders):
g = GmailAuthHandler('gmail')
def mock_connect(a):
return ConnectionStub()
g.connect_account = mock_connect
monkeypatch.setattr(GmailCrispinClient, 'folder_names',
lambda x: folders)
return g
def test_all_mail_missing(monkeypatch):
"""
Test that validate_folders throws a GmailSettingError if All Mail
is not in the list of folders.
"""
g = get_auth_handler(monkeypatch, {'inbox': 'INBOX'})
with pytest.raises(GmailSettingError):
g.verify_account(AccountStub())
def test_all_mail_present(monkeypatch):
"""
Test that the validate_folders passes if All Mail is present.
"""
g = get_auth_handler(monkeypatch, {'all': 'ALL', 'inbox': 'INBOX',
'trash': 'TRASH'})
assert g.verify_account(AccountStub())
|
d72882dfa24e1dfd8d1b85103cbc5388e4af3266
|
api/base/exceptions.py
|
api/base/exceptions.py
|
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, list):
for reason in value:
errors.append({'detail': reason, 'meta': {'field': key}})
else:
errors.append({'detail': value, 'meta': {'field': key}})
elif isinstance(message, (list, tuple)):
for error in message:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
|
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
def dict_error_formatting(errors, error):
for key, value in error.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, list):
for reason in value:
errors.append({'detail': reason, 'meta': {'field': key}})
else:
errors.append({'detail': value, 'meta': {'field': key}})
if response:
message = response.data
if isinstance(message, dict):
dict_error_formatting(errors, message)
elif isinstance(message, (list, tuple)):
for error in message:
if isinstance(error, (dict)):
dict_error_formatting(errors, error)
else:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
|
Handle case for error formatting where errors are a list of dictionaries (as you would see in bulk create).
|
Handle case for error formatting where errors are a list of dictionaries (as you would see in bulk create).
|
Python
|
apache-2.0
|
jnayak1/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,abought/osf.io,SSJohns/osf.io,alexschiller/osf.io,brianjgeiger/osf.io,baylee-d/osf.io,GageGaskins/osf.io,cwisecarver/osf.io,TomHeatwole/osf.io,baylee-d/osf.io,felliott/osf.io,caseyrygt/osf.io,asanfilippo7/osf.io,Johnetordoff/osf.io,samchrisinger/osf.io,cslzchen/osf.io,mluke93/osf.io,adlius/osf.io,billyhunt/osf.io,mattclark/osf.io,ticklemepierce/osf.io,crcresearch/osf.io,jnayak1/osf.io,erinspace/osf.io,SSJohns/osf.io,KAsante95/osf.io,GageGaskins/osf.io,amyshi188/osf.io,emetsger/osf.io,ticklemepierce/osf.io,Ghalko/osf.io,alexschiller/osf.io,Johnetordoff/osf.io,wearpants/osf.io,rdhyee/osf.io,hmoco/osf.io,brandonPurvis/osf.io,cwisecarver/osf.io,ZobairAlijan/osf.io,Nesiehr/osf.io,GageGaskins/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,caneruguz/osf.io,abought/osf.io,saradbowman/osf.io,SSJohns/osf.io,monikagrabowska/osf.io,doublebits/osf.io,asanfilippo7/osf.io,zachjanicki/osf.io,caneruguz/osf.io,wearpants/osf.io,KAsante95/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,chrisseto/osf.io,emetsger/osf.io,danielneis/osf.io,billyhunt/osf.io,jnayak1/osf.io,mluo613/osf.io,abought/osf.io,kwierman/osf.io,mluke93/osf.io,acshi/osf.io,cslzchen/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,RomanZWang/osf.io,jnayak1/osf.io,alexschiller/osf.io,zamattiac/osf.io,rdhyee/osf.io,brandonPurvis/osf.io,samanehsan/osf.io,chennan47/osf.io,samanehsan/osf.io,sloria/osf.io,emetsger/osf.io,doublebits/osf.io,emetsger/osf.io,sloria/osf.io,felliott/osf.io,TomBaxter/osf.io,TomBaxter/osf.io,felliott/osf.io,laurenrevere/osf.io,acshi/osf.io,asanfilippo7/osf.io,aaxelb/osf.io,caseyrygt/osf.io,caseyrygt/osf.io,binoculars/osf.io,cslzchen/osf.io,HalcyonChimera/osf.io,amyshi188/osf.io,hmoco/osf.io,RomanZWang/osf.io,mfraezz/osf.io,chrisseto/osf.io,DanielSBrown/osf.io,crcresearch/osf.io,hmoco/osf.io,brandonPurvis/osf.io,doublebits/osf.io,chennan47/osf.io,Ghalko/osf.io,hmoco/osf.io,mluke93/osf.io,samchrisinger/osf.io,crcresearch/osf.io,mattclark/
osf.io,ticklemepierce/osf.io,wearpants/osf.io,zamattiac/osf.io,DanielSBrown/osf.io,wearpants/osf.io,aaxelb/osf.io,kwierman/osf.io,Ghalko/osf.io,Johnetordoff/osf.io,amyshi188/osf.io,saradbowman/osf.io,binoculars/osf.io,TomHeatwole/osf.io,kwierman/osf.io,felliott/osf.io,samanehsan/osf.io,pattisdr/osf.io,caneruguz/osf.io,danielneis/osf.io,aaxelb/osf.io,chrisseto/osf.io,rdhyee/osf.io,mluo613/osf.io,kch8qx/osf.io,billyhunt/osf.io,zamattiac/osf.io,HalcyonChimera/osf.io,KAsante95/osf.io,GageGaskins/osf.io,erinspace/osf.io,pattisdr/osf.io,samchrisinger/osf.io,chennan47/osf.io,caseyrygt/osf.io,alexschiller/osf.io,Ghalko/osf.io,kwierman/osf.io,baylee-d/osf.io,leb2dg/osf.io,adlius/osf.io,mfraezz/osf.io,abought/osf.io,KAsante95/osf.io,ticklemepierce/osf.io,acshi/osf.io,leb2dg/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,icereval/osf.io,sloria/osf.io,acshi/osf.io,billyhunt/osf.io,Nesiehr/osf.io,kch8qx/osf.io,chrisseto/osf.io,asanfilippo7/osf.io,kch8qx/osf.io,alexschiller/osf.io,cwisecarver/osf.io,mfraezz/osf.io,adlius/osf.io,zachjanicki/osf.io,RomanZWang/osf.io,ZobairAlijan/osf.io,mluke93/osf.io,billyhunt/osf.io,zachjanicki/osf.io,TomHeatwole/osf.io,caseyrollins/osf.io,TomHeatwole/osf.io,brandonPurvis/osf.io,KAsante95/osf.io,SSJohns/osf.io,RomanZWang/osf.io,GageGaskins/osf.io,laurenrevere/osf.io,doublebits/osf.io,zachjanicki/osf.io,amyshi188/osf.io,zamattiac/osf.io,TomBaxter/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,ZobairAlijan/osf.io,icereval/osf.io,caseyrollins/osf.io,monikagrabowska/osf.io,acshi/osf.io,brandonPurvis/osf.io,monikagrabowska/osf.io,danielneis/osf.io,samchrisinger/osf.io,monikagrabowska/osf.io,laurenrevere/osf.io,adlius/osf.io,Johnetordoff/osf.io,ZobairAlijan/osf.io,cslzchen/osf.io,samanehsan/osf.io,brianjgeiger/osf.io,Nesiehr/osf.io,aaxelb/osf.io,icereval/osf.io,monikagrabowska/osf.io,DanielSBrown/osf.io,RomanZWang/osf.io,Nesiehr/osf.io,kch8qx/osf.io,mluo613/osf.io,doublebits/osf.io,rdhyee/osf.io,caseyrollins/osf.io,leb2dg/osf.io,mlu
o613/osf.io,binoculars/osf.io,kch8qx/osf.io,danielneis/osf.io,erinspace/osf.io,mluo613/osf.io,DanielSBrown/osf.io,brianjgeiger/osf.io
|
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, list):
for reason in value:
errors.append({'detail': reason, 'meta': {'field': key}})
else:
errors.append({'detail': value, 'meta': {'field': key}})
elif isinstance(message, (list, tuple)):
for error in message:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
Handle case for error formatting where errors are a list of dictionaries (as you would see in bulk create).
|
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
def dict_error_formatting(errors, error):
for key, value in error.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, list):
for reason in value:
errors.append({'detail': reason, 'meta': {'field': key}})
else:
errors.append({'detail': value, 'meta': {'field': key}})
if response:
message = response.data
if isinstance(message, dict):
dict_error_formatting(errors, message)
elif isinstance(message, (list, tuple)):
for error in message:
if isinstance(error, (dict)):
dict_error_formatting(errors, error)
else:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
|
<commit_before>
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, list):
for reason in value:
errors.append({'detail': reason, 'meta': {'field': key}})
else:
errors.append({'detail': value, 'meta': {'field': key}})
elif isinstance(message, (list, tuple)):
for error in message:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
<commit_msg>Handle case for error formatting where errors are a list of dictionaries (as you would see in bulk create).<commit_after>
|
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
def dict_error_formatting(errors, error):
for key, value in error.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, list):
for reason in value:
errors.append({'detail': reason, 'meta': {'field': key}})
else:
errors.append({'detail': value, 'meta': {'field': key}})
if response:
message = response.data
if isinstance(message, dict):
dict_error_formatting(errors, message)
elif isinstance(message, (list, tuple)):
for error in message:
if isinstance(error, (dict)):
dict_error_formatting(errors, error)
else:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
|
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, list):
for reason in value:
errors.append({'detail': reason, 'meta': {'field': key}})
else:
errors.append({'detail': value, 'meta': {'field': key}})
elif isinstance(message, (list, tuple)):
for error in message:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
Handle case for error formatting where errors are a list of dictionaries (as you would see in bulk create).
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
def dict_error_formatting(errors, error):
for key, value in error.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, list):
for reason in value:
errors.append({'detail': reason, 'meta': {'field': key}})
else:
errors.append({'detail': value, 'meta': {'field': key}})
if response:
message = response.data
if isinstance(message, dict):
dict_error_formatting(errors, message)
elif isinstance(message, (list, tuple)):
for error in message:
if isinstance(error, (dict)):
dict_error_formatting(errors, error)
else:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
|
<commit_before>
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, list):
for reason in value:
errors.append({'detail': reason, 'meta': {'field': key}})
else:
errors.append({'detail': value, 'meta': {'field': key}})
elif isinstance(message, (list, tuple)):
for error in message:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
<commit_msg>Handle case for error formatting where errors are a list of dictionaries (as you would see in bulk create).<commit_after>
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
def dict_error_formatting(errors, error):
for key, value in error.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, list):
for reason in value:
errors.append({'detail': reason, 'meta': {'field': key}})
else:
errors.append({'detail': value, 'meta': {'field': key}})
if response:
message = response.data
if isinstance(message, dict):
dict_error_formatting(errors, message)
elif isinstance(message, (list, tuple)):
for error in message:
if isinstance(error, (dict)):
dict_error_formatting(errors, error)
else:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
|
5c52fca74c061bad6625fb93ce920eccdf4741de
|
tabpy-server/setup.py
|
tabpy-server/setup.py
|
import versioneer
try:
from setuptools import setup
except ImportError as err:
print("Missing Python module requirement: setuptools.")
raise err
setup(
name='tabpy-server',
version=versioneer.get_version(),
description='Web server Tableau uses to run Python scripts.',
url='https://github.com/tableau/TabPy',
author='Tableau',
author_email='github@tableau.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Python :: 3.5',
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Information Analysis",
],
packages=['tabpy_server',
'tabpy_server.common',
'tabpy_server.management',
'tabpy_server.psws',
'tabpy_server.static'],
package_data={'tabpy_server.static': ['*.*'],
'tabpy_server': ['startup.*', 'state.ini']},
license='MIT',
# TODO Add tabpy_tools dependency when published on github
install_requires=[
'backports_abc',
'cloudpickle',
'configparser',
'decorator',
'future',
'futures',
'genson',
'jsonschema>=2.3.0',
'mock',
'numpy',
'python-dateutil',
'pyOpenSSL',
'requests',
'singledispatch',
'simplejson',
'tornado==5.1.1',
'Tornado-JSON'
],
cmdclass=versioneer.get_cmdclass(),
)
|
import versioneer
try:
from setuptools import setup
except ImportError as err:
print("Missing Python module requirement: setuptools.")
raise err
setup(
name='tabpy-server',
version=versioneer.get_version(),
description='Web server Tableau uses to run Python scripts.',
url='https://github.com/tableau/TabPy',
author='Tableau',
author_email='github@tableau.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Python :: 3.5',
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Information Analysis",
],
packages=['tabpy_server',
'tabpy_server.common',
'tabpy_server.management',
'tabpy_server.psws',
'tabpy_server.static'],
package_data={'tabpy_server.static': ['*.*'],
'tabpy_server': ['startup.*', 'state.ini']},
license='MIT',
install_requires=[
'backports_abc',
'cloudpickle',
'configparser',
'decorator',
'future',
'futures',
'genson',
'jsonschema>=2.3.0',
'mock',
'numpy',
'python-dateutil',
'pyOpenSSL',
'requests',
'singledispatch',
'simplejson',
'tornado==5.1.1',
'Tornado-JSON'
],
cmdclass=versioneer.get_cmdclass(),
)
|
Address TODO comments in code
|
Address TODO comments in code
|
Python
|
mit
|
tableau/TabPy,tableau/TabPy,tableau/TabPy
|
import versioneer
try:
from setuptools import setup
except ImportError as err:
print("Missing Python module requirement: setuptools.")
raise err
setup(
name='tabpy-server',
version=versioneer.get_version(),
description='Web server Tableau uses to run Python scripts.',
url='https://github.com/tableau/TabPy',
author='Tableau',
author_email='github@tableau.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Python :: 3.5',
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Information Analysis",
],
packages=['tabpy_server',
'tabpy_server.common',
'tabpy_server.management',
'tabpy_server.psws',
'tabpy_server.static'],
package_data={'tabpy_server.static': ['*.*'],
'tabpy_server': ['startup.*', 'state.ini']},
license='MIT',
# TODO Add tabpy_tools dependency when published on github
install_requires=[
'backports_abc',
'cloudpickle',
'configparser',
'decorator',
'future',
'futures',
'genson',
'jsonschema>=2.3.0',
'mock',
'numpy',
'python-dateutil',
'pyOpenSSL',
'requests',
'singledispatch',
'simplejson',
'tornado==5.1.1',
'Tornado-JSON'
],
cmdclass=versioneer.get_cmdclass(),
)
Address TODO comments in code
|
import versioneer
try:
from setuptools import setup
except ImportError as err:
print("Missing Python module requirement: setuptools.")
raise err
setup(
name='tabpy-server',
version=versioneer.get_version(),
description='Web server Tableau uses to run Python scripts.',
url='https://github.com/tableau/TabPy',
author='Tableau',
author_email='github@tableau.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Python :: 3.5',
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Information Analysis",
],
packages=['tabpy_server',
'tabpy_server.common',
'tabpy_server.management',
'tabpy_server.psws',
'tabpy_server.static'],
package_data={'tabpy_server.static': ['*.*'],
'tabpy_server': ['startup.*', 'state.ini']},
license='MIT',
install_requires=[
'backports_abc',
'cloudpickle',
'configparser',
'decorator',
'future',
'futures',
'genson',
'jsonschema>=2.3.0',
'mock',
'numpy',
'python-dateutil',
'pyOpenSSL',
'requests',
'singledispatch',
'simplejson',
'tornado==5.1.1',
'Tornado-JSON'
],
cmdclass=versioneer.get_cmdclass(),
)
|
<commit_before>import versioneer
try:
from setuptools import setup
except ImportError as err:
print("Missing Python module requirement: setuptools.")
raise err
setup(
name='tabpy-server',
version=versioneer.get_version(),
description='Web server Tableau uses to run Python scripts.',
url='https://github.com/tableau/TabPy',
author='Tableau',
author_email='github@tableau.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Python :: 3.5',
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Information Analysis",
],
packages=['tabpy_server',
'tabpy_server.common',
'tabpy_server.management',
'tabpy_server.psws',
'tabpy_server.static'],
package_data={'tabpy_server.static': ['*.*'],
'tabpy_server': ['startup.*', 'state.ini']},
license='MIT',
# TODO Add tabpy_tools dependency when published on github
install_requires=[
'backports_abc',
'cloudpickle',
'configparser',
'decorator',
'future',
'futures',
'genson',
'jsonschema>=2.3.0',
'mock',
'numpy',
'python-dateutil',
'pyOpenSSL',
'requests',
'singledispatch',
'simplejson',
'tornado==5.1.1',
'Tornado-JSON'
],
cmdclass=versioneer.get_cmdclass(),
)
<commit_msg>Address TODO comments in code<commit_after>
|
import versioneer
try:
from setuptools import setup
except ImportError as err:
print("Missing Python module requirement: setuptools.")
raise err
setup(
name='tabpy-server',
version=versioneer.get_version(),
description='Web server Tableau uses to run Python scripts.',
url='https://github.com/tableau/TabPy',
author='Tableau',
author_email='github@tableau.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Python :: 3.5',
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Information Analysis",
],
packages=['tabpy_server',
'tabpy_server.common',
'tabpy_server.management',
'tabpy_server.psws',
'tabpy_server.static'],
package_data={'tabpy_server.static': ['*.*'],
'tabpy_server': ['startup.*', 'state.ini']},
license='MIT',
install_requires=[
'backports_abc',
'cloudpickle',
'configparser',
'decorator',
'future',
'futures',
'genson',
'jsonschema>=2.3.0',
'mock',
'numpy',
'python-dateutil',
'pyOpenSSL',
'requests',
'singledispatch',
'simplejson',
'tornado==5.1.1',
'Tornado-JSON'
],
cmdclass=versioneer.get_cmdclass(),
)
|
import versioneer
try:
from setuptools import setup
except ImportError as err:
print("Missing Python module requirement: setuptools.")
raise err
setup(
name='tabpy-server',
version=versioneer.get_version(),
description='Web server Tableau uses to run Python scripts.',
url='https://github.com/tableau/TabPy',
author='Tableau',
author_email='github@tableau.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Python :: 3.5',
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Information Analysis",
],
packages=['tabpy_server',
'tabpy_server.common',
'tabpy_server.management',
'tabpy_server.psws',
'tabpy_server.static'],
package_data={'tabpy_server.static': ['*.*'],
'tabpy_server': ['startup.*', 'state.ini']},
license='MIT',
# TODO Add tabpy_tools dependency when published on github
install_requires=[
'backports_abc',
'cloudpickle',
'configparser',
'decorator',
'future',
'futures',
'genson',
'jsonschema>=2.3.0',
'mock',
'numpy',
'python-dateutil',
'pyOpenSSL',
'requests',
'singledispatch',
'simplejson',
'tornado==5.1.1',
'Tornado-JSON'
],
cmdclass=versioneer.get_cmdclass(),
)
Address TODO comments in codeimport versioneer
try:
from setuptools import setup
except ImportError as err:
print("Missing Python module requirement: setuptools.")
raise err
setup(
name='tabpy-server',
version=versioneer.get_version(),
description='Web server Tableau uses to run Python scripts.',
url='https://github.com/tableau/TabPy',
author='Tableau',
author_email='github@tableau.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Python :: 3.5',
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Information Analysis",
],
packages=['tabpy_server',
'tabpy_server.common',
'tabpy_server.management',
'tabpy_server.psws',
'tabpy_server.static'],
package_data={'tabpy_server.static': ['*.*'],
'tabpy_server': ['startup.*', 'state.ini']},
license='MIT',
install_requires=[
'backports_abc',
'cloudpickle',
'configparser',
'decorator',
'future',
'futures',
'genson',
'jsonschema>=2.3.0',
'mock',
'numpy',
'python-dateutil',
'pyOpenSSL',
'requests',
'singledispatch',
'simplejson',
'tornado==5.1.1',
'Tornado-JSON'
],
cmdclass=versioneer.get_cmdclass(),
)
|
<commit_before>import versioneer
try:
from setuptools import setup
except ImportError as err:
print("Missing Python module requirement: setuptools.")
raise err
setup(
name='tabpy-server',
version=versioneer.get_version(),
description='Web server Tableau uses to run Python scripts.',
url='https://github.com/tableau/TabPy',
author='Tableau',
author_email='github@tableau.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Python :: 3.5',
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Information Analysis",
],
packages=['tabpy_server',
'tabpy_server.common',
'tabpy_server.management',
'tabpy_server.psws',
'tabpy_server.static'],
package_data={'tabpy_server.static': ['*.*'],
'tabpy_server': ['startup.*', 'state.ini']},
license='MIT',
# TODO Add tabpy_tools dependency when published on github
install_requires=[
'backports_abc',
'cloudpickle',
'configparser',
'decorator',
'future',
'futures',
'genson',
'jsonschema>=2.3.0',
'mock',
'numpy',
'python-dateutil',
'pyOpenSSL',
'requests',
'singledispatch',
'simplejson',
'tornado==5.1.1',
'Tornado-JSON'
],
cmdclass=versioneer.get_cmdclass(),
)
<commit_msg>Address TODO comments in code<commit_after>import versioneer
try:
from setuptools import setup
except ImportError as err:
print("Missing Python module requirement: setuptools.")
raise err
setup(
name='tabpy-server',
version=versioneer.get_version(),
description='Web server Tableau uses to run Python scripts.',
url='https://github.com/tableau/TabPy',
author='Tableau',
author_email='github@tableau.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Python :: 3.5',
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Information Analysis",
],
packages=['tabpy_server',
'tabpy_server.common',
'tabpy_server.management',
'tabpy_server.psws',
'tabpy_server.static'],
package_data={'tabpy_server.static': ['*.*'],
'tabpy_server': ['startup.*', 'state.ini']},
license='MIT',
install_requires=[
'backports_abc',
'cloudpickle',
'configparser',
'decorator',
'future',
'futures',
'genson',
'jsonschema>=2.3.0',
'mock',
'numpy',
'python-dateutil',
'pyOpenSSL',
'requests',
'singledispatch',
'simplejson',
'tornado==5.1.1',
'Tornado-JSON'
],
cmdclass=versioneer.get_cmdclass(),
)
|
79d7ba8e3b590bc4ecdc4ae2b8808f14093902d0
|
pikos/logging.py
|
pikos/logging.py
|
from __future__ import absolute_import
import inspect
import os
import sys
import psutil
from collections import namedtuple
from functools import wraps
from pikos.abstract_monitors import AbstractMonitor
__all__ = [
'FunctionLogger',
'FunctionRecord',
]
FunctionRecord = namedtuple('FunctionRecord',
['type', 'filename', 'lineNo', 'function'])
class FunctionLogger(AbstractMonitor):
_fields = FunctionRecord._fields
def __init__(self, recorder):
''' Initialize the logger class.
Parameters
----------
function : callable
The callable to profile
output : str
The file in which to store profiling results.
'''
super(FunctionLogger, self).__init__(None)
self._recorder = recorder
self._process = None
self._old_profile_function = None
def __call__(self, function):
self._item = function
@wraps(function)
def wrapper(*args, **kwds):
return self.run(*args, **kwds)
return wrapper
def setup(self):
self._recorder.prepare(self._fields)
self._process = psutil.Process(os.getpid())
self._old_profile_function = sys.getprofile()
sys.setprofile(self.on_function_event)
def teardown(self):
sys.setprofile(self._old_profile_function)
self._process = None
self._recorder.finalize()
def on_function_event(self, frame, event, arg):
filename, lineno, function, _, _ = \
inspect.getframeinfo(frame, context=0)
if event.startswith('c_'):
function = arg.__name__
record = FunctionRecord(event, filename, lineno, function)
self._recorder.record(record)
|
from __future__ import absolute_import
import inspect
from collections import namedtuple
from pikos._profile_functions import ProfileFunctions
from pikos._trace_functions import TraceFunctions
__all__ = [
'FunctionLogger',
'FunctionRecord',
]
FunctionRecord = namedtuple('FunctionRecord',
['type', 'filename', 'lineNo', 'function'])
class FunctionLogger(object):
_fields = FunctionRecord._fields
def __init__(self, recorder):
self._recorder = recorder
self._profiler = ProfileFunctions()
def __enter__(self):
self._recorder.prepare(self._fields)
self._profiler.set(self.on_function_event)
def __exit__(self, exc_type, exc_val, exc_tb):
self._profiler.unset()
self._recorder.finalize()
def on_function_event(self, frame, event, arg):
filename, lineno, function, _, _ = \
inspect.getframeinfo(frame, context=0)
if event.startswith('c_'):
function = arg.__name__
record = FunctionRecord(event, filename, lineno, function)
self._recorder.record(record)
|
Make the FunctionLogger a context manager
|
Make the FunctionLogger a context manager
|
Python
|
bsd-3-clause
|
enthought/pikos,enthought/pikos,enthought/pikos
|
from __future__ import absolute_import
import inspect
import os
import sys
import psutil
from collections import namedtuple
from functools import wraps
from pikos.abstract_monitors import AbstractMonitor
__all__ = [
'FunctionLogger',
'FunctionRecord',
]
FunctionRecord = namedtuple('FunctionRecord',
['type', 'filename', 'lineNo', 'function'])
class FunctionLogger(AbstractMonitor):
_fields = FunctionRecord._fields
def __init__(self, recorder):
''' Initialize the logger class.
Parameters
----------
function : callable
The callable to profile
output : str
The file in which to store profiling results.
'''
super(FunctionLogger, self).__init__(None)
self._recorder = recorder
self._process = None
self._old_profile_function = None
def __call__(self, function):
self._item = function
@wraps(function)
def wrapper(*args, **kwds):
return self.run(*args, **kwds)
return wrapper
def setup(self):
self._recorder.prepare(self._fields)
self._process = psutil.Process(os.getpid())
self._old_profile_function = sys.getprofile()
sys.setprofile(self.on_function_event)
def teardown(self):
sys.setprofile(self._old_profile_function)
self._process = None
self._recorder.finalize()
def on_function_event(self, frame, event, arg):
filename, lineno, function, _, _ = \
inspect.getframeinfo(frame, context=0)
if event.startswith('c_'):
function = arg.__name__
record = FunctionRecord(event, filename, lineno, function)
self._recorder.record(record)
Make the FunctionLogger a context manager
|
from __future__ import absolute_import
import inspect
from collections import namedtuple
from pikos._profile_functions import ProfileFunctions
from pikos._trace_functions import TraceFunctions
__all__ = [
'FunctionLogger',
'FunctionRecord',
]
FunctionRecord = namedtuple('FunctionRecord',
['type', 'filename', 'lineNo', 'function'])
class FunctionLogger(object):
_fields = FunctionRecord._fields
def __init__(self, recorder):
self._recorder = recorder
self._profiler = ProfileFunctions()
def __enter__(self):
self._recorder.prepare(self._fields)
self._profiler.set(self.on_function_event)
def __exit__(self, exc_type, exc_val, exc_tb):
self._profiler.unset()
self._recorder.finalize()
def on_function_event(self, frame, event, arg):
filename, lineno, function, _, _ = \
inspect.getframeinfo(frame, context=0)
if event.startswith('c_'):
function = arg.__name__
record = FunctionRecord(event, filename, lineno, function)
self._recorder.record(record)
|
<commit_before>from __future__ import absolute_import
import inspect
import os
import sys
import psutil
from collections import namedtuple
from functools import wraps
from pikos.abstract_monitors import AbstractMonitor
__all__ = [
'FunctionLogger',
'FunctionRecord',
]
FunctionRecord = namedtuple('FunctionRecord',
['type', 'filename', 'lineNo', 'function'])
class FunctionLogger(AbstractMonitor):
_fields = FunctionRecord._fields
def __init__(self, recorder):
''' Initialize the logger class.
Parameters
----------
function : callable
The callable to profile
output : str
The file in which to store profiling results.
'''
super(FunctionLogger, self).__init__(None)
self._recorder = recorder
self._process = None
self._old_profile_function = None
def __call__(self, function):
self._item = function
@wraps(function)
def wrapper(*args, **kwds):
return self.run(*args, **kwds)
return wrapper
def setup(self):
self._recorder.prepare(self._fields)
self._process = psutil.Process(os.getpid())
self._old_profile_function = sys.getprofile()
sys.setprofile(self.on_function_event)
def teardown(self):
sys.setprofile(self._old_profile_function)
self._process = None
self._recorder.finalize()
def on_function_event(self, frame, event, arg):
filename, lineno, function, _, _ = \
inspect.getframeinfo(frame, context=0)
if event.startswith('c_'):
function = arg.__name__
record = FunctionRecord(event, filename, lineno, function)
self._recorder.record(record)
<commit_msg>Make the FunctionLogger a context manager<commit_after>
|
from __future__ import absolute_import
import inspect
from collections import namedtuple
from pikos._profile_functions import ProfileFunctions
from pikos._trace_functions import TraceFunctions
__all__ = [
'FunctionLogger',
'FunctionRecord',
]
FunctionRecord = namedtuple('FunctionRecord',
['type', 'filename', 'lineNo', 'function'])
class FunctionLogger(object):
_fields = FunctionRecord._fields
def __init__(self, recorder):
self._recorder = recorder
self._profiler = ProfileFunctions()
def __enter__(self):
self._recorder.prepare(self._fields)
self._profiler.set(self.on_function_event)
def __exit__(self, exc_type, exc_val, exc_tb):
self._profiler.unset()
self._recorder.finalize()
def on_function_event(self, frame, event, arg):
filename, lineno, function, _, _ = \
inspect.getframeinfo(frame, context=0)
if event.startswith('c_'):
function = arg.__name__
record = FunctionRecord(event, filename, lineno, function)
self._recorder.record(record)
|
from __future__ import absolute_import
import inspect
import os
import sys
import psutil
from collections import namedtuple
from functools import wraps
from pikos.abstract_monitors import AbstractMonitor
__all__ = [
'FunctionLogger',
'FunctionRecord',
]
FunctionRecord = namedtuple('FunctionRecord',
['type', 'filename', 'lineNo', 'function'])
class FunctionLogger(AbstractMonitor):
_fields = FunctionRecord._fields
def __init__(self, recorder):
''' Initialize the logger class.
Parameters
----------
function : callable
The callable to profile
output : str
The file in which to store profiling results.
'''
super(FunctionLogger, self).__init__(None)
self._recorder = recorder
self._process = None
self._old_profile_function = None
def __call__(self, function):
self._item = function
@wraps(function)
def wrapper(*args, **kwds):
return self.run(*args, **kwds)
return wrapper
def setup(self):
self._recorder.prepare(self._fields)
self._process = psutil.Process(os.getpid())
self._old_profile_function = sys.getprofile()
sys.setprofile(self.on_function_event)
def teardown(self):
sys.setprofile(self._old_profile_function)
self._process = None
self._recorder.finalize()
def on_function_event(self, frame, event, arg):
filename, lineno, function, _, _ = \
inspect.getframeinfo(frame, context=0)
if event.startswith('c_'):
function = arg.__name__
record = FunctionRecord(event, filename, lineno, function)
self._recorder.record(record)
Make the FunctionLogger a context managerfrom __future__ import absolute_import
import inspect
from collections import namedtuple
from pikos._profile_functions import ProfileFunctions
from pikos._trace_functions import TraceFunctions
__all__ = [
'FunctionLogger',
'FunctionRecord',
]
FunctionRecord = namedtuple('FunctionRecord',
['type', 'filename', 'lineNo', 'function'])
class FunctionLogger(object):
_fields = FunctionRecord._fields
def __init__(self, recorder):
self._recorder = recorder
self._profiler = ProfileFunctions()
def __enter__(self):
self._recorder.prepare(self._fields)
self._profiler.set(self.on_function_event)
def __exit__(self, exc_type, exc_val, exc_tb):
self._profiler.unset()
self._recorder.finalize()
def on_function_event(self, frame, event, arg):
filename, lineno, function, _, _ = \
inspect.getframeinfo(frame, context=0)
if event.startswith('c_'):
function = arg.__name__
record = FunctionRecord(event, filename, lineno, function)
self._recorder.record(record)
|
<commit_before>from __future__ import absolute_import
import inspect
import os
import sys
import psutil
from collections import namedtuple
from functools import wraps
from pikos.abstract_monitors import AbstractMonitor
__all__ = [
'FunctionLogger',
'FunctionRecord',
]
FunctionRecord = namedtuple('FunctionRecord',
['type', 'filename', 'lineNo', 'function'])
class FunctionLogger(AbstractMonitor):
_fields = FunctionRecord._fields
def __init__(self, recorder):
''' Initialize the logger class.
Parameters
----------
function : callable
The callable to profile
output : str
The file in which to store profiling results.
'''
super(FunctionLogger, self).__init__(None)
self._recorder = recorder
self._process = None
self._old_profile_function = None
def __call__(self, function):
self._item = function
@wraps(function)
def wrapper(*args, **kwds):
return self.run(*args, **kwds)
return wrapper
def setup(self):
self._recorder.prepare(self._fields)
self._process = psutil.Process(os.getpid())
self._old_profile_function = sys.getprofile()
sys.setprofile(self.on_function_event)
def teardown(self):
sys.setprofile(self._old_profile_function)
self._process = None
self._recorder.finalize()
def on_function_event(self, frame, event, arg):
filename, lineno, function, _, _ = \
inspect.getframeinfo(frame, context=0)
if event.startswith('c_'):
function = arg.__name__
record = FunctionRecord(event, filename, lineno, function)
self._recorder.record(record)
<commit_msg>Make the FunctionLogger a context manager<commit_after>from __future__ import absolute_import
import inspect
from collections import namedtuple
from pikos._profile_functions import ProfileFunctions
from pikos._trace_functions import TraceFunctions
__all__ = [
'FunctionLogger',
'FunctionRecord',
]
FunctionRecord = namedtuple('FunctionRecord',
['type', 'filename', 'lineNo', 'function'])
class FunctionLogger(object):
_fields = FunctionRecord._fields
def __init__(self, recorder):
self._recorder = recorder
self._profiler = ProfileFunctions()
def __enter__(self):
self._recorder.prepare(self._fields)
self._profiler.set(self.on_function_event)
def __exit__(self, exc_type, exc_val, exc_tb):
self._profiler.unset()
self._recorder.finalize()
def on_function_event(self, frame, event, arg):
filename, lineno, function, _, _ = \
inspect.getframeinfo(frame, context=0)
if event.startswith('c_'):
function = arg.__name__
record = FunctionRecord(event, filename, lineno, function)
self._recorder.record(record)
|
f2e0136fe617ff3948b47daa7c35102076deb671
|
peering/api/urls.py
|
peering/api/urls.py
|
from rest_framework import routers
from . import views
class PeeringRootView(routers.APIRootView):
"""
Peering API root view
"""
def get_view_name(self):
return "Peering"
router = routers.DefaultRouter()
router.APIRootView = PeeringRootView
router.register(r"_choices", views.PeeringFieldChoicesViewSet, basename="field-choice")
router.register(r"autonomous-systems", views.AutonomousSystemViewSet)
router.register(r"communities", views.CommunityViewSet)
router.register(r"direct-peering-sessions", views.DirectPeeringSessionViewSet)
router.register(r"internet-exchanges", views.InternetExchangeViewSet)
router.register(
r"internet-exchange-peering-sessions", views.InternetExchangePeeringSessionViewSet
)
router.register(r"routers", views.RouterViewSet)
router.register(r"routing-policies", views.RoutingPolicyViewSet)
router.register(r"templates", views.ConfigurationTemplateViewSet)
app_name = "peering-api"
urlpatterns = router.urls
|
from rest_framework import routers
from . import views
class PeeringRootView(routers.APIRootView):
"""
Peering API root view
"""
def get_view_name(self):
return "Peering"
router = routers.DefaultRouter()
router.APIRootView = PeeringRootView
router.register(r"_choices", views.PeeringFieldChoicesViewSet, base_name="field-choice")
router.register(r"autonomous-systems", views.AutonomousSystemViewSet)
router.register(r"communities", views.CommunityViewSet)
router.register(r"direct-peering-sessions", views.DirectPeeringSessionViewSet)
router.register(r"internet-exchanges", views.InternetExchangeViewSet)
router.register(
r"internet-exchange-peering-sessions", views.InternetExchangePeeringSessionViewSet
)
router.register(r"routers", views.RouterViewSet)
router.register(r"routing-policies", views.RoutingPolicyViewSet)
router.register(r"templates", views.ConfigurationTemplateViewSet)
app_name = "peering-api"
urlpatterns = router.urls
|
Fix typo in API URLs.
|
Fix typo in API URLs.
|
Python
|
apache-2.0
|
respawner/peering-manager,respawner/peering-manager,respawner/peering-manager,respawner/peering-manager
|
from rest_framework import routers
from . import views
class PeeringRootView(routers.APIRootView):
"""
Peering API root view
"""
def get_view_name(self):
return "Peering"
router = routers.DefaultRouter()
router.APIRootView = PeeringRootView
router.register(r"_choices", views.PeeringFieldChoicesViewSet, basename="field-choice")
router.register(r"autonomous-systems", views.AutonomousSystemViewSet)
router.register(r"communities", views.CommunityViewSet)
router.register(r"direct-peering-sessions", views.DirectPeeringSessionViewSet)
router.register(r"internet-exchanges", views.InternetExchangeViewSet)
router.register(
r"internet-exchange-peering-sessions", views.InternetExchangePeeringSessionViewSet
)
router.register(r"routers", views.RouterViewSet)
router.register(r"routing-policies", views.RoutingPolicyViewSet)
router.register(r"templates", views.ConfigurationTemplateViewSet)
app_name = "peering-api"
urlpatterns = router.urls
Fix typo in API URLs.
|
from rest_framework import routers
from . import views
class PeeringRootView(routers.APIRootView):
"""
Peering API root view
"""
def get_view_name(self):
return "Peering"
router = routers.DefaultRouter()
router.APIRootView = PeeringRootView
router.register(r"_choices", views.PeeringFieldChoicesViewSet, base_name="field-choice")
router.register(r"autonomous-systems", views.AutonomousSystemViewSet)
router.register(r"communities", views.CommunityViewSet)
router.register(r"direct-peering-sessions", views.DirectPeeringSessionViewSet)
router.register(r"internet-exchanges", views.InternetExchangeViewSet)
router.register(
r"internet-exchange-peering-sessions", views.InternetExchangePeeringSessionViewSet
)
router.register(r"routers", views.RouterViewSet)
router.register(r"routing-policies", views.RoutingPolicyViewSet)
router.register(r"templates", views.ConfigurationTemplateViewSet)
app_name = "peering-api"
urlpatterns = router.urls
|
<commit_before>from rest_framework import routers
from . import views
class PeeringRootView(routers.APIRootView):
"""
Peering API root view
"""
def get_view_name(self):
return "Peering"
router = routers.DefaultRouter()
router.APIRootView = PeeringRootView
router.register(r"_choices", views.PeeringFieldChoicesViewSet, basename="field-choice")
router.register(r"autonomous-systems", views.AutonomousSystemViewSet)
router.register(r"communities", views.CommunityViewSet)
router.register(r"direct-peering-sessions", views.DirectPeeringSessionViewSet)
router.register(r"internet-exchanges", views.InternetExchangeViewSet)
router.register(
r"internet-exchange-peering-sessions", views.InternetExchangePeeringSessionViewSet
)
router.register(r"routers", views.RouterViewSet)
router.register(r"routing-policies", views.RoutingPolicyViewSet)
router.register(r"templates", views.ConfigurationTemplateViewSet)
app_name = "peering-api"
urlpatterns = router.urls
<commit_msg>Fix typo in API URLs.<commit_after>
|
from rest_framework import routers
from . import views
class PeeringRootView(routers.APIRootView):
"""
Peering API root view
"""
def get_view_name(self):
return "Peering"
router = routers.DefaultRouter()
router.APIRootView = PeeringRootView
router.register(r"_choices", views.PeeringFieldChoicesViewSet, base_name="field-choice")
router.register(r"autonomous-systems", views.AutonomousSystemViewSet)
router.register(r"communities", views.CommunityViewSet)
router.register(r"direct-peering-sessions", views.DirectPeeringSessionViewSet)
router.register(r"internet-exchanges", views.InternetExchangeViewSet)
router.register(
r"internet-exchange-peering-sessions", views.InternetExchangePeeringSessionViewSet
)
router.register(r"routers", views.RouterViewSet)
router.register(r"routing-policies", views.RoutingPolicyViewSet)
router.register(r"templates", views.ConfigurationTemplateViewSet)
app_name = "peering-api"
urlpatterns = router.urls
|
from rest_framework import routers
from . import views
class PeeringRootView(routers.APIRootView):
"""
Peering API root view
"""
def get_view_name(self):
return "Peering"
router = routers.DefaultRouter()
router.APIRootView = PeeringRootView
router.register(r"_choices", views.PeeringFieldChoicesViewSet, basename="field-choice")
router.register(r"autonomous-systems", views.AutonomousSystemViewSet)
router.register(r"communities", views.CommunityViewSet)
router.register(r"direct-peering-sessions", views.DirectPeeringSessionViewSet)
router.register(r"internet-exchanges", views.InternetExchangeViewSet)
router.register(
r"internet-exchange-peering-sessions", views.InternetExchangePeeringSessionViewSet
)
router.register(r"routers", views.RouterViewSet)
router.register(r"routing-policies", views.RoutingPolicyViewSet)
router.register(r"templates", views.ConfigurationTemplateViewSet)
app_name = "peering-api"
urlpatterns = router.urls
Fix typo in API URLs.from rest_framework import routers
from . import views
class PeeringRootView(routers.APIRootView):
"""
Peering API root view
"""
def get_view_name(self):
return "Peering"
router = routers.DefaultRouter()
router.APIRootView = PeeringRootView
router.register(r"_choices", views.PeeringFieldChoicesViewSet, base_name="field-choice")
router.register(r"autonomous-systems", views.AutonomousSystemViewSet)
router.register(r"communities", views.CommunityViewSet)
router.register(r"direct-peering-sessions", views.DirectPeeringSessionViewSet)
router.register(r"internet-exchanges", views.InternetExchangeViewSet)
router.register(
r"internet-exchange-peering-sessions", views.InternetExchangePeeringSessionViewSet
)
router.register(r"routers", views.RouterViewSet)
router.register(r"routing-policies", views.RoutingPolicyViewSet)
router.register(r"templates", views.ConfigurationTemplateViewSet)
app_name = "peering-api"
urlpatterns = router.urls
|
<commit_before>from rest_framework import routers
from . import views
class PeeringRootView(routers.APIRootView):
"""
Peering API root view
"""
def get_view_name(self):
return "Peering"
router = routers.DefaultRouter()
router.APIRootView = PeeringRootView
router.register(r"_choices", views.PeeringFieldChoicesViewSet, basename="field-choice")
router.register(r"autonomous-systems", views.AutonomousSystemViewSet)
router.register(r"communities", views.CommunityViewSet)
router.register(r"direct-peering-sessions", views.DirectPeeringSessionViewSet)
router.register(r"internet-exchanges", views.InternetExchangeViewSet)
router.register(
r"internet-exchange-peering-sessions", views.InternetExchangePeeringSessionViewSet
)
router.register(r"routers", views.RouterViewSet)
router.register(r"routing-policies", views.RoutingPolicyViewSet)
router.register(r"templates", views.ConfigurationTemplateViewSet)
app_name = "peering-api"
urlpatterns = router.urls
<commit_msg>Fix typo in API URLs.<commit_after>from rest_framework import routers
from . import views
class PeeringRootView(routers.APIRootView):
"""
Peering API root view
"""
def get_view_name(self):
return "Peering"
router = routers.DefaultRouter()
router.APIRootView = PeeringRootView
router.register(r"_choices", views.PeeringFieldChoicesViewSet, base_name="field-choice")
router.register(r"autonomous-systems", views.AutonomousSystemViewSet)
router.register(r"communities", views.CommunityViewSet)
router.register(r"direct-peering-sessions", views.DirectPeeringSessionViewSet)
router.register(r"internet-exchanges", views.InternetExchangeViewSet)
router.register(
r"internet-exchange-peering-sessions", views.InternetExchangePeeringSessionViewSet
)
router.register(r"routers", views.RouterViewSet)
router.register(r"routing-policies", views.RoutingPolicyViewSet)
router.register(r"templates", views.ConfigurationTemplateViewSet)
app_name = "peering-api"
urlpatterns = router.urls
|
b9654ffbbd1c2057d1ff377a0190b115f568d080
|
knights/defaulttags.py
|
knights/defaulttags.py
|
from .library import Library
import datetime
register = Library()
@register.tag(name='now')
def now(parser, token):
args, kwargs = parser.parse_args(token)
def _now(context):
a, k = parser.resolve_args(context, args, kwargs)
val = datetime.datetime.now()
return val.strftime(a[0])
return _now
|
from .library import Library
from .parse import BasicNode
import datetime
register = Library()
@register.tag(name='now')
class NowNode(BasicNode):
def render(self, fmt):
val = datetime.datetime.now()
return val.strftime(fmt)
|
Rewrite 'now' tag to use BasicNode
|
Rewrite 'now' tag to use BasicNode
|
Python
|
mit
|
funkybob/knights-templater,funkybob/knights-templater
|
from .library import Library
import datetime
register = Library()
@register.tag(name='now')
def now(parser, token):
args, kwargs = parser.parse_args(token)
def _now(context):
a, k = parser.resolve_args(context, args, kwargs)
val = datetime.datetime.now()
return val.strftime(a[0])
return _now
Rewrite 'now' tag to use BasicNode
|
from .library import Library
from .parse import BasicNode
import datetime
register = Library()
@register.tag(name='now')
class NowNode(BasicNode):
def render(self, fmt):
val = datetime.datetime.now()
return val.strftime(fmt)
|
<commit_before>from .library import Library
import datetime
register = Library()
@register.tag(name='now')
def now(parser, token):
args, kwargs = parser.parse_args(token)
def _now(context):
a, k = parser.resolve_args(context, args, kwargs)
val = datetime.datetime.now()
return val.strftime(a[0])
return _now
<commit_msg>Rewrite 'now' tag to use BasicNode<commit_after>
|
from .library import Library
from .parse import BasicNode
import datetime
register = Library()
@register.tag(name='now')
class NowNode(BasicNode):
def render(self, fmt):
val = datetime.datetime.now()
return val.strftime(fmt)
|
from .library import Library
import datetime
register = Library()
@register.tag(name='now')
def now(parser, token):
args, kwargs = parser.parse_args(token)
def _now(context):
a, k = parser.resolve_args(context, args, kwargs)
val = datetime.datetime.now()
return val.strftime(a[0])
return _now
Rewrite 'now' tag to use BasicNodefrom .library import Library
from .parse import BasicNode
import datetime
register = Library()
@register.tag(name='now')
class NowNode(BasicNode):
def render(self, fmt):
val = datetime.datetime.now()
return val.strftime(fmt)
|
<commit_before>from .library import Library
import datetime
register = Library()
@register.tag(name='now')
def now(parser, token):
args, kwargs = parser.parse_args(token)
def _now(context):
a, k = parser.resolve_args(context, args, kwargs)
val = datetime.datetime.now()
return val.strftime(a[0])
return _now
<commit_msg>Rewrite 'now' tag to use BasicNode<commit_after>from .library import Library
from .parse import BasicNode
import datetime
register = Library()
@register.tag(name='now')
class NowNode(BasicNode):
def render(self, fmt):
val = datetime.datetime.now()
return val.strftime(fmt)
|
d610e03ef113d37d516bd9432bd3f43f3d443563
|
tests/test_commands.py
|
tests/test_commands.py
|
import os
from subprocess import call
import unittest
from testpath.commands import *
class CommandsTests(unittest.TestCase):
def test_assert_calls(self):
initial_path = os.environ['PATH']
with assert_calls('foobar'):
call(['foobar'])
with self.assertRaises(AssertionError):
with assert_calls('foo'):
pass
# The context manager should clean up $PATH again
self.assertEqual(os.environ['PATH'], initial_path)
def test_assert_calls_with_args(self):
with assert_calls('foo', ['bar', 'baz']):
call(['foo', 'bar', 'baz'])
with self.assertRaises(AssertionError):
with assert_calls('cheese', ['crackers']):
call(['cheese', 'biscuits'])
call(['cheese', 'wine'])
|
import os
from subprocess import call
import unittest
from testpath.commands import *
class CommandsTests(unittest.TestCase):
def test_assert_calls(self):
initial_path = os.environ['PATH']
with assert_calls('foobar'):
call(['foobar'])
with self.assertRaises(AssertionError):
with assert_calls('foo'):
pass
# The context manager should clean up $PATH again
self.assertEqual(os.environ['PATH'], initial_path)
def test_assert_calls_with_args(self):
with assert_calls('foo', ['bar', 'baz']):
call(['foo', 'bar', 'baz'])
with self.assertRaises(AssertionError):
with assert_calls('cheese', ['crackers']):
call(['cheese', 'biscuits'])
call(['cheese', 'wine'])
def test_assert_calls_twice(self):
with assert_calls('git'):
call(['git'])
with self.assertRaises(AssertionError):
with assert_calls('git'):
pass
|
Add failing test for gh-5
|
Add failing test for gh-5
|
Python
|
bsd-3-clause
|
jupyter/testpath
|
import os
from subprocess import call
import unittest
from testpath.commands import *
class CommandsTests(unittest.TestCase):
def test_assert_calls(self):
initial_path = os.environ['PATH']
with assert_calls('foobar'):
call(['foobar'])
with self.assertRaises(AssertionError):
with assert_calls('foo'):
pass
# The context manager should clean up $PATH again
self.assertEqual(os.environ['PATH'], initial_path)
def test_assert_calls_with_args(self):
with assert_calls('foo', ['bar', 'baz']):
call(['foo', 'bar', 'baz'])
with self.assertRaises(AssertionError):
with assert_calls('cheese', ['crackers']):
call(['cheese', 'biscuits'])
call(['cheese', 'wine'])
Add failing test for gh-5
|
import os
from subprocess import call
import unittest
from testpath.commands import *
class CommandsTests(unittest.TestCase):
def test_assert_calls(self):
initial_path = os.environ['PATH']
with assert_calls('foobar'):
call(['foobar'])
with self.assertRaises(AssertionError):
with assert_calls('foo'):
pass
# The context manager should clean up $PATH again
self.assertEqual(os.environ['PATH'], initial_path)
def test_assert_calls_with_args(self):
with assert_calls('foo', ['bar', 'baz']):
call(['foo', 'bar', 'baz'])
with self.assertRaises(AssertionError):
with assert_calls('cheese', ['crackers']):
call(['cheese', 'biscuits'])
call(['cheese', 'wine'])
def test_assert_calls_twice(self):
with assert_calls('git'):
call(['git'])
with self.assertRaises(AssertionError):
with assert_calls('git'):
pass
|
<commit_before>import os
from subprocess import call
import unittest
from testpath.commands import *
class CommandsTests(unittest.TestCase):
def test_assert_calls(self):
initial_path = os.environ['PATH']
with assert_calls('foobar'):
call(['foobar'])
with self.assertRaises(AssertionError):
with assert_calls('foo'):
pass
# The context manager should clean up $PATH again
self.assertEqual(os.environ['PATH'], initial_path)
def test_assert_calls_with_args(self):
with assert_calls('foo', ['bar', 'baz']):
call(['foo', 'bar', 'baz'])
with self.assertRaises(AssertionError):
with assert_calls('cheese', ['crackers']):
call(['cheese', 'biscuits'])
call(['cheese', 'wine'])
<commit_msg>Add failing test for gh-5<commit_after>
|
import os
from subprocess import call
import unittest
from testpath.commands import *
class CommandsTests(unittest.TestCase):
def test_assert_calls(self):
initial_path = os.environ['PATH']
with assert_calls('foobar'):
call(['foobar'])
with self.assertRaises(AssertionError):
with assert_calls('foo'):
pass
# The context manager should clean up $PATH again
self.assertEqual(os.environ['PATH'], initial_path)
def test_assert_calls_with_args(self):
with assert_calls('foo', ['bar', 'baz']):
call(['foo', 'bar', 'baz'])
with self.assertRaises(AssertionError):
with assert_calls('cheese', ['crackers']):
call(['cheese', 'biscuits'])
call(['cheese', 'wine'])
def test_assert_calls_twice(self):
with assert_calls('git'):
call(['git'])
with self.assertRaises(AssertionError):
with assert_calls('git'):
pass
|
import os
from subprocess import call
import unittest
from testpath.commands import *
class CommandsTests(unittest.TestCase):
def test_assert_calls(self):
initial_path = os.environ['PATH']
with assert_calls('foobar'):
call(['foobar'])
with self.assertRaises(AssertionError):
with assert_calls('foo'):
pass
# The context manager should clean up $PATH again
self.assertEqual(os.environ['PATH'], initial_path)
def test_assert_calls_with_args(self):
with assert_calls('foo', ['bar', 'baz']):
call(['foo', 'bar', 'baz'])
with self.assertRaises(AssertionError):
with assert_calls('cheese', ['crackers']):
call(['cheese', 'biscuits'])
call(['cheese', 'wine'])
Add failing test for gh-5import os
from subprocess import call
import unittest
from testpath.commands import *
class CommandsTests(unittest.TestCase):
def test_assert_calls(self):
initial_path = os.environ['PATH']
with assert_calls('foobar'):
call(['foobar'])
with self.assertRaises(AssertionError):
with assert_calls('foo'):
pass
# The context manager should clean up $PATH again
self.assertEqual(os.environ['PATH'], initial_path)
def test_assert_calls_with_args(self):
with assert_calls('foo', ['bar', 'baz']):
call(['foo', 'bar', 'baz'])
with self.assertRaises(AssertionError):
with assert_calls('cheese', ['crackers']):
call(['cheese', 'biscuits'])
call(['cheese', 'wine'])
def test_assert_calls_twice(self):
with assert_calls('git'):
call(['git'])
with self.assertRaises(AssertionError):
with assert_calls('git'):
pass
|
<commit_before>import os
from subprocess import call
import unittest
from testpath.commands import *
class CommandsTests(unittest.TestCase):
def test_assert_calls(self):
initial_path = os.environ['PATH']
with assert_calls('foobar'):
call(['foobar'])
with self.assertRaises(AssertionError):
with assert_calls('foo'):
pass
# The context manager should clean up $PATH again
self.assertEqual(os.environ['PATH'], initial_path)
def test_assert_calls_with_args(self):
with assert_calls('foo', ['bar', 'baz']):
call(['foo', 'bar', 'baz'])
with self.assertRaises(AssertionError):
with assert_calls('cheese', ['crackers']):
call(['cheese', 'biscuits'])
call(['cheese', 'wine'])
<commit_msg>Add failing test for gh-5<commit_after>import os
from subprocess import call
import unittest
from testpath.commands import *
class CommandsTests(unittest.TestCase):
def test_assert_calls(self):
initial_path = os.environ['PATH']
with assert_calls('foobar'):
call(['foobar'])
with self.assertRaises(AssertionError):
with assert_calls('foo'):
pass
# The context manager should clean up $PATH again
self.assertEqual(os.environ['PATH'], initial_path)
def test_assert_calls_with_args(self):
with assert_calls('foo', ['bar', 'baz']):
call(['foo', 'bar', 'baz'])
with self.assertRaises(AssertionError):
with assert_calls('cheese', ['crackers']):
call(['cheese', 'biscuits'])
call(['cheese', 'wine'])
def test_assert_calls_twice(self):
with assert_calls('git'):
call(['git'])
with self.assertRaises(AssertionError):
with assert_calls('git'):
pass
|
ae2b253289191b41b602f6d31865b2f8806b6eb3
|
scikits/image/__init__.py
|
scikits/image/__init__.py
|
"""Image Processing SciKit (Toolbox for SciPy)"""
import os.path as _osp
data_dir = _osp.join(_osp.dirname(__file__), 'data')
from version import version as __version__
def _setup_test():
import functools
basedir = _osp.dirname(_osp.join(__file__, '../'))
args = ['', '--exe', '-w', '%s' % basedir]
try:
import nose as _nose
except ImportError:
print("Could not load nose. Unit tests not available.")
return None
else:
return functools.partial(_nose.run, 'scikits.image', argv=args)
test = _setup_test()
if test is None:
del test
def get_log(name):
"""Return a console logger.
Output may be sent to the logger using the `debug`, `info`, `warning`,
`error` and `critical` methods.
Parameters
----------
name : str
Name of the log.
References
----------
.. [1] Logging facility for Python,
http://docs.python.org/library/logging.html
"""
import logging, sys
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
return logging.getLogger(name)
|
"""Image Processing SciKit (Toolbox for SciPy)"""
import os.path as _osp
data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data'))
from version import version as __version__
def _setup_test():
import functools
basedir = _osp.dirname(_osp.join(__file__, '../'))
args = ['', '--exe', '-w', '%s' % basedir]
try:
import nose as _nose
except ImportError:
print("Could not load nose. Unit tests not available.")
return None
else:
return functools.partial(_nose.run, 'scikits.image', argv=args)
test = _setup_test()
if test is None:
del test
def get_log(name):
"""Return a console logger.
Output may be sent to the logger using the `debug`, `info`, `warning`,
`error` and `critical` methods.
Parameters
----------
name : str
Name of the log.
References
----------
.. [1] Logging facility for Python,
http://docs.python.org/library/logging.html
"""
import logging, sys
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
return logging.getLogger(name)
|
Use absolute path for scikits.image.data_dir.
|
Use absolute path for scikits.image.data_dir.
|
Python
|
bsd-3-clause
|
WarrenWeckesser/scikits-image,blink1073/scikit-image,Britefury/scikit-image,michaelpacer/scikit-image,GaelVaroquaux/scikits.image,ajaybhat/scikit-image,SamHames/scikit-image,ofgulban/scikit-image,emmanuelle/scikits.image,almarklein/scikit-image,chintak/scikit-image,robintw/scikit-image,oew1v07/scikit-image,ofgulban/scikit-image,Britefury/scikit-image,bsipocz/scikit-image,michaelpacer/scikit-image,ajaybhat/scikit-image,michaelaye/scikit-image,oew1v07/scikit-image,Midafi/scikit-image,almarklein/scikit-image,rjeli/scikit-image,paalge/scikit-image,chriscrosscutler/scikit-image,warmspringwinds/scikit-image,keflavich/scikit-image,rjeli/scikit-image,jwiggins/scikit-image,michaelaye/scikit-image,emmanuelle/scikits.image,pratapvardhan/scikit-image,WarrenWeckesser/scikits-image,paalge/scikit-image,blink1073/scikit-image,chintak/scikit-image,ClinicalGraphics/scikit-image,ofgulban/scikit-image,youprofit/scikit-image,Midafi/scikit-image,pratapvardhan/scikit-image,GaZ3ll3/scikit-image,almarklein/scikit-image,SamHames/scikit-image,bennlich/scikit-image,emon10005/scikit-image,GaZ3ll3/scikit-image,juliusbierk/scikit-image,chintak/scikit-image,vighneshbirodkar/scikit-image,newville/scikit-image,newville/scikit-image,emon10005/scikit-image,vighneshbirodkar/scikit-image,dpshelio/scikit-image,dpshelio/scikit-image,keflavich/scikit-image,Hiyorimi/scikit-image,almarklein/scikit-image,ClinicalGraphics/scikit-image,youprofit/scikit-image,emmanuelle/scikits.image,emmanuelle/scikits.image,rjeli/scikit-image,bennlich/scikit-image,chintak/scikit-image,GaelVaroquaux/scikits.image,juliusbierk/scikit-image,SamHames/scikit-image,robintw/scikit-image,SamHames/scikit-image,bsipocz/scikit-image,warmspringwinds/scikit-image,vighneshbirodkar/scikit-image,chriscrosscutler/scikit-image,paalge/scikit-image,jwiggins/scikit-image,Hiyorimi/scikit-image
|
"""Image Processing SciKit (Toolbox for SciPy)"""
import os.path as _osp
data_dir = _osp.join(_osp.dirname(__file__), 'data')
from version import version as __version__
def _setup_test():
import functools
basedir = _osp.dirname(_osp.join(__file__, '../'))
args = ['', '--exe', '-w', '%s' % basedir]
try:
import nose as _nose
except ImportError:
print("Could not load nose. Unit tests not available.")
return None
else:
return functools.partial(_nose.run, 'scikits.image', argv=args)
test = _setup_test()
if test is None:
del test
def get_log(name):
"""Return a console logger.
Output may be sent to the logger using the `debug`, `info`, `warning`,
`error` and `critical` methods.
Parameters
----------
name : str
Name of the log.
References
----------
.. [1] Logging facility for Python,
http://docs.python.org/library/logging.html
"""
import logging, sys
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
return logging.getLogger(name)
Use absolute path for scikits.image.data_dir.
|
"""Image Processing SciKit (Toolbox for SciPy)"""
import os.path as _osp
data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data'))
from version import version as __version__
def _setup_test():
import functools
basedir = _osp.dirname(_osp.join(__file__, '../'))
args = ['', '--exe', '-w', '%s' % basedir]
try:
import nose as _nose
except ImportError:
print("Could not load nose. Unit tests not available.")
return None
else:
return functools.partial(_nose.run, 'scikits.image', argv=args)
test = _setup_test()
if test is None:
del test
def get_log(name):
"""Return a console logger.
Output may be sent to the logger using the `debug`, `info`, `warning`,
`error` and `critical` methods.
Parameters
----------
name : str
Name of the log.
References
----------
.. [1] Logging facility for Python,
http://docs.python.org/library/logging.html
"""
import logging, sys
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
return logging.getLogger(name)
|
<commit_before>"""Image Processing SciKit (Toolbox for SciPy)"""
import os.path as _osp
data_dir = _osp.join(_osp.dirname(__file__), 'data')
from version import version as __version__
def _setup_test():
import functools
basedir = _osp.dirname(_osp.join(__file__, '../'))
args = ['', '--exe', '-w', '%s' % basedir]
try:
import nose as _nose
except ImportError:
print("Could not load nose. Unit tests not available.")
return None
else:
return functools.partial(_nose.run, 'scikits.image', argv=args)
test = _setup_test()
if test is None:
del test
def get_log(name):
"""Return a console logger.
Output may be sent to the logger using the `debug`, `info`, `warning`,
`error` and `critical` methods.
Parameters
----------
name : str
Name of the log.
References
----------
.. [1] Logging facility for Python,
http://docs.python.org/library/logging.html
"""
import logging, sys
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
return logging.getLogger(name)
<commit_msg>Use absolute path for scikits.image.data_dir.<commit_after>
|
"""Image Processing SciKit (Toolbox for SciPy)"""
import os.path as _osp
data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data'))
from version import version as __version__
def _setup_test():
import functools
basedir = _osp.dirname(_osp.join(__file__, '../'))
args = ['', '--exe', '-w', '%s' % basedir]
try:
import nose as _nose
except ImportError:
print("Could not load nose. Unit tests not available.")
return None
else:
return functools.partial(_nose.run, 'scikits.image', argv=args)
test = _setup_test()
if test is None:
del test
def get_log(name):
"""Return a console logger.
Output may be sent to the logger using the `debug`, `info`, `warning`,
`error` and `critical` methods.
Parameters
----------
name : str
Name of the log.
References
----------
.. [1] Logging facility for Python,
http://docs.python.org/library/logging.html
"""
import logging, sys
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
return logging.getLogger(name)
|
"""Image Processing SciKit (Toolbox for SciPy)"""
import os.path as _osp
data_dir = _osp.join(_osp.dirname(__file__), 'data')
from version import version as __version__
def _setup_test():
import functools
basedir = _osp.dirname(_osp.join(__file__, '../'))
args = ['', '--exe', '-w', '%s' % basedir]
try:
import nose as _nose
except ImportError:
print("Could not load nose. Unit tests not available.")
return None
else:
return functools.partial(_nose.run, 'scikits.image', argv=args)
test = _setup_test()
if test is None:
del test
def get_log(name):
"""Return a console logger.
Output may be sent to the logger using the `debug`, `info`, `warning`,
`error` and `critical` methods.
Parameters
----------
name : str
Name of the log.
References
----------
.. [1] Logging facility for Python,
http://docs.python.org/library/logging.html
"""
import logging, sys
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
return logging.getLogger(name)
Use absolute path for scikits.image.data_dir."""Image Processing SciKit (Toolbox for SciPy)"""
import os.path as _osp
data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data'))
from version import version as __version__
def _setup_test():
import functools
basedir = _osp.dirname(_osp.join(__file__, '../'))
args = ['', '--exe', '-w', '%s' % basedir]
try:
import nose as _nose
except ImportError:
print("Could not load nose. Unit tests not available.")
return None
else:
return functools.partial(_nose.run, 'scikits.image', argv=args)
test = _setup_test()
if test is None:
del test
def get_log(name):
"""Return a console logger.
Output may be sent to the logger using the `debug`, `info`, `warning`,
`error` and `critical` methods.
Parameters
----------
name : str
Name of the log.
References
----------
.. [1] Logging facility for Python,
http://docs.python.org/library/logging.html
"""
import logging, sys
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
return logging.getLogger(name)
|
<commit_before>"""Image Processing SciKit (Toolbox for SciPy)"""
import os.path as _osp
data_dir = _osp.join(_osp.dirname(__file__), 'data')
from version import version as __version__
def _setup_test():
import functools
basedir = _osp.dirname(_osp.join(__file__, '../'))
args = ['', '--exe', '-w', '%s' % basedir]
try:
import nose as _nose
except ImportError:
print("Could not load nose. Unit tests not available.")
return None
else:
return functools.partial(_nose.run, 'scikits.image', argv=args)
test = _setup_test()
if test is None:
del test
def get_log(name):
"""Return a console logger.
Output may be sent to the logger using the `debug`, `info`, `warning`,
`error` and `critical` methods.
Parameters
----------
name : str
Name of the log.
References
----------
.. [1] Logging facility for Python,
http://docs.python.org/library/logging.html
"""
import logging, sys
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
return logging.getLogger(name)
<commit_msg>Use absolute path for scikits.image.data_dir.<commit_after>"""Image Processing SciKit (Toolbox for SciPy)"""
import os.path as _osp
data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data'))
from version import version as __version__
def _setup_test():
import functools
basedir = _osp.dirname(_osp.join(__file__, '../'))
args = ['', '--exe', '-w', '%s' % basedir]
try:
import nose as _nose
except ImportError:
print("Could not load nose. Unit tests not available.")
return None
else:
return functools.partial(_nose.run, 'scikits.image', argv=args)
test = _setup_test()
if test is None:
del test
def get_log(name):
"""Return a console logger.
Output may be sent to the logger using the `debug`, `info`, `warning`,
`error` and `critical` methods.
Parameters
----------
name : str
Name of the log.
References
----------
.. [1] Logging facility for Python,
http://docs.python.org/library/logging.html
"""
import logging, sys
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
return logging.getLogger(name)
|
7a8c29ddabb31c16e908552fb0dd8558a4700b41
|
tests/test_fields.py
|
tests/test_fields.py
|
import moarjson as json
class User(object):
def __init__(self, first_name, last_name):
self.first_name = first_name
self.last_name = last_name
def full_name(self):
return '{} {}'.format(self.first_name, self.last_name)
json.register_with_fields(User, ('first_name', 'last_name', 'full_name',))
def test_fields():
user = User('James', 'Bond')
assert user.full_name() == 'James Bond'
test_dict = {'first_name': 'James',
'last_name': 'Bond',
'full_name': 'James Bond'}
json_dict = json.loads(json.dumps(user))
assert json_dict == test_dict
|
import moarjson as json
class User(object):
def __init__(self, first_name, last_name):
self.first_name = first_name
self.last_name = last_name
def full_name(self):
return '{0} {1}'.format(self.first_name, self.last_name)
json.register_with_fields(User, ('first_name', 'last_name', 'full_name',))
def test_fields():
user = User('James', 'Bond')
assert user.full_name() == 'James Bond'
test_dict = {'first_name': 'James',
'last_name': 'Bond',
'full_name': 'James Bond'}
json_dict = json.loads(json.dumps(user))
assert json_dict == test_dict
|
Make test compatible with Python 2.6
|
Make test compatible with Python 2.6
|
Python
|
mit
|
bulv1ne/moarjson,bulv1ne/moarjson
|
import moarjson as json
class User(object):
def __init__(self, first_name, last_name):
self.first_name = first_name
self.last_name = last_name
def full_name(self):
return '{} {}'.format(self.first_name, self.last_name)
json.register_with_fields(User, ('first_name', 'last_name', 'full_name',))
def test_fields():
user = User('James', 'Bond')
assert user.full_name() == 'James Bond'
test_dict = {'first_name': 'James',
'last_name': 'Bond',
'full_name': 'James Bond'}
json_dict = json.loads(json.dumps(user))
assert json_dict == test_dict
Make test compatible with Python 2.6
|
import moarjson as json
class User(object):
def __init__(self, first_name, last_name):
self.first_name = first_name
self.last_name = last_name
def full_name(self):
return '{0} {1}'.format(self.first_name, self.last_name)
json.register_with_fields(User, ('first_name', 'last_name', 'full_name',))
def test_fields():
user = User('James', 'Bond')
assert user.full_name() == 'James Bond'
test_dict = {'first_name': 'James',
'last_name': 'Bond',
'full_name': 'James Bond'}
json_dict = json.loads(json.dumps(user))
assert json_dict == test_dict
|
<commit_before>import moarjson as json
class User(object):
def __init__(self, first_name, last_name):
self.first_name = first_name
self.last_name = last_name
def full_name(self):
return '{} {}'.format(self.first_name, self.last_name)
json.register_with_fields(User, ('first_name', 'last_name', 'full_name',))
def test_fields():
user = User('James', 'Bond')
assert user.full_name() == 'James Bond'
test_dict = {'first_name': 'James',
'last_name': 'Bond',
'full_name': 'James Bond'}
json_dict = json.loads(json.dumps(user))
assert json_dict == test_dict
<commit_msg>Make test compatible with Python 2.6<commit_after>
|
import moarjson as json
class User(object):
def __init__(self, first_name, last_name):
self.first_name = first_name
self.last_name = last_name
def full_name(self):
return '{0} {1}'.format(self.first_name, self.last_name)
json.register_with_fields(User, ('first_name', 'last_name', 'full_name',))
def test_fields():
user = User('James', 'Bond')
assert user.full_name() == 'James Bond'
test_dict = {'first_name': 'James',
'last_name': 'Bond',
'full_name': 'James Bond'}
json_dict = json.loads(json.dumps(user))
assert json_dict == test_dict
|
import moarjson as json
class User(object):
def __init__(self, first_name, last_name):
self.first_name = first_name
self.last_name = last_name
def full_name(self):
return '{} {}'.format(self.first_name, self.last_name)
json.register_with_fields(User, ('first_name', 'last_name', 'full_name',))
def test_fields():
user = User('James', 'Bond')
assert user.full_name() == 'James Bond'
test_dict = {'first_name': 'James',
'last_name': 'Bond',
'full_name': 'James Bond'}
json_dict = json.loads(json.dumps(user))
assert json_dict == test_dict
Make test compatible with Python 2.6import moarjson as json
class User(object):
def __init__(self, first_name, last_name):
self.first_name = first_name
self.last_name = last_name
def full_name(self):
return '{0} {1}'.format(self.first_name, self.last_name)
json.register_with_fields(User, ('first_name', 'last_name', 'full_name',))
def test_fields():
user = User('James', 'Bond')
assert user.full_name() == 'James Bond'
test_dict = {'first_name': 'James',
'last_name': 'Bond',
'full_name': 'James Bond'}
json_dict = json.loads(json.dumps(user))
assert json_dict == test_dict
|
<commit_before>import moarjson as json
class User(object):
def __init__(self, first_name, last_name):
self.first_name = first_name
self.last_name = last_name
def full_name(self):
return '{} {}'.format(self.first_name, self.last_name)
json.register_with_fields(User, ('first_name', 'last_name', 'full_name',))
def test_fields():
user = User('James', 'Bond')
assert user.full_name() == 'James Bond'
test_dict = {'first_name': 'James',
'last_name': 'Bond',
'full_name': 'James Bond'}
json_dict = json.loads(json.dumps(user))
assert json_dict == test_dict
<commit_msg>Make test compatible with Python 2.6<commit_after>import moarjson as json
class User(object):
def __init__(self, first_name, last_name):
self.first_name = first_name
self.last_name = last_name
def full_name(self):
return '{0} {1}'.format(self.first_name, self.last_name)
json.register_with_fields(User, ('first_name', 'last_name', 'full_name',))
def test_fields():
user = User('James', 'Bond')
assert user.full_name() == 'James Bond'
test_dict = {'first_name': 'James',
'last_name': 'Bond',
'full_name': 'James Bond'}
json_dict = json.loads(json.dumps(user))
assert json_dict == test_dict
|
1977ab5bd97feb114dedd1619c89413f109f0480
|
tests/validate_test.py
|
tests/validate_test.py
|
from pytest import raises
from nirum.validate import (validate_boxed_type, validate_record_type,
validate_union_type)
def test_validate_boxed_type():
assert validate_boxed_type(3.14, float)
with raises(TypeError):
validate_boxed_type('hello', float)
def test_validate_record_type(fx_point, fx_record_type, fx_offset):
assert validate_record_type(fx_point)
with raises(TypeError):
assert validate_record_type(fx_record_type(left=fx_offset, top=1))
with raises(TypeError):
assert validate_record_type(fx_record_type(left=1, top=fx_offset))
def test_validate_union_type(fx_rectangle, fx_rectangle_type, fx_point):
assert validate_union_type(fx_rectangle)
with raises(TypeError):
assert validate_union_type(fx_rectangle_type(1, fx_point))
with raises(TypeError):
assert validate_union_type(fx_rectangle_type(fx_point, 1))
with raises(TypeError):
assert validate_union_type(fx_rectangle_type(1, 1))
|
from pytest import raises
from nirum.validate import (validate_boxed_type, validate_record_type,
validate_union_type)
def test_validate_boxed_type():
assert validate_boxed_type(3.14, float)
with raises(TypeError):
validate_boxed_type('hello', float)
def test_validate_record_type(fx_point, fx_record_type, fx_offset):
assert validate_record_type(fx_point)
with raises(TypeError):
validate_record_type(fx_record_type(left=fx_offset, top=1))
with raises(TypeError):
validate_record_type(fx_record_type(left=1, top=fx_offset))
def test_validate_union_type(fx_rectangle, fx_rectangle_type, fx_point):
assert validate_union_type(fx_rectangle)
with raises(TypeError):
validate_union_type(fx_rectangle_type(1, fx_point))
with raises(TypeError):
validate_union_type(fx_rectangle_type(fx_point, 1))
with raises(TypeError):
validate_union_type(fx_rectangle_type(1, 1))
|
Remove assert in error-rasing test
|
Remove assert in error-rasing test
|
Python
|
mit
|
spoqa/nirum-python,spoqa/nirum-python
|
from pytest import raises
from nirum.validate import (validate_boxed_type, validate_record_type,
validate_union_type)
def test_validate_boxed_type():
assert validate_boxed_type(3.14, float)
with raises(TypeError):
validate_boxed_type('hello', float)
def test_validate_record_type(fx_point, fx_record_type, fx_offset):
assert validate_record_type(fx_point)
with raises(TypeError):
assert validate_record_type(fx_record_type(left=fx_offset, top=1))
with raises(TypeError):
assert validate_record_type(fx_record_type(left=1, top=fx_offset))
def test_validate_union_type(fx_rectangle, fx_rectangle_type, fx_point):
assert validate_union_type(fx_rectangle)
with raises(TypeError):
assert validate_union_type(fx_rectangle_type(1, fx_point))
with raises(TypeError):
assert validate_union_type(fx_rectangle_type(fx_point, 1))
with raises(TypeError):
assert validate_union_type(fx_rectangle_type(1, 1))
Remove assert in error-rasing test
|
from pytest import raises
from nirum.validate import (validate_boxed_type, validate_record_type,
validate_union_type)
def test_validate_boxed_type():
assert validate_boxed_type(3.14, float)
with raises(TypeError):
validate_boxed_type('hello', float)
def test_validate_record_type(fx_point, fx_record_type, fx_offset):
assert validate_record_type(fx_point)
with raises(TypeError):
validate_record_type(fx_record_type(left=fx_offset, top=1))
with raises(TypeError):
validate_record_type(fx_record_type(left=1, top=fx_offset))
def test_validate_union_type(fx_rectangle, fx_rectangle_type, fx_point):
assert validate_union_type(fx_rectangle)
with raises(TypeError):
validate_union_type(fx_rectangle_type(1, fx_point))
with raises(TypeError):
validate_union_type(fx_rectangle_type(fx_point, 1))
with raises(TypeError):
validate_union_type(fx_rectangle_type(1, 1))
|
<commit_before>from pytest import raises
from nirum.validate import (validate_boxed_type, validate_record_type,
validate_union_type)
def test_validate_boxed_type():
assert validate_boxed_type(3.14, float)
with raises(TypeError):
validate_boxed_type('hello', float)
def test_validate_record_type(fx_point, fx_record_type, fx_offset):
assert validate_record_type(fx_point)
with raises(TypeError):
assert validate_record_type(fx_record_type(left=fx_offset, top=1))
with raises(TypeError):
assert validate_record_type(fx_record_type(left=1, top=fx_offset))
def test_validate_union_type(fx_rectangle, fx_rectangle_type, fx_point):
assert validate_union_type(fx_rectangle)
with raises(TypeError):
assert validate_union_type(fx_rectangle_type(1, fx_point))
with raises(TypeError):
assert validate_union_type(fx_rectangle_type(fx_point, 1))
with raises(TypeError):
assert validate_union_type(fx_rectangle_type(1, 1))
<commit_msg>Remove assert in error-rasing test<commit_after>
|
from pytest import raises
from nirum.validate import (validate_boxed_type, validate_record_type,
validate_union_type)
def test_validate_boxed_type():
assert validate_boxed_type(3.14, float)
with raises(TypeError):
validate_boxed_type('hello', float)
def test_validate_record_type(fx_point, fx_record_type, fx_offset):
assert validate_record_type(fx_point)
with raises(TypeError):
validate_record_type(fx_record_type(left=fx_offset, top=1))
with raises(TypeError):
validate_record_type(fx_record_type(left=1, top=fx_offset))
def test_validate_union_type(fx_rectangle, fx_rectangle_type, fx_point):
assert validate_union_type(fx_rectangle)
with raises(TypeError):
validate_union_type(fx_rectangle_type(1, fx_point))
with raises(TypeError):
validate_union_type(fx_rectangle_type(fx_point, 1))
with raises(TypeError):
validate_union_type(fx_rectangle_type(1, 1))
|
from pytest import raises
from nirum.validate import (validate_boxed_type, validate_record_type,
validate_union_type)
def test_validate_boxed_type():
assert validate_boxed_type(3.14, float)
with raises(TypeError):
validate_boxed_type('hello', float)
def test_validate_record_type(fx_point, fx_record_type, fx_offset):
assert validate_record_type(fx_point)
with raises(TypeError):
assert validate_record_type(fx_record_type(left=fx_offset, top=1))
with raises(TypeError):
assert validate_record_type(fx_record_type(left=1, top=fx_offset))
def test_validate_union_type(fx_rectangle, fx_rectangle_type, fx_point):
assert validate_union_type(fx_rectangle)
with raises(TypeError):
assert validate_union_type(fx_rectangle_type(1, fx_point))
with raises(TypeError):
assert validate_union_type(fx_rectangle_type(fx_point, 1))
with raises(TypeError):
assert validate_union_type(fx_rectangle_type(1, 1))
Remove assert in error-rasing testfrom pytest import raises
from nirum.validate import (validate_boxed_type, validate_record_type,
validate_union_type)
def test_validate_boxed_type():
assert validate_boxed_type(3.14, float)
with raises(TypeError):
validate_boxed_type('hello', float)
def test_validate_record_type(fx_point, fx_record_type, fx_offset):
assert validate_record_type(fx_point)
with raises(TypeError):
validate_record_type(fx_record_type(left=fx_offset, top=1))
with raises(TypeError):
validate_record_type(fx_record_type(left=1, top=fx_offset))
def test_validate_union_type(fx_rectangle, fx_rectangle_type, fx_point):
assert validate_union_type(fx_rectangle)
with raises(TypeError):
validate_union_type(fx_rectangle_type(1, fx_point))
with raises(TypeError):
validate_union_type(fx_rectangle_type(fx_point, 1))
with raises(TypeError):
validate_union_type(fx_rectangle_type(1, 1))
|
<commit_before>from pytest import raises
from nirum.validate import (validate_boxed_type, validate_record_type,
validate_union_type)
def test_validate_boxed_type():
assert validate_boxed_type(3.14, float)
with raises(TypeError):
validate_boxed_type('hello', float)
def test_validate_record_type(fx_point, fx_record_type, fx_offset):
assert validate_record_type(fx_point)
with raises(TypeError):
assert validate_record_type(fx_record_type(left=fx_offset, top=1))
with raises(TypeError):
assert validate_record_type(fx_record_type(left=1, top=fx_offset))
def test_validate_union_type(fx_rectangle, fx_rectangle_type, fx_point):
assert validate_union_type(fx_rectangle)
with raises(TypeError):
assert validate_union_type(fx_rectangle_type(1, fx_point))
with raises(TypeError):
assert validate_union_type(fx_rectangle_type(fx_point, 1))
with raises(TypeError):
assert validate_union_type(fx_rectangle_type(1, 1))
<commit_msg>Remove assert in error-rasing test<commit_after>from pytest import raises
from nirum.validate import (validate_boxed_type, validate_record_type,
validate_union_type)
def test_validate_boxed_type():
assert validate_boxed_type(3.14, float)
with raises(TypeError):
validate_boxed_type('hello', float)
def test_validate_record_type(fx_point, fx_record_type, fx_offset):
assert validate_record_type(fx_point)
with raises(TypeError):
validate_record_type(fx_record_type(left=fx_offset, top=1))
with raises(TypeError):
validate_record_type(fx_record_type(left=1, top=fx_offset))
def test_validate_union_type(fx_rectangle, fx_rectangle_type, fx_point):
assert validate_union_type(fx_rectangle)
with raises(TypeError):
validate_union_type(fx_rectangle_type(1, fx_point))
with raises(TypeError):
validate_union_type(fx_rectangle_type(fx_point, 1))
with raises(TypeError):
validate_union_type(fx_rectangle_type(1, 1))
|
8c116adaf0f571a4843173951880bd77bef702bb
|
django_enumfield/fields.py
|
django_enumfield/fields.py
|
from django.db import models
class EnumField(models.Field):
__metaclass__ = models.SubfieldBase
def __init__(self, enumeration, *args, **kwargs):
self.enumeration = enumeration
kwargs.setdefault('choices', enumeration.get_choices())
super(EnumField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return 'IntegerField'
def to_python(self, value):
return self.enumeration.to_item(value)
def get_db_prep_save(self, value, connection=None):
if value is None:
return value
return self.to_python(value).value
def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False):
def prepare(value):
v = self.to_python(value)
return self.get_db_prep_save(v, connection=connection)
if lookup_type in ('exact', 'lt', 'lte', 'gt', 'gte'):
return [prepare(value)]
elif lookup_type == 'in':
return [prepare(v) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Lookup type %r not supported." % lookup_type)
def south_field_triple(self):
from south.modelsinspector import introspector
args, kwargs = introspector(self)
return ('django.db.models.fields.Integerfield', args, kwargs)
|
from django.db import models
class EnumField(models.Field):
__metaclass__ = models.SubfieldBase
def __init__(self, enumeration, *args, **kwargs):
self.enumeration = enumeration
kwargs.setdefault('choices', enumeration.get_choices())
super(EnumField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return 'IntegerField'
def to_python(self, value):
return self.enumeration.to_item(value)
def get_db_prep_save(self, value, connection=None):
if value is None:
return value
return self.to_python(value).value
def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False):
def prepare(value):
v = self.to_python(value)
return self.get_db_prep_save(v, connection=connection)
if lookup_type in ('exact', 'lt', 'lte', 'gt', 'gte'):
return [prepare(value)]
elif lookup_type == 'in':
return [prepare(v) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Lookup type %r not supported." % lookup_type)
def south_field_triple(self):
from south.modelsinspector import introspector
args, kwargs = introspector(self)
return ('django.db.models.fields.IntegerField', args, kwargs)
|
Correct path in South triple definition.
|
Correct path in South triple definition.
|
Python
|
bsd-3-clause
|
playfire/django-enumfield
|
from django.db import models
class EnumField(models.Field):
__metaclass__ = models.SubfieldBase
def __init__(self, enumeration, *args, **kwargs):
self.enumeration = enumeration
kwargs.setdefault('choices', enumeration.get_choices())
super(EnumField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return 'IntegerField'
def to_python(self, value):
return self.enumeration.to_item(value)
def get_db_prep_save(self, value, connection=None):
if value is None:
return value
return self.to_python(value).value
def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False):
def prepare(value):
v = self.to_python(value)
return self.get_db_prep_save(v, connection=connection)
if lookup_type in ('exact', 'lt', 'lte', 'gt', 'gte'):
return [prepare(value)]
elif lookup_type == 'in':
return [prepare(v) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Lookup type %r not supported." % lookup_type)
def south_field_triple(self):
from south.modelsinspector import introspector
args, kwargs = introspector(self)
return ('django.db.models.fields.Integerfield', args, kwargs)
Correct path in South triple definition.
|
from django.db import models
class EnumField(models.Field):
__metaclass__ = models.SubfieldBase
def __init__(self, enumeration, *args, **kwargs):
self.enumeration = enumeration
kwargs.setdefault('choices', enumeration.get_choices())
super(EnumField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return 'IntegerField'
def to_python(self, value):
return self.enumeration.to_item(value)
def get_db_prep_save(self, value, connection=None):
if value is None:
return value
return self.to_python(value).value
def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False):
def prepare(value):
v = self.to_python(value)
return self.get_db_prep_save(v, connection=connection)
if lookup_type in ('exact', 'lt', 'lte', 'gt', 'gte'):
return [prepare(value)]
elif lookup_type == 'in':
return [prepare(v) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Lookup type %r not supported." % lookup_type)
def south_field_triple(self):
from south.modelsinspector import introspector
args, kwargs = introspector(self)
return ('django.db.models.fields.IntegerField', args, kwargs)
|
<commit_before>from django.db import models
class EnumField(models.Field):
__metaclass__ = models.SubfieldBase
def __init__(self, enumeration, *args, **kwargs):
self.enumeration = enumeration
kwargs.setdefault('choices', enumeration.get_choices())
super(EnumField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return 'IntegerField'
def to_python(self, value):
return self.enumeration.to_item(value)
def get_db_prep_save(self, value, connection=None):
if value is None:
return value
return self.to_python(value).value
def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False):
def prepare(value):
v = self.to_python(value)
return self.get_db_prep_save(v, connection=connection)
if lookup_type in ('exact', 'lt', 'lte', 'gt', 'gte'):
return [prepare(value)]
elif lookup_type == 'in':
return [prepare(v) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Lookup type %r not supported." % lookup_type)
def south_field_triple(self):
from south.modelsinspector import introspector
args, kwargs = introspector(self)
return ('django.db.models.fields.Integerfield', args, kwargs)
<commit_msg>Correct path in South triple definition.<commit_after>
|
from django.db import models
class EnumField(models.Field):
__metaclass__ = models.SubfieldBase
def __init__(self, enumeration, *args, **kwargs):
self.enumeration = enumeration
kwargs.setdefault('choices', enumeration.get_choices())
super(EnumField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return 'IntegerField'
def to_python(self, value):
return self.enumeration.to_item(value)
def get_db_prep_save(self, value, connection=None):
if value is None:
return value
return self.to_python(value).value
def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False):
def prepare(value):
v = self.to_python(value)
return self.get_db_prep_save(v, connection=connection)
if lookup_type in ('exact', 'lt', 'lte', 'gt', 'gte'):
return [prepare(value)]
elif lookup_type == 'in':
return [prepare(v) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Lookup type %r not supported." % lookup_type)
def south_field_triple(self):
from south.modelsinspector import introspector
args, kwargs = introspector(self)
return ('django.db.models.fields.IntegerField', args, kwargs)
|
from django.db import models
class EnumField(models.Field):
__metaclass__ = models.SubfieldBase
def __init__(self, enumeration, *args, **kwargs):
self.enumeration = enumeration
kwargs.setdefault('choices', enumeration.get_choices())
super(EnumField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return 'IntegerField'
def to_python(self, value):
return self.enumeration.to_item(value)
def get_db_prep_save(self, value, connection=None):
if value is None:
return value
return self.to_python(value).value
def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False):
def prepare(value):
v = self.to_python(value)
return self.get_db_prep_save(v, connection=connection)
if lookup_type in ('exact', 'lt', 'lte', 'gt', 'gte'):
return [prepare(value)]
elif lookup_type == 'in':
return [prepare(v) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Lookup type %r not supported." % lookup_type)
def south_field_triple(self):
from south.modelsinspector import introspector
args, kwargs = introspector(self)
return ('django.db.models.fields.Integerfield', args, kwargs)
Correct path in South triple definition.from django.db import models
class EnumField(models.Field):
__metaclass__ = models.SubfieldBase
def __init__(self, enumeration, *args, **kwargs):
self.enumeration = enumeration
kwargs.setdefault('choices', enumeration.get_choices())
super(EnumField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return 'IntegerField'
def to_python(self, value):
return self.enumeration.to_item(value)
def get_db_prep_save(self, value, connection=None):
if value is None:
return value
return self.to_python(value).value
def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False):
def prepare(value):
v = self.to_python(value)
return self.get_db_prep_save(v, connection=connection)
if lookup_type in ('exact', 'lt', 'lte', 'gt', 'gte'):
return [prepare(value)]
elif lookup_type == 'in':
return [prepare(v) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Lookup type %r not supported." % lookup_type)
def south_field_triple(self):
from south.modelsinspector import introspector
args, kwargs = introspector(self)
return ('django.db.models.fields.IntegerField', args, kwargs)
|
<commit_before>from django.db import models
class EnumField(models.Field):
__metaclass__ = models.SubfieldBase
def __init__(self, enumeration, *args, **kwargs):
self.enumeration = enumeration
kwargs.setdefault('choices', enumeration.get_choices())
super(EnumField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return 'IntegerField'
def to_python(self, value):
return self.enumeration.to_item(value)
def get_db_prep_save(self, value, connection=None):
if value is None:
return value
return self.to_python(value).value
def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False):
def prepare(value):
v = self.to_python(value)
return self.get_db_prep_save(v, connection=connection)
if lookup_type in ('exact', 'lt', 'lte', 'gt', 'gte'):
return [prepare(value)]
elif lookup_type == 'in':
return [prepare(v) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Lookup type %r not supported." % lookup_type)
def south_field_triple(self):
from south.modelsinspector import introspector
args, kwargs = introspector(self)
return ('django.db.models.fields.Integerfield', args, kwargs)
<commit_msg>Correct path in South triple definition.<commit_after>from django.db import models
class EnumField(models.Field):
__metaclass__ = models.SubfieldBase
def __init__(self, enumeration, *args, **kwargs):
self.enumeration = enumeration
kwargs.setdefault('choices', enumeration.get_choices())
super(EnumField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return 'IntegerField'
def to_python(self, value):
return self.enumeration.to_item(value)
def get_db_prep_save(self, value, connection=None):
if value is None:
return value
return self.to_python(value).value
def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=False):
def prepare(value):
v = self.to_python(value)
return self.get_db_prep_save(v, connection=connection)
if lookup_type in ('exact', 'lt', 'lte', 'gt', 'gte'):
return [prepare(value)]
elif lookup_type == 'in':
return [prepare(v) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Lookup type %r not supported." % lookup_type)
def south_field_triple(self):
from south.modelsinspector import introspector
args, kwargs = introspector(self)
return ('django.db.models.fields.IntegerField', args, kwargs)
|
b912c1a508640c7c351ed1d945bfeebdaa995332
|
djcelery/management/commands/celeryd.py
|
djcelery/management/commands/celeryd.py
|
"""
Start the celery daemon from the Django management command.
"""
from __future__ import absolute_import, unicode_literals
from celery.bin import worker
from djcelery.app import app
from djcelery.management.base import CeleryCommand
worker = worker.worker(app=app)
class Command(CeleryCommand):
"""Run the celery daemon."""
help = 'Old alias to the "celery worker" command.'
requires_model_validation = True
options = (CeleryCommand.options
+ worker.get_options()
+ worker.preload_options)
def handle(self, *args, **options):
worker.run(**options)
|
"""
Start the celery daemon from the Django management command.
"""
from __future__ import absolute_import, unicode_literals
from celery.bin import worker
from djcelery.app import app
from djcelery.management.base import CeleryCommand
worker = worker.worker(app=app)
class Command(CeleryCommand):
"""Run the celery daemon."""
help = 'Old alias to the "celery worker" command.'
requires_model_validation = True
options = (CeleryCommand.options
+ worker.get_options()
+ worker.preload_options)
def handle(self, *args, **options):
worker.check_args(args)
worker.run(**options)
|
Add requested call to check_args.
|
Add requested call to check_args.
|
Python
|
bsd-3-clause
|
Amanit/django-celery,digimarc/django-celery,iris-edu-int/django-celery,axiom-data-science/django-celery,celery/django-celery,CloudNcodeInc/django-celery,Amanit/django-celery,axiom-data-science/django-celery,georgewhewell/django-celery,CloudNcodeInc/django-celery,iris-edu-int/django-celery,digimarc/django-celery,celery/django-celery,tkanemoto/django-celery,CloudNcodeInc/django-celery,kanemra/django-celery,kanemra/django-celery,iris-edu-int/django-celery,tkanemoto/django-celery,georgewhewell/django-celery,georgewhewell/django-celery,Amanit/django-celery,digimarc/django-celery,tkanemoto/django-celery,celery/django-celery,axiom-data-science/django-celery,kanemra/django-celery
|
"""
Start the celery daemon from the Django management command.
"""
from __future__ import absolute_import, unicode_literals
from celery.bin import worker
from djcelery.app import app
from djcelery.management.base import CeleryCommand
worker = worker.worker(app=app)
class Command(CeleryCommand):
"""Run the celery daemon."""
help = 'Old alias to the "celery worker" command.'
requires_model_validation = True
options = (CeleryCommand.options
+ worker.get_options()
+ worker.preload_options)
def handle(self, *args, **options):
worker.run(**options)
Add requested call to check_args.
|
"""
Start the celery daemon from the Django management command.
"""
from __future__ import absolute_import, unicode_literals
from celery.bin import worker
from djcelery.app import app
from djcelery.management.base import CeleryCommand
worker = worker.worker(app=app)
class Command(CeleryCommand):
"""Run the celery daemon."""
help = 'Old alias to the "celery worker" command.'
requires_model_validation = True
options = (CeleryCommand.options
+ worker.get_options()
+ worker.preload_options)
def handle(self, *args, **options):
worker.check_args(args)
worker.run(**options)
|
<commit_before>"""
Start the celery daemon from the Django management command.
"""
from __future__ import absolute_import, unicode_literals
from celery.bin import worker
from djcelery.app import app
from djcelery.management.base import CeleryCommand
worker = worker.worker(app=app)
class Command(CeleryCommand):
"""Run the celery daemon."""
help = 'Old alias to the "celery worker" command.'
requires_model_validation = True
options = (CeleryCommand.options
+ worker.get_options()
+ worker.preload_options)
def handle(self, *args, **options):
worker.run(**options)
<commit_msg>Add requested call to check_args.<commit_after>
|
"""
Start the celery daemon from the Django management command.
"""
from __future__ import absolute_import, unicode_literals
from celery.bin import worker
from djcelery.app import app
from djcelery.management.base import CeleryCommand
worker = worker.worker(app=app)
class Command(CeleryCommand):
"""Run the celery daemon."""
help = 'Old alias to the "celery worker" command.'
requires_model_validation = True
options = (CeleryCommand.options
+ worker.get_options()
+ worker.preload_options)
def handle(self, *args, **options):
worker.check_args(args)
worker.run(**options)
|
"""
Start the celery daemon from the Django management command.
"""
from __future__ import absolute_import, unicode_literals
from celery.bin import worker
from djcelery.app import app
from djcelery.management.base import CeleryCommand
worker = worker.worker(app=app)
class Command(CeleryCommand):
"""Run the celery daemon."""
help = 'Old alias to the "celery worker" command.'
requires_model_validation = True
options = (CeleryCommand.options
+ worker.get_options()
+ worker.preload_options)
def handle(self, *args, **options):
worker.run(**options)
Add requested call to check_args."""
Start the celery daemon from the Django management command.
"""
from __future__ import absolute_import, unicode_literals
from celery.bin import worker
from djcelery.app import app
from djcelery.management.base import CeleryCommand
worker = worker.worker(app=app)
class Command(CeleryCommand):
"""Run the celery daemon."""
help = 'Old alias to the "celery worker" command.'
requires_model_validation = True
options = (CeleryCommand.options
+ worker.get_options()
+ worker.preload_options)
def handle(self, *args, **options):
worker.check_args(args)
worker.run(**options)
|
<commit_before>"""
Start the celery daemon from the Django management command.
"""
from __future__ import absolute_import, unicode_literals
from celery.bin import worker
from djcelery.app import app
from djcelery.management.base import CeleryCommand
worker = worker.worker(app=app)
class Command(CeleryCommand):
"""Run the celery daemon."""
help = 'Old alias to the "celery worker" command.'
requires_model_validation = True
options = (CeleryCommand.options
+ worker.get_options()
+ worker.preload_options)
def handle(self, *args, **options):
worker.run(**options)
<commit_msg>Add requested call to check_args.<commit_after>"""
Start the celery daemon from the Django management command.
"""
from __future__ import absolute_import, unicode_literals
from celery.bin import worker
from djcelery.app import app
from djcelery.management.base import CeleryCommand
worker = worker.worker(app=app)
class Command(CeleryCommand):
"""Run the celery daemon."""
help = 'Old alias to the "celery worker" command.'
requires_model_validation = True
options = (CeleryCommand.options
+ worker.get_options()
+ worker.preload_options)
def handle(self, *args, **options):
worker.check_args(args)
worker.run(**options)
|
635a51c1ee8f5608de03351008f0d5aa9a116660
|
opps/images/templatetags/images_tags.py
|
opps/images/templatetags/images_tags.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
new['flip'] = image.flip
new['flop'] = image.flop
if image.halign:
new['halign'] = image.halign
if image.valign:
new['valign'] = image.valign
new['fit_in'] = image.fit_in
new['smart'] = image.smart
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
if getattr(image, 'flip'):
new['flip'] = image.flip
if getattr(image, 'flop'):
new['flop'] = image.flop
if getattr(image, 'halign'):
new['halign'] = image.halign
if getattr(image, 'valign'):
new['valign'] = image.valign
if getattr(image, 'fit_in'):
new['fit_in'] = image.fit_in
if getattr(image, 'smart'):
new['smart'] = image.smart
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
|
Fix has no attribute on templatetags image_obj
|
Fix has no attribute on templatetags image_obj
|
Python
|
mit
|
jeanmask/opps,williamroot/opps,YACOWS/opps,opps/opps,opps/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,opps/opps,williamroot/opps,YACOWS/opps,opps/opps,williamroot/opps
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
new['flip'] = image.flip
new['flop'] = image.flop
if image.halign:
new['halign'] = image.halign
if image.valign:
new['valign'] = image.valign
new['fit_in'] = image.fit_in
new['smart'] = image.smart
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
Fix has no attribute on templatetags image_obj
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
if getattr(image, 'flip'):
new['flip'] = image.flip
if getattr(image, 'flop'):
new['flop'] = image.flop
if getattr(image, 'halign'):
new['halign'] = image.halign
if getattr(image, 'valign'):
new['valign'] = image.valign
if getattr(image, 'fit_in'):
new['fit_in'] = image.fit_in
if getattr(image, 'smart'):
new['smart'] = image.smart
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
new['flip'] = image.flip
new['flop'] = image.flop
if image.halign:
new['halign'] = image.halign
if image.valign:
new['valign'] = image.valign
new['fit_in'] = image.fit_in
new['smart'] = image.smart
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
<commit_msg>Fix has no attribute on templatetags image_obj<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
if getattr(image, 'flip'):
new['flip'] = image.flip
if getattr(image, 'flop'):
new['flop'] = image.flop
if getattr(image, 'halign'):
new['halign'] = image.halign
if getattr(image, 'valign'):
new['valign'] = image.valign
if getattr(image, 'fit_in'):
new['fit_in'] = image.fit_in
if getattr(image, 'smart'):
new['smart'] = image.smart
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
new['flip'] = image.flip
new['flop'] = image.flop
if image.halign:
new['halign'] = image.halign
if image.valign:
new['valign'] = image.valign
new['fit_in'] = image.fit_in
new['smart'] = image.smart
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
Fix has no attribute on templatetags image_obj#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
if getattr(image, 'flip'):
new['flip'] = image.flip
if getattr(image, 'flop'):
new['flop'] = image.flop
if getattr(image, 'halign'):
new['halign'] = image.halign
if getattr(image, 'valign'):
new['valign'] = image.valign
if getattr(image, 'fit_in'):
new['fit_in'] = image.fit_in
if getattr(image, 'smart'):
new['smart'] = image.smart
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
new['flip'] = image.flip
new['flop'] = image.flop
if image.halign:
new['halign'] = image.halign
if image.valign:
new['valign'] = image.valign
new['fit_in'] = image.fit_in
new['smart'] = image.smart
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
<commit_msg>Fix has no attribute on templatetags image_obj<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
if getattr(image, 'flip'):
new['flip'] = image.flip
if getattr(image, 'flop'):
new['flop'] = image.flop
if getattr(image, 'halign'):
new['halign'] = image.halign
if getattr(image, 'valign'):
new['valign'] = image.valign
if getattr(image, 'fit_in'):
new['fit_in'] = image.fit_in
if getattr(image, 'smart'):
new['smart'] = image.smart
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
|
a1c4c7f8d07ba12494b55f988853d0804e657f9a
|
opps/images/templatetags/images_tags.py
|
opps/images/templatetags/images_tags.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
new['flip'] = image.flip
new['flop'] = image.flop
if image.halign != "":
new['halign'] = image.halign
if image.valign != "":
new['valign'] = image.valign
new['fit_in'] = image.fit_in
new['smart'] = image.smart
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
new['flip'] = image.flip
new['flop'] = image.flop
if image.halign != "":
new['halign'] = image.halign
if image.valign != "":
new['valign'] = image.valign
new['fit_in'] = image.fit_in
new['smart'] = image.smart
if image.crop_x1 > 0 or image.crop_x2 > 0 or image.crop_y1 > 0 or \
image.crop_y2 > 0:
new['crop'] = ((image.crop_x1,image.crop_y1),
(image.crop_x2,image.crop_y2))
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
|
Add image crop on templatetags image_obj
|
Add image crop on templatetags image_obj
|
Python
|
mit
|
jeanmask/opps,YACOWS/opps,opps/opps,williamroot/opps,opps/opps,williamroot/opps,opps/opps,williamroot/opps,YACOWS/opps,opps/opps,jeanmask/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,jeanmask/opps,jeanmask/opps
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
new['flip'] = image.flip
new['flop'] = image.flop
if image.halign != "":
new['halign'] = image.halign
if image.valign != "":
new['valign'] = image.valign
new['fit_in'] = image.fit_in
new['smart'] = image.smart
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
Add image crop on templatetags image_obj
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
new['flip'] = image.flip
new['flop'] = image.flop
if image.halign != "":
new['halign'] = image.halign
if image.valign != "":
new['valign'] = image.valign
new['fit_in'] = image.fit_in
new['smart'] = image.smart
if image.crop_x1 > 0 or image.crop_x2 > 0 or image.crop_y1 > 0 or \
image.crop_y2 > 0:
new['crop'] = ((image.crop_x1,image.crop_y1),
(image.crop_x2,image.crop_y2))
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
new['flip'] = image.flip
new['flop'] = image.flop
if image.halign != "":
new['halign'] = image.halign
if image.valign != "":
new['valign'] = image.valign
new['fit_in'] = image.fit_in
new['smart'] = image.smart
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
<commit_msg>Add image crop on templatetags image_obj<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
new['flip'] = image.flip
new['flop'] = image.flop
if image.halign != "":
new['halign'] = image.halign
if image.valign != "":
new['valign'] = image.valign
new['fit_in'] = image.fit_in
new['smart'] = image.smart
if image.crop_x1 > 0 or image.crop_x2 > 0 or image.crop_y1 > 0 or \
image.crop_y2 > 0:
new['crop'] = ((image.crop_x1,image.crop_y1),
(image.crop_x2,image.crop_y2))
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
new['flip'] = image.flip
new['flop'] = image.flop
if image.halign != "":
new['halign'] = image.halign
if image.valign != "":
new['valign'] = image.valign
new['fit_in'] = image.fit_in
new['smart'] = image.smart
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
Add image crop on templatetags image_obj#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
new['flip'] = image.flip
new['flop'] = image.flop
if image.halign != "":
new['halign'] = image.halign
if image.valign != "":
new['valign'] = image.valign
new['fit_in'] = image.fit_in
new['smart'] = image.smart
if image.crop_x1 > 0 or image.crop_x2 > 0 or image.crop_y1 > 0 or \
image.crop_y2 > 0:
new['crop'] = ((image.crop_x1,image.crop_y1),
(image.crop_x2,image.crop_y2))
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
new['flip'] = image.flip
new['flop'] = image.flop
if image.halign != "":
new['halign'] = image.halign
if image.valign != "":
new['valign'] = image.valign
new['fit_in'] = image.fit_in
new['smart'] = image.smart
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
<commit_msg>Add image crop on templatetags image_obj<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
new['flip'] = image.flip
new['flop'] = image.flop
if image.halign != "":
new['halign'] = image.halign
if image.valign != "":
new['valign'] = image.valign
new['fit_in'] = image.fit_in
new['smart'] = image.smart
if image.crop_x1 > 0 or image.crop_x2 > 0 or image.crop_y1 > 0 or \
image.crop_y2 > 0:
new['crop'] = ((image.crop_x1,image.crop_y1),
(image.crop_x2,image.crop_y2))
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
|
dabb67601fd977b2f3e97a601a76ec8dd576fa77
|
drivnal/remote_snapshot.py
|
drivnal/remote_snapshot.py
|
from constants import *
from core_snapshot import CoreSnapshot
import logging
logger = logging.getLogger(APP_NAME)
class RemoteSnapshot(CoreSnapshot):
def _get_path(self):
dir_name = str(self.id)
if self.state != COMPLETE:
dir_name = '%s.%s' % (dir_name, self.state)
return '%s@%s%s' % (self.volume.ssh_user, self.volume.ssh_path,
os.sep + os.path.join(SNAPSHOT_DIR, dir_name) + os.sep)
def _get_log_path(self):
return ''
def _setup_snapshot(self, last_snapshot):
pass
def set_state(self, state):
if self.state == state:
return
self.state = state
|
from constants import *
from core_snapshot import CoreSnapshot
import logging
logger = logging.getLogger(APP_NAME)
class RemoteSnapshot(CoreSnapshot):
def _get_path(self):
dir_name = str(self.id)
if self.state != COMPLETE:
dir_name = '%s.%s' % (dir_name, self.state)
return '%s@%s%s' % (self.volume.ssh_user, self.volume.ssh_path,
os.sep + os.path.join(SNAPSHOT_DIR, dir_name) + os.sep)
def _get_log_path(self):
return os.path.join(self.volume.log_dir, 'snapshot_%s.log' % self.id)
def _setup_snapshot(self, last_snapshot):
pass
def set_state(self, state):
if self.state == state:
return
self.state = state
|
Add get log path to remote snapshot
|
Add get log path to remote snapshot
|
Python
|
agpl-3.0
|
drivnal/drivnal,drivnal/drivnal,drivnal/drivnal
|
from constants import *
from core_snapshot import CoreSnapshot
import logging
logger = logging.getLogger(APP_NAME)
class RemoteSnapshot(CoreSnapshot):
def _get_path(self):
dir_name = str(self.id)
if self.state != COMPLETE:
dir_name = '%s.%s' % (dir_name, self.state)
return '%s@%s%s' % (self.volume.ssh_user, self.volume.ssh_path,
os.sep + os.path.join(SNAPSHOT_DIR, dir_name) + os.sep)
def _get_log_path(self):
return ''
def _setup_snapshot(self, last_snapshot):
pass
def set_state(self, state):
if self.state == state:
return
self.state = state
Add get log path to remote snapshot
|
from constants import *
from core_snapshot import CoreSnapshot
import logging
logger = logging.getLogger(APP_NAME)
class RemoteSnapshot(CoreSnapshot):
def _get_path(self):
dir_name = str(self.id)
if self.state != COMPLETE:
dir_name = '%s.%s' % (dir_name, self.state)
return '%s@%s%s' % (self.volume.ssh_user, self.volume.ssh_path,
os.sep + os.path.join(SNAPSHOT_DIR, dir_name) + os.sep)
def _get_log_path(self):
return os.path.join(self.volume.log_dir, 'snapshot_%s.log' % self.id)
def _setup_snapshot(self, last_snapshot):
pass
def set_state(self, state):
if self.state == state:
return
self.state = state
|
<commit_before>from constants import *
from core_snapshot import CoreSnapshot
import logging
logger = logging.getLogger(APP_NAME)
class RemoteSnapshot(CoreSnapshot):
def _get_path(self):
dir_name = str(self.id)
if self.state != COMPLETE:
dir_name = '%s.%s' % (dir_name, self.state)
return '%s@%s%s' % (self.volume.ssh_user, self.volume.ssh_path,
os.sep + os.path.join(SNAPSHOT_DIR, dir_name) + os.sep)
def _get_log_path(self):
return ''
def _setup_snapshot(self, last_snapshot):
pass
def set_state(self, state):
if self.state == state:
return
self.state = state
<commit_msg>Add get log path to remote snapshot<commit_after>
|
from constants import *
from core_snapshot import CoreSnapshot
import logging
logger = logging.getLogger(APP_NAME)
class RemoteSnapshot(CoreSnapshot):
def _get_path(self):
dir_name = str(self.id)
if self.state != COMPLETE:
dir_name = '%s.%s' % (dir_name, self.state)
return '%s@%s%s' % (self.volume.ssh_user, self.volume.ssh_path,
os.sep + os.path.join(SNAPSHOT_DIR, dir_name) + os.sep)
def _get_log_path(self):
return os.path.join(self.volume.log_dir, 'snapshot_%s.log' % self.id)
def _setup_snapshot(self, last_snapshot):
pass
def set_state(self, state):
if self.state == state:
return
self.state = state
|
from constants import *
from core_snapshot import CoreSnapshot
import logging
logger = logging.getLogger(APP_NAME)
class RemoteSnapshot(CoreSnapshot):
def _get_path(self):
dir_name = str(self.id)
if self.state != COMPLETE:
dir_name = '%s.%s' % (dir_name, self.state)
return '%s@%s%s' % (self.volume.ssh_user, self.volume.ssh_path,
os.sep + os.path.join(SNAPSHOT_DIR, dir_name) + os.sep)
def _get_log_path(self):
return ''
def _setup_snapshot(self, last_snapshot):
pass
def set_state(self, state):
if self.state == state:
return
self.state = state
Add get log path to remote snapshotfrom constants import *
from core_snapshot import CoreSnapshot
import logging
logger = logging.getLogger(APP_NAME)
class RemoteSnapshot(CoreSnapshot):
def _get_path(self):
dir_name = str(self.id)
if self.state != COMPLETE:
dir_name = '%s.%s' % (dir_name, self.state)
return '%s@%s%s' % (self.volume.ssh_user, self.volume.ssh_path,
os.sep + os.path.join(SNAPSHOT_DIR, dir_name) + os.sep)
def _get_log_path(self):
return os.path.join(self.volume.log_dir, 'snapshot_%s.log' % self.id)
def _setup_snapshot(self, last_snapshot):
pass
def set_state(self, state):
if self.state == state:
return
self.state = state
|
<commit_before>from constants import *
from core_snapshot import CoreSnapshot
import logging
logger = logging.getLogger(APP_NAME)
class RemoteSnapshot(CoreSnapshot):
def _get_path(self):
dir_name = str(self.id)
if self.state != COMPLETE:
dir_name = '%s.%s' % (dir_name, self.state)
return '%s@%s%s' % (self.volume.ssh_user, self.volume.ssh_path,
os.sep + os.path.join(SNAPSHOT_DIR, dir_name) + os.sep)
def _get_log_path(self):
return ''
def _setup_snapshot(self, last_snapshot):
pass
def set_state(self, state):
if self.state == state:
return
self.state = state
<commit_msg>Add get log path to remote snapshot<commit_after>from constants import *
from core_snapshot import CoreSnapshot
import logging
logger = logging.getLogger(APP_NAME)
class RemoteSnapshot(CoreSnapshot):
def _get_path(self):
dir_name = str(self.id)
if self.state != COMPLETE:
dir_name = '%s.%s' % (dir_name, self.state)
return '%s@%s%s' % (self.volume.ssh_user, self.volume.ssh_path,
os.sep + os.path.join(SNAPSHOT_DIR, dir_name) + os.sep)
def _get_log_path(self):
return os.path.join(self.volume.log_dir, 'snapshot_%s.log' % self.id)
def _setup_snapshot(self, last_snapshot):
pass
def set_state(self, state):
if self.state == state:
return
self.state = state
|
58e2059c37d7464e7ab7a1681ea8f465e9378940
|
ukpostcode/__init__.py
|
ukpostcode/__init__.py
|
# coding: utf-8
# Copyright 2013 Alan Justino da Silva, Oscar Vilaplana, et. al.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
__version__ = '0.1.0'
def formater(postcode):
'''
Format a UK Post Code to the official form. Expects a valid postcode.
'''
postcode = postcode.upper().replace(' ', '').strip()
inward = postcode[-3:]
outward = postcode[:-3]
return '%s %s' % (outward, inward)
|
# coding: utf-8
# Copyright 2013 Alan Justino da Silva, Oscar Vilaplana, et. al.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
__version__ = '0.1.0'
def formater(postcode):
'''
Format a UK Post Code to the official form. Expects a valid postcode.
'''
postcode = postcode.upper().replace(' ', '').strip()
inward = postcode[-3:]
outward = postcode[:-3]
return '%s %s' % (outward, inward)
def validate(postcode):
'''
Returns True if the postcode is valid. False otherwise.
'''
raise NotImplementedError()
|
Prepare to code the validator
|
Prepare to code the validator
|
Python
|
apache-2.0
|
alanjds/pyukpostcode
|
# coding: utf-8
# Copyright 2013 Alan Justino da Silva, Oscar Vilaplana, et. al.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
__version__ = '0.1.0'
def formater(postcode):
'''
Format a UK Post Code to the official form. Expects a valid postcode.
'''
postcode = postcode.upper().replace(' ', '').strip()
inward = postcode[-3:]
outward = postcode[:-3]
return '%s %s' % (outward, inward)
Prepare to code the validator
|
# coding: utf-8
# Copyright 2013 Alan Justino da Silva, Oscar Vilaplana, et. al.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
__version__ = '0.1.0'
def formater(postcode):
'''
Format a UK Post Code to the official form. Expects a valid postcode.
'''
postcode = postcode.upper().replace(' ', '').strip()
inward = postcode[-3:]
outward = postcode[:-3]
return '%s %s' % (outward, inward)
def validate(postcode):
'''
Returns True if the postcode is valid. False otherwise.
'''
raise NotImplementedError()
|
<commit_before># coding: utf-8
# Copyright 2013 Alan Justino da Silva, Oscar Vilaplana, et. al.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
__version__ = '0.1.0'
def formater(postcode):
'''
Format a UK Post Code to the official form. Expects a valid postcode.
'''
postcode = postcode.upper().replace(' ', '').strip()
inward = postcode[-3:]
outward = postcode[:-3]
return '%s %s' % (outward, inward)
<commit_msg>Prepare to code the validator<commit_after>
|
# coding: utf-8
# Copyright 2013 Alan Justino da Silva, Oscar Vilaplana, et. al.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
__version__ = '0.1.0'
def formater(postcode):
'''
Format a UK Post Code to the official form. Expects a valid postcode.
'''
postcode = postcode.upper().replace(' ', '').strip()
inward = postcode[-3:]
outward = postcode[:-3]
return '%s %s' % (outward, inward)
def validate(postcode):
'''
Returns True if the postcode is valid. False otherwise.
'''
raise NotImplementedError()
|
# coding: utf-8
# Copyright 2013 Alan Justino da Silva, Oscar Vilaplana, et. al.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
__version__ = '0.1.0'
def formater(postcode):
'''
Format a UK Post Code to the official form. Expects a valid postcode.
'''
postcode = postcode.upper().replace(' ', '').strip()
inward = postcode[-3:]
outward = postcode[:-3]
return '%s %s' % (outward, inward)
Prepare to code the validator# coding: utf-8
# Copyright 2013 Alan Justino da Silva, Oscar Vilaplana, et. al.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
__version__ = '0.1.0'
def formater(postcode):
'''
Format a UK Post Code to the official form. Expects a valid postcode.
'''
postcode = postcode.upper().replace(' ', '').strip()
inward = postcode[-3:]
outward = postcode[:-3]
return '%s %s' % (outward, inward)
def validate(postcode):
'''
Returns True if the postcode is valid. False otherwise.
'''
raise NotImplementedError()
|
<commit_before># coding: utf-8
# Copyright 2013 Alan Justino da Silva, Oscar Vilaplana, et. al.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
__version__ = '0.1.0'
def formater(postcode):
'''
Format a UK Post Code to the official form. Expects a valid postcode.
'''
postcode = postcode.upper().replace(' ', '').strip()
inward = postcode[-3:]
outward = postcode[:-3]
return '%s %s' % (outward, inward)
<commit_msg>Prepare to code the validator<commit_after># coding: utf-8
# Copyright 2013 Alan Justino da Silva, Oscar Vilaplana, et. al.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
__version__ = '0.1.0'
def formater(postcode):
'''
Format a UK Post Code to the official form. Expects a valid postcode.
'''
postcode = postcode.upper().replace(' ', '').strip()
inward = postcode[-3:]
outward = postcode[:-3]
return '%s %s' % (outward, inward)
def validate(postcode):
'''
Returns True if the postcode is valid. False otherwise.
'''
raise NotImplementedError()
|
ce90af15b912080c15abcb6c3ea0b4b135dd43bb
|
Scraper.py
|
Scraper.py
|
from Database import Database
from Nyaa import Nyaa, NyaaEntry
import getopt
import os
import sys
script_dir = os.path.dirname(os.path.realpath(__file__))
nt = Nyaa()
db = Database(script_dir)
arguments = sys.argv[1:]
optlist, args = getopt.getopt(arguments, '', ['start='])
if len(optlist) > 0:
for opt, arg in optlist:
if opt == '--start':
start_entry = int(arg)
if 'start_entry' not in globals():
start_entry = db.last_entry + 1
for i in range(start_entry + 1, nt.last_entry + 1):
entry = NyaaEntry('http://www.nyaa.se/?page=view&tid={}'.format(i))
if entry.exists == True:
if entry.category in db.categories and entry.sub_category in db.sub_categories:
if entry.magnet == 0:
continue
print('Entry: {}, Name: {}'.format(i, entry.name))
db.write_torrent((i, entry.name, entry.magnet, db.categories[entry.category],
db.sub_categories[entry.sub_category], db.status[entry.status], entry.date, entry.time))
db.c.close()
|
from Database import Database
from Nyaa import Nyaa, NyaaEntry
import getopt
import os
import sys
script_dir = os.path.dirname(os.path.realpath(__file__))
nt = Nyaa()
db = Database(script_dir)
arguments = sys.argv[1:]
optlist, args = getopt.getopt(arguments, '', ['start='])
if len(optlist) > 0:
for opt, arg in optlist:
if opt == '--start':
start_entry = int(arg)
if 'start_entry' not in globals():
start_entry = db.last_entry + 1
for i in range(start_entry, nt.last_entry + 1):
entry = NyaaEntry('http://www.nyaa.se/?page=view&tid={}'.format(i))
if entry.exists == True:
if entry.category in db.categories and entry.sub_category in db.sub_categories:
if entry.magnet == 0:
continue
print('Entry: {}, Name: {}'.format(i, entry.name))
db.write_torrent((i, entry.name, entry.magnet, db.categories[entry.category],
db.sub_categories[entry.sub_category], db.status[entry.status], entry.date, entry.time))
db.c.close()
|
Fix scraper skipping an entry at start.
|
Fix scraper skipping an entry at start.
|
Python
|
mit
|
Hamuko/nyaamagnet
|
from Database import Database
from Nyaa import Nyaa, NyaaEntry
import getopt
import os
import sys
script_dir = os.path.dirname(os.path.realpath(__file__))
nt = Nyaa()
db = Database(script_dir)
arguments = sys.argv[1:]
optlist, args = getopt.getopt(arguments, '', ['start='])
if len(optlist) > 0:
for opt, arg in optlist:
if opt == '--start':
start_entry = int(arg)
if 'start_entry' not in globals():
start_entry = db.last_entry + 1
for i in range(start_entry + 1, nt.last_entry + 1):
entry = NyaaEntry('http://www.nyaa.se/?page=view&tid={}'.format(i))
if entry.exists == True:
if entry.category in db.categories and entry.sub_category in db.sub_categories:
if entry.magnet == 0:
continue
print('Entry: {}, Name: {}'.format(i, entry.name))
db.write_torrent((i, entry.name, entry.magnet, db.categories[entry.category],
db.sub_categories[entry.sub_category], db.status[entry.status], entry.date, entry.time))
db.c.close()Fix scraper skipping an entry at start.
|
from Database import Database
from Nyaa import Nyaa, NyaaEntry
import getopt
import os
import sys
script_dir = os.path.dirname(os.path.realpath(__file__))
nt = Nyaa()
db = Database(script_dir)
arguments = sys.argv[1:]
optlist, args = getopt.getopt(arguments, '', ['start='])
if len(optlist) > 0:
for opt, arg in optlist:
if opt == '--start':
start_entry = int(arg)
if 'start_entry' not in globals():
start_entry = db.last_entry + 1
for i in range(start_entry, nt.last_entry + 1):
entry = NyaaEntry('http://www.nyaa.se/?page=view&tid={}'.format(i))
if entry.exists == True:
if entry.category in db.categories and entry.sub_category in db.sub_categories:
if entry.magnet == 0:
continue
print('Entry: {}, Name: {}'.format(i, entry.name))
db.write_torrent((i, entry.name, entry.magnet, db.categories[entry.category],
db.sub_categories[entry.sub_category], db.status[entry.status], entry.date, entry.time))
db.c.close()
|
<commit_before>from Database import Database
from Nyaa import Nyaa, NyaaEntry
import getopt
import os
import sys
script_dir = os.path.dirname(os.path.realpath(__file__))
nt = Nyaa()
db = Database(script_dir)
arguments = sys.argv[1:]
optlist, args = getopt.getopt(arguments, '', ['start='])
if len(optlist) > 0:
for opt, arg in optlist:
if opt == '--start':
start_entry = int(arg)
if 'start_entry' not in globals():
start_entry = db.last_entry + 1
for i in range(start_entry + 1, nt.last_entry + 1):
entry = NyaaEntry('http://www.nyaa.se/?page=view&tid={}'.format(i))
if entry.exists == True:
if entry.category in db.categories and entry.sub_category in db.sub_categories:
if entry.magnet == 0:
continue
print('Entry: {}, Name: {}'.format(i, entry.name))
db.write_torrent((i, entry.name, entry.magnet, db.categories[entry.category],
db.sub_categories[entry.sub_category], db.status[entry.status], entry.date, entry.time))
db.c.close()<commit_msg>Fix scraper skipping an entry at start.<commit_after>
|
from Database import Database
from Nyaa import Nyaa, NyaaEntry
import getopt
import os
import sys
script_dir = os.path.dirname(os.path.realpath(__file__))
nt = Nyaa()
db = Database(script_dir)
arguments = sys.argv[1:]
optlist, args = getopt.getopt(arguments, '', ['start='])
if len(optlist) > 0:
for opt, arg in optlist:
if opt == '--start':
start_entry = int(arg)
if 'start_entry' not in globals():
start_entry = db.last_entry + 1
for i in range(start_entry, nt.last_entry + 1):
entry = NyaaEntry('http://www.nyaa.se/?page=view&tid={}'.format(i))
if entry.exists == True:
if entry.category in db.categories and entry.sub_category in db.sub_categories:
if entry.magnet == 0:
continue
print('Entry: {}, Name: {}'.format(i, entry.name))
db.write_torrent((i, entry.name, entry.magnet, db.categories[entry.category],
db.sub_categories[entry.sub_category], db.status[entry.status], entry.date, entry.time))
db.c.close()
|
from Database import Database
from Nyaa import Nyaa, NyaaEntry
import getopt
import os
import sys
script_dir = os.path.dirname(os.path.realpath(__file__))
nt = Nyaa()
db = Database(script_dir)
arguments = sys.argv[1:]
optlist, args = getopt.getopt(arguments, '', ['start='])
if len(optlist) > 0:
for opt, arg in optlist:
if opt == '--start':
start_entry = int(arg)
if 'start_entry' not in globals():
start_entry = db.last_entry + 1
for i in range(start_entry + 1, nt.last_entry + 1):
entry = NyaaEntry('http://www.nyaa.se/?page=view&tid={}'.format(i))
if entry.exists == True:
if entry.category in db.categories and entry.sub_category in db.sub_categories:
if entry.magnet == 0:
continue
print('Entry: {}, Name: {}'.format(i, entry.name))
db.write_torrent((i, entry.name, entry.magnet, db.categories[entry.category],
db.sub_categories[entry.sub_category], db.status[entry.status], entry.date, entry.time))
db.c.close()Fix scraper skipping an entry at start.from Database import Database
from Nyaa import Nyaa, NyaaEntry
import getopt
import os
import sys
script_dir = os.path.dirname(os.path.realpath(__file__))
nt = Nyaa()
db = Database(script_dir)
arguments = sys.argv[1:]
optlist, args = getopt.getopt(arguments, '', ['start='])
if len(optlist) > 0:
for opt, arg in optlist:
if opt == '--start':
start_entry = int(arg)
if 'start_entry' not in globals():
start_entry = db.last_entry + 1
for i in range(start_entry, nt.last_entry + 1):
entry = NyaaEntry('http://www.nyaa.se/?page=view&tid={}'.format(i))
if entry.exists == True:
if entry.category in db.categories and entry.sub_category in db.sub_categories:
if entry.magnet == 0:
continue
print('Entry: {}, Name: {}'.format(i, entry.name))
db.write_torrent((i, entry.name, entry.magnet, db.categories[entry.category],
db.sub_categories[entry.sub_category], db.status[entry.status], entry.date, entry.time))
db.c.close()
|
<commit_before>from Database import Database
from Nyaa import Nyaa, NyaaEntry
import getopt
import os
import sys
script_dir = os.path.dirname(os.path.realpath(__file__))
nt = Nyaa()
db = Database(script_dir)
arguments = sys.argv[1:]
optlist, args = getopt.getopt(arguments, '', ['start='])
if len(optlist) > 0:
for opt, arg in optlist:
if opt == '--start':
start_entry = int(arg)
if 'start_entry' not in globals():
start_entry = db.last_entry + 1
for i in range(start_entry + 1, nt.last_entry + 1):
entry = NyaaEntry('http://www.nyaa.se/?page=view&tid={}'.format(i))
if entry.exists == True:
if entry.category in db.categories and entry.sub_category in db.sub_categories:
if entry.magnet == 0:
continue
print('Entry: {}, Name: {}'.format(i, entry.name))
db.write_torrent((i, entry.name, entry.magnet, db.categories[entry.category],
db.sub_categories[entry.sub_category], db.status[entry.status], entry.date, entry.time))
db.c.close()<commit_msg>Fix scraper skipping an entry at start.<commit_after>from Database import Database
from Nyaa import Nyaa, NyaaEntry
import getopt
import os
import sys
script_dir = os.path.dirname(os.path.realpath(__file__))
nt = Nyaa()
db = Database(script_dir)
arguments = sys.argv[1:]
optlist, args = getopt.getopt(arguments, '', ['start='])
if len(optlist) > 0:
for opt, arg in optlist:
if opt == '--start':
start_entry = int(arg)
if 'start_entry' not in globals():
start_entry = db.last_entry + 1
for i in range(start_entry, nt.last_entry + 1):
entry = NyaaEntry('http://www.nyaa.se/?page=view&tid={}'.format(i))
if entry.exists == True:
if entry.category in db.categories and entry.sub_category in db.sub_categories:
if entry.magnet == 0:
continue
print('Entry: {}, Name: {}'.format(i, entry.name))
db.write_torrent((i, entry.name, entry.magnet, db.categories[entry.category],
db.sub_categories[entry.sub_category], db.status[entry.status], entry.date, entry.time))
db.c.close()
|
e7438009afe5d0372fa1c89c2d3407234662521b
|
src/compiler/nir/nir_builder_opcodes_h.py
|
src/compiler/nir/nir_builder_opcodes_h.py
|
#! /usr/bin/env python
template = """\
/* Copyright (C) 2015 Broadcom
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice (including the next
* paragraph) shall be included in all copies or substantial portions of the
* Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
#ifndef _NIR_BUILDER_OPCODES_
#define _NIR_BUILDER_OPCODES_
% for name, opcode in sorted(opcodes.iteritems()):
ALU${opcode.num_inputs}(${name});
% endfor
#endif /* _NIR_BUILDER_OPCODES_ */"""
from nir_opcodes import opcodes
from mako.template import Template
print Template(template).render(opcodes=opcodes)
|
#! /usr/bin/env python
template = """\
/* Copyright (C) 2015 Broadcom
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice (including the next
* paragraph) shall be included in all copies or substantial portions of the
* Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
#ifndef _NIR_BUILDER_OPCODES_
#define _NIR_BUILDER_OPCODES_
% for name, opcode in sorted(opcodes.iteritems()):
ALU${opcode.num_inputs}(${name})
% endfor
#endif /* _NIR_BUILDER_OPCODES_ */"""
from nir_opcodes import opcodes
from mako.template import Template
print Template(template).render(opcodes=opcodes)
|
Remove spurious ; after nir_builder functions.
|
nir: Remove spurious ; after nir_builder functions.
Makes -pedantic happy.
Reviewed-by: Connor Abbott <71178acffcc112b21e5858656e5751f5e4aa9364@gmail.com>
|
Python
|
mit
|
metora/MesaGLSLCompiler,metora/MesaGLSLCompiler,metora/MesaGLSLCompiler
|
#! /usr/bin/env python
template = """\
/* Copyright (C) 2015 Broadcom
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice (including the next
* paragraph) shall be included in all copies or substantial portions of the
* Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
#ifndef _NIR_BUILDER_OPCODES_
#define _NIR_BUILDER_OPCODES_
% for name, opcode in sorted(opcodes.iteritems()):
ALU${opcode.num_inputs}(${name});
% endfor
#endif /* _NIR_BUILDER_OPCODES_ */"""
from nir_opcodes import opcodes
from mako.template import Template
print Template(template).render(opcodes=opcodes)
nir: Remove spurious ; after nir_builder functions.
Makes -pedantic happy.
Reviewed-by: Connor Abbott <71178acffcc112b21e5858656e5751f5e4aa9364@gmail.com>
|
#! /usr/bin/env python
template = """\
/* Copyright (C) 2015 Broadcom
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice (including the next
* paragraph) shall be included in all copies or substantial portions of the
* Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
#ifndef _NIR_BUILDER_OPCODES_
#define _NIR_BUILDER_OPCODES_
% for name, opcode in sorted(opcodes.iteritems()):
ALU${opcode.num_inputs}(${name})
% endfor
#endif /* _NIR_BUILDER_OPCODES_ */"""
from nir_opcodes import opcodes
from mako.template import Template
print Template(template).render(opcodes=opcodes)
|
<commit_before>#! /usr/bin/env python
template = """\
/* Copyright (C) 2015 Broadcom
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice (including the next
* paragraph) shall be included in all copies or substantial portions of the
* Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
#ifndef _NIR_BUILDER_OPCODES_
#define _NIR_BUILDER_OPCODES_
% for name, opcode in sorted(opcodes.iteritems()):
ALU${opcode.num_inputs}(${name});
% endfor
#endif /* _NIR_BUILDER_OPCODES_ */"""
from nir_opcodes import opcodes
from mako.template import Template
print Template(template).render(opcodes=opcodes)
<commit_msg>nir: Remove spurious ; after nir_builder functions.
Makes -pedantic happy.
Reviewed-by: Connor Abbott <71178acffcc112b21e5858656e5751f5e4aa9364@gmail.com><commit_after>
|
#! /usr/bin/env python
template = """\
/* Copyright (C) 2015 Broadcom
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice (including the next
* paragraph) shall be included in all copies or substantial portions of the
* Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
#ifndef _NIR_BUILDER_OPCODES_
#define _NIR_BUILDER_OPCODES_
% for name, opcode in sorted(opcodes.iteritems()):
ALU${opcode.num_inputs}(${name})
% endfor
#endif /* _NIR_BUILDER_OPCODES_ */"""
from nir_opcodes import opcodes
from mako.template import Template
print Template(template).render(opcodes=opcodes)
|
#! /usr/bin/env python
template = """\
/* Copyright (C) 2015 Broadcom
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice (including the next
* paragraph) shall be included in all copies or substantial portions of the
* Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
#ifndef _NIR_BUILDER_OPCODES_
#define _NIR_BUILDER_OPCODES_
% for name, opcode in sorted(opcodes.iteritems()):
ALU${opcode.num_inputs}(${name});
% endfor
#endif /* _NIR_BUILDER_OPCODES_ */"""
from nir_opcodes import opcodes
from mako.template import Template
print Template(template).render(opcodes=opcodes)
nir: Remove spurious ; after nir_builder functions.
Makes -pedantic happy.
Reviewed-by: Connor Abbott <71178acffcc112b21e5858656e5751f5e4aa9364@gmail.com>#! /usr/bin/env python
template = """\
/* Copyright (C) 2015 Broadcom
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice (including the next
* paragraph) shall be included in all copies or substantial portions of the
* Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
#ifndef _NIR_BUILDER_OPCODES_
#define _NIR_BUILDER_OPCODES_
% for name, opcode in sorted(opcodes.iteritems()):
ALU${opcode.num_inputs}(${name})
% endfor
#endif /* _NIR_BUILDER_OPCODES_ */"""
from nir_opcodes import opcodes
from mako.template import Template
print Template(template).render(opcodes=opcodes)
|
<commit_before>#! /usr/bin/env python
template = """\
/* Copyright (C) 2015 Broadcom
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice (including the next
* paragraph) shall be included in all copies or substantial portions of the
* Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
#ifndef _NIR_BUILDER_OPCODES_
#define _NIR_BUILDER_OPCODES_
% for name, opcode in sorted(opcodes.iteritems()):
ALU${opcode.num_inputs}(${name});
% endfor
#endif /* _NIR_BUILDER_OPCODES_ */"""
from nir_opcodes import opcodes
from mako.template import Template
print Template(template).render(opcodes=opcodes)
<commit_msg>nir: Remove spurious ; after nir_builder functions.
Makes -pedantic happy.
Reviewed-by: Connor Abbott <71178acffcc112b21e5858656e5751f5e4aa9364@gmail.com><commit_after>#! /usr/bin/env python
template = """\
/* Copyright (C) 2015 Broadcom
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice (including the next
* paragraph) shall be included in all copies or substantial portions of the
* Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
#ifndef _NIR_BUILDER_OPCODES_
#define _NIR_BUILDER_OPCODES_
% for name, opcode in sorted(opcodes.iteritems()):
ALU${opcode.num_inputs}(${name})
% endfor
#endif /* _NIR_BUILDER_OPCODES_ */"""
from nir_opcodes import opcodes
from mako.template import Template
print Template(template).render(opcodes=opcodes)
|
f0cb99f5e986c11164c98eeea38ce54e91748833
|
tests/grammar_unified_tests.py
|
tests/grammar_unified_tests.py
|
from unittest import TestCase
from regparser.grammar.unified import *
class GrammarCommonTests(TestCase):
def test_depth1_p(self):
text = '(c)(2)(ii)(A)(<E T="03">2</E>)'
result = depth1_p.parseString(text)
self.assertEqual('c', result.p1)
self.assertEqual('2', result.p2)
self.assertEqual('ii', result.p3)
self.assertEqual('A', result.p4)
self.assertEqual('2', result.p5)
def test_marker_subpart_title(self):
# Typical case:
text = u'Subpart K\u2014Exportation'
result = marker_subpart_title.parseString(text)
self.assertEqual(u'Exportation', result.subpart_title)
self.assertEqual(u'K', result.subpart)
# Reserved subpart:
text = u'Subpart J [Reserved]'
result = marker_subpart_title.parseString(text)
self.assertEqual(u'[Reserved]', result.subpart_title)
self.assertEqual(u'J', result.subpart)
|
# -*- coding: utf-8 -*-
from unittest import TestCase
from regparser.grammar.unified import *
class GrammarCommonTests(TestCase):
def test_depth1_p(self):
text = '(c)(2)(ii)(A)(<E T="03">2</E>)'
result = depth1_p.parseString(text)
self.assertEqual('c', result.p1)
self.assertEqual('2', result.p2)
self.assertEqual('ii', result.p3)
self.assertEqual('A', result.p4)
self.assertEqual('2', result.p5)
def test_marker_subpart_title(self):
# Typical case:
text = u'Subpart K\u2014Exportation'
result = marker_subpart_title.parseString(text)
self.assertEqual(u'Exportation', result.subpart_title)
self.assertEqual(u'K', result.subpart)
# Reserved subpart:
text = u'Subpart J [Reserved]'
result = marker_subpart_title.parseString(text)
self.assertEqual(u'[Reserved]', result.subpart_title)
self.assertEqual(u'J', result.subpart)
def test_marker_comment(self):
texts = [u'comment § 1004.3-4-i',
u'comment 1004.3-4-i',
u'comment 3-4-i',]
for t in texts:
result = marker_comment.parseString(t)
self.assertEqual("3", result.section)
self.assertEqual("4", result.c1)
|
Add tests for marker_comment from ascott1/appendix-ref
|
Add tests for marker_comment from ascott1/appendix-ref
Conflicts:
tests/grammar_unified_tests.py
|
Python
|
cc0-1.0
|
tadhg-ohiggins/regulations-parser,eregs/regulations-parser,tadhg-ohiggins/regulations-parser,eregs/regulations-parser,cmc333333/regulations-parser,cmc333333/regulations-parser
|
from unittest import TestCase
from regparser.grammar.unified import *
class GrammarCommonTests(TestCase):
def test_depth1_p(self):
text = '(c)(2)(ii)(A)(<E T="03">2</E>)'
result = depth1_p.parseString(text)
self.assertEqual('c', result.p1)
self.assertEqual('2', result.p2)
self.assertEqual('ii', result.p3)
self.assertEqual('A', result.p4)
self.assertEqual('2', result.p5)
def test_marker_subpart_title(self):
# Typical case:
text = u'Subpart K\u2014Exportation'
result = marker_subpart_title.parseString(text)
self.assertEqual(u'Exportation', result.subpart_title)
self.assertEqual(u'K', result.subpart)
# Reserved subpart:
text = u'Subpart J [Reserved]'
result = marker_subpart_title.parseString(text)
self.assertEqual(u'[Reserved]', result.subpart_title)
self.assertEqual(u'J', result.subpart)
Add tests for marker_comment from ascott1/appendix-ref
Conflicts:
tests/grammar_unified_tests.py
|
# -*- coding: utf-8 -*-
from unittest import TestCase
from regparser.grammar.unified import *
class GrammarCommonTests(TestCase):
def test_depth1_p(self):
text = '(c)(2)(ii)(A)(<E T="03">2</E>)'
result = depth1_p.parseString(text)
self.assertEqual('c', result.p1)
self.assertEqual('2', result.p2)
self.assertEqual('ii', result.p3)
self.assertEqual('A', result.p4)
self.assertEqual('2', result.p5)
def test_marker_subpart_title(self):
# Typical case:
text = u'Subpart K\u2014Exportation'
result = marker_subpart_title.parseString(text)
self.assertEqual(u'Exportation', result.subpart_title)
self.assertEqual(u'K', result.subpart)
# Reserved subpart:
text = u'Subpart J [Reserved]'
result = marker_subpart_title.parseString(text)
self.assertEqual(u'[Reserved]', result.subpart_title)
self.assertEqual(u'J', result.subpart)
def test_marker_comment(self):
texts = [u'comment § 1004.3-4-i',
u'comment 1004.3-4-i',
u'comment 3-4-i',]
for t in texts:
result = marker_comment.parseString(t)
self.assertEqual("3", result.section)
self.assertEqual("4", result.c1)
|
<commit_before>from unittest import TestCase
from regparser.grammar.unified import *
class GrammarCommonTests(TestCase):
def test_depth1_p(self):
text = '(c)(2)(ii)(A)(<E T="03">2</E>)'
result = depth1_p.parseString(text)
self.assertEqual('c', result.p1)
self.assertEqual('2', result.p2)
self.assertEqual('ii', result.p3)
self.assertEqual('A', result.p4)
self.assertEqual('2', result.p5)
def test_marker_subpart_title(self):
# Typical case:
text = u'Subpart K\u2014Exportation'
result = marker_subpart_title.parseString(text)
self.assertEqual(u'Exportation', result.subpart_title)
self.assertEqual(u'K', result.subpart)
# Reserved subpart:
text = u'Subpart J [Reserved]'
result = marker_subpart_title.parseString(text)
self.assertEqual(u'[Reserved]', result.subpart_title)
self.assertEqual(u'J', result.subpart)
<commit_msg>Add tests for marker_comment from ascott1/appendix-ref
Conflicts:
tests/grammar_unified_tests.py<commit_after>
|
# -*- coding: utf-8 -*-
from unittest import TestCase
from regparser.grammar.unified import *
class GrammarCommonTests(TestCase):
def test_depth1_p(self):
text = '(c)(2)(ii)(A)(<E T="03">2</E>)'
result = depth1_p.parseString(text)
self.assertEqual('c', result.p1)
self.assertEqual('2', result.p2)
self.assertEqual('ii', result.p3)
self.assertEqual('A', result.p4)
self.assertEqual('2', result.p5)
def test_marker_subpart_title(self):
# Typical case:
text = u'Subpart K\u2014Exportation'
result = marker_subpart_title.parseString(text)
self.assertEqual(u'Exportation', result.subpart_title)
self.assertEqual(u'K', result.subpart)
# Reserved subpart:
text = u'Subpart J [Reserved]'
result = marker_subpart_title.parseString(text)
self.assertEqual(u'[Reserved]', result.subpart_title)
self.assertEqual(u'J', result.subpart)
def test_marker_comment(self):
texts = [u'comment § 1004.3-4-i',
u'comment 1004.3-4-i',
u'comment 3-4-i',]
for t in texts:
result = marker_comment.parseString(t)
self.assertEqual("3", result.section)
self.assertEqual("4", result.c1)
|
from unittest import TestCase
from regparser.grammar.unified import *
class GrammarCommonTests(TestCase):
def test_depth1_p(self):
text = '(c)(2)(ii)(A)(<E T="03">2</E>)'
result = depth1_p.parseString(text)
self.assertEqual('c', result.p1)
self.assertEqual('2', result.p2)
self.assertEqual('ii', result.p3)
self.assertEqual('A', result.p4)
self.assertEqual('2', result.p5)
def test_marker_subpart_title(self):
# Typical case:
text = u'Subpart K\u2014Exportation'
result = marker_subpart_title.parseString(text)
self.assertEqual(u'Exportation', result.subpart_title)
self.assertEqual(u'K', result.subpart)
# Reserved subpart:
text = u'Subpart J [Reserved]'
result = marker_subpart_title.parseString(text)
self.assertEqual(u'[Reserved]', result.subpart_title)
self.assertEqual(u'J', result.subpart)
Add tests for marker_comment from ascott1/appendix-ref
Conflicts:
tests/grammar_unified_tests.py# -*- coding: utf-8 -*-
from unittest import TestCase
from regparser.grammar.unified import *
class GrammarCommonTests(TestCase):
def test_depth1_p(self):
text = '(c)(2)(ii)(A)(<E T="03">2</E>)'
result = depth1_p.parseString(text)
self.assertEqual('c', result.p1)
self.assertEqual('2', result.p2)
self.assertEqual('ii', result.p3)
self.assertEqual('A', result.p4)
self.assertEqual('2', result.p5)
def test_marker_subpart_title(self):
# Typical case:
text = u'Subpart K\u2014Exportation'
result = marker_subpart_title.parseString(text)
self.assertEqual(u'Exportation', result.subpart_title)
self.assertEqual(u'K', result.subpart)
# Reserved subpart:
text = u'Subpart J [Reserved]'
result = marker_subpart_title.parseString(text)
self.assertEqual(u'[Reserved]', result.subpart_title)
self.assertEqual(u'J', result.subpart)
def test_marker_comment(self):
texts = [u'comment § 1004.3-4-i',
u'comment 1004.3-4-i',
u'comment 3-4-i',]
for t in texts:
result = marker_comment.parseString(t)
self.assertEqual("3", result.section)
self.assertEqual("4", result.c1)
|
<commit_before>from unittest import TestCase
from regparser.grammar.unified import *
class GrammarCommonTests(TestCase):
def test_depth1_p(self):
text = '(c)(2)(ii)(A)(<E T="03">2</E>)'
result = depth1_p.parseString(text)
self.assertEqual('c', result.p1)
self.assertEqual('2', result.p2)
self.assertEqual('ii', result.p3)
self.assertEqual('A', result.p4)
self.assertEqual('2', result.p5)
def test_marker_subpart_title(self):
# Typical case:
text = u'Subpart K\u2014Exportation'
result = marker_subpart_title.parseString(text)
self.assertEqual(u'Exportation', result.subpart_title)
self.assertEqual(u'K', result.subpart)
# Reserved subpart:
text = u'Subpart J [Reserved]'
result = marker_subpart_title.parseString(text)
self.assertEqual(u'[Reserved]', result.subpart_title)
self.assertEqual(u'J', result.subpart)
<commit_msg>Add tests for marker_comment from ascott1/appendix-ref
Conflicts:
tests/grammar_unified_tests.py<commit_after># -*- coding: utf-8 -*-
from unittest import TestCase
from regparser.grammar.unified import *
class GrammarCommonTests(TestCase):
def test_depth1_p(self):
text = '(c)(2)(ii)(A)(<E T="03">2</E>)'
result = depth1_p.parseString(text)
self.assertEqual('c', result.p1)
self.assertEqual('2', result.p2)
self.assertEqual('ii', result.p3)
self.assertEqual('A', result.p4)
self.assertEqual('2', result.p5)
def test_marker_subpart_title(self):
# Typical case:
text = u'Subpart K\u2014Exportation'
result = marker_subpart_title.parseString(text)
self.assertEqual(u'Exportation', result.subpart_title)
self.assertEqual(u'K', result.subpart)
# Reserved subpart:
text = u'Subpart J [Reserved]'
result = marker_subpart_title.parseString(text)
self.assertEqual(u'[Reserved]', result.subpart_title)
self.assertEqual(u'J', result.subpart)
def test_marker_comment(self):
texts = [u'comment § 1004.3-4-i',
u'comment 1004.3-4-i',
u'comment 3-4-i',]
for t in texts:
result = marker_comment.parseString(t)
self.assertEqual("3", result.section)
self.assertEqual("4", result.c1)
|
81adecffdf4d9cafa86b7bc9862ce580327c4b6c
|
mongo_pool/__init__.py
|
mongo_pool/__init__.py
|
from mongo_pool import MongoPool
from pkg_resources import get_distribution, DistributionNotFound
import os.path
__all__ = ['mongo_pool']
try:
_dist = get_distribution('mongo-pool')
if not __file__.startswith(os.path.join(_dist.location, 'mongo_pool')):
# not installed, but there is another version that *is*
raise DistributionNotFound
except DistributionNotFound:
__version__ = 'Please install this project with setup.py'
else:
__version__ = _dist.version
|
from .mongo_pool import MongoPool
from pkg_resources import get_distribution, DistributionNotFound
import os.path
__all__ = ['mongo_pool']
try:
_dist = get_distribution('mongo-pool')
if not __file__.startswith(os.path.join(_dist.location, 'mongo_pool')):
# not installed, but there is another version that *is*
raise DistributionNotFound
except DistributionNotFound:
__version__ = 'Please install this project with setup.py'
else:
__version__ = _dist.version
|
Update relative import to work with python 3.6. ANL-10319
|
Update relative import to work with python 3.6. ANL-10319
|
Python
|
apache-2.0
|
uberVU/mongo-pool,uberVU/mongo-pool
|
from mongo_pool import MongoPool
from pkg_resources import get_distribution, DistributionNotFound
import os.path
__all__ = ['mongo_pool']
try:
_dist = get_distribution('mongo-pool')
if not __file__.startswith(os.path.join(_dist.location, 'mongo_pool')):
# not installed, but there is another version that *is*
raise DistributionNotFound
except DistributionNotFound:
__version__ = 'Please install this project with setup.py'
else:
__version__ = _dist.version
Update relative import to work with python 3.6. ANL-10319
|
from .mongo_pool import MongoPool
from pkg_resources import get_distribution, DistributionNotFound
import os.path
__all__ = ['mongo_pool']
try:
_dist = get_distribution('mongo-pool')
if not __file__.startswith(os.path.join(_dist.location, 'mongo_pool')):
# not installed, but there is another version that *is*
raise DistributionNotFound
except DistributionNotFound:
__version__ = 'Please install this project with setup.py'
else:
__version__ = _dist.version
|
<commit_before>from mongo_pool import MongoPool
from pkg_resources import get_distribution, DistributionNotFound
import os.path
__all__ = ['mongo_pool']
try:
_dist = get_distribution('mongo-pool')
if not __file__.startswith(os.path.join(_dist.location, 'mongo_pool')):
# not installed, but there is another version that *is*
raise DistributionNotFound
except DistributionNotFound:
__version__ = 'Please install this project with setup.py'
else:
__version__ = _dist.version
<commit_msg>Update relative import to work with python 3.6. ANL-10319<commit_after>
|
from .mongo_pool import MongoPool
from pkg_resources import get_distribution, DistributionNotFound
import os.path
__all__ = ['mongo_pool']
try:
_dist = get_distribution('mongo-pool')
if not __file__.startswith(os.path.join(_dist.location, 'mongo_pool')):
# not installed, but there is another version that *is*
raise DistributionNotFound
except DistributionNotFound:
__version__ = 'Please install this project with setup.py'
else:
__version__ = _dist.version
|
from mongo_pool import MongoPool
from pkg_resources import get_distribution, DistributionNotFound
import os.path
__all__ = ['mongo_pool']
try:
_dist = get_distribution('mongo-pool')
if not __file__.startswith(os.path.join(_dist.location, 'mongo_pool')):
# not installed, but there is another version that *is*
raise DistributionNotFound
except DistributionNotFound:
__version__ = 'Please install this project with setup.py'
else:
__version__ = _dist.version
Update relative import to work with python 3.6. ANL-10319from .mongo_pool import MongoPool
from pkg_resources import get_distribution, DistributionNotFound
import os.path
__all__ = ['mongo_pool']
try:
_dist = get_distribution('mongo-pool')
if not __file__.startswith(os.path.join(_dist.location, 'mongo_pool')):
# not installed, but there is another version that *is*
raise DistributionNotFound
except DistributionNotFound:
__version__ = 'Please install this project with setup.py'
else:
__version__ = _dist.version
|
<commit_before>from mongo_pool import MongoPool
from pkg_resources import get_distribution, DistributionNotFound
import os.path
__all__ = ['mongo_pool']
try:
_dist = get_distribution('mongo-pool')
if not __file__.startswith(os.path.join(_dist.location, 'mongo_pool')):
# not installed, but there is another version that *is*
raise DistributionNotFound
except DistributionNotFound:
__version__ = 'Please install this project with setup.py'
else:
__version__ = _dist.version
<commit_msg>Update relative import to work with python 3.6. ANL-10319<commit_after>from .mongo_pool import MongoPool
from pkg_resources import get_distribution, DistributionNotFound
import os.path
__all__ = ['mongo_pool']
try:
_dist = get_distribution('mongo-pool')
if not __file__.startswith(os.path.join(_dist.location, 'mongo_pool')):
# not installed, but there is another version that *is*
raise DistributionNotFound
except DistributionNotFound:
__version__ = 'Please install this project with setup.py'
else:
__version__ = _dist.version
|
4c95c238cd198779b7019a72b412ce20ddf865bd
|
alg_gcd.py
|
alg_gcd.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def gcd(m, n):
"""Greatest Common Divisor (GCD) by Euclid's Algorithm.
Time complexity: O(m%n).
Space complexity: O(1).
"""
while n != 0:
m, n = n, m % n
return m
def main():
print('gcd(4, 2): {}'.format(gcd(4, 2)))
print('gcd(2, 4): {}'.format(gcd(2, 4)))
print('gcd(10, 4): {}'.format(gcd(10, 4)))
print('gcd(4, 10): {}'.format(gcd(4, 10)))
print('gcd(3, 4): {}'.format(gcd(3, 4)))
print('gcd(4, 3): {}'.format(gcd(4, 3)))
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def gcd_recur(m, n):
"""Greatest Common Divisor (GCD) by Euclid's Algorithm.
Time complexity: O(m%n).
Space complexity: O(m%n).
"""
if n == 0:
return m
return gcd_recur(n, m % n)
def gcd_iter(m, n):
"""Greatest Common Divisor (GCD) by Euclid's Algorithm.
Time complexity: O(m%n).
Space complexity: O(1).
"""
while n != 0:
m, n = n, m % n
return m
def main():
import time
start_time = time.time()
print('gcd_recur(4, 2): {}'.format(gcd_recur(4, 2)))
print('gcd_recur(2, 4): {}'.format(gcd_recur(2, 4)))
print('gcd_recur(10, 4): {}'.format(gcd_recur(10, 4)))
print('gcd_recur(4, 10): {}'.format(gcd_recur(4, 10)))
print('gcd_recur(3, 4): {}'.format(gcd_recur(3, 4)))
print('gcd_recur(4, 3): {}'.format(gcd_recur(4, 3)))
print('Time:', time.time() - start_time)
start_time = time.time()
print('gcd_iter(4, 2): {}'.format(gcd_iter(4, 2)))
print('gcd_iter(2, 4): {}'.format(gcd_iter(2, 4)))
print('gcd_iter(10, 4): {}'.format(gcd_iter(10, 4)))
print('gcd_iter(4, 10): {}'.format(gcd_iter(4, 10)))
print('gcd_iter(3, 4): {}'.format(gcd_iter(3, 4)))
print('gcd_iter(4, 3): {}'.format(gcd_iter(4, 3)))
print('Time:', time.time() - start_time)
if __name__ == '__main__':
main()
|
Complete gcd recur sol w/ time/space complexity
|
Complete gcd recur sol w/ time/space complexity
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def gcd(m, n):
"""Greatest Common Divisor (GCD) by Euclid's Algorithm.
Time complexity: O(m%n).
Space complexity: O(1).
"""
while n != 0:
m, n = n, m % n
return m
def main():
print('gcd(4, 2): {}'.format(gcd(4, 2)))
print('gcd(2, 4): {}'.format(gcd(2, 4)))
print('gcd(10, 4): {}'.format(gcd(10, 4)))
print('gcd(4, 10): {}'.format(gcd(4, 10)))
print('gcd(3, 4): {}'.format(gcd(3, 4)))
print('gcd(4, 3): {}'.format(gcd(4, 3)))
if __name__ == '__main__':
main()
Complete gcd recur sol w/ time/space complexity
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def gcd_recur(m, n):
"""Greatest Common Divisor (GCD) by Euclid's Algorithm.
Time complexity: O(m%n).
Space complexity: O(m%n).
"""
if n == 0:
return m
return gcd_recur(n, m % n)
def gcd_iter(m, n):
"""Greatest Common Divisor (GCD) by Euclid's Algorithm.
Time complexity: O(m%n).
Space complexity: O(1).
"""
while n != 0:
m, n = n, m % n
return m
def main():
import time
start_time = time.time()
print('gcd_recur(4, 2): {}'.format(gcd_recur(4, 2)))
print('gcd_recur(2, 4): {}'.format(gcd_recur(2, 4)))
print('gcd_recur(10, 4): {}'.format(gcd_recur(10, 4)))
print('gcd_recur(4, 10): {}'.format(gcd_recur(4, 10)))
print('gcd_recur(3, 4): {}'.format(gcd_recur(3, 4)))
print('gcd_recur(4, 3): {}'.format(gcd_recur(4, 3)))
print('Time:', time.time() - start_time)
start_time = time.time()
print('gcd_iter(4, 2): {}'.format(gcd_iter(4, 2)))
print('gcd_iter(2, 4): {}'.format(gcd_iter(2, 4)))
print('gcd_iter(10, 4): {}'.format(gcd_iter(10, 4)))
print('gcd_iter(4, 10): {}'.format(gcd_iter(4, 10)))
print('gcd_iter(3, 4): {}'.format(gcd_iter(3, 4)))
print('gcd_iter(4, 3): {}'.format(gcd_iter(4, 3)))
print('Time:', time.time() - start_time)
if __name__ == '__main__':
main()
|
<commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def gcd(m, n):
"""Greatest Common Divisor (GCD) by Euclid's Algorithm.
Time complexity: O(m%n).
Space complexity: O(1).
"""
while n != 0:
m, n = n, m % n
return m
def main():
print('gcd(4, 2): {}'.format(gcd(4, 2)))
print('gcd(2, 4): {}'.format(gcd(2, 4)))
print('gcd(10, 4): {}'.format(gcd(10, 4)))
print('gcd(4, 10): {}'.format(gcd(4, 10)))
print('gcd(3, 4): {}'.format(gcd(3, 4)))
print('gcd(4, 3): {}'.format(gcd(4, 3)))
if __name__ == '__main__':
main()
<commit_msg>Complete gcd recur sol w/ time/space complexity<commit_after>
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def gcd_recur(m, n):
"""Greatest Common Divisor (GCD) by Euclid's Algorithm.
Time complexity: O(m%n).
Space complexity: O(m%n).
"""
if n == 0:
return m
return gcd_recur(n, m % n)
def gcd_iter(m, n):
"""Greatest Common Divisor (GCD) by Euclid's Algorithm.
Time complexity: O(m%n).
Space complexity: O(1).
"""
while n != 0:
m, n = n, m % n
return m
def main():
import time
start_time = time.time()
print('gcd_recur(4, 2): {}'.format(gcd_recur(4, 2)))
print('gcd_recur(2, 4): {}'.format(gcd_recur(2, 4)))
print('gcd_recur(10, 4): {}'.format(gcd_recur(10, 4)))
print('gcd_recur(4, 10): {}'.format(gcd_recur(4, 10)))
print('gcd_recur(3, 4): {}'.format(gcd_recur(3, 4)))
print('gcd_recur(4, 3): {}'.format(gcd_recur(4, 3)))
print('Time:', time.time() - start_time)
start_time = time.time()
print('gcd_iter(4, 2): {}'.format(gcd_iter(4, 2)))
print('gcd_iter(2, 4): {}'.format(gcd_iter(2, 4)))
print('gcd_iter(10, 4): {}'.format(gcd_iter(10, 4)))
print('gcd_iter(4, 10): {}'.format(gcd_iter(4, 10)))
print('gcd_iter(3, 4): {}'.format(gcd_iter(3, 4)))
print('gcd_iter(4, 3): {}'.format(gcd_iter(4, 3)))
print('Time:', time.time() - start_time)
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def gcd(m, n):
"""Greatest Common Divisor (GCD) by Euclid's Algorithm.
Time complexity: O(m%n).
Space complexity: O(1).
"""
while n != 0:
m, n = n, m % n
return m
def main():
print('gcd(4, 2): {}'.format(gcd(4, 2)))
print('gcd(2, 4): {}'.format(gcd(2, 4)))
print('gcd(10, 4): {}'.format(gcd(10, 4)))
print('gcd(4, 10): {}'.format(gcd(4, 10)))
print('gcd(3, 4): {}'.format(gcd(3, 4)))
print('gcd(4, 3): {}'.format(gcd(4, 3)))
if __name__ == '__main__':
main()
Complete gcd recur sol w/ time/space complexityfrom __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def gcd_recur(m, n):
"""Greatest Common Divisor (GCD) by Euclid's Algorithm.
Time complexity: O(m%n).
Space complexity: O(m%n).
"""
if n == 0:
return m
return gcd_recur(n, m % n)
def gcd_iter(m, n):
"""Greatest Common Divisor (GCD) by Euclid's Algorithm.
Time complexity: O(m%n).
Space complexity: O(1).
"""
while n != 0:
m, n = n, m % n
return m
def main():
import time
start_time = time.time()
print('gcd_recur(4, 2): {}'.format(gcd_recur(4, 2)))
print('gcd_recur(2, 4): {}'.format(gcd_recur(2, 4)))
print('gcd_recur(10, 4): {}'.format(gcd_recur(10, 4)))
print('gcd_recur(4, 10): {}'.format(gcd_recur(4, 10)))
print('gcd_recur(3, 4): {}'.format(gcd_recur(3, 4)))
print('gcd_recur(4, 3): {}'.format(gcd_recur(4, 3)))
print('Time:', time.time() - start_time)
start_time = time.time()
print('gcd_iter(4, 2): {}'.format(gcd_iter(4, 2)))
print('gcd_iter(2, 4): {}'.format(gcd_iter(2, 4)))
print('gcd_iter(10, 4): {}'.format(gcd_iter(10, 4)))
print('gcd_iter(4, 10): {}'.format(gcd_iter(4, 10)))
print('gcd_iter(3, 4): {}'.format(gcd_iter(3, 4)))
print('gcd_iter(4, 3): {}'.format(gcd_iter(4, 3)))
print('Time:', time.time() - start_time)
if __name__ == '__main__':
main()
|
<commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def gcd(m, n):
"""Greatest Common Divisor (GCD) by Euclid's Algorithm.
Time complexity: O(m%n).
Space complexity: O(1).
"""
while n != 0:
m, n = n, m % n
return m
def main():
print('gcd(4, 2): {}'.format(gcd(4, 2)))
print('gcd(2, 4): {}'.format(gcd(2, 4)))
print('gcd(10, 4): {}'.format(gcd(10, 4)))
print('gcd(4, 10): {}'.format(gcd(4, 10)))
print('gcd(3, 4): {}'.format(gcd(3, 4)))
print('gcd(4, 3): {}'.format(gcd(4, 3)))
if __name__ == '__main__':
main()
<commit_msg>Complete gcd recur sol w/ time/space complexity<commit_after>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def gcd_recur(m, n):
"""Greatest Common Divisor (GCD) by Euclid's Algorithm.
Time complexity: O(m%n).
Space complexity: O(m%n).
"""
if n == 0:
return m
return gcd_recur(n, m % n)
def gcd_iter(m, n):
"""Greatest Common Divisor (GCD) by Euclid's Algorithm.
Time complexity: O(m%n).
Space complexity: O(1).
"""
while n != 0:
m, n = n, m % n
return m
def main():
import time
start_time = time.time()
print('gcd_recur(4, 2): {}'.format(gcd_recur(4, 2)))
print('gcd_recur(2, 4): {}'.format(gcd_recur(2, 4)))
print('gcd_recur(10, 4): {}'.format(gcd_recur(10, 4)))
print('gcd_recur(4, 10): {}'.format(gcd_recur(4, 10)))
print('gcd_recur(3, 4): {}'.format(gcd_recur(3, 4)))
print('gcd_recur(4, 3): {}'.format(gcd_recur(4, 3)))
print('Time:', time.time() - start_time)
start_time = time.time()
print('gcd_iter(4, 2): {}'.format(gcd_iter(4, 2)))
print('gcd_iter(2, 4): {}'.format(gcd_iter(2, 4)))
print('gcd_iter(10, 4): {}'.format(gcd_iter(10, 4)))
print('gcd_iter(4, 10): {}'.format(gcd_iter(4, 10)))
print('gcd_iter(3, 4): {}'.format(gcd_iter(3, 4)))
print('gcd_iter(4, 3): {}'.format(gcd_iter(4, 3)))
print('Time:', time.time() - start_time)
if __name__ == '__main__':
main()
|
3290d532f3dd9c1e24921c4b80aeb6e860bc86a8
|
spock/plugins/__init__.py
|
spock/plugins/__init__.py
|
from spock.plugins.core import auth, event, net, ticker, timer
from spock.plugins.helpers import clientinfo, entities, interact, inventory,\
keepalive, movement, physics, respawn, start, world
from spock.plugins.base import PluginBase # noqa
core_plugins = [
('auth', auth.AuthPlugin),
('event', event.EventPlugin),
('net', net.NetPlugin),
('ticker', ticker.TickerPlugin),
('timers', timer.TimerPlugin),
]
helper_plugins = [
('clientinfo', clientinfo.ClientInfoPlugin),
('entities', entities.EntitiesPlugin),
('interact', interact.InteractPlugin),
('inventory', inventory.InventoryPlugin),
('keepalive', keepalive.KeepalivePlugin),
('movement', movement.MovementPlugin),
('physics', physics.PhysicsPlugin),
('respawn', respawn.RespawnPlugin),
('start', start.StartPlugin),
('world', world.WorldPlugin),
]
default_plugins = core_plugins + helper_plugins
|
from spock.plugins.core import auth, event, net, ticker, timer
from spock.plugins.helpers import chat, clientinfo, entities, interact, \
inventory, keepalive, movement, physics, respawn, start, world
from spock.plugins.base import PluginBase # noqa
core_plugins = [
('auth', auth.AuthPlugin),
('event', event.EventPlugin),
('net', net.NetPlugin),
('ticker', ticker.TickerPlugin),
('timers', timer.TimerPlugin),
]
helper_plugins = [
('chat', chat.ChatPlugin),
('clientinfo', clientinfo.ClientInfoPlugin),
('entities', entities.EntitiesPlugin),
('interact', interact.InteractPlugin),
('inventory', inventory.InventoryPlugin),
('keepalive', keepalive.KeepalivePlugin),
('movement', movement.MovementPlugin),
('physics', physics.PhysicsPlugin),
('respawn', respawn.RespawnPlugin),
('start', start.StartPlugin),
('world', world.WorldPlugin),
]
default_plugins = core_plugins + helper_plugins
|
Fix removal of chat plugin
|
Fix removal of chat plugin
|
Python
|
mit
|
nickelpro/SpockBot,MrSwiss/SpockBot,Gjum/SpockBot,SpockBotMC/SpockBot,gamingrobot/SpockBot,luken/SpockBot
|
from spock.plugins.core import auth, event, net, ticker, timer
from spock.plugins.helpers import clientinfo, entities, interact, inventory,\
keepalive, movement, physics, respawn, start, world
from spock.plugins.base import PluginBase # noqa
core_plugins = [
('auth', auth.AuthPlugin),
('event', event.EventPlugin),
('net', net.NetPlugin),
('ticker', ticker.TickerPlugin),
('timers', timer.TimerPlugin),
]
helper_plugins = [
('clientinfo', clientinfo.ClientInfoPlugin),
('entities', entities.EntitiesPlugin),
('interact', interact.InteractPlugin),
('inventory', inventory.InventoryPlugin),
('keepalive', keepalive.KeepalivePlugin),
('movement', movement.MovementPlugin),
('physics', physics.PhysicsPlugin),
('respawn', respawn.RespawnPlugin),
('start', start.StartPlugin),
('world', world.WorldPlugin),
]
default_plugins = core_plugins + helper_plugins
Fix removal of chat plugin
|
from spock.plugins.core import auth, event, net, ticker, timer
from spock.plugins.helpers import chat, clientinfo, entities, interact, \
inventory, keepalive, movement, physics, respawn, start, world
from spock.plugins.base import PluginBase # noqa
core_plugins = [
('auth', auth.AuthPlugin),
('event', event.EventPlugin),
('net', net.NetPlugin),
('ticker', ticker.TickerPlugin),
('timers', timer.TimerPlugin),
]
helper_plugins = [
('chat', chat.ChatPlugin),
('clientinfo', clientinfo.ClientInfoPlugin),
('entities', entities.EntitiesPlugin),
('interact', interact.InteractPlugin),
('inventory', inventory.InventoryPlugin),
('keepalive', keepalive.KeepalivePlugin),
('movement', movement.MovementPlugin),
('physics', physics.PhysicsPlugin),
('respawn', respawn.RespawnPlugin),
('start', start.StartPlugin),
('world', world.WorldPlugin),
]
default_plugins = core_plugins + helper_plugins
|
<commit_before>from spock.plugins.core import auth, event, net, ticker, timer
from spock.plugins.helpers import clientinfo, entities, interact, inventory,\
keepalive, movement, physics, respawn, start, world
from spock.plugins.base import PluginBase # noqa
core_plugins = [
('auth', auth.AuthPlugin),
('event', event.EventPlugin),
('net', net.NetPlugin),
('ticker', ticker.TickerPlugin),
('timers', timer.TimerPlugin),
]
helper_plugins = [
('clientinfo', clientinfo.ClientInfoPlugin),
('entities', entities.EntitiesPlugin),
('interact', interact.InteractPlugin),
('inventory', inventory.InventoryPlugin),
('keepalive', keepalive.KeepalivePlugin),
('movement', movement.MovementPlugin),
('physics', physics.PhysicsPlugin),
('respawn', respawn.RespawnPlugin),
('start', start.StartPlugin),
('world', world.WorldPlugin),
]
default_plugins = core_plugins + helper_plugins
<commit_msg>Fix removal of chat plugin<commit_after>
|
from spock.plugins.core import auth, event, net, ticker, timer
from spock.plugins.helpers import chat, clientinfo, entities, interact, \
inventory, keepalive, movement, physics, respawn, start, world
from spock.plugins.base import PluginBase # noqa
core_plugins = [
('auth', auth.AuthPlugin),
('event', event.EventPlugin),
('net', net.NetPlugin),
('ticker', ticker.TickerPlugin),
('timers', timer.TimerPlugin),
]
helper_plugins = [
('chat', chat.ChatPlugin),
('clientinfo', clientinfo.ClientInfoPlugin),
('entities', entities.EntitiesPlugin),
('interact', interact.InteractPlugin),
('inventory', inventory.InventoryPlugin),
('keepalive', keepalive.KeepalivePlugin),
('movement', movement.MovementPlugin),
('physics', physics.PhysicsPlugin),
('respawn', respawn.RespawnPlugin),
('start', start.StartPlugin),
('world', world.WorldPlugin),
]
default_plugins = core_plugins + helper_plugins
|
from spock.plugins.core import auth, event, net, ticker, timer
from spock.plugins.helpers import clientinfo, entities, interact, inventory,\
keepalive, movement, physics, respawn, start, world
from spock.plugins.base import PluginBase # noqa
core_plugins = [
('auth', auth.AuthPlugin),
('event', event.EventPlugin),
('net', net.NetPlugin),
('ticker', ticker.TickerPlugin),
('timers', timer.TimerPlugin),
]
helper_plugins = [
('clientinfo', clientinfo.ClientInfoPlugin),
('entities', entities.EntitiesPlugin),
('interact', interact.InteractPlugin),
('inventory', inventory.InventoryPlugin),
('keepalive', keepalive.KeepalivePlugin),
('movement', movement.MovementPlugin),
('physics', physics.PhysicsPlugin),
('respawn', respawn.RespawnPlugin),
('start', start.StartPlugin),
('world', world.WorldPlugin),
]
default_plugins = core_plugins + helper_plugins
Fix removal of chat pluginfrom spock.plugins.core import auth, event, net, ticker, timer
from spock.plugins.helpers import chat, clientinfo, entities, interact, \
inventory, keepalive, movement, physics, respawn, start, world
from spock.plugins.base import PluginBase # noqa
core_plugins = [
('auth', auth.AuthPlugin),
('event', event.EventPlugin),
('net', net.NetPlugin),
('ticker', ticker.TickerPlugin),
('timers', timer.TimerPlugin),
]
helper_plugins = [
('chat', chat.ChatPlugin),
('clientinfo', clientinfo.ClientInfoPlugin),
('entities', entities.EntitiesPlugin),
('interact', interact.InteractPlugin),
('inventory', inventory.InventoryPlugin),
('keepalive', keepalive.KeepalivePlugin),
('movement', movement.MovementPlugin),
('physics', physics.PhysicsPlugin),
('respawn', respawn.RespawnPlugin),
('start', start.StartPlugin),
('world', world.WorldPlugin),
]
default_plugins = core_plugins + helper_plugins
|
<commit_before>from spock.plugins.core import auth, event, net, ticker, timer
from spock.plugins.helpers import clientinfo, entities, interact, inventory,\
keepalive, movement, physics, respawn, start, world
from spock.plugins.base import PluginBase # noqa
core_plugins = [
('auth', auth.AuthPlugin),
('event', event.EventPlugin),
('net', net.NetPlugin),
('ticker', ticker.TickerPlugin),
('timers', timer.TimerPlugin),
]
helper_plugins = [
('clientinfo', clientinfo.ClientInfoPlugin),
('entities', entities.EntitiesPlugin),
('interact', interact.InteractPlugin),
('inventory', inventory.InventoryPlugin),
('keepalive', keepalive.KeepalivePlugin),
('movement', movement.MovementPlugin),
('physics', physics.PhysicsPlugin),
('respawn', respawn.RespawnPlugin),
('start', start.StartPlugin),
('world', world.WorldPlugin),
]
default_plugins = core_plugins + helper_plugins
<commit_msg>Fix removal of chat plugin<commit_after>from spock.plugins.core import auth, event, net, ticker, timer
from spock.plugins.helpers import chat, clientinfo, entities, interact, \
inventory, keepalive, movement, physics, respawn, start, world
from spock.plugins.base import PluginBase # noqa
core_plugins = [
('auth', auth.AuthPlugin),
('event', event.EventPlugin),
('net', net.NetPlugin),
('ticker', ticker.TickerPlugin),
('timers', timer.TimerPlugin),
]
helper_plugins = [
('chat', chat.ChatPlugin),
('clientinfo', clientinfo.ClientInfoPlugin),
('entities', entities.EntitiesPlugin),
('interact', interact.InteractPlugin),
('inventory', inventory.InventoryPlugin),
('keepalive', keepalive.KeepalivePlugin),
('movement', movement.MovementPlugin),
('physics', physics.PhysicsPlugin),
('respawn', respawn.RespawnPlugin),
('start', start.StartPlugin),
('world', world.WorldPlugin),
]
default_plugins = core_plugins + helper_plugins
|
80a3efa11f8f2913d3ce90668b4e9c7eaa5ada9c
|
fabfile.py
|
fabfile.py
|
from armstrong.dev.tasks import *
settings = {
'DEBUG': True,
'INTERNAL_IPS': ('127.0.0.1', ),
'INSTALLED_APPS': (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'armstrong.core.arm_layout',
'armstrong.core.arm_layout.tests.arm_layout_support'
),
'SITE_ID': 1,
}
main_app = "arm_layout"
tested_apps = (main_app, )
|
from armstrong.dev.tasks import *
settings = {
'DEBUG': True,
'INTERNAL_IPS': ('127.0.0.1', ),
'INSTALLED_APPS': (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'armstrong.core.arm_layout',
'armstrong.core.arm_layout.tests.arm_layout_support'
),
'SITE_ID': 1,
}
main_app = "arm_layout"
tested_apps = (main_app, )
import django
if django.VERSION >= (1, 6): # use the old test runner for now
settings['TEST_RUNNER'] = 'django.test.simple.DjangoTestSuiteRunner'
|
Fix testing in Django 1.6 by using the old test runner.
|
Fix testing in Django 1.6 by using the old test runner.
|
Python
|
apache-2.0
|
armstrong/armstrong.core.arm_layout,armstrong/armstrong.core.arm_layout
|
from armstrong.dev.tasks import *
settings = {
'DEBUG': True,
'INTERNAL_IPS': ('127.0.0.1', ),
'INSTALLED_APPS': (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'armstrong.core.arm_layout',
'armstrong.core.arm_layout.tests.arm_layout_support'
),
'SITE_ID': 1,
}
main_app = "arm_layout"
tested_apps = (main_app, )
Fix testing in Django 1.6 by using the old test runner.
|
from armstrong.dev.tasks import *
settings = {
'DEBUG': True,
'INTERNAL_IPS': ('127.0.0.1', ),
'INSTALLED_APPS': (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'armstrong.core.arm_layout',
'armstrong.core.arm_layout.tests.arm_layout_support'
),
'SITE_ID': 1,
}
main_app = "arm_layout"
tested_apps = (main_app, )
import django
if django.VERSION >= (1, 6): # use the old test runner for now
settings['TEST_RUNNER'] = 'django.test.simple.DjangoTestSuiteRunner'
|
<commit_before>from armstrong.dev.tasks import *
settings = {
'DEBUG': True,
'INTERNAL_IPS': ('127.0.0.1', ),
'INSTALLED_APPS': (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'armstrong.core.arm_layout',
'armstrong.core.arm_layout.tests.arm_layout_support'
),
'SITE_ID': 1,
}
main_app = "arm_layout"
tested_apps = (main_app, )
<commit_msg>Fix testing in Django 1.6 by using the old test runner.<commit_after>
|
from armstrong.dev.tasks import *
settings = {
'DEBUG': True,
'INTERNAL_IPS': ('127.0.0.1', ),
'INSTALLED_APPS': (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'armstrong.core.arm_layout',
'armstrong.core.arm_layout.tests.arm_layout_support'
),
'SITE_ID': 1,
}
main_app = "arm_layout"
tested_apps = (main_app, )
import django
if django.VERSION >= (1, 6): # use the old test runner for now
settings['TEST_RUNNER'] = 'django.test.simple.DjangoTestSuiteRunner'
|
from armstrong.dev.tasks import *
settings = {
'DEBUG': True,
'INTERNAL_IPS': ('127.0.0.1', ),
'INSTALLED_APPS': (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'armstrong.core.arm_layout',
'armstrong.core.arm_layout.tests.arm_layout_support'
),
'SITE_ID': 1,
}
main_app = "arm_layout"
tested_apps = (main_app, )
Fix testing in Django 1.6 by using the old test runner.from armstrong.dev.tasks import *
settings = {
'DEBUG': True,
'INTERNAL_IPS': ('127.0.0.1', ),
'INSTALLED_APPS': (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'armstrong.core.arm_layout',
'armstrong.core.arm_layout.tests.arm_layout_support'
),
'SITE_ID': 1,
}
main_app = "arm_layout"
tested_apps = (main_app, )
import django
if django.VERSION >= (1, 6): # use the old test runner for now
settings['TEST_RUNNER'] = 'django.test.simple.DjangoTestSuiteRunner'
|
<commit_before>from armstrong.dev.tasks import *
settings = {
'DEBUG': True,
'INTERNAL_IPS': ('127.0.0.1', ),
'INSTALLED_APPS': (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'armstrong.core.arm_layout',
'armstrong.core.arm_layout.tests.arm_layout_support'
),
'SITE_ID': 1,
}
main_app = "arm_layout"
tested_apps = (main_app, )
<commit_msg>Fix testing in Django 1.6 by using the old test runner.<commit_after>from armstrong.dev.tasks import *
settings = {
'DEBUG': True,
'INTERNAL_IPS': ('127.0.0.1', ),
'INSTALLED_APPS': (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'armstrong.core.arm_layout',
'armstrong.core.arm_layout.tests.arm_layout_support'
),
'SITE_ID': 1,
}
main_app = "arm_layout"
tested_apps = (main_app, )
import django
if django.VERSION >= (1, 6): # use the old test runner for now
settings['TEST_RUNNER'] = 'django.test.simple.DjangoTestSuiteRunner'
|
9135bd99f5668adeb7d8bfbd4e317f58f640e6f5
|
tests/test_strings.py
|
tests/test_strings.py
|
"""Tests for string searches"""
from nose.tools import eq_
from dxr.testing import SingleFileTestCase, MINIMAL_MAIN
class StringTests(SingleFileTestCase):
source = """
void main_idea() {
}
""" + MINIMAL_MAIN
def test_negated_phrase(self):
"""Make sure a negated phrase search doesn't crash."""
eq_(self.search_results('void -"int"'), [])
|
"""Tests for string searches"""
from nose import SkipTest
from nose.tools import eq_
from dxr.testing import SingleFileTestCase, MINIMAL_MAIN
class StringTests(SingleFileTestCase):
source = """
void main_idea() {
}
""" + MINIMAL_MAIN
def test_negated_phrase(self):
"""Make sure a negated phrase search doesn't crash."""
eq_(self.search_results('void -"int"'), [])
class RepeatedResultTests(SingleFileTestCase):
# Putting code on the first line triggers the bug:
source = """int main(int argc, char* argv[]) {
return 0;
}
"""
def test_repeated_results(self):
"""Make sure we don't get the same line back twice."""
raise SkipTest
self.found_lines_eq('int',
'<b>int</b> main(<b>int</b> argc, char* argv[]) {')
|
Add a skipped failing test for the result-repetition bug.
|
Add a skipped failing test for the result-repetition bug.
|
Python
|
mit
|
pombredanne/dxr,srenatus/dxr,gartung/dxr,nrc/dxr,gartung/dxr,kleintom/dxr,srenatus/dxr,srenatus/dxr,srenatus/dxr,bozzmob/dxr,jbradberry/dxr,KiemVM/Mozilla--dxr,jbradberry/dxr,jbradberry/dxr,jay-z007/dxr,kleintom/dxr,nrc/dxr,bozzmob/dxr,jbradberry/dxr,gartung/dxr,nrc/dxr,gartung/dxr,bozzmob/dxr,kleintom/dxr,pelmers/dxr,erikrose/dxr,pombredanne/dxr,pombredanne/dxr,jay-z007/dxr,KiemVM/Mozilla--dxr,jay-z007/dxr,bozzmob/dxr,kleintom/dxr,jbradberry/dxr,kleintom/dxr,nrc/dxr,bozzmob/dxr,pelmers/dxr,erikrose/dxr,pelmers/dxr,pelmers/dxr,erikrose/dxr,gartung/dxr,jay-z007/dxr,gartung/dxr,srenatus/dxr,nrc/dxr,KiemVM/Mozilla--dxr,gartung/dxr,pelmers/dxr,kleintom/dxr,pelmers/dxr,jay-z007/dxr,pombredanne/dxr,jbradberry/dxr,jay-z007/dxr,bozzmob/dxr,bozzmob/dxr,pombredanne/dxr,jbradberry/dxr,kleintom/dxr,pombredanne/dxr,KiemVM/Mozilla--dxr,pombredanne/dxr,srenatus/dxr,KiemVM/Mozilla--dxr,erikrose/dxr,erikrose/dxr,pelmers/dxr,jay-z007/dxr,nrc/dxr,KiemVM/Mozilla--dxr
|
"""Tests for string searches"""
from nose.tools import eq_
from dxr.testing import SingleFileTestCase, MINIMAL_MAIN
class StringTests(SingleFileTestCase):
source = """
void main_idea() {
}
""" + MINIMAL_MAIN
def test_negated_phrase(self):
"""Make sure a negated phrase search doesn't crash."""
eq_(self.search_results('void -"int"'), [])
Add a skipped failing test for the result-repetition bug.
|
"""Tests for string searches"""
from nose import SkipTest
from nose.tools import eq_
from dxr.testing import SingleFileTestCase, MINIMAL_MAIN
class StringTests(SingleFileTestCase):
source = """
void main_idea() {
}
""" + MINIMAL_MAIN
def test_negated_phrase(self):
"""Make sure a negated phrase search doesn't crash."""
eq_(self.search_results('void -"int"'), [])
class RepeatedResultTests(SingleFileTestCase):
# Putting code on the first line triggers the bug:
source = """int main(int argc, char* argv[]) {
return 0;
}
"""
def test_repeated_results(self):
"""Make sure we don't get the same line back twice."""
raise SkipTest
self.found_lines_eq('int',
'<b>int</b> main(<b>int</b> argc, char* argv[]) {')
|
<commit_before>"""Tests for string searches"""
from nose.tools import eq_
from dxr.testing import SingleFileTestCase, MINIMAL_MAIN
class StringTests(SingleFileTestCase):
source = """
void main_idea() {
}
""" + MINIMAL_MAIN
def test_negated_phrase(self):
"""Make sure a negated phrase search doesn't crash."""
eq_(self.search_results('void -"int"'), [])
<commit_msg>Add a skipped failing test for the result-repetition bug.<commit_after>
|
"""Tests for string searches"""
from nose import SkipTest
from nose.tools import eq_
from dxr.testing import SingleFileTestCase, MINIMAL_MAIN
class StringTests(SingleFileTestCase):
source = """
void main_idea() {
}
""" + MINIMAL_MAIN
def test_negated_phrase(self):
"""Make sure a negated phrase search doesn't crash."""
eq_(self.search_results('void -"int"'), [])
class RepeatedResultTests(SingleFileTestCase):
# Putting code on the first line triggers the bug:
source = """int main(int argc, char* argv[]) {
return 0;
}
"""
def test_repeated_results(self):
"""Make sure we don't get the same line back twice."""
raise SkipTest
self.found_lines_eq('int',
'<b>int</b> main(<b>int</b> argc, char* argv[]) {')
|
"""Tests for string searches"""
from nose.tools import eq_
from dxr.testing import SingleFileTestCase, MINIMAL_MAIN
class StringTests(SingleFileTestCase):
source = """
void main_idea() {
}
""" + MINIMAL_MAIN
def test_negated_phrase(self):
"""Make sure a negated phrase search doesn't crash."""
eq_(self.search_results('void -"int"'), [])
Add a skipped failing test for the result-repetition bug."""Tests for string searches"""
from nose import SkipTest
from nose.tools import eq_
from dxr.testing import SingleFileTestCase, MINIMAL_MAIN
class StringTests(SingleFileTestCase):
source = """
void main_idea() {
}
""" + MINIMAL_MAIN
def test_negated_phrase(self):
"""Make sure a negated phrase search doesn't crash."""
eq_(self.search_results('void -"int"'), [])
class RepeatedResultTests(SingleFileTestCase):
# Putting code on the first line triggers the bug:
source = """int main(int argc, char* argv[]) {
return 0;
}
"""
def test_repeated_results(self):
"""Make sure we don't get the same line back twice."""
raise SkipTest
self.found_lines_eq('int',
'<b>int</b> main(<b>int</b> argc, char* argv[]) {')
|
<commit_before>"""Tests for string searches"""
from nose.tools import eq_
from dxr.testing import SingleFileTestCase, MINIMAL_MAIN
class StringTests(SingleFileTestCase):
source = """
void main_idea() {
}
""" + MINIMAL_MAIN
def test_negated_phrase(self):
"""Make sure a negated phrase search doesn't crash."""
eq_(self.search_results('void -"int"'), [])
<commit_msg>Add a skipped failing test for the result-repetition bug.<commit_after>"""Tests for string searches"""
from nose import SkipTest
from nose.tools import eq_
from dxr.testing import SingleFileTestCase, MINIMAL_MAIN
class StringTests(SingleFileTestCase):
source = """
void main_idea() {
}
""" + MINIMAL_MAIN
def test_negated_phrase(self):
"""Make sure a negated phrase search doesn't crash."""
eq_(self.search_results('void -"int"'), [])
class RepeatedResultTests(SingleFileTestCase):
# Putting code on the first line triggers the bug:
source = """int main(int argc, char* argv[]) {
return 0;
}
"""
def test_repeated_results(self):
"""Make sure we don't get the same line back twice."""
raise SkipTest
self.found_lines_eq('int',
'<b>int</b> main(<b>int</b> argc, char* argv[]) {')
|
1f09af5b3b785133dce83cd5a00fde7f69fdf410
|
apps/config/powerline.symlink/segments/custom.py
|
apps/config/powerline.symlink/segments/custom.py
|
from powerline.segments.common import bat, sys
def system_load(pl, num_avgs=3):
return sys.system_load(pl)[:num_avgs]
def battery(pl, max_percent=101):
if bat._get_capacity(pl) < max_percent:
return bat.battery(pl)
return []
|
from powerline.segments.common import bat, sys
def system_load(pl, num_avgs=3):
return sys.system_load(pl)[:num_avgs]
def battery(pl, max_percent=101):
capacity, ac_powered = bat._get_battery_status(pl)
if capacity < max_percent:
return bat.battery(pl)
return []
|
Update powerline battery wrapper for new API
|
Update powerline battery wrapper for new API
|
Python
|
mit
|
tchajed/dotfiles-osx,tchajed/dotfiles-osx,tchajed/dotfiles-osx
|
from powerline.segments.common import bat, sys
def system_load(pl, num_avgs=3):
return sys.system_load(pl)[:num_avgs]
def battery(pl, max_percent=101):
if bat._get_capacity(pl) < max_percent:
return bat.battery(pl)
return []
Update powerline battery wrapper for new API
|
from powerline.segments.common import bat, sys
def system_load(pl, num_avgs=3):
return sys.system_load(pl)[:num_avgs]
def battery(pl, max_percent=101):
capacity, ac_powered = bat._get_battery_status(pl)
if capacity < max_percent:
return bat.battery(pl)
return []
|
<commit_before>from powerline.segments.common import bat, sys
def system_load(pl, num_avgs=3):
return sys.system_load(pl)[:num_avgs]
def battery(pl, max_percent=101):
if bat._get_capacity(pl) < max_percent:
return bat.battery(pl)
return []
<commit_msg>Update powerline battery wrapper for new API<commit_after>
|
from powerline.segments.common import bat, sys
def system_load(pl, num_avgs=3):
return sys.system_load(pl)[:num_avgs]
def battery(pl, max_percent=101):
capacity, ac_powered = bat._get_battery_status(pl)
if capacity < max_percent:
return bat.battery(pl)
return []
|
from powerline.segments.common import bat, sys
def system_load(pl, num_avgs=3):
return sys.system_load(pl)[:num_avgs]
def battery(pl, max_percent=101):
if bat._get_capacity(pl) < max_percent:
return bat.battery(pl)
return []
Update powerline battery wrapper for new APIfrom powerline.segments.common import bat, sys
def system_load(pl, num_avgs=3):
return sys.system_load(pl)[:num_avgs]
def battery(pl, max_percent=101):
capacity, ac_powered = bat._get_battery_status(pl)
if capacity < max_percent:
return bat.battery(pl)
return []
|
<commit_before>from powerline.segments.common import bat, sys
def system_load(pl, num_avgs=3):
return sys.system_load(pl)[:num_avgs]
def battery(pl, max_percent=101):
if bat._get_capacity(pl) < max_percent:
return bat.battery(pl)
return []
<commit_msg>Update powerline battery wrapper for new API<commit_after>from powerline.segments.common import bat, sys
def system_load(pl, num_avgs=3):
return sys.system_load(pl)[:num_avgs]
def battery(pl, max_percent=101):
capacity, ac_powered = bat._get_battery_status(pl)
if capacity < max_percent:
return bat.battery(pl)
return []
|
a3d58cc1feeca734898098920e5c7195632d408b
|
atompos/atompos/middleware/logging_middleware.py
|
atompos/atompos/middleware/logging_middleware.py
|
from time import time
from logging import getLogger
# From: https://djangosnippets.org/snippets/1866/
def sizify(value):
"""
Simple kb/mb/gb size snippet
"""
#value = ing(value)
if value < 512:
ext = 'B'
elif value < 512000:
value = value / 1024.0
ext = 'kB'
elif value < 4194304000:
value = value / 1048576.0
ext = 'MB'
else:
value = value / 1073741824.0
ext = 'GB'
return '%s %s' % (str(round(value, 2)), ext)
class LoggingMiddleware(object):
def __init__(self):
# arguably poor taste to use django's logger
self.logger = getLogger('django')
def process_request(self, request):
request.timer = time()
return None
def process_response(self, request, response):
self.logger.info(
'%s %s %s %s [%s] (%.0f ms)',
request.META["SERVER_PROTOCOL"],
request.method,
request.get_full_path(),
response.status_code,
sizify(len(response.content)),
(time() - request.timer) * 1000.
)
return response
|
from time import time
from logging import getLogger
# From: https://djangosnippets.org/snippets/1866/
def sizify(value):
"""
Simple kb/mb/gb size snippet
"""
#value = ing(value)
if value < 512:
ext = 'B'
elif value < 512000:
value = value / 1024.0
ext = 'kB'
elif value < 4194304000:
value = value / 1048576.0
ext = 'MB'
else:
value = value / 1073741824.0
ext = 'GB'
return '%s %s' % (str(round(value, 2)), ext)
class LoggingMiddleware(object):
def __init__(self):
# arguably poor taste to use django's logger
self.logger = getLogger('django')
def process_request(self, request):
request.timer = time()
return None
def process_response(self, request, response):
if not hasattr(request, 'timer'):
request.timer = time()
self.logger.info(
'%s %s %s %s [%s] (%.0f ms)',
request.META["SERVER_PROTOCOL"],
request.method,
request.get_full_path(),
response.status_code,
sizify(len(response.content)),
(time() - request.timer) * 1000.
)
return response
|
Fix for request timer not always working.
|
Fix for request timer not always working.
|
Python
|
mit
|
jimivdw/OAPoC,bertrand-caron/OAPoC,bertrand-caron/OAPoC
|
from time import time
from logging import getLogger
# From: https://djangosnippets.org/snippets/1866/
def sizify(value):
"""
Simple kb/mb/gb size snippet
"""
#value = ing(value)
if value < 512:
ext = 'B'
elif value < 512000:
value = value / 1024.0
ext = 'kB'
elif value < 4194304000:
value = value / 1048576.0
ext = 'MB'
else:
value = value / 1073741824.0
ext = 'GB'
return '%s %s' % (str(round(value, 2)), ext)
class LoggingMiddleware(object):
def __init__(self):
# arguably poor taste to use django's logger
self.logger = getLogger('django')
def process_request(self, request):
request.timer = time()
return None
def process_response(self, request, response):
self.logger.info(
'%s %s %s %s [%s] (%.0f ms)',
request.META["SERVER_PROTOCOL"],
request.method,
request.get_full_path(),
response.status_code,
sizify(len(response.content)),
(time() - request.timer) * 1000.
)
return responseFix for request timer not always working.
|
from time import time
from logging import getLogger
# From: https://djangosnippets.org/snippets/1866/
def sizify(value):
"""
Simple kb/mb/gb size snippet
"""
#value = ing(value)
if value < 512:
ext = 'B'
elif value < 512000:
value = value / 1024.0
ext = 'kB'
elif value < 4194304000:
value = value / 1048576.0
ext = 'MB'
else:
value = value / 1073741824.0
ext = 'GB'
return '%s %s' % (str(round(value, 2)), ext)
class LoggingMiddleware(object):
def __init__(self):
# arguably poor taste to use django's logger
self.logger = getLogger('django')
def process_request(self, request):
request.timer = time()
return None
def process_response(self, request, response):
if not hasattr(request, 'timer'):
request.timer = time()
self.logger.info(
'%s %s %s %s [%s] (%.0f ms)',
request.META["SERVER_PROTOCOL"],
request.method,
request.get_full_path(),
response.status_code,
sizify(len(response.content)),
(time() - request.timer) * 1000.
)
return response
|
<commit_before>from time import time
from logging import getLogger
# From: https://djangosnippets.org/snippets/1866/
def sizify(value):
"""
Simple kb/mb/gb size snippet
"""
#value = ing(value)
if value < 512:
ext = 'B'
elif value < 512000:
value = value / 1024.0
ext = 'kB'
elif value < 4194304000:
value = value / 1048576.0
ext = 'MB'
else:
value = value / 1073741824.0
ext = 'GB'
return '%s %s' % (str(round(value, 2)), ext)
class LoggingMiddleware(object):
def __init__(self):
# arguably poor taste to use django's logger
self.logger = getLogger('django')
def process_request(self, request):
request.timer = time()
return None
def process_response(self, request, response):
self.logger.info(
'%s %s %s %s [%s] (%.0f ms)',
request.META["SERVER_PROTOCOL"],
request.method,
request.get_full_path(),
response.status_code,
sizify(len(response.content)),
(time() - request.timer) * 1000.
)
return response<commit_msg>Fix for request timer not always working.<commit_after>
|
from time import time
from logging import getLogger
# From: https://djangosnippets.org/snippets/1866/
def sizify(value):
"""
Simple kb/mb/gb size snippet
"""
#value = ing(value)
if value < 512:
ext = 'B'
elif value < 512000:
value = value / 1024.0
ext = 'kB'
elif value < 4194304000:
value = value / 1048576.0
ext = 'MB'
else:
value = value / 1073741824.0
ext = 'GB'
return '%s %s' % (str(round(value, 2)), ext)
class LoggingMiddleware(object):
def __init__(self):
# arguably poor taste to use django's logger
self.logger = getLogger('django')
def process_request(self, request):
request.timer = time()
return None
def process_response(self, request, response):
if not hasattr(request, 'timer'):
request.timer = time()
self.logger.info(
'%s %s %s %s [%s] (%.0f ms)',
request.META["SERVER_PROTOCOL"],
request.method,
request.get_full_path(),
response.status_code,
sizify(len(response.content)),
(time() - request.timer) * 1000.
)
return response
|
from time import time
from logging import getLogger
# From: https://djangosnippets.org/snippets/1866/
def sizify(value):
"""
Simple kb/mb/gb size snippet
"""
#value = ing(value)
if value < 512:
ext = 'B'
elif value < 512000:
value = value / 1024.0
ext = 'kB'
elif value < 4194304000:
value = value / 1048576.0
ext = 'MB'
else:
value = value / 1073741824.0
ext = 'GB'
return '%s %s' % (str(round(value, 2)), ext)
class LoggingMiddleware(object):
def __init__(self):
# arguably poor taste to use django's logger
self.logger = getLogger('django')
def process_request(self, request):
request.timer = time()
return None
def process_response(self, request, response):
self.logger.info(
'%s %s %s %s [%s] (%.0f ms)',
request.META["SERVER_PROTOCOL"],
request.method,
request.get_full_path(),
response.status_code,
sizify(len(response.content)),
(time() - request.timer) * 1000.
)
return responseFix for request timer not always working.from time import time
from logging import getLogger
# From: https://djangosnippets.org/snippets/1866/
def sizify(value):
"""
Simple kb/mb/gb size snippet
"""
#value = ing(value)
if value < 512:
ext = 'B'
elif value < 512000:
value = value / 1024.0
ext = 'kB'
elif value < 4194304000:
value = value / 1048576.0
ext = 'MB'
else:
value = value / 1073741824.0
ext = 'GB'
return '%s %s' % (str(round(value, 2)), ext)
class LoggingMiddleware(object):
def __init__(self):
# arguably poor taste to use django's logger
self.logger = getLogger('django')
def process_request(self, request):
request.timer = time()
return None
def process_response(self, request, response):
if not hasattr(request, 'timer'):
request.timer = time()
self.logger.info(
'%s %s %s %s [%s] (%.0f ms)',
request.META["SERVER_PROTOCOL"],
request.method,
request.get_full_path(),
response.status_code,
sizify(len(response.content)),
(time() - request.timer) * 1000.
)
return response
|
<commit_before>from time import time
from logging import getLogger
# From: https://djangosnippets.org/snippets/1866/
def sizify(value):
"""
Simple kb/mb/gb size snippet
"""
#value = ing(value)
if value < 512:
ext = 'B'
elif value < 512000:
value = value / 1024.0
ext = 'kB'
elif value < 4194304000:
value = value / 1048576.0
ext = 'MB'
else:
value = value / 1073741824.0
ext = 'GB'
return '%s %s' % (str(round(value, 2)), ext)
class LoggingMiddleware(object):
def __init__(self):
# arguably poor taste to use django's logger
self.logger = getLogger('django')
def process_request(self, request):
request.timer = time()
return None
def process_response(self, request, response):
self.logger.info(
'%s %s %s %s [%s] (%.0f ms)',
request.META["SERVER_PROTOCOL"],
request.method,
request.get_full_path(),
response.status_code,
sizify(len(response.content)),
(time() - request.timer) * 1000.
)
return response<commit_msg>Fix for request timer not always working.<commit_after>from time import time
from logging import getLogger
# From: https://djangosnippets.org/snippets/1866/
def sizify(value):
"""
Simple kb/mb/gb size snippet
"""
#value = ing(value)
if value < 512:
ext = 'B'
elif value < 512000:
value = value / 1024.0
ext = 'kB'
elif value < 4194304000:
value = value / 1048576.0
ext = 'MB'
else:
value = value / 1073741824.0
ext = 'GB'
return '%s %s' % (str(round(value, 2)), ext)
class LoggingMiddleware(object):
def __init__(self):
# arguably poor taste to use django's logger
self.logger = getLogger('django')
def process_request(self, request):
request.timer = time()
return None
def process_response(self, request, response):
if not hasattr(request, 'timer'):
request.timer = time()
self.logger.info(
'%s %s %s %s [%s] (%.0f ms)',
request.META["SERVER_PROTOCOL"],
request.method,
request.get_full_path(),
response.status_code,
sizify(len(response.content)),
(time() - request.timer) * 1000.
)
return response
|
b4bdd8e20b82f8016030037712094f257af9221f
|
cinder/db/sqlalchemy/migrate_repo/versions/006_snapshots_add_provider_location.py
|
cinder/db/sqlalchemy/migrate_repo/versions/006_snapshots_add_provider_location.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column
from sqlalchemy import MetaData, String, Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
snapshots = Table('snapshots', meta, autoload=True)
provider_location = Column('provider_location', String(255))
snapshots.create_column(provider_location)
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
snapshots = Table('snapshots', meta, autoload=True)
provider_location = snapshots.columns.provider_location
provider_location.drop()
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column
from sqlalchemy import MetaData, String, Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
snapshots = Table('snapshots', meta, autoload=True)
provider_location = Column('provider_location', String(255))
snapshots.create_column(provider_location)
snapshots.update().values(provider_location=None).execute()
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
snapshots = Table('snapshots', meta, autoload=True)
provider_location = snapshots.columns.provider_location
snapshots.drop_column(provider_location)
|
Fix provider_location column add for PSQL
|
Fix provider_location column add for PSQL
Migration 006 (commit 690cae58e6bbac5758ea2f7b60774c797d28fba5)
didn't work properly for postgres,
this patch corrects the upgrade by ensuring the execute
is performed and the value is initialized to None.
Since we haven't released a milestone etc with this migration in the
code it should be safe to just fix it here and submit.
Change-Id: I10a09aed3470c35c8ebbe22f29aa511592167c35
|
Python
|
apache-2.0
|
nexusriot/cinder,github-borat/cinder,mahak/cinder,CloudServer/cinder,eharney/cinder,spring-week-topos/cinder-week,blueboxgroup/cinder,potsmaster/cinder,julianwang/cinder,github-borat/cinder,Datera/cinder,j-griffith/cinder,cloudbau/cinder,cloudbase/cinder,redhat-openstack/cinder,NeCTAR-RC/cinder,rakeshmi/cinder,abusse/cinder,winndows/cinder,abusse/cinder,dims/cinder,rickerc/cinder_audit,petrutlucian94/cinder,duhzecca/cinder,julianwang/cinder,ntt-sic/cinder,maelnor/cinder,apporc/cinder,nikesh-mahalka/cinder,tlakshman26/cinder-new-branch,phenoxim/cinder,nikesh-mahalka/cinder,JioCloud/cinder,rickerc/cinder_audit,winndows/cinder,Akrog/cinder,Paul-Ezell/cinder-1,Hybrid-Cloud/cinder,Paul-Ezell/cinder-1,alex8866/cinder,tlakshman26/cinder-https-changes,tlakshman26/cinder-bug-fix-volume-conversion-full,inkerra/cinder,dims/cinder,hguemar/cinder,scottdangelo/RemoveVolumeMangerLocks,j-griffith/cinder,leilihh/cinder,Thingee/cinder,takeshineshiro/cinder,Datera/cinder,bswartz/cinder,nexusriot/cinder,phenoxim/cinder,NetApp/cinder,tlakshman26/cinder-new-branch,openstack/cinder,Thingee/cinder,bswartz/cinder,mahak/cinder,hguemar/cinder,Thingee/cinder,ntt-sic/cinder,leilihh/cinder,saeki-masaki/cinder,blueboxgroup/cinder,ge0rgi/cinder,alex8866/cinder,scality/cinder,spring-week-topos/cinder-week,maelnor/cinder,petrutlucian94/cinder,potsmaster/cinder,Akrog/cinder,scottdangelo/RemoveVolumeMangerLocks,rakeshmi/cinder,Nexenta/cinder,redhat-openstack/cinder,duhzecca/cinder,Nexenta/cinder,NetApp/cinder,Accelerite/cinder,openstack/cinder,eharney/cinder,manojhirway/ExistingImagesOnNFS,JioCloud/cinder,manojhirway/ExistingImagesOnNFS,sasukeh/cinder,saeki-masaki/cinder,NeCTAR-RC/cinder,cloudbase/cinder,Accelerite/cinder,sasukeh/cinder,takeshineshiro/cinder,Hybrid-Cloud/cinder,CloudServer/cinder,tlakshman26/cinder-https-changes,inkerra/cinder,scality/cinder,tlakshman26/cinder-bug-fix-volume-conversion-full,cloudbau/cinder,apporc/cinder
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column
from sqlalchemy import MetaData, String, Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
snapshots = Table('snapshots', meta, autoload=True)
provider_location = Column('provider_location', String(255))
snapshots.create_column(provider_location)
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
snapshots = Table('snapshots', meta, autoload=True)
provider_location = snapshots.columns.provider_location
provider_location.drop()
Fix provider_location column add for PSQL
Migration 006 (commit 690cae58e6bbac5758ea2f7b60774c797d28fba5)
didn't work properly for postgres,
this patch corrects the upgrade by ensuring the execute
is performed and the value is initialized to None.
Since we haven't released a milestone etc with this migration in the
code it should be safe to just fix it here and submit.
Change-Id: I10a09aed3470c35c8ebbe22f29aa511592167c35
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column
from sqlalchemy import MetaData, String, Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
snapshots = Table('snapshots', meta, autoload=True)
provider_location = Column('provider_location', String(255))
snapshots.create_column(provider_location)
snapshots.update().values(provider_location=None).execute()
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
snapshots = Table('snapshots', meta, autoload=True)
provider_location = snapshots.columns.provider_location
snapshots.drop_column(provider_location)
|
<commit_before># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column
from sqlalchemy import MetaData, String, Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
snapshots = Table('snapshots', meta, autoload=True)
provider_location = Column('provider_location', String(255))
snapshots.create_column(provider_location)
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
snapshots = Table('snapshots', meta, autoload=True)
provider_location = snapshots.columns.provider_location
provider_location.drop()
<commit_msg>Fix provider_location column add for PSQL
Migration 006 (commit 690cae58e6bbac5758ea2f7b60774c797d28fba5)
didn't work properly for postgres,
this patch corrects the upgrade by ensuring the execute
is performed and the value is initialized to None.
Since we haven't released a milestone etc with this migration in the
code it should be safe to just fix it here and submit.
Change-Id: I10a09aed3470c35c8ebbe22f29aa511592167c35<commit_after>
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column
from sqlalchemy import MetaData, String, Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
snapshots = Table('snapshots', meta, autoload=True)
provider_location = Column('provider_location', String(255))
snapshots.create_column(provider_location)
snapshots.update().values(provider_location=None).execute()
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
snapshots = Table('snapshots', meta, autoload=True)
provider_location = snapshots.columns.provider_location
snapshots.drop_column(provider_location)
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column
from sqlalchemy import MetaData, String, Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
snapshots = Table('snapshots', meta, autoload=True)
provider_location = Column('provider_location', String(255))
snapshots.create_column(provider_location)
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
snapshots = Table('snapshots', meta, autoload=True)
provider_location = snapshots.columns.provider_location
provider_location.drop()
Fix provider_location column add for PSQL
Migration 006 (commit 690cae58e6bbac5758ea2f7b60774c797d28fba5)
didn't work properly for postgres,
this patch corrects the upgrade by ensuring the execute
is performed and the value is initialized to None.
Since we haven't released a milestone etc with this migration in the
code it should be safe to just fix it here and submit.
Change-Id: I10a09aed3470c35c8ebbe22f29aa511592167c35# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column
from sqlalchemy import MetaData, String, Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
snapshots = Table('snapshots', meta, autoload=True)
provider_location = Column('provider_location', String(255))
snapshots.create_column(provider_location)
snapshots.update().values(provider_location=None).execute()
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
snapshots = Table('snapshots', meta, autoload=True)
provider_location = snapshots.columns.provider_location
snapshots.drop_column(provider_location)
|
<commit_before># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column
from sqlalchemy import MetaData, String, Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
snapshots = Table('snapshots', meta, autoload=True)
provider_location = Column('provider_location', String(255))
snapshots.create_column(provider_location)
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
snapshots = Table('snapshots', meta, autoload=True)
provider_location = snapshots.columns.provider_location
provider_location.drop()
<commit_msg>Fix provider_location column add for PSQL
Migration 006 (commit 690cae58e6bbac5758ea2f7b60774c797d28fba5)
didn't work properly for postgres,
this patch corrects the upgrade by ensuring the execute
is performed and the value is initialized to None.
Since we haven't released a milestone etc with this migration in the
code it should be safe to just fix it here and submit.
Change-Id: I10a09aed3470c35c8ebbe22f29aa511592167c35<commit_after># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column
from sqlalchemy import MetaData, String, Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
snapshots = Table('snapshots', meta, autoload=True)
provider_location = Column('provider_location', String(255))
snapshots.create_column(provider_location)
snapshots.update().values(provider_location=None).execute()
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
snapshots = Table('snapshots', meta, autoload=True)
provider_location = snapshots.columns.provider_location
snapshots.drop_column(provider_location)
|
af479a26689619d070f82560286f5a399b661098
|
thecut/forms/utils.py
|
thecut/forms/utils.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
def add_css_class(widget, css_class):
css_classes = widget.attrs.get('class', '').split()
if not css_class in css_classes:
css_classes.append(css_class)
widget.attrs.update({'class': ' '.join(css_classes)})
return widget
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
def add_css_class(widget, css_class):
css_classes = widget.attrs.get('class', '').split()
if css_class not in css_classes:
css_classes.append(css_class)
widget.attrs.update({'class': ' '.join(css_classes)})
return widget
|
Make if statement flake8 compliant
|
Make if statement flake8 compliant
|
Python
|
apache-2.0
|
thecut/thecut-forms,thecut/thecut-forms
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
def add_css_class(widget, css_class):
css_classes = widget.attrs.get('class', '').split()
if not css_class in css_classes:
css_classes.append(css_class)
widget.attrs.update({'class': ' '.join(css_classes)})
return widget
Make if statement flake8 compliant
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
def add_css_class(widget, css_class):
css_classes = widget.attrs.get('class', '').split()
if css_class not in css_classes:
css_classes.append(css_class)
widget.attrs.update({'class': ' '.join(css_classes)})
return widget
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
def add_css_class(widget, css_class):
css_classes = widget.attrs.get('class', '').split()
if not css_class in css_classes:
css_classes.append(css_class)
widget.attrs.update({'class': ' '.join(css_classes)})
return widget
<commit_msg>Make if statement flake8 compliant<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
def add_css_class(widget, css_class):
css_classes = widget.attrs.get('class', '').split()
if css_class not in css_classes:
css_classes.append(css_class)
widget.attrs.update({'class': ' '.join(css_classes)})
return widget
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
def add_css_class(widget, css_class):
css_classes = widget.attrs.get('class', '').split()
if not css_class in css_classes:
css_classes.append(css_class)
widget.attrs.update({'class': ' '.join(css_classes)})
return widget
Make if statement flake8 compliant# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
def add_css_class(widget, css_class):
css_classes = widget.attrs.get('class', '').split()
if css_class not in css_classes:
css_classes.append(css_class)
widget.attrs.update({'class': ' '.join(css_classes)})
return widget
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
def add_css_class(widget, css_class):
css_classes = widget.attrs.get('class', '').split()
if not css_class in css_classes:
css_classes.append(css_class)
widget.attrs.update({'class': ' '.join(css_classes)})
return widget
<commit_msg>Make if statement flake8 compliant<commit_after># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
def add_css_class(widget, css_class):
css_classes = widget.attrs.get('class', '').split()
if css_class not in css_classes:
css_classes.append(css_class)
widget.attrs.update({'class': ' '.join(css_classes)})
return widget
|
fec6e5f05dfc9c6e5e7768de0cde246124aab072
|
pysellus/stock_integrations/terminal.py
|
pysellus/stock_integrations/terminal.py
|
import pprint
from pysellus.interfaces import AbstractIntegration
class TerminalIntegration(AbstractIntegration):
def on_next(self, message):
print('Assert error: in {test_name} -> {1}'.format(
message['test_name'],
message['expect_function']
))
print('Got:')
pprint.pprint(message['element'])
def on_error(self, error_message):
print('Runtime Error: In {0} -> {1}'.format(
error_message['test_name'],
error_message['expect_function']
))
print('Got:')
pprint.pprint(error_message['error'])
def on_completed(self):
print("All tests done.")
|
import pprint
from pysellus.interfaces import AbstractIntegration
class TerminalIntegration(AbstractIntegration):
def on_next(self, message):
print('Assert error: in {0} -> {1}'.format(
message['test_name'],
message['expect_function']
))
print('Got:')
pprint.pprint(message['element'])
def on_error(self, error_message):
print('Runtime Error: In {0} -> {1}'.format(
error_message['test_name'],
error_message['expect_function']
))
print('Got:')
pprint.pprint(error_message['error'])
def on_completed(self):
print("All tests done.")
|
Fix KeyError on Terminal integration
|
Fix KeyError on Terminal integration
|
Python
|
mit
|
angelsanz/pysellus,cgvarela/pysellus,ergl/pysellus,Pysellus/pysellus
|
import pprint
from pysellus.interfaces import AbstractIntegration
class TerminalIntegration(AbstractIntegration):
def on_next(self, message):
print('Assert error: in {test_name} -> {1}'.format(
message['test_name'],
message['expect_function']
))
print('Got:')
pprint.pprint(message['element'])
def on_error(self, error_message):
print('Runtime Error: In {0} -> {1}'.format(
error_message['test_name'],
error_message['expect_function']
))
print('Got:')
pprint.pprint(error_message['error'])
def on_completed(self):
print("All tests done.")
Fix KeyError on Terminal integration
|
import pprint
from pysellus.interfaces import AbstractIntegration
class TerminalIntegration(AbstractIntegration):
def on_next(self, message):
print('Assert error: in {0} -> {1}'.format(
message['test_name'],
message['expect_function']
))
print('Got:')
pprint.pprint(message['element'])
def on_error(self, error_message):
print('Runtime Error: In {0} -> {1}'.format(
error_message['test_name'],
error_message['expect_function']
))
print('Got:')
pprint.pprint(error_message['error'])
def on_completed(self):
print("All tests done.")
|
<commit_before>import pprint
from pysellus.interfaces import AbstractIntegration
class TerminalIntegration(AbstractIntegration):
def on_next(self, message):
print('Assert error: in {test_name} -> {1}'.format(
message['test_name'],
message['expect_function']
))
print('Got:')
pprint.pprint(message['element'])
def on_error(self, error_message):
print('Runtime Error: In {0} -> {1}'.format(
error_message['test_name'],
error_message['expect_function']
))
print('Got:')
pprint.pprint(error_message['error'])
def on_completed(self):
print("All tests done.")
<commit_msg>Fix KeyError on Terminal integration<commit_after>
|
import pprint
from pysellus.interfaces import AbstractIntegration
class TerminalIntegration(AbstractIntegration):
def on_next(self, message):
print('Assert error: in {0} -> {1}'.format(
message['test_name'],
message['expect_function']
))
print('Got:')
pprint.pprint(message['element'])
def on_error(self, error_message):
print('Runtime Error: In {0} -> {1}'.format(
error_message['test_name'],
error_message['expect_function']
))
print('Got:')
pprint.pprint(error_message['error'])
def on_completed(self):
print("All tests done.")
|
import pprint
from pysellus.interfaces import AbstractIntegration
class TerminalIntegration(AbstractIntegration):
def on_next(self, message):
print('Assert error: in {test_name} -> {1}'.format(
message['test_name'],
message['expect_function']
))
print('Got:')
pprint.pprint(message['element'])
def on_error(self, error_message):
print('Runtime Error: In {0} -> {1}'.format(
error_message['test_name'],
error_message['expect_function']
))
print('Got:')
pprint.pprint(error_message['error'])
def on_completed(self):
print("All tests done.")
Fix KeyError on Terminal integrationimport pprint
from pysellus.interfaces import AbstractIntegration
class TerminalIntegration(AbstractIntegration):
def on_next(self, message):
print('Assert error: in {0} -> {1}'.format(
message['test_name'],
message['expect_function']
))
print('Got:')
pprint.pprint(message['element'])
def on_error(self, error_message):
print('Runtime Error: In {0} -> {1}'.format(
error_message['test_name'],
error_message['expect_function']
))
print('Got:')
pprint.pprint(error_message['error'])
def on_completed(self):
print("All tests done.")
|
<commit_before>import pprint
from pysellus.interfaces import AbstractIntegration
class TerminalIntegration(AbstractIntegration):
def on_next(self, message):
print('Assert error: in {test_name} -> {1}'.format(
message['test_name'],
message['expect_function']
))
print('Got:')
pprint.pprint(message['element'])
def on_error(self, error_message):
print('Runtime Error: In {0} -> {1}'.format(
error_message['test_name'],
error_message['expect_function']
))
print('Got:')
pprint.pprint(error_message['error'])
def on_completed(self):
print("All tests done.")
<commit_msg>Fix KeyError on Terminal integration<commit_after>import pprint
from pysellus.interfaces import AbstractIntegration
class TerminalIntegration(AbstractIntegration):
def on_next(self, message):
print('Assert error: in {0} -> {1}'.format(
message['test_name'],
message['expect_function']
))
print('Got:')
pprint.pprint(message['element'])
def on_error(self, error_message):
print('Runtime Error: In {0} -> {1}'.format(
error_message['test_name'],
error_message['expect_function']
))
print('Got:')
pprint.pprint(error_message['error'])
def on_completed(self):
print("All tests done.")
|
8866de1785cc6961d2111f1e0f55b781a7de660d
|
_markerlib/__init__.py
|
_markerlib/__init__.py
|
"""Used by pkg_resources to interpret PEP 345 environment markers."""
from _markerlib.markers import default_environment, compile, interpret, as_function
|
"""Used by pkg_resources to interpret PEP 345 environment markers."""
from _markerlib.markers import default_environment, compile, interpret
|
Remove missing import (since b62968cd2666)
|
Remove missing import (since b62968cd2666)
--HG--
branch : distribute
extra : rebase_source : d1190f895d794dfcb838f7eb40a60ab07b8b309e
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
"""Used by pkg_resources to interpret PEP 345 environment markers."""
from _markerlib.markers import default_environment, compile, interpret, as_function
Remove missing import (since b62968cd2666)
--HG--
branch : distribute
extra : rebase_source : d1190f895d794dfcb838f7eb40a60ab07b8b309e
|
"""Used by pkg_resources to interpret PEP 345 environment markers."""
from _markerlib.markers import default_environment, compile, interpret
|
<commit_before>"""Used by pkg_resources to interpret PEP 345 environment markers."""
from _markerlib.markers import default_environment, compile, interpret, as_function
<commit_msg>Remove missing import (since b62968cd2666)
--HG--
branch : distribute
extra : rebase_source : d1190f895d794dfcb838f7eb40a60ab07b8b309e<commit_after>
|
"""Used by pkg_resources to interpret PEP 345 environment markers."""
from _markerlib.markers import default_environment, compile, interpret
|
"""Used by pkg_resources to interpret PEP 345 environment markers."""
from _markerlib.markers import default_environment, compile, interpret, as_function
Remove missing import (since b62968cd2666)
--HG--
branch : distribute
extra : rebase_source : d1190f895d794dfcb838f7eb40a60ab07b8b309e"""Used by pkg_resources to interpret PEP 345 environment markers."""
from _markerlib.markers import default_environment, compile, interpret
|
<commit_before>"""Used by pkg_resources to interpret PEP 345 environment markers."""
from _markerlib.markers import default_environment, compile, interpret, as_function
<commit_msg>Remove missing import (since b62968cd2666)
--HG--
branch : distribute
extra : rebase_source : d1190f895d794dfcb838f7eb40a60ab07b8b309e<commit_after>"""Used by pkg_resources to interpret PEP 345 environment markers."""
from _markerlib.markers import default_environment, compile, interpret
|
c45c86fb573bff8fc4c6470f5dfc83e27c638aa4
|
base/ajax.py
|
base/ajax.py
|
import json
from django.contrib.auth import get_user_model
from dajaxice.decorators import dajaxice_register
@dajaxice_register(method="GET")
def getuser(request, query):
User = get_user_model()
qs = User.objects.filter(username__icontains=query) |\
User.objects.filter(first_name__icontains=query) |\
User.objects.filter(last_name__icontains=query)
result = [
{'username': u.username,
'label': '%s (%s)' % (u.get_full_name(), u.username)}
for u in qs.distinct()]
return json.dumps(result)
|
import json
from django.contrib.auth import get_user_model
from dajaxice.decorators import dajaxice_register
@dajaxice_register(method="GET")
def getuser(request, query):
User = get_user_model()
qs = User.objects.filter(username__icontains=query) |\
User.objects.filter(first_name__icontains=query) |\
User.objects.filter(last_name__icontains=query)
qs = qs.exclude(id=request.user.id).exclude(id=-1)
result = [
{'username': u.username,
'label': '%s (%s)' % (u.get_full_name(), u.username)}
for u in qs.distinct()]
return json.dumps(result)
|
Exclude self and anonymous from message recipients in autocomplete.
|
Exclude self and anonymous from message recipients in autocomplete.
|
Python
|
bsd-3-clause
|
ugoertz/django-familio,ugoertz/django-familio,ugoertz/django-familio,ugoertz/django-familio
|
import json
from django.contrib.auth import get_user_model
from dajaxice.decorators import dajaxice_register
@dajaxice_register(method="GET")
def getuser(request, query):
User = get_user_model()
qs = User.objects.filter(username__icontains=query) |\
User.objects.filter(first_name__icontains=query) |\
User.objects.filter(last_name__icontains=query)
result = [
{'username': u.username,
'label': '%s (%s)' % (u.get_full_name(), u.username)}
for u in qs.distinct()]
return json.dumps(result)
Exclude self and anonymous from message recipients in autocomplete.
|
import json
from django.contrib.auth import get_user_model
from dajaxice.decorators import dajaxice_register
@dajaxice_register(method="GET")
def getuser(request, query):
User = get_user_model()
qs = User.objects.filter(username__icontains=query) |\
User.objects.filter(first_name__icontains=query) |\
User.objects.filter(last_name__icontains=query)
qs = qs.exclude(id=request.user.id).exclude(id=-1)
result = [
{'username': u.username,
'label': '%s (%s)' % (u.get_full_name(), u.username)}
for u in qs.distinct()]
return json.dumps(result)
|
<commit_before>import json
from django.contrib.auth import get_user_model
from dajaxice.decorators import dajaxice_register
@dajaxice_register(method="GET")
def getuser(request, query):
User = get_user_model()
qs = User.objects.filter(username__icontains=query) |\
User.objects.filter(first_name__icontains=query) |\
User.objects.filter(last_name__icontains=query)
result = [
{'username': u.username,
'label': '%s (%s)' % (u.get_full_name(), u.username)}
for u in qs.distinct()]
return json.dumps(result)
<commit_msg>Exclude self and anonymous from message recipients in autocomplete.<commit_after>
|
import json
from django.contrib.auth import get_user_model
from dajaxice.decorators import dajaxice_register
@dajaxice_register(method="GET")
def getuser(request, query):
User = get_user_model()
qs = User.objects.filter(username__icontains=query) |\
User.objects.filter(first_name__icontains=query) |\
User.objects.filter(last_name__icontains=query)
qs = qs.exclude(id=request.user.id).exclude(id=-1)
result = [
{'username': u.username,
'label': '%s (%s)' % (u.get_full_name(), u.username)}
for u in qs.distinct()]
return json.dumps(result)
|
import json
from django.contrib.auth import get_user_model
from dajaxice.decorators import dajaxice_register
@dajaxice_register(method="GET")
def getuser(request, query):
User = get_user_model()
qs = User.objects.filter(username__icontains=query) |\
User.objects.filter(first_name__icontains=query) |\
User.objects.filter(last_name__icontains=query)
result = [
{'username': u.username,
'label': '%s (%s)' % (u.get_full_name(), u.username)}
for u in qs.distinct()]
return json.dumps(result)
Exclude self and anonymous from message recipients in autocomplete.import json
from django.contrib.auth import get_user_model
from dajaxice.decorators import dajaxice_register
@dajaxice_register(method="GET")
def getuser(request, query):
User = get_user_model()
qs = User.objects.filter(username__icontains=query) |\
User.objects.filter(first_name__icontains=query) |\
User.objects.filter(last_name__icontains=query)
qs = qs.exclude(id=request.user.id).exclude(id=-1)
result = [
{'username': u.username,
'label': '%s (%s)' % (u.get_full_name(), u.username)}
for u in qs.distinct()]
return json.dumps(result)
|
<commit_before>import json
from django.contrib.auth import get_user_model
from dajaxice.decorators import dajaxice_register
@dajaxice_register(method="GET")
def getuser(request, query):
User = get_user_model()
qs = User.objects.filter(username__icontains=query) |\
User.objects.filter(first_name__icontains=query) |\
User.objects.filter(last_name__icontains=query)
result = [
{'username': u.username,
'label': '%s (%s)' % (u.get_full_name(), u.username)}
for u in qs.distinct()]
return json.dumps(result)
<commit_msg>Exclude self and anonymous from message recipients in autocomplete.<commit_after>import json
from django.contrib.auth import get_user_model
from dajaxice.decorators import dajaxice_register
@dajaxice_register(method="GET")
def getuser(request, query):
User = get_user_model()
qs = User.objects.filter(username__icontains=query) |\
User.objects.filter(first_name__icontains=query) |\
User.objects.filter(last_name__icontains=query)
qs = qs.exclude(id=request.user.id).exclude(id=-1)
result = [
{'username': u.username,
'label': '%s (%s)' % (u.get_full_name(), u.username)}
for u in qs.distinct()]
return json.dumps(result)
|
baf81686e7d7d681b3e5354f0ddaafc83b91a20c
|
examples/pypi.py
|
examples/pypi.py
|
#!/usr/bin/env python
# Search PyPI, the Python Package Index, and retrieve latest mechanize tarball.
# This is just to demonstrate mechanize: You should use easy_install to do
# this, not this silly script.
import sys
import os
import urlparse
import mechanize
def download_mechanize():
browser = mechanize.Browser(factory=mechanize.RobustFactory())
browser.set_handle_robots(False)
browser.open("http://pypi.python.org/pypi")
browser.follow_link(text="Package Index", nr=0)
browser.select_form(name="searchform")
browser.form["term"] = "mechanize"
browser.submit()
browser.follow_link(text_regex="mechanize-?(.*)")
link = browser.find_link(text_regex=r"\.tar\.gz")
filename = os.path.basename(urlparse.urlsplit(link.url).path)
if os.path.exists(filename):
sys.exit("%s already exists, not grabbing" % filename)
browser.retrieve(link.url, filename)
if __name__ == "__main__":
download_mechanize()
|
#!/usr/bin/env python
# Search PyPI, the Python Package Index, and retrieve latest mechanize tarball.
# This is just to demonstrate mechanize: You should use easy_install to do
# this, not this silly script.
import sys
import os
import urlparse
import mechanize
def download_mechanize():
browser = mechanize.Browser(factory=mechanize.RobustFactory())
browser.set_handle_robots(False)
browser.open("http://pypi.python.org/pypi")
browser.follow_link(text="Package Index", nr=0)
browser.select_form(name="searchform")
browser.form["term"] = "mechanize"
browser.submit()
browser.follow_link(text_regex="mechanize-?(.*)")
link = browser.find_link(text_regex=r"\.tar\.gz")
filename = os.path.basename(urlparse.urlsplit(link.url)[2])
if os.path.exists(filename):
sys.exit("%s already exists, not grabbing" % filename)
browser.retrieve(link.url, filename)
if __name__ == "__main__":
download_mechanize()
|
Fix a Python 2.4 incompatibility
|
Fix a Python 2.4 incompatibility
|
Python
|
bsd-3-clause
|
python-mechanize/mechanize,python-mechanize/mechanize
|
#!/usr/bin/env python
# Search PyPI, the Python Package Index, and retrieve latest mechanize tarball.
# This is just to demonstrate mechanize: You should use easy_install to do
# this, not this silly script.
import sys
import os
import urlparse
import mechanize
def download_mechanize():
browser = mechanize.Browser(factory=mechanize.RobustFactory())
browser.set_handle_robots(False)
browser.open("http://pypi.python.org/pypi")
browser.follow_link(text="Package Index", nr=0)
browser.select_form(name="searchform")
browser.form["term"] = "mechanize"
browser.submit()
browser.follow_link(text_regex="mechanize-?(.*)")
link = browser.find_link(text_regex=r"\.tar\.gz")
filename = os.path.basename(urlparse.urlsplit(link.url).path)
if os.path.exists(filename):
sys.exit("%s already exists, not grabbing" % filename)
browser.retrieve(link.url, filename)
if __name__ == "__main__":
download_mechanize()
Fix a Python 2.4 incompatibility
|
#!/usr/bin/env python
# Search PyPI, the Python Package Index, and retrieve latest mechanize tarball.
# This is just to demonstrate mechanize: You should use easy_install to do
# this, not this silly script.
import sys
import os
import urlparse
import mechanize
def download_mechanize():
browser = mechanize.Browser(factory=mechanize.RobustFactory())
browser.set_handle_robots(False)
browser.open("http://pypi.python.org/pypi")
browser.follow_link(text="Package Index", nr=0)
browser.select_form(name="searchform")
browser.form["term"] = "mechanize"
browser.submit()
browser.follow_link(text_regex="mechanize-?(.*)")
link = browser.find_link(text_regex=r"\.tar\.gz")
filename = os.path.basename(urlparse.urlsplit(link.url)[2])
if os.path.exists(filename):
sys.exit("%s already exists, not grabbing" % filename)
browser.retrieve(link.url, filename)
if __name__ == "__main__":
download_mechanize()
|
<commit_before>#!/usr/bin/env python
# Search PyPI, the Python Package Index, and retrieve latest mechanize tarball.
# This is just to demonstrate mechanize: You should use easy_install to do
# this, not this silly script.
import sys
import os
import urlparse
import mechanize
def download_mechanize():
browser = mechanize.Browser(factory=mechanize.RobustFactory())
browser.set_handle_robots(False)
browser.open("http://pypi.python.org/pypi")
browser.follow_link(text="Package Index", nr=0)
browser.select_form(name="searchform")
browser.form["term"] = "mechanize"
browser.submit()
browser.follow_link(text_regex="mechanize-?(.*)")
link = browser.find_link(text_regex=r"\.tar\.gz")
filename = os.path.basename(urlparse.urlsplit(link.url).path)
if os.path.exists(filename):
sys.exit("%s already exists, not grabbing" % filename)
browser.retrieve(link.url, filename)
if __name__ == "__main__":
download_mechanize()
<commit_msg>Fix a Python 2.4 incompatibility<commit_after>
|
#!/usr/bin/env python
# Search PyPI, the Python Package Index, and retrieve latest mechanize tarball.
# This is just to demonstrate mechanize: You should use easy_install to do
# this, not this silly script.
import sys
import os
import urlparse
import mechanize
def download_mechanize():
browser = mechanize.Browser(factory=mechanize.RobustFactory())
browser.set_handle_robots(False)
browser.open("http://pypi.python.org/pypi")
browser.follow_link(text="Package Index", nr=0)
browser.select_form(name="searchform")
browser.form["term"] = "mechanize"
browser.submit()
browser.follow_link(text_regex="mechanize-?(.*)")
link = browser.find_link(text_regex=r"\.tar\.gz")
filename = os.path.basename(urlparse.urlsplit(link.url)[2])
if os.path.exists(filename):
sys.exit("%s already exists, not grabbing" % filename)
browser.retrieve(link.url, filename)
if __name__ == "__main__":
download_mechanize()
|
#!/usr/bin/env python
# Search PyPI, the Python Package Index, and retrieve latest mechanize tarball.
# This is just to demonstrate mechanize: You should use easy_install to do
# this, not this silly script.
import sys
import os
import urlparse
import mechanize
def download_mechanize():
browser = mechanize.Browser(factory=mechanize.RobustFactory())
browser.set_handle_robots(False)
browser.open("http://pypi.python.org/pypi")
browser.follow_link(text="Package Index", nr=0)
browser.select_form(name="searchform")
browser.form["term"] = "mechanize"
browser.submit()
browser.follow_link(text_regex="mechanize-?(.*)")
link = browser.find_link(text_regex=r"\.tar\.gz")
filename = os.path.basename(urlparse.urlsplit(link.url).path)
if os.path.exists(filename):
sys.exit("%s already exists, not grabbing" % filename)
browser.retrieve(link.url, filename)
if __name__ == "__main__":
download_mechanize()
Fix a Python 2.4 incompatibility#!/usr/bin/env python
# Search PyPI, the Python Package Index, and retrieve latest mechanize tarball.
# This is just to demonstrate mechanize: You should use easy_install to do
# this, not this silly script.
import sys
import os
import urlparse
import mechanize
def download_mechanize():
browser = mechanize.Browser(factory=mechanize.RobustFactory())
browser.set_handle_robots(False)
browser.open("http://pypi.python.org/pypi")
browser.follow_link(text="Package Index", nr=0)
browser.select_form(name="searchform")
browser.form["term"] = "mechanize"
browser.submit()
browser.follow_link(text_regex="mechanize-?(.*)")
link = browser.find_link(text_regex=r"\.tar\.gz")
filename = os.path.basename(urlparse.urlsplit(link.url)[2])
if os.path.exists(filename):
sys.exit("%s already exists, not grabbing" % filename)
browser.retrieve(link.url, filename)
if __name__ == "__main__":
download_mechanize()
|
<commit_before>#!/usr/bin/env python
# Search PyPI, the Python Package Index, and retrieve latest mechanize tarball.
# This is just to demonstrate mechanize: You should use easy_install to do
# this, not this silly script.
import sys
import os
import urlparse
import mechanize
def download_mechanize():
browser = mechanize.Browser(factory=mechanize.RobustFactory())
browser.set_handle_robots(False)
browser.open("http://pypi.python.org/pypi")
browser.follow_link(text="Package Index", nr=0)
browser.select_form(name="searchform")
browser.form["term"] = "mechanize"
browser.submit()
browser.follow_link(text_regex="mechanize-?(.*)")
link = browser.find_link(text_regex=r"\.tar\.gz")
filename = os.path.basename(urlparse.urlsplit(link.url).path)
if os.path.exists(filename):
sys.exit("%s already exists, not grabbing" % filename)
browser.retrieve(link.url, filename)
if __name__ == "__main__":
download_mechanize()
<commit_msg>Fix a Python 2.4 incompatibility<commit_after>#!/usr/bin/env python
# Search PyPI, the Python Package Index, and retrieve latest mechanize tarball.
# This is just to demonstrate mechanize: You should use easy_install to do
# this, not this silly script.
import sys
import os
import urlparse
import mechanize
def download_mechanize():
browser = mechanize.Browser(factory=mechanize.RobustFactory())
browser.set_handle_robots(False)
browser.open("http://pypi.python.org/pypi")
browser.follow_link(text="Package Index", nr=0)
browser.select_form(name="searchform")
browser.form["term"] = "mechanize"
browser.submit()
browser.follow_link(text_regex="mechanize-?(.*)")
link = browser.find_link(text_regex=r"\.tar\.gz")
filename = os.path.basename(urlparse.urlsplit(link.url)[2])
if os.path.exists(filename):
sys.exit("%s already exists, not grabbing" % filename)
browser.retrieve(link.url, filename)
if __name__ == "__main__":
download_mechanize()
|
47e21119621f211b0cac47972f5ad7ca92ffd950
|
flaskext/urls.py
|
flaskext/urls.py
|
# -*- coding: utf-8 -*-
"""
flaskext.urls
~~~~~~~~~~~~~
A collection of URL-related functions for Flask applications.
:copyright: (c) 2010 by Steve Losh.
:license: MIT, see LICENSE for more details.
"""
|
# -*- coding: utf-8 -*-
"""
flaskext.urls
~~~~~~~~~~~~~
A collection of URL-related functions for Flask applications.
:copyright: (c) 2010 by Steve Losh.
:license: MIT, see LICENSE for more details.
"""
from flask import url_for
from werkzeug.routing import BuildError
def permalink(function):
def inner(*args, **kwargs):
endpoint, values = function(*args, **kwargs)
try:
return url_for(endpoint, **values)
except BuildError:
return
return inner
|
Add the actual functionality, lol.
|
Add the actual functionality, lol.
|
Python
|
mit
|
sjl/flask-urls,sjl/flask-urls
|
# -*- coding: utf-8 -*-
"""
flaskext.urls
~~~~~~~~~~~~~
A collection of URL-related functions for Flask applications.
:copyright: (c) 2010 by Steve Losh.
:license: MIT, see LICENSE for more details.
"""
Add the actual functionality, lol.
|
# -*- coding: utf-8 -*-
"""
flaskext.urls
~~~~~~~~~~~~~
A collection of URL-related functions for Flask applications.
:copyright: (c) 2010 by Steve Losh.
:license: MIT, see LICENSE for more details.
"""
from flask import url_for
from werkzeug.routing import BuildError
def permalink(function):
def inner(*args, **kwargs):
endpoint, values = function(*args, **kwargs)
try:
return url_for(endpoint, **values)
except BuildError:
return
return inner
|
<commit_before># -*- coding: utf-8 -*-
"""
flaskext.urls
~~~~~~~~~~~~~
A collection of URL-related functions for Flask applications.
:copyright: (c) 2010 by Steve Losh.
:license: MIT, see LICENSE for more details.
"""
<commit_msg>Add the actual functionality, lol.<commit_after>
|
# -*- coding: utf-8 -*-
"""
flaskext.urls
~~~~~~~~~~~~~
A collection of URL-related functions for Flask applications.
:copyright: (c) 2010 by Steve Losh.
:license: MIT, see LICENSE for more details.
"""
from flask import url_for
from werkzeug.routing import BuildError
def permalink(function):
def inner(*args, **kwargs):
endpoint, values = function(*args, **kwargs)
try:
return url_for(endpoint, **values)
except BuildError:
return
return inner
|
# -*- coding: utf-8 -*-
"""
flaskext.urls
~~~~~~~~~~~~~
A collection of URL-related functions for Flask applications.
:copyright: (c) 2010 by Steve Losh.
:license: MIT, see LICENSE for more details.
"""
Add the actual functionality, lol.# -*- coding: utf-8 -*-
"""
flaskext.urls
~~~~~~~~~~~~~
A collection of URL-related functions for Flask applications.
:copyright: (c) 2010 by Steve Losh.
:license: MIT, see LICENSE for more details.
"""
from flask import url_for
from werkzeug.routing import BuildError
def permalink(function):
def inner(*args, **kwargs):
endpoint, values = function(*args, **kwargs)
try:
return url_for(endpoint, **values)
except BuildError:
return
return inner
|
<commit_before># -*- coding: utf-8 -*-
"""
flaskext.urls
~~~~~~~~~~~~~
A collection of URL-related functions for Flask applications.
:copyright: (c) 2010 by Steve Losh.
:license: MIT, see LICENSE for more details.
"""
<commit_msg>Add the actual functionality, lol.<commit_after># -*- coding: utf-8 -*-
"""
flaskext.urls
~~~~~~~~~~~~~
A collection of URL-related functions for Flask applications.
:copyright: (c) 2010 by Steve Losh.
:license: MIT, see LICENSE for more details.
"""
from flask import url_for
from werkzeug.routing import BuildError
def permalink(function):
def inner(*args, **kwargs):
endpoint, values = function(*args, **kwargs)
try:
return url_for(endpoint, **values)
except BuildError:
return
return inner
|
a24b2b303c1cd5e9f43353d55cc6b9d07b37b7f4
|
ephemeral-cluster.py
|
ephemeral-cluster.py
|
#!/usr/bin/env python
import subprocess
import sys
import uuid
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and images after command completion (regardless of success or failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Starting ephemeral cluster: {0}\n'.format(project))
try:
sys.exit(subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:]))
finally:
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
|
#!/usr/bin/env python
import subprocess
import sys
import uuid
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and associated volumes after command completion (regardless of success or \
failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Setting up ephemeral cluster ({0})...\n'.format(project))
try:
subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:])
except subprocess.CalledProcessError as error:
raise SystemExit(error.returncode)
finally:
sys.stderr.write('\nCleaning up ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
|
Fix forwarding ephemeral cluster exit code.
|
Fix forwarding ephemeral cluster exit code.
Summary: Also improves logging a little bit.
Test Plan:
$ python ephemeral-cluster.py run --rm --entrypoint=bash pgshovel -c "exit 10"
$ test $? -eq 10
Reviewers: jeff, tail
Reviewed By: tail
Differential Revision: http://phabricator.local.disqus.net/D19564
|
Python
|
apache-2.0
|
fuziontech/pgshovel,disqus/pgshovel,fuziontech/pgshovel,fuziontech/pgshovel,disqus/pgshovel
|
#!/usr/bin/env python
import subprocess
import sys
import uuid
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and images after command completion (regardless of success or failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Starting ephemeral cluster: {0}\n'.format(project))
try:
sys.exit(subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:]))
finally:
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
Fix forwarding ephemeral cluster exit code.
Summary: Also improves logging a little bit.
Test Plan:
$ python ephemeral-cluster.py run --rm --entrypoint=bash pgshovel -c "exit 10"
$ test $? -eq 10
Reviewers: jeff, tail
Reviewed By: tail
Differential Revision: http://phabricator.local.disqus.net/D19564
|
#!/usr/bin/env python
import subprocess
import sys
import uuid
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and associated volumes after command completion (regardless of success or \
failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Setting up ephemeral cluster ({0})...\n'.format(project))
try:
subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:])
except subprocess.CalledProcessError as error:
raise SystemExit(error.returncode)
finally:
sys.stderr.write('\nCleaning up ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
|
<commit_before>#!/usr/bin/env python
import subprocess
import sys
import uuid
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and images after command completion (regardless of success or failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Starting ephemeral cluster: {0}\n'.format(project))
try:
sys.exit(subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:]))
finally:
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
<commit_msg>Fix forwarding ephemeral cluster exit code.
Summary: Also improves logging a little bit.
Test Plan:
$ python ephemeral-cluster.py run --rm --entrypoint=bash pgshovel -c "exit 10"
$ test $? -eq 10
Reviewers: jeff, tail
Reviewed By: tail
Differential Revision: http://phabricator.local.disqus.net/D19564<commit_after>
|
#!/usr/bin/env python
import subprocess
import sys
import uuid
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and associated volumes after command completion (regardless of success or \
failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Setting up ephemeral cluster ({0})...\n'.format(project))
try:
subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:])
except subprocess.CalledProcessError as error:
raise SystemExit(error.returncode)
finally:
sys.stderr.write('\nCleaning up ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
|
#!/usr/bin/env python
import subprocess
import sys
import uuid
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and images after command completion (regardless of success or failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Starting ephemeral cluster: {0}\n'.format(project))
try:
sys.exit(subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:]))
finally:
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
Fix forwarding ephemeral cluster exit code.
Summary: Also improves logging a little bit.
Test Plan:
$ python ephemeral-cluster.py run --rm --entrypoint=bash pgshovel -c "exit 10"
$ test $? -eq 10
Reviewers: jeff, tail
Reviewed By: tail
Differential Revision: http://phabricator.local.disqus.net/D19564#!/usr/bin/env python
import subprocess
import sys
import uuid
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and associated volumes after command completion (regardless of success or \
failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Setting up ephemeral cluster ({0})...\n'.format(project))
try:
subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:])
except subprocess.CalledProcessError as error:
raise SystemExit(error.returncode)
finally:
sys.stderr.write('\nCleaning up ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
|
<commit_before>#!/usr/bin/env python
import subprocess
import sys
import uuid
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and images after command completion (regardless of success or failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Starting ephemeral cluster: {0}\n'.format(project))
try:
sys.exit(subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:]))
finally:
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
<commit_msg>Fix forwarding ephemeral cluster exit code.
Summary: Also improves logging a little bit.
Test Plan:
$ python ephemeral-cluster.py run --rm --entrypoint=bash pgshovel -c "exit 10"
$ test $? -eq 10
Reviewers: jeff, tail
Reviewed By: tail
Differential Revision: http://phabricator.local.disqus.net/D19564<commit_after>#!/usr/bin/env python
import subprocess
import sys
import uuid
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and associated volumes after command completion (regardless of success or \
failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Setting up ephemeral cluster ({0})...\n'.format(project))
try:
subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:])
except subprocess.CalledProcessError as error:
raise SystemExit(error.returncode)
finally:
sys.stderr.write('\nCleaning up ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
|
ded80de3c276b57cd36d94ab393937289f772a25
|
django_prometheus/db/backends/postgresql/base.py
|
django_prometheus/db/backends/postgresql/base.py
|
import django
import psycopg2.extensions
from django_prometheus.db.common import DatabaseWrapperMixin, \
ExportingCursorWrapper
if django.VERSION >= (1, 9):
from django.db.backends.postgresql import base
else:
from django.db.backends.postgresql_psycopg2 import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
def get_connection_params(self):
conn_params = super(DatabaseWrapper, self).get_connection_params()
conn_params['cursor_factory'] = ExportingCursorWrapper(
psycopg2.extensions.cursor,
self.alias,
self.vendor,
)
return conn_params
def create_cursor(self, name=None):
# cursor_factory is a kwarg to connect() so restore create_cursor()'s
# default behavior
return base.DatabaseWrapper.create_cursor(self, name=name)
|
import django
import psycopg2.extensions
from django_prometheus.db.common import DatabaseWrapperMixin, \
ExportingCursorWrapper
if django.VERSION >= (1, 9):
from django.db.backends.postgresql import base
else:
from django.db.backends.postgresql_psycopg2 import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
def get_connection_params(self):
conn_params = super(DatabaseWrapper, self).get_connection_params()
conn_params['cursor_factory'] = ExportingCursorWrapper(
psycopg2.extensions.cursor,
self.alias,
self.vendor,
)
return conn_params
def create_cursor(self, name=None):
# cursor_factory is a kwarg to connect() so restore create_cursor()'s
# default behavior
if django.VERSION >= (1, 11, 0):
return base.DatabaseWrapper.create_cursor(self, name=name)
else:
return base.DatabaseWrapper.create_cursor(self)
|
Fix backwards compatibility for postgresql backend on Django 1.10 and earlier
|
Fix backwards compatibility for postgresql backend on Django 1.10 and earlier
|
Python
|
apache-2.0
|
korfuri/django-prometheus,obytes/django-prometheus,korfuri/django-prometheus,obytes/django-prometheus
|
import django
import psycopg2.extensions
from django_prometheus.db.common import DatabaseWrapperMixin, \
ExportingCursorWrapper
if django.VERSION >= (1, 9):
from django.db.backends.postgresql import base
else:
from django.db.backends.postgresql_psycopg2 import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
def get_connection_params(self):
conn_params = super(DatabaseWrapper, self).get_connection_params()
conn_params['cursor_factory'] = ExportingCursorWrapper(
psycopg2.extensions.cursor,
self.alias,
self.vendor,
)
return conn_params
def create_cursor(self, name=None):
# cursor_factory is a kwarg to connect() so restore create_cursor()'s
# default behavior
return base.DatabaseWrapper.create_cursor(self, name=name)
Fix backwards compatibility for postgresql backend on Django 1.10 and earlier
|
import django
import psycopg2.extensions
from django_prometheus.db.common import DatabaseWrapperMixin, \
ExportingCursorWrapper
if django.VERSION >= (1, 9):
from django.db.backends.postgresql import base
else:
from django.db.backends.postgresql_psycopg2 import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
def get_connection_params(self):
conn_params = super(DatabaseWrapper, self).get_connection_params()
conn_params['cursor_factory'] = ExportingCursorWrapper(
psycopg2.extensions.cursor,
self.alias,
self.vendor,
)
return conn_params
def create_cursor(self, name=None):
# cursor_factory is a kwarg to connect() so restore create_cursor()'s
# default behavior
if django.VERSION >= (1, 11, 0):
return base.DatabaseWrapper.create_cursor(self, name=name)
else:
return base.DatabaseWrapper.create_cursor(self)
|
<commit_before>import django
import psycopg2.extensions
from django_prometheus.db.common import DatabaseWrapperMixin, \
ExportingCursorWrapper
if django.VERSION >= (1, 9):
from django.db.backends.postgresql import base
else:
from django.db.backends.postgresql_psycopg2 import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
def get_connection_params(self):
conn_params = super(DatabaseWrapper, self).get_connection_params()
conn_params['cursor_factory'] = ExportingCursorWrapper(
psycopg2.extensions.cursor,
self.alias,
self.vendor,
)
return conn_params
def create_cursor(self, name=None):
# cursor_factory is a kwarg to connect() so restore create_cursor()'s
# default behavior
return base.DatabaseWrapper.create_cursor(self, name=name)
<commit_msg>Fix backwards compatibility for postgresql backend on Django 1.10 and earlier<commit_after>
|
import django
import psycopg2.extensions
from django_prometheus.db.common import DatabaseWrapperMixin, \
ExportingCursorWrapper
if django.VERSION >= (1, 9):
from django.db.backends.postgresql import base
else:
from django.db.backends.postgresql_psycopg2 import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
def get_connection_params(self):
conn_params = super(DatabaseWrapper, self).get_connection_params()
conn_params['cursor_factory'] = ExportingCursorWrapper(
psycopg2.extensions.cursor,
self.alias,
self.vendor,
)
return conn_params
def create_cursor(self, name=None):
# cursor_factory is a kwarg to connect() so restore create_cursor()'s
# default behavior
if django.VERSION >= (1, 11, 0):
return base.DatabaseWrapper.create_cursor(self, name=name)
else:
return base.DatabaseWrapper.create_cursor(self)
|
import django
import psycopg2.extensions
from django_prometheus.db.common import DatabaseWrapperMixin, \
ExportingCursorWrapper
if django.VERSION >= (1, 9):
from django.db.backends.postgresql import base
else:
from django.db.backends.postgresql_psycopg2 import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
def get_connection_params(self):
conn_params = super(DatabaseWrapper, self).get_connection_params()
conn_params['cursor_factory'] = ExportingCursorWrapper(
psycopg2.extensions.cursor,
self.alias,
self.vendor,
)
return conn_params
def create_cursor(self, name=None):
# cursor_factory is a kwarg to connect() so restore create_cursor()'s
# default behavior
return base.DatabaseWrapper.create_cursor(self, name=name)
Fix backwards compatibility for postgresql backend on Django 1.10 and earlierimport django
import psycopg2.extensions
from django_prometheus.db.common import DatabaseWrapperMixin, \
ExportingCursorWrapper
if django.VERSION >= (1, 9):
from django.db.backends.postgresql import base
else:
from django.db.backends.postgresql_psycopg2 import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
def get_connection_params(self):
conn_params = super(DatabaseWrapper, self).get_connection_params()
conn_params['cursor_factory'] = ExportingCursorWrapper(
psycopg2.extensions.cursor,
self.alias,
self.vendor,
)
return conn_params
def create_cursor(self, name=None):
# cursor_factory is a kwarg to connect() so restore create_cursor()'s
# default behavior
if django.VERSION >= (1, 11, 0):
return base.DatabaseWrapper.create_cursor(self, name=name)
else:
return base.DatabaseWrapper.create_cursor(self)
|
<commit_before>import django
import psycopg2.extensions
from django_prometheus.db.common import DatabaseWrapperMixin, \
ExportingCursorWrapper
if django.VERSION >= (1, 9):
from django.db.backends.postgresql import base
else:
from django.db.backends.postgresql_psycopg2 import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
def get_connection_params(self):
conn_params = super(DatabaseWrapper, self).get_connection_params()
conn_params['cursor_factory'] = ExportingCursorWrapper(
psycopg2.extensions.cursor,
self.alias,
self.vendor,
)
return conn_params
def create_cursor(self, name=None):
# cursor_factory is a kwarg to connect() so restore create_cursor()'s
# default behavior
return base.DatabaseWrapper.create_cursor(self, name=name)
<commit_msg>Fix backwards compatibility for postgresql backend on Django 1.10 and earlier<commit_after>import django
import psycopg2.extensions
from django_prometheus.db.common import DatabaseWrapperMixin, \
ExportingCursorWrapper
if django.VERSION >= (1, 9):
from django.db.backends.postgresql import base
else:
from django.db.backends.postgresql_psycopg2 import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
def get_connection_params(self):
conn_params = super(DatabaseWrapper, self).get_connection_params()
conn_params['cursor_factory'] = ExportingCursorWrapper(
psycopg2.extensions.cursor,
self.alias,
self.vendor,
)
return conn_params
def create_cursor(self, name=None):
# cursor_factory is a kwarg to connect() so restore create_cursor()'s
# default behavior
if django.VERSION >= (1, 11, 0):
return base.DatabaseWrapper.create_cursor(self, name=name)
else:
return base.DatabaseWrapper.create_cursor(self)
|
128506f0e21bff78ab3612602b17eb13658e837d
|
utils/clear_redis.py
|
utils/clear_redis.py
|
"""Utility for clearing all keys out of redis -- do not use in production!"""
import sys
from optparse import OptionParser
import redis
def option_parser():
parser = OptionParser()
parser.add_option("-d", "--db",
type="int", dest="db", default=1,
help="Redis DB to clear.")
parser.add_option("-f", "--force",
action="store_true", dest="force", default=False,
help="Don't ask for confirmation.")
return parser
def main():
parser = option_parser()
options, args = parser.parse_args()
if args:
parser.print_help()
return 1
if not options.force:
confirm = raw_input("About to delete ALL redis keys. "
"Press Y to confirm, N to exit: ")
if confirm.lower() != 'y':
return 1
r_server = redis.Redis(db=options.db)
keys = r_server.keys()
for key in keys:
r_server.delete(key)
print "Deleted %i keys." % len(keys)
return 0
if __name__ == "__main__":
sys.exit(main())
|
"""Utility for clearing all keys out of redis -- do not use in production!"""
import sys
from optparse import OptionParser
import redis
def option_parser():
parser = OptionParser()
parser.add_option("-d", "--db",
type="int", dest="db", default=1,
help="Redis DB to clear.")
parser.add_option("-f", "--force",
action="store_true", dest="force", default=False,
help="Don't ask for confirmation.")
return parser
def main():
parser = option_parser()
options, args = parser.parse_args()
if args:
parser.print_help()
return 1
if not options.force:
confirm = raw_input("About to delete ALL redis keys in db %d. "
"Press Y to confirm, N to exit: " %
(options.db,))
if confirm.lower() != 'y':
return 1
r_server = redis.Redis(db=options.db)
keys = r_server.keys()
for key in keys:
r_server.delete(key)
print "Deleted %i keys." % len(keys)
return 0
if __name__ == "__main__":
sys.exit(main())
|
Print which DB will be cleared.
|
Print which DB will be cleared.
|
Python
|
bsd-3-clause
|
vishwaprakashmishra/xmatrix,TouK/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi,harrissoerja/vumi,TouK/vumi,TouK/vumi,harrissoerja/vumi,vishwaprakashmishra/xmatrix
|
"""Utility for clearing all keys out of redis -- do not use in production!"""
import sys
from optparse import OptionParser
import redis
def option_parser():
parser = OptionParser()
parser.add_option("-d", "--db",
type="int", dest="db", default=1,
help="Redis DB to clear.")
parser.add_option("-f", "--force",
action="store_true", dest="force", default=False,
help="Don't ask for confirmation.")
return parser
def main():
parser = option_parser()
options, args = parser.parse_args()
if args:
parser.print_help()
return 1
if not options.force:
confirm = raw_input("About to delete ALL redis keys. "
"Press Y to confirm, N to exit: ")
if confirm.lower() != 'y':
return 1
r_server = redis.Redis(db=options.db)
keys = r_server.keys()
for key in keys:
r_server.delete(key)
print "Deleted %i keys." % len(keys)
return 0
if __name__ == "__main__":
sys.exit(main())
Print which DB will be cleared.
|
"""Utility for clearing all keys out of redis -- do not use in production!"""
import sys
from optparse import OptionParser
import redis
def option_parser():
parser = OptionParser()
parser.add_option("-d", "--db",
type="int", dest="db", default=1,
help="Redis DB to clear.")
parser.add_option("-f", "--force",
action="store_true", dest="force", default=False,
help="Don't ask for confirmation.")
return parser
def main():
parser = option_parser()
options, args = parser.parse_args()
if args:
parser.print_help()
return 1
if not options.force:
confirm = raw_input("About to delete ALL redis keys in db %d. "
"Press Y to confirm, N to exit: " %
(options.db,))
if confirm.lower() != 'y':
return 1
r_server = redis.Redis(db=options.db)
keys = r_server.keys()
for key in keys:
r_server.delete(key)
print "Deleted %i keys." % len(keys)
return 0
if __name__ == "__main__":
sys.exit(main())
|
<commit_before>"""Utility for clearing all keys out of redis -- do not use in production!"""
import sys
from optparse import OptionParser
import redis
def option_parser():
parser = OptionParser()
parser.add_option("-d", "--db",
type="int", dest="db", default=1,
help="Redis DB to clear.")
parser.add_option("-f", "--force",
action="store_true", dest="force", default=False,
help="Don't ask for confirmation.")
return parser
def main():
parser = option_parser()
options, args = parser.parse_args()
if args:
parser.print_help()
return 1
if not options.force:
confirm = raw_input("About to delete ALL redis keys. "
"Press Y to confirm, N to exit: ")
if confirm.lower() != 'y':
return 1
r_server = redis.Redis(db=options.db)
keys = r_server.keys()
for key in keys:
r_server.delete(key)
print "Deleted %i keys." % len(keys)
return 0
if __name__ == "__main__":
sys.exit(main())
<commit_msg>Print which DB will be cleared.<commit_after>
|
"""Utility for clearing all keys out of redis -- do not use in production!"""
import sys
from optparse import OptionParser
import redis
def option_parser():
parser = OptionParser()
parser.add_option("-d", "--db",
type="int", dest="db", default=1,
help="Redis DB to clear.")
parser.add_option("-f", "--force",
action="store_true", dest="force", default=False,
help="Don't ask for confirmation.")
return parser
def main():
parser = option_parser()
options, args = parser.parse_args()
if args:
parser.print_help()
return 1
if not options.force:
confirm = raw_input("About to delete ALL redis keys in db %d. "
"Press Y to confirm, N to exit: " %
(options.db,))
if confirm.lower() != 'y':
return 1
r_server = redis.Redis(db=options.db)
keys = r_server.keys()
for key in keys:
r_server.delete(key)
print "Deleted %i keys." % len(keys)
return 0
if __name__ == "__main__":
sys.exit(main())
|
"""Utility for clearing all keys out of redis -- do not use in production!"""
import sys
from optparse import OptionParser
import redis
def option_parser():
parser = OptionParser()
parser.add_option("-d", "--db",
type="int", dest="db", default=1,
help="Redis DB to clear.")
parser.add_option("-f", "--force",
action="store_true", dest="force", default=False,
help="Don't ask for confirmation.")
return parser
def main():
parser = option_parser()
options, args = parser.parse_args()
if args:
parser.print_help()
return 1
if not options.force:
confirm = raw_input("About to delete ALL redis keys. "
"Press Y to confirm, N to exit: ")
if confirm.lower() != 'y':
return 1
r_server = redis.Redis(db=options.db)
keys = r_server.keys()
for key in keys:
r_server.delete(key)
print "Deleted %i keys." % len(keys)
return 0
if __name__ == "__main__":
sys.exit(main())
Print which DB will be cleared."""Utility for clearing all keys out of redis -- do not use in production!"""
import sys
from optparse import OptionParser
import redis
def option_parser():
parser = OptionParser()
parser.add_option("-d", "--db",
type="int", dest="db", default=1,
help="Redis DB to clear.")
parser.add_option("-f", "--force",
action="store_true", dest="force", default=False,
help="Don't ask for confirmation.")
return parser
def main():
parser = option_parser()
options, args = parser.parse_args()
if args:
parser.print_help()
return 1
if not options.force:
confirm = raw_input("About to delete ALL redis keys in db %d. "
"Press Y to confirm, N to exit: " %
(options.db,))
if confirm.lower() != 'y':
return 1
r_server = redis.Redis(db=options.db)
keys = r_server.keys()
for key in keys:
r_server.delete(key)
print "Deleted %i keys." % len(keys)
return 0
if __name__ == "__main__":
sys.exit(main())
|
<commit_before>"""Utility for clearing all keys out of redis -- do not use in production!"""
import sys
from optparse import OptionParser
import redis
def option_parser():
parser = OptionParser()
parser.add_option("-d", "--db",
type="int", dest="db", default=1,
help="Redis DB to clear.")
parser.add_option("-f", "--force",
action="store_true", dest="force", default=False,
help="Don't ask for confirmation.")
return parser
def main():
parser = option_parser()
options, args = parser.parse_args()
if args:
parser.print_help()
return 1
if not options.force:
confirm = raw_input("About to delete ALL redis keys. "
"Press Y to confirm, N to exit: ")
if confirm.lower() != 'y':
return 1
r_server = redis.Redis(db=options.db)
keys = r_server.keys()
for key in keys:
r_server.delete(key)
print "Deleted %i keys." % len(keys)
return 0
if __name__ == "__main__":
sys.exit(main())
<commit_msg>Print which DB will be cleared.<commit_after>"""Utility for clearing all keys out of redis -- do not use in production!"""
import sys
from optparse import OptionParser
import redis
def option_parser():
parser = OptionParser()
parser.add_option("-d", "--db",
type="int", dest="db", default=1,
help="Redis DB to clear.")
parser.add_option("-f", "--force",
action="store_true", dest="force", default=False,
help="Don't ask for confirmation.")
return parser
def main():
parser = option_parser()
options, args = parser.parse_args()
if args:
parser.print_help()
return 1
if not options.force:
confirm = raw_input("About to delete ALL redis keys in db %d. "
"Press Y to confirm, N to exit: " %
(options.db,))
if confirm.lower() != 'y':
return 1
r_server = redis.Redis(db=options.db)
keys = r_server.keys()
for key in keys:
r_server.delete(key)
print "Deleted %i keys." % len(keys)
return 0
if __name__ == "__main__":
sys.exit(main())
|
068b7f8886af9c468f20b41efd76817bfc237f69
|
serrano/__init__.py
|
serrano/__init__.py
|
__version_info__ = {
'major': 2,
'minor': 0,
'micro': 19,
'releaselevel': 'beta',
'serial': 1
}
def get_version(short=False):
assert __version_info__['releaselevel'] in ('alpha', 'beta', 'final')
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final' and not short:
vers.append('%s%i' % (__version_info__['releaselevel'][0], __version_info__['serial']))
return ''.join(vers)
__version__ = get_version()
|
__version_info__ = {
'major': 2,
'minor': 1,
'micro': 0,
'releaselevel': 'beta',
'serial': 1
}
def get_version(short=False):
assert __version_info__['releaselevel'] in ('alpha', 'beta', 'final')
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final' and not short:
vers.append('%s%i' % (__version_info__['releaselevel'][0], __version_info__['serial']))
return ''.join(vers)
__version__ = get_version()
|
Update branch to version 2.1.0 beta
|
Update branch to version 2.1.0 beta
|
Python
|
bsd-2-clause
|
chop-dbhi/serrano,rv816/serrano_night,rv816/serrano_night,chop-dbhi/serrano
|
__version_info__ = {
'major': 2,
'minor': 0,
'micro': 19,
'releaselevel': 'beta',
'serial': 1
}
def get_version(short=False):
assert __version_info__['releaselevel'] in ('alpha', 'beta', 'final')
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final' and not short:
vers.append('%s%i' % (__version_info__['releaselevel'][0], __version_info__['serial']))
return ''.join(vers)
__version__ = get_version()
Update branch to version 2.1.0 beta
|
__version_info__ = {
'major': 2,
'minor': 1,
'micro': 0,
'releaselevel': 'beta',
'serial': 1
}
def get_version(short=False):
assert __version_info__['releaselevel'] in ('alpha', 'beta', 'final')
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final' and not short:
vers.append('%s%i' % (__version_info__['releaselevel'][0], __version_info__['serial']))
return ''.join(vers)
__version__ = get_version()
|
<commit_before>__version_info__ = {
'major': 2,
'minor': 0,
'micro': 19,
'releaselevel': 'beta',
'serial': 1
}
def get_version(short=False):
assert __version_info__['releaselevel'] in ('alpha', 'beta', 'final')
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final' and not short:
vers.append('%s%i' % (__version_info__['releaselevel'][0], __version_info__['serial']))
return ''.join(vers)
__version__ = get_version()
<commit_msg>Update branch to version 2.1.0 beta<commit_after>
|
__version_info__ = {
'major': 2,
'minor': 1,
'micro': 0,
'releaselevel': 'beta',
'serial': 1
}
def get_version(short=False):
assert __version_info__['releaselevel'] in ('alpha', 'beta', 'final')
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final' and not short:
vers.append('%s%i' % (__version_info__['releaselevel'][0], __version_info__['serial']))
return ''.join(vers)
__version__ = get_version()
|
__version_info__ = {
'major': 2,
'minor': 0,
'micro': 19,
'releaselevel': 'beta',
'serial': 1
}
def get_version(short=False):
assert __version_info__['releaselevel'] in ('alpha', 'beta', 'final')
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final' and not short:
vers.append('%s%i' % (__version_info__['releaselevel'][0], __version_info__['serial']))
return ''.join(vers)
__version__ = get_version()
Update branch to version 2.1.0 beta__version_info__ = {
'major': 2,
'minor': 1,
'micro': 0,
'releaselevel': 'beta',
'serial': 1
}
def get_version(short=False):
assert __version_info__['releaselevel'] in ('alpha', 'beta', 'final')
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final' and not short:
vers.append('%s%i' % (__version_info__['releaselevel'][0], __version_info__['serial']))
return ''.join(vers)
__version__ = get_version()
|
<commit_before>__version_info__ = {
'major': 2,
'minor': 0,
'micro': 19,
'releaselevel': 'beta',
'serial': 1
}
def get_version(short=False):
assert __version_info__['releaselevel'] in ('alpha', 'beta', 'final')
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final' and not short:
vers.append('%s%i' % (__version_info__['releaselevel'][0], __version_info__['serial']))
return ''.join(vers)
__version__ = get_version()
<commit_msg>Update branch to version 2.1.0 beta<commit_after>__version_info__ = {
'major': 2,
'minor': 1,
'micro': 0,
'releaselevel': 'beta',
'serial': 1
}
def get_version(short=False):
assert __version_info__['releaselevel'] in ('alpha', 'beta', 'final')
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final' and not short:
vers.append('%s%i' % (__version_info__['releaselevel'][0], __version_info__['serial']))
return ''.join(vers)
__version__ = get_version()
|
4bc9d1b51cd735c366edce81cd4e36e2eca904c7
|
worker/models/spotify_artist.py
|
worker/models/spotify_artist.py
|
from spotify_item import SpotifyItem
class Artist(SpotifyItem):
def __init__(self, **entries):
super(Artist, self).__init__(**entries)
def __repr__(self):
return '<Artist: {0}>'.format(self.name)
|
from spotify_item import SpotifyItem
from pyechonest import config
from pyechonest import artist
from worker.config import ECHO_NEST_API_KEY
config.ECHO_NEST_API_KEY = ECHO_NEST_API_KEY
class Artist(SpotifyItem):
def __init__(self, **entries):
super(Artist, self).__init__(**entries)
self.echonest = artist.Artist(self.uri)
def __repr__(self):
return '<Artist: {0}>'.format(self.name)
|
Add echo nest to artist model
|
Add echo nest to artist model
|
Python
|
mit
|
projectweekend/song-feed-worker
|
from spotify_item import SpotifyItem
class Artist(SpotifyItem):
def __init__(self, **entries):
super(Artist, self).__init__(**entries)
def __repr__(self):
return '<Artist: {0}>'.format(self.name)
Add echo nest to artist model
|
from spotify_item import SpotifyItem
from pyechonest import config
from pyechonest import artist
from worker.config import ECHO_NEST_API_KEY
config.ECHO_NEST_API_KEY = ECHO_NEST_API_KEY
class Artist(SpotifyItem):
def __init__(self, **entries):
super(Artist, self).__init__(**entries)
self.echonest = artist.Artist(self.uri)
def __repr__(self):
return '<Artist: {0}>'.format(self.name)
|
<commit_before>from spotify_item import SpotifyItem
class Artist(SpotifyItem):
def __init__(self, **entries):
super(Artist, self).__init__(**entries)
def __repr__(self):
return '<Artist: {0}>'.format(self.name)
<commit_msg>Add echo nest to artist model<commit_after>
|
from spotify_item import SpotifyItem
from pyechonest import config
from pyechonest import artist
from worker.config import ECHO_NEST_API_KEY
config.ECHO_NEST_API_KEY = ECHO_NEST_API_KEY
class Artist(SpotifyItem):
def __init__(self, **entries):
super(Artist, self).__init__(**entries)
self.echonest = artist.Artist(self.uri)
def __repr__(self):
return '<Artist: {0}>'.format(self.name)
|
from spotify_item import SpotifyItem
class Artist(SpotifyItem):
def __init__(self, **entries):
super(Artist, self).__init__(**entries)
def __repr__(self):
return '<Artist: {0}>'.format(self.name)
Add echo nest to artist modelfrom spotify_item import SpotifyItem
from pyechonest import config
from pyechonest import artist
from worker.config import ECHO_NEST_API_KEY
config.ECHO_NEST_API_KEY = ECHO_NEST_API_KEY
class Artist(SpotifyItem):
def __init__(self, **entries):
super(Artist, self).__init__(**entries)
self.echonest = artist.Artist(self.uri)
def __repr__(self):
return '<Artist: {0}>'.format(self.name)
|
<commit_before>from spotify_item import SpotifyItem
class Artist(SpotifyItem):
def __init__(self, **entries):
super(Artist, self).__init__(**entries)
def __repr__(self):
return '<Artist: {0}>'.format(self.name)
<commit_msg>Add echo nest to artist model<commit_after>from spotify_item import SpotifyItem
from pyechonest import config
from pyechonest import artist
from worker.config import ECHO_NEST_API_KEY
config.ECHO_NEST_API_KEY = ECHO_NEST_API_KEY
class Artist(SpotifyItem):
def __init__(self, **entries):
super(Artist, self).__init__(**entries)
self.echonest = artist.Artist(self.uri)
def __repr__(self):
return '<Artist: {0}>'.format(self.name)
|
9a04a0506f9ebc0d4cca5fa7a77d4b86f88ffea4
|
src/tests/test_PolicyIterationModified.py
|
src/tests/test_PolicyIterationModified.py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 11 13:33:16 2013
@author: steve
"""
from nose.tools import assert_equal
import numpy as np
import scipy.sparse as sp
from mdptoolbox import mdp
from utils import BaseTestMDP, assert_sequence_almost_equal
class TestPolicyIterationModified(BaseTestMDP):
def test_small(self):
pim = mdp.PolicyIterationModified(self.small_P, self.small_R, 0.9)
pim.run()
assert_sequence_almost_equal(pim.V,
(41.8656419239403, 35.4702797722819))
assert_equal(pim.policy, (1, 0))
def test_small_undiscounted(self):
pim = mdp.PolicyIterationModified(self.small_P, self.small_R, 1)
pim.run()
assert_equal(pim.policy, (1, 0))
|
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 11 13:33:16 2013
@author: steve
"""
from nose.tools import assert_equal
import numpy as np
import scipy.sparse as sp
from mdptoolbox import mdp
from .utils import BaseTestMDP, assert_sequence_almost_equal
class TestPolicyIterationModified(BaseTestMDP):
def test_small(self):
pim = mdp.PolicyIterationModified(self.small_P, self.small_R, 0.9)
pim.run()
assert_sequence_almost_equal(pim.V,
(41.8656419239403, 35.4702797722819))
assert_equal(pim.policy, (1, 0))
def test_small_undiscounted(self):
pim = mdp.PolicyIterationModified(self.small_P, self.small_R, 1)
pim.run()
assert_equal(pim.policy, (1, 0))
|
Use Python 3 .import syntax
|
[tests] Use Python 3 .import syntax
|
Python
|
bsd-3-clause
|
silgon/pymdptoolbox,yasserglez/pymdptoolbox,silgon/pymdptoolbox,sawcordwell/pymdptoolbox,McCabeJM/pymdptoolbox,sawcordwell/pymdptoolbox,McCabeJM/pymdptoolbox,yasserglez/pymdptoolbox
|
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 11 13:33:16 2013
@author: steve
"""
from nose.tools import assert_equal
import numpy as np
import scipy.sparse as sp
from mdptoolbox import mdp
from utils import BaseTestMDP, assert_sequence_almost_equal
class TestPolicyIterationModified(BaseTestMDP):
def test_small(self):
pim = mdp.PolicyIterationModified(self.small_P, self.small_R, 0.9)
pim.run()
assert_sequence_almost_equal(pim.V,
(41.8656419239403, 35.4702797722819))
assert_equal(pim.policy, (1, 0))
def test_small_undiscounted(self):
pim = mdp.PolicyIterationModified(self.small_P, self.small_R, 1)
pim.run()
assert_equal(pim.policy, (1, 0))
[tests] Use Python 3 .import syntax
|
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 11 13:33:16 2013
@author: steve
"""
from nose.tools import assert_equal
import numpy as np
import scipy.sparse as sp
from mdptoolbox import mdp
from .utils import BaseTestMDP, assert_sequence_almost_equal
class TestPolicyIterationModified(BaseTestMDP):
def test_small(self):
pim = mdp.PolicyIterationModified(self.small_P, self.small_R, 0.9)
pim.run()
assert_sequence_almost_equal(pim.V,
(41.8656419239403, 35.4702797722819))
assert_equal(pim.policy, (1, 0))
def test_small_undiscounted(self):
pim = mdp.PolicyIterationModified(self.small_P, self.small_R, 1)
pim.run()
assert_equal(pim.policy, (1, 0))
|
<commit_before># -*- coding: utf-8 -*-
"""
Created on Wed Sep 11 13:33:16 2013
@author: steve
"""
from nose.tools import assert_equal
import numpy as np
import scipy.sparse as sp
from mdptoolbox import mdp
from utils import BaseTestMDP, assert_sequence_almost_equal
class TestPolicyIterationModified(BaseTestMDP):
def test_small(self):
pim = mdp.PolicyIterationModified(self.small_P, self.small_R, 0.9)
pim.run()
assert_sequence_almost_equal(pim.V,
(41.8656419239403, 35.4702797722819))
assert_equal(pim.policy, (1, 0))
def test_small_undiscounted(self):
pim = mdp.PolicyIterationModified(self.small_P, self.small_R, 1)
pim.run()
assert_equal(pim.policy, (1, 0))
<commit_msg>[tests] Use Python 3 .import syntax<commit_after>
|
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 11 13:33:16 2013
@author: steve
"""
from nose.tools import assert_equal
import numpy as np
import scipy.sparse as sp
from mdptoolbox import mdp
from .utils import BaseTestMDP, assert_sequence_almost_equal
class TestPolicyIterationModified(BaseTestMDP):
def test_small(self):
pim = mdp.PolicyIterationModified(self.small_P, self.small_R, 0.9)
pim.run()
assert_sequence_almost_equal(pim.V,
(41.8656419239403, 35.4702797722819))
assert_equal(pim.policy, (1, 0))
def test_small_undiscounted(self):
pim = mdp.PolicyIterationModified(self.small_P, self.small_R, 1)
pim.run()
assert_equal(pim.policy, (1, 0))
|
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 11 13:33:16 2013
@author: steve
"""
from nose.tools import assert_equal
import numpy as np
import scipy.sparse as sp
from mdptoolbox import mdp
from utils import BaseTestMDP, assert_sequence_almost_equal
class TestPolicyIterationModified(BaseTestMDP):
def test_small(self):
pim = mdp.PolicyIterationModified(self.small_P, self.small_R, 0.9)
pim.run()
assert_sequence_almost_equal(pim.V,
(41.8656419239403, 35.4702797722819))
assert_equal(pim.policy, (1, 0))
def test_small_undiscounted(self):
pim = mdp.PolicyIterationModified(self.small_P, self.small_R, 1)
pim.run()
assert_equal(pim.policy, (1, 0))
[tests] Use Python 3 .import syntax# -*- coding: utf-8 -*-
"""
Created on Wed Sep 11 13:33:16 2013
@author: steve
"""
from nose.tools import assert_equal
import numpy as np
import scipy.sparse as sp
from mdptoolbox import mdp
from .utils import BaseTestMDP, assert_sequence_almost_equal
class TestPolicyIterationModified(BaseTestMDP):
def test_small(self):
pim = mdp.PolicyIterationModified(self.small_P, self.small_R, 0.9)
pim.run()
assert_sequence_almost_equal(pim.V,
(41.8656419239403, 35.4702797722819))
assert_equal(pim.policy, (1, 0))
def test_small_undiscounted(self):
pim = mdp.PolicyIterationModified(self.small_P, self.small_R, 1)
pim.run()
assert_equal(pim.policy, (1, 0))
|
<commit_before># -*- coding: utf-8 -*-
"""
Created on Wed Sep 11 13:33:16 2013
@author: steve
"""
from nose.tools import assert_equal
import numpy as np
import scipy.sparse as sp
from mdptoolbox import mdp
from utils import BaseTestMDP, assert_sequence_almost_equal
class TestPolicyIterationModified(BaseTestMDP):
def test_small(self):
pim = mdp.PolicyIterationModified(self.small_P, self.small_R, 0.9)
pim.run()
assert_sequence_almost_equal(pim.V,
(41.8656419239403, 35.4702797722819))
assert_equal(pim.policy, (1, 0))
def test_small_undiscounted(self):
pim = mdp.PolicyIterationModified(self.small_P, self.small_R, 1)
pim.run()
assert_equal(pim.policy, (1, 0))
<commit_msg>[tests] Use Python 3 .import syntax<commit_after># -*- coding: utf-8 -*-
"""
Created on Wed Sep 11 13:33:16 2013
@author: steve
"""
from nose.tools import assert_equal
import numpy as np
import scipy.sparse as sp
from mdptoolbox import mdp
from .utils import BaseTestMDP, assert_sequence_almost_equal
class TestPolicyIterationModified(BaseTestMDP):
def test_small(self):
pim = mdp.PolicyIterationModified(self.small_P, self.small_R, 0.9)
pim.run()
assert_sequence_almost_equal(pim.V,
(41.8656419239403, 35.4702797722819))
assert_equal(pim.policy, (1, 0))
def test_small_undiscounted(self):
pim = mdp.PolicyIterationModified(self.small_P, self.small_R, 1)
pim.run()
assert_equal(pim.policy, (1, 0))
|
964125fd5871179c51ea24af0a3767ce88431c26
|
modules/bibharvest/lib/oai_repository_config.py
|
modules/bibharvest/lib/oai_repository_config.py
|
## $Id$
##
## This file is part of CDS Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006 CERN.
##
## CDS Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## CDS Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with CDS Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""OAI repository config"""
from invenio.config import cfg_oai_id_prefix, \
cfg_oai_sample_identifier, \
cfg_oai_identify_description, \
cfg_oai_id_field, \
cfg_oai_set_field, \
cfg_oai_deleted_policy, \
cfg_oai_expire, \
cfg_oai_sleep, \
cfg_oai_load
|
## $Id$
##
## This file is part of CDS Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006 CERN.
##
## CDS Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## CDS Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with CDS Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""OAI repository config"""
__revision__ = "$Id$"
# pylint: disable-msg=W0611
from invenio.config import cfg_oai_id_prefix, \
cfg_oai_sample_identifier, \
cfg_oai_identify_description, \
cfg_oai_id_field, \
cfg_oai_set_field, \
cfg_oai_deleted_policy, \
cfg_oai_expire, \
cfg_oai_sleep, \
cfg_oai_load
|
Disable W0611 warning, as the imported config variables are exposed to the business logic from here.
|
Disable W0611 warning, as the imported config variables are exposed to
the business logic from here.
|
Python
|
mit
|
tiborsimko/invenio,inveniosoftware/invenio,tiborsimko/invenio,inveniosoftware/invenio
|
## $Id$
##
## This file is part of CDS Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006 CERN.
##
## CDS Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## CDS Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with CDS Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""OAI repository config"""
from invenio.config import cfg_oai_id_prefix, \
cfg_oai_sample_identifier, \
cfg_oai_identify_description, \
cfg_oai_id_field, \
cfg_oai_set_field, \
cfg_oai_deleted_policy, \
cfg_oai_expire, \
cfg_oai_sleep, \
cfg_oai_load
Disable W0611 warning, as the imported config variables are exposed to
the business logic from here.
|
## $Id$
##
## This file is part of CDS Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006 CERN.
##
## CDS Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## CDS Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with CDS Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""OAI repository config"""
__revision__ = "$Id$"
# pylint: disable-msg=W0611
from invenio.config import cfg_oai_id_prefix, \
cfg_oai_sample_identifier, \
cfg_oai_identify_description, \
cfg_oai_id_field, \
cfg_oai_set_field, \
cfg_oai_deleted_policy, \
cfg_oai_expire, \
cfg_oai_sleep, \
cfg_oai_load
|
<commit_before>## $Id$
##
## This file is part of CDS Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006 CERN.
##
## CDS Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## CDS Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with CDS Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""OAI repository config"""
from invenio.config import cfg_oai_id_prefix, \
cfg_oai_sample_identifier, \
cfg_oai_identify_description, \
cfg_oai_id_field, \
cfg_oai_set_field, \
cfg_oai_deleted_policy, \
cfg_oai_expire, \
cfg_oai_sleep, \
cfg_oai_load
<commit_msg>Disable W0611 warning, as the imported config variables are exposed to
the business logic from here.<commit_after>
|
## $Id$
##
## This file is part of CDS Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006 CERN.
##
## CDS Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## CDS Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with CDS Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""OAI repository config"""
__revision__ = "$Id$"
# pylint: disable-msg=W0611
from invenio.config import cfg_oai_id_prefix, \
cfg_oai_sample_identifier, \
cfg_oai_identify_description, \
cfg_oai_id_field, \
cfg_oai_set_field, \
cfg_oai_deleted_policy, \
cfg_oai_expire, \
cfg_oai_sleep, \
cfg_oai_load
|
## $Id$
##
## This file is part of CDS Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006 CERN.
##
## CDS Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## CDS Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with CDS Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""OAI repository config"""
from invenio.config import cfg_oai_id_prefix, \
cfg_oai_sample_identifier, \
cfg_oai_identify_description, \
cfg_oai_id_field, \
cfg_oai_set_field, \
cfg_oai_deleted_policy, \
cfg_oai_expire, \
cfg_oai_sleep, \
cfg_oai_load
Disable W0611 warning, as the imported config variables are exposed to
the business logic from here.## $Id$
##
## This file is part of CDS Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006 CERN.
##
## CDS Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## CDS Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with CDS Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""OAI repository config"""
__revision__ = "$Id$"
# pylint: disable-msg=W0611
from invenio.config import cfg_oai_id_prefix, \
cfg_oai_sample_identifier, \
cfg_oai_identify_description, \
cfg_oai_id_field, \
cfg_oai_set_field, \
cfg_oai_deleted_policy, \
cfg_oai_expire, \
cfg_oai_sleep, \
cfg_oai_load
|
<commit_before>## $Id$
##
## This file is part of CDS Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006 CERN.
##
## CDS Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## CDS Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with CDS Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""OAI repository config"""
from invenio.config import cfg_oai_id_prefix, \
cfg_oai_sample_identifier, \
cfg_oai_identify_description, \
cfg_oai_id_field, \
cfg_oai_set_field, \
cfg_oai_deleted_policy, \
cfg_oai_expire, \
cfg_oai_sleep, \
cfg_oai_load
<commit_msg>Disable W0611 warning, as the imported config variables are exposed to
the business logic from here.<commit_after>## $Id$
##
## This file is part of CDS Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006 CERN.
##
## CDS Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## CDS Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with CDS Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""OAI repository config"""
__revision__ = "$Id$"
# pylint: disable-msg=W0611
from invenio.config import cfg_oai_id_prefix, \
cfg_oai_sample_identifier, \
cfg_oai_identify_description, \
cfg_oai_id_field, \
cfg_oai_set_field, \
cfg_oai_deleted_policy, \
cfg_oai_expire, \
cfg_oai_sleep, \
cfg_oai_load
|
e4798424b22a38cfca519e5e792644ae7757a4f5
|
api/base/pagination.py
|
api/base/pagination.py
|
from collections import OrderedDict
from rest_framework import pagination
from rest_framework.response import Response
from rest_framework.utils.urls import (
replace_query_param, remove_query_param
)
class JSONAPIPagination(pagination.PageNumberPagination):
"""Custom paginator that formats responses in a JSON-API compatible format."""
def get_first_link(self):
url = self.request.build_absolute_uri()
return remove_query_param(url, self.page_query_param)
def get_last_link(self):
url = self.request.build_absolute_uri()
page_number = self.page.paginator.num_pages
return replace_query_param(url, self.page_query_param, page_number)
def get_paginated_response(self, data):
response_dict = OrderedDict([
('data', data),
('links', OrderedDict([
('first', self.get_first_link()),
('last', self.get_last_link()),
('prev', self.get_previous_link()),
('next', self.get_next_link()),
('meta', OrderedDict([
('total', self.page.paginator.count),
]))
])),
])
return Response(response_dict)
|
from collections import OrderedDict
from rest_framework import pagination
from rest_framework.response import Response
from rest_framework.utils.urls import (
replace_query_param, remove_query_param
)
class JSONAPIPagination(pagination.PageNumberPagination):
"""Custom paginator that formats responses in a JSON-API compatible format."""
page_size_query_param = 'page[size]'
def get_first_link(self):
url = self.request.build_absolute_uri()
return remove_query_param(url, self.page_query_param)
def get_last_link(self):
url = self.request.build_absolute_uri()
page_number = self.page.paginator.num_pages
return replace_query_param(url, self.page_query_param, page_number)
def get_paginated_response(self, data):
response_dict = OrderedDict([
('data', data),
('links', OrderedDict([
('first', self.get_first_link()),
('last', self.get_last_link()),
('prev', self.get_previous_link()),
('next', self.get_next_link()),
('meta', OrderedDict([
('total', self.page.paginator.count),
('per_page', self.page.paginator.per_page),
]))
])),
])
return Response(response_dict)
|
Allow client to customize page size using page[size] query param
|
Allow client to customize page size using page[size] query param
|
Python
|
apache-2.0
|
samchrisinger/osf.io,fabianvf/osf.io,erinspace/osf.io,lyndsysimon/osf.io,samanehsan/osf.io,brandonPurvis/osf.io,billyhunt/osf.io,baylee-d/osf.io,billyhunt/osf.io,erinspace/osf.io,ZobairAlijan/osf.io,mfraezz/osf.io,adlius/osf.io,brandonPurvis/osf.io,HalcyonChimera/osf.io,GageGaskins/osf.io,jnayak1/osf.io,jinluyuan/osf.io,acshi/osf.io,TomBaxter/osf.io,cwisecarver/osf.io,alexschiller/osf.io,leb2dg/osf.io,HalcyonChimera/osf.io,ZobairAlijan/osf.io,zamattiac/osf.io,Johnetordoff/osf.io,jolene-esposito/osf.io,kch8qx/osf.io,ckc6cz/osf.io,doublebits/osf.io,mattclark/osf.io,njantrania/osf.io,cwisecarver/osf.io,zamattiac/osf.io,sloria/osf.io,cslzchen/osf.io,TomHeatwole/osf.io,alexschiller/osf.io,felliott/osf.io,aaxelb/osf.io,doublebits/osf.io,bdyetton/prettychart,sbt9uc/osf.io,amyshi188/osf.io,TomBaxter/osf.io,SSJohns/osf.io,barbour-em/osf.io,cslzchen/osf.io,jmcarp/osf.io,Ghalko/osf.io,arpitar/osf.io,TomHeatwole/osf.io,petermalcolm/osf.io,brianjgeiger/osf.io,RomanZWang/osf.io,DanielSBrown/osf.io,baylee-d/osf.io,jmcarp/osf.io,adlius/osf.io,TomHeatwole/osf.io,samanehsan/osf.io,MerlinZhang/osf.io,cldershem/osf.io,jnayak1/osf.io,monikagrabowska/osf.io,caneruguz/osf.io,Ghalko/osf.io,mattclark/osf.io,kch8qx/osf.io,samanehsan/osf.io,samchrisinger/osf.io,bdyetton/prettychart,ZobairAlijan/osf.io,icereval/osf.io,sbt9uc/osf.io,arpitar/osf.io,wearpants/osf.io,kwierman/osf.io,caneruguz/osf.io,dplorimer/osf,mluo613/osf.io,jmcarp/osf.io,cldershem/osf.io,chrisseto/osf.io,Ghalko/osf.io,bdyetton/prettychart,mattclark/osf.io,rdhyee/osf.io,kch8qx/osf.io,hmoco/osf.io,mluo613/osf.io,DanielSBrown/osf.io,sloria/osf.io,adlius/osf.io,wearpants/osf.io,emetsger/osf.io,kwierman/osf.io,petermalcolm/osf.io,kch8qx/osf.io,barbour-em/osf.io,cslzchen/osf.io,reinaH/osf.io,leb2dg/osf.io,aaxelb/osf.io,cldershem/osf.io,KAsante95/osf.io,doublebits/osf.io,jeffreyliu3230/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,ticklemepierce/osf.io,adlius/osf.io,monikagrabowska/osf.io,petermalcolm/osf.io,abought/osf.io,emetsge
r/osf.io,binoculars/osf.io,danielneis/osf.io,mluo613/osf.io,reinaH/osf.io,njantrania/osf.io,monikagrabowska/osf.io,monikagrabowska/osf.io,TomHeatwole/osf.io,KAsante95/osf.io,sbt9uc/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,MerlinZhang/osf.io,haoyuchen1992/osf.io,cosenal/osf.io,chrisseto/osf.io,wearpants/osf.io,Johnetordoff/osf.io,caseyrygt/osf.io,dplorimer/osf,doublebits/osf.io,dplorimer/osf,GageGaskins/osf.io,jinluyuan/osf.io,asanfilippo7/osf.io,njantrania/osf.io,laurenrevere/osf.io,binoculars/osf.io,samchrisinger/osf.io,billyhunt/osf.io,aaxelb/osf.io,brandonPurvis/osf.io,crcresearch/osf.io,ticklemepierce/osf.io,jeffreyliu3230/osf.io,Nesiehr/osf.io,cosenal/osf.io,jnayak1/osf.io,asanfilippo7/osf.io,MerlinZhang/osf.io,lyndsysimon/osf.io,mluo613/osf.io,caseyrygt/osf.io,rdhyee/osf.io,brianjgeiger/osf.io,chennan47/osf.io,leb2dg/osf.io,binoculars/osf.io,jolene-esposito/osf.io,kwierman/osf.io,acshi/osf.io,hmoco/osf.io,jolene-esposito/osf.io,zachjanicki/osf.io,Ghalko/osf.io,zachjanicki/osf.io,samchrisinger/osf.io,RomanZWang/osf.io,ticklemepierce/osf.io,GageGaskins/osf.io,monikagrabowska/osf.io,danielneis/osf.io,asanfilippo7/osf.io,petermalcolm/osf.io,Nesiehr/osf.io,cosenal/osf.io,mluo613/osf.io,TomBaxter/osf.io,ZobairAlijan/osf.io,rdhyee/osf.io,wearpants/osf.io,dplorimer/osf,SSJohns/osf.io,chrisseto/osf.io,HarryRybacki/osf.io,KAsante95/osf.io,laurenrevere/osf.io,HarryRybacki/osf.io,zachjanicki/osf.io,abought/osf.io,cldershem/osf.io,mluke93/osf.io,arpitar/osf.io,brandonPurvis/osf.io,jeffreyliu3230/osf.io,caneruguz/osf.io,jeffreyliu3230/osf.io,saradbowman/osf.io,Johnetordoff/osf.io,njantrania/osf.io,cosenal/osf.io,alexschiller/osf.io,mfraezz/osf.io,saradbowman/osf.io,icereval/osf.io,pattisdr/osf.io,asanfilippo7/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,brandonPurvis/osf.io,caseyrollins/osf.io,acshi/osf.io,alexschiller/osf.io,CenterForOpenScience/osf.io,fabianvf/osf.io,abought/osf.io,RomanZWang/osf.io,GageGaskins/osf.io,jinluyuan/osf.io,alexschiller/osf.io,rein
aH/osf.io,abought/osf.io,caseyrygt/osf.io,zamattiac/osf.io,RomanZWang/osf.io,sloria/osf.io,aaxelb/osf.io,HarryRybacki/osf.io,felliott/osf.io,jmcarp/osf.io,pattisdr/osf.io,crcresearch/osf.io,chrisseto/osf.io,ckc6cz/osf.io,jolene-esposito/osf.io,icereval/osf.io,chennan47/osf.io,bdyetton/prettychart,cwisecarver/osf.io,cslzchen/osf.io,KAsante95/osf.io,jinluyuan/osf.io,SSJohns/osf.io,HalcyonChimera/osf.io,doublebits/osf.io,RomanZWang/osf.io,haoyuchen1992/osf.io,felliott/osf.io,crcresearch/osf.io,cwisecarver/osf.io,KAsante95/osf.io,Johnetordoff/osf.io,lyndsysimon/osf.io,DanielSBrown/osf.io,billyhunt/osf.io,kch8qx/osf.io,mluke93/osf.io,amyshi188/osf.io,caseyrollins/osf.io,mfraezz/osf.io,laurenrevere/osf.io,felliott/osf.io,brianjgeiger/osf.io,acshi/osf.io,hmoco/osf.io,ticklemepierce/osf.io,chennan47/osf.io,mluke93/osf.io,haoyuchen1992/osf.io,caneruguz/osf.io,danielneis/osf.io,haoyuchen1992/osf.io,hmoco/osf.io,sbt9uc/osf.io,HarryRybacki/osf.io,danielneis/osf.io,fabianvf/osf.io,SSJohns/osf.io,ckc6cz/osf.io,zamattiac/osf.io,caseyrygt/osf.io,MerlinZhang/osf.io,billyhunt/osf.io,acshi/osf.io,arpitar/osf.io,mluke93/osf.io,Nesiehr/osf.io,emetsger/osf.io,lyndsysimon/osf.io,zachjanicki/osf.io,ckc6cz/osf.io,fabianvf/osf.io,DanielSBrown/osf.io,barbour-em/osf.io,GageGaskins/osf.io,amyshi188/osf.io,amyshi188/osf.io,emetsger/osf.io,jnayak1/osf.io,Nesiehr/osf.io,barbour-em/osf.io,kwierman/osf.io,pattisdr/osf.io,baylee-d/osf.io,rdhyee/osf.io,caseyrollins/osf.io,CenterForOpenScience/osf.io,samanehsan/osf.io,reinaH/osf.io,erinspace/osf.io
|
from collections import OrderedDict
from rest_framework import pagination
from rest_framework.response import Response
from rest_framework.utils.urls import (
replace_query_param, remove_query_param
)
class JSONAPIPagination(pagination.PageNumberPagination):
"""Custom paginator that formats responses in a JSON-API compatible format."""
def get_first_link(self):
url = self.request.build_absolute_uri()
return remove_query_param(url, self.page_query_param)
def get_last_link(self):
url = self.request.build_absolute_uri()
page_number = self.page.paginator.num_pages
return replace_query_param(url, self.page_query_param, page_number)
def get_paginated_response(self, data):
response_dict = OrderedDict([
('data', data),
('links', OrderedDict([
('first', self.get_first_link()),
('last', self.get_last_link()),
('prev', self.get_previous_link()),
('next', self.get_next_link()),
('meta', OrderedDict([
('total', self.page.paginator.count),
]))
])),
])
return Response(response_dict)
Allow client to customize page size using page[size] query param
|
from collections import OrderedDict
from rest_framework import pagination
from rest_framework.response import Response
from rest_framework.utils.urls import (
replace_query_param, remove_query_param
)
class JSONAPIPagination(pagination.PageNumberPagination):
"""Custom paginator that formats responses in a JSON-API compatible format."""
page_size_query_param = 'page[size]'
def get_first_link(self):
url = self.request.build_absolute_uri()
return remove_query_param(url, self.page_query_param)
def get_last_link(self):
url = self.request.build_absolute_uri()
page_number = self.page.paginator.num_pages
return replace_query_param(url, self.page_query_param, page_number)
def get_paginated_response(self, data):
response_dict = OrderedDict([
('data', data),
('links', OrderedDict([
('first', self.get_first_link()),
('last', self.get_last_link()),
('prev', self.get_previous_link()),
('next', self.get_next_link()),
('meta', OrderedDict([
('total', self.page.paginator.count),
('per_page', self.page.paginator.per_page),
]))
])),
])
return Response(response_dict)
|
<commit_before>from collections import OrderedDict
from rest_framework import pagination
from rest_framework.response import Response
from rest_framework.utils.urls import (
replace_query_param, remove_query_param
)
class JSONAPIPagination(pagination.PageNumberPagination):
"""Custom paginator that formats responses in a JSON-API compatible format."""
def get_first_link(self):
url = self.request.build_absolute_uri()
return remove_query_param(url, self.page_query_param)
def get_last_link(self):
url = self.request.build_absolute_uri()
page_number = self.page.paginator.num_pages
return replace_query_param(url, self.page_query_param, page_number)
def get_paginated_response(self, data):
response_dict = OrderedDict([
('data', data),
('links', OrderedDict([
('first', self.get_first_link()),
('last', self.get_last_link()),
('prev', self.get_previous_link()),
('next', self.get_next_link()),
('meta', OrderedDict([
('total', self.page.paginator.count),
]))
])),
])
return Response(response_dict)
<commit_msg>Allow client to customize page size using page[size] query param<commit_after>
|
from collections import OrderedDict
from rest_framework import pagination
from rest_framework.response import Response
from rest_framework.utils.urls import (
replace_query_param, remove_query_param
)
class JSONAPIPagination(pagination.PageNumberPagination):
"""Custom paginator that formats responses in a JSON-API compatible format."""
page_size_query_param = 'page[size]'
def get_first_link(self):
url = self.request.build_absolute_uri()
return remove_query_param(url, self.page_query_param)
def get_last_link(self):
url = self.request.build_absolute_uri()
page_number = self.page.paginator.num_pages
return replace_query_param(url, self.page_query_param, page_number)
def get_paginated_response(self, data):
response_dict = OrderedDict([
('data', data),
('links', OrderedDict([
('first', self.get_first_link()),
('last', self.get_last_link()),
('prev', self.get_previous_link()),
('next', self.get_next_link()),
('meta', OrderedDict([
('total', self.page.paginator.count),
('per_page', self.page.paginator.per_page),
]))
])),
])
return Response(response_dict)
|
from collections import OrderedDict
from rest_framework import pagination
from rest_framework.response import Response
from rest_framework.utils.urls import (
replace_query_param, remove_query_param
)
class JSONAPIPagination(pagination.PageNumberPagination):
"""Custom paginator that formats responses in a JSON-API compatible format."""
def get_first_link(self):
url = self.request.build_absolute_uri()
return remove_query_param(url, self.page_query_param)
def get_last_link(self):
url = self.request.build_absolute_uri()
page_number = self.page.paginator.num_pages
return replace_query_param(url, self.page_query_param, page_number)
def get_paginated_response(self, data):
response_dict = OrderedDict([
('data', data),
('links', OrderedDict([
('first', self.get_first_link()),
('last', self.get_last_link()),
('prev', self.get_previous_link()),
('next', self.get_next_link()),
('meta', OrderedDict([
('total', self.page.paginator.count),
]))
])),
])
return Response(response_dict)
Allow client to customize page size using page[size] query paramfrom collections import OrderedDict
from rest_framework import pagination
from rest_framework.response import Response
from rest_framework.utils.urls import (
replace_query_param, remove_query_param
)
class JSONAPIPagination(pagination.PageNumberPagination):
"""Custom paginator that formats responses in a JSON-API compatible format."""
page_size_query_param = 'page[size]'
def get_first_link(self):
url = self.request.build_absolute_uri()
return remove_query_param(url, self.page_query_param)
def get_last_link(self):
url = self.request.build_absolute_uri()
page_number = self.page.paginator.num_pages
return replace_query_param(url, self.page_query_param, page_number)
def get_paginated_response(self, data):
response_dict = OrderedDict([
('data', data),
('links', OrderedDict([
('first', self.get_first_link()),
('last', self.get_last_link()),
('prev', self.get_previous_link()),
('next', self.get_next_link()),
('meta', OrderedDict([
('total', self.page.paginator.count),
('per_page', self.page.paginator.per_page),
]))
])),
])
return Response(response_dict)
|
<commit_before>from collections import OrderedDict
from rest_framework import pagination
from rest_framework.response import Response
from rest_framework.utils.urls import (
replace_query_param, remove_query_param
)
class JSONAPIPagination(pagination.PageNumberPagination):
"""Custom paginator that formats responses in a JSON-API compatible format."""
def get_first_link(self):
url = self.request.build_absolute_uri()
return remove_query_param(url, self.page_query_param)
def get_last_link(self):
url = self.request.build_absolute_uri()
page_number = self.page.paginator.num_pages
return replace_query_param(url, self.page_query_param, page_number)
def get_paginated_response(self, data):
response_dict = OrderedDict([
('data', data),
('links', OrderedDict([
('first', self.get_first_link()),
('last', self.get_last_link()),
('prev', self.get_previous_link()),
('next', self.get_next_link()),
('meta', OrderedDict([
('total', self.page.paginator.count),
]))
])),
])
return Response(response_dict)
<commit_msg>Allow client to customize page size using page[size] query param<commit_after>from collections import OrderedDict
from rest_framework import pagination
from rest_framework.response import Response
from rest_framework.utils.urls import (
replace_query_param, remove_query_param
)
class JSONAPIPagination(pagination.PageNumberPagination):
"""Custom paginator that formats responses in a JSON-API compatible format."""
page_size_query_param = 'page[size]'
def get_first_link(self):
url = self.request.build_absolute_uri()
return remove_query_param(url, self.page_query_param)
def get_last_link(self):
url = self.request.build_absolute_uri()
page_number = self.page.paginator.num_pages
return replace_query_param(url, self.page_query_param, page_number)
def get_paginated_response(self, data):
response_dict = OrderedDict([
('data', data),
('links', OrderedDict([
('first', self.get_first_link()),
('last', self.get_last_link()),
('prev', self.get_previous_link()),
('next', self.get_next_link()),
('meta', OrderedDict([
('total', self.page.paginator.count),
('per_page', self.page.paginator.per_page),
]))
])),
])
return Response(response_dict)
|
e284c0e512edd18ed0ef1259fd4606d630699f3a
|
wtl/wtgithub/models.py
|
wtl/wtgithub/models.py
|
from __future__ import unicode_literals
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.db import models
@python_2_unicode_compatible
class Repository(models.Model):
"""
Repository
Represents github repository. Name, description, etc
"""
owner = models.CharField(_('owner'), max_length=512)
name = models.CharField(_('name'), max_length=512)
starsCount = models.IntegerField(_('stars count'))
description = models.TextField(_('description'))
|
from __future__ import unicode_literals
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.db import models
@python_2_unicode_compatible
class Repository(models.Model):
"""
Repository
Represents github repository. Name, description, etc
"""
owner = models.CharField(_('owner'), max_length=512)
name = models.CharField(_('name'), max_length=512)
starsCount = models.IntegerField(_('stars count'))
description = models.TextField(_('description'))
def __str__(self):
return '{0}/{1}'.format(self.owner, self.name)
|
Add missing `__str__` to `Repository` model
|
Add missing `__str__` to `Repository` model
Can't use `@python_2_unicode_compatible` without defining `__str__`.
|
Python
|
mit
|
elegion/djangodash2013,elegion/djangodash2013
|
from __future__ import unicode_literals
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.db import models
@python_2_unicode_compatible
class Repository(models.Model):
"""
Repository
Represents github repository. Name, description, etc
"""
owner = models.CharField(_('owner'), max_length=512)
name = models.CharField(_('name'), max_length=512)
starsCount = models.IntegerField(_('stars count'))
description = models.TextField(_('description'))
Add missing `__str__` to `Repository` model
Can't use `@python_2_unicode_compatible` without defining `__str__`.
|
from __future__ import unicode_literals
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.db import models
@python_2_unicode_compatible
class Repository(models.Model):
"""
Repository
Represents github repository. Name, description, etc
"""
owner = models.CharField(_('owner'), max_length=512)
name = models.CharField(_('name'), max_length=512)
starsCount = models.IntegerField(_('stars count'))
description = models.TextField(_('description'))
def __str__(self):
return '{0}/{1}'.format(self.owner, self.name)
|
<commit_before>from __future__ import unicode_literals
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.db import models
@python_2_unicode_compatible
class Repository(models.Model):
"""
Repository
Represents github repository. Name, description, etc
"""
owner = models.CharField(_('owner'), max_length=512)
name = models.CharField(_('name'), max_length=512)
starsCount = models.IntegerField(_('stars count'))
description = models.TextField(_('description'))
<commit_msg>Add missing `__str__` to `Repository` model
Can't use `@python_2_unicode_compatible` without defining `__str__`.<commit_after>
|
from __future__ import unicode_literals
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.db import models
@python_2_unicode_compatible
class Repository(models.Model):
"""
Repository
Represents github repository. Name, description, etc
"""
owner = models.CharField(_('owner'), max_length=512)
name = models.CharField(_('name'), max_length=512)
starsCount = models.IntegerField(_('stars count'))
description = models.TextField(_('description'))
def __str__(self):
return '{0}/{1}'.format(self.owner, self.name)
|
from __future__ import unicode_literals
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.db import models
@python_2_unicode_compatible
class Repository(models.Model):
"""
Repository
Represents github repository. Name, description, etc
"""
owner = models.CharField(_('owner'), max_length=512)
name = models.CharField(_('name'), max_length=512)
starsCount = models.IntegerField(_('stars count'))
description = models.TextField(_('description'))
Add missing `__str__` to `Repository` model
Can't use `@python_2_unicode_compatible` without defining `__str__`.from __future__ import unicode_literals
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.db import models
@python_2_unicode_compatible
class Repository(models.Model):
"""
Repository
Represents github repository. Name, description, etc
"""
owner = models.CharField(_('owner'), max_length=512)
name = models.CharField(_('name'), max_length=512)
starsCount = models.IntegerField(_('stars count'))
description = models.TextField(_('description'))
def __str__(self):
return '{0}/{1}'.format(self.owner, self.name)
|
<commit_before>from __future__ import unicode_literals
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.db import models
@python_2_unicode_compatible
class Repository(models.Model):
"""
Repository
Represents github repository. Name, description, etc
"""
owner = models.CharField(_('owner'), max_length=512)
name = models.CharField(_('name'), max_length=512)
starsCount = models.IntegerField(_('stars count'))
description = models.TextField(_('description'))
<commit_msg>Add missing `__str__` to `Repository` model
Can't use `@python_2_unicode_compatible` without defining `__str__`.<commit_after>from __future__ import unicode_literals
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.db import models
@python_2_unicode_compatible
class Repository(models.Model):
"""
Repository
Represents github repository. Name, description, etc
"""
owner = models.CharField(_('owner'), max_length=512)
name = models.CharField(_('name'), max_length=512)
starsCount = models.IntegerField(_('stars count'))
description = models.TextField(_('description'))
def __str__(self):
return '{0}/{1}'.format(self.owner, self.name)
|
fd0479742afd994bfb241415f7db9c0c971a09b3
|
conanfile.py
|
conanfile.py
|
from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class ClangTidyTargetCMakeConan(ConanFile):
name = "clang-tidy-target-cmake"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"tooling-find-pkg-util/master@smspillaz/tooling-find-pkg-util",
"tooling-cmake-util/master@smspillaz/tooling-cmake-util",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/clang-tidy-target-cmake"
license = "MIT"
def source(self):
zip_name = "clang-tidy-target-cmake.zip"
download("https://github.com/polysquare/"
"clang-tidy-target-cmake/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="*.cmake",
dst="cmake/clang-tidy-target-cmake",
src="clang-tidy-target-cmake-" + VERSION,
keep_path=True)
|
from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class ClangTidyTargetCMakeConan(ConanFile):
name = "clang-tidy-target-cmake"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"tooling-find-pkg-util/master@smspillaz/tooling-find-pkg-util",
"tooling-cmake-util/master@smspillaz/tooling-cmake-util",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/clang-tidy-target-cmake"
license = "MIT"
def source(self):
zip_name = "clang-tidy-target-cmake.zip"
download("https://github.com/polysquare/"
"clang-tidy-target-cmake/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="Find*.cmake",
dst="",
src="clang-tidy-target-cmake-" + VERSION,
keep_path=True)
self.copy(pattern="*.cmake",
dst="cmake/clang-tidy-target-cmake",
src="clang-tidy-target-cmake-" + VERSION,
keep_path=True)
|
Copy find modules to root of module path
|
conan: Copy find modules to root of module path
|
Python
|
mit
|
polysquare/clang-tidy-target-cmake,polysquare/clang-tidy-target-cmake
|
from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class ClangTidyTargetCMakeConan(ConanFile):
name = "clang-tidy-target-cmake"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"tooling-find-pkg-util/master@smspillaz/tooling-find-pkg-util",
"tooling-cmake-util/master@smspillaz/tooling-cmake-util",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/clang-tidy-target-cmake"
license = "MIT"
def source(self):
zip_name = "clang-tidy-target-cmake.zip"
download("https://github.com/polysquare/"
"clang-tidy-target-cmake/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="*.cmake",
dst="cmake/clang-tidy-target-cmake",
src="clang-tidy-target-cmake-" + VERSION,
keep_path=True)
conan: Copy find modules to root of module path
|
from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class ClangTidyTargetCMakeConan(ConanFile):
name = "clang-tidy-target-cmake"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"tooling-find-pkg-util/master@smspillaz/tooling-find-pkg-util",
"tooling-cmake-util/master@smspillaz/tooling-cmake-util",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/clang-tidy-target-cmake"
license = "MIT"
def source(self):
zip_name = "clang-tidy-target-cmake.zip"
download("https://github.com/polysquare/"
"clang-tidy-target-cmake/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="Find*.cmake",
dst="",
src="clang-tidy-target-cmake-" + VERSION,
keep_path=True)
self.copy(pattern="*.cmake",
dst="cmake/clang-tidy-target-cmake",
src="clang-tidy-target-cmake-" + VERSION,
keep_path=True)
|
<commit_before>from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class ClangTidyTargetCMakeConan(ConanFile):
name = "clang-tidy-target-cmake"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"tooling-find-pkg-util/master@smspillaz/tooling-find-pkg-util",
"tooling-cmake-util/master@smspillaz/tooling-cmake-util",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/clang-tidy-target-cmake"
license = "MIT"
def source(self):
zip_name = "clang-tidy-target-cmake.zip"
download("https://github.com/polysquare/"
"clang-tidy-target-cmake/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="*.cmake",
dst="cmake/clang-tidy-target-cmake",
src="clang-tidy-target-cmake-" + VERSION,
keep_path=True)
<commit_msg>conan: Copy find modules to root of module path<commit_after>
|
from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class ClangTidyTargetCMakeConan(ConanFile):
name = "clang-tidy-target-cmake"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"tooling-find-pkg-util/master@smspillaz/tooling-find-pkg-util",
"tooling-cmake-util/master@smspillaz/tooling-cmake-util",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/clang-tidy-target-cmake"
license = "MIT"
def source(self):
zip_name = "clang-tidy-target-cmake.zip"
download("https://github.com/polysquare/"
"clang-tidy-target-cmake/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="Find*.cmake",
dst="",
src="clang-tidy-target-cmake-" + VERSION,
keep_path=True)
self.copy(pattern="*.cmake",
dst="cmake/clang-tidy-target-cmake",
src="clang-tidy-target-cmake-" + VERSION,
keep_path=True)
|
from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class ClangTidyTargetCMakeConan(ConanFile):
name = "clang-tidy-target-cmake"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"tooling-find-pkg-util/master@smspillaz/tooling-find-pkg-util",
"tooling-cmake-util/master@smspillaz/tooling-cmake-util",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/clang-tidy-target-cmake"
license = "MIT"
def source(self):
zip_name = "clang-tidy-target-cmake.zip"
download("https://github.com/polysquare/"
"clang-tidy-target-cmake/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="*.cmake",
dst="cmake/clang-tidy-target-cmake",
src="clang-tidy-target-cmake-" + VERSION,
keep_path=True)
conan: Copy find modules to root of module pathfrom conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class ClangTidyTargetCMakeConan(ConanFile):
name = "clang-tidy-target-cmake"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"tooling-find-pkg-util/master@smspillaz/tooling-find-pkg-util",
"tooling-cmake-util/master@smspillaz/tooling-cmake-util",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/clang-tidy-target-cmake"
license = "MIT"
def source(self):
zip_name = "clang-tidy-target-cmake.zip"
download("https://github.com/polysquare/"
"clang-tidy-target-cmake/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="Find*.cmake",
dst="",
src="clang-tidy-target-cmake-" + VERSION,
keep_path=True)
self.copy(pattern="*.cmake",
dst="cmake/clang-tidy-target-cmake",
src="clang-tidy-target-cmake-" + VERSION,
keep_path=True)
|
<commit_before>from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class ClangTidyTargetCMakeConan(ConanFile):
name = "clang-tidy-target-cmake"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"tooling-find-pkg-util/master@smspillaz/tooling-find-pkg-util",
"tooling-cmake-util/master@smspillaz/tooling-cmake-util",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/clang-tidy-target-cmake"
license = "MIT"
def source(self):
zip_name = "clang-tidy-target-cmake.zip"
download("https://github.com/polysquare/"
"clang-tidy-target-cmake/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="*.cmake",
dst="cmake/clang-tidy-target-cmake",
src="clang-tidy-target-cmake-" + VERSION,
keep_path=True)
<commit_msg>conan: Copy find modules to root of module path<commit_after>from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class ClangTidyTargetCMakeConan(ConanFile):
name = "clang-tidy-target-cmake"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"tooling-find-pkg-util/master@smspillaz/tooling-find-pkg-util",
"tooling-cmake-util/master@smspillaz/tooling-cmake-util",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/clang-tidy-target-cmake"
license = "MIT"
def source(self):
zip_name = "clang-tidy-target-cmake.zip"
download("https://github.com/polysquare/"
"clang-tidy-target-cmake/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="Find*.cmake",
dst="",
src="clang-tidy-target-cmake-" + VERSION,
keep_path=True)
self.copy(pattern="*.cmake",
dst="cmake/clang-tidy-target-cmake",
src="clang-tidy-target-cmake-" + VERSION,
keep_path=True)
|
ce71f3bd66ad5b532e43791cca0ef811680000de
|
constants.py
|
constants.py
|
import os
from pathlib import Path
from string import Template
from dotenv import load_dotenv
load_dotenv() # helps with local dev
TEMPLATE_PATH = Path.cwd() / "email.md"
STARMINDER_COUNT = int(os.getenv("STARMINDER_COUNT"))
STARMINDER_RECIPIENT = os.getenv("STARMINDER_RECIPIENT")
STARMINDER_SUBJECT = Template("[Starminder] Reminders for $today")
AWS_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY")
AWS_FROM = os.getenv("AWS_FROM")
GITHUB_TOKEN = os.getenv("GITHUB_TOKEN")
GITHUB_SERVER_URL = os.getenv("GITHUB_SERVER_URL")
GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY")
GITHUB_FORK_URL = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}"
|
import os
from pathlib import Path
from string import Template
from dotenv import load_dotenv
load_dotenv() # helps with local dev
TEMPLATE_PATH = Path.cwd() / "email.md"
STARMINDER_COUNT = int(os.getenv("STARMINDER_COUNT"))
STARMINDER_RECIPIENT = os.getenv("STARMINDER_RECIPIENT")
STARMINDER_SUBJECT = Template("[Starminder] Reminders for $today")
AWS_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY")
AWS_FROM = os.getenv("AWS_FROM")
GITHUB_TOKEN = os.getenv("GH_TOKEN")
GITHUB_SERVER_URL = os.getenv("GITHUB_SERVER_URL")
GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY")
GITHUB_FORK_URL = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}"
|
Rename GH token env var
|
Rename GH token env var
GitHub disallows user set GITHUB_ prefixed ones.
|
Python
|
mit
|
nkantar/Starminder
|
import os
from pathlib import Path
from string import Template
from dotenv import load_dotenv
load_dotenv() # helps with local dev
TEMPLATE_PATH = Path.cwd() / "email.md"
STARMINDER_COUNT = int(os.getenv("STARMINDER_COUNT"))
STARMINDER_RECIPIENT = os.getenv("STARMINDER_RECIPIENT")
STARMINDER_SUBJECT = Template("[Starminder] Reminders for $today")
AWS_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY")
AWS_FROM = os.getenv("AWS_FROM")
GITHUB_TOKEN = os.getenv("GITHUB_TOKEN")
GITHUB_SERVER_URL = os.getenv("GITHUB_SERVER_URL")
GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY")
GITHUB_FORK_URL = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}"
Rename GH token env var
GitHub disallows user set GITHUB_ prefixed ones.
|
import os
from pathlib import Path
from string import Template
from dotenv import load_dotenv
load_dotenv() # helps with local dev
TEMPLATE_PATH = Path.cwd() / "email.md"
STARMINDER_COUNT = int(os.getenv("STARMINDER_COUNT"))
STARMINDER_RECIPIENT = os.getenv("STARMINDER_RECIPIENT")
STARMINDER_SUBJECT = Template("[Starminder] Reminders for $today")
AWS_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY")
AWS_FROM = os.getenv("AWS_FROM")
GITHUB_TOKEN = os.getenv("GH_TOKEN")
GITHUB_SERVER_URL = os.getenv("GITHUB_SERVER_URL")
GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY")
GITHUB_FORK_URL = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}"
|
<commit_before>import os
from pathlib import Path
from string import Template
from dotenv import load_dotenv
load_dotenv() # helps with local dev
TEMPLATE_PATH = Path.cwd() / "email.md"
STARMINDER_COUNT = int(os.getenv("STARMINDER_COUNT"))
STARMINDER_RECIPIENT = os.getenv("STARMINDER_RECIPIENT")
STARMINDER_SUBJECT = Template("[Starminder] Reminders for $today")
AWS_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY")
AWS_FROM = os.getenv("AWS_FROM")
GITHUB_TOKEN = os.getenv("GITHUB_TOKEN")
GITHUB_SERVER_URL = os.getenv("GITHUB_SERVER_URL")
GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY")
GITHUB_FORK_URL = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}"
<commit_msg>Rename GH token env var
GitHub disallows user set GITHUB_ prefixed ones.<commit_after>
|
import os
from pathlib import Path
from string import Template
from dotenv import load_dotenv
load_dotenv() # helps with local dev
TEMPLATE_PATH = Path.cwd() / "email.md"
STARMINDER_COUNT = int(os.getenv("STARMINDER_COUNT"))
STARMINDER_RECIPIENT = os.getenv("STARMINDER_RECIPIENT")
STARMINDER_SUBJECT = Template("[Starminder] Reminders for $today")
AWS_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY")
AWS_FROM = os.getenv("AWS_FROM")
GITHUB_TOKEN = os.getenv("GH_TOKEN")
GITHUB_SERVER_URL = os.getenv("GITHUB_SERVER_URL")
GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY")
GITHUB_FORK_URL = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}"
|
import os
from pathlib import Path
from string import Template
from dotenv import load_dotenv
load_dotenv() # helps with local dev
TEMPLATE_PATH = Path.cwd() / "email.md"
STARMINDER_COUNT = int(os.getenv("STARMINDER_COUNT"))
STARMINDER_RECIPIENT = os.getenv("STARMINDER_RECIPIENT")
STARMINDER_SUBJECT = Template("[Starminder] Reminders for $today")
AWS_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY")
AWS_FROM = os.getenv("AWS_FROM")
GITHUB_TOKEN = os.getenv("GITHUB_TOKEN")
GITHUB_SERVER_URL = os.getenv("GITHUB_SERVER_URL")
GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY")
GITHUB_FORK_URL = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}"
Rename GH token env var
GitHub disallows user set GITHUB_ prefixed ones.import os
from pathlib import Path
from string import Template
from dotenv import load_dotenv
load_dotenv() # helps with local dev
TEMPLATE_PATH = Path.cwd() / "email.md"
STARMINDER_COUNT = int(os.getenv("STARMINDER_COUNT"))
STARMINDER_RECIPIENT = os.getenv("STARMINDER_RECIPIENT")
STARMINDER_SUBJECT = Template("[Starminder] Reminders for $today")
AWS_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY")
AWS_FROM = os.getenv("AWS_FROM")
GITHUB_TOKEN = os.getenv("GH_TOKEN")
GITHUB_SERVER_URL = os.getenv("GITHUB_SERVER_URL")
GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY")
GITHUB_FORK_URL = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}"
|
<commit_before>import os
from pathlib import Path
from string import Template
from dotenv import load_dotenv
load_dotenv() # helps with local dev
TEMPLATE_PATH = Path.cwd() / "email.md"
STARMINDER_COUNT = int(os.getenv("STARMINDER_COUNT"))
STARMINDER_RECIPIENT = os.getenv("STARMINDER_RECIPIENT")
STARMINDER_SUBJECT = Template("[Starminder] Reminders for $today")
AWS_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY")
AWS_FROM = os.getenv("AWS_FROM")
GITHUB_TOKEN = os.getenv("GITHUB_TOKEN")
GITHUB_SERVER_URL = os.getenv("GITHUB_SERVER_URL")
GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY")
GITHUB_FORK_URL = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}"
<commit_msg>Rename GH token env var
GitHub disallows user set GITHUB_ prefixed ones.<commit_after>import os
from pathlib import Path
from string import Template
from dotenv import load_dotenv
load_dotenv() # helps with local dev
TEMPLATE_PATH = Path.cwd() / "email.md"
STARMINDER_COUNT = int(os.getenv("STARMINDER_COUNT"))
STARMINDER_RECIPIENT = os.getenv("STARMINDER_RECIPIENT")
STARMINDER_SUBJECT = Template("[Starminder] Reminders for $today")
AWS_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY")
AWS_FROM = os.getenv("AWS_FROM")
GITHUB_TOKEN = os.getenv("GH_TOKEN")
GITHUB_SERVER_URL = os.getenv("GITHUB_SERVER_URL")
GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY")
GITHUB_FORK_URL = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}"
|
a9a26ddff2e0d033854621e13b19693561f9fe5f
|
tests/drawing/demo_change_artist_group.py
|
tests/drawing/demo_change_artist_group.py
|
#!/usr/bin/env python3
"""An orange rectangle should be displayed on top of a green one. When you
click with the mouse, the green rectangle should move on top. When you release
the mouse, the orange rectangle should move back to the top."""
import pyglet
import glooey
import vecrec
from glooey.drawing import green, orange
print(__doc__)
window = pyglet.window.Window()
batch = pyglet.graphics.Batch()
rect_1 = vecrec.Rect.from_pyglet_window(window)
rect_1.shrink(50)
rect_2 = rect_1 + (10,-10)
bg = pyglet.graphics.OrderedGroup(0)
fg = pyglet.graphics.OrderedGroup(1)
artist_1 = glooey.drawing.Rectangle(rect_1, color=green, batch=batch, group=bg)
artist_2 = glooey.drawing.Rectangle(rect_2, color=orange, batch=batch, group=fg)
@window.event
def on_draw():
print('(draw)')
window.clear()
batch.draw()
@window.event
def on_mouse_press(self, *args):
print("- green in front")
artist_1.group = fg
artist_2.group = bg
@window.event
def on_mouse_release(self, *args):
print("- orange in front")
artist_1.group = bg
artist_2.group = fg
pyglet.app.run()
|
#!/usr/bin/env python3
"""An orange rectangle should be displayed on top of a green one. When you
click with the mouse, the green rectangle should move on top. When you release
the mouse, the orange rectangle should move back to the top."""
import pyglet
import glooey
import vecrec
from glooey.drawing import green, orange
print(__doc__)
window = pyglet.window.Window()
batch = pyglet.graphics.Batch()
rect_1 = vecrec.Rect.from_pyglet_window(window)
rect_1.shrink(50)
rect_2 = rect_1 + (10,-10)
bg = pyglet.graphics.OrderedGroup(0)
fg = pyglet.graphics.OrderedGroup(1)
artist_1 = glooey.drawing.Rectangle(rect_1, color=green, batch=batch, group=bg)
artist_2 = glooey.drawing.Rectangle(rect_2, color=orange, batch=batch, group=fg)
@window.event
def on_draw():
window.clear()
batch.draw()
@window.event
def on_mouse_press(self, *args):
print("- green in front")
artist_1.group = fg
artist_2.group = bg
@window.event
def on_mouse_release(self, *args):
print("- orange in front")
artist_1.group = bg
artist_2.group = fg
pyglet.app.run()
|
Make the test a bit more verbose.
|
Make the test a bit more verbose.
|
Python
|
mit
|
kxgames/glooey,kxgames/glooey
|
#!/usr/bin/env python3
"""An orange rectangle should be displayed on top of a green one. When you
click with the mouse, the green rectangle should move on top. When you release
the mouse, the orange rectangle should move back to the top."""
import pyglet
import glooey
import vecrec
from glooey.drawing import green, orange
print(__doc__)
window = pyglet.window.Window()
batch = pyglet.graphics.Batch()
rect_1 = vecrec.Rect.from_pyglet_window(window)
rect_1.shrink(50)
rect_2 = rect_1 + (10,-10)
bg = pyglet.graphics.OrderedGroup(0)
fg = pyglet.graphics.OrderedGroup(1)
artist_1 = glooey.drawing.Rectangle(rect_1, color=green, batch=batch, group=bg)
artist_2 = glooey.drawing.Rectangle(rect_2, color=orange, batch=batch, group=fg)
@window.event
def on_draw():
print('(draw)')
window.clear()
batch.draw()
@window.event
def on_mouse_press(self, *args):
print("- green in front")
artist_1.group = fg
artist_2.group = bg
@window.event
def on_mouse_release(self, *args):
print("- orange in front")
artist_1.group = bg
artist_2.group = fg
pyglet.app.run()
Make the test a bit more verbose.
|
#!/usr/bin/env python3
"""An orange rectangle should be displayed on top of a green one. When you
click with the mouse, the green rectangle should move on top. When you release
the mouse, the orange rectangle should move back to the top."""
import pyglet
import glooey
import vecrec
from glooey.drawing import green, orange
print(__doc__)
window = pyglet.window.Window()
batch = pyglet.graphics.Batch()
rect_1 = vecrec.Rect.from_pyglet_window(window)
rect_1.shrink(50)
rect_2 = rect_1 + (10,-10)
bg = pyglet.graphics.OrderedGroup(0)
fg = pyglet.graphics.OrderedGroup(1)
artist_1 = glooey.drawing.Rectangle(rect_1, color=green, batch=batch, group=bg)
artist_2 = glooey.drawing.Rectangle(rect_2, color=orange, batch=batch, group=fg)
@window.event
def on_draw():
window.clear()
batch.draw()
@window.event
def on_mouse_press(self, *args):
print("- green in front")
artist_1.group = fg
artist_2.group = bg
@window.event
def on_mouse_release(self, *args):
print("- orange in front")
artist_1.group = bg
artist_2.group = fg
pyglet.app.run()
|
<commit_before>#!/usr/bin/env python3
"""An orange rectangle should be displayed on top of a green one. When you
click with the mouse, the green rectangle should move on top. When you release
the mouse, the orange rectangle should move back to the top."""
import pyglet
import glooey
import vecrec
from glooey.drawing import green, orange
print(__doc__)
window = pyglet.window.Window()
batch = pyglet.graphics.Batch()
rect_1 = vecrec.Rect.from_pyglet_window(window)
rect_1.shrink(50)
rect_2 = rect_1 + (10,-10)
bg = pyglet.graphics.OrderedGroup(0)
fg = pyglet.graphics.OrderedGroup(1)
artist_1 = glooey.drawing.Rectangle(rect_1, color=green, batch=batch, group=bg)
artist_2 = glooey.drawing.Rectangle(rect_2, color=orange, batch=batch, group=fg)
@window.event
def on_draw():
print('(draw)')
window.clear()
batch.draw()
@window.event
def on_mouse_press(self, *args):
print("- green in front")
artist_1.group = fg
artist_2.group = bg
@window.event
def on_mouse_release(self, *args):
print("- orange in front")
artist_1.group = bg
artist_2.group = fg
pyglet.app.run()
<commit_msg>Make the test a bit more verbose.<commit_after>
|
#!/usr/bin/env python3
"""An orange rectangle should be displayed on top of a green one. When you
click with the mouse, the green rectangle should move on top. When you release
the mouse, the orange rectangle should move back to the top."""
import pyglet
import glooey
import vecrec
from glooey.drawing import green, orange
print(__doc__)
window = pyglet.window.Window()
batch = pyglet.graphics.Batch()
rect_1 = vecrec.Rect.from_pyglet_window(window)
rect_1.shrink(50)
rect_2 = rect_1 + (10,-10)
bg = pyglet.graphics.OrderedGroup(0)
fg = pyglet.graphics.OrderedGroup(1)
artist_1 = glooey.drawing.Rectangle(rect_1, color=green, batch=batch, group=bg)
artist_2 = glooey.drawing.Rectangle(rect_2, color=orange, batch=batch, group=fg)
@window.event
def on_draw():
window.clear()
batch.draw()
@window.event
def on_mouse_press(self, *args):
print("- green in front")
artist_1.group = fg
artist_2.group = bg
@window.event
def on_mouse_release(self, *args):
print("- orange in front")
artist_1.group = bg
artist_2.group = fg
pyglet.app.run()
|
#!/usr/bin/env python3
"""An orange rectangle should be displayed on top of a green one. When you
click with the mouse, the green rectangle should move on top. When you release
the mouse, the orange rectangle should move back to the top."""
import pyglet
import glooey
import vecrec
from glooey.drawing import green, orange
print(__doc__)
window = pyglet.window.Window()
batch = pyglet.graphics.Batch()
rect_1 = vecrec.Rect.from_pyglet_window(window)
rect_1.shrink(50)
rect_2 = rect_1 + (10,-10)
bg = pyglet.graphics.OrderedGroup(0)
fg = pyglet.graphics.OrderedGroup(1)
artist_1 = glooey.drawing.Rectangle(rect_1, color=green, batch=batch, group=bg)
artist_2 = glooey.drawing.Rectangle(rect_2, color=orange, batch=batch, group=fg)
@window.event
def on_draw():
print('(draw)')
window.clear()
batch.draw()
@window.event
def on_mouse_press(self, *args):
print("- green in front")
artist_1.group = fg
artist_2.group = bg
@window.event
def on_mouse_release(self, *args):
print("- orange in front")
artist_1.group = bg
artist_2.group = fg
pyglet.app.run()
Make the test a bit more verbose.#!/usr/bin/env python3
"""An orange rectangle should be displayed on top of a green one. When you
click with the mouse, the green rectangle should move on top. When you release
the mouse, the orange rectangle should move back to the top."""
import pyglet
import glooey
import vecrec
from glooey.drawing import green, orange
print(__doc__)
window = pyglet.window.Window()
batch = pyglet.graphics.Batch()
rect_1 = vecrec.Rect.from_pyglet_window(window)
rect_1.shrink(50)
rect_2 = rect_1 + (10,-10)
bg = pyglet.graphics.OrderedGroup(0)
fg = pyglet.graphics.OrderedGroup(1)
artist_1 = glooey.drawing.Rectangle(rect_1, color=green, batch=batch, group=bg)
artist_2 = glooey.drawing.Rectangle(rect_2, color=orange, batch=batch, group=fg)
@window.event
def on_draw():
window.clear()
batch.draw()
@window.event
def on_mouse_press(self, *args):
print("- green in front")
artist_1.group = fg
artist_2.group = bg
@window.event
def on_mouse_release(self, *args):
print("- orange in front")
artist_1.group = bg
artist_2.group = fg
pyglet.app.run()
|
<commit_before>#!/usr/bin/env python3
"""An orange rectangle should be displayed on top of a green one. When you
click with the mouse, the green rectangle should move on top. When you release
the mouse, the orange rectangle should move back to the top."""
import pyglet
import glooey
import vecrec
from glooey.drawing import green, orange
print(__doc__)
window = pyglet.window.Window()
batch = pyglet.graphics.Batch()
rect_1 = vecrec.Rect.from_pyglet_window(window)
rect_1.shrink(50)
rect_2 = rect_1 + (10,-10)
bg = pyglet.graphics.OrderedGroup(0)
fg = pyglet.graphics.OrderedGroup(1)
artist_1 = glooey.drawing.Rectangle(rect_1, color=green, batch=batch, group=bg)
artist_2 = glooey.drawing.Rectangle(rect_2, color=orange, batch=batch, group=fg)
@window.event
def on_draw():
print('(draw)')
window.clear()
batch.draw()
@window.event
def on_mouse_press(self, *args):
print("- green in front")
artist_1.group = fg
artist_2.group = bg
@window.event
def on_mouse_release(self, *args):
print("- orange in front")
artist_1.group = bg
artist_2.group = fg
pyglet.app.run()
<commit_msg>Make the test a bit more verbose.<commit_after>#!/usr/bin/env python3
"""An orange rectangle should be displayed on top of a green one. When you
click with the mouse, the green rectangle should move on top. When you release
the mouse, the orange rectangle should move back to the top."""
import pyglet
import glooey
import vecrec
from glooey.drawing import green, orange
print(__doc__)
window = pyglet.window.Window()
batch = pyglet.graphics.Batch()
rect_1 = vecrec.Rect.from_pyglet_window(window)
rect_1.shrink(50)
rect_2 = rect_1 + (10,-10)
bg = pyglet.graphics.OrderedGroup(0)
fg = pyglet.graphics.OrderedGroup(1)
artist_1 = glooey.drawing.Rectangle(rect_1, color=green, batch=batch, group=bg)
artist_2 = glooey.drawing.Rectangle(rect_2, color=orange, batch=batch, group=fg)
@window.event
def on_draw():
window.clear()
batch.draw()
@window.event
def on_mouse_press(self, *args):
print("- green in front")
artist_1.group = fg
artist_2.group = bg
@window.event
def on_mouse_release(self, *args):
print("- orange in front")
artist_1.group = bg
artist_2.group = fg
pyglet.app.run()
|
7437382d966d39c4de21d2686bd8f31a23e5c47b
|
IPython/html/texteditor/handlers.py
|
IPython/html/texteditor/handlers.py
|
#encoding: utf-8
"""Tornado handlers for the terminal emulator."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from tornado import web
from ..base.handlers import IPythonHandler, path_regex
from ..utils import url_escape
class EditorHandler(IPythonHandler):
"""Render the text editor interface."""
@web.authenticated
def get(self, path):
path = path.strip('/')
if not self.contents_manager.file_exists(path):
raise web.HTTPError(404, u'File does not exist: %s' % path)
self.write(self.render_template('texteditor.html',
file_path=url_escape(path),
)
)
default_handlers = [
(r"/texteditor%s" % path_regex, EditorHandler),
]
|
#encoding: utf-8
"""Tornado handlers for the terminal emulator."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from tornado import web
from ..base.handlers import IPythonHandler, path_regex
from ..utils import url_escape
class EditorHandler(IPythonHandler):
"""Render the text editor interface."""
@web.authenticated
def get(self, path):
path = path.strip('/')
if not self.contents_manager.file_exists(path):
raise web.HTTPError(404, u'File does not exist: %s' % path)
self.write(self.render_template('texteditor.html',
file_path=url_escape(path),
page_title=path.rsplit('/', 1)[-1] + " (editing)",
)
)
default_handlers = [
(r"/texteditor%s" % path_regex, EditorHandler),
]
|
Set page title for editor
|
Set page title for editor
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
#encoding: utf-8
"""Tornado handlers for the terminal emulator."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from tornado import web
from ..base.handlers import IPythonHandler, path_regex
from ..utils import url_escape
class EditorHandler(IPythonHandler):
"""Render the text editor interface."""
@web.authenticated
def get(self, path):
path = path.strip('/')
if not self.contents_manager.file_exists(path):
raise web.HTTPError(404, u'File does not exist: %s' % path)
self.write(self.render_template('texteditor.html',
file_path=url_escape(path),
)
)
default_handlers = [
(r"/texteditor%s" % path_regex, EditorHandler),
]Set page title for editor
|
#encoding: utf-8
"""Tornado handlers for the terminal emulator."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from tornado import web
from ..base.handlers import IPythonHandler, path_regex
from ..utils import url_escape
class EditorHandler(IPythonHandler):
"""Render the text editor interface."""
@web.authenticated
def get(self, path):
path = path.strip('/')
if not self.contents_manager.file_exists(path):
raise web.HTTPError(404, u'File does not exist: %s' % path)
self.write(self.render_template('texteditor.html',
file_path=url_escape(path),
page_title=path.rsplit('/', 1)[-1] + " (editing)",
)
)
default_handlers = [
(r"/texteditor%s" % path_regex, EditorHandler),
]
|
<commit_before>#encoding: utf-8
"""Tornado handlers for the terminal emulator."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from tornado import web
from ..base.handlers import IPythonHandler, path_regex
from ..utils import url_escape
class EditorHandler(IPythonHandler):
"""Render the text editor interface."""
@web.authenticated
def get(self, path):
path = path.strip('/')
if not self.contents_manager.file_exists(path):
raise web.HTTPError(404, u'File does not exist: %s' % path)
self.write(self.render_template('texteditor.html',
file_path=url_escape(path),
)
)
default_handlers = [
(r"/texteditor%s" % path_regex, EditorHandler),
]<commit_msg>Set page title for editor<commit_after>
|
#encoding: utf-8
"""Tornado handlers for the terminal emulator."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from tornado import web
from ..base.handlers import IPythonHandler, path_regex
from ..utils import url_escape
class EditorHandler(IPythonHandler):
"""Render the text editor interface."""
@web.authenticated
def get(self, path):
path = path.strip('/')
if not self.contents_manager.file_exists(path):
raise web.HTTPError(404, u'File does not exist: %s' % path)
self.write(self.render_template('texteditor.html',
file_path=url_escape(path),
page_title=path.rsplit('/', 1)[-1] + " (editing)",
)
)
default_handlers = [
(r"/texteditor%s" % path_regex, EditorHandler),
]
|
#encoding: utf-8
"""Tornado handlers for the terminal emulator."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from tornado import web
from ..base.handlers import IPythonHandler, path_regex
from ..utils import url_escape
class EditorHandler(IPythonHandler):
"""Render the text editor interface."""
@web.authenticated
def get(self, path):
path = path.strip('/')
if not self.contents_manager.file_exists(path):
raise web.HTTPError(404, u'File does not exist: %s' % path)
self.write(self.render_template('texteditor.html',
file_path=url_escape(path),
)
)
default_handlers = [
(r"/texteditor%s" % path_regex, EditorHandler),
]Set page title for editor#encoding: utf-8
"""Tornado handlers for the terminal emulator."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from tornado import web
from ..base.handlers import IPythonHandler, path_regex
from ..utils import url_escape
class EditorHandler(IPythonHandler):
"""Render the text editor interface."""
@web.authenticated
def get(self, path):
path = path.strip('/')
if not self.contents_manager.file_exists(path):
raise web.HTTPError(404, u'File does not exist: %s' % path)
self.write(self.render_template('texteditor.html',
file_path=url_escape(path),
page_title=path.rsplit('/', 1)[-1] + " (editing)",
)
)
default_handlers = [
(r"/texteditor%s" % path_regex, EditorHandler),
]
|
<commit_before>#encoding: utf-8
"""Tornado handlers for the terminal emulator."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from tornado import web
from ..base.handlers import IPythonHandler, path_regex
from ..utils import url_escape
class EditorHandler(IPythonHandler):
"""Render the text editor interface."""
@web.authenticated
def get(self, path):
path = path.strip('/')
if not self.contents_manager.file_exists(path):
raise web.HTTPError(404, u'File does not exist: %s' % path)
self.write(self.render_template('texteditor.html',
file_path=url_escape(path),
)
)
default_handlers = [
(r"/texteditor%s" % path_regex, EditorHandler),
]<commit_msg>Set page title for editor<commit_after>#encoding: utf-8
"""Tornado handlers for the terminal emulator."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from tornado import web
from ..base.handlers import IPythonHandler, path_regex
from ..utils import url_escape
class EditorHandler(IPythonHandler):
"""Render the text editor interface."""
@web.authenticated
def get(self, path):
path = path.strip('/')
if not self.contents_manager.file_exists(path):
raise web.HTTPError(404, u'File does not exist: %s' % path)
self.write(self.render_template('texteditor.html',
file_path=url_escape(path),
page_title=path.rsplit('/', 1)[-1] + " (editing)",
)
)
default_handlers = [
(r"/texteditor%s" % path_regex, EditorHandler),
]
|
ecee83e5cbc66c631fce5278bc2533eb2f711afe
|
crust/api.py
|
crust/api.py
|
from . import requests
class Api(object):
resources = {}
def __init__(self, session=None, *args, **kwargs):
super(Api, self).__init__(*args, **kwargs)
if session is None:
session = requests.session()
self.session = session
def __getattr__(self, name):
if name in self.resources:
return self.resources[name]
raise AttributeError("'{0}' object has no attribute '{1}'".format(self.__class__.__name__, name))
@classmethod
def bind(cls, resource):
instance = resource()
cls.resources[instance._meta.resource_name] = resource
return resource
|
import json
import posixpath
from . import requests
from . import six
from .exceptions import ResponseError
class Api(object):
resources = {}
def __init__(self, session=None, *args, **kwargs):
super(Api, self).__init__(*args, **kwargs)
if session is None:
session = requests.session()
self.session = session
def __getattr__(self, name):
if name in self.resources:
return self.resources[name]
raise AttributeError("'{0}' object has no attribute '{1}'".format(self.__class__.__name__, name))
@classmethod
def bind(cls, resource):
instance = resource()
cls.resources[instance._meta.resource_name] = resource
return resource
@staticmethod
def resource_serialize(o):
"""
Returns JSON serialization of given object.
"""
return json.dumps(o)
@staticmethod
def resource_deserialize(s):
"""
Returns dict deserialization of a given JSON string.
"""
try:
return json.loads(s)
except ValueError:
raise ResponseError("The API Response was not valid.")
def url_for(self, *args):
args = [str(arg) for arg in args]
path = posixpath.join(*args)
return "/".join([self.url, path]) + "/"
def http_resource(self, method, resource, url=None, params=None, data=None):
"""
Makes an HTTP request.
"""
if isinstance(resource, six.string_types):
resource = [resource]
url = url or self.url_for(*resource)
r = self.session.request(method, url, params=params, data=data)
r.raise_for_status()
return r
|
Make the Api class the key focal point to accessing the API
|
Make the Api class the key focal point to accessing the API
|
Python
|
bsd-2-clause
|
dstufft/crust
|
from . import requests
class Api(object):
resources = {}
def __init__(self, session=None, *args, **kwargs):
super(Api, self).__init__(*args, **kwargs)
if session is None:
session = requests.session()
self.session = session
def __getattr__(self, name):
if name in self.resources:
return self.resources[name]
raise AttributeError("'{0}' object has no attribute '{1}'".format(self.__class__.__name__, name))
@classmethod
def bind(cls, resource):
instance = resource()
cls.resources[instance._meta.resource_name] = resource
return resource
Make the Api class the key focal point to accessing the API
|
import json
import posixpath
from . import requests
from . import six
from .exceptions import ResponseError
class Api(object):
resources = {}
def __init__(self, session=None, *args, **kwargs):
super(Api, self).__init__(*args, **kwargs)
if session is None:
session = requests.session()
self.session = session
def __getattr__(self, name):
if name in self.resources:
return self.resources[name]
raise AttributeError("'{0}' object has no attribute '{1}'".format(self.__class__.__name__, name))
@classmethod
def bind(cls, resource):
instance = resource()
cls.resources[instance._meta.resource_name] = resource
return resource
@staticmethod
def resource_serialize(o):
"""
Returns JSON serialization of given object.
"""
return json.dumps(o)
@staticmethod
def resource_deserialize(s):
"""
Returns dict deserialization of a given JSON string.
"""
try:
return json.loads(s)
except ValueError:
raise ResponseError("The API Response was not valid.")
def url_for(self, *args):
args = [str(arg) for arg in args]
path = posixpath.join(*args)
return "/".join([self.url, path]) + "/"
def http_resource(self, method, resource, url=None, params=None, data=None):
"""
Makes an HTTP request.
"""
if isinstance(resource, six.string_types):
resource = [resource]
url = url or self.url_for(*resource)
r = self.session.request(method, url, params=params, data=data)
r.raise_for_status()
return r
|
<commit_before>from . import requests
class Api(object):
resources = {}
def __init__(self, session=None, *args, **kwargs):
super(Api, self).__init__(*args, **kwargs)
if session is None:
session = requests.session()
self.session = session
def __getattr__(self, name):
if name in self.resources:
return self.resources[name]
raise AttributeError("'{0}' object has no attribute '{1}'".format(self.__class__.__name__, name))
@classmethod
def bind(cls, resource):
instance = resource()
cls.resources[instance._meta.resource_name] = resource
return resource
<commit_msg>Make the Api class the key focal point to accessing the API<commit_after>
|
import json
import posixpath
from . import requests
from . import six
from .exceptions import ResponseError
class Api(object):
resources = {}
def __init__(self, session=None, *args, **kwargs):
super(Api, self).__init__(*args, **kwargs)
if session is None:
session = requests.session()
self.session = session
def __getattr__(self, name):
if name in self.resources:
return self.resources[name]
raise AttributeError("'{0}' object has no attribute '{1}'".format(self.__class__.__name__, name))
@classmethod
def bind(cls, resource):
instance = resource()
cls.resources[instance._meta.resource_name] = resource
return resource
@staticmethod
def resource_serialize(o):
"""
Returns JSON serialization of given object.
"""
return json.dumps(o)
@staticmethod
def resource_deserialize(s):
"""
Returns dict deserialization of a given JSON string.
"""
try:
return json.loads(s)
except ValueError:
raise ResponseError("The API Response was not valid.")
def url_for(self, *args):
args = [str(arg) for arg in args]
path = posixpath.join(*args)
return "/".join([self.url, path]) + "/"
def http_resource(self, method, resource, url=None, params=None, data=None):
"""
Makes an HTTP request.
"""
if isinstance(resource, six.string_types):
resource = [resource]
url = url or self.url_for(*resource)
r = self.session.request(method, url, params=params, data=data)
r.raise_for_status()
return r
|
from . import requests
class Api(object):
resources = {}
def __init__(self, session=None, *args, **kwargs):
super(Api, self).__init__(*args, **kwargs)
if session is None:
session = requests.session()
self.session = session
def __getattr__(self, name):
if name in self.resources:
return self.resources[name]
raise AttributeError("'{0}' object has no attribute '{1}'".format(self.__class__.__name__, name))
@classmethod
def bind(cls, resource):
instance = resource()
cls.resources[instance._meta.resource_name] = resource
return resource
Make the Api class the key focal point to accessing the APIimport json
import posixpath
from . import requests
from . import six
from .exceptions import ResponseError
class Api(object):
resources = {}
def __init__(self, session=None, *args, **kwargs):
super(Api, self).__init__(*args, **kwargs)
if session is None:
session = requests.session()
self.session = session
def __getattr__(self, name):
if name in self.resources:
return self.resources[name]
raise AttributeError("'{0}' object has no attribute '{1}'".format(self.__class__.__name__, name))
@classmethod
def bind(cls, resource):
instance = resource()
cls.resources[instance._meta.resource_name] = resource
return resource
@staticmethod
def resource_serialize(o):
"""
Returns JSON serialization of given object.
"""
return json.dumps(o)
@staticmethod
def resource_deserialize(s):
"""
Returns dict deserialization of a given JSON string.
"""
try:
return json.loads(s)
except ValueError:
raise ResponseError("The API Response was not valid.")
def url_for(self, *args):
args = [str(arg) for arg in args]
path = posixpath.join(*args)
return "/".join([self.url, path]) + "/"
def http_resource(self, method, resource, url=None, params=None, data=None):
"""
Makes an HTTP request.
"""
if isinstance(resource, six.string_types):
resource = [resource]
url = url or self.url_for(*resource)
r = self.session.request(method, url, params=params, data=data)
r.raise_for_status()
return r
|
<commit_before>from . import requests
class Api(object):
resources = {}
def __init__(self, session=None, *args, **kwargs):
super(Api, self).__init__(*args, **kwargs)
if session is None:
session = requests.session()
self.session = session
def __getattr__(self, name):
if name in self.resources:
return self.resources[name]
raise AttributeError("'{0}' object has no attribute '{1}'".format(self.__class__.__name__, name))
@classmethod
def bind(cls, resource):
instance = resource()
cls.resources[instance._meta.resource_name] = resource
return resource
<commit_msg>Make the Api class the key focal point to accessing the API<commit_after>import json
import posixpath
from . import requests
from . import six
from .exceptions import ResponseError
class Api(object):
resources = {}
def __init__(self, session=None, *args, **kwargs):
super(Api, self).__init__(*args, **kwargs)
if session is None:
session = requests.session()
self.session = session
def __getattr__(self, name):
if name in self.resources:
return self.resources[name]
raise AttributeError("'{0}' object has no attribute '{1}'".format(self.__class__.__name__, name))
@classmethod
def bind(cls, resource):
instance = resource()
cls.resources[instance._meta.resource_name] = resource
return resource
@staticmethod
def resource_serialize(o):
"""
Returns JSON serialization of given object.
"""
return json.dumps(o)
@staticmethod
def resource_deserialize(s):
"""
Returns dict deserialization of a given JSON string.
"""
try:
return json.loads(s)
except ValueError:
raise ResponseError("The API Response was not valid.")
def url_for(self, *args):
args = [str(arg) for arg in args]
path = posixpath.join(*args)
return "/".join([self.url, path]) + "/"
def http_resource(self, method, resource, url=None, params=None, data=None):
"""
Makes an HTTP request.
"""
if isinstance(resource, six.string_types):
resource = [resource]
url = url or self.url_for(*resource)
r = self.session.request(method, url, params=params, data=data)
r.raise_for_status()
return r
|
bd75d4548edfde2bebff116f33bfb66be6c982e2
|
warthog/transport.py
|
warthog/transport.py
|
# -*- coding: utf-8 -*-
"""
"""
from __future__ import print_function, division
import ssl
import warnings
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.poolmanager import PoolManager
from requests.packages.urllib3.exceptions import InsecureRequestWarning
def get_transport(disable_verify=False, use_tlsv1=True):
transport = requests.Session()
if disable_verify:
warnings.filterwarnings("ignore", category=InsecureRequestWarning)
transport.verify = False
if use_tlsv1:
transport.mount('https://', SSLTLSV1Adapter())
return transport
class SSLTLSV1Adapter(HTTPAdapter):
""""Transport adapter that allows us to use TLSv1 which is required
for interacting with the A10 load balancer.
"""
def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
self.poolmanager = PoolManager(
num_pools=connections, maxsize=maxsize, block=block,
ssl_version=ssl.PROTOCOL_TLSv1, **pool_kwargs)
|
# -*- coding: utf-8 -*-
"""
"""
from __future__ import print_function, division
import ssl
import warnings
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.poolmanager import PoolManager
from requests.packages.urllib3.exceptions import InsecureRequestWarning
def get_transport(verify=True, use_tlsv1=True):
transport = requests.Session()
if not verify:
warnings.filterwarnings("ignore", category=InsecureRequestWarning)
transport.verify = False
if use_tlsv1:
transport.mount('https://', SSLTLSV1Adapter())
return transport
class SSLTLSV1Adapter(HTTPAdapter):
""""Transport adapter that allows us to use TLSv1 which is required
for interacting with the A10 load balancer.
"""
def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
self.poolmanager = PoolManager(
num_pools=connections, maxsize=maxsize, block=block,
ssl_version=ssl.PROTOCOL_TLSv1, **pool_kwargs)
|
Change the name of the verify/no-verify param
|
Change the name of the verify/no-verify param
|
Python
|
mit
|
smarter-travel-media/warthog
|
# -*- coding: utf-8 -*-
"""
"""
from __future__ import print_function, division
import ssl
import warnings
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.poolmanager import PoolManager
from requests.packages.urllib3.exceptions import InsecureRequestWarning
def get_transport(disable_verify=False, use_tlsv1=True):
transport = requests.Session()
if disable_verify:
warnings.filterwarnings("ignore", category=InsecureRequestWarning)
transport.verify = False
if use_tlsv1:
transport.mount('https://', SSLTLSV1Adapter())
return transport
class SSLTLSV1Adapter(HTTPAdapter):
""""Transport adapter that allows us to use TLSv1 which is required
for interacting with the A10 load balancer.
"""
def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
self.poolmanager = PoolManager(
num_pools=connections, maxsize=maxsize, block=block,
ssl_version=ssl.PROTOCOL_TLSv1, **pool_kwargs)
Change the name of the verify/no-verify param
|
# -*- coding: utf-8 -*-
"""
"""
from __future__ import print_function, division
import ssl
import warnings
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.poolmanager import PoolManager
from requests.packages.urllib3.exceptions import InsecureRequestWarning
def get_transport(verify=True, use_tlsv1=True):
transport = requests.Session()
if not verify:
warnings.filterwarnings("ignore", category=InsecureRequestWarning)
transport.verify = False
if use_tlsv1:
transport.mount('https://', SSLTLSV1Adapter())
return transport
class SSLTLSV1Adapter(HTTPAdapter):
""""Transport adapter that allows us to use TLSv1 which is required
for interacting with the A10 load balancer.
"""
def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
self.poolmanager = PoolManager(
num_pools=connections, maxsize=maxsize, block=block,
ssl_version=ssl.PROTOCOL_TLSv1, **pool_kwargs)
|
<commit_before># -*- coding: utf-8 -*-
"""
"""
from __future__ import print_function, division
import ssl
import warnings
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.poolmanager import PoolManager
from requests.packages.urllib3.exceptions import InsecureRequestWarning
def get_transport(disable_verify=False, use_tlsv1=True):
transport = requests.Session()
if disable_verify:
warnings.filterwarnings("ignore", category=InsecureRequestWarning)
transport.verify = False
if use_tlsv1:
transport.mount('https://', SSLTLSV1Adapter())
return transport
class SSLTLSV1Adapter(HTTPAdapter):
""""Transport adapter that allows us to use TLSv1 which is required
for interacting with the A10 load balancer.
"""
def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
self.poolmanager = PoolManager(
num_pools=connections, maxsize=maxsize, block=block,
ssl_version=ssl.PROTOCOL_TLSv1, **pool_kwargs)
<commit_msg>Change the name of the verify/no-verify param<commit_after>
|
# -*- coding: utf-8 -*-
"""
"""
from __future__ import print_function, division
import ssl
import warnings
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.poolmanager import PoolManager
from requests.packages.urllib3.exceptions import InsecureRequestWarning
def get_transport(verify=True, use_tlsv1=True):
transport = requests.Session()
if not verify:
warnings.filterwarnings("ignore", category=InsecureRequestWarning)
transport.verify = False
if use_tlsv1:
transport.mount('https://', SSLTLSV1Adapter())
return transport
class SSLTLSV1Adapter(HTTPAdapter):
""""Transport adapter that allows us to use TLSv1 which is required
for interacting with the A10 load balancer.
"""
def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
self.poolmanager = PoolManager(
num_pools=connections, maxsize=maxsize, block=block,
ssl_version=ssl.PROTOCOL_TLSv1, **pool_kwargs)
|
# -*- coding: utf-8 -*-
"""
"""
from __future__ import print_function, division
import ssl
import warnings
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.poolmanager import PoolManager
from requests.packages.urllib3.exceptions import InsecureRequestWarning
def get_transport(disable_verify=False, use_tlsv1=True):
transport = requests.Session()
if disable_verify:
warnings.filterwarnings("ignore", category=InsecureRequestWarning)
transport.verify = False
if use_tlsv1:
transport.mount('https://', SSLTLSV1Adapter())
return transport
class SSLTLSV1Adapter(HTTPAdapter):
""""Transport adapter that allows us to use TLSv1 which is required
for interacting with the A10 load balancer.
"""
def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
self.poolmanager = PoolManager(
num_pools=connections, maxsize=maxsize, block=block,
ssl_version=ssl.PROTOCOL_TLSv1, **pool_kwargs)
Change the name of the verify/no-verify param

# -*- coding: utf-8 -*-
"""
"""
from __future__ import print_function, division
import ssl
import warnings
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.poolmanager import PoolManager
from requests.packages.urllib3.exceptions import InsecureRequestWarning
def get_transport(verify=True, use_tlsv1=True):
transport = requests.Session()
if not verify:
warnings.filterwarnings("ignore", category=InsecureRequestWarning)
transport.verify = False
if use_tlsv1:
transport.mount('https://', SSLTLSV1Adapter())
return transport
class SSLTLSV1Adapter(HTTPAdapter):
""""Transport adapter that allows us to use TLSv1 which is required
for interacting with the A10 load balancer.
"""
def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
self.poolmanager = PoolManager(
num_pools=connections, maxsize=maxsize, block=block,
ssl_version=ssl.PROTOCOL_TLSv1, **pool_kwargs)
|
<commit_before># -*- coding: utf-8 -*-
"""
"""
from __future__ import print_function, division
import ssl
import warnings
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.poolmanager import PoolManager
from requests.packages.urllib3.exceptions import InsecureRequestWarning
def get_transport(disable_verify=False, use_tlsv1=True):
transport = requests.Session()
if disable_verify:
warnings.filterwarnings("ignore", category=InsecureRequestWarning)
transport.verify = False
if use_tlsv1:
transport.mount('https://', SSLTLSV1Adapter())
return transport
class SSLTLSV1Adapter(HTTPAdapter):
""""Transport adapter that allows us to use TLSv1 which is required
for interacting with the A10 load balancer.
"""
def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
self.poolmanager = PoolManager(
num_pools=connections, maxsize=maxsize, block=block,
ssl_version=ssl.PROTOCOL_TLSv1, **pool_kwargs)
<commit_msg>Change the name of the verify/no-verify param<commit_after># -*- coding: utf-8 -*-
"""
"""
from __future__ import print_function, division
import ssl
import warnings
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.poolmanager import PoolManager
from requests.packages.urllib3.exceptions import InsecureRequestWarning
def get_transport(verify=True, use_tlsv1=True):
transport = requests.Session()
if not verify:
warnings.filterwarnings("ignore", category=InsecureRequestWarning)
transport.verify = False
if use_tlsv1:
transport.mount('https://', SSLTLSV1Adapter())
return transport
class SSLTLSV1Adapter(HTTPAdapter):
""""Transport adapter that allows us to use TLSv1 which is required
for interacting with the A10 load balancer.
"""
def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
self.poolmanager = PoolManager(
num_pools=connections, maxsize=maxsize, block=block,
ssl_version=ssl.PROTOCOL_TLSv1, **pool_kwargs)
|
f232e433e6d4ad4f7c54a6d119c27a04743acee8
|
graphene/types/argument.py
|
graphene/types/argument.py
|
from collections import OrderedDict
from itertools import chain
from ..utils.orderedtype import OrderedType
from .structures import NonNull
class Argument(OrderedType):
def __init__(self, type, default_value=None, description=None, name=None, required=False, _creation_counter=None):
super(Argument, self).__init__(_creation_counter=_creation_counter)
if required:
type = NonNull(type)
self.name = name
self.type = type
self.default_value = default_value
self.description = description
def to_arguments(args, extra_args):
from .unmountedtype import UnmountedType
extra_args = sorted(extra_args.items(), key=lambda f: f[1])
iter_arguments = chain(args.items(), extra_args)
arguments = OrderedDict()
for default_name, arg in iter_arguments:
if isinstance(arg, UnmountedType):
arg = arg.Argument()
if not isinstance(arg, Argument):
raise ValueError('Unknown argument "{}".'.format(default_name))
arg_name = default_name or arg.name
assert arg_name not in arguments, 'More than one Argument have same name "{}".'.format(arg.name)
arguments[arg_name] = arg
return arguments
|
from collections import OrderedDict
from itertools import chain
from ..utils.orderedtype import OrderedType
from .structures import NonNull
class Argument(OrderedType):
    """A single argument of a GraphQL field.

    Ordering relative to sibling arguments comes from OrderedType's
    creation counter.
    """
    def __init__(self, type, default_value=None, description=None, name=None, required=False, _creation_counter=None):
        super(Argument, self).__init__(_creation_counter=_creation_counter)
        if required:
            # required=True is expressed by wrapping the type in NonNull.
            type = NonNull(type)
        self.name = name
        self.type = type
        self.default_value = default_value
        self.description = description
def to_arguments(args, extra_args):
    """Merge *args* and *extra_args* into an OrderedDict of Arguments.

    Extra arguments are appended after *args*, sorted by their
    creation-counter value; UnmountedType entries are mounted via
    their ``.Argument()`` factory.  Raises ValueError for entries that
    are neither, and asserts that no two arguments share a name.
    """
    from .unmountedtype import UnmountedType

    ordered_extra = sorted(extra_args.items(), key=lambda item: item[1])
    collected = OrderedDict()
    for default_name, value in chain(args.items(), ordered_extra):
        if isinstance(value, UnmountedType):
            value = value.Argument()
        if not isinstance(value, Argument):
            raise ValueError('Unknown argument "{}".'.format(default_name))
        key = default_name or value.name
        assert key not in collected, 'More than one Argument have same name "{}".'.format(key)
        collected[key] = value
    return collected
|
Make assertion message use same var as test
|
Make assertion message use same var as test
Ensure that the assertion message correctly shows the value used by
the assertion test.
|
Python
|
mit
|
sjhewitt/graphene,sjhewitt/graphene,Globegitter/graphene,Globegitter/graphene,graphql-python/graphene,graphql-python/graphene
|
from collections import OrderedDict
from itertools import chain
from ..utils.orderedtype import OrderedType
from .structures import NonNull
class Argument(OrderedType):
def __init__(self, type, default_value=None, description=None, name=None, required=False, _creation_counter=None):
super(Argument, self).__init__(_creation_counter=_creation_counter)
if required:
type = NonNull(type)
self.name = name
self.type = type
self.default_value = default_value
self.description = description
def to_arguments(args, extra_args):
from .unmountedtype import UnmountedType
extra_args = sorted(extra_args.items(), key=lambda f: f[1])
iter_arguments = chain(args.items(), extra_args)
arguments = OrderedDict()
for default_name, arg in iter_arguments:
if isinstance(arg, UnmountedType):
arg = arg.Argument()
if not isinstance(arg, Argument):
raise ValueError('Unknown argument "{}".'.format(default_name))
arg_name = default_name or arg.name
assert arg_name not in arguments, 'More than one Argument have same name "{}".'.format(arg.name)
arguments[arg_name] = arg
return arguments
Make assertion message use same var as test
Ensure that the assertion message correctly shows the value used by
the assertion test.
|
from collections import OrderedDict
from itertools import chain
from ..utils.orderedtype import OrderedType
from .structures import NonNull
class Argument(OrderedType):
def __init__(self, type, default_value=None, description=None, name=None, required=False, _creation_counter=None):
super(Argument, self).__init__(_creation_counter=_creation_counter)
if required:
type = NonNull(type)
self.name = name
self.type = type
self.default_value = default_value
self.description = description
def to_arguments(args, extra_args):
from .unmountedtype import UnmountedType
extra_args = sorted(extra_args.items(), key=lambda f: f[1])
iter_arguments = chain(args.items(), extra_args)
arguments = OrderedDict()
for default_name, arg in iter_arguments:
if isinstance(arg, UnmountedType):
arg = arg.Argument()
if not isinstance(arg, Argument):
raise ValueError('Unknown argument "{}".'.format(default_name))
arg_name = default_name or arg.name
assert arg_name not in arguments, 'More than one Argument have same name "{}".'.format(arg_name)
arguments[arg_name] = arg
return arguments
|
<commit_before>from collections import OrderedDict
from itertools import chain
from ..utils.orderedtype import OrderedType
from .structures import NonNull
class Argument(OrderedType):
def __init__(self, type, default_value=None, description=None, name=None, required=False, _creation_counter=None):
super(Argument, self).__init__(_creation_counter=_creation_counter)
if required:
type = NonNull(type)
self.name = name
self.type = type
self.default_value = default_value
self.description = description
def to_arguments(args, extra_args):
from .unmountedtype import UnmountedType
extra_args = sorted(extra_args.items(), key=lambda f: f[1])
iter_arguments = chain(args.items(), extra_args)
arguments = OrderedDict()
for default_name, arg in iter_arguments:
if isinstance(arg, UnmountedType):
arg = arg.Argument()
if not isinstance(arg, Argument):
raise ValueError('Unknown argument "{}".'.format(default_name))
arg_name = default_name or arg.name
assert arg_name not in arguments, 'More than one Argument have same name "{}".'.format(arg.name)
arguments[arg_name] = arg
return arguments
<commit_msg>Make assertion message use same var as test
Ensure that the assertion message correctly shows the value used by
the assertion test.<commit_after>
|
from collections import OrderedDict
from itertools import chain
from ..utils.orderedtype import OrderedType
from .structures import NonNull
class Argument(OrderedType):
def __init__(self, type, default_value=None, description=None, name=None, required=False, _creation_counter=None):
super(Argument, self).__init__(_creation_counter=_creation_counter)
if required:
type = NonNull(type)
self.name = name
self.type = type
self.default_value = default_value
self.description = description
def to_arguments(args, extra_args):
from .unmountedtype import UnmountedType
extra_args = sorted(extra_args.items(), key=lambda f: f[1])
iter_arguments = chain(args.items(), extra_args)
arguments = OrderedDict()
for default_name, arg in iter_arguments:
if isinstance(arg, UnmountedType):
arg = arg.Argument()
if not isinstance(arg, Argument):
raise ValueError('Unknown argument "{}".'.format(default_name))
arg_name = default_name or arg.name
assert arg_name not in arguments, 'More than one Argument have same name "{}".'.format(arg_name)
arguments[arg_name] = arg
return arguments
|
from collections import OrderedDict
from itertools import chain
from ..utils.orderedtype import OrderedType
from .structures import NonNull
class Argument(OrderedType):
def __init__(self, type, default_value=None, description=None, name=None, required=False, _creation_counter=None):
super(Argument, self).__init__(_creation_counter=_creation_counter)
if required:
type = NonNull(type)
self.name = name
self.type = type
self.default_value = default_value
self.description = description
def to_arguments(args, extra_args):
from .unmountedtype import UnmountedType
extra_args = sorted(extra_args.items(), key=lambda f: f[1])
iter_arguments = chain(args.items(), extra_args)
arguments = OrderedDict()
for default_name, arg in iter_arguments:
if isinstance(arg, UnmountedType):
arg = arg.Argument()
if not isinstance(arg, Argument):
raise ValueError('Unknown argument "{}".'.format(default_name))
arg_name = default_name or arg.name
assert arg_name not in arguments, 'More than one Argument have same name "{}".'.format(arg.name)
arguments[arg_name] = arg
return arguments
Make assertion message use same var as test
Ensure that the assertion message correctly shows the value used by
the assertion test.

from collections import OrderedDict
from itertools import chain
from ..utils.orderedtype import OrderedType
from .structures import NonNull
class Argument(OrderedType):
def __init__(self, type, default_value=None, description=None, name=None, required=False, _creation_counter=None):
super(Argument, self).__init__(_creation_counter=_creation_counter)
if required:
type = NonNull(type)
self.name = name
self.type = type
self.default_value = default_value
self.description = description
def to_arguments(args, extra_args):
from .unmountedtype import UnmountedType
extra_args = sorted(extra_args.items(), key=lambda f: f[1])
iter_arguments = chain(args.items(), extra_args)
arguments = OrderedDict()
for default_name, arg in iter_arguments:
if isinstance(arg, UnmountedType):
arg = arg.Argument()
if not isinstance(arg, Argument):
raise ValueError('Unknown argument "{}".'.format(default_name))
arg_name = default_name or arg.name
assert arg_name not in arguments, 'More than one Argument have same name "{}".'.format(arg_name)
arguments[arg_name] = arg
return arguments
|
<commit_before>from collections import OrderedDict
from itertools import chain
from ..utils.orderedtype import OrderedType
from .structures import NonNull
class Argument(OrderedType):
def __init__(self, type, default_value=None, description=None, name=None, required=False, _creation_counter=None):
super(Argument, self).__init__(_creation_counter=_creation_counter)
if required:
type = NonNull(type)
self.name = name
self.type = type
self.default_value = default_value
self.description = description
def to_arguments(args, extra_args):
from .unmountedtype import UnmountedType
extra_args = sorted(extra_args.items(), key=lambda f: f[1])
iter_arguments = chain(args.items(), extra_args)
arguments = OrderedDict()
for default_name, arg in iter_arguments:
if isinstance(arg, UnmountedType):
arg = arg.Argument()
if not isinstance(arg, Argument):
raise ValueError('Unknown argument "{}".'.format(default_name))
arg_name = default_name or arg.name
assert arg_name not in arguments, 'More than one Argument have same name "{}".'.format(arg.name)
arguments[arg_name] = arg
return arguments
<commit_msg>Make assertion message use same var as test
Ensure that the assertion message correctly shows the value used by
the assertion test.<commit_after>from collections import OrderedDict
from itertools import chain
from ..utils.orderedtype import OrderedType
from .structures import NonNull
class Argument(OrderedType):
def __init__(self, type, default_value=None, description=None, name=None, required=False, _creation_counter=None):
super(Argument, self).__init__(_creation_counter=_creation_counter)
if required:
type = NonNull(type)
self.name = name
self.type = type
self.default_value = default_value
self.description = description
def to_arguments(args, extra_args):
from .unmountedtype import UnmountedType
extra_args = sorted(extra_args.items(), key=lambda f: f[1])
iter_arguments = chain(args.items(), extra_args)
arguments = OrderedDict()
for default_name, arg in iter_arguments:
if isinstance(arg, UnmountedType):
arg = arg.Argument()
if not isinstance(arg, Argument):
raise ValueError('Unknown argument "{}".'.format(default_name))
arg_name = default_name or arg.name
assert arg_name not in arguments, 'More than one Argument have same name "{}".'.format(arg_name)
arguments[arg_name] = arg
return arguments
|
d75cd0e1a8894f86877f455abd8f281f0d3a246a
|
flake8diff/vcs/hg.py
|
flake8diff/vcs/hg.py
|
from __future__ import unicode_literals, print_function
import logging
import subprocess
from ..utils import _execute
from .base import VCSBase
logger = logging.getLogger(__name__)
class HgVCS(VCSBase):
"""
Mercurial support implementation
"""
name = 'hg'
def get_vcs(self):
"""
Get git binary executable path
"""
vcs = _execute('which hg', strict=True).strip()
self._check_extdiff_extension(vcs)
return vcs
def is_used(self):
"""
Determines if this VCS should be used
TODO: implement
"""
return True
def _check_extdiff_extension(self, vcs):
try:
return _execute('{vcs} extdiff'.format(vcs=vcs), strict=True)
except subprocess.CalledProcessError:
message = (
"Mercurial 'extdiff' extension is disabled.\n"
"Please add the following lines to your ~/.hgrc\n\n"
"[extensions]\n"
"extdiff = \n")
print(message)
raise Exception("Please enable 'extdiff' extension")
|
from __future__ import unicode_literals, print_function
import logging
import subprocess
from ..utils import _execute
from .base import VCSBase
logger = logging.getLogger(__name__)
class HgVCS(VCSBase):
"""
Mercurial support implementation
"""
name = 'hg'
def get_vcs(self):
"""
Get git binary executable path
"""
vcs = _execute('which hg', strict=True).strip()
self._check_extdiff_extension(vcs)
return vcs
def is_used(self):
"""
Determines if this VCS should be used
TODO: implement
"""
return True
def changed_files(self):
"""
Return a list of all changed files.
"""
commits = ['-r {}'.format(c) for c in self.commits]
command = [self.vcs, 'diff', '--stat'] + commits
result = _execute(' '.join(command))
lines = result.strip().split('\n')[:-1]
files = [
line.split('|')[0].strip()
for line in lines
]
return files
def _check_extdiff_extension(self, vcs):
try:
return _execute('{vcs} extdiff'.format(vcs=vcs), strict=True)
except subprocess.CalledProcessError:
message = (
"Mercurial 'extdiff' extension is disabled.\n"
"Please add the following lines to your ~/.hgrc\n\n"
"[extensions]\n"
"extdiff = \n")
print(message)
raise Exception("Please enable 'extdiff' extension")
|
Add changed_files support to mercurial
|
Add changed_files support to mercurial
|
Python
|
mit
|
dealertrack/flake8-diff,miki725/flake8-diff
|
from __future__ import unicode_literals, print_function
import logging
import subprocess
from ..utils import _execute
from .base import VCSBase
logger = logging.getLogger(__name__)
class HgVCS(VCSBase):
"""
Mercurial support implementation
"""
name = 'hg'
def get_vcs(self):
"""
Get git binary executable path
"""
vcs = _execute('which hg', strict=True).strip()
self._check_extdiff_extension(vcs)
return vcs
def is_used(self):
"""
Determines if this VCS should be used
TODO: implement
"""
return True
def _check_extdiff_extension(self, vcs):
try:
return _execute('{vcs} extdiff'.format(vcs=vcs), strict=True)
except subprocess.CalledProcessError:
message = (
"Mercurial 'extdiff' extension is disabled.\n"
"Please add the following lines to your ~/.hgrc\n\n"
"[extensions]\n"
"extdiff = \n")
print(message)
raise Exception("Please enable 'extdiff' extension")
Add changed_files support to mercurial
|
from __future__ import unicode_literals, print_function
import logging
import subprocess
from ..utils import _execute
from .base import VCSBase
logger = logging.getLogger(__name__)
class HgVCS(VCSBase):
    """
    Mercurial support implementation
    """
    name = 'hg'

    def get_vcs(self):
        """
        Get hg binary executable path
        """
        # 'which' fails loudly (strict=True) if hg is not on PATH.
        vcs = _execute('which hg', strict=True).strip()
        self._check_extdiff_extension(vcs)
        return vcs

    def is_used(self):
        """
        Determines if this VCS should be used
        TODO: implement
        """
        return True

    def changed_files(self):
        """
        Return a list of all changed files.
        """
        # Build "hg diff --stat -r A -r B ..." from the configured
        # commits (self.commits is provided by VCSBase — see base class).
        commits = ['-r {}'.format(c) for c in self.commits]
        command = [self.vcs, 'diff', '--stat'] + commits
        result = _execute(' '.join(command))
        # Drop the trailing summary line ("N files changed, ...");
        # each remaining line looks like " path/to/file |  12 ++--".
        lines = result.strip().split('\n')[:-1]
        files = [
            line.split('|')[0].strip()
            for line in lines
        ]
        return files

    def _check_extdiff_extension(self, vcs):
        # 'hg extdiff' exits non-zero when the extension is disabled;
        # surface actionable setup instructions before failing.
        try:
            return _execute('{vcs} extdiff'.format(vcs=vcs), strict=True)
        except subprocess.CalledProcessError:
            message = (
                "Mercurial 'extdiff' extension is disabled.\n"
                "Please add the following lines to your ~/.hgrc\n\n"
                "[extensions]\n"
                "extdiff = \n")
            print(message)
            raise Exception("Please enable 'extdiff' extension")
|
<commit_before>from __future__ import unicode_literals, print_function
import logging
import subprocess
from ..utils import _execute
from .base import VCSBase
logger = logging.getLogger(__name__)
class HgVCS(VCSBase):
"""
Mercurial support implementation
"""
name = 'hg'
def get_vcs(self):
"""
Get git binary executable path
"""
vcs = _execute('which hg', strict=True).strip()
self._check_extdiff_extension(vcs)
return vcs
def is_used(self):
"""
Determines if this VCS should be used
TODO: implement
"""
return True
def _check_extdiff_extension(self, vcs):
try:
return _execute('{vcs} extdiff'.format(vcs=vcs), strict=True)
except subprocess.CalledProcessError:
message = (
"Mercurial 'extdiff' extension is disabled.\n"
"Please add the following lines to your ~/.hgrc\n\n"
"[extensions]\n"
"extdiff = \n")
print(message)
raise Exception("Please enable 'extdiff' extension")
<commit_msg>Add changed_files support to mercurial<commit_after>
|
from __future__ import unicode_literals, print_function
import logging
import subprocess
from ..utils import _execute
from .base import VCSBase
logger = logging.getLogger(__name__)
class HgVCS(VCSBase):
"""
Mercurial support implementation
"""
name = 'hg'
def get_vcs(self):
"""
Get git binary executable path
"""
vcs = _execute('which hg', strict=True).strip()
self._check_extdiff_extension(vcs)
return vcs
def is_used(self):
"""
Determines if this VCS should be used
TODO: implement
"""
return True
def changed_files(self):
"""
Return a list of all changed files.
"""
commits = ['-r {}'.format(c) for c in self.commits]
command = [self.vcs, 'diff', '--stat'] + commits
result = _execute(' '.join(command))
lines = result.strip().split('\n')[:-1]
files = [
line.split('|')[0].strip()
for line in lines
]
return files
def _check_extdiff_extension(self, vcs):
try:
return _execute('{vcs} extdiff'.format(vcs=vcs), strict=True)
except subprocess.CalledProcessError:
message = (
"Mercurial 'extdiff' extension is disabled.\n"
"Please add the following lines to your ~/.hgrc\n\n"
"[extensions]\n"
"extdiff = \n")
print(message)
raise Exception("Please enable 'extdiff' extension")
|
from __future__ import unicode_literals, print_function
import logging
import subprocess
from ..utils import _execute
from .base import VCSBase
logger = logging.getLogger(__name__)
class HgVCS(VCSBase):
"""
Mercurial support implementation
"""
name = 'hg'
def get_vcs(self):
"""
Get git binary executable path
"""
vcs = _execute('which hg', strict=True).strip()
self._check_extdiff_extension(vcs)
return vcs
def is_used(self):
"""
Determines if this VCS should be used
TODO: implement
"""
return True
def _check_extdiff_extension(self, vcs):
try:
return _execute('{vcs} extdiff'.format(vcs=vcs), strict=True)
except subprocess.CalledProcessError:
message = (
"Mercurial 'extdiff' extension is disabled.\n"
"Please add the following lines to your ~/.hgrc\n\n"
"[extensions]\n"
"extdiff = \n")
print(message)
raise Exception("Please enable 'extdiff' extension")
Add changed_files support to mercurial

from __future__ import unicode_literals, print_function
import logging
import subprocess
from ..utils import _execute
from .base import VCSBase
logger = logging.getLogger(__name__)
class HgVCS(VCSBase):
"""
Mercurial support implementation
"""
name = 'hg'
def get_vcs(self):
"""
Get git binary executable path
"""
vcs = _execute('which hg', strict=True).strip()
self._check_extdiff_extension(vcs)
return vcs
def is_used(self):
"""
Determines if this VCS should be used
TODO: implement
"""
return True
def changed_files(self):
"""
Return a list of all changed files.
"""
commits = ['-r {}'.format(c) for c in self.commits]
command = [self.vcs, 'diff', '--stat'] + commits
result = _execute(' '.join(command))
lines = result.strip().split('\n')[:-1]
files = [
line.split('|')[0].strip()
for line in lines
]
return files
def _check_extdiff_extension(self, vcs):
try:
return _execute('{vcs} extdiff'.format(vcs=vcs), strict=True)
except subprocess.CalledProcessError:
message = (
"Mercurial 'extdiff' extension is disabled.\n"
"Please add the following lines to your ~/.hgrc\n\n"
"[extensions]\n"
"extdiff = \n")
print(message)
raise Exception("Please enable 'extdiff' extension")
|
<commit_before>from __future__ import unicode_literals, print_function
import logging
import subprocess
from ..utils import _execute
from .base import VCSBase
logger = logging.getLogger(__name__)
class HgVCS(VCSBase):
"""
Mercurial support implementation
"""
name = 'hg'
def get_vcs(self):
"""
Get git binary executable path
"""
vcs = _execute('which hg', strict=True).strip()
self._check_extdiff_extension(vcs)
return vcs
def is_used(self):
"""
Determines if this VCS should be used
TODO: implement
"""
return True
def _check_extdiff_extension(self, vcs):
try:
return _execute('{vcs} extdiff'.format(vcs=vcs), strict=True)
except subprocess.CalledProcessError:
message = (
"Mercurial 'extdiff' extension is disabled.\n"
"Please add the following lines to your ~/.hgrc\n\n"
"[extensions]\n"
"extdiff = \n")
print(message)
raise Exception("Please enable 'extdiff' extension")
<commit_msg>Add changed_files support to mercurial<commit_after>from __future__ import unicode_literals, print_function
import logging
import subprocess
from ..utils import _execute
from .base import VCSBase
logger = logging.getLogger(__name__)
class HgVCS(VCSBase):
"""
Mercurial support implementation
"""
name = 'hg'
def get_vcs(self):
"""
Get git binary executable path
"""
vcs = _execute('which hg', strict=True).strip()
self._check_extdiff_extension(vcs)
return vcs
def is_used(self):
"""
Determines if this VCS should be used
TODO: implement
"""
return True
def changed_files(self):
"""
Return a list of all changed files.
"""
commits = ['-r {}'.format(c) for c in self.commits]
command = [self.vcs, 'diff', '--stat'] + commits
result = _execute(' '.join(command))
lines = result.strip().split('\n')[:-1]
files = [
line.split('|')[0].strip()
for line in lines
]
return files
def _check_extdiff_extension(self, vcs):
try:
return _execute('{vcs} extdiff'.format(vcs=vcs), strict=True)
except subprocess.CalledProcessError:
message = (
"Mercurial 'extdiff' extension is disabled.\n"
"Please add the following lines to your ~/.hgrc\n\n"
"[extensions]\n"
"extdiff = \n")
print(message)
raise Exception("Please enable 'extdiff' extension")
|
68af617bf9f69ea6a35fc2d57c35853867f03c88
|
web/ext/contentment.py
|
web/ext/contentment.py
|
# encoding: utf-8
from marrow.package.host import PluginManager
def indent(context, lines, padding='\t'):
    """Prefix every line of *lines* with *padding*.

    *context* follows the template-helper calling convention but is
    unused here.
    """
    prefixed = (padding + line for line in lines.split('\n'))
    return '\n'.join(prefixed)
class ContentmentExtension:
    """WebCore extension that wires Contentment into the application."""

    # Extensions that must be initialized before this one.
    needs = ('template', )

    def __call__(self, context, app):
        # WSGI middleware factory wrapping the downstream application.
        def protected_inner(environ, start_response=None):
            try:
                return app(environ, start_response)
            except:
                # NOTE(review): bare except drops into a pudb
                # post-mortem and then falls through, returning None to
                # the WSGI server — debug-only behavior; presumably not
                # intended for production. Confirm before shipping.
                import pudb; pudb.post_mortem()
        return protected_inner

    def start(self, context):
        log = __import__('logging').getLogger(__name__)
        log.info("Starting Contentment extension.")
        # Expose the indent() helper to the template namespace.
        context.namespace.indent = indent
        # Enumerate registered component plugins; currently log-only.
        for asset_type in PluginManager('web.component'):
            log.info("Found asset type: " + repr(asset_type))
            # registry.register(render_asset, Asset)
|
# encoding: utf-8
from marrow.package.host import PluginManager
def indent(context, lines, padding='\t'):
return padding + ('\n' + padding).join(lines.split('\n'))
class ContentmentExtension:
needs = ('template', )
def __call__(self, context, app):
def protected_inner(environ, start_response=None):
try:
return app(environ, start_response)
except:
if __debug__:
try:
import pudb; pudb.post_mortem()
except:
pass
raise
return protected_inner
def start(self, context):
log = __import__('logging').getLogger(__name__)
log.info("Starting Contentment extension.")
context.namespace.indent = indent
for asset_type in PluginManager('web.component'):
log.info("Found asset type: " + repr(asset_type))
# registry.register(render_asset, Asset)
|
Mark pudb post_mortem as debug-only, and optional.
|
Mark pudb post_mortem as debug-only, and optional.
|
Python
|
mit
|
marrow/contentment,marrow/contentment
|
# encoding: utf-8
from marrow.package.host import PluginManager
def indent(context, lines, padding='\t'):
return padding + ('\n' + padding).join(lines.split('\n'))
class ContentmentExtension:
needs = ('template', )
def __call__(self, context, app):
def protected_inner(environ, start_response=None):
try:
return app(environ, start_response)
except:
import pudb; pudb.post_mortem()
return protected_inner
def start(self, context):
log = __import__('logging').getLogger(__name__)
log.info("Starting Contentment extension.")
context.namespace.indent = indent
for asset_type in PluginManager('web.component'):
log.info("Found asset type: " + repr(asset_type))
# registry.register(render_asset, Asset)
Mark pudb post_mortem as debug-only, and optional.
|
# encoding: utf-8
from marrow.package.host import PluginManager
def indent(context, lines, padding='\t'):
return padding + ('\n' + padding).join(lines.split('\n'))
class ContentmentExtension:
needs = ('template', )
def __call__(self, context, app):
def protected_inner(environ, start_response=None):
try:
return app(environ, start_response)
except:
if __debug__:
try:
import pudb; pudb.post_mortem()
except:
pass
raise
return protected_inner
def start(self, context):
log = __import__('logging').getLogger(__name__)
log.info("Starting Contentment extension.")
context.namespace.indent = indent
for asset_type in PluginManager('web.component'):
log.info("Found asset type: " + repr(asset_type))
# registry.register(render_asset, Asset)
|
<commit_before># encoding: utf-8
from marrow.package.host import PluginManager
def indent(context, lines, padding='\t'):
return padding + ('\n' + padding).join(lines.split('\n'))
class ContentmentExtension:
needs = ('template', )
def __call__(self, context, app):
def protected_inner(environ, start_response=None):
try:
return app(environ, start_response)
except:
import pudb; pudb.post_mortem()
return protected_inner
def start(self, context):
log = __import__('logging').getLogger(__name__)
log.info("Starting Contentment extension.")
context.namespace.indent = indent
for asset_type in PluginManager('web.component'):
log.info("Found asset type: " + repr(asset_type))
# registry.register(render_asset, Asset)
<commit_msg>Mark pudb post_mortem as debug-only, and optional.<commit_after>
|
# encoding: utf-8
from marrow.package.host import PluginManager
def indent(context, lines, padding='\t'):
return padding + ('\n' + padding).join(lines.split('\n'))
class ContentmentExtension:
needs = ('template', )
def __call__(self, context, app):
def protected_inner(environ, start_response=None):
try:
return app(environ, start_response)
except:
if __debug__:
try:
import pudb; pudb.post_mortem()
except:
pass
raise
return protected_inner
def start(self, context):
log = __import__('logging').getLogger(__name__)
log.info("Starting Contentment extension.")
context.namespace.indent = indent
for asset_type in PluginManager('web.component'):
log.info("Found asset type: " + repr(asset_type))
# registry.register(render_asset, Asset)
|
# encoding: utf-8
from marrow.package.host import PluginManager
def indent(context, lines, padding='\t'):
return padding + ('\n' + padding).join(lines.split('\n'))
class ContentmentExtension:
needs = ('template', )
def __call__(self, context, app):
def protected_inner(environ, start_response=None):
try:
return app(environ, start_response)
except:
import pudb; pudb.post_mortem()
return protected_inner
def start(self, context):
log = __import__('logging').getLogger(__name__)
log.info("Starting Contentment extension.")
context.namespace.indent = indent
for asset_type in PluginManager('web.component'):
log.info("Found asset type: " + repr(asset_type))
# registry.register(render_asset, Asset)
Mark pudb post_mortem as debug-only, and optional.# encoding: utf-8
from marrow.package.host import PluginManager
def indent(context, lines, padding='\t'):
return padding + ('\n' + padding).join(lines.split('\n'))
class ContentmentExtension:
needs = ('template', )
def __call__(self, context, app):
def protected_inner(environ, start_response=None):
try:
return app(environ, start_response)
except:
if __debug__:
try:
import pudb; pudb.post_mortem()
except:
pass
raise
return protected_inner
def start(self, context):
log = __import__('logging').getLogger(__name__)
log.info("Starting Contentment extension.")
context.namespace.indent = indent
for asset_type in PluginManager('web.component'):
log.info("Found asset type: " + repr(asset_type))
# registry.register(render_asset, Asset)
|
<commit_before># encoding: utf-8
from marrow.package.host import PluginManager
def indent(context, lines, padding='\t'):
return padding + ('\n' + padding).join(lines.split('\n'))
class ContentmentExtension:
needs = ('template', )
def __call__(self, context, app):
def protected_inner(environ, start_response=None):
try:
return app(environ, start_response)
except:
import pudb; pudb.post_mortem()
return protected_inner
def start(self, context):
log = __import__('logging').getLogger(__name__)
log.info("Starting Contentment extension.")
context.namespace.indent = indent
for asset_type in PluginManager('web.component'):
log.info("Found asset type: " + repr(asset_type))
# registry.register(render_asset, Asset)
<commit_msg>Mark pudb post_mortem as debug-only, and optional.<commit_after># encoding: utf-8
from marrow.package.host import PluginManager
def indent(context, lines, padding='\t'):
return padding + ('\n' + padding).join(lines.split('\n'))
class ContentmentExtension:
needs = ('template', )
def __call__(self, context, app):
def protected_inner(environ, start_response=None):
try:
return app(environ, start_response)
except:
if __debug__:
try:
import pudb; pudb.post_mortem()
except:
pass
raise
return protected_inner
def start(self, context):
log = __import__('logging').getLogger(__name__)
log.info("Starting Contentment extension.")
context.namespace.indent = indent
for asset_type in PluginManager('web.component'):
log.info("Found asset type: " + repr(asset_type))
# registry.register(render_asset, Asset)
|
f7f5bc45f6c3e86e9ea77be7a9be16d86465e3b3
|
perfkitbenchmarker/linux_packages/mysqlclient56.py
|
perfkitbenchmarker/linux_packages/mysqlclient56.py
|
# Copyright 2014 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing mysqlclient installation and cleanup functions."""
def YumInstall(vm):
"""Installs the mysql package on the VM."""
del vm
raise NotImplementedError('mysqlclient56 not implemented for yum')
def AptInstall(vm):
"""Installs the mysql package on the VM."""
vm.RemoteCommand('sudo add-apt-repository '
"'deb http://archive.ubuntu.com/ubuntu trusty universe'")
vm.RemoteCommand('sudo apt-get update')
vm.RemoteCommand('sudo apt-get -y install mysql-client-5.6')
|
# Copyright 2014 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing mysqlclient installation and cleanup functions."""
def YumInstall(vm):
"""Installs the mysql package on the VM."""
del vm
raise NotImplementedError('mysqlclient56 not implemented for yum')
def AptInstall(vm):
"""Installs the mysql package on the VM."""
vm.RemoteCommand(
'sudo add-apt-repository '
"'deb https://mirror.ubuntu.serverforge.org trusty universe'")
vm.RemoteCommand('sudo apt-get update')
vm.RemoteCommand('sudo apt-get -y install mysql-client-5.6')
|
Install mysqlclient from HTTPs repo.
|
Install mysqlclient from HTTPs repo.
-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=263786009
|
Python
|
apache-2.0
|
GoogleCloudPlatform/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker
|
# Copyright 2014 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing mysqlclient installation and cleanup functions."""
def YumInstall(vm):
"""Installs the mysql package on the VM."""
del vm
raise NotImplementedError('mysqlclient56 not implemented for yum')
def AptInstall(vm):
"""Installs the mysql package on the VM."""
vm.RemoteCommand('sudo add-apt-repository '
"'deb http://archive.ubuntu.com/ubuntu trusty universe'")
vm.RemoteCommand('sudo apt-get update')
vm.RemoteCommand('sudo apt-get -y install mysql-client-5.6')
Install mysqlclient from HTTPs repo.
-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=263786009
|
# Copyright 2014 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing mysqlclient installation and cleanup functions."""
def YumInstall(vm):
"""Installs the mysql package on the VM."""
del vm
raise NotImplementedError('mysqlclient56 not implemented for yum')
def AptInstall(vm):
"""Installs the mysql package on the VM."""
vm.RemoteCommand(
'sudo add-apt-repository '
"'deb https://mirror.ubuntu.serverforge.org trusty universe'")
vm.RemoteCommand('sudo apt-get update')
vm.RemoteCommand('sudo apt-get -y install mysql-client-5.6')
|
<commit_before># Copyright 2014 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing mysqlclient installation and cleanup functions."""
def YumInstall(vm):
"""Installs the mysql package on the VM."""
del vm
raise NotImplementedError('mysqlclient56 not implemented for yum')
def AptInstall(vm):
"""Installs the mysql package on the VM."""
vm.RemoteCommand('sudo add-apt-repository '
"'deb http://archive.ubuntu.com/ubuntu trusty universe'")
vm.RemoteCommand('sudo apt-get update')
vm.RemoteCommand('sudo apt-get -y install mysql-client-5.6')
<commit_msg>Install mysqlclient from HTTPs repo.
-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=263786009<commit_after>
|
# Copyright 2014 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing mysqlclient installation and cleanup functions."""
def YumInstall(vm):
"""Installs the mysql package on the VM."""
del vm
raise NotImplementedError('mysqlclient56 not implemented for yum')
def AptInstall(vm):
"""Installs the mysql package on the VM."""
vm.RemoteCommand(
'sudo add-apt-repository '
"'deb https://mirror.ubuntu.serverforge.org trusty universe'")
vm.RemoteCommand('sudo apt-get update')
vm.RemoteCommand('sudo apt-get -y install mysql-client-5.6')
|
# Copyright 2014 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing mysqlclient installation and cleanup functions."""
def YumInstall(vm):
"""Installs the mysql package on the VM."""
del vm
raise NotImplementedError('mysqlclient56 not implemented for yum')
def AptInstall(vm):
"""Installs the mysql package on the VM."""
vm.RemoteCommand('sudo add-apt-repository '
"'deb http://archive.ubuntu.com/ubuntu trusty universe'")
vm.RemoteCommand('sudo apt-get update')
vm.RemoteCommand('sudo apt-get -y install mysql-client-5.6')
Install mysqlclient from HTTPs repo.
-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=263786009# Copyright 2014 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing mysqlclient installation and cleanup functions."""
def YumInstall(vm):
"""Installs the mysql package on the VM."""
del vm
raise NotImplementedError('mysqlclient56 not implemented for yum')
def AptInstall(vm):
"""Installs the mysql package on the VM."""
vm.RemoteCommand(
'sudo add-apt-repository '
"'deb https://mirror.ubuntu.serverforge.org trusty universe'")
vm.RemoteCommand('sudo apt-get update')
vm.RemoteCommand('sudo apt-get -y install mysql-client-5.6')
|
<commit_before># Copyright 2014 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing mysqlclient installation and cleanup functions."""
def YumInstall(vm):
"""Installs the mysql package on the VM."""
del vm
raise NotImplementedError('mysqlclient56 not implemented for yum')
def AptInstall(vm):
"""Installs the mysql package on the VM."""
vm.RemoteCommand('sudo add-apt-repository '
"'deb http://archive.ubuntu.com/ubuntu trusty universe'")
vm.RemoteCommand('sudo apt-get update')
vm.RemoteCommand('sudo apt-get -y install mysql-client-5.6')
<commit_msg>Install mysqlclient from HTTPs repo.
-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=263786009<commit_after># Copyright 2014 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing mysqlclient installation and cleanup functions."""
def YumInstall(vm):
"""Installs the mysql package on the VM."""
del vm
raise NotImplementedError('mysqlclient56 not implemented for yum')
def AptInstall(vm):
"""Installs the mysql package on the VM."""
vm.RemoteCommand(
'sudo add-apt-repository '
"'deb https://mirror.ubuntu.serverforge.org trusty universe'")
vm.RemoteCommand('sudo apt-get update')
vm.RemoteCommand('sudo apt-get -y install mysql-client-5.6')
|
6ba0614a3a2c18307f86f5a6c6f119a2b2587697
|
docs/conf.py
|
docs/conf.py
|
# pyinfra
# File: docs/conf.py
# Desc: minimal Sphinx config
from datetime import date, datetime
from better import better_theme_path
from pyinfra import __version__
_today = date.today()
copyright = f'{datetime.strftime(_today, "%Y")}, Nick Barrett'
extensions = [
# Official
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
]
source_suffix = '.rst'
master_doc = 'index'
project = 'pyinfra'
author = 'Fizzadar'
version = 'develop'
pygments_style = 'sphinx'
# Theme style override
html_title = 'pyinfra {0}'.format(__version__)
html_short_title = 'Home'
html_theme = 'better'
html_theme_path = [better_theme_path]
html_static_path = ['static']
templates_path = ['templates']
html_sidebars = {
'**': ['pyinfra_sidebar.html'],
}
html_theme_options = {
'cssfiles': ['_static/pyinfra.css'],
'scriptfiles': ['_static/sidebar.js'],
}
|
# pyinfra
# File: docs/conf.py
# Desc: minimal Sphinx config
from datetime import date, datetime
from better import better_theme_path
from pyinfra import __version__
_today = date.today()
copyright = '{0}, Nick Barrett'.format(datetime.strftime(_today, '%Y'))
extensions = [
# Official
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
]
source_suffix = '.rst'
master_doc = 'index'
project = 'pyinfra'
author = 'Fizzadar'
version = 'develop'
pygments_style = 'sphinx'
# Theme style override
html_title = 'pyinfra {0}'.format(__version__)
html_short_title = 'Home'
html_theme = 'better'
html_theme_path = [better_theme_path]
html_static_path = ['static']
templates_path = ['templates']
html_sidebars = {
'**': ['pyinfra_sidebar.html'],
}
html_theme_options = {
'cssfiles': ['_static/pyinfra.css'],
'scriptfiles': ['_static/sidebar.js'],
}
|
Use old style string formatting to appease readthedocs aging 3.5 interpreter.
|
Use old style string formatting to appease readthedocs aging 3.5 interpreter.
|
Python
|
mit
|
Fizzadar/pyinfra,Fizzadar/pyinfra
|
# pyinfra
# File: docs/conf.py
# Desc: minimal Sphinx config
from datetime import date, datetime
from better import better_theme_path
from pyinfra import __version__
_today = date.today()
copyright = f'{datetime.strftime(_today, "%Y")}, Nick Barrett'
extensions = [
# Official
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
]
source_suffix = '.rst'
master_doc = 'index'
project = 'pyinfra'
author = 'Fizzadar'
version = 'develop'
pygments_style = 'sphinx'
# Theme style override
html_title = 'pyinfra {0}'.format(__version__)
html_short_title = 'Home'
html_theme = 'better'
html_theme_path = [better_theme_path]
html_static_path = ['static']
templates_path = ['templates']
html_sidebars = {
'**': ['pyinfra_sidebar.html'],
}
html_theme_options = {
'cssfiles': ['_static/pyinfra.css'],
'scriptfiles': ['_static/sidebar.js'],
}
Use old style string formatting to appease readthedocs aging 3.5 interpreter.
|
# pyinfra
# File: docs/conf.py
# Desc: minimal Sphinx config
from datetime import date, datetime
from better import better_theme_path
from pyinfra import __version__
_today = date.today()
copyright = '{0}, Nick Barrett'.format(datetime.strftime(_today, '%Y'))
extensions = [
# Official
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
]
source_suffix = '.rst'
master_doc = 'index'
project = 'pyinfra'
author = 'Fizzadar'
version = 'develop'
pygments_style = 'sphinx'
# Theme style override
html_title = 'pyinfra {0}'.format(__version__)
html_short_title = 'Home'
html_theme = 'better'
html_theme_path = [better_theme_path]
html_static_path = ['static']
templates_path = ['templates']
html_sidebars = {
'**': ['pyinfra_sidebar.html'],
}
html_theme_options = {
'cssfiles': ['_static/pyinfra.css'],
'scriptfiles': ['_static/sidebar.js'],
}
|
<commit_before># pyinfra
# File: docs/conf.py
# Desc: minimal Sphinx config
from datetime import date, datetime
from better import better_theme_path
from pyinfra import __version__
_today = date.today()
copyright = f'{datetime.strftime(_today, "%Y")}, Nick Barrett'
extensions = [
# Official
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
]
source_suffix = '.rst'
master_doc = 'index'
project = 'pyinfra'
author = 'Fizzadar'
version = 'develop'
pygments_style = 'sphinx'
# Theme style override
html_title = 'pyinfra {0}'.format(__version__)
html_short_title = 'Home'
html_theme = 'better'
html_theme_path = [better_theme_path]
html_static_path = ['static']
templates_path = ['templates']
html_sidebars = {
'**': ['pyinfra_sidebar.html'],
}
html_theme_options = {
'cssfiles': ['_static/pyinfra.css'],
'scriptfiles': ['_static/sidebar.js'],
}
<commit_msg>Use old style string formatting to appease readthedocs aging 3.5 interpreter.<commit_after>
|
# pyinfra
# File: docs/conf.py
# Desc: minimal Sphinx config
from datetime import date, datetime
from better import better_theme_path
from pyinfra import __version__
_today = date.today()
copyright = '{0}, Nick Barrett'.format(datetime.strftime(_today, '%Y'))
extensions = [
# Official
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
]
source_suffix = '.rst'
master_doc = 'index'
project = 'pyinfra'
author = 'Fizzadar'
version = 'develop'
pygments_style = 'sphinx'
# Theme style override
html_title = 'pyinfra {0}'.format(__version__)
html_short_title = 'Home'
html_theme = 'better'
html_theme_path = [better_theme_path]
html_static_path = ['static']
templates_path = ['templates']
html_sidebars = {
'**': ['pyinfra_sidebar.html'],
}
html_theme_options = {
'cssfiles': ['_static/pyinfra.css'],
'scriptfiles': ['_static/sidebar.js'],
}
|
# pyinfra
# File: docs/conf.py
# Desc: minimal Sphinx config
from datetime import date, datetime
from better import better_theme_path
from pyinfra import __version__
_today = date.today()
copyright = f'{datetime.strftime(_today, "%Y")}, Nick Barrett'
extensions = [
# Official
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
]
source_suffix = '.rst'
master_doc = 'index'
project = 'pyinfra'
author = 'Fizzadar'
version = 'develop'
pygments_style = 'sphinx'
# Theme style override
html_title = 'pyinfra {0}'.format(__version__)
html_short_title = 'Home'
html_theme = 'better'
html_theme_path = [better_theme_path]
html_static_path = ['static']
templates_path = ['templates']
html_sidebars = {
'**': ['pyinfra_sidebar.html'],
}
html_theme_options = {
'cssfiles': ['_static/pyinfra.css'],
'scriptfiles': ['_static/sidebar.js'],
}
Use old style string formatting to appease readthedocs aging 3.5 interpreter.# pyinfra
# File: docs/conf.py
# Desc: minimal Sphinx config
from datetime import date, datetime
from better import better_theme_path
from pyinfra import __version__
_today = date.today()
copyright = '{0}, Nick Barrett'.format(datetime.strftime(_today, '%Y'))
extensions = [
# Official
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
]
source_suffix = '.rst'
master_doc = 'index'
project = 'pyinfra'
author = 'Fizzadar'
version = 'develop'
pygments_style = 'sphinx'
# Theme style override
html_title = 'pyinfra {0}'.format(__version__)
html_short_title = 'Home'
html_theme = 'better'
html_theme_path = [better_theme_path]
html_static_path = ['static']
templates_path = ['templates']
html_sidebars = {
'**': ['pyinfra_sidebar.html'],
}
html_theme_options = {
'cssfiles': ['_static/pyinfra.css'],
'scriptfiles': ['_static/sidebar.js'],
}
|
<commit_before># pyinfra
# File: docs/conf.py
# Desc: minimal Sphinx config
from datetime import date, datetime
from better import better_theme_path
from pyinfra import __version__
_today = date.today()
copyright = f'{datetime.strftime(_today, "%Y")}, Nick Barrett'
extensions = [
# Official
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
]
source_suffix = '.rst'
master_doc = 'index'
project = 'pyinfra'
author = 'Fizzadar'
version = 'develop'
pygments_style = 'sphinx'
# Theme style override
html_title = 'pyinfra {0}'.format(__version__)
html_short_title = 'Home'
html_theme = 'better'
html_theme_path = [better_theme_path]
html_static_path = ['static']
templates_path = ['templates']
html_sidebars = {
'**': ['pyinfra_sidebar.html'],
}
html_theme_options = {
'cssfiles': ['_static/pyinfra.css'],
'scriptfiles': ['_static/sidebar.js'],
}
<commit_msg>Use old style string formatting to appease readthedocs aging 3.5 interpreter.<commit_after># pyinfra
# File: docs/conf.py
# Desc: minimal Sphinx config
from datetime import date, datetime
from better import better_theme_path
from pyinfra import __version__
_today = date.today()
copyright = '{0}, Nick Barrett'.format(datetime.strftime(_today, '%Y'))
extensions = [
# Official
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
]
source_suffix = '.rst'
master_doc = 'index'
project = 'pyinfra'
author = 'Fizzadar'
version = 'develop'
pygments_style = 'sphinx'
# Theme style override
html_title = 'pyinfra {0}'.format(__version__)
html_short_title = 'Home'
html_theme = 'better'
html_theme_path = [better_theme_path]
html_static_path = ['static']
templates_path = ['templates']
html_sidebars = {
'**': ['pyinfra_sidebar.html'],
}
html_theme_options = {
'cssfiles': ['_static/pyinfra.css'],
'scriptfiles': ['_static/sidebar.js'],
}
|
7228761d42276b1b009ca698aab9a901ee9d4d48
|
src/appleseed.python/studio/helpers.py
|
src/appleseed.python/studio/helpers.py
|
import appleseed as asr
import appleseed.studio as studio
from appleseed.textureconverter import *
import sys
import logging
logging.basicConfig(level=logging.INFO, stream=sys.stdout)
def get_textures(container):
assert isinstance(container, asr.BaseGroup)
textures = list(container.textures())
assemblies = container.assemblies()
for key in assemblies:
textures += get_textures(assemblies[key])
return textures
def convert_all_textures_to_tx(maketx_path):
scene = studio.current_project().get_scene()
textures = get_textures(scene)
tx_converter = TextureConverter(maketx_path)
for texture in textures:
texture_parameters = texture.get_parameters()
texture_path = texture_parameters['filename']
if texture_path.endswith('.tx'):
continue
new_texture_path = tx_converter.convert(texture_path)
if new_texture_path is None:
logging.info('Skipped converting of {}'.format(texture_path))
else:
texture_parameters['filename'] = new_texture_path
texture.set_parameters(texture_parameters)
logging.info('{} converted to {}'.format(texture_path, new_texture_path))
|
import appleseed as asr
import appleseed.studio as studio
from appleseed.textureconverter import *
import sys
import logging
logging.basicConfig(level=logging.INFO, stream=sys.stdout)
def get_textures(container):
assert isinstance(container, asr.BaseGroup)
textures = list(container.textures())
assemblies = container.assemblies()
for key in assemblies:
textures += get_textures(assemblies[key])
return textures
def convert_all_textures_to_tx(maketx_path):
scene = studio.current_project().get_scene()
textures = get_textures(scene)
tx_converter = TextureConverter(maketx_path)
for texture in textures:
texture_parameters = texture.get_parameters()
texture_path = texture_parameters['filename']
if texture.get_model() != 'disk_texture_2d' or texture_path.endswith('.tx'):
logging.debug('Skipped converting {}'.format(texture_path))
continue
new_texture_path = tx_converter.convert(texture_path)
if new_texture_path is None:
logging.info('Skipped converting of {}'.format(texture_path))
else:
texture_parameters['filename'] = new_texture_path
texture.set_parameters(texture_parameters)
logging.info('{} converted to {}'.format(texture_path, new_texture_path))
|
Convert only disk_texture_2d textures with extension other than tx
|
Convert only disk_texture_2d textures with extension other than tx
|
Python
|
mit
|
luisbarrancos/appleseed,est77/appleseed,pjessesco/appleseed,glebmish/appleseed,glebmish/appleseed,luisbarrancos/appleseed,glebmish/appleseed,Biart95/appleseed,dictoon/appleseed,dictoon/appleseed,luisbarrancos/appleseed,Biart95/appleseed,appleseedhq/appleseed,pjessesco/appleseed,Biart95/appleseed,pjessesco/appleseed,Vertexwahn/appleseed,gospodnetic/appleseed,est77/appleseed,pjessesco/appleseed,Vertexwahn/appleseed,Vertexwahn/appleseed,dictoon/appleseed,Biart95/appleseed,appleseedhq/appleseed,glebmish/appleseed,dictoon/appleseed,glebmish/appleseed,est77/appleseed,Biart95/appleseed,Aakash1312/appleseed,aytekaman/appleseed,Aakash1312/appleseed,Aakash1312/appleseed,luisbarrancos/appleseed,aytekaman/appleseed,Aakash1312/appleseed,gospodnetic/appleseed,appleseedhq/appleseed,gospodnetic/appleseed,aytekaman/appleseed,appleseedhq/appleseed,pjessesco/appleseed,est77/appleseed,gospodnetic/appleseed,dictoon/appleseed,appleseedhq/appleseed,luisbarrancos/appleseed,aytekaman/appleseed,Vertexwahn/appleseed,aytekaman/appleseed,est77/appleseed,Vertexwahn/appleseed,gospodnetic/appleseed,Aakash1312/appleseed
|
import appleseed as asr
import appleseed.studio as studio
from appleseed.textureconverter import *
import sys
import logging
logging.basicConfig(level=logging.INFO, stream=sys.stdout)
def get_textures(container):
assert isinstance(container, asr.BaseGroup)
textures = list(container.textures())
assemblies = container.assemblies()
for key in assemblies:
textures += get_textures(assemblies[key])
return textures
def convert_all_textures_to_tx(maketx_path):
scene = studio.current_project().get_scene()
textures = get_textures(scene)
tx_converter = TextureConverter(maketx_path)
for texture in textures:
texture_parameters = texture.get_parameters()
texture_path = texture_parameters['filename']
if texture_path.endswith('.tx'):
continue
new_texture_path = tx_converter.convert(texture_path)
if new_texture_path is None:
logging.info('Skipped converting of {}'.format(texture_path))
else:
texture_parameters['filename'] = new_texture_path
texture.set_parameters(texture_parameters)
logging.info('{} converted to {}'.format(texture_path, new_texture_path))
Convert only disk_texture_2d textures with extension other than tx
|
import appleseed as asr
import appleseed.studio as studio
from appleseed.textureconverter import *
import sys
import logging
logging.basicConfig(level=logging.INFO, stream=sys.stdout)
def get_textures(container):
assert isinstance(container, asr.BaseGroup)
textures = list(container.textures())
assemblies = container.assemblies()
for key in assemblies:
textures += get_textures(assemblies[key])
return textures
def convert_all_textures_to_tx(maketx_path):
scene = studio.current_project().get_scene()
textures = get_textures(scene)
tx_converter = TextureConverter(maketx_path)
for texture in textures:
texture_parameters = texture.get_parameters()
texture_path = texture_parameters['filename']
if texture.get_model() != 'disk_texture_2d' or texture_path.endswith('.tx'):
logging.debug('Skipped converting {}'.format(texture_path))
continue
new_texture_path = tx_converter.convert(texture_path)
if new_texture_path is None:
logging.info('Skipped converting of {}'.format(texture_path))
else:
texture_parameters['filename'] = new_texture_path
texture.set_parameters(texture_parameters)
logging.info('{} converted to {}'.format(texture_path, new_texture_path))
|
<commit_before>import appleseed as asr
import appleseed.studio as studio
from appleseed.textureconverter import *
import sys
import logging
logging.basicConfig(level=logging.INFO, stream=sys.stdout)
def get_textures(container):
assert isinstance(container, asr.BaseGroup)
textures = list(container.textures())
assemblies = container.assemblies()
for key in assemblies:
textures += get_textures(assemblies[key])
return textures
def convert_all_textures_to_tx(maketx_path):
scene = studio.current_project().get_scene()
textures = get_textures(scene)
tx_converter = TextureConverter(maketx_path)
for texture in textures:
texture_parameters = texture.get_parameters()
texture_path = texture_parameters['filename']
if texture_path.endswith('.tx'):
continue
new_texture_path = tx_converter.convert(texture_path)
if new_texture_path is None:
logging.info('Skipped converting of {}'.format(texture_path))
else:
texture_parameters['filename'] = new_texture_path
texture.set_parameters(texture_parameters)
logging.info('{} converted to {}'.format(texture_path, new_texture_path))
<commit_msg>Convert only disk_texture_2d textures with extension other than tx<commit_after>
|
import appleseed as asr
import appleseed.studio as studio
from appleseed.textureconverter import *
import sys
import logging
logging.basicConfig(level=logging.INFO, stream=sys.stdout)
def get_textures(container):
assert isinstance(container, asr.BaseGroup)
textures = list(container.textures())
assemblies = container.assemblies()
for key in assemblies:
textures += get_textures(assemblies[key])
return textures
def convert_all_textures_to_tx(maketx_path):
scene = studio.current_project().get_scene()
textures = get_textures(scene)
tx_converter = TextureConverter(maketx_path)
for texture in textures:
texture_parameters = texture.get_parameters()
texture_path = texture_parameters['filename']
if texture.get_model() != 'disk_texture_2d' or texture_path.endswith('.tx'):
logging.debug('Skipped converting {}'.format(texture_path))
continue
new_texture_path = tx_converter.convert(texture_path)
if new_texture_path is None:
logging.info('Skipped converting of {}'.format(texture_path))
else:
texture_parameters['filename'] = new_texture_path
texture.set_parameters(texture_parameters)
logging.info('{} converted to {}'.format(texture_path, new_texture_path))
|
import appleseed as asr
import appleseed.studio as studio
from appleseed.textureconverter import *
import sys
import logging
logging.basicConfig(level=logging.INFO, stream=sys.stdout)
def get_textures(container):
assert isinstance(container, asr.BaseGroup)
textures = list(container.textures())
assemblies = container.assemblies()
for key in assemblies:
textures += get_textures(assemblies[key])
return textures
def convert_all_textures_to_tx(maketx_path):
scene = studio.current_project().get_scene()
textures = get_textures(scene)
tx_converter = TextureConverter(maketx_path)
for texture in textures:
texture_parameters = texture.get_parameters()
texture_path = texture_parameters['filename']
if texture_path.endswith('.tx'):
continue
new_texture_path = tx_converter.convert(texture_path)
if new_texture_path is None:
logging.info('Skipped converting of {}'.format(texture_path))
else:
texture_parameters['filename'] = new_texture_path
texture.set_parameters(texture_parameters)
logging.info('{} converted to {}'.format(texture_path, new_texture_path))
Convert only disk_texture_2d textures with extension other than tximport appleseed as asr
import appleseed.studio as studio
from appleseed.textureconverter import *
import sys
import logging
logging.basicConfig(level=logging.INFO, stream=sys.stdout)
def get_textures(container):
assert isinstance(container, asr.BaseGroup)
textures = list(container.textures())
assemblies = container.assemblies()
for key in assemblies:
textures += get_textures(assemblies[key])
return textures
def convert_all_textures_to_tx(maketx_path):
scene = studio.current_project().get_scene()
textures = get_textures(scene)
tx_converter = TextureConverter(maketx_path)
for texture in textures:
texture_parameters = texture.get_parameters()
texture_path = texture_parameters['filename']
if texture.get_model() != 'disk_texture_2d' or texture_path.endswith('.tx'):
logging.debug('Skipped converting {}'.format(texture_path))
continue
new_texture_path = tx_converter.convert(texture_path)
if new_texture_path is None:
logging.info('Skipped converting of {}'.format(texture_path))
else:
texture_parameters['filename'] = new_texture_path
texture.set_parameters(texture_parameters)
logging.info('{} converted to {}'.format(texture_path, new_texture_path))
|
<commit_before>import appleseed as asr
import appleseed.studio as studio
from appleseed.textureconverter import *
import sys
import logging
logging.basicConfig(level=logging.INFO, stream=sys.stdout)
def get_textures(container):
assert isinstance(container, asr.BaseGroup)
textures = list(container.textures())
assemblies = container.assemblies()
for key in assemblies:
textures += get_textures(assemblies[key])
return textures
def convert_all_textures_to_tx(maketx_path):
scene = studio.current_project().get_scene()
textures = get_textures(scene)
tx_converter = TextureConverter(maketx_path)
for texture in textures:
texture_parameters = texture.get_parameters()
texture_path = texture_parameters['filename']
if texture_path.endswith('.tx'):
continue
new_texture_path = tx_converter.convert(texture_path)
if new_texture_path is None:
logging.info('Skipped converting of {}'.format(texture_path))
else:
texture_parameters['filename'] = new_texture_path
texture.set_parameters(texture_parameters)
logging.info('{} converted to {}'.format(texture_path, new_texture_path))
<commit_msg>Convert only disk_texture_2d textures with extension other than tx<commit_after>import appleseed as asr
import appleseed.studio as studio
from appleseed.textureconverter import *
import sys
import logging
logging.basicConfig(level=logging.INFO, stream=sys.stdout)
def get_textures(container):
assert isinstance(container, asr.BaseGroup)
textures = list(container.textures())
assemblies = container.assemblies()
for key in assemblies:
textures += get_textures(assemblies[key])
return textures
def convert_all_textures_to_tx(maketx_path):
scene = studio.current_project().get_scene()
textures = get_textures(scene)
tx_converter = TextureConverter(maketx_path)
for texture in textures:
texture_parameters = texture.get_parameters()
texture_path = texture_parameters['filename']
if texture.get_model() != 'disk_texture_2d' or texture_path.endswith('.tx'):
logging.debug('Skipped converting {}'.format(texture_path))
continue
new_texture_path = tx_converter.convert(texture_path)
if new_texture_path is None:
logging.info('Skipped converting of {}'.format(texture_path))
else:
texture_parameters['filename'] = new_texture_path
texture.set_parameters(texture_parameters)
logging.info('{} converted to {}'.format(texture_path, new_texture_path))
|
f769360dbb6da83fc8bf9c244c04b3d2f7c49ffa
|
lab/runnerctl.py
|
lab/runnerctl.py
|
"""
pytest runner control plugin
"""
import pytest
def pytest_runtest_makereport(item, call):
if 'setup_test' in item.keywords and call.excinfo:
if not call.excinfo.errisinstance(pytest.skip.Exception):
pytest.halt('A setup test has failed, aborting...')
class Halt(object):
def __init__(self):
self.msg = None
def __call__(self, msg):
self.msg = msg
def pytest_namespace():
return {'halt': Halt()}
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item, nextitem):
yield
if pytest.halt.msg:
item.session.shouldstop = pytest.halt.msg
|
"""
pytest runner control plugin
"""
import pytest
import string
def pytest_runtest_makereport(item, call):
if 'setup_test' in item.keywords and call.excinfo:
if not call.excinfo.errisinstance(pytest.skip.Exception):
pytest.halt('A setup test has failed, aborting...')
class Halt(object):
def __init__(self):
self.msg = None
def __call__(self, msg):
self.msg = msg
def pytest_namespace():
return {'halt': Halt()}
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item, nextitem):
yield
if pytest.halt.msg:
item.session.shouldstop = pytest.halt.msg
@pytest.fixture(scope='class')
def testname(request):
"""Pytest test node name with all unfriendly characters transformed
into underscores. The lifetime is class scoped since this name is
often used to provision remote sw profiles which live for the entirety
of a test suite.
"""
return request.node.name.translate(
string.maketrans('\[', '__')).strip(']')
|
Move some fixtures into better places
|
Move some fixtures into better places
Move datadir into the sipsecmon plugin and testname into
lab.runnerctl.
|
Python
|
mpl-2.0
|
sangoma/pytestlab
|
"""
pytest runner control plugin
"""
import pytest
def pytest_runtest_makereport(item, call):
if 'setup_test' in item.keywords and call.excinfo:
if not call.excinfo.errisinstance(pytest.skip.Exception):
pytest.halt('A setup test has failed, aborting...')
class Halt(object):
def __init__(self):
self.msg = None
def __call__(self, msg):
self.msg = msg
def pytest_namespace():
return {'halt': Halt()}
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item, nextitem):
yield
if pytest.halt.msg:
item.session.shouldstop = pytest.halt.msg
Move some fixtures into better places
Move datadir into the sipsecmon plugin and testname into
lab.runnerctl.
|
"""
pytest runner control plugin
"""
import pytest
import string
def pytest_runtest_makereport(item, call):
if 'setup_test' in item.keywords and call.excinfo:
if not call.excinfo.errisinstance(pytest.skip.Exception):
pytest.halt('A setup test has failed, aborting...')
class Halt(object):
def __init__(self):
self.msg = None
def __call__(self, msg):
self.msg = msg
def pytest_namespace():
return {'halt': Halt()}
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item, nextitem):
yield
if pytest.halt.msg:
item.session.shouldstop = pytest.halt.msg
@pytest.fixture(scope='class')
def testname(request):
"""Pytest test node name with all unfriendly characters transformed
into underscores. The lifetime is class scoped since this name is
often used to provision remote sw profiles which live for the entirety
of a test suite.
"""
return request.node.name.translate(
string.maketrans('\[', '__')).strip(']')
|
<commit_before>"""
pytest runner control plugin
"""
import pytest
def pytest_runtest_makereport(item, call):
if 'setup_test' in item.keywords and call.excinfo:
if not call.excinfo.errisinstance(pytest.skip.Exception):
pytest.halt('A setup test has failed, aborting...')
class Halt(object):
def __init__(self):
self.msg = None
def __call__(self, msg):
self.msg = msg
def pytest_namespace():
return {'halt': Halt()}
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item, nextitem):
yield
if pytest.halt.msg:
item.session.shouldstop = pytest.halt.msg
<commit_msg>Move some fixtures into better places
Move datadir into the sipsecmon plugin and testname into
lab.runnerctl.<commit_after>
|
"""
pytest runner control plugin
"""
import pytest
import string
def pytest_runtest_makereport(item, call):
if 'setup_test' in item.keywords and call.excinfo:
if not call.excinfo.errisinstance(pytest.skip.Exception):
pytest.halt('A setup test has failed, aborting...')
class Halt(object):
def __init__(self):
self.msg = None
def __call__(self, msg):
self.msg = msg
def pytest_namespace():
return {'halt': Halt()}
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item, nextitem):
yield
if pytest.halt.msg:
item.session.shouldstop = pytest.halt.msg
@pytest.fixture(scope='class')
def testname(request):
"""Pytest test node name with all unfriendly characters transformed
into underscores. The lifetime is class scoped since this name is
often used to provision remote sw profiles which live for the entirety
of a test suite.
"""
return request.node.name.translate(
string.maketrans('\[', '__')).strip(']')
|
"""
pytest runner control plugin
"""
import pytest
def pytest_runtest_makereport(item, call):
if 'setup_test' in item.keywords and call.excinfo:
if not call.excinfo.errisinstance(pytest.skip.Exception):
pytest.halt('A setup test has failed, aborting...')
class Halt(object):
def __init__(self):
self.msg = None
def __call__(self, msg):
self.msg = msg
def pytest_namespace():
return {'halt': Halt()}
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item, nextitem):
yield
if pytest.halt.msg:
item.session.shouldstop = pytest.halt.msg
Move some fixtures into better places
Move datadir into the sipsecmon plugin and testname into
lab.runnerctl."""
pytest runner control plugin
"""
import pytest
import string
def pytest_runtest_makereport(item, call):
if 'setup_test' in item.keywords and call.excinfo:
if not call.excinfo.errisinstance(pytest.skip.Exception):
pytest.halt('A setup test has failed, aborting...')
class Halt(object):
def __init__(self):
self.msg = None
def __call__(self, msg):
self.msg = msg
def pytest_namespace():
return {'halt': Halt()}
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item, nextitem):
yield
if pytest.halt.msg:
item.session.shouldstop = pytest.halt.msg
@pytest.fixture(scope='class')
def testname(request):
"""Pytest test node name with all unfriendly characters transformed
into underscores. The lifetime is class scoped since this name is
often used to provision remote sw profiles which live for the entirety
of a test suite.
"""
return request.node.name.translate(
string.maketrans('\[', '__')).strip(']')
|
<commit_before>"""
pytest runner control plugin
"""
import pytest
def pytest_runtest_makereport(item, call):
if 'setup_test' in item.keywords and call.excinfo:
if not call.excinfo.errisinstance(pytest.skip.Exception):
pytest.halt('A setup test has failed, aborting...')
class Halt(object):
def __init__(self):
self.msg = None
def __call__(self, msg):
self.msg = msg
def pytest_namespace():
return {'halt': Halt()}
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item, nextitem):
yield
if pytest.halt.msg:
item.session.shouldstop = pytest.halt.msg
<commit_msg>Move some fixtures into better places
Move datadir into the sipsecmon plugin and testname into
lab.runnerctl.<commit_after>"""
pytest runner control plugin
"""
import pytest
import string
def pytest_runtest_makereport(item, call):
if 'setup_test' in item.keywords and call.excinfo:
if not call.excinfo.errisinstance(pytest.skip.Exception):
pytest.halt('A setup test has failed, aborting...')
class Halt(object):
def __init__(self):
self.msg = None
def __call__(self, msg):
self.msg = msg
def pytest_namespace():
return {'halt': Halt()}
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item, nextitem):
yield
if pytest.halt.msg:
item.session.shouldstop = pytest.halt.msg
@pytest.fixture(scope='class')
def testname(request):
"""Pytest test node name with all unfriendly characters transformed
into underscores. The lifetime is class scoped since this name is
often used to provision remote sw profiles which live for the entirety
of a test suite.
"""
return request.node.name.translate(
string.maketrans('\[', '__')).strip(']')
|
9173a91ed6fc234c4a7b9dbf1d2e8f853d977a86
|
mail_restrict_follower_selection/__manifest__.py
|
mail_restrict_follower_selection/__manifest__.py
|
# Copyright (C) 2015 Therp BV <http://therp.nl>
# Copyright (C) 2017 Komit <http://www.komit-consulting.com>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
{
"name": "Restrict follower selection",
"version": "13.0.1.0.2",
"author": "Therp BV,Creu Blanca,Odoo Community Association (OCA)",
"license": "AGPL-3",
"category": "Social Network",
"summary": "Define a domain from which followers can be selected",
"depends": ["mail"],
"data": ["data/ir_config_parameter.xml", "data/ir_actions.xml"],
"installable": True,
}
|
# Copyright (C) 2015 Therp BV <http://therp.nl>
# Copyright (C) 2017 Komit <http://www.komit-consulting.com>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
{
"name": "Restrict follower selection",
"version": "13.0.1.0.2",
"author": "Therp BV,Creu Blanca,Odoo Community Association (OCA)",
"license": "AGPL-3",
"category": "Social Network",
"website": "https://github.com/OCA/social",
"summary": "Define a domain from which followers can be selected",
"depends": ["mail"],
"data": ["data/ir_config_parameter.xml", "data/ir_actions.xml"],
"installable": True,
}
|
Apply pre-commit changes: Resolve conflicts
|
[IMP] Apply pre-commit changes: Resolve conflicts
|
Python
|
agpl-3.0
|
OCA/social,OCA/social,OCA/social
|
# Copyright (C) 2015 Therp BV <http://therp.nl>
# Copyright (C) 2017 Komit <http://www.komit-consulting.com>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
{
"name": "Restrict follower selection",
"version": "13.0.1.0.2",
"author": "Therp BV,Creu Blanca,Odoo Community Association (OCA)",
"license": "AGPL-3",
"category": "Social Network",
"summary": "Define a domain from which followers can be selected",
"depends": ["mail"],
"data": ["data/ir_config_parameter.xml", "data/ir_actions.xml"],
"installable": True,
}
[IMP] Apply pre-commit changes: Resolve conflicts
|
# Copyright (C) 2015 Therp BV <http://therp.nl>
# Copyright (C) 2017 Komit <http://www.komit-consulting.com>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
{
"name": "Restrict follower selection",
"version": "13.0.1.0.2",
"author": "Therp BV,Creu Blanca,Odoo Community Association (OCA)",
"license": "AGPL-3",
"category": "Social Network",
"website": "https://github.com/OCA/social",
"summary": "Define a domain from which followers can be selected",
"depends": ["mail"],
"data": ["data/ir_config_parameter.xml", "data/ir_actions.xml"],
"installable": True,
}
|
<commit_before># Copyright (C) 2015 Therp BV <http://therp.nl>
# Copyright (C) 2017 Komit <http://www.komit-consulting.com>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
{
"name": "Restrict follower selection",
"version": "13.0.1.0.2",
"author": "Therp BV,Creu Blanca,Odoo Community Association (OCA)",
"license": "AGPL-3",
"category": "Social Network",
"summary": "Define a domain from which followers can be selected",
"depends": ["mail"],
"data": ["data/ir_config_parameter.xml", "data/ir_actions.xml"],
"installable": True,
}
<commit_msg>[IMP] Apply pre-commit changes: Resolve conflicts<commit_after>
|
# Copyright (C) 2015 Therp BV <http://therp.nl>
# Copyright (C) 2017 Komit <http://www.komit-consulting.com>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
{
"name": "Restrict follower selection",
"version": "13.0.1.0.2",
"author": "Therp BV,Creu Blanca,Odoo Community Association (OCA)",
"license": "AGPL-3",
"category": "Social Network",
"website": "https://github.com/OCA/social",
"summary": "Define a domain from which followers can be selected",
"depends": ["mail"],
"data": ["data/ir_config_parameter.xml", "data/ir_actions.xml"],
"installable": True,
}
|
# Copyright (C) 2015 Therp BV <http://therp.nl>
# Copyright (C) 2017 Komit <http://www.komit-consulting.com>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
{
"name": "Restrict follower selection",
"version": "13.0.1.0.2",
"author": "Therp BV,Creu Blanca,Odoo Community Association (OCA)",
"license": "AGPL-3",
"category": "Social Network",
"summary": "Define a domain from which followers can be selected",
"depends": ["mail"],
"data": ["data/ir_config_parameter.xml", "data/ir_actions.xml"],
"installable": True,
}
[IMP] Apply pre-commit changes: Resolve conflicts# Copyright (C) 2015 Therp BV <http://therp.nl>
# Copyright (C) 2017 Komit <http://www.komit-consulting.com>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
{
"name": "Restrict follower selection",
"version": "13.0.1.0.2",
"author": "Therp BV,Creu Blanca,Odoo Community Association (OCA)",
"license": "AGPL-3",
"category": "Social Network",
"website": "https://github.com/OCA/social",
"summary": "Define a domain from which followers can be selected",
"depends": ["mail"],
"data": ["data/ir_config_parameter.xml", "data/ir_actions.xml"],
"installable": True,
}
|
<commit_before># Copyright (C) 2015 Therp BV <http://therp.nl>
# Copyright (C) 2017 Komit <http://www.komit-consulting.com>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
{
"name": "Restrict follower selection",
"version": "13.0.1.0.2",
"author": "Therp BV,Creu Blanca,Odoo Community Association (OCA)",
"license": "AGPL-3",
"category": "Social Network",
"summary": "Define a domain from which followers can be selected",
"depends": ["mail"],
"data": ["data/ir_config_parameter.xml", "data/ir_actions.xml"],
"installable": True,
}
<commit_msg>[IMP] Apply pre-commit changes: Resolve conflicts<commit_after># Copyright (C) 2015 Therp BV <http://therp.nl>
# Copyright (C) 2017 Komit <http://www.komit-consulting.com>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
{
"name": "Restrict follower selection",
"version": "13.0.1.0.2",
"author": "Therp BV,Creu Blanca,Odoo Community Association (OCA)",
"license": "AGPL-3",
"category": "Social Network",
"website": "https://github.com/OCA/social",
"summary": "Define a domain from which followers can be selected",
"depends": ["mail"],
"data": ["data/ir_config_parameter.xml", "data/ir_actions.xml"],
"installable": True,
}
|
328e65e2c134363a1407c42a44ae9043f701874e
|
tests/commands/load/test_load_cnv_report_cmd.py
|
tests/commands/load/test_load_cnv_report_cmd.py
|
# -*- coding: utf-8 -*-
import os
from scout.demo import cnv_report_path
from scout.commands import cli
def test_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
# Make sure the path to delivery report is a valid path
assert os.path.isfile(cnv_report_path)
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(
cli,
["load", "cnv-report", case_obj["_id"], cnv_report_path, "-u"],
)
assert "saved report to case!" in result.output
assert result.exit_code == 0
def test_invalid_path_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(
cli,
["load", "cnv-report", case_obj["_id"], "invalid-path", "-u"],
)
assert 'Path "invalid-path" does not exist.' in result.output
assert result.exit_code == 2
|
# -*- coding: utf-8 -*-
import os
from scout.demo import cnv_report_path
from scout.commands import cli
def test_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
# Make sure the path to delivery report is a valid path
assert os.path.isfile(cnv_report_path)
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(cli, ["load", "cnv-report", case_obj["_id"], cnv_report_path, "-u"],)
assert "saved report to case!" in result.output
assert result.exit_code == 0
def test_invalid_path_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(cli, ["load", "cnv-report", case_obj["_id"], "invalid-path", "-u"],)
assert "Path 'invalid-path' does not exist." in result.output
assert result.exit_code == 2
|
Update test to avoid pipeline fail
|
Update test to avoid pipeline fail
|
Python
|
bsd-3-clause
|
Clinical-Genomics/scout,Clinical-Genomics/scout,Clinical-Genomics/scout
|
# -*- coding: utf-8 -*-
import os
from scout.demo import cnv_report_path
from scout.commands import cli
def test_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
# Make sure the path to delivery report is a valid path
assert os.path.isfile(cnv_report_path)
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(
cli,
["load", "cnv-report", case_obj["_id"], cnv_report_path, "-u"],
)
assert "saved report to case!" in result.output
assert result.exit_code == 0
def test_invalid_path_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(
cli,
["load", "cnv-report", case_obj["_id"], "invalid-path", "-u"],
)
assert 'Path "invalid-path" does not exist.' in result.output
assert result.exit_code == 2
Update test to avoid pipeline fail
|
# -*- coding: utf-8 -*-
import os
from scout.demo import cnv_report_path
from scout.commands import cli
def test_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
# Make sure the path to delivery report is a valid path
assert os.path.isfile(cnv_report_path)
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(cli, ["load", "cnv-report", case_obj["_id"], cnv_report_path, "-u"],)
assert "saved report to case!" in result.output
assert result.exit_code == 0
def test_invalid_path_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(cli, ["load", "cnv-report", case_obj["_id"], "invalid-path", "-u"],)
assert "Path 'invalid-path' does not exist." in result.output
assert result.exit_code == 2
|
<commit_before># -*- coding: utf-8 -*-
import os
from scout.demo import cnv_report_path
from scout.commands import cli
def test_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
# Make sure the path to delivery report is a valid path
assert os.path.isfile(cnv_report_path)
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(
cli,
["load", "cnv-report", case_obj["_id"], cnv_report_path, "-u"],
)
assert "saved report to case!" in result.output
assert result.exit_code == 0
def test_invalid_path_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(
cli,
["load", "cnv-report", case_obj["_id"], "invalid-path", "-u"],
)
assert 'Path "invalid-path" does not exist.' in result.output
assert result.exit_code == 2
<commit_msg>Update test to avoid pipeline fail<commit_after>
|
# -*- coding: utf-8 -*-
import os
from scout.demo import cnv_report_path
from scout.commands import cli
def test_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
# Make sure the path to delivery report is a valid path
assert os.path.isfile(cnv_report_path)
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(cli, ["load", "cnv-report", case_obj["_id"], cnv_report_path, "-u"],)
assert "saved report to case!" in result.output
assert result.exit_code == 0
def test_invalid_path_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(cli, ["load", "cnv-report", case_obj["_id"], "invalid-path", "-u"],)
assert "Path 'invalid-path' does not exist." in result.output
assert result.exit_code == 2
|
# -*- coding: utf-8 -*-
import os
from scout.demo import cnv_report_path
from scout.commands import cli
def test_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
# Make sure the path to delivery report is a valid path
assert os.path.isfile(cnv_report_path)
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(
cli,
["load", "cnv-report", case_obj["_id"], cnv_report_path, "-u"],
)
assert "saved report to case!" in result.output
assert result.exit_code == 0
def test_invalid_path_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(
cli,
["load", "cnv-report", case_obj["_id"], "invalid-path", "-u"],
)
assert 'Path "invalid-path" does not exist.' in result.output
assert result.exit_code == 2
Update test to avoid pipeline fail# -*- coding: utf-8 -*-
import os
from scout.demo import cnv_report_path
from scout.commands import cli
def test_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
# Make sure the path to delivery report is a valid path
assert os.path.isfile(cnv_report_path)
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(cli, ["load", "cnv-report", case_obj["_id"], cnv_report_path, "-u"],)
assert "saved report to case!" in result.output
assert result.exit_code == 0
def test_invalid_path_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(cli, ["load", "cnv-report", case_obj["_id"], "invalid-path", "-u"],)
assert "Path 'invalid-path' does not exist." in result.output
assert result.exit_code == 2
|
<commit_before># -*- coding: utf-8 -*-
import os
from scout.demo import cnv_report_path
from scout.commands import cli
def test_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
# Make sure the path to delivery report is a valid path
assert os.path.isfile(cnv_report_path)
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(
cli,
["load", "cnv-report", case_obj["_id"], cnv_report_path, "-u"],
)
assert "saved report to case!" in result.output
assert result.exit_code == 0
def test_invalid_path_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(
cli,
["load", "cnv-report", case_obj["_id"], "invalid-path", "-u"],
)
assert 'Path "invalid-path" does not exist.' in result.output
assert result.exit_code == 2
<commit_msg>Update test to avoid pipeline fail<commit_after># -*- coding: utf-8 -*-
import os
from scout.demo import cnv_report_path
from scout.commands import cli
def test_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
# Make sure the path to delivery report is a valid path
assert os.path.isfile(cnv_report_path)
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(cli, ["load", "cnv-report", case_obj["_id"], cnv_report_path, "-u"],)
assert "saved report to case!" in result.output
assert result.exit_code == 0
def test_invalid_path_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(cli, ["load", "cnv-report", case_obj["_id"], "invalid-path", "-u"],)
assert "Path 'invalid-path' does not exist." in result.output
assert result.exit_code == 2
|
8e4833c50b46d8b2f9604fcddcbd5258565ce185
|
examples/multiple_devices_with_watcher.py
|
examples/multiple_devices_with_watcher.py
|
"""
An example showing how to use the Watcher to track multiple devices in one
process.
"""
from ninja.api import NinjaAPI, Watcher
from ninja.devices import TemperatureSensor
from datetime import datetime
# Set up the NinjaAPI and Device wrappers:
# Access token from https://a.ninja.is/you#apiTab
api = NinjaAPI('<YOUR_ACCESS_TOKEN>')
# Device GUID can be found using https://api.ninja.is/rest/v0/devices/?access_token=<YOUR_ACCESS_TOKEN>
device1 = TemperatureSensor(api, '<DEVICE_1_GUID>')
device2 = TemperatureSensor(api, '<DEVICE_2_GUID>')
# The watcher will provide a single loop for polling all of the devices.
watcher = Watcher()
# Output the temperature to stdio.
def printTempCelsius(inst, data):
date = inst.last_read.isoformat()
print date + ' - ' + inst.guid + ': ' + str(data.c)
# Bind the output to the heartbeat event.
device1.onHeartbeat(printTempCelsius)
device2.onHeartbeat(printTempCelsius)
# Watch both devices in the same loop, triggering their heartbeats ever
# 10 seconds.
watcher.watch(device1)
watcher.watch(device2)
watcher.start(period=10)
|
"""
An example showing how to use the Watcher to track multiple devices in one
process.
"""
from ninja.api import NinjaAPI, Watcher
from ninja.devices import TemperatureSensor, HumiditySensor
from datetime import datetime
# Set up the NinjaAPI and Device wrappers:
# Access token from https://a.ninja.is/you#apiTab
api = NinjaAPI('<YOUR_ACCESS_TOKEN>')
# Device GUID can be found using https://api.ninja.is/rest/v0/devices/?access_token=<YOUR_ACCESS_TOKEN>
device1 = TemperatureSensor(api, '<DEVICE_1_GUID>')
device2 = HumiditySensor(api, '<DEVICE_2_GUID>')
# The watcher will provide a single loop for polling all of the devices.
watcher = Watcher()
# Output the temperature to stdio.
def printTempCelsius(inst, temperature):
date = inst.last_read.isoformat()
print '{date} - {id}: {temperature} C'.format(
date=date,
id=inst.guid,
temperature=temperature.c,
)
def printRelHumidity(inst, humidity):
date = inst.last_read.isoformat()
print '{date} - {id}: {humidity}%'.format(
date=date,
id=inst.guid,
humidity=humidity,
)
# Bind the output to the heartbeat event.
device1.onHeartbeat(printTempCelsius)
device2.onHeartbeat(printRelHumidity)
# Watch both devices in the same loop, triggering their heartbeats ever
# 10 seconds.
watcher.watch(device1)
watcher.watch(device2)
watcher.start(period=10)
|
Update Watcher example to use HumiditySensor instead of second TemperatureSensor
|
Update Watcher example to use HumiditySensor instead of second TemperatureSensor
|
Python
|
unlicense
|
alecperkins/py-ninja
|
"""
An example showing how to use the Watcher to track multiple devices in one
process.
"""
from ninja.api import NinjaAPI, Watcher
from ninja.devices import TemperatureSensor
from datetime import datetime
# Set up the NinjaAPI and Device wrappers:
# Access token from https://a.ninja.is/you#apiTab
api = NinjaAPI('<YOUR_ACCESS_TOKEN>')
# Device GUID can be found using https://api.ninja.is/rest/v0/devices/?access_token=<YOUR_ACCESS_TOKEN>
device1 = TemperatureSensor(api, '<DEVICE_1_GUID>')
device2 = TemperatureSensor(api, '<DEVICE_2_GUID>')
# The watcher will provide a single loop for polling all of the devices.
watcher = Watcher()
# Output the temperature to stdio.
def printTempCelsius(inst, data):
date = inst.last_read.isoformat()
print date + ' - ' + inst.guid + ': ' + str(data.c)
# Bind the output to the heartbeat event.
device1.onHeartbeat(printTempCelsius)
device2.onHeartbeat(printTempCelsius)
# Watch both devices in the same loop, triggering their heartbeats ever
# 10 seconds.
watcher.watch(device1)
watcher.watch(device2)
watcher.start(period=10)
Update Watcher example to use HumiditySensor instead of second TemperatureSensor
|
"""
An example showing how to use the Watcher to track multiple devices in one
process.
"""
from ninja.api import NinjaAPI, Watcher
from ninja.devices import TemperatureSensor, HumiditySensor
from datetime import datetime
# Set up the NinjaAPI and Device wrappers:
# Access token from https://a.ninja.is/you#apiTab
api = NinjaAPI('<YOUR_ACCESS_TOKEN>')
# Device GUID can be found using https://api.ninja.is/rest/v0/devices/?access_token=<YOUR_ACCESS_TOKEN>
device1 = TemperatureSensor(api, '<DEVICE_1_GUID>')
device2 = HumiditySensor(api, '<DEVICE_2_GUID>')
# The watcher will provide a single loop for polling all of the devices.
watcher = Watcher()
# Output the temperature to stdio.
def printTempCelsius(inst, temperature):
date = inst.last_read.isoformat()
print '{date} - {id}: {temperature} C'.format(
date=date,
id=inst.guid,
temperature=temperature.c,
)
def printRelHumidity(inst, humidity):
date = inst.last_read.isoformat()
print '{date} - {id}: {humidity}%'.format(
date=date,
id=inst.guid,
humidity=humidity,
)
# Bind the output to the heartbeat event.
device1.onHeartbeat(printTempCelsius)
device2.onHeartbeat(printRelHumidity)
# Watch both devices in the same loop, triggering their heartbeats ever
# 10 seconds.
watcher.watch(device1)
watcher.watch(device2)
watcher.start(period=10)
|
<commit_before>"""
An example showing how to use the Watcher to track multiple devices in one
process.
"""
from ninja.api import NinjaAPI, Watcher
from ninja.devices import TemperatureSensor
from datetime import datetime
# Set up the NinjaAPI and Device wrappers:
# Access token from https://a.ninja.is/you#apiTab
api = NinjaAPI('<YOUR_ACCESS_TOKEN>')
# Device GUID can be found using https://api.ninja.is/rest/v0/devices/?access_token=<YOUR_ACCESS_TOKEN>
device1 = TemperatureSensor(api, '<DEVICE_1_GUID>')
device2 = TemperatureSensor(api, '<DEVICE_2_GUID>')
# The watcher will provide a single loop for polling all of the devices.
watcher = Watcher()
# Output the temperature to stdio.
def printTempCelsius(inst, data):
date = inst.last_read.isoformat()
print date + ' - ' + inst.guid + ': ' + str(data.c)
# Bind the output to the heartbeat event.
device1.onHeartbeat(printTempCelsius)
device2.onHeartbeat(printTempCelsius)
# Watch both devices in the same loop, triggering their heartbeats ever
# 10 seconds.
watcher.watch(device1)
watcher.watch(device2)
watcher.start(period=10)
<commit_msg>Update Watcher example to use HumiditySensor instead of second TemperatureSensor<commit_after>
|
"""
An example showing how to use the Watcher to track multiple devices in one
process.
"""
from ninja.api import NinjaAPI, Watcher
from ninja.devices import TemperatureSensor, HumiditySensor
from datetime import datetime
# Set up the NinjaAPI and Device wrappers:
# Access token from https://a.ninja.is/you#apiTab
api = NinjaAPI('<YOUR_ACCESS_TOKEN>')
# Device GUID can be found using https://api.ninja.is/rest/v0/devices/?access_token=<YOUR_ACCESS_TOKEN>
device1 = TemperatureSensor(api, '<DEVICE_1_GUID>')
device2 = HumiditySensor(api, '<DEVICE_2_GUID>')
# The watcher will provide a single loop for polling all of the devices.
watcher = Watcher()
# Output the temperature to stdio.
def printTempCelsius(inst, temperature):
date = inst.last_read.isoformat()
print '{date} - {id}: {temperature} C'.format(
date=date,
id=inst.guid,
temperature=temperature.c,
)
def printRelHumidity(inst, humidity):
date = inst.last_read.isoformat()
print '{date} - {id}: {humidity}%'.format(
date=date,
id=inst.guid,
humidity=humidity,
)
# Bind the output to the heartbeat event.
device1.onHeartbeat(printTempCelsius)
device2.onHeartbeat(printRelHumidity)
# Watch both devices in the same loop, triggering their heartbeats ever
# 10 seconds.
watcher.watch(device1)
watcher.watch(device2)
watcher.start(period=10)
|
"""
An example showing how to use the Watcher to track multiple devices in one
process.
"""
from ninja.api import NinjaAPI, Watcher
from ninja.devices import TemperatureSensor
from datetime import datetime
# Set up the NinjaAPI and Device wrappers:
# Access token from https://a.ninja.is/you#apiTab
api = NinjaAPI('<YOUR_ACCESS_TOKEN>')
# Device GUID can be found using https://api.ninja.is/rest/v0/devices/?access_token=<YOUR_ACCESS_TOKEN>
device1 = TemperatureSensor(api, '<DEVICE_1_GUID>')
device2 = TemperatureSensor(api, '<DEVICE_2_GUID>')
# The watcher will provide a single loop for polling all of the devices.
watcher = Watcher()
# Output the temperature to stdio.
def printTempCelsius(inst, data):
date = inst.last_read.isoformat()
print date + ' - ' + inst.guid + ': ' + str(data.c)
# Bind the output to the heartbeat event.
device1.onHeartbeat(printTempCelsius)
device2.onHeartbeat(printTempCelsius)
# Watch both devices in the same loop, triggering their heartbeats ever
# 10 seconds.
watcher.watch(device1)
watcher.watch(device2)
watcher.start(period=10)
Update Watcher example to use HumiditySensor instead of second TemperatureSensor"""
An example showing how to use the Watcher to track multiple devices in one
process.
"""
from ninja.api import NinjaAPI, Watcher
from ninja.devices import TemperatureSensor, HumiditySensor
from datetime import datetime
# Set up the NinjaAPI and Device wrappers:
# Access token from https://a.ninja.is/you#apiTab
api = NinjaAPI('<YOUR_ACCESS_TOKEN>')
# Device GUID can be found using https://api.ninja.is/rest/v0/devices/?access_token=<YOUR_ACCESS_TOKEN>
device1 = TemperatureSensor(api, '<DEVICE_1_GUID>')
device2 = HumiditySensor(api, '<DEVICE_2_GUID>')
# The watcher will provide a single loop for polling all of the devices.
watcher = Watcher()
# Output the temperature to stdio.
def printTempCelsius(inst, temperature):
date = inst.last_read.isoformat()
print '{date} - {id}: {temperature} C'.format(
date=date,
id=inst.guid,
temperature=temperature.c,
)
def printRelHumidity(inst, humidity):
date = inst.last_read.isoformat()
print '{date} - {id}: {humidity}%'.format(
date=date,
id=inst.guid,
humidity=humidity,
)
# Bind the output to the heartbeat event.
device1.onHeartbeat(printTempCelsius)
device2.onHeartbeat(printRelHumidity)
# Watch both devices in the same loop, triggering their heartbeats ever
# 10 seconds.
watcher.watch(device1)
watcher.watch(device2)
watcher.start(period=10)
|
<commit_before>"""
An example showing how to use the Watcher to track multiple devices in one
process.
"""
from ninja.api import NinjaAPI, Watcher
from ninja.devices import TemperatureSensor
from datetime import datetime
# Set up the NinjaAPI and Device wrappers:
# Access token from https://a.ninja.is/you#apiTab
api = NinjaAPI('<YOUR_ACCESS_TOKEN>')
# Device GUID can be found using https://api.ninja.is/rest/v0/devices/?access_token=<YOUR_ACCESS_TOKEN>
device1 = TemperatureSensor(api, '<DEVICE_1_GUID>')
device2 = TemperatureSensor(api, '<DEVICE_2_GUID>')
# The watcher will provide a single loop for polling all of the devices.
watcher = Watcher()
# Output the temperature to stdio.
def printTempCelsius(inst, data):
date = inst.last_read.isoformat()
print date + ' - ' + inst.guid + ': ' + str(data.c)
# Bind the output to the heartbeat event.
device1.onHeartbeat(printTempCelsius)
device2.onHeartbeat(printTempCelsius)
# Watch both devices in the same loop, triggering their heartbeats ever
# 10 seconds.
watcher.watch(device1)
watcher.watch(device2)
watcher.start(period=10)
<commit_msg>Update Watcher example to use HumiditySensor instead of second TemperatureSensor<commit_after>"""
An example showing how to use the Watcher to track multiple devices in one
process.
"""
from ninja.api import NinjaAPI, Watcher
from ninja.devices import TemperatureSensor, HumiditySensor
from datetime import datetime
# Set up the NinjaAPI and Device wrappers:
# Access token from https://a.ninja.is/you#apiTab
api = NinjaAPI('<YOUR_ACCESS_TOKEN>')
# Device GUID can be found using https://api.ninja.is/rest/v0/devices/?access_token=<YOUR_ACCESS_TOKEN>
device1 = TemperatureSensor(api, '<DEVICE_1_GUID>')
device2 = HumiditySensor(api, '<DEVICE_2_GUID>')
# The watcher will provide a single loop for polling all of the devices.
watcher = Watcher()
# Output the temperature to stdio.
def printTempCelsius(inst, temperature):
date = inst.last_read.isoformat()
print '{date} - {id}: {temperature} C'.format(
date=date,
id=inst.guid,
temperature=temperature.c,
)
def printRelHumidity(inst, humidity):
date = inst.last_read.isoformat()
print '{date} - {id}: {humidity}%'.format(
date=date,
id=inst.guid,
humidity=humidity,
)
# Bind the output to the heartbeat event.
device1.onHeartbeat(printTempCelsius)
device2.onHeartbeat(printRelHumidity)
# Watch both devices in the same loop, triggering their heartbeats ever
# 10 seconds.
watcher.watch(device1)
watcher.watch(device2)
watcher.start(period=10)
|
acbdb030430df2af7d0e884ea7a2851e99b42944
|
barbican/model/migration/alembic_migrations/versions/d2780d5aa510_change_url_length.py
|
barbican/model/migration/alembic_migrations/versions/d2780d5aa510_change_url_length.py
|
"""change_url_length
Revision ID: d2780d5aa510
Revises: dce488646127
Create Date: 2016-03-11 09:39:32.593231
"""
# revision identifiers, used by Alembic.
revision = 'd2780d5aa510'
down_revision = 'dce488646127'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.alter_column(
'ContainerConsumerMetadatum',
'URL',
type_=sa.String(length=255)
)
|
"""change_url_length
Revision ID: d2780d5aa510
Revises: dce488646127
Create Date: 2016-03-11 09:39:32.593231
"""
# revision identifiers, used by Alembic.
revision = 'd2780d5aa510'
down_revision = 'dce488646127'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.alter_column(
'container_consumer_metadata',
'URL',
type_=sa.String(length=255)
)
|
Change Table name to correct name
|
Change Table name to correct name
The table name is currently wrong in this version and needs to
be changed to the correct name. It is preventing the database
migration script from running correctly.
Closes-Bug: #1562091
Change-Id: I9be88a4385ab58b37be5842aaaefd8353a2f6f76
(cherry picked from commit 2ecc676dd2521c727eb1d720bac6c2533f8337d9)
|
Python
|
apache-2.0
|
openstack/barbican,openstack/barbican
|
"""change_url_length
Revision ID: d2780d5aa510
Revises: dce488646127
Create Date: 2016-03-11 09:39:32.593231
"""
# revision identifiers, used by Alembic.
revision = 'd2780d5aa510'
down_revision = 'dce488646127'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.alter_column(
'ContainerConsumerMetadatum',
'URL',
type_=sa.String(length=255)
)
Change Table name to correct name
The table name is currently wrong in this version and needs to
be changed to the correct name. It is preventing the database
migration script from running correctly.
Closes-Bug: #1562091
Change-Id: I9be88a4385ab58b37be5842aaaefd8353a2f6f76
(cherry picked from commit 2ecc676dd2521c727eb1d720bac6c2533f8337d9)
|
"""change_url_length
Revision ID: d2780d5aa510
Revises: dce488646127
Create Date: 2016-03-11 09:39:32.593231
"""
# revision identifiers, used by Alembic.
revision = 'd2780d5aa510'
down_revision = 'dce488646127'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.alter_column(
'container_consumer_metadata',
'URL',
type_=sa.String(length=255)
)
|
<commit_before>"""change_url_length
Revision ID: d2780d5aa510
Revises: dce488646127
Create Date: 2016-03-11 09:39:32.593231
"""
# revision identifiers, used by Alembic.
revision = 'd2780d5aa510'
down_revision = 'dce488646127'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.alter_column(
'ContainerConsumerMetadatum',
'URL',
type_=sa.String(length=255)
)
<commit_msg>Change Table name to correct name
The table name is currently wrong in this version and needs to
be changed to the correct name. It is preventing the database
migration script from running correctly.
Closes-Bug: #1562091
Change-Id: I9be88a4385ab58b37be5842aaaefd8353a2f6f76
(cherry picked from commit 2ecc676dd2521c727eb1d720bac6c2533f8337d9)<commit_after>
|
"""change_url_length
Revision ID: d2780d5aa510
Revises: dce488646127
Create Date: 2016-03-11 09:39:32.593231
"""
# revision identifiers, used by Alembic.
revision = 'd2780d5aa510'
down_revision = 'dce488646127'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.alter_column(
'container_consumer_metadata',
'URL',
type_=sa.String(length=255)
)
|
"""change_url_length
Revision ID: d2780d5aa510
Revises: dce488646127
Create Date: 2016-03-11 09:39:32.593231
"""
# revision identifiers, used by Alembic.
revision = 'd2780d5aa510'
down_revision = 'dce488646127'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.alter_column(
'ContainerConsumerMetadatum',
'URL',
type_=sa.String(length=255)
)
Change Table name to correct name
The table name is currently wrong in this version and needs to
be changed to the correct name. It is preventing the database
migration script from running correctly.
Closes-Bug: #1562091
Change-Id: I9be88a4385ab58b37be5842aaaefd8353a2f6f76
(cherry picked from commit 2ecc676dd2521c727eb1d720bac6c2533f8337d9)"""change_url_length
Revision ID: d2780d5aa510
Revises: dce488646127
Create Date: 2016-03-11 09:39:32.593231
"""
# revision identifiers, used by Alembic.
revision = 'd2780d5aa510'
down_revision = 'dce488646127'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.alter_column(
'container_consumer_metadata',
'URL',
type_=sa.String(length=255)
)
|
<commit_before>"""change_url_length
Revision ID: d2780d5aa510
Revises: dce488646127
Create Date: 2016-03-11 09:39:32.593231
"""
# revision identifiers, used by Alembic.
revision = 'd2780d5aa510'
down_revision = 'dce488646127'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.alter_column(
'ContainerConsumerMetadatum',
'URL',
type_=sa.String(length=255)
)
<commit_msg>Change Table name to correct name
The table name is currently wrong in this version and needs to
be changed to the correct name. It is preventing the database
migration script from running correctly.
Closes-Bug: #1562091
Change-Id: I9be88a4385ab58b37be5842aaaefd8353a2f6f76
(cherry picked from commit 2ecc676dd2521c727eb1d720bac6c2533f8337d9)<commit_after>"""change_url_length
Revision ID: d2780d5aa510
Revises: dce488646127
Create Date: 2016-03-11 09:39:32.593231
"""
# revision identifiers, used by Alembic.
revision = 'd2780d5aa510'
down_revision = 'dce488646127'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.alter_column(
'container_consumer_metadata',
'URL',
type_=sa.String(length=255)
)
|
ff9a8cb1f68785cc16c99fe26dd96e9fa01c325e
|
src/hunter/const.py
|
src/hunter/const.py
|
import site
import sys
from distutils.sysconfig import get_python_lib
SITE_PACKAGES_PATHS = set(site.getsitepackages())
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = set((
sys.prefix,
sys.exec_prefix
))
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(SYS_PREFIX_PATHS)
|
import site
import sys
from distutils.sysconfig import get_python_lib
SITE_PACKAGES_PATHS = set()
if hasattr(site, 'getsitepackages'):
SITE_PACKAGES_PATHS.update(site.getsitepackages())
if hasattr(site, 'getusersitepackages'):
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = set((
sys.prefix,
sys.exec_prefix
))
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(SYS_PREFIX_PATHS)
|
Add checks in case site.py is broken (eg: virtualenv).
|
Add checks in case site.py is broken (eg: virtualenv).
|
Python
|
bsd-2-clause
|
ionelmc/python-hunter
|
import site
import sys
from distutils.sysconfig import get_python_lib
SITE_PACKAGES_PATHS = set(site.getsitepackages())
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = set((
sys.prefix,
sys.exec_prefix
))
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(SYS_PREFIX_PATHS)
Add checks in case site.py is broken (eg: virtualenv).
|
import site
import sys
from distutils.sysconfig import get_python_lib
SITE_PACKAGES_PATHS = set()
if hasattr(site, 'getsitepackages'):
SITE_PACKAGES_PATHS.update(site.getsitepackages())
if hasattr(site, 'getusersitepackages'):
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = set((
sys.prefix,
sys.exec_prefix
))
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(SYS_PREFIX_PATHS)
|
<commit_before>import site
import sys
from distutils.sysconfig import get_python_lib
SITE_PACKAGES_PATHS = set(site.getsitepackages())
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = set((
sys.prefix,
sys.exec_prefix
))
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(SYS_PREFIX_PATHS)
<commit_msg>Add checks in case site.py is broken (eg: virtualenv).<commit_after>
|
import site
import sys
from distutils.sysconfig import get_python_lib
SITE_PACKAGES_PATHS = set()
if hasattr(site, 'getsitepackages'):
SITE_PACKAGES_PATHS.update(site.getsitepackages())
if hasattr(site, 'getusersitepackages'):
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = set((
sys.prefix,
sys.exec_prefix
))
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(SYS_PREFIX_PATHS)
|
import site
import sys
from distutils.sysconfig import get_python_lib
SITE_PACKAGES_PATHS = set(site.getsitepackages())
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = set((
sys.prefix,
sys.exec_prefix
))
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(SYS_PREFIX_PATHS)
Add checks in case site.py is broken (eg: virtualenv).import site
import sys
from distutils.sysconfig import get_python_lib
SITE_PACKAGES_PATHS = set()
if hasattr(site, 'getsitepackages'):
SITE_PACKAGES_PATHS.update(site.getsitepackages())
if hasattr(site, 'getusersitepackages'):
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = set((
sys.prefix,
sys.exec_prefix
))
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(SYS_PREFIX_PATHS)
|
<commit_before>import site
import sys
from distutils.sysconfig import get_python_lib
SITE_PACKAGES_PATHS = set(site.getsitepackages())
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = set((
sys.prefix,
sys.exec_prefix
))
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(SYS_PREFIX_PATHS)
<commit_msg>Add checks in case site.py is broken (eg: virtualenv).<commit_after>import site
import sys
from distutils.sysconfig import get_python_lib
SITE_PACKAGES_PATHS = set()
if hasattr(site, 'getsitepackages'):
SITE_PACKAGES_PATHS.update(site.getsitepackages())
if hasattr(site, 'getusersitepackages'):
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = set((
sys.prefix,
sys.exec_prefix
))
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(SYS_PREFIX_PATHS)
|
338435f7b1a10f749266138a0fbe610fa065a422
|
clients.py
|
clients.py
|
# -*- coding: utf-8 -*-
from helpers import HTTPEventHandler
import looping
import buffer_event
class StatusClient(HTTPEventHandler):
def __init__(self, server, sock, address, request_parser):
HTTPEventHandler.__init__(self, server, sock, address, request_parser,
204, b'No content')
class StreamClient(HTTPEventHandler):
def __init__(self, server, source, sock, address, request_parser, content_type):
HTTPEventHandler.__init__(self, server, sock, address, request_parser,
200, b'OK', {b'Content-Length': None,
b'Content-Type': content_type})
self.source = source
def add_packet(self, packet):
self.output_buffer.add_buffer(packet)
def close(self):
self.server.remove_client(self)
HTTPEventHandler.close(self)
def flush_if_ready(self):
# print 'buffer is ready: %s' % (self.output_buffer.ready)
if self.output_buffer.ready:
self.output_buffer.flush()
def handle_event(self, eventmask):
if eventmask & looping.POLLOUT:
# print 'lol I can write, %d items in buffer' % (len(self.output_buffer.buffer_queue))
self.output_buffer.flush()
else:
print 'Unexpected eventmask %s' % (eventmask)
|
# -*- coding: utf-8 -*-
from helpers import HTTPEventHandler
import looping
import buffer_event
class StatusClient(HTTPEventHandler):
def __init__(self, server, sock, address, request_parser):
HTTPEventHandler.__init__(self, server, sock, address, request_parser,
204, b'No content')
class StreamClient(HTTPEventHandler):
def __init__(self, server, source, sock, address, request_parser, content_type):
HTTPEventHandler.__init__(self, server, sock, address, request_parser,
200, b'OK', {b'Content-Length': None,
b'Content-Type': content_type})
self.source = source
def add_packet(self, packet):
self.output_buffer.add_buffer(packet)
def close(self):
self.server.remove_client(self)
HTTPEventHandler.close(self)
def flush_if_ready(self):
if self.output_buffer.ready:
self.output_buffer.flush()
def handle_event(self, eventmask):
if eventmask & looping.POLLOUT:
self.output_buffer.flush()
else:
print 'Unexpected eventmask %s' % (eventmask)
|
Remove commented out debug print lines
|
Remove commented out debug print lines
|
Python
|
agpl-3.0
|
noirbee/savate,noirbee/savate
|
# -*- coding: utf-8 -*-
from helpers import HTTPEventHandler
import looping
import buffer_event
class StatusClient(HTTPEventHandler):
def __init__(self, server, sock, address, request_parser):
HTTPEventHandler.__init__(self, server, sock, address, request_parser,
204, b'No content')
class StreamClient(HTTPEventHandler):
def __init__(self, server, source, sock, address, request_parser, content_type):
HTTPEventHandler.__init__(self, server, sock, address, request_parser,
200, b'OK', {b'Content-Length': None,
b'Content-Type': content_type})
self.source = source
def add_packet(self, packet):
self.output_buffer.add_buffer(packet)
def close(self):
self.server.remove_client(self)
HTTPEventHandler.close(self)
def flush_if_ready(self):
# print 'buffer is ready: %s' % (self.output_buffer.ready)
if self.output_buffer.ready:
self.output_buffer.flush()
def handle_event(self, eventmask):
if eventmask & looping.POLLOUT:
# print 'lol I can write, %d items in buffer' % (len(self.output_buffer.buffer_queue))
self.output_buffer.flush()
else:
print 'Unexpected eventmask %s' % (eventmask)
Remove commented out debug print lines
|
# -*- coding: utf-8 -*-
from helpers import HTTPEventHandler
import looping
import buffer_event
class StatusClient(HTTPEventHandler):
def __init__(self, server, sock, address, request_parser):
HTTPEventHandler.__init__(self, server, sock, address, request_parser,
204, b'No content')
class StreamClient(HTTPEventHandler):
def __init__(self, server, source, sock, address, request_parser, content_type):
HTTPEventHandler.__init__(self, server, sock, address, request_parser,
200, b'OK', {b'Content-Length': None,
b'Content-Type': content_type})
self.source = source
def add_packet(self, packet):
self.output_buffer.add_buffer(packet)
def close(self):
self.server.remove_client(self)
HTTPEventHandler.close(self)
def flush_if_ready(self):
if self.output_buffer.ready:
self.output_buffer.flush()
def handle_event(self, eventmask):
if eventmask & looping.POLLOUT:
self.output_buffer.flush()
else:
print 'Unexpected eventmask %s' % (eventmask)
|
<commit_before># -*- coding: utf-8 -*-
from helpers import HTTPEventHandler
import looping
import buffer_event
class StatusClient(HTTPEventHandler):
def __init__(self, server, sock, address, request_parser):
HTTPEventHandler.__init__(self, server, sock, address, request_parser,
204, b'No content')
class StreamClient(HTTPEventHandler):
def __init__(self, server, source, sock, address, request_parser, content_type):
HTTPEventHandler.__init__(self, server, sock, address, request_parser,
200, b'OK', {b'Content-Length': None,
b'Content-Type': content_type})
self.source = source
def add_packet(self, packet):
self.output_buffer.add_buffer(packet)
def close(self):
self.server.remove_client(self)
HTTPEventHandler.close(self)
def flush_if_ready(self):
# print 'buffer is ready: %s' % (self.output_buffer.ready)
if self.output_buffer.ready:
self.output_buffer.flush()
def handle_event(self, eventmask):
if eventmask & looping.POLLOUT:
# print 'lol I can write, %d items in buffer' % (len(self.output_buffer.buffer_queue))
self.output_buffer.flush()
else:
print 'Unexpected eventmask %s' % (eventmask)
<commit_msg>Remove commented out debug print lines<commit_after>
|
# -*- coding: utf-8 -*-
from helpers import HTTPEventHandler
import looping
import buffer_event
class StatusClient(HTTPEventHandler):
def __init__(self, server, sock, address, request_parser):
HTTPEventHandler.__init__(self, server, sock, address, request_parser,
204, b'No content')
class StreamClient(HTTPEventHandler):
def __init__(self, server, source, sock, address, request_parser, content_type):
HTTPEventHandler.__init__(self, server, sock, address, request_parser,
200, b'OK', {b'Content-Length': None,
b'Content-Type': content_type})
self.source = source
def add_packet(self, packet):
self.output_buffer.add_buffer(packet)
def close(self):
self.server.remove_client(self)
HTTPEventHandler.close(self)
def flush_if_ready(self):
if self.output_buffer.ready:
self.output_buffer.flush()
def handle_event(self, eventmask):
if eventmask & looping.POLLOUT:
self.output_buffer.flush()
else:
print 'Unexpected eventmask %s' % (eventmask)
|
# -*- coding: utf-8 -*-
from helpers import HTTPEventHandler
import looping
import buffer_event
class StatusClient(HTTPEventHandler):
def __init__(self, server, sock, address, request_parser):
HTTPEventHandler.__init__(self, server, sock, address, request_parser,
204, b'No content')
class StreamClient(HTTPEventHandler):
def __init__(self, server, source, sock, address, request_parser, content_type):
HTTPEventHandler.__init__(self, server, sock, address, request_parser,
200, b'OK', {b'Content-Length': None,
b'Content-Type': content_type})
self.source = source
def add_packet(self, packet):
self.output_buffer.add_buffer(packet)
def close(self):
self.server.remove_client(self)
HTTPEventHandler.close(self)
def flush_if_ready(self):
# print 'buffer is ready: %s' % (self.output_buffer.ready)
if self.output_buffer.ready:
self.output_buffer.flush()
def handle_event(self, eventmask):
if eventmask & looping.POLLOUT:
# print 'lol I can write, %d items in buffer' % (len(self.output_buffer.buffer_queue))
self.output_buffer.flush()
else:
print 'Unexpected eventmask %s' % (eventmask)
Remove commented out debug print lines# -*- coding: utf-8 -*-
from helpers import HTTPEventHandler
import looping
import buffer_event
class StatusClient(HTTPEventHandler):
def __init__(self, server, sock, address, request_parser):
HTTPEventHandler.__init__(self, server, sock, address, request_parser,
204, b'No content')
class StreamClient(HTTPEventHandler):
def __init__(self, server, source, sock, address, request_parser, content_type):
HTTPEventHandler.__init__(self, server, sock, address, request_parser,
200, b'OK', {b'Content-Length': None,
b'Content-Type': content_type})
self.source = source
def add_packet(self, packet):
self.output_buffer.add_buffer(packet)
def close(self):
self.server.remove_client(self)
HTTPEventHandler.close(self)
def flush_if_ready(self):
if self.output_buffer.ready:
self.output_buffer.flush()
def handle_event(self, eventmask):
if eventmask & looping.POLLOUT:
self.output_buffer.flush()
else:
print 'Unexpected eventmask %s' % (eventmask)
|
<commit_before># -*- coding: utf-8 -*-
from helpers import HTTPEventHandler
import looping
import buffer_event
class StatusClient(HTTPEventHandler):
def __init__(self, server, sock, address, request_parser):
HTTPEventHandler.__init__(self, server, sock, address, request_parser,
204, b'No content')
class StreamClient(HTTPEventHandler):
def __init__(self, server, source, sock, address, request_parser, content_type):
HTTPEventHandler.__init__(self, server, sock, address, request_parser,
200, b'OK', {b'Content-Length': None,
b'Content-Type': content_type})
self.source = source
def add_packet(self, packet):
self.output_buffer.add_buffer(packet)
def close(self):
self.server.remove_client(self)
HTTPEventHandler.close(self)
def flush_if_ready(self):
# print 'buffer is ready: %s' % (self.output_buffer.ready)
if self.output_buffer.ready:
self.output_buffer.flush()
def handle_event(self, eventmask):
if eventmask & looping.POLLOUT:
# print 'lol I can write, %d items in buffer' % (len(self.output_buffer.buffer_queue))
self.output_buffer.flush()
else:
print 'Unexpected eventmask %s' % (eventmask)
<commit_msg>Remove commented out debug print lines<commit_after># -*- coding: utf-8 -*-
from helpers import HTTPEventHandler
import looping
import buffer_event
class StatusClient(HTTPEventHandler):
def __init__(self, server, sock, address, request_parser):
HTTPEventHandler.__init__(self, server, sock, address, request_parser,
204, b'No content')
class StreamClient(HTTPEventHandler):
def __init__(self, server, source, sock, address, request_parser, content_type):
HTTPEventHandler.__init__(self, server, sock, address, request_parser,
200, b'OK', {b'Content-Length': None,
b'Content-Type': content_type})
self.source = source
def add_packet(self, packet):
self.output_buffer.add_buffer(packet)
def close(self):
self.server.remove_client(self)
HTTPEventHandler.close(self)
def flush_if_ready(self):
if self.output_buffer.ready:
self.output_buffer.flush()
def handle_event(self, eventmask):
if eventmask & looping.POLLOUT:
self.output_buffer.flush()
else:
print 'Unexpected eventmask %s' % (eventmask)
|
5776e16a93f804bbdb34bdcac5128ecac25f6e88
|
micall/core/filter_quality.py
|
micall/core/filter_quality.py
|
#! /usr/bin/env python
import argparse
import csv
import itertools
import logging
import math
import os
import miseq_logging
BAD_ERROR_RATE = 7.5
def parseArgs():
parser = argparse.ArgumentParser(
description='Post-processing of short-read alignments.')
parser.add_argument('quality_csv',
type=argparse.FileType('rU'),
help='QC error rate data, grouped by tile')
parser.add_argument('bad_cycles_csv',
type=argparse.FileType('w'),
help='List of tiles and cycles rejected for poor quality')
return parser.parse_args()
logger = miseq_logging.init_logging_console_only(logging.DEBUG)
def grouper(cycle):
return (cycle['tile'], math.copysign(1, int(cycle['cycle'])))
def main():
args = parseArgs()
with args.quality_csv, args.bad_cycles_csv:
reader = csv.DictReader(args.quality_csv)
writer = csv.DictWriter(args.bad_cycles_csv,
['tile', 'cycle', 'errorrate'],
lineterminator=os.linesep)
writer.writeheader()
for _tile_direction, cycles in itertools.groupby(reader, grouper):
is_bad = False
for cycle in cycles:
errorrate = cycle['errorrate']
is_bad = (is_bad or
errorrate == '' or
float(errorrate) >= BAD_ERROR_RATE)
if is_bad:
writer.writerow(cycle)
if __name__ == '__main__':
main()
|
#! /usr/bin/env python
import argparse
import csv
import itertools
import logging
import math
import os
import miseq_logging
BAD_ERROR_RATE = 7.5
def parseArgs():
parser = argparse.ArgumentParser(
description='Post-processing of short-read alignments.')
parser.add_argument('quality_csv',
type=argparse.FileType('rU'),
help='QC error rate data, grouped by tile')
parser.add_argument('bad_cycles_csv',
type=argparse.FileType('w'),
help='List of tiles and cycles rejected for poor quality')
return parser.parse_args()
logger = miseq_logging.init_logging_console_only(logging.DEBUG)
def grouper(cycle):
return (cycle['tile'], math.copysign(1, int(cycle['cycle'])))
def main():
args = parseArgs()
with args.quality_csv, args.bad_cycles_csv:
reader = csv.DictReader(args.quality_csv)
writer = csv.DictWriter(args.bad_cycles_csv,
['tile', 'cycle', 'errorrate'],
lineterminator=os.linesep)
writer.writeheader()
for _tile_direction, cycles in itertools.groupby(reader, grouper):
is_bad = False
for cycle in cycles:
errorrate = cycle['errorrate']
is_bad = (is_bad or
errorrate == '' or
float(errorrate) >= BAD_ERROR_RATE)
if is_bad:
writer.writerow(cycle)
if __name__ == '__main__':
main()
|
Fix a bunch of PEP8 warnings.
|
Fix a bunch of PEP8 warnings.
|
Python
|
agpl-3.0
|
cfe-lab/MiCall,cfe-lab/MiCall,cfe-lab/MiCall
|
#! /usr/bin/env python
import argparse
import csv
import itertools
import logging
import math
import os
import miseq_logging
BAD_ERROR_RATE = 7.5
def parseArgs():
parser = argparse.ArgumentParser(
description='Post-processing of short-read alignments.')
parser.add_argument('quality_csv',
type=argparse.FileType('rU'),
help='QC error rate data, grouped by tile')
parser.add_argument('bad_cycles_csv',
type=argparse.FileType('w'),
help='List of tiles and cycles rejected for poor quality')
return parser.parse_args()
logger = miseq_logging.init_logging_console_only(logging.DEBUG)
def grouper(cycle):
return (cycle['tile'], math.copysign(1, int(cycle['cycle'])))
def main():
args = parseArgs()
with args.quality_csv, args.bad_cycles_csv:
reader = csv.DictReader(args.quality_csv)
writer = csv.DictWriter(args.bad_cycles_csv,
['tile', 'cycle', 'errorrate'],
lineterminator=os.linesep)
writer.writeheader()
for _tile_direction, cycles in itertools.groupby(reader, grouper):
is_bad = False
for cycle in cycles:
errorrate = cycle['errorrate']
is_bad = (is_bad or
errorrate == '' or
float(errorrate) >= BAD_ERROR_RATE)
if is_bad:
writer.writerow(cycle)
if __name__ == '__main__':
main()Fix a bunch of PEP8 warnings.
|
#! /usr/bin/env python
import argparse
import csv
import itertools
import logging
import math
import os
import miseq_logging
BAD_ERROR_RATE = 7.5
def parseArgs():
parser = argparse.ArgumentParser(
description='Post-processing of short-read alignments.')
parser.add_argument('quality_csv',
type=argparse.FileType('rU'),
help='QC error rate data, grouped by tile')
parser.add_argument('bad_cycles_csv',
type=argparse.FileType('w'),
help='List of tiles and cycles rejected for poor quality')
return parser.parse_args()
logger = miseq_logging.init_logging_console_only(logging.DEBUG)
def grouper(cycle):
return (cycle['tile'], math.copysign(1, int(cycle['cycle'])))
def main():
args = parseArgs()
with args.quality_csv, args.bad_cycles_csv:
reader = csv.DictReader(args.quality_csv)
writer = csv.DictWriter(args.bad_cycles_csv,
['tile', 'cycle', 'errorrate'],
lineterminator=os.linesep)
writer.writeheader()
for _tile_direction, cycles in itertools.groupby(reader, grouper):
is_bad = False
for cycle in cycles:
errorrate = cycle['errorrate']
is_bad = (is_bad or
errorrate == '' or
float(errorrate) >= BAD_ERROR_RATE)
if is_bad:
writer.writerow(cycle)
if __name__ == '__main__':
main()
|
<commit_before>#! /usr/bin/env python
import argparse
import csv
import itertools
import logging
import math
import os
import miseq_logging
BAD_ERROR_RATE = 7.5
def parseArgs():
parser = argparse.ArgumentParser(
description='Post-processing of short-read alignments.')
parser.add_argument('quality_csv',
type=argparse.FileType('rU'),
help='QC error rate data, grouped by tile')
parser.add_argument('bad_cycles_csv',
type=argparse.FileType('w'),
help='List of tiles and cycles rejected for poor quality')
return parser.parse_args()
logger = miseq_logging.init_logging_console_only(logging.DEBUG)
def grouper(cycle):
return (cycle['tile'], math.copysign(1, int(cycle['cycle'])))
def main():
args = parseArgs()
with args.quality_csv, args.bad_cycles_csv:
reader = csv.DictReader(args.quality_csv)
writer = csv.DictWriter(args.bad_cycles_csv,
['tile', 'cycle', 'errorrate'],
lineterminator=os.linesep)
writer.writeheader()
for _tile_direction, cycles in itertools.groupby(reader, grouper):
is_bad = False
for cycle in cycles:
errorrate = cycle['errorrate']
is_bad = (is_bad or
errorrate == '' or
float(errorrate) >= BAD_ERROR_RATE)
if is_bad:
writer.writerow(cycle)
if __name__ == '__main__':
main()<commit_msg>Fix a bunch of PEP8 warnings.<commit_after>
|
#! /usr/bin/env python
import argparse
import csv
import itertools
import logging
import math
import os
import miseq_logging
BAD_ERROR_RATE = 7.5
def parseArgs():
parser = argparse.ArgumentParser(
description='Post-processing of short-read alignments.')
parser.add_argument('quality_csv',
type=argparse.FileType('rU'),
help='QC error rate data, grouped by tile')
parser.add_argument('bad_cycles_csv',
type=argparse.FileType('w'),
help='List of tiles and cycles rejected for poor quality')
return parser.parse_args()
logger = miseq_logging.init_logging_console_only(logging.DEBUG)
def grouper(cycle):
return (cycle['tile'], math.copysign(1, int(cycle['cycle'])))
def main():
args = parseArgs()
with args.quality_csv, args.bad_cycles_csv:
reader = csv.DictReader(args.quality_csv)
writer = csv.DictWriter(args.bad_cycles_csv,
['tile', 'cycle', 'errorrate'],
lineterminator=os.linesep)
writer.writeheader()
for _tile_direction, cycles in itertools.groupby(reader, grouper):
is_bad = False
for cycle in cycles:
errorrate = cycle['errorrate']
is_bad = (is_bad or
errorrate == '' or
float(errorrate) >= BAD_ERROR_RATE)
if is_bad:
writer.writerow(cycle)
if __name__ == '__main__':
main()
|
#! /usr/bin/env python
import argparse
import csv
import itertools
import logging
import math
import os
import miseq_logging
BAD_ERROR_RATE = 7.5
def parseArgs():
parser = argparse.ArgumentParser(
description='Post-processing of short-read alignments.')
parser.add_argument('quality_csv',
type=argparse.FileType('rU'),
help='QC error rate data, grouped by tile')
parser.add_argument('bad_cycles_csv',
type=argparse.FileType('w'),
help='List of tiles and cycles rejected for poor quality')
return parser.parse_args()
logger = miseq_logging.init_logging_console_only(logging.DEBUG)
def grouper(cycle):
return (cycle['tile'], math.copysign(1, int(cycle['cycle'])))
def main():
args = parseArgs()
with args.quality_csv, args.bad_cycles_csv:
reader = csv.DictReader(args.quality_csv)
writer = csv.DictWriter(args.bad_cycles_csv,
['tile', 'cycle', 'errorrate'],
lineterminator=os.linesep)
writer.writeheader()
for _tile_direction, cycles in itertools.groupby(reader, grouper):
is_bad = False
for cycle in cycles:
errorrate = cycle['errorrate']
is_bad = (is_bad or
errorrate == '' or
float(errorrate) >= BAD_ERROR_RATE)
if is_bad:
writer.writerow(cycle)
if __name__ == '__main__':
main()Fix a bunch of PEP8 warnings.#! /usr/bin/env python
import argparse
import csv
import itertools
import logging
import math
import os
import miseq_logging
BAD_ERROR_RATE = 7.5
def parseArgs():
parser = argparse.ArgumentParser(
description='Post-processing of short-read alignments.')
parser.add_argument('quality_csv',
type=argparse.FileType('rU'),
help='QC error rate data, grouped by tile')
parser.add_argument('bad_cycles_csv',
type=argparse.FileType('w'),
help='List of tiles and cycles rejected for poor quality')
return parser.parse_args()
logger = miseq_logging.init_logging_console_only(logging.DEBUG)
def grouper(cycle):
return (cycle['tile'], math.copysign(1, int(cycle['cycle'])))
def main():
args = parseArgs()
with args.quality_csv, args.bad_cycles_csv:
reader = csv.DictReader(args.quality_csv)
writer = csv.DictWriter(args.bad_cycles_csv,
['tile', 'cycle', 'errorrate'],
lineterminator=os.linesep)
writer.writeheader()
for _tile_direction, cycles in itertools.groupby(reader, grouper):
is_bad = False
for cycle in cycles:
errorrate = cycle['errorrate']
is_bad = (is_bad or
errorrate == '' or
float(errorrate) >= BAD_ERROR_RATE)
if is_bad:
writer.writerow(cycle)
if __name__ == '__main__':
main()
|
<commit_before>#! /usr/bin/env python
import argparse
import csv
import itertools
import logging
import math
import os
import miseq_logging
BAD_ERROR_RATE = 7.5
def parseArgs():
parser = argparse.ArgumentParser(
description='Post-processing of short-read alignments.')
parser.add_argument('quality_csv',
type=argparse.FileType('rU'),
help='QC error rate data, grouped by tile')
parser.add_argument('bad_cycles_csv',
type=argparse.FileType('w'),
help='List of tiles and cycles rejected for poor quality')
return parser.parse_args()
logger = miseq_logging.init_logging_console_only(logging.DEBUG)
def grouper(cycle):
return (cycle['tile'], math.copysign(1, int(cycle['cycle'])))
def main():
args = parseArgs()
with args.quality_csv, args.bad_cycles_csv:
reader = csv.DictReader(args.quality_csv)
writer = csv.DictWriter(args.bad_cycles_csv,
['tile', 'cycle', 'errorrate'],
lineterminator=os.linesep)
writer.writeheader()
for _tile_direction, cycles in itertools.groupby(reader, grouper):
is_bad = False
for cycle in cycles:
errorrate = cycle['errorrate']
is_bad = (is_bad or
errorrate == '' or
float(errorrate) >= BAD_ERROR_RATE)
if is_bad:
writer.writerow(cycle)
if __name__ == '__main__':
main()<commit_msg>Fix a bunch of PEP8 warnings.<commit_after>#! /usr/bin/env python
import argparse
import csv
import itertools
import logging
import math
import os
import miseq_logging
BAD_ERROR_RATE = 7.5
def parseArgs():
parser = argparse.ArgumentParser(
description='Post-processing of short-read alignments.')
parser.add_argument('quality_csv',
type=argparse.FileType('rU'),
help='QC error rate data, grouped by tile')
parser.add_argument('bad_cycles_csv',
type=argparse.FileType('w'),
help='List of tiles and cycles rejected for poor quality')
return parser.parse_args()
logger = miseq_logging.init_logging_console_only(logging.DEBUG)
def grouper(cycle):
return (cycle['tile'], math.copysign(1, int(cycle['cycle'])))
def main():
args = parseArgs()
with args.quality_csv, args.bad_cycles_csv:
reader = csv.DictReader(args.quality_csv)
writer = csv.DictWriter(args.bad_cycles_csv,
['tile', 'cycle', 'errorrate'],
lineterminator=os.linesep)
writer.writeheader()
for _tile_direction, cycles in itertools.groupby(reader, grouper):
is_bad = False
for cycle in cycles:
errorrate = cycle['errorrate']
is_bad = (is_bad or
errorrate == '' or
float(errorrate) >= BAD_ERROR_RATE)
if is_bad:
writer.writerow(cycle)
if __name__ == '__main__':
main()
|
ef29e2dd9426d42312de6969b927b5315f3df115
|
src/redmill/catalog.py
|
src/redmill/catalog.py
|
# This file is part of Redmill.
#
# Redmill is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Redmill is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Redmill. If not, see <http://www.gnu.org/licenses/>.
from . import Image
class Catalog(object):
def __init__(self):
self._id_to_path = {}
self._keyword_to_path = {}
def add_image(self, path):
try:
image = Image(path)
except Exception, e:
print "Cannot load {}: {}".format(path, e)
self._id_to_path[image.id] = path
for keyword in image.keywords:
self._keyword_to_path.setdefault(keyword, set()).add(path)
def remove_image(self, path):
pass
def get_image(self, id_):
return self._id_to_path[id_]
def get_images(self, keyword):
return self._keyword_to_path[keyword]
|
# This file is part of Redmill.
#
# Redmill is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Redmill is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Redmill. If not, see <http://www.gnu.org/licenses/>.
from . import Image
class Catalog(object):
def __init__(self):
self._id_to_image = {}
self._keyword_to_image = {}
def add_image(self, path):
try:
image = Image(path)
except Exception, e:
print "Cannot load {}: {}".format(path, e)
self._id_to_image[image.id] = image
for keyword in image.keywords:
self._keyword_to_image.setdefault(keyword, set()).add(image)
def remove_image(self, path):
pass
def get_image(self, id_):
return self._id_to_image[id_]
def get_images(self, keyword):
return self._keyword_to_image[keyword]
def _get_id_to_image(self):
return self._id_to_image
def _get_keyword_to_image(self):
return self._keyword_to_image
id_to_image = property(_get_id_to_image)
keyword_to_image = property(_get_keyword_to_image)
|
Save images instead of paths in Catalog.
|
Save images instead of paths in Catalog.
|
Python
|
agpl-3.0
|
lamyj/redmill,lamyj/redmill,lamyj/redmill
|
# This file is part of Redmill.
#
# Redmill is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Redmill is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Redmill. If not, see <http://www.gnu.org/licenses/>.
from . import Image
class Catalog(object):
def __init__(self):
self._id_to_path = {}
self._keyword_to_path = {}
def add_image(self, path):
try:
image = Image(path)
except Exception, e:
print "Cannot load {}: {}".format(path, e)
self._id_to_path[image.id] = path
for keyword in image.keywords:
self._keyword_to_path.setdefault(keyword, set()).add(path)
def remove_image(self, path):
pass
def get_image(self, id_):
return self._id_to_path[id_]
def get_images(self, keyword):
return self._keyword_to_path[keyword]
Save images instead of paths in Catalog.
|
# This file is part of Redmill.
#
# Redmill is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Redmill is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Redmill. If not, see <http://www.gnu.org/licenses/>.
from . import Image
class Catalog(object):
def __init__(self):
self._id_to_image = {}
self._keyword_to_image = {}
def add_image(self, path):
try:
image = Image(path)
except Exception, e:
print "Cannot load {}: {}".format(path, e)
self._id_to_image[image.id] = image
for keyword in image.keywords:
self._keyword_to_image.setdefault(keyword, set()).add(image)
def remove_image(self, path):
pass
def get_image(self, id_):
return self._id_to_image[id_]
def get_images(self, keyword):
return self._keyword_to_image[keyword]
def _get_id_to_image(self):
return self._id_to_image
def _get_keyword_to_image(self):
return self._keyword_to_image
id_to_image = property(_get_id_to_image)
keyword_to_image = property(_get_keyword_to_image)
|
<commit_before># This file is part of Redmill.
#
# Redmill is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Redmill is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Redmill. If not, see <http://www.gnu.org/licenses/>.
from . import Image
class Catalog(object):
def __init__(self):
self._id_to_path = {}
self._keyword_to_path = {}
def add_image(self, path):
try:
image = Image(path)
except Exception, e:
print "Cannot load {}: {}".format(path, e)
self._id_to_path[image.id] = path
for keyword in image.keywords:
self._keyword_to_path.setdefault(keyword, set()).add(path)
def remove_image(self, path):
pass
def get_image(self, id_):
return self._id_to_path[id_]
def get_images(self, keyword):
return self._keyword_to_path[keyword]
<commit_msg>Save images instead of paths in Catalog.<commit_after>
|
# This file is part of Redmill.
#
# Redmill is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Redmill is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Redmill. If not, see <http://www.gnu.org/licenses/>.
from . import Image
class Catalog(object):
def __init__(self):
self._id_to_image = {}
self._keyword_to_image = {}
def add_image(self, path):
try:
image = Image(path)
except Exception, e:
print "Cannot load {}: {}".format(path, e)
self._id_to_image[image.id] = image
for keyword in image.keywords:
self._keyword_to_image.setdefault(keyword, set()).add(image)
def remove_image(self, path):
pass
def get_image(self, id_):
return self._id_to_image[id_]
def get_images(self, keyword):
return self._keyword_to_image[keyword]
def _get_id_to_image(self):
return self._id_to_image
def _get_keyword_to_image(self):
return self._keyword_to_image
id_to_image = property(_get_id_to_image)
keyword_to_image = property(_get_keyword_to_image)
|
# This file is part of Redmill.
#
# Redmill is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Redmill is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Redmill. If not, see <http://www.gnu.org/licenses/>.
from . import Image
class Catalog(object):
def __init__(self):
self._id_to_path = {}
self._keyword_to_path = {}
def add_image(self, path):
try:
image = Image(path)
except Exception, e:
print "Cannot load {}: {}".format(path, e)
self._id_to_path[image.id] = path
for keyword in image.keywords:
self._keyword_to_path.setdefault(keyword, set()).add(path)
def remove_image(self, path):
pass
def get_image(self, id_):
return self._id_to_path[id_]
def get_images(self, keyword):
return self._keyword_to_path[keyword]
Save images instead of paths in Catalog.# This file is part of Redmill.
#
# Redmill is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Redmill is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Redmill. If not, see <http://www.gnu.org/licenses/>.
from . import Image
class Catalog(object):
def __init__(self):
self._id_to_image = {}
self._keyword_to_image = {}
def add_image(self, path):
try:
image = Image(path)
except Exception, e:
print "Cannot load {}: {}".format(path, e)
self._id_to_image[image.id] = image
for keyword in image.keywords:
self._keyword_to_image.setdefault(keyword, set()).add(image)
def remove_image(self, path):
pass
def get_image(self, id_):
return self._id_to_image[id_]
def get_images(self, keyword):
return self._keyword_to_image[keyword]
def _get_id_to_image(self):
return self._id_to_image
def _get_keyword_to_image(self):
return self._keyword_to_image
id_to_image = property(_get_id_to_image)
keyword_to_image = property(_get_keyword_to_image)
|
<commit_before># This file is part of Redmill.
#
# Redmill is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Redmill is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Redmill. If not, see <http://www.gnu.org/licenses/>.
from . import Image
class Catalog(object):
def __init__(self):
self._id_to_path = {}
self._keyword_to_path = {}
def add_image(self, path):
try:
image = Image(path)
except Exception, e:
print "Cannot load {}: {}".format(path, e)
self._id_to_path[image.id] = path
for keyword in image.keywords:
self._keyword_to_path.setdefault(keyword, set()).add(path)
def remove_image(self, path):
pass
def get_image(self, id_):
return self._id_to_path[id_]
def get_images(self, keyword):
return self._keyword_to_path[keyword]
<commit_msg>Save images instead of paths in Catalog.<commit_after># This file is part of Redmill.
#
# Redmill is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Redmill is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Redmill. If not, see <http://www.gnu.org/licenses/>.
from . import Image
class Catalog(object):
def __init__(self):
self._id_to_image = {}
self._keyword_to_image = {}
def add_image(self, path):
try:
image = Image(path)
except Exception, e:
print "Cannot load {}: {}".format(path, e)
self._id_to_image[image.id] = image
for keyword in image.keywords:
self._keyword_to_image.setdefault(keyword, set()).add(image)
def remove_image(self, path):
pass
def get_image(self, id_):
return self._id_to_image[id_]
def get_images(self, keyword):
return self._keyword_to_image[keyword]
def _get_id_to_image(self):
return self._id_to_image
def _get_keyword_to_image(self):
return self._keyword_to_image
id_to_image = property(_get_id_to_image)
keyword_to_image = property(_get_keyword_to_image)
|
1d11f8d123709626ba3e41b7697a1511c034ab55
|
timeser.py
|
timeser.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from goristock import goristock
a = goristock(8261)
for i in range(3):
a.display(3,6,9)
a.raw_data.pop()
a.data_date.pop()
a.stock_range.pop()
a.stock_vol.pop()
a.display(3,6,9)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from goristock import goristock
a = goristock(2201)
while len(a.raw_data) > 19:
a.raw_data.pop()
a.data_date.pop()
a.stock_range.pop()
a.stock_vol.pop()
if a.MAC(3) == '↑' and a.MAC(6) == '↑' and a.MAC(18) == '↑':
#if a.MAO(3,6)[0][1][-1] < 0 and a.MAO(3,6)[1] == '↑':
print 'Buy: %s %s %s %s' % (a.stock_no,a.stock_name,a.data_date[-1],a.raw_data[-1])
#a.display(3,6,18)
elif a.MAC(3) == '↓' and a.VOLMAX3:
#if a.MAO(3,6)[0][1][-1] > 0 and a.MAO(3,6)[1] == '↓':
print 'Sell: %s %s %s %s' % (a.stock_no,a.stock_name,a.data_date[-1],a.raw_data[-1])
#a.display(3,6,18)
|
Add time serial test, buy or sell point.
|
Add time serial test, buy or sell point.
|
Python
|
mit
|
toomore/goristock
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from goristock import goristock
a = goristock(8261)
for i in range(3):
a.display(3,6,9)
a.raw_data.pop()
a.data_date.pop()
a.stock_range.pop()
a.stock_vol.pop()
a.display(3,6,9)
Add time serial test, buy or sell point.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from goristock import goristock
a = goristock(2201)
while len(a.raw_data) > 19:
a.raw_data.pop()
a.data_date.pop()
a.stock_range.pop()
a.stock_vol.pop()
if a.MAC(3) == '↑' and a.MAC(6) == '↑' and a.MAC(18) == '↑':
#if a.MAO(3,6)[0][1][-1] < 0 and a.MAO(3,6)[1] == '↑':
print 'Buy: %s %s %s %s' % (a.stock_no,a.stock_name,a.data_date[-1],a.raw_data[-1])
#a.display(3,6,18)
elif a.MAC(3) == '↓' and a.VOLMAX3:
#if a.MAO(3,6)[0][1][-1] > 0 and a.MAO(3,6)[1] == '↓':
print 'Sell: %s %s %s %s' % (a.stock_no,a.stock_name,a.data_date[-1],a.raw_data[-1])
#a.display(3,6,18)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from goristock import goristock
a = goristock(8261)
for i in range(3):
a.display(3,6,9)
a.raw_data.pop()
a.data_date.pop()
a.stock_range.pop()
a.stock_vol.pop()
a.display(3,6,9)
<commit_msg>Add time serial test, buy or sell point.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from goristock import goristock
a = goristock(2201)
while len(a.raw_data) > 19:
a.raw_data.pop()
a.data_date.pop()
a.stock_range.pop()
a.stock_vol.pop()
if a.MAC(3) == '↑' and a.MAC(6) == '↑' and a.MAC(18) == '↑':
#if a.MAO(3,6)[0][1][-1] < 0 and a.MAO(3,6)[1] == '↑':
print 'Buy: %s %s %s %s' % (a.stock_no,a.stock_name,a.data_date[-1],a.raw_data[-1])
#a.display(3,6,18)
elif a.MAC(3) == '↓' and a.VOLMAX3:
#if a.MAO(3,6)[0][1][-1] > 0 and a.MAO(3,6)[1] == '↓':
print 'Sell: %s %s %s %s' % (a.stock_no,a.stock_name,a.data_date[-1],a.raw_data[-1])
#a.display(3,6,18)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from goristock import goristock
a = goristock(8261)
for i in range(3):
a.display(3,6,9)
a.raw_data.pop()
a.data_date.pop()
a.stock_range.pop()
a.stock_vol.pop()
a.display(3,6,9)
Add time serial test, buy or sell point.#!/usr/bin/env python
# -*- coding: utf-8 -*-
from goristock import goristock
a = goristock(2201)
while len(a.raw_data) > 19:
a.raw_data.pop()
a.data_date.pop()
a.stock_range.pop()
a.stock_vol.pop()
if a.MAC(3) == '↑' and a.MAC(6) == '↑' and a.MAC(18) == '↑':
#if a.MAO(3,6)[0][1][-1] < 0 and a.MAO(3,6)[1] == '↑':
print 'Buy: %s %s %s %s' % (a.stock_no,a.stock_name,a.data_date[-1],a.raw_data[-1])
#a.display(3,6,18)
elif a.MAC(3) == '↓' and a.VOLMAX3:
#if a.MAO(3,6)[0][1][-1] > 0 and a.MAO(3,6)[1] == '↓':
print 'Sell: %s %s %s %s' % (a.stock_no,a.stock_name,a.data_date[-1],a.raw_data[-1])
#a.display(3,6,18)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from goristock import goristock
a = goristock(8261)
for i in range(3):
a.display(3,6,9)
a.raw_data.pop()
a.data_date.pop()
a.stock_range.pop()
a.stock_vol.pop()
a.display(3,6,9)
<commit_msg>Add time serial test, buy or sell point.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from goristock import goristock
a = goristock(2201)
while len(a.raw_data) > 19:
a.raw_data.pop()
a.data_date.pop()
a.stock_range.pop()
a.stock_vol.pop()
if a.MAC(3) == '↑' and a.MAC(6) == '↑' and a.MAC(18) == '↑':
#if a.MAO(3,6)[0][1][-1] < 0 and a.MAO(3,6)[1] == '↑':
print 'Buy: %s %s %s %s' % (a.stock_no,a.stock_name,a.data_date[-1],a.raw_data[-1])
#a.display(3,6,18)
elif a.MAC(3) == '↓' and a.VOLMAX3:
#if a.MAO(3,6)[0][1][-1] > 0 and a.MAO(3,6)[1] == '↓':
print 'Sell: %s %s %s %s' % (a.stock_no,a.stock_name,a.data_date[-1],a.raw_data[-1])
#a.display(3,6,18)
|
4bf7c8f1522b433cbe7b9b9312a51942a9ea75c1
|
pytac/__init__.py
|
pytac/__init__.py
|
"""Pytac: Python Toolkit for Accelerator Controls."""
# PV types
SP = 'setpoint'
RB = 'readback'
# Unit systems
ENG = 'engineering'
PHYS = 'physics'
# Model types.
SIM = 'simulation'
LIVE = 'live'
|
"""Pytac: Python Toolkit for Accelerator Controls."""
# PV types
SP = 'setpoint'
RB = 'readback'
# Unit systems
ENG = 'engineering'
PHYS = 'physics'
# Model types.
SIM = 'simulation'
LIVE = 'live'
from . import device, element, lattice, load_csv, lattice, utils
|
Add modules to pytac namespace.
|
Add modules to pytac namespace.
|
Python
|
apache-2.0
|
willrogers/pytac,willrogers/pytac
|
"""Pytac: Python Toolkit for Accelerator Controls."""
# PV types
SP = 'setpoint'
RB = 'readback'
# Unit systems
ENG = 'engineering'
PHYS = 'physics'
# Model types.
SIM = 'simulation'
LIVE = 'live'
Add modules to pytac namespace.
|
"""Pytac: Python Toolkit for Accelerator Controls."""
# PV types
SP = 'setpoint'
RB = 'readback'
# Unit systems
ENG = 'engineering'
PHYS = 'physics'
# Model types.
SIM = 'simulation'
LIVE = 'live'
from . import device, element, lattice, load_csv, lattice, utils
|
<commit_before>"""Pytac: Python Toolkit for Accelerator Controls."""
# PV types
SP = 'setpoint'
RB = 'readback'
# Unit systems
ENG = 'engineering'
PHYS = 'physics'
# Model types.
SIM = 'simulation'
LIVE = 'live'
<commit_msg>Add modules to pytac namespace.<commit_after>
|
"""Pytac: Python Toolkit for Accelerator Controls."""
# PV types
SP = 'setpoint'
RB = 'readback'
# Unit systems
ENG = 'engineering'
PHYS = 'physics'
# Model types.
SIM = 'simulation'
LIVE = 'live'
from . import device, element, lattice, load_csv, lattice, utils
|
"""Pytac: Python Toolkit for Accelerator Controls."""
# PV types
SP = 'setpoint'
RB = 'readback'
# Unit systems
ENG = 'engineering'
PHYS = 'physics'
# Model types.
SIM = 'simulation'
LIVE = 'live'
Add modules to pytac namespace."""Pytac: Python Toolkit for Accelerator Controls."""
# PV types
SP = 'setpoint'
RB = 'readback'
# Unit systems
ENG = 'engineering'
PHYS = 'physics'
# Model types.
SIM = 'simulation'
LIVE = 'live'
from . import device, element, lattice, load_csv, lattice, utils
|
<commit_before>"""Pytac: Python Toolkit for Accelerator Controls."""
# PV types
SP = 'setpoint'
RB = 'readback'
# Unit systems
ENG = 'engineering'
PHYS = 'physics'
# Model types.
SIM = 'simulation'
LIVE = 'live'
<commit_msg>Add modules to pytac namespace.<commit_after>"""Pytac: Python Toolkit for Accelerator Controls."""
# PV types
SP = 'setpoint'
RB = 'readback'
# Unit systems
ENG = 'engineering'
PHYS = 'physics'
# Model types.
SIM = 'simulation'
LIVE = 'live'
from . import device, element, lattice, load_csv, lattice, utils
|
30044f8272557dbd367eab3dbe7c1ba1076484e9
|
readux/pages/models.py
|
readux/pages/models.py
|
from django.db import models
# Create your models here.
from django.utils.translation import ugettext_lazy as _
from feincms.module.page.models import Page
from feincms.content.richtext.models import RichTextContent
from feincms.content.medialibrary.models import MediaFileContent
# Page.register_extensions('datepublisher', 'translations') # Example set of extensions
# Page.register_extensions('changedate') # in docs but not available
Page.register_templates({
'title': _('Standard template'),
'path': 'pages/base.html',
'regions': (
('main', _('Main content area')),
# ('sidebar', _('Sidebar'), 'inherited'),
),
})
Page.create_content_type(RichTextContent)
Page.create_content_type(MediaFileContent, TYPE_CHOICES=(
('default', _('default')),
('lightbox', _('lightbox')),
))
|
from django.db import models
# Create your models here.
from django.utils.translation import ugettext_lazy as _
from feincms.module.page.models import Page
from feincms.content.richtext.models import RichTextContent
from feincms.content.medialibrary.models import MediaFileContent
from feincms.content.video.models import VideoContent
# Page.register_extensions('datepublisher', 'translations') # Example set of extensions
# Page.register_extensions('changedate') # in docs but not available
Page.register_templates({
'title': _('Standard template'),
'path': 'pages/base.html',
'regions': (
('main', _('Main content area')),
# ('sidebar', _('Sidebar'), 'inherited'),
),
})
Page.create_content_type(RichTextContent)
Page.create_content_type(MediaFileContent, TYPE_CHOICES=(
('default', _('default')),
('lightbox', _('lightbox')),
))
Page.create_content_type(VideoContent)
|
Enable video content for cms pages
|
Enable video content for cms pages
[#110289088]
|
Python
|
apache-2.0
|
emory-libraries/readux,emory-libraries/readux,emory-libraries/readux
|
from django.db import models
# Create your models here.
from django.utils.translation import ugettext_lazy as _
from feincms.module.page.models import Page
from feincms.content.richtext.models import RichTextContent
from feincms.content.medialibrary.models import MediaFileContent
# Page.register_extensions('datepublisher', 'translations') # Example set of extensions
# Page.register_extensions('changedate') # in docs but not available
Page.register_templates({
'title': _('Standard template'),
'path': 'pages/base.html',
'regions': (
('main', _('Main content area')),
# ('sidebar', _('Sidebar'), 'inherited'),
),
})
Page.create_content_type(RichTextContent)
Page.create_content_type(MediaFileContent, TYPE_CHOICES=(
('default', _('default')),
('lightbox', _('lightbox')),
))Enable video content for cms pages
[#110289088]
|
from django.db import models
# Create your models here.
from django.utils.translation import ugettext_lazy as _
from feincms.module.page.models import Page
from feincms.content.richtext.models import RichTextContent
from feincms.content.medialibrary.models import MediaFileContent
from feincms.content.video.models import VideoContent
# Page.register_extensions('datepublisher', 'translations') # Example set of extensions
# Page.register_extensions('changedate') # in docs but not available
Page.register_templates({
'title': _('Standard template'),
'path': 'pages/base.html',
'regions': (
('main', _('Main content area')),
# ('sidebar', _('Sidebar'), 'inherited'),
),
})
Page.create_content_type(RichTextContent)
Page.create_content_type(MediaFileContent, TYPE_CHOICES=(
('default', _('default')),
('lightbox', _('lightbox')),
))
Page.create_content_type(VideoContent)
|
<commit_before>from django.db import models
# Create your models here.
from django.utils.translation import ugettext_lazy as _
from feincms.module.page.models import Page
from feincms.content.richtext.models import RichTextContent
from feincms.content.medialibrary.models import MediaFileContent
# Page.register_extensions('datepublisher', 'translations') # Example set of extensions
# Page.register_extensions('changedate') # in docs but not available
Page.register_templates({
'title': _('Standard template'),
'path': 'pages/base.html',
'regions': (
('main', _('Main content area')),
# ('sidebar', _('Sidebar'), 'inherited'),
),
})
Page.create_content_type(RichTextContent)
Page.create_content_type(MediaFileContent, TYPE_CHOICES=(
('default', _('default')),
('lightbox', _('lightbox')),
))<commit_msg>Enable video content for cms pages
[#110289088]<commit_after>
|
from django.db import models
# Create your models here.
from django.utils.translation import ugettext_lazy as _
from feincms.module.page.models import Page
from feincms.content.richtext.models import RichTextContent
from feincms.content.medialibrary.models import MediaFileContent
from feincms.content.video.models import VideoContent
# Page.register_extensions('datepublisher', 'translations') # Example set of extensions
# Page.register_extensions('changedate') # in docs but not available
Page.register_templates({
'title': _('Standard template'),
'path': 'pages/base.html',
'regions': (
('main', _('Main content area')),
# ('sidebar', _('Sidebar'), 'inherited'),
),
})
Page.create_content_type(RichTextContent)
Page.create_content_type(MediaFileContent, TYPE_CHOICES=(
('default', _('default')),
('lightbox', _('lightbox')),
))
Page.create_content_type(VideoContent)
|
from django.db import models
# Create your models here.
from django.utils.translation import ugettext_lazy as _
from feincms.module.page.models import Page
from feincms.content.richtext.models import RichTextContent
from feincms.content.medialibrary.models import MediaFileContent
# Page.register_extensions('datepublisher', 'translations') # Example set of extensions
# Page.register_extensions('changedate') # in docs but not available
Page.register_templates({
'title': _('Standard template'),
'path': 'pages/base.html',
'regions': (
('main', _('Main content area')),
# ('sidebar', _('Sidebar'), 'inherited'),
),
})
Page.create_content_type(RichTextContent)
Page.create_content_type(MediaFileContent, TYPE_CHOICES=(
('default', _('default')),
('lightbox', _('lightbox')),
))Enable video content for cms pages
[#110289088]from django.db import models
# Create your models here.
from django.utils.translation import ugettext_lazy as _
from feincms.module.page.models import Page
from feincms.content.richtext.models import RichTextContent
from feincms.content.medialibrary.models import MediaFileContent
from feincms.content.video.models import VideoContent
# Page.register_extensions('datepublisher', 'translations') # Example set of extensions
# Page.register_extensions('changedate') # in docs but not available
Page.register_templates({
'title': _('Standard template'),
'path': 'pages/base.html',
'regions': (
('main', _('Main content area')),
# ('sidebar', _('Sidebar'), 'inherited'),
),
})
Page.create_content_type(RichTextContent)
Page.create_content_type(MediaFileContent, TYPE_CHOICES=(
('default', _('default')),
('lightbox', _('lightbox')),
))
Page.create_content_type(VideoContent)
|
<commit_before>from django.db import models
# Create your models here.
from django.utils.translation import ugettext_lazy as _
from feincms.module.page.models import Page
from feincms.content.richtext.models import RichTextContent
from feincms.content.medialibrary.models import MediaFileContent
# Page.register_extensions('datepublisher', 'translations') # Example set of extensions
# Page.register_extensions('changedate') # in docs but not available
Page.register_templates({
'title': _('Standard template'),
'path': 'pages/base.html',
'regions': (
('main', _('Main content area')),
# ('sidebar', _('Sidebar'), 'inherited'),
),
})
Page.create_content_type(RichTextContent)
Page.create_content_type(MediaFileContent, TYPE_CHOICES=(
('default', _('default')),
('lightbox', _('lightbox')),
))<commit_msg>Enable video content for cms pages
[#110289088]<commit_after>from django.db import models
# Create your models here.
from django.utils.translation import ugettext_lazy as _
from feincms.module.page.models import Page
from feincms.content.richtext.models import RichTextContent
from feincms.content.medialibrary.models import MediaFileContent
from feincms.content.video.models import VideoContent
# Page.register_extensions('datepublisher', 'translations') # Example set of extensions
# Page.register_extensions('changedate') # in docs but not available
Page.register_templates({
'title': _('Standard template'),
'path': 'pages/base.html',
'regions': (
('main', _('Main content area')),
# ('sidebar', _('Sidebar'), 'inherited'),
),
})
Page.create_content_type(RichTextContent)
Page.create_content_type(MediaFileContent, TYPE_CHOICES=(
('default', _('default')),
('lightbox', _('lightbox')),
))
Page.create_content_type(VideoContent)
|
2c7baf580631fc5a78b59560f65b5283b74f347b
|
tests/functional/test_download_l10n.py
|
tests/functional/test_download_l10n.py
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from bs4 import BeautifulSoup
import pytest
import requests
def pytest_generate_tests(metafunc):
if 'not headless' in metafunc.config.option.markexpr:
return # test deslected by mark expression
base_url = metafunc.config.option.base_url
if not base_url:
pytest.skip(
'This test requires a base URL to be specified on the command '
'line or in a configuration file.')
paths = (
'/firefox/all/',
'/firefox/beta/all/',
'/firefox/developer/all/',
'/firefox/organizations/all/',
'/firefox/android/all/',
'/firefox/android/beta/all/')
argvalues = []
for path in paths:
r = requests.get(base_url + path)
soup = BeautifulSoup(r.content, 'html.parser')
table = soup.find('table', class_='build-table')
urls = [a['href'] for a in table.find_all('a')]
assert len(urls) > 0
argvalues.extend(urls)
metafunc.parametrize('url', argvalues)
@pytest.mark.headless
@pytest.mark.nondestructive
def test_localized_download_links(url):
r = requests.head(url, allow_redirects=True)
assert requests.codes.ok == r.status_code
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from bs4 import BeautifulSoup
import pytest
import requests
def pytest_generate_tests(metafunc):
if 'not headless' in metafunc.config.option.markexpr:
return # test deslected by mark expression
base_url = metafunc.config.option.base_url
if not base_url:
pytest.skip(
'This test requires a base URL to be specified on the command '
'line or in a configuration file.')
paths = (
'/firefox/all/',
'/firefox/beta/all/',
'/firefox/developer/all/',
'/firefox/nightly/all/',
'/firefox/organizations/all/',
'/firefox/android/all/',
'/firefox/android/beta/all/')
argvalues = []
for path in paths:
r = requests.get(base_url + path)
soup = BeautifulSoup(r.content, 'html.parser')
table = soup.find('table', class_='build-table')
urls = [a['href'] for a in table.find_all('a')]
assert len(urls) > 0
argvalues.extend(urls)
metafunc.parametrize('url', argvalues)
@pytest.mark.headless
@pytest.mark.nondestructive
def test_localized_download_links(url):
r = requests.head(url, allow_redirects=True)
assert requests.codes.ok == r.status_code
|
Add /firefox/nightly/all/ to download link tests
|
Add /firefox/nightly/all/ to download link tests
|
Python
|
mpl-2.0
|
mozilla/bedrock,alexgibson/bedrock,mkmelin/bedrock,TheoChevalier/bedrock,sgarrity/bedrock,pascalchevrel/bedrock,CSCI-462-01-2017/bedrock,CSCI-462-01-2017/bedrock,schalkneethling/bedrock,flodolo/bedrock,schalkneethling/bedrock,flodolo/bedrock,sylvestre/bedrock,pascalchevrel/bedrock,sylvestre/bedrock,gerv/bedrock,craigcook/bedrock,gerv/bedrock,TheoChevalier/bedrock,sgarrity/bedrock,craigcook/bedrock,ericawright/bedrock,flodolo/bedrock,CSCI-462-01-2017/bedrock,Sancus/bedrock,sgarrity/bedrock,ericawright/bedrock,alexgibson/bedrock,kyoshino/bedrock,pascalchevrel/bedrock,TheoChevalier/bedrock,glogiotatidis/bedrock,mkmelin/bedrock,craigcook/bedrock,ericawright/bedrock,glogiotatidis/bedrock,MichaelKohler/bedrock,gerv/bedrock,mozilla/bedrock,mozilla/bedrock,kyoshino/bedrock,schalkneethling/bedrock,alexgibson/bedrock,alexgibson/bedrock,MichaelKohler/bedrock,Sancus/bedrock,flodolo/bedrock,hoosteeno/bedrock,craigcook/bedrock,hoosteeno/bedrock,kyoshino/bedrock,mkmelin/bedrock,mozilla/bedrock,CSCI-462-01-2017/bedrock,TheoChevalier/bedrock,Sancus/bedrock,sgarrity/bedrock,hoosteeno/bedrock,kyoshino/bedrock,sylvestre/bedrock,glogiotatidis/bedrock,glogiotatidis/bedrock,ericawright/bedrock,Sancus/bedrock,pascalchevrel/bedrock,gerv/bedrock,MichaelKohler/bedrock,sylvestre/bedrock,schalkneethling/bedrock,mkmelin/bedrock,hoosteeno/bedrock,MichaelKohler/bedrock
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from bs4 import BeautifulSoup
import pytest
import requests
def pytest_generate_tests(metafunc):
if 'not headless' in metafunc.config.option.markexpr:
return # test deslected by mark expression
base_url = metafunc.config.option.base_url
if not base_url:
pytest.skip(
'This test requires a base URL to be specified on the command '
'line or in a configuration file.')
paths = (
'/firefox/all/',
'/firefox/beta/all/',
'/firefox/developer/all/',
'/firefox/organizations/all/',
'/firefox/android/all/',
'/firefox/android/beta/all/')
argvalues = []
for path in paths:
r = requests.get(base_url + path)
soup = BeautifulSoup(r.content, 'html.parser')
table = soup.find('table', class_='build-table')
urls = [a['href'] for a in table.find_all('a')]
assert len(urls) > 0
argvalues.extend(urls)
metafunc.parametrize('url', argvalues)
@pytest.mark.headless
@pytest.mark.nondestructive
def test_localized_download_links(url):
r = requests.head(url, allow_redirects=True)
assert requests.codes.ok == r.status_code
Add /firefox/nightly/all/ to download link tests
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from bs4 import BeautifulSoup
import pytest
import requests
def pytest_generate_tests(metafunc):
if 'not headless' in metafunc.config.option.markexpr:
return # test deslected by mark expression
base_url = metafunc.config.option.base_url
if not base_url:
pytest.skip(
'This test requires a base URL to be specified on the command '
'line or in a configuration file.')
paths = (
'/firefox/all/',
'/firefox/beta/all/',
'/firefox/developer/all/',
'/firefox/nightly/all/',
'/firefox/organizations/all/',
'/firefox/android/all/',
'/firefox/android/beta/all/')
argvalues = []
for path in paths:
r = requests.get(base_url + path)
soup = BeautifulSoup(r.content, 'html.parser')
table = soup.find('table', class_='build-table')
urls = [a['href'] for a in table.find_all('a')]
assert len(urls) > 0
argvalues.extend(urls)
metafunc.parametrize('url', argvalues)
@pytest.mark.headless
@pytest.mark.nondestructive
def test_localized_download_links(url):
r = requests.head(url, allow_redirects=True)
assert requests.codes.ok == r.status_code
|
<commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from bs4 import BeautifulSoup
import pytest
import requests
def pytest_generate_tests(metafunc):
if 'not headless' in metafunc.config.option.markexpr:
return # test deslected by mark expression
base_url = metafunc.config.option.base_url
if not base_url:
pytest.skip(
'This test requires a base URL to be specified on the command '
'line or in a configuration file.')
paths = (
'/firefox/all/',
'/firefox/beta/all/',
'/firefox/developer/all/',
'/firefox/organizations/all/',
'/firefox/android/all/',
'/firefox/android/beta/all/')
argvalues = []
for path in paths:
r = requests.get(base_url + path)
soup = BeautifulSoup(r.content, 'html.parser')
table = soup.find('table', class_='build-table')
urls = [a['href'] for a in table.find_all('a')]
assert len(urls) > 0
argvalues.extend(urls)
metafunc.parametrize('url', argvalues)
@pytest.mark.headless
@pytest.mark.nondestructive
def test_localized_download_links(url):
r = requests.head(url, allow_redirects=True)
assert requests.codes.ok == r.status_code
<commit_msg>Add /firefox/nightly/all/ to download link tests<commit_after>
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from bs4 import BeautifulSoup
import pytest
import requests
def pytest_generate_tests(metafunc):
if 'not headless' in metafunc.config.option.markexpr:
return # test deslected by mark expression
base_url = metafunc.config.option.base_url
if not base_url:
pytest.skip(
'This test requires a base URL to be specified on the command '
'line or in a configuration file.')
paths = (
'/firefox/all/',
'/firefox/beta/all/',
'/firefox/developer/all/',
'/firefox/nightly/all/',
'/firefox/organizations/all/',
'/firefox/android/all/',
'/firefox/android/beta/all/')
argvalues = []
for path in paths:
r = requests.get(base_url + path)
soup = BeautifulSoup(r.content, 'html.parser')
table = soup.find('table', class_='build-table')
urls = [a['href'] for a in table.find_all('a')]
assert len(urls) > 0
argvalues.extend(urls)
metafunc.parametrize('url', argvalues)
@pytest.mark.headless
@pytest.mark.nondestructive
def test_localized_download_links(url):
r = requests.head(url, allow_redirects=True)
assert requests.codes.ok == r.status_code
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from bs4 import BeautifulSoup
import pytest
import requests
def pytest_generate_tests(metafunc):
if 'not headless' in metafunc.config.option.markexpr:
return # test deslected by mark expression
base_url = metafunc.config.option.base_url
if not base_url:
pytest.skip(
'This test requires a base URL to be specified on the command '
'line or in a configuration file.')
paths = (
'/firefox/all/',
'/firefox/beta/all/',
'/firefox/developer/all/',
'/firefox/organizations/all/',
'/firefox/android/all/',
'/firefox/android/beta/all/')
argvalues = []
for path in paths:
r = requests.get(base_url + path)
soup = BeautifulSoup(r.content, 'html.parser')
table = soup.find('table', class_='build-table')
urls = [a['href'] for a in table.find_all('a')]
assert len(urls) > 0
argvalues.extend(urls)
metafunc.parametrize('url', argvalues)
@pytest.mark.headless
@pytest.mark.nondestructive
def test_localized_download_links(url):
r = requests.head(url, allow_redirects=True)
assert requests.codes.ok == r.status_code
Add /firefox/nightly/all/ to download link tests# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from bs4 import BeautifulSoup
import pytest
import requests
def pytest_generate_tests(metafunc):
if 'not headless' in metafunc.config.option.markexpr:
return # test deslected by mark expression
base_url = metafunc.config.option.base_url
if not base_url:
pytest.skip(
'This test requires a base URL to be specified on the command '
'line or in a configuration file.')
paths = (
'/firefox/all/',
'/firefox/beta/all/',
'/firefox/developer/all/',
'/firefox/nightly/all/',
'/firefox/organizations/all/',
'/firefox/android/all/',
'/firefox/android/beta/all/')
argvalues = []
for path in paths:
r = requests.get(base_url + path)
soup = BeautifulSoup(r.content, 'html.parser')
table = soup.find('table', class_='build-table')
urls = [a['href'] for a in table.find_all('a')]
assert len(urls) > 0
argvalues.extend(urls)
metafunc.parametrize('url', argvalues)
@pytest.mark.headless
@pytest.mark.nondestructive
def test_localized_download_links(url):
r = requests.head(url, allow_redirects=True)
assert requests.codes.ok == r.status_code
|
<commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from bs4 import BeautifulSoup
import pytest
import requests
def pytest_generate_tests(metafunc):
if 'not headless' in metafunc.config.option.markexpr:
return # test deslected by mark expression
base_url = metafunc.config.option.base_url
if not base_url:
pytest.skip(
'This test requires a base URL to be specified on the command '
'line or in a configuration file.')
paths = (
'/firefox/all/',
'/firefox/beta/all/',
'/firefox/developer/all/',
'/firefox/organizations/all/',
'/firefox/android/all/',
'/firefox/android/beta/all/')
argvalues = []
for path in paths:
r = requests.get(base_url + path)
soup = BeautifulSoup(r.content, 'html.parser')
table = soup.find('table', class_='build-table')
urls = [a['href'] for a in table.find_all('a')]
assert len(urls) > 0
argvalues.extend(urls)
metafunc.parametrize('url', argvalues)
@pytest.mark.headless
@pytest.mark.nondestructive
def test_localized_download_links(url):
r = requests.head(url, allow_redirects=True)
assert requests.codes.ok == r.status_code
<commit_msg>Add /firefox/nightly/all/ to download link tests<commit_after># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from bs4 import BeautifulSoup
import pytest
import requests
def pytest_generate_tests(metafunc):
if 'not headless' in metafunc.config.option.markexpr:
return # test deslected by mark expression
base_url = metafunc.config.option.base_url
if not base_url:
pytest.skip(
'This test requires a base URL to be specified on the command '
'line or in a configuration file.')
paths = (
'/firefox/all/',
'/firefox/beta/all/',
'/firefox/developer/all/',
'/firefox/nightly/all/',
'/firefox/organizations/all/',
'/firefox/android/all/',
'/firefox/android/beta/all/')
argvalues = []
for path in paths:
r = requests.get(base_url + path)
soup = BeautifulSoup(r.content, 'html.parser')
table = soup.find('table', class_='build-table')
urls = [a['href'] for a in table.find_all('a')]
assert len(urls) > 0
argvalues.extend(urls)
metafunc.parametrize('url', argvalues)
@pytest.mark.headless
@pytest.mark.nondestructive
def test_localized_download_links(url):
r = requests.head(url, allow_redirects=True)
assert requests.codes.ok == r.status_code
|
e31a0d76236c27cfae733335bd13528e67f15fa4
|
version.py
|
version.py
|
major = 0
minor=0
patch=0
branch="dev"
timestamp=1376425015.74
|
major = 0
minor=0
patch=10
branch="master"
timestamp=1376502388.26
|
Tag commit for v0.0.10-master generated by gitmake.py
|
Tag commit for v0.0.10-master generated by gitmake.py
|
Python
|
mit
|
ryansturmer/gitmake
|
major = 0
minor=0
patch=0
branch="dev"
timestamp=1376425015.74Tag commit for v0.0.10-master generated by gitmake.py
|
major = 0
minor=0
patch=10
branch="master"
timestamp=1376502388.26
|
<commit_before>major = 0
minor=0
patch=0
branch="dev"
timestamp=1376425015.74<commit_msg>Tag commit for v0.0.10-master generated by gitmake.py<commit_after>
|
major = 0
minor=0
patch=10
branch="master"
timestamp=1376502388.26
|
major = 0
minor=0
patch=0
branch="dev"
timestamp=1376425015.74Tag commit for v0.0.10-master generated by gitmake.pymajor = 0
minor=0
patch=10
branch="master"
timestamp=1376502388.26
|
<commit_before>major = 0
minor=0
patch=0
branch="dev"
timestamp=1376425015.74<commit_msg>Tag commit for v0.0.10-master generated by gitmake.py<commit_after>major = 0
minor=0
patch=10
branch="master"
timestamp=1376502388.26
|
99e4557eabf1434708d21d79b75994ce0827c440
|
test/test_app.py
|
test/test_app.py
|
# -*- coding: utf-8 -*-
import json
import app
WORK_END_TASK = 73847457
def test():
body = {
"event_name": "item:completed",
"event_data": {
"id": 85570464,
"content": u'TINA テスト',
"labels": [652234],
"project_id": 156051149
}
}
with open('../.tinaconfig') as f:
config = json.load(f)
app.exec_todoist(config, body)
def test_reminder_fired():
body = {
"event_name": "reminder:fired",
"event_data": {
"item_id": 85474444,
"id": 33482384
}
}
with open('../.tinaconfig') as f:
config = json.load(f)
app.exec_todoist(config, body)
test()
|
# -*- coding: utf-8 -*-
import json
import app
WORK_BEGIN_TASK = 72824136
WORK_END_TASK = 73847457
def test():
body = {
"event_name": "item:completed",
"event_data": {
"id": WORK_END_TASK,
"content": u'TINA テスト',
"labels": [652234],
"project_id": 156051149
}
}
with open('../.tinaconfig') as f:
config = json.load(f)
app.exec_todoist(config, body)
def test_reminder_fired():
body = {
"event_name": "reminder:fired",
"event_data": {
"item_id": 85474444,
"id": 33482384
}
}
with open('../.tinaconfig') as f:
config = json.load(f)
app.exec_todoist(config, body)
test()
|
Add special task id to test source
|
Add special task id to test source
|
Python
|
mit
|
tadashi-aikawa/tina
|
# -*- coding: utf-8 -*-
import json
import app
WORK_END_TASK = 73847457
def test():
body = {
"event_name": "item:completed",
"event_data": {
"id": 85570464,
"content": u'TINA テスト',
"labels": [652234],
"project_id": 156051149
}
}
with open('../.tinaconfig') as f:
config = json.load(f)
app.exec_todoist(config, body)
def test_reminder_fired():
body = {
"event_name": "reminder:fired",
"event_data": {
"item_id": 85474444,
"id": 33482384
}
}
with open('../.tinaconfig') as f:
config = json.load(f)
app.exec_todoist(config, body)
test()
Add special task id to test source
|
# -*- coding: utf-8 -*-
import json
import app
WORK_BEGIN_TASK = 72824136
WORK_END_TASK = 73847457
def test():
body = {
"event_name": "item:completed",
"event_data": {
"id": WORK_END_TASK,
"content": u'TINA テスト',
"labels": [652234],
"project_id": 156051149
}
}
with open('../.tinaconfig') as f:
config = json.load(f)
app.exec_todoist(config, body)
def test_reminder_fired():
body = {
"event_name": "reminder:fired",
"event_data": {
"item_id": 85474444,
"id": 33482384
}
}
with open('../.tinaconfig') as f:
config = json.load(f)
app.exec_todoist(config, body)
test()
|
<commit_before># -*- coding: utf-8 -*-
import json
import app
WORK_END_TASK = 73847457
def test():
body = {
"event_name": "item:completed",
"event_data": {
"id": 85570464,
"content": u'TINA テスト',
"labels": [652234],
"project_id": 156051149
}
}
with open('../.tinaconfig') as f:
config = json.load(f)
app.exec_todoist(config, body)
def test_reminder_fired():
body = {
"event_name": "reminder:fired",
"event_data": {
"item_id": 85474444,
"id": 33482384
}
}
with open('../.tinaconfig') as f:
config = json.load(f)
app.exec_todoist(config, body)
test()
<commit_msg>Add special task id to test source<commit_after>
|
# -*- coding: utf-8 -*-
import json
import app
WORK_BEGIN_TASK = 72824136
WORK_END_TASK = 73847457
def test():
body = {
"event_name": "item:completed",
"event_data": {
"id": WORK_END_TASK,
"content": u'TINA テスト',
"labels": [652234],
"project_id": 156051149
}
}
with open('../.tinaconfig') as f:
config = json.load(f)
app.exec_todoist(config, body)
def test_reminder_fired():
body = {
"event_name": "reminder:fired",
"event_data": {
"item_id": 85474444,
"id": 33482384
}
}
with open('../.tinaconfig') as f:
config = json.load(f)
app.exec_todoist(config, body)
test()
|
# -*- coding: utf-8 -*-
import json
import app
WORK_END_TASK = 73847457
def test():
body = {
"event_name": "item:completed",
"event_data": {
"id": 85570464,
"content": u'TINA テスト',
"labels": [652234],
"project_id": 156051149
}
}
with open('../.tinaconfig') as f:
config = json.load(f)
app.exec_todoist(config, body)
def test_reminder_fired():
body = {
"event_name": "reminder:fired",
"event_data": {
"item_id": 85474444,
"id": 33482384
}
}
with open('../.tinaconfig') as f:
config = json.load(f)
app.exec_todoist(config, body)
test()
Add special task id to test source# -*- coding: utf-8 -*-
import json
import app
WORK_BEGIN_TASK = 72824136
WORK_END_TASK = 73847457
def test():
body = {
"event_name": "item:completed",
"event_data": {
"id": WORK_END_TASK,
"content": u'TINA テスト',
"labels": [652234],
"project_id": 156051149
}
}
with open('../.tinaconfig') as f:
config = json.load(f)
app.exec_todoist(config, body)
def test_reminder_fired():
body = {
"event_name": "reminder:fired",
"event_data": {
"item_id": 85474444,
"id": 33482384
}
}
with open('../.tinaconfig') as f:
config = json.load(f)
app.exec_todoist(config, body)
test()
|
<commit_before># -*- coding: utf-8 -*-
import json
import app
WORK_END_TASK = 73847457
def test():
body = {
"event_name": "item:completed",
"event_data": {
"id": 85570464,
"content": u'TINA テスト',
"labels": [652234],
"project_id": 156051149
}
}
with open('../.tinaconfig') as f:
config = json.load(f)
app.exec_todoist(config, body)
def test_reminder_fired():
body = {
"event_name": "reminder:fired",
"event_data": {
"item_id": 85474444,
"id": 33482384
}
}
with open('../.tinaconfig') as f:
config = json.load(f)
app.exec_todoist(config, body)
test()
<commit_msg>Add special task id to test source<commit_after># -*- coding: utf-8 -*-
import json
import app
WORK_BEGIN_TASK = 72824136
WORK_END_TASK = 73847457
def test():
body = {
"event_name": "item:completed",
"event_data": {
"id": WORK_END_TASK,
"content": u'TINA テスト',
"labels": [652234],
"project_id": 156051149
}
}
with open('../.tinaconfig') as f:
config = json.load(f)
app.exec_todoist(config, body)
def test_reminder_fired():
body = {
"event_name": "reminder:fired",
"event_data": {
"item_id": 85474444,
"id": 33482384
}
}
with open('../.tinaconfig') as f:
config = json.load(f)
app.exec_todoist(config, body)
test()
|
7ecc832999e825f48c10fdcf3430321eda1bcfcc
|
fabfile.py
|
fabfile.py
|
from fabric.api import execute, local, settings, task
@task
def preprocess_header():
local('cpp -nostdinc spotify/api.h > spotify/api.processed.h || true')
@task
def test():
local('nosetests')
@task
def autotest():
while True:
local('clear')
with settings(warn_only=True):
execute(test)
local(
'inotifywait -q -e create -e modify -e delete '
'--exclude ".*\.(pyc|sw.)" -r spotify/ tests/')
@task
def update_authors():
# Keep authors in the order of appearance and use awk to filter out dupes
local(
"git log --format='- %aN <%aE>' --reverse | awk '!x[$0]++' > AUTHORS")
|
from fabric.api import execute, local, settings, task
@task
def preprocess_header():
local('cpp -nostdinc spotify/api.h > spotify/api.processed.h || true')
@task
def docs():
local('make -C docs/ html')
@task
def autodocs():
auto(docs)
@task
def test():
local('nosetests')
@task
def autotest():
auto(test)
def auto(task):
while True:
local('clear')
with settings(warn_only=True):
execute(task)
local(
'inotifywait -q -e create -e modify -e delete '
'--exclude ".*\.(pyc|sw.)" -r spotify/ tests/')
@task
def update_authors():
# Keep authors in the order of appearance and use awk to filter out dupes
local(
"git log --format='- %aN <%aE>' --reverse | awk '!x[$0]++' > AUTHORS")
|
Add docs and autodocs tasks
|
fab: Add docs and autodocs tasks
|
Python
|
apache-2.0
|
felix1m/pyspotify,kotamat/pyspotify,felix1m/pyspotify,jodal/pyspotify,kotamat/pyspotify,jodal/pyspotify,felix1m/pyspotify,kotamat/pyspotify,jodal/pyspotify,mopidy/pyspotify,mopidy/pyspotify
|
from fabric.api import execute, local, settings, task
@task
def preprocess_header():
local('cpp -nostdinc spotify/api.h > spotify/api.processed.h || true')
@task
def test():
local('nosetests')
@task
def autotest():
while True:
local('clear')
with settings(warn_only=True):
execute(test)
local(
'inotifywait -q -e create -e modify -e delete '
'--exclude ".*\.(pyc|sw.)" -r spotify/ tests/')
@task
def update_authors():
# Keep authors in the order of appearance and use awk to filter out dupes
local(
"git log --format='- %aN <%aE>' --reverse | awk '!x[$0]++' > AUTHORS")
fab: Add docs and autodocs tasks
|
from fabric.api import execute, local, settings, task
@task
def preprocess_header():
local('cpp -nostdinc spotify/api.h > spotify/api.processed.h || true')
@task
def docs():
local('make -C docs/ html')
@task
def autodocs():
auto(docs)
@task
def test():
local('nosetests')
@task
def autotest():
auto(test)
def auto(task):
while True:
local('clear')
with settings(warn_only=True):
execute(task)
local(
'inotifywait -q -e create -e modify -e delete '
'--exclude ".*\.(pyc|sw.)" -r spotify/ tests/')
@task
def update_authors():
# Keep authors in the order of appearance and use awk to filter out dupes
local(
"git log --format='- %aN <%aE>' --reverse | awk '!x[$0]++' > AUTHORS")
|
<commit_before>from fabric.api import execute, local, settings, task
@task
def preprocess_header():
local('cpp -nostdinc spotify/api.h > spotify/api.processed.h || true')
@task
def test():
local('nosetests')
@task
def autotest():
while True:
local('clear')
with settings(warn_only=True):
execute(test)
local(
'inotifywait -q -e create -e modify -e delete '
'--exclude ".*\.(pyc|sw.)" -r spotify/ tests/')
@task
def update_authors():
# Keep authors in the order of appearance and use awk to filter out dupes
local(
"git log --format='- %aN <%aE>' --reverse | awk '!x[$0]++' > AUTHORS")
<commit_msg>fab: Add docs and autodocs tasks<commit_after>
|
from fabric.api import execute, local, settings, task
@task
def preprocess_header():
local('cpp -nostdinc spotify/api.h > spotify/api.processed.h || true')
@task
def docs():
local('make -C docs/ html')
@task
def autodocs():
auto(docs)
@task
def test():
local('nosetests')
@task
def autotest():
auto(test)
def auto(task):
while True:
local('clear')
with settings(warn_only=True):
execute(task)
local(
'inotifywait -q -e create -e modify -e delete '
'--exclude ".*\.(pyc|sw.)" -r spotify/ tests/')
@task
def update_authors():
# Keep authors in the order of appearance and use awk to filter out dupes
local(
"git log --format='- %aN <%aE>' --reverse | awk '!x[$0]++' > AUTHORS")
|
from fabric.api import execute, local, settings, task
@task
def preprocess_header():
local('cpp -nostdinc spotify/api.h > spotify/api.processed.h || true')
@task
def test():
local('nosetests')
@task
def autotest():
while True:
local('clear')
with settings(warn_only=True):
execute(test)
local(
'inotifywait -q -e create -e modify -e delete '
'--exclude ".*\.(pyc|sw.)" -r spotify/ tests/')
@task
def update_authors():
# Keep authors in the order of appearance and use awk to filter out dupes
local(
"git log --format='- %aN <%aE>' --reverse | awk '!x[$0]++' > AUTHORS")
fab: Add docs and autodocs tasksfrom fabric.api import execute, local, settings, task
@task
def preprocess_header():
local('cpp -nostdinc spotify/api.h > spotify/api.processed.h || true')
@task
def docs():
local('make -C docs/ html')
@task
def autodocs():
auto(docs)
@task
def test():
local('nosetests')
@task
def autotest():
auto(test)
def auto(task):
while True:
local('clear')
with settings(warn_only=True):
execute(task)
local(
'inotifywait -q -e create -e modify -e delete '
'--exclude ".*\.(pyc|sw.)" -r spotify/ tests/')
@task
def update_authors():
# Keep authors in the order of appearance and use awk to filter out dupes
local(
"git log --format='- %aN <%aE>' --reverse | awk '!x[$0]++' > AUTHORS")
|
<commit_before>from fabric.api import execute, local, settings, task
@task
def preprocess_header():
local('cpp -nostdinc spotify/api.h > spotify/api.processed.h || true')
@task
def test():
local('nosetests')
@task
def autotest():
while True:
local('clear')
with settings(warn_only=True):
execute(test)
local(
'inotifywait -q -e create -e modify -e delete '
'--exclude ".*\.(pyc|sw.)" -r spotify/ tests/')
@task
def update_authors():
# Keep authors in the order of appearance and use awk to filter out dupes
local(
"git log --format='- %aN <%aE>' --reverse | awk '!x[$0]++' > AUTHORS")
<commit_msg>fab: Add docs and autodocs tasks<commit_after>from fabric.api import execute, local, settings, task
@task
def preprocess_header():
local('cpp -nostdinc spotify/api.h > spotify/api.processed.h || true')
@task
def docs():
local('make -C docs/ html')
@task
def autodocs():
auto(docs)
@task
def test():
local('nosetests')
@task
def autotest():
auto(test)
def auto(task):
while True:
local('clear')
with settings(warn_only=True):
execute(task)
local(
'inotifywait -q -e create -e modify -e delete '
'--exclude ".*\.(pyc|sw.)" -r spotify/ tests/')
@task
def update_authors():
# Keep authors in the order of appearance and use awk to filter out dupes
local(
"git log --format='- %aN <%aE>' --reverse | awk '!x[$0]++' > AUTHORS")
|
bd7f9e79ff4a30b2103874d0e5ceba8657b7f6ce
|
flocker/provision/test/test_ssh_monkeypatch.py
|
flocker/provision/test/test_ssh_monkeypatch.py
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Tests for ``flocker.provision._ssh._monkeypatch``.
"""
from twisted.trial.unittest import SynchronousTestCase as TestCase
from .._ssh._monkeypatch import _patch_7672_needed, patch_7672_applied
class Twisted7672Tests(TestCase):
""""
Tests for ``flocker.provision._ssh._monkeypatch``.
"""
def test_needsPatch(self):
"""
Check to see if patch is still required.
"""
self.assertTrue((not _patch_7672_needed()) or patch_7672_applied,
"Monkeypatch for twisted bug #7672 can be removed.")
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Tests for ``flocker.provision._ssh._monkeypatch``.
"""
from twisted.trial.unittest import SynchronousTestCase as TestCase
from .._ssh._monkeypatch import _patch_7672_needed, patch_7672_applied
class Twisted7672Tests(TestCase):
""""
Tests for ``flocker.provision._ssh._monkeypatch``.
"""
def test_needsPatch(self):
"""
Check to see if patch is still required.
This will be the case if the patch is still needed, or if it has been
applied.
"""
self.assertFalse(
# The patch is still needed
_patch_7672_needed()
# Or the patch has already been applied
or patch_7672_applied,
"Monkeypatch for twisted bug #7672 can be removed.")
|
Fix monkey patch test condition.
|
Fix monkey patch test condition.
|
Python
|
apache-2.0
|
mbrukman/flocker,achanda/flocker,mbrukman/flocker,mbrukman/flocker,AndyHuu/flocker,moypray/flocker,agonzalezro/flocker,1d4Nf6/flocker,agonzalezro/flocker,Azulinho/flocker,hackday-profilers/flocker,adamtheturtle/flocker,1d4Nf6/flocker,AndyHuu/flocker,AndyHuu/flocker,moypray/flocker,w4ngyi/flocker,w4ngyi/flocker,adamtheturtle/flocker,hackday-profilers/flocker,w4ngyi/flocker,agonzalezro/flocker,wallnerryan/flocker-profiles,1d4Nf6/flocker,moypray/flocker,achanda/flocker,Azulinho/flocker,adamtheturtle/flocker,wallnerryan/flocker-profiles,Azulinho/flocker,achanda/flocker,hackday-profilers/flocker,wallnerryan/flocker-profiles
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Tests for ``flocker.provision._ssh._monkeypatch``.
"""
from twisted.trial.unittest import SynchronousTestCase as TestCase
from .._ssh._monkeypatch import _patch_7672_needed, patch_7672_applied
class Twisted7672Tests(TestCase):
""""
Tests for ``flocker.provision._ssh._monkeypatch``.
"""
def test_needsPatch(self):
"""
Check to see if patch is still required.
"""
self.assertTrue((not _patch_7672_needed()) or patch_7672_applied,
"Monkeypatch for twisted bug #7672 can be removed.")
Fix monkey patch test condition.
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Tests for ``flocker.provision._ssh._monkeypatch``.
"""
from twisted.trial.unittest import SynchronousTestCase as TestCase
from .._ssh._monkeypatch import _patch_7672_needed, patch_7672_applied
class Twisted7672Tests(TestCase):
""""
Tests for ``flocker.provision._ssh._monkeypatch``.
"""
def test_needsPatch(self):
"""
Check to see if patch is still required.
This will be the case if the patch is still needed, or if it has been
applied.
"""
self.assertFalse(
# The patch is still needed
_patch_7672_needed()
# Or the patch has already been applied
or patch_7672_applied,
"Monkeypatch for twisted bug #7672 can be removed.")
|
<commit_before># Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Tests for ``flocker.provision._ssh._monkeypatch``.
"""
from twisted.trial.unittest import SynchronousTestCase as TestCase
from .._ssh._monkeypatch import _patch_7672_needed, patch_7672_applied
class Twisted7672Tests(TestCase):
""""
Tests for ``flocker.provision._ssh._monkeypatch``.
"""
def test_needsPatch(self):
"""
Check to see if patch is still required.
"""
self.assertTrue((not _patch_7672_needed()) or patch_7672_applied,
"Monkeypatch for twisted bug #7672 can be removed.")
<commit_msg>Fix monkey patch test condition.<commit_after>
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Tests for ``flocker.provision._ssh._monkeypatch``.
"""
from twisted.trial.unittest import SynchronousTestCase as TestCase
from .._ssh._monkeypatch import _patch_7672_needed, patch_7672_applied
class Twisted7672Tests(TestCase):
""""
Tests for ``flocker.provision._ssh._monkeypatch``.
"""
def test_needsPatch(self):
"""
Check to see if patch is still required.
This will be the case if the patch is still needed, or if it has been
applied.
"""
self.assertFalse(
# The patch is still needed
_patch_7672_needed()
# Or the patch has already been applied
or patch_7672_applied,
"Monkeypatch for twisted bug #7672 can be removed.")
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Tests for ``flocker.provision._ssh._monkeypatch``.
"""
from twisted.trial.unittest import SynchronousTestCase as TestCase
from .._ssh._monkeypatch import _patch_7672_needed, patch_7672_applied
class Twisted7672Tests(TestCase):
""""
Tests for ``flocker.provision._ssh._monkeypatch``.
"""
def test_needsPatch(self):
"""
Check to see if patch is still required.
"""
self.assertTrue((not _patch_7672_needed()) or patch_7672_applied,
"Monkeypatch for twisted bug #7672 can be removed.")
Fix monkey patch test condition.# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Tests for ``flocker.provision._ssh._monkeypatch``.
"""
from twisted.trial.unittest import SynchronousTestCase as TestCase
from .._ssh._monkeypatch import _patch_7672_needed, patch_7672_applied
class Twisted7672Tests(TestCase):
""""
Tests for ``flocker.provision._ssh._monkeypatch``.
"""
def test_needsPatch(self):
"""
Check to see if patch is still required.
This will be the case if the patch is still needed, or if it has been
applied.
"""
self.assertFalse(
# The patch is still needed
_patch_7672_needed()
# Or the patch has already been applied
or patch_7672_applied,
"Monkeypatch for twisted bug #7672 can be removed.")
|
<commit_before># Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Tests for ``flocker.provision._ssh._monkeypatch``.
"""
from twisted.trial.unittest import SynchronousTestCase as TestCase
from .._ssh._monkeypatch import _patch_7672_needed, patch_7672_applied
class Twisted7672Tests(TestCase):
""""
Tests for ``flocker.provision._ssh._monkeypatch``.
"""
def test_needsPatch(self):
"""
Check to see if patch is still required.
"""
self.assertTrue((not _patch_7672_needed()) or patch_7672_applied,
"Monkeypatch for twisted bug #7672 can be removed.")
<commit_msg>Fix monkey patch test condition.<commit_after># Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Tests for ``flocker.provision._ssh._monkeypatch``.
"""
from twisted.trial.unittest import SynchronousTestCase as TestCase
from .._ssh._monkeypatch import _patch_7672_needed, patch_7672_applied
class Twisted7672Tests(TestCase):
""""
Tests for ``flocker.provision._ssh._monkeypatch``.
"""
def test_needsPatch(self):
"""
Check to see if patch is still required.
This will be the case if the patch is still needed, or if it has been
applied.
"""
self.assertFalse(
# The patch is still needed
_patch_7672_needed()
# Or the patch has already been applied
or patch_7672_applied,
"Monkeypatch for twisted bug #7672 can be removed.")
|
350e8bdcb9c6f3eace7839e5dc7270bfeb51e50f
|
tests/grafana_dashboards/test_config.py
|
tests/grafana_dashboards/test_config.py
|
# -*- coding: utf-8 -*-
# Copyright 2015 grafana-dashboard-builder contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from grafana_dashboards.config import Config
__author__ = 'Jakub Plichta <jakub.plichta@gmail.com>'
def test_dict():
config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.yaml')
config = Config(config_file)
assert config.get_config('context') == {'component': 'frontend'}
assert config.get_config('unknown') == {}
|
# -*- coding: utf-8 -*-
# Copyright 2015 grafana-dashboard-builder contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from grafana_dashboards.config import Config
__author__ = 'Jakub Plichta <jakub.plichta@gmail.com>'
def test_existent_config_file():
config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.yaml')
config = Config(config_file)
assert config.get_config('context') == {'component': 'frontend'}
assert config.get_config('unknown') == {}
def test_nonexistent_config_file():
config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'no_file.yaml')
config = Config(config_file)
assert config.get_config('context') == {}
assert config.get_config('unknown') == {}
|
Add more tests for Config
|
Add more tests for Config
|
Python
|
apache-2.0
|
jakubplichta/grafana-dashboard-builder
|
# -*- coding: utf-8 -*-
# Copyright 2015 grafana-dashboard-builder contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from grafana_dashboards.config import Config
__author__ = 'Jakub Plichta <jakub.plichta@gmail.com>'
def test_dict():
config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.yaml')
config = Config(config_file)
assert config.get_config('context') == {'component': 'frontend'}
assert config.get_config('unknown') == {}
Add more tests for Config
|
# -*- coding: utf-8 -*-
# Copyright 2015 grafana-dashboard-builder contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from grafana_dashboards.config import Config
__author__ = 'Jakub Plichta <jakub.plichta@gmail.com>'
def test_existent_config_file():
config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.yaml')
config = Config(config_file)
assert config.get_config('context') == {'component': 'frontend'}
assert config.get_config('unknown') == {}
def test_nonexistent_config_file():
config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'no_file.yaml')
config = Config(config_file)
assert config.get_config('context') == {}
assert config.get_config('unknown') == {}
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2015 grafana-dashboard-builder contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from grafana_dashboards.config import Config
__author__ = 'Jakub Plichta <jakub.plichta@gmail.com>'
def test_dict():
config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.yaml')
config = Config(config_file)
assert config.get_config('context') == {'component': 'frontend'}
assert config.get_config('unknown') == {}
<commit_msg>Add more tests for Config<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright 2015 grafana-dashboard-builder contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from grafana_dashboards.config import Config
__author__ = 'Jakub Plichta <jakub.plichta@gmail.com>'
def test_existent_config_file():
config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.yaml')
config = Config(config_file)
assert config.get_config('context') == {'component': 'frontend'}
assert config.get_config('unknown') == {}
def test_nonexistent_config_file():
config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'no_file.yaml')
config = Config(config_file)
assert config.get_config('context') == {}
assert config.get_config('unknown') == {}
|
# -*- coding: utf-8 -*-
# Copyright 2015 grafana-dashboard-builder contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from grafana_dashboards.config import Config
__author__ = 'Jakub Plichta <jakub.plichta@gmail.com>'
def test_dict():
config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.yaml')
config = Config(config_file)
assert config.get_config('context') == {'component': 'frontend'}
assert config.get_config('unknown') == {}
Add more tests for Config# -*- coding: utf-8 -*-
# Copyright 2015 grafana-dashboard-builder contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from grafana_dashboards.config import Config
__author__ = 'Jakub Plichta <jakub.plichta@gmail.com>'
def test_existent_config_file():
config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.yaml')
config = Config(config_file)
assert config.get_config('context') == {'component': 'frontend'}
assert config.get_config('unknown') == {}
def test_nonexistent_config_file():
config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'no_file.yaml')
config = Config(config_file)
assert config.get_config('context') == {}
assert config.get_config('unknown') == {}
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2015 grafana-dashboard-builder contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from grafana_dashboards.config import Config
__author__ = 'Jakub Plichta <jakub.plichta@gmail.com>'
def test_dict():
config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.yaml')
config = Config(config_file)
assert config.get_config('context') == {'component': 'frontend'}
assert config.get_config('unknown') == {}
<commit_msg>Add more tests for Config<commit_after># -*- coding: utf-8 -*-
# Copyright 2015 grafana-dashboard-builder contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from grafana_dashboards.config import Config
__author__ = 'Jakub Plichta <jakub.plichta@gmail.com>'
def test_existent_config_file():
config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.yaml')
config = Config(config_file)
assert config.get_config('context') == {'component': 'frontend'}
assert config.get_config('unknown') == {}
def test_nonexistent_config_file():
config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'no_file.yaml')
config = Config(config_file)
assert config.get_config('context') == {}
assert config.get_config('unknown') == {}
|
00e84b51f22f78f0243cd7b7212e70447fd5b552
|
store/tests/test_forms.py
|
store/tests/test_forms.py
|
from django.test import TestCase
from store.forms import ReviewForm
from store.models import Review
from .factories import *
class ReviewFormTest(TestCase):
def test_form_validation_for_blank_items(self):
p1 = ProductFactory.create()
form = ReviewForm(
data={'name':'', 'text': '', 'product':p1.id})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['text'],["Please fill in the review"])
self.assertEqual(form.errors['rating'],["Please leave a rating"])
def test_form_save_handles_saving_to_a_list(self):
prod = ProductFactory.create()
form = ReviewForm(
data={'name':'Kevin', 'text': 'Review', 'rating': 3, 'product':prod.id})
new_review = form.save()
self.assertEqual(new_review, Review.objects.first())
self.assertEqual(new_review.name, 'Kevin')
self.assertEqual(new_review.product, prod)
|
from django.test import TestCase
from store.forms import ReviewForm
from store.models import Review
from .factories import *
class ReviewFormTest(TestCase):
def test_form_validation_for_blank_items(self):
p1 = ProductFactory.create()
form = ReviewForm(
data={'name':'', 'text': '', 'product':p1.id})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['text'],["Please fill in the review"])
self.assertEqual(form.errors['rating'],["Please leave a rating"])
def test_form_save_handles_saving_product_reviews(self):
prod = ProductFactory.create()
form = ReviewForm(
data={'name':'Kevin', 'text': 'Review', 'rating': 3, 'product':prod.id})
new_review = form.save()
self.assertEqual(new_review, Review.objects.first())
self.assertEqual(new_review.name, 'Kevin')
self.assertEqual(new_review.product, prod)
def test_empty_name_field_doesnt_raise_errors(self):
prod = ProductFactory.create()
form = ReviewForm(
data={'name':'', 'text': 'Review', 'rating': 3, 'product':prod.id})
self.assertTrue(form.is_valid())
|
Test that an empty name field doesn't raise errors
|
Test that an empty name field doesn't raise errors
|
Python
|
bsd-3-clause
|
andela-kndungu/compshop,andela-kndungu/compshop,andela-kndungu/compshop,kevgathuku/compshop,kevgathuku/compshop,andela-kndungu/compshop,kevgathuku/compshop,kevgathuku/compshop
|
from django.test import TestCase
from store.forms import ReviewForm
from store.models import Review
from .factories import *
class ReviewFormTest(TestCase):
def test_form_validation_for_blank_items(self):
p1 = ProductFactory.create()
form = ReviewForm(
data={'name':'', 'text': '', 'product':p1.id})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['text'],["Please fill in the review"])
self.assertEqual(form.errors['rating'],["Please leave a rating"])
def test_form_save_handles_saving_to_a_list(self):
prod = ProductFactory.create()
form = ReviewForm(
data={'name':'Kevin', 'text': 'Review', 'rating': 3, 'product':prod.id})
new_review = form.save()
self.assertEqual(new_review, Review.objects.first())
self.assertEqual(new_review.name, 'Kevin')
self.assertEqual(new_review.product, prod)
Test that an empty name field doesn't raise errors
|
from django.test import TestCase
from store.forms import ReviewForm
from store.models import Review
from .factories import *
class ReviewFormTest(TestCase):
def test_form_validation_for_blank_items(self):
p1 = ProductFactory.create()
form = ReviewForm(
data={'name':'', 'text': '', 'product':p1.id})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['text'],["Please fill in the review"])
self.assertEqual(form.errors['rating'],["Please leave a rating"])
def test_form_save_handles_saving_product_reviews(self):
prod = ProductFactory.create()
form = ReviewForm(
data={'name':'Kevin', 'text': 'Review', 'rating': 3, 'product':prod.id})
new_review = form.save()
self.assertEqual(new_review, Review.objects.first())
self.assertEqual(new_review.name, 'Kevin')
self.assertEqual(new_review.product, prod)
def test_empty_name_field_doesnt_raise_errors(self):
prod = ProductFactory.create()
form = ReviewForm(
data={'name':'', 'text': 'Review', 'rating': 3, 'product':prod.id})
self.assertTrue(form.is_valid())
|
<commit_before>from django.test import TestCase
from store.forms import ReviewForm
from store.models import Review
from .factories import *
class ReviewFormTest(TestCase):
def test_form_validation_for_blank_items(self):
p1 = ProductFactory.create()
form = ReviewForm(
data={'name':'', 'text': '', 'product':p1.id})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['text'],["Please fill in the review"])
self.assertEqual(form.errors['rating'],["Please leave a rating"])
def test_form_save_handles_saving_to_a_list(self):
prod = ProductFactory.create()
form = ReviewForm(
data={'name':'Kevin', 'text': 'Review', 'rating': 3, 'product':prod.id})
new_review = form.save()
self.assertEqual(new_review, Review.objects.first())
self.assertEqual(new_review.name, 'Kevin')
self.assertEqual(new_review.product, prod)
<commit_msg>Test that an empty name field doesn't raise errors<commit_after>
|
from django.test import TestCase
from store.forms import ReviewForm
from store.models import Review
from .factories import *
class ReviewFormTest(TestCase):
def test_form_validation_for_blank_items(self):
p1 = ProductFactory.create()
form = ReviewForm(
data={'name':'', 'text': '', 'product':p1.id})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['text'],["Please fill in the review"])
self.assertEqual(form.errors['rating'],["Please leave a rating"])
def test_form_save_handles_saving_product_reviews(self):
prod = ProductFactory.create()
form = ReviewForm(
data={'name':'Kevin', 'text': 'Review', 'rating': 3, 'product':prod.id})
new_review = form.save()
self.assertEqual(new_review, Review.objects.first())
self.assertEqual(new_review.name, 'Kevin')
self.assertEqual(new_review.product, prod)
def test_empty_name_field_doesnt_raise_errors(self):
prod = ProductFactory.create()
form = ReviewForm(
data={'name':'', 'text': 'Review', 'rating': 3, 'product':prod.id})
self.assertTrue(form.is_valid())
|
from django.test import TestCase
from store.forms import ReviewForm
from store.models import Review
from .factories import *
class ReviewFormTest(TestCase):
def test_form_validation_for_blank_items(self):
p1 = ProductFactory.create()
form = ReviewForm(
data={'name':'', 'text': '', 'product':p1.id})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['text'],["Please fill in the review"])
self.assertEqual(form.errors['rating'],["Please leave a rating"])
def test_form_save_handles_saving_to_a_list(self):
prod = ProductFactory.create()
form = ReviewForm(
data={'name':'Kevin', 'text': 'Review', 'rating': 3, 'product':prod.id})
new_review = form.save()
self.assertEqual(new_review, Review.objects.first())
self.assertEqual(new_review.name, 'Kevin')
self.assertEqual(new_review.product, prod)
Test that an empty name field doesn't raise errorsfrom django.test import TestCase
from store.forms import ReviewForm
from store.models import Review
from .factories import *
class ReviewFormTest(TestCase):
def test_form_validation_for_blank_items(self):
p1 = ProductFactory.create()
form = ReviewForm(
data={'name':'', 'text': '', 'product':p1.id})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['text'],["Please fill in the review"])
self.assertEqual(form.errors['rating'],["Please leave a rating"])
def test_form_save_handles_saving_product_reviews(self):
prod = ProductFactory.create()
form = ReviewForm(
data={'name':'Kevin', 'text': 'Review', 'rating': 3, 'product':prod.id})
new_review = form.save()
self.assertEqual(new_review, Review.objects.first())
self.assertEqual(new_review.name, 'Kevin')
self.assertEqual(new_review.product, prod)
def test_empty_name_field_doesnt_raise_errors(self):
prod = ProductFactory.create()
form = ReviewForm(
data={'name':'', 'text': 'Review', 'rating': 3, 'product':prod.id})
self.assertTrue(form.is_valid())
|
<commit_before>from django.test import TestCase
from store.forms import ReviewForm
from store.models import Review
from .factories import *
class ReviewFormTest(TestCase):
def test_form_validation_for_blank_items(self):
p1 = ProductFactory.create()
form = ReviewForm(
data={'name':'', 'text': '', 'product':p1.id})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['text'],["Please fill in the review"])
self.assertEqual(form.errors['rating'],["Please leave a rating"])
def test_form_save_handles_saving_to_a_list(self):
prod = ProductFactory.create()
form = ReviewForm(
data={'name':'Kevin', 'text': 'Review', 'rating': 3, 'product':prod.id})
new_review = form.save()
self.assertEqual(new_review, Review.objects.first())
self.assertEqual(new_review.name, 'Kevin')
self.assertEqual(new_review.product, prod)
<commit_msg>Test that an empty name field doesn't raise errors<commit_after>from django.test import TestCase
from store.forms import ReviewForm
from store.models import Review
from .factories import *
class ReviewFormTest(TestCase):
def test_form_validation_for_blank_items(self):
p1 = ProductFactory.create()
form = ReviewForm(
data={'name':'', 'text': '', 'product':p1.id})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['text'],["Please fill in the review"])
self.assertEqual(form.errors['rating'],["Please leave a rating"])
def test_form_save_handles_saving_product_reviews(self):
prod = ProductFactory.create()
form = ReviewForm(
data={'name':'Kevin', 'text': 'Review', 'rating': 3, 'product':prod.id})
new_review = form.save()
self.assertEqual(new_review, Review.objects.first())
self.assertEqual(new_review.name, 'Kevin')
self.assertEqual(new_review.product, prod)
def test_empty_name_field_doesnt_raise_errors(self):
prod = ProductFactory.create()
form = ReviewForm(
data={'name':'', 'text': 'Review', 'rating': 3, 'product':prod.id})
self.assertTrue(form.is_valid())
|
327bbdde964f8af0625313922be91665a75d7268
|
fabfile.py
|
fabfile.py
|
from fabric import api
def raspberry_pi(name):
api.env.hosts = ["{0}.local".format(name)]
api.env.user = 'pi'
def deploy():
api.require('hosts', provided_by=[raspberry_pi])
with api.settings(warn_only=True):
api.sudo('service sensor-rpc stop')
with api.cd('~/Pi-Sensor-RPC-Service'):
api.run('git pull origin master')
with api.cd('~/Pi-Sensor-RPC-Service/app'):
api.run('npm install')
api.sudo('service sensor-rpc start')
|
from StringIO import StringIO
from fabric import api
from fabric.operations import prompt, put
UPSTART_TEMPLATE = """
description "Pi-Sensor-RPC-Service"
start on runlevel [2345]
stop on runlevel [06]
respawn
respawn limit 10 5
env LOGGLY_TOKEN={loggly_token}
env LOGGLY_SUBDOMAIN={loggly_domain}
env SERIAL_ADDRESS={serial_address}
env SERIAL_RATE={serial_rate}
env RABBIT_URL={rabbit_url}
script
cd /home/pi/Pi-Sensor-RPC-Service/app && node main.js
end script
"""
def raspberry_pi():
api.env.hosts = ["{0}.local".format(prompt("Raspberry Pi:"))]
api.env.user = 'pi'
def install():
api.require('hosts', provided_by=[raspberry_pi])
upstart_values = {}
upstart_values['loggly_token'] = prompt("Loggly token:")
upstart_values['loggly_domain'] = prompt("Loggly domain:")
upstart_values['serial_address'] = prompt("Serial address:")
upstart_values['serial_rate'] = prompt("Serial rate:")
upstart_values['rabbit_url'] = prompt("Rabbit URL:")
upstart_file = StringIO(UPSTART_TEMPLATE.format(**upstart_values))
with api.cd('/etc/init'):
upload = put(upstart_file, 'sensor-rpc.conf', use_sudo=True)
assert upload.succeeded
api.run('git clone https://github.com/projectweekend/Pi-Sensor-RPC-Service.git')
with api.cd('~/Pi-Sensor-RPC-Service/app'):
api.run('npm install')
api.sudo('service sensor-rpc start')
def update():
api.require('hosts', provided_by=[raspberry_pi])
with api.settings(warn_only=True):
api.sudo('service sensor-rpc stop')
with api.cd('~/Pi-Sensor-RPC-Service'):
api.run('git pull origin master')
with api.cd('~/Pi-Sensor-RPC-Service/app'):
api.run('npm install')
api.sudo('service sensor-rpc start')
|
Add install task to fab file
|
Add install task to fab file
|
Python
|
mit
|
projectweekend/Pi-Sensor-RPC-Service
|
from fabric import api
def raspberry_pi(name):
api.env.hosts = ["{0}.local".format(name)]
api.env.user = 'pi'
def deploy():
api.require('hosts', provided_by=[raspberry_pi])
with api.settings(warn_only=True):
api.sudo('service sensor-rpc stop')
with api.cd('~/Pi-Sensor-RPC-Service'):
api.run('git pull origin master')
with api.cd('~/Pi-Sensor-RPC-Service/app'):
api.run('npm install')
api.sudo('service sensor-rpc start')
Add install task to fab file
|
from StringIO import StringIO
from fabric import api
from fabric.operations import prompt, put
UPSTART_TEMPLATE = """
description "Pi-Sensor-RPC-Service"
start on runlevel [2345]
stop on runlevel [06]
respawn
respawn limit 10 5
env LOGGLY_TOKEN={loggly_token}
env LOGGLY_SUBDOMAIN={loggly_domain}
env SERIAL_ADDRESS={serial_address}
env SERIAL_RATE={serial_rate}
env RABBIT_URL={rabbit_url}
script
cd /home/pi/Pi-Sensor-RPC-Service/app && node main.js
end script
"""
def raspberry_pi():
api.env.hosts = ["{0}.local".format(prompt("Raspberry Pi:"))]
api.env.user = 'pi'
def install():
api.require('hosts', provided_by=[raspberry_pi])
upstart_values = {}
upstart_values['loggly_token'] = prompt("Loggly token:")
upstart_values['loggly_domain'] = prompt("Loggly domain:")
upstart_values['serial_address'] = prompt("Serial address:")
upstart_values['serial_rate'] = prompt("Serial rate:")
upstart_values['rabbit_url'] = prompt("Rabbit URL:")
upstart_file = StringIO(UPSTART_TEMPLATE.format(**upstart_values))
with api.cd('/etc/init'):
upload = put(upstart_file, 'sensor-rpc.conf', use_sudo=True)
assert upload.succeeded
api.run('git clone https://github.com/projectweekend/Pi-Sensor-RPC-Service.git')
with api.cd('~/Pi-Sensor-RPC-Service/app'):
api.run('npm install')
api.sudo('service sensor-rpc start')
def update():
api.require('hosts', provided_by=[raspberry_pi])
with api.settings(warn_only=True):
api.sudo('service sensor-rpc stop')
with api.cd('~/Pi-Sensor-RPC-Service'):
api.run('git pull origin master')
with api.cd('~/Pi-Sensor-RPC-Service/app'):
api.run('npm install')
api.sudo('service sensor-rpc start')
|
<commit_before>from fabric import api
def raspberry_pi(name):
api.env.hosts = ["{0}.local".format(name)]
api.env.user = 'pi'
def deploy():
api.require('hosts', provided_by=[raspberry_pi])
with api.settings(warn_only=True):
api.sudo('service sensor-rpc stop')
with api.cd('~/Pi-Sensor-RPC-Service'):
api.run('git pull origin master')
with api.cd('~/Pi-Sensor-RPC-Service/app'):
api.run('npm install')
api.sudo('service sensor-rpc start')
<commit_msg>Add install task to fab file<commit_after>
|
from StringIO import StringIO
from fabric import api
from fabric.operations import prompt, put
UPSTART_TEMPLATE = """
description "Pi-Sensor-RPC-Service"
start on runlevel [2345]
stop on runlevel [06]
respawn
respawn limit 10 5
env LOGGLY_TOKEN={loggly_token}
env LOGGLY_SUBDOMAIN={loggly_domain}
env SERIAL_ADDRESS={serial_address}
env SERIAL_RATE={serial_rate}
env RABBIT_URL={rabbit_url}
script
cd /home/pi/Pi-Sensor-RPC-Service/app && node main.js
end script
"""
def raspberry_pi():
api.env.hosts = ["{0}.local".format(prompt("Raspberry Pi:"))]
api.env.user = 'pi'
def install():
api.require('hosts', provided_by=[raspberry_pi])
upstart_values = {}
upstart_values['loggly_token'] = prompt("Loggly token:")
upstart_values['loggly_domain'] = prompt("Loggly domain:")
upstart_values['serial_address'] = prompt("Serial address:")
upstart_values['serial_rate'] = prompt("Serial rate:")
upstart_values['rabbit_url'] = prompt("Rabbit URL:")
upstart_file = StringIO(UPSTART_TEMPLATE.format(**upstart_values))
with api.cd('/etc/init'):
upload = put(upstart_file, 'sensor-rpc.conf', use_sudo=True)
assert upload.succeeded
api.run('git clone https://github.com/projectweekend/Pi-Sensor-RPC-Service.git')
with api.cd('~/Pi-Sensor-RPC-Service/app'):
api.run('npm install')
api.sudo('service sensor-rpc start')
def update():
api.require('hosts', provided_by=[raspberry_pi])
with api.settings(warn_only=True):
api.sudo('service sensor-rpc stop')
with api.cd('~/Pi-Sensor-RPC-Service'):
api.run('git pull origin master')
with api.cd('~/Pi-Sensor-RPC-Service/app'):
api.run('npm install')
api.sudo('service sensor-rpc start')
|
from fabric import api
def raspberry_pi(name):
api.env.hosts = ["{0}.local".format(name)]
api.env.user = 'pi'
def deploy():
api.require('hosts', provided_by=[raspberry_pi])
with api.settings(warn_only=True):
api.sudo('service sensor-rpc stop')
with api.cd('~/Pi-Sensor-RPC-Service'):
api.run('git pull origin master')
with api.cd('~/Pi-Sensor-RPC-Service/app'):
api.run('npm install')
api.sudo('service sensor-rpc start')
Add install task to fab filefrom StringIO import StringIO
from fabric import api
from fabric.operations import prompt, put
UPSTART_TEMPLATE = """
description "Pi-Sensor-RPC-Service"
start on runlevel [2345]
stop on runlevel [06]
respawn
respawn limit 10 5
env LOGGLY_TOKEN={loggly_token}
env LOGGLY_SUBDOMAIN={loggly_domain}
env SERIAL_ADDRESS={serial_address}
env SERIAL_RATE={serial_rate}
env RABBIT_URL={rabbit_url}
script
cd /home/pi/Pi-Sensor-RPC-Service/app && node main.js
end script
"""
def raspberry_pi():
api.env.hosts = ["{0}.local".format(prompt("Raspberry Pi:"))]
api.env.user = 'pi'
def install():
api.require('hosts', provided_by=[raspberry_pi])
upstart_values = {}
upstart_values['loggly_token'] = prompt("Loggly token:")
upstart_values['loggly_domain'] = prompt("Loggly domain:")
upstart_values['serial_address'] = prompt("Serial address:")
upstart_values['serial_rate'] = prompt("Serial rate:")
upstart_values['rabbit_url'] = prompt("Rabbit URL:")
upstart_file = StringIO(UPSTART_TEMPLATE.format(**upstart_values))
with api.cd('/etc/init'):
upload = put(upstart_file, 'sensor-rpc.conf', use_sudo=True)
assert upload.succeeded
api.run('git clone https://github.com/projectweekend/Pi-Sensor-RPC-Service.git')
with api.cd('~/Pi-Sensor-RPC-Service/app'):
api.run('npm install')
api.sudo('service sensor-rpc start')
def update():
api.require('hosts', provided_by=[raspberry_pi])
with api.settings(warn_only=True):
api.sudo('service sensor-rpc stop')
with api.cd('~/Pi-Sensor-RPC-Service'):
api.run('git pull origin master')
with api.cd('~/Pi-Sensor-RPC-Service/app'):
api.run('npm install')
api.sudo('service sensor-rpc start')
|
<commit_before>from fabric import api
def raspberry_pi(name):
api.env.hosts = ["{0}.local".format(name)]
api.env.user = 'pi'
def deploy():
api.require('hosts', provided_by=[raspberry_pi])
with api.settings(warn_only=True):
api.sudo('service sensor-rpc stop')
with api.cd('~/Pi-Sensor-RPC-Service'):
api.run('git pull origin master')
with api.cd('~/Pi-Sensor-RPC-Service/app'):
api.run('npm install')
api.sudo('service sensor-rpc start')
<commit_msg>Add install task to fab file<commit_after>from StringIO import StringIO
from fabric import api
from fabric.operations import prompt, put
UPSTART_TEMPLATE = """
description "Pi-Sensor-RPC-Service"
start on runlevel [2345]
stop on runlevel [06]
respawn
respawn limit 10 5
env LOGGLY_TOKEN={loggly_token}
env LOGGLY_SUBDOMAIN={loggly_domain}
env SERIAL_ADDRESS={serial_address}
env SERIAL_RATE={serial_rate}
env RABBIT_URL={rabbit_url}
script
cd /home/pi/Pi-Sensor-RPC-Service/app && node main.js
end script
"""
def raspberry_pi():
api.env.hosts = ["{0}.local".format(prompt("Raspberry Pi:"))]
api.env.user = 'pi'
def install():
api.require('hosts', provided_by=[raspberry_pi])
upstart_values = {}
upstart_values['loggly_token'] = prompt("Loggly token:")
upstart_values['loggly_domain'] = prompt("Loggly domain:")
upstart_values['serial_address'] = prompt("Serial address:")
upstart_values['serial_rate'] = prompt("Serial rate:")
upstart_values['rabbit_url'] = prompt("Rabbit URL:")
upstart_file = StringIO(UPSTART_TEMPLATE.format(**upstart_values))
with api.cd('/etc/init'):
upload = put(upstart_file, 'sensor-rpc.conf', use_sudo=True)
assert upload.succeeded
api.run('git clone https://github.com/projectweekend/Pi-Sensor-RPC-Service.git')
with api.cd('~/Pi-Sensor-RPC-Service/app'):
api.run('npm install')
api.sudo('service sensor-rpc start')
def update():
api.require('hosts', provided_by=[raspberry_pi])
with api.settings(warn_only=True):
api.sudo('service sensor-rpc stop')
with api.cd('~/Pi-Sensor-RPC-Service'):
api.run('git pull origin master')
with api.cd('~/Pi-Sensor-RPC-Service/app'):
api.run('npm install')
api.sudo('service sensor-rpc start')
|
71afe1acbdd7ee265983397e237b514482f04f73
|
src/neuralNetwork.py
|
src/neuralNetwork.py
|
import numpy as np
def initializeNetwork(layerSizes = [1,2,3,4,1]):
l = len(layerSizes)
parameters = {}
for i in range(1,l):
parameters['W'+str(i)] = np.random.randn(layerSizes[i],layerSizes[i-1])*0.1
parameters['b'+str(i)] = np.empty((i,1))
return parameters
def forwardProp(X, parameters):
As = {}
A = X
l = len(parameters)//2
for i in range(1, l):
A = np.dot(parameters['W'+str(i)],A)
As['A'+str(i)] = A
return As
parameters = initializeNetwork()
forwardProp(np.array([[1,2,3,1]]),parameters)
|
Add foward propagation and network initialization functions
|
Add foward propagation and network initialization functions
|
Python
|
mit
|
YuelongLi/Deep-Learning
|
Add foward propagation and network initialization functions
|
import numpy as np
def initializeNetwork(layerSizes = [1,2,3,4,1]):
l = len(layerSizes)
parameters = {}
for i in range(1,l):
parameters['W'+str(i)] = np.random.randn(layerSizes[i],layerSizes[i-1])*0.1
parameters['b'+str(i)] = np.empty((i,1))
return parameters
def forwardProp(X, parameters):
As = {}
A = X
l = len(parameters)//2
for i in range(1, l):
A = np.dot(parameters['W'+str(i)],A)
As['A'+str(i)] = A
return As
parameters = initializeNetwork()
forwardProp(np.array([[1,2,3,1]]),parameters)
|
<commit_before>
<commit_msg>Add foward propagation and network initialization functions<commit_after>
|
import numpy as np
def initializeNetwork(layerSizes = [1,2,3,4,1]):
l = len(layerSizes)
parameters = {}
for i in range(1,l):
parameters['W'+str(i)] = np.random.randn(layerSizes[i],layerSizes[i-1])*0.1
parameters['b'+str(i)] = np.empty((i,1))
return parameters
def forwardProp(X, parameters):
As = {}
A = X
l = len(parameters)//2
for i in range(1, l):
A = np.dot(parameters['W'+str(i)],A)
As['A'+str(i)] = A
return As
parameters = initializeNetwork()
forwardProp(np.array([[1,2,3,1]]),parameters)
|
Add foward propagation and network initialization functionsimport numpy as np
def initializeNetwork(layerSizes = [1,2,3,4,1]):
l = len(layerSizes)
parameters = {}
for i in range(1,l):
parameters['W'+str(i)] = np.random.randn(layerSizes[i],layerSizes[i-1])*0.1
parameters['b'+str(i)] = np.empty((i,1))
return parameters
def forwardProp(X, parameters):
As = {}
A = X
l = len(parameters)//2
for i in range(1, l):
A = np.dot(parameters['W'+str(i)],A)
As['A'+str(i)] = A
return As
parameters = initializeNetwork()
forwardProp(np.array([[1,2,3,1]]),parameters)
|
<commit_before>
<commit_msg>Add foward propagation and network initialization functions<commit_after>import numpy as np
def initializeNetwork(layerSizes = [1,2,3,4,1]):
l = len(layerSizes)
parameters = {}
for i in range(1,l):
parameters['W'+str(i)] = np.random.randn(layerSizes[i],layerSizes[i-1])*0.1
parameters['b'+str(i)] = np.empty((i,1))
return parameters
def forwardProp(X, parameters):
As = {}
A = X
l = len(parameters)//2
for i in range(1, l):
A = np.dot(parameters['W'+str(i)],A)
As['A'+str(i)] = A
return As
parameters = initializeNetwork()
forwardProp(np.array([[1,2,3,1]]),parameters)
|
|
97efe99ae964e8f4e866d961282257e6f4293fd8
|
synapse/config/workers.py
|
synapse/config/workers.py
|
# -*- coding: utf-8 -*-
# Copyright 2016 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ._base import Config
class WorkerConfig(Config):
"""The workers are processes run separately to the main synapse process.
They have their own pid_file and listener configuration. They use the
replication_url to talk to the main synapse process."""
def read_config(self, config):
self.worker_app = config.get("worker_app")
self.worker_listeners = config.get("worker_listeners")
self.worker_daemonize = config.get("worker_daemonize")
self.worker_pid_file = config.get("worker_pid_file")
self.worker_log_file = config.get("worker_log_file")
self.worker_log_config = config.get("worker_log_config")
self.worker_replication_url = config.get("worker_replication_url")
|
# -*- coding: utf-8 -*-
# Copyright 2016 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ._base import Config
class WorkerConfig(Config):
"""The workers are processes run separately to the main synapse process.
They have their own pid_file and listener configuration. They use the
replication_url to talk to the main synapse process."""
def read_config(self, config):
self.worker_app = config.get("worker_app")
self.worker_listeners = config.get("worker_listeners")
self.worker_daemonize = config.get("worker_daemonize")
self.worker_pid_file = config.get("worker_pid_file")
self.worker_log_file = config.get("worker_log_file")
self.worker_log_config = config.get("worker_log_config")
self.worker_replication_url = config.get("worker_replication_url")
if self.worker_listeners:
for listener in self.worker_listeners:
bind_address = listener.pop("bind_address", None)
bind_addresses = listener.setdefault("bind_addresses", [])
if bind_address:
bind_addresses.append(bind_address)
elif not bind_addresses:
bind_addresses.append('')
|
Make worker listener config backwards compat
|
Make worker listener config backwards compat
|
Python
|
apache-2.0
|
matrix-org/synapse,matrix-org/synapse,matrix-org/synapse,TribeMedia/synapse,TribeMedia/synapse,TribeMedia/synapse,matrix-org/synapse,matrix-org/synapse,matrix-org/synapse,TribeMedia/synapse,TribeMedia/synapse
|
# -*- coding: utf-8 -*-
# Copyright 2016 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ._base import Config
class WorkerConfig(Config):
"""The workers are processes run separately to the main synapse process.
They have their own pid_file and listener configuration. They use the
replication_url to talk to the main synapse process."""
def read_config(self, config):
self.worker_app = config.get("worker_app")
self.worker_listeners = config.get("worker_listeners")
self.worker_daemonize = config.get("worker_daemonize")
self.worker_pid_file = config.get("worker_pid_file")
self.worker_log_file = config.get("worker_log_file")
self.worker_log_config = config.get("worker_log_config")
self.worker_replication_url = config.get("worker_replication_url")
Make worker listener config backwards compat
|
# -*- coding: utf-8 -*-
# Copyright 2016 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ._base import Config
class WorkerConfig(Config):
"""The workers are processes run separately to the main synapse process.
They have their own pid_file and listener configuration. They use the
replication_url to talk to the main synapse process."""
def read_config(self, config):
self.worker_app = config.get("worker_app")
self.worker_listeners = config.get("worker_listeners")
self.worker_daemonize = config.get("worker_daemonize")
self.worker_pid_file = config.get("worker_pid_file")
self.worker_log_file = config.get("worker_log_file")
self.worker_log_config = config.get("worker_log_config")
self.worker_replication_url = config.get("worker_replication_url")
if self.worker_listeners:
for listener in self.worker_listeners:
bind_address = listener.pop("bind_address", None)
bind_addresses = listener.setdefault("bind_addresses", [])
if bind_address:
bind_addresses.append(bind_address)
elif not bind_addresses:
bind_addresses.append('')
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2016 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ._base import Config
class WorkerConfig(Config):
"""The workers are processes run separately to the main synapse process.
They have their own pid_file and listener configuration. They use the
replication_url to talk to the main synapse process."""
def read_config(self, config):
self.worker_app = config.get("worker_app")
self.worker_listeners = config.get("worker_listeners")
self.worker_daemonize = config.get("worker_daemonize")
self.worker_pid_file = config.get("worker_pid_file")
self.worker_log_file = config.get("worker_log_file")
self.worker_log_config = config.get("worker_log_config")
self.worker_replication_url = config.get("worker_replication_url")
<commit_msg>Make worker listener config backwards compat<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright 2016 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ._base import Config
class WorkerConfig(Config):
"""The workers are processes run separately to the main synapse process.
They have their own pid_file and listener configuration. They use the
replication_url to talk to the main synapse process."""
def read_config(self, config):
self.worker_app = config.get("worker_app")
self.worker_listeners = config.get("worker_listeners")
self.worker_daemonize = config.get("worker_daemonize")
self.worker_pid_file = config.get("worker_pid_file")
self.worker_log_file = config.get("worker_log_file")
self.worker_log_config = config.get("worker_log_config")
self.worker_replication_url = config.get("worker_replication_url")
if self.worker_listeners:
for listener in self.worker_listeners:
bind_address = listener.pop("bind_address", None)
bind_addresses = listener.setdefault("bind_addresses", [])
if bind_address:
bind_addresses.append(bind_address)
elif not bind_addresses:
bind_addresses.append('')
|
# -*- coding: utf-8 -*-
# Copyright 2016 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ._base import Config
class WorkerConfig(Config):
"""The workers are processes run separately to the main synapse process.
They have their own pid_file and listener configuration. They use the
replication_url to talk to the main synapse process."""
def read_config(self, config):
self.worker_app = config.get("worker_app")
self.worker_listeners = config.get("worker_listeners")
self.worker_daemonize = config.get("worker_daemonize")
self.worker_pid_file = config.get("worker_pid_file")
self.worker_log_file = config.get("worker_log_file")
self.worker_log_config = config.get("worker_log_config")
self.worker_replication_url = config.get("worker_replication_url")
Make worker listener config backwards compat# -*- coding: utf-8 -*-
# Copyright 2016 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ._base import Config
class WorkerConfig(Config):
"""The workers are processes run separately to the main synapse process.
They have their own pid_file and listener configuration. They use the
replication_url to talk to the main synapse process."""
def read_config(self, config):
self.worker_app = config.get("worker_app")
self.worker_listeners = config.get("worker_listeners")
self.worker_daemonize = config.get("worker_daemonize")
self.worker_pid_file = config.get("worker_pid_file")
self.worker_log_file = config.get("worker_log_file")
self.worker_log_config = config.get("worker_log_config")
self.worker_replication_url = config.get("worker_replication_url")
if self.worker_listeners:
for listener in self.worker_listeners:
bind_address = listener.pop("bind_address", None)
bind_addresses = listener.setdefault("bind_addresses", [])
if bind_address:
bind_addresses.append(bind_address)
elif not bind_addresses:
bind_addresses.append('')
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2016 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ._base import Config
class WorkerConfig(Config):
"""The workers are processes run separately to the main synapse process.
They have their own pid_file and listener configuration. They use the
replication_url to talk to the main synapse process."""
def read_config(self, config):
self.worker_app = config.get("worker_app")
self.worker_listeners = config.get("worker_listeners")
self.worker_daemonize = config.get("worker_daemonize")
self.worker_pid_file = config.get("worker_pid_file")
self.worker_log_file = config.get("worker_log_file")
self.worker_log_config = config.get("worker_log_config")
self.worker_replication_url = config.get("worker_replication_url")
<commit_msg>Make worker listener config backwards compat<commit_after># -*- coding: utf-8 -*-
# Copyright 2016 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ._base import Config
class WorkerConfig(Config):
"""The workers are processes run separately to the main synapse process.
They have their own pid_file and listener configuration. They use the
replication_url to talk to the main synapse process."""
def read_config(self, config):
self.worker_app = config.get("worker_app")
self.worker_listeners = config.get("worker_listeners")
self.worker_daemonize = config.get("worker_daemonize")
self.worker_pid_file = config.get("worker_pid_file")
self.worker_log_file = config.get("worker_log_file")
self.worker_log_config = config.get("worker_log_config")
self.worker_replication_url = config.get("worker_replication_url")
if self.worker_listeners:
for listener in self.worker_listeners:
bind_address = listener.pop("bind_address", None)
bind_addresses = listener.setdefault("bind_addresses", [])
if bind_address:
bind_addresses.append(bind_address)
elif not bind_addresses:
bind_addresses.append('')
|
a440ea14d1dabb18aae7d8c7e0b6433dd57866f8
|
overlay/Label.py
|
overlay/Label.py
|
from SVGGenerator import SVGGenerator
class Label(SVGGenerator):
def __init__(self, x, y, text):
SVGGenerator.__init__(self, './label.svg.mustache')
self.x = x
self.y = y
self.text = text
self.alignment = "start"
self.font_size = 12
self.color = "rgb(64,64,64)"
|
from SVGGenerator import SVGGenerator
class Label(SVGGenerator):
def __init__(self, x, y, text):
SVGGenerator.__init__(self, './label.svg.mustache')
self.x = x
self.y = y
self.text = text
self.alignment = "start"
self.font_size = 14
self.color = "rgb(0,0,0)"
|
Set default label color to black
|
Set default label color to black
|
Python
|
mit
|
gizmo-cda/g2x,thelonious/g2x,gizmo-cda/g2x,gizmo-cda/g2x,gizmo-cda/g2x,thelonious/g2x
|
from SVGGenerator import SVGGenerator
class Label(SVGGenerator):
def __init__(self, x, y, text):
SVGGenerator.__init__(self, './label.svg.mustache')
self.x = x
self.y = y
self.text = text
self.alignment = "start"
self.font_size = 12
self.color = "rgb(64,64,64)"
Set default label color to black
|
from SVGGenerator import SVGGenerator
class Label(SVGGenerator):
def __init__(self, x, y, text):
SVGGenerator.__init__(self, './label.svg.mustache')
self.x = x
self.y = y
self.text = text
self.alignment = "start"
self.font_size = 14
self.color = "rgb(0,0,0)"
|
<commit_before>from SVGGenerator import SVGGenerator
class Label(SVGGenerator):
def __init__(self, x, y, text):
SVGGenerator.__init__(self, './label.svg.mustache')
self.x = x
self.y = y
self.text = text
self.alignment = "start"
self.font_size = 12
self.color = "rgb(64,64,64)"
<commit_msg>Set default label color to black<commit_after>
|
from SVGGenerator import SVGGenerator
class Label(SVGGenerator):
def __init__(self, x, y, text):
SVGGenerator.__init__(self, './label.svg.mustache')
self.x = x
self.y = y
self.text = text
self.alignment = "start"
self.font_size = 14
self.color = "rgb(0,0,0)"
|
from SVGGenerator import SVGGenerator
class Label(SVGGenerator):
def __init__(self, x, y, text):
SVGGenerator.__init__(self, './label.svg.mustache')
self.x = x
self.y = y
self.text = text
self.alignment = "start"
self.font_size = 12
self.color = "rgb(64,64,64)"
Set default label color to blackfrom SVGGenerator import SVGGenerator
class Label(SVGGenerator):
def __init__(self, x, y, text):
SVGGenerator.__init__(self, './label.svg.mustache')
self.x = x
self.y = y
self.text = text
self.alignment = "start"
self.font_size = 14
self.color = "rgb(0,0,0)"
|
<commit_before>from SVGGenerator import SVGGenerator
class Label(SVGGenerator):
def __init__(self, x, y, text):
SVGGenerator.__init__(self, './label.svg.mustache')
self.x = x
self.y = y
self.text = text
self.alignment = "start"
self.font_size = 12
self.color = "rgb(64,64,64)"
<commit_msg>Set default label color to black<commit_after>from SVGGenerator import SVGGenerator
class Label(SVGGenerator):
def __init__(self, x, y, text):
SVGGenerator.__init__(self, './label.svg.mustache')
self.x = x
self.y = y
self.text = text
self.alignment = "start"
self.font_size = 14
self.color = "rgb(0,0,0)"
|
582811074db86be964648dc9457855db3549a2b5
|
data_structures/Disjoint_Set_Union/Python/dsu.py
|
data_structures/Disjoint_Set_Union/Python/dsu.py
|
parent=[]
size=[]
def initialize(n):
for i in range(0,n+1):
parent.append(i)
size.append(1)
def find(x):
if parent[x] == x:
return x
else:
return find(parent[x])
def join(a,b):
p_a = find(a)
p_b = find(b)
if p_a != p_b:
if size[p_a] < size[p_b]:
parent[p_a] = p_b
size[p_b] += size[p_a]
else:
parent[p_b] = p_a
size[p_a] += size[p_b]
''' Main Program Starts Here '''
n=5
initialize(n)
join(1,2)
join(2,3)
join(4,5)
print(find(3))
|
parent=[]
size=[]
def initialize(n):
for i in range(0,n+1):
parent.append(i)
size.append(1)
def find(x):
if parent[x] == x:
return x
else:
return find(parent[x])
def join(a,b):
p_a = find(a)
p_b = find(b)
if p_a != p_b:
if size[p_a] < size[p_b]:
parent[p_a] = p_b
size[p_b] += size[p_a]
else:
parent[p_b] = p_a
size[p_a] += size[p_b]
''' Main Program Starts Here '''
def main():
n=5
initialize(n)
join(1,2)
assert(find(2) == 1)
assert(find(3) == 3)
join(2,3)
assert(find(3) == 1)
assert(find(5) == 5)
join(4,5)
assert(find(5) == 4)
join(3,4)
assert(find(5) == 1)
if __name__ == '__main__':
main()
|
Test for DSU on Python
|
Test for DSU on Python
|
Python
|
cc0-1.0
|
ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms
|
parent=[]
size=[]
def initialize(n):
for i in range(0,n+1):
parent.append(i)
size.append(1)
def find(x):
if parent[x] == x:
return x
else:
return find(parent[x])
def join(a,b):
p_a = find(a)
p_b = find(b)
if p_a != p_b:
if size[p_a] < size[p_b]:
parent[p_a] = p_b
size[p_b] += size[p_a]
else:
parent[p_b] = p_a
size[p_a] += size[p_b]
''' Main Program Starts Here '''
n=5
initialize(n)
join(1,2)
join(2,3)
join(4,5)
print(find(3))
Test for DSU on Python
|
parent=[]
size=[]
def initialize(n):
for i in range(0,n+1):
parent.append(i)
size.append(1)
def find(x):
if parent[x] == x:
return x
else:
return find(parent[x])
def join(a,b):
p_a = find(a)
p_b = find(b)
if p_a != p_b:
if size[p_a] < size[p_b]:
parent[p_a] = p_b
size[p_b] += size[p_a]
else:
parent[p_b] = p_a
size[p_a] += size[p_b]
''' Main Program Starts Here '''
def main():
n=5
initialize(n)
join(1,2)
assert(find(2) == 1)
assert(find(3) == 3)
join(2,3)
assert(find(3) == 1)
assert(find(5) == 5)
join(4,5)
assert(find(5) == 4)
join(3,4)
assert(find(5) == 1)
if __name__ == '__main__':
main()
|
<commit_before>
parent=[]
size=[]
def initialize(n):
for i in range(0,n+1):
parent.append(i)
size.append(1)
def find(x):
if parent[x] == x:
return x
else:
return find(parent[x])
def join(a,b):
p_a = find(a)
p_b = find(b)
if p_a != p_b:
if size[p_a] < size[p_b]:
parent[p_a] = p_b
size[p_b] += size[p_a]
else:
parent[p_b] = p_a
size[p_a] += size[p_b]
''' Main Program Starts Here '''
n=5
initialize(n)
join(1,2)
join(2,3)
join(4,5)
print(find(3))
<commit_msg>Test for DSU on Python<commit_after>
|
parent=[]
size=[]
def initialize(n):
for i in range(0,n+1):
parent.append(i)
size.append(1)
def find(x):
if parent[x] == x:
return x
else:
return find(parent[x])
def join(a,b):
p_a = find(a)
p_b = find(b)
if p_a != p_b:
if size[p_a] < size[p_b]:
parent[p_a] = p_b
size[p_b] += size[p_a]
else:
parent[p_b] = p_a
size[p_a] += size[p_b]
''' Main Program Starts Here '''
def main():
n=5
initialize(n)
join(1,2)
assert(find(2) == 1)
assert(find(3) == 3)
join(2,3)
assert(find(3) == 1)
assert(find(5) == 5)
join(4,5)
assert(find(5) == 4)
join(3,4)
assert(find(5) == 1)
if __name__ == '__main__':
main()
|
parent=[]
size=[]
def initialize(n):
for i in range(0,n+1):
parent.append(i)
size.append(1)
def find(x):
if parent[x] == x:
return x
else:
return find(parent[x])
def join(a,b):
p_a = find(a)
p_b = find(b)
if p_a != p_b:
if size[p_a] < size[p_b]:
parent[p_a] = p_b
size[p_b] += size[p_a]
else:
parent[p_b] = p_a
size[p_a] += size[p_b]
''' Main Program Starts Here '''
n=5
initialize(n)
join(1,2)
join(2,3)
join(4,5)
print(find(3))
Test for DSU on Python
parent=[]
size=[]
def initialize(n):
for i in range(0,n+1):
parent.append(i)
size.append(1)
def find(x):
if parent[x] == x:
return x
else:
return find(parent[x])
def join(a,b):
p_a = find(a)
p_b = find(b)
if p_a != p_b:
if size[p_a] < size[p_b]:
parent[p_a] = p_b
size[p_b] += size[p_a]
else:
parent[p_b] = p_a
size[p_a] += size[p_b]
''' Main Program Starts Here '''
def main():
n=5
initialize(n)
join(1,2)
assert(find(2) == 1)
assert(find(3) == 3)
join(2,3)
assert(find(3) == 1)
assert(find(5) == 5)
join(4,5)
assert(find(5) == 4)
join(3,4)
assert(find(5) == 1)
if __name__ == '__main__':
main()
|
<commit_before>
parent=[]
size=[]
def initialize(n):
for i in range(0,n+1):
parent.append(i)
size.append(1)
def find(x):
if parent[x] == x:
return x
else:
return find(parent[x])
def join(a,b):
p_a = find(a)
p_b = find(b)
if p_a != p_b:
if size[p_a] < size[p_b]:
parent[p_a] = p_b
size[p_b] += size[p_a]
else:
parent[p_b] = p_a
size[p_a] += size[p_b]
''' Main Program Starts Here '''
n=5
initialize(n)
join(1,2)
join(2,3)
join(4,5)
print(find(3))
<commit_msg>Test for DSU on Python<commit_after>
parent=[]
size=[]
def initialize(n):
for i in range(0,n+1):
parent.append(i)
size.append(1)
def find(x):
if parent[x] == x:
return x
else:
return find(parent[x])
def join(a,b):
p_a = find(a)
p_b = find(b)
if p_a != p_b:
if size[p_a] < size[p_b]:
parent[p_a] = p_b
size[p_b] += size[p_a]
else:
parent[p_b] = p_a
size[p_a] += size[p_b]
''' Main Program Starts Here '''
def main():
n=5
initialize(n)
join(1,2)
assert(find(2) == 1)
assert(find(3) == 3)
join(2,3)
assert(find(3) == 1)
assert(find(5) == 5)
join(4,5)
assert(find(5) == 4)
join(3,4)
assert(find(5) == 1)
if __name__ == '__main__':
main()
|
049a01d148c757a17e9804a2b1e42c918e29b094
|
tests/basics/for_break.py
|
tests/basics/for_break.py
|
# Testcase for break in a for [within bunch of other code]
# https://github.com/micropython/micropython/issues/635
def foo():
seq = [1, 2, 3]
v = 100
i = 5
while i > 0:
print(i)
for a in seq:
if a == 2:
break
i -= 1
foo()
|
# Testcase for break in a for [within bunch of other code]
# https://github.com/micropython/micropython/issues/635
def foo():
seq = [1, 2, 3]
v = 100
i = 5
while i > 0:
print(i)
for a in seq:
if a == 2:
break
i -= 1
foo()
# break from within nested for loop
def bar():
l = [1, 2, 3]
for e1 in l:
print(e1)
for e2 in l:
print(e1, e2)
if e2 == 2:
break
bar()
|
Add another test for break-from-for-loop.
|
tests: Add another test for break-from-for-loop.
|
Python
|
mit
|
neilh10/micropython,galenhz/micropython,heisewangluo/micropython,heisewangluo/micropython,ruffy91/micropython,mgyenik/micropython,ericsnowcurrently/micropython,cnoviello/micropython,lbattraw/micropython,lbattraw/micropython,hosaka/micropython,EcmaXp/micropython,alex-march/micropython,Timmenem/micropython,cwyark/micropython,xhat/micropython,blazewicz/micropython,jlillest/micropython,jlillest/micropython,hosaka/micropython,tralamazza/micropython,MrSurly/micropython,ryannathans/micropython,feilongfl/micropython,PappaPeppar/micropython,orionrobots/micropython,pozetroninc/micropython,dhylands/micropython,MrSurly/micropython-esp32,adafruit/circuitpython,stonegithubs/micropython,slzatz/micropython,oopy/micropython,alex-robbins/micropython,martinribelotta/micropython,paul-xxx/micropython,firstval/micropython,kerneltask/micropython,utopiaprince/micropython,dinau/micropython,aethaniel/micropython,deshipu/micropython,dinau/micropython,tdautc19841202/micropython,tdautc19841202/micropython,puuu/micropython,orionrobots/micropython,pfalcon/micropython,AriZuu/micropython,galenhz/micropython,alex-march/micropython,bvernoux/micropython,bvernoux/micropython,infinnovation/micropython,feilongfl/micropython,xuxiaoxin/micropython,TDAbboud/micropython,praemdonck/micropython,micropython/micropython-esp32,matthewelse/micropython,hiway/micropython,vriera/micropython,pfalcon/micropython,vriera/micropython,deshipu/micropython,stonegithubs/micropython,suda/micropython,EcmaXp/micropython,jimkmc/micropython,redbear/micropython,tuc-osg/micropython,turbinenreiter/micropython,firstval/micropython,neilh10/micropython,mpalomer/micropython,ericsnowcurrently/micropython,mgyenik/micropython,skybird6672/micropython,lowRISC/micropython,redbear/micropython,chrisdearman/micropython,martinribelotta/micropython,KISSMonX/micropython,praemdonck/micropython,cnoviello/micropython,adafruit/circuitpython,mpalomer/micropython,chrisdearman/micropython,adafruit/micropython,selste/micropython,jmarcelino/pycom-micropython
,dmazzella/micropython,martinribelotta/micropython,ryannathans/micropython,xhat/micropython,xuxiaoxin/micropython,MrSurly/micropython,deshipu/micropython,noahchense/micropython,TDAbboud/micropython,MrSurly/micropython-esp32,selste/micropython,pozetroninc/micropython,rubencabrera/micropython,utopiaprince/micropython,dxxb/micropython,jimkmc/micropython,neilh10/micropython,vitiral/micropython,paul-xxx/micropython,ericsnowcurrently/micropython,selste/micropython,suda/micropython,tobbad/micropython,jimkmc/micropython,MrSurly/micropython,adamkh/micropython,trezor/micropython,AriZuu/micropython,tralamazza/micropython,feilongfl/micropython,ruffy91/micropython,ahotam/micropython,KISSMonX/micropython,stonegithubs/micropython,xuxiaoxin/micropython,kostyll/micropython,jimkmc/micropython,cnoviello/micropython,dxxb/micropython,misterdanb/micropython,omtinez/micropython,HenrikSolver/micropython,supergis/micropython,jlillest/micropython,xyb/micropython,swegener/micropython,tobbad/micropython,SHA2017-badge/micropython-esp32,dhylands/micropython,ceramos/micropython,kerneltask/micropython,torwag/micropython,TDAbboud/micropython,oopy/micropython,stonegithubs/micropython,bvernoux/micropython,cloudformdesign/micropython,Vogtinator/micropython,tobbad/micropython,swegener/micropython,turbinenreiter/micropython,bvernoux/micropython,ericsnowcurrently/micropython,tralamazza/micropython,danicampora/micropython,ChuckM/micropython,kerneltask/micropython,mhoffma/micropython,utopiaprince/micropython,dxxb/micropython,hosaka/micropython,pramasoul/micropython,adafruit/micropython,noahwilliamsson/micropython,cwyark/micropython,xhat/micropython,ernesto-g/micropython,oopy/micropython,adamkh/micropython,mpalomer/micropython,Peetz0r/micropython-esp32,hiway/micropython,mgyenik/micropython,SHA2017-badge/micropython-esp32,mpalomer/micropython,skybird6672/micropython,jimkmc/micropython,dhylands/micropython,KISSMonX/micropython,xyb/micropython,ganshun666/micropython,mgyenik/micropython,bvernoux/micropython,poz
etroninc/micropython,blmorris/micropython,matthewelse/micropython,henriknelson/micropython,tuc-osg/micropython,alex-march/micropython,utopiaprince/micropython,mianos/micropython,MrSurly/micropython,orionrobots/micropython,TDAbboud/micropython,tobbad/micropython,ryannathans/micropython,noahwilliamsson/micropython,tralamazza/micropython,tdautc19841202/micropython,jlillest/micropython,mianos/micropython,torwag/micropython,alex-march/micropython,redbear/micropython,omtinez/micropython,ganshun666/micropython,ahotam/micropython,turbinenreiter/micropython,infinnovation/micropython,matthewelse/micropython,firstval/micropython,MrSurly/micropython-esp32,tobbad/micropython,methoxid/micropystat,EcmaXp/micropython,matthewelse/micropython,ernesto-g/micropython,neilh10/micropython,adafruit/circuitpython,pfalcon/micropython,utopiaprince/micropython,torwag/micropython,emfcamp/micropython,ruffy91/micropython,lowRISC/micropython,noahchense/micropython,ahotam/micropython,selste/micropython,hiway/micropython,hiway/micropython,EcmaXp/micropython,firstval/micropython,oopy/micropython,rubencabrera/micropython,pramasoul/micropython,ceramos/micropython,firstval/micropython,slzatz/micropython,xyb/micropython,mgyenik/micropython,drrk/micropython,dinau/micropython,supergis/micropython,suda/micropython,kostyll/micropython,adafruit/circuitpython,rubencabrera/micropython,stonegithubs/micropython,hiway/micropython,Timmenem/micropython,Vogtinator/micropython,Peetz0r/micropython-esp32,SungEun-Steve-Kim/test-mp,tuc-osg/micropython,skybird6672/micropython,supergis/micropython,tdautc19841202/micropython,hosaka/micropython,adamkh/micropython,swegener/micropython,ceramos/micropython,dmazzella/micropython,danicampora/micropython,tuc-osg/micropython,tuc-osg/micropython,tdautc19841202/micropython,jmarcelino/pycom-micropython,PappaPeppar/micropython,lbattraw/micropython,ernesto-g/micropython,pramasoul/micropython,skybird6672/micropython,dhylands/micropython,Peetz0r/micropython-esp32,heisewangluo/micropython,T
DAbboud/micropython,dxxb/micropython,ruffy91/micropython,noahwilliamsson/micropython,selste/micropython,swegener/micropython,galenhz/micropython,supergis/micropython,turbinenreiter/micropython,ceramos/micropython,danicampora/micropython,neilh10/micropython,paul-xxx/micropython,toolmacher/micropython,SHA2017-badge/micropython-esp32,noahchense/micropython,slzatz/micropython,omtinez/micropython,dmazzella/micropython,chrisdearman/micropython,deshipu/micropython,pfalcon/micropython,cloudformdesign/micropython,feilongfl/micropython,aethaniel/micropython,cwyark/micropython,cloudformdesign/micropython,adafruit/micropython,misterdanb/micropython,blazewicz/micropython,misterdanb/micropython,cnoviello/micropython,ceramos/micropython,xuxiaoxin/micropython,micropython/micropython-esp32,danicampora/micropython,deshipu/micropython,jlillest/micropython,dmazzella/micropython,HenrikSolver/micropython,emfcamp/micropython,warner83/micropython,ganshun666/micropython,HenrikSolver/micropython,adamkh/micropython,xhat/micropython,cloudformdesign/micropython,ganshun666/micropython,SungEun-Steve-Kim/test-mp,dhylands/micropython,misterdanb/micropython,micropython/micropython-esp32,mhoffma/micropython,orionrobots/micropython,skybird6672/micropython,MrSurly/micropython-esp32,lbattraw/micropython,pramasoul/micropython,cwyark/micropython,blazewicz/micropython,ernesto-g/micropython,warner83/micropython,lbattraw/micropython,vitiral/micropython,praemdonck/micropython,torwag/micropython,puuu/micropython,Peetz0r/micropython-esp32,omtinez/micropython,noahchense/micropython,MrSurly/micropython-esp32,emfcamp/micropython,misterdanb/micropython,redbear/micropython,toolmacher/micropython,ahotam/micropython,AriZuu/micropython,kostyll/micropython,alex-robbins/micropython,adafruit/circuitpython,trezor/micropython,ChuckM/micropython,warner83/micropython,cloudformdesign/micropython,orionrobots/micropython,danicampora/micropython,galenhz/micropython,micropython/micropython-esp32,martinribelotta/micropython,methoxi
d/micropystat,heisewangluo/micropython,mpalomer/micropython,adamkh/micropython,supergis/micropython,henriknelson/micropython,paul-xxx/micropython,drrk/micropython,feilongfl/micropython,SungEun-Steve-Kim/test-mp,AriZuu/micropython,vriera/micropython,aethaniel/micropython,PappaPeppar/micropython,puuu/micropython,matthewelse/micropython,chrisdearman/micropython,HenrikSolver/micropython,lowRISC/micropython,Timmenem/micropython,jmarcelino/pycom-micropython,methoxid/micropystat,jmarcelino/pycom-micropython,pozetroninc/micropython,galenhz/micropython,torwag/micropython,drrk/micropython,KISSMonX/micropython,adafruit/circuitpython,mianos/micropython,drrk/micropython,AriZuu/micropython,Peetz0r/micropython-esp32,ChuckM/micropython,rubencabrera/micropython,noahwilliamsson/micropython,blazewicz/micropython,pozetroninc/micropython,aethaniel/micropython,noahwilliamsson/micropython,pramasoul/micropython,warner83/micropython,ruffy91/micropython,SungEun-Steve-Kim/test-mp,Vogtinator/micropython,cnoviello/micropython,martinribelotta/micropython,suda/micropython,xhat/micropython,Timmenem/micropython,blmorris/micropython,infinnovation/micropython,infinnovation/micropython,praemdonck/micropython,jmarcelino/pycom-micropython,PappaPeppar/micropython,trezor/micropython,puuu/micropython,hosaka/micropython,paul-xxx/micropython,blazewicz/micropython,chrisdearman/micropython,mianos/micropython,methoxid/micropystat,ericsnowcurrently/micropython,vitiral/micropython,Vogtinator/micropython,mhoffma/micropython,toolmacher/micropython,blmorris/micropython,lowRISC/micropython,heisewangluo/micropython,adafruit/micropython,EcmaXp/micropython,drrk/micropython,MrSurly/micropython,toolmacher/micropython,emfcamp/micropython,redbear/micropython,praemdonck/micropython,ChuckM/micropython,xyb/micropython,aethaniel/micropython,slzatz/micropython,alex-robbins/micropython,ryannathans/micropython,suda/micropython,emfcamp/micropython,lowRISC/micropython,kerneltask/micropython,HenrikSolver/micropython,rubencabrera/micr
opython,vriera/micropython,Vogtinator/micropython,SHA2017-badge/micropython-esp32,henriknelson/micropython,noahchense/micropython,vriera/micropython,puuu/micropython,toolmacher/micropython,xuxiaoxin/micropython,ernesto-g/micropython,PappaPeppar/micropython,pfalcon/micropython,SHA2017-badge/micropython-esp32,dinau/micropython,matthewelse/micropython,kostyll/micropython,alex-robbins/micropython,slzatz/micropython,trezor/micropython,alex-robbins/micropython,methoxid/micropystat,dxxb/micropython,trezor/micropython,adafruit/micropython,vitiral/micropython,SungEun-Steve-Kim/test-mp,xyb/micropython,kerneltask/micropython,mhoffma/micropython,vitiral/micropython,henriknelson/micropython,infinnovation/micropython,blmorris/micropython,micropython/micropython-esp32,swegener/micropython,ChuckM/micropython,kostyll/micropython,cwyark/micropython,KISSMonX/micropython,ganshun666/micropython,Timmenem/micropython,mianos/micropython,ryannathans/micropython,omtinez/micropython,turbinenreiter/micropython,oopy/micropython,dinau/micropython,mhoffma/micropython,blmorris/micropython,henriknelson/micropython,alex-march/micropython,warner83/micropython,ahotam/micropython
|
# Testcase for break in a for [within bunch of other code]
# https://github.com/micropython/micropython/issues/635
def foo():
seq = [1, 2, 3]
v = 100
i = 5
while i > 0:
print(i)
for a in seq:
if a == 2:
break
i -= 1
foo()
tests: Add another test for break-from-for-loop.
|
# Testcase for break in a for [within bunch of other code]
# https://github.com/micropython/micropython/issues/635
def foo():
seq = [1, 2, 3]
v = 100
i = 5
while i > 0:
print(i)
for a in seq:
if a == 2:
break
i -= 1
foo()
# break from within nested for loop
def bar():
l = [1, 2, 3]
for e1 in l:
print(e1)
for e2 in l:
print(e1, e2)
if e2 == 2:
break
bar()
|
<commit_before># Testcase for break in a for [within bunch of other code]
# https://github.com/micropython/micropython/issues/635
def foo():
seq = [1, 2, 3]
v = 100
i = 5
while i > 0:
print(i)
for a in seq:
if a == 2:
break
i -= 1
foo()
<commit_msg>tests: Add another test for break-from-for-loop.<commit_after>
|
# Testcase for break in a for [within bunch of other code]
# https://github.com/micropython/micropython/issues/635
def foo():
seq = [1, 2, 3]
v = 100
i = 5
while i > 0:
print(i)
for a in seq:
if a == 2:
break
i -= 1
foo()
# break from within nested for loop
def bar():
l = [1, 2, 3]
for e1 in l:
print(e1)
for e2 in l:
print(e1, e2)
if e2 == 2:
break
bar()
|
# Testcase for break in a for [within bunch of other code]
# https://github.com/micropython/micropython/issues/635
def foo():
seq = [1, 2, 3]
v = 100
i = 5
while i > 0:
print(i)
for a in seq:
if a == 2:
break
i -= 1
foo()
tests: Add another test for break-from-for-loop.# Testcase for break in a for [within bunch of other code]
# https://github.com/micropython/micropython/issues/635
def foo():
seq = [1, 2, 3]
v = 100
i = 5
while i > 0:
print(i)
for a in seq:
if a == 2:
break
i -= 1
foo()
# break from within nested for loop
def bar():
l = [1, 2, 3]
for e1 in l:
print(e1)
for e2 in l:
print(e1, e2)
if e2 == 2:
break
bar()
|
<commit_before># Testcase for break in a for [within bunch of other code]
# https://github.com/micropython/micropython/issues/635
def foo():
seq = [1, 2, 3]
v = 100
i = 5
while i > 0:
print(i)
for a in seq:
if a == 2:
break
i -= 1
foo()
<commit_msg>tests: Add another test for break-from-for-loop.<commit_after># Testcase for break in a for [within bunch of other code]
# https://github.com/micropython/micropython/issues/635
def foo():
seq = [1, 2, 3]
v = 100
i = 5
while i > 0:
print(i)
for a in seq:
if a == 2:
break
i -= 1
foo()
# break from within nested for loop
def bar():
l = [1, 2, 3]
for e1 in l:
print(e1)
for e2 in l:
print(e1, e2)
if e2 == 2:
break
bar()
|
9a8f27fb6b3cec373d841b0973ee59f2ddd0b875
|
fabfile.py
|
fabfile.py
|
from fabric.api import env, local, cd, run
env.use_ssh_config = True
env.hosts = ['root@skylines']
def deploy(branch='master', force=False):
push(branch, force)
restart()
def push(branch='master', force=False):
cmd = 'git push %s:/opt/skylines/src/ %s:master' % (env.host_string, branch)
if force:
cmd += ' --force'
local(cmd)
def restart():
with cd('/opt/skylines/src'):
run('git reset --hard')
# compile i18n .mo files
run('./manage.py babel compile')
# generate JS/CSS assets
run('./manage.py assets build')
# do database migrations
run('sudo -u skylines ./manage.py migrate upgrade')
# restart services
restart_service('skylines-fastcgi')
restart_service('mapserver-fastcgi')
restart_service('skylines-daemon')
restart_service('celery-daemon')
def restart_service(service):
run('sv restart ' + service)
|
from fabric.api import env, local, cd, run, settings, sudo
env.use_ssh_config = True
env.hosts = ['root@skylines']
def deploy(branch='master', force=False):
push(branch, force)
restart()
def push(branch='master', force=False):
cmd = 'git push %s:/opt/skylines/src/ %s:master' % (env.host_string, branch)
if force:
cmd += ' --force'
local(cmd)
def restart():
with cd('/opt/skylines/src'):
run('git reset --hard')
# compile i18n .mo files
run('./manage.py babel compile')
# generate JS/CSS assets
run('./manage.py assets build')
# do database migrations
with settings(sudo_user='skylines'):
sudo('./manage.py migrate upgrade')
# restart services
restart_service('skylines-fastcgi')
restart_service('mapserver-fastcgi')
restart_service('skylines-daemon')
restart_service('celery-daemon')
def restart_service(service):
run('sv restart ' + service)
|
Use sudo() function for db migration call
|
fabric: Use sudo() function for db migration call
|
Python
|
agpl-3.0
|
RBE-Avionik/skylines,shadowoneau/skylines,RBE-Avionik/skylines,Harry-R/skylines,Turbo87/skylines,Harry-R/skylines,skylines-project/skylines,TobiasLohner/SkyLines,shadowoneau/skylines,RBE-Avionik/skylines,kerel-fs/skylines,kerel-fs/skylines,snip/skylines,skylines-project/skylines,shadowoneau/skylines,Harry-R/skylines,shadowoneau/skylines,snip/skylines,TobiasLohner/SkyLines,Turbo87/skylines,Harry-R/skylines,snip/skylines,skylines-project/skylines,Turbo87/skylines,kerel-fs/skylines,Turbo87/skylines,TobiasLohner/SkyLines,RBE-Avionik/skylines,skylines-project/skylines
|
from fabric.api import env, local, cd, run
env.use_ssh_config = True
env.hosts = ['root@skylines']
def deploy(branch='master', force=False):
push(branch, force)
restart()
def push(branch='master', force=False):
cmd = 'git push %s:/opt/skylines/src/ %s:master' % (env.host_string, branch)
if force:
cmd += ' --force'
local(cmd)
def restart():
with cd('/opt/skylines/src'):
run('git reset --hard')
# compile i18n .mo files
run('./manage.py babel compile')
# generate JS/CSS assets
run('./manage.py assets build')
# do database migrations
run('sudo -u skylines ./manage.py migrate upgrade')
# restart services
restart_service('skylines-fastcgi')
restart_service('mapserver-fastcgi')
restart_service('skylines-daemon')
restart_service('celery-daemon')
def restart_service(service):
run('sv restart ' + service)
fabric: Use sudo() function for db migration call
|
from fabric.api import env, local, cd, run, settings, sudo
env.use_ssh_config = True
env.hosts = ['root@skylines']
def deploy(branch='master', force=False):
push(branch, force)
restart()
def push(branch='master', force=False):
cmd = 'git push %s:/opt/skylines/src/ %s:master' % (env.host_string, branch)
if force:
cmd += ' --force'
local(cmd)
def restart():
with cd('/opt/skylines/src'):
run('git reset --hard')
# compile i18n .mo files
run('./manage.py babel compile')
# generate JS/CSS assets
run('./manage.py assets build')
# do database migrations
with settings(sudo_user='skylines'):
sudo('./manage.py migrate upgrade')
# restart services
restart_service('skylines-fastcgi')
restart_service('mapserver-fastcgi')
restart_service('skylines-daemon')
restart_service('celery-daemon')
def restart_service(service):
run('sv restart ' + service)
|
<commit_before>from fabric.api import env, local, cd, run
env.use_ssh_config = True
env.hosts = ['root@skylines']
def deploy(branch='master', force=False):
push(branch, force)
restart()
def push(branch='master', force=False):
cmd = 'git push %s:/opt/skylines/src/ %s:master' % (env.host_string, branch)
if force:
cmd += ' --force'
local(cmd)
def restart():
with cd('/opt/skylines/src'):
run('git reset --hard')
# compile i18n .mo files
run('./manage.py babel compile')
# generate JS/CSS assets
run('./manage.py assets build')
# do database migrations
run('sudo -u skylines ./manage.py migrate upgrade')
# restart services
restart_service('skylines-fastcgi')
restart_service('mapserver-fastcgi')
restart_service('skylines-daemon')
restart_service('celery-daemon')
def restart_service(service):
run('sv restart ' + service)
<commit_msg>fabric: Use sudo() function for db migration call<commit_after>
|
from fabric.api import env, local, cd, run, settings, sudo
env.use_ssh_config = True
env.hosts = ['root@skylines']
def deploy(branch='master', force=False):
push(branch, force)
restart()
def push(branch='master', force=False):
cmd = 'git push %s:/opt/skylines/src/ %s:master' % (env.host_string, branch)
if force:
cmd += ' --force'
local(cmd)
def restart():
with cd('/opt/skylines/src'):
run('git reset --hard')
# compile i18n .mo files
run('./manage.py babel compile')
# generate JS/CSS assets
run('./manage.py assets build')
# do database migrations
with settings(sudo_user='skylines'):
sudo('./manage.py migrate upgrade')
# restart services
restart_service('skylines-fastcgi')
restart_service('mapserver-fastcgi')
restart_service('skylines-daemon')
restart_service('celery-daemon')
def restart_service(service):
run('sv restart ' + service)
|
from fabric.api import env, local, cd, run
env.use_ssh_config = True
env.hosts = ['root@skylines']
def deploy(branch='master', force=False):
push(branch, force)
restart()
def push(branch='master', force=False):
cmd = 'git push %s:/opt/skylines/src/ %s:master' % (env.host_string, branch)
if force:
cmd += ' --force'
local(cmd)
def restart():
with cd('/opt/skylines/src'):
run('git reset --hard')
# compile i18n .mo files
run('./manage.py babel compile')
# generate JS/CSS assets
run('./manage.py assets build')
# do database migrations
run('sudo -u skylines ./manage.py migrate upgrade')
# restart services
restart_service('skylines-fastcgi')
restart_service('mapserver-fastcgi')
restart_service('skylines-daemon')
restart_service('celery-daemon')
def restart_service(service):
run('sv restart ' + service)
fabric: Use sudo() function for db migration callfrom fabric.api import env, local, cd, run, settings, sudo
env.use_ssh_config = True
env.hosts = ['root@skylines']
def deploy(branch='master', force=False):
push(branch, force)
restart()
def push(branch='master', force=False):
cmd = 'git push %s:/opt/skylines/src/ %s:master' % (env.host_string, branch)
if force:
cmd += ' --force'
local(cmd)
def restart():
with cd('/opt/skylines/src'):
run('git reset --hard')
# compile i18n .mo files
run('./manage.py babel compile')
# generate JS/CSS assets
run('./manage.py assets build')
# do database migrations
with settings(sudo_user='skylines'):
sudo('./manage.py migrate upgrade')
# restart services
restart_service('skylines-fastcgi')
restart_service('mapserver-fastcgi')
restart_service('skylines-daemon')
restart_service('celery-daemon')
def restart_service(service):
run('sv restart ' + service)
|
<commit_before>from fabric.api import env, local, cd, run
env.use_ssh_config = True
env.hosts = ['root@skylines']
def deploy(branch='master', force=False):
push(branch, force)
restart()
def push(branch='master', force=False):
cmd = 'git push %s:/opt/skylines/src/ %s:master' % (env.host_string, branch)
if force:
cmd += ' --force'
local(cmd)
def restart():
with cd('/opt/skylines/src'):
run('git reset --hard')
# compile i18n .mo files
run('./manage.py babel compile')
# generate JS/CSS assets
run('./manage.py assets build')
# do database migrations
run('sudo -u skylines ./manage.py migrate upgrade')
# restart services
restart_service('skylines-fastcgi')
restart_service('mapserver-fastcgi')
restart_service('skylines-daemon')
restart_service('celery-daemon')
def restart_service(service):
run('sv restart ' + service)
<commit_msg>fabric: Use sudo() function for db migration call<commit_after>from fabric.api import env, local, cd, run, settings, sudo
env.use_ssh_config = True
env.hosts = ['root@skylines']
def deploy(branch='master', force=False):
push(branch, force)
restart()
def push(branch='master', force=False):
cmd = 'git push %s:/opt/skylines/src/ %s:master' % (env.host_string, branch)
if force:
cmd += ' --force'
local(cmd)
def restart():
with cd('/opt/skylines/src'):
run('git reset --hard')
# compile i18n .mo files
run('./manage.py babel compile')
# generate JS/CSS assets
run('./manage.py assets build')
# do database migrations
with settings(sudo_user='skylines'):
sudo('./manage.py migrate upgrade')
# restart services
restart_service('skylines-fastcgi')
restart_service('mapserver-fastcgi')
restart_service('skylines-daemon')
restart_service('celery-daemon')
def restart_service(service):
run('sv restart ' + service)
|
63d250d89dca72d91ad4470ba2af22b326d15454
|
app/utils.py
|
app/utils.py
|
import traceback
import sys
import logging
# always print stuff on the screen:
logging.basicConfig(level=logging.INFO)
def log_exception(func):
    """Decorator: log the full traceback of any exception, then re-raise it."""
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except:
            logging.info("Exception raised")
            formatted = "".join(traceback.format_exception(*sys.exc_info()))
            logging.info(formatted)
            logging.info("-" * 40)
            raise
    return wrapper
class Eval(object):
    """Stateful snippet evaluator with a persistent namespace.

    All leading lines of a snippet are executed as statements; if the last
    line parses as an expression, its value is returned as a string.
    Exceptions are not raised — the formatted traceback is returned
    instead, so the caller can display it to the user.
    """

    def __init__(self):
        # Shared globals/locals dict, kept across eval() calls.
        self._namespace = {}

    def eval(self, x):
        ns = self._namespace
        try:
            lines = x.strip().split('\n')
            if not lines:
                return ''
            body = '\n'.join(lines[:-1]) + '\n'
            last = lines[-1]
            try:
                # Try to treat the final line as an expression.
                expr = compile(last + '\n', '', 'eval')
            except SyntaxError:
                # Not an expression — execute it with the rest instead.
                body += '\n' + last
                expr = None
            eval(compile(body, '', 'exec'), ns, ns)
            return str(eval(expr, ns)) if expr is not None else ''
        except:
            etype, value, tb = sys.exc_info()
            # NOTE(review): the first traceback frame points at this eval
            # call; it could be dropped via tb.tb_next if confusing.
            return "".join(traceback.format_exception(etype, value, tb))
|
import traceback
import sys
import logging
# always print stuff on the screen:
logging.basicConfig(level=logging.INFO)
def log_exception(func):
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except:
logging.info("Exception raised")
etype, value, tb = sys.exc_info()
s = "".join(traceback.format_exception(etype, value, tb))
logging.info(s)
logging.info("-"*40)
raise
return wrapper
class Eval(object):
def __init__(self):
self._namespace = {}
def eval(self, x):
globals = self._namespace
try:
x = x.strip()
y = x.split('\n')
if len(y) == 0:
return ''
s = '\n'.join(y[:-1]) + '\n'
t = y[-1]
try:
z = compile(t + '\n', '', 'eval')
except SyntaxError:
s += '\n' + t
z = None
eval(compile(s, '', 'exec'), globals, globals)
if not z is None:
r = repr(eval(z, globals))
else:
r = ''
return r
except:
etype, value, tb = sys.exc_info()
# If we decide in the future to remove the first frame fromt he
# traceback (since it links to our code, so it could be confusing
# to the user), it's easy to do:
#tb = tb.tb_next
s = "".join(traceback.format_exception(etype, value, tb))
return s
|
Use repr() instead of str() for printing
|
Use repr() instead of str() for printing
|
Python
|
bsd-3-clause
|
kaichogami/sympy_gamma,kaichogami/sympy_gamma,iScienceLuvr/sympy_gamma,iScienceLuvr/sympy_gamma,debugger22/sympy_gamma,debugger22/sympy_gamma,bolshoibooze/sympy_gamma,github4ry/sympy_gamma,bolshoibooze/sympy_gamma,kaichogami/sympy_gamma,iScienceLuvr/sympy_gamma,github4ry/sympy_gamma,bolshoibooze/sympy_gamma,github4ry/sympy_gamma
|
import traceback
import sys
import logging
# always print stuff on the screen:
logging.basicConfig(level=logging.INFO)
def log_exception(func):
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except:
logging.info("Exception raised")
etype, value, tb = sys.exc_info()
s = "".join(traceback.format_exception(etype, value, tb))
logging.info(s)
logging.info("-"*40)
raise
return wrapper
class Eval(object):
def __init__(self):
self._namespace = {}
def eval(self, x):
globals = self._namespace
try:
x = x.strip()
y = x.split('\n')
if len(y) == 0:
return ''
s = '\n'.join(y[:-1]) + '\n'
t = y[-1]
try:
z = compile(t + '\n', '', 'eval')
except SyntaxError:
s += '\n' + t
z = None
eval(compile(s, '', 'exec'), globals, globals)
if not z is None:
r = str(eval(z, globals))
else:
r = ''
return r
except:
etype, value, tb = sys.exc_info()
# If we decide in the future to remove the first frame fromt he
# traceback (since it links to our code, so it could be confusing
# to the user), it's easy to do:
#tb = tb.tb_next
s = "".join(traceback.format_exception(etype, value, tb))
return s
Use repr() instead of str() for printing
|
import traceback
import sys
import logging
# always print stuff on the screen:
logging.basicConfig(level=logging.INFO)
def log_exception(func):
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except:
logging.info("Exception raised")
etype, value, tb = sys.exc_info()
s = "".join(traceback.format_exception(etype, value, tb))
logging.info(s)
logging.info("-"*40)
raise
return wrapper
class Eval(object):
def __init__(self):
self._namespace = {}
def eval(self, x):
globals = self._namespace
try:
x = x.strip()
y = x.split('\n')
if len(y) == 0:
return ''
s = '\n'.join(y[:-1]) + '\n'
t = y[-1]
try:
z = compile(t + '\n', '', 'eval')
except SyntaxError:
s += '\n' + t
z = None
eval(compile(s, '', 'exec'), globals, globals)
if not z is None:
r = repr(eval(z, globals))
else:
r = ''
return r
except:
etype, value, tb = sys.exc_info()
# If we decide in the future to remove the first frame fromt he
# traceback (since it links to our code, so it could be confusing
# to the user), it's easy to do:
#tb = tb.tb_next
s = "".join(traceback.format_exception(etype, value, tb))
return s
|
<commit_before>import traceback
import sys
import logging
# always print stuff on the screen:
logging.basicConfig(level=logging.INFO)
def log_exception(func):
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except:
logging.info("Exception raised")
etype, value, tb = sys.exc_info()
s = "".join(traceback.format_exception(etype, value, tb))
logging.info(s)
logging.info("-"*40)
raise
return wrapper
class Eval(object):
def __init__(self):
self._namespace = {}
def eval(self, x):
globals = self._namespace
try:
x = x.strip()
y = x.split('\n')
if len(y) == 0:
return ''
s = '\n'.join(y[:-1]) + '\n'
t = y[-1]
try:
z = compile(t + '\n', '', 'eval')
except SyntaxError:
s += '\n' + t
z = None
eval(compile(s, '', 'exec'), globals, globals)
if not z is None:
r = str(eval(z, globals))
else:
r = ''
return r
except:
etype, value, tb = sys.exc_info()
# If we decide in the future to remove the first frame fromt he
# traceback (since it links to our code, so it could be confusing
# to the user), it's easy to do:
#tb = tb.tb_next
s = "".join(traceback.format_exception(etype, value, tb))
return s
<commit_msg>Use repr() instead of str() for printing<commit_after>
|
import traceback
import sys
import logging
# always print stuff on the screen:
logging.basicConfig(level=logging.INFO)
def log_exception(func):
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except:
logging.info("Exception raised")
etype, value, tb = sys.exc_info()
s = "".join(traceback.format_exception(etype, value, tb))
logging.info(s)
logging.info("-"*40)
raise
return wrapper
class Eval(object):
def __init__(self):
self._namespace = {}
def eval(self, x):
globals = self._namespace
try:
x = x.strip()
y = x.split('\n')
if len(y) == 0:
return ''
s = '\n'.join(y[:-1]) + '\n'
t = y[-1]
try:
z = compile(t + '\n', '', 'eval')
except SyntaxError:
s += '\n' + t
z = None
eval(compile(s, '', 'exec'), globals, globals)
if not z is None:
r = repr(eval(z, globals))
else:
r = ''
return r
except:
etype, value, tb = sys.exc_info()
# If we decide in the future to remove the first frame fromt he
# traceback (since it links to our code, so it could be confusing
# to the user), it's easy to do:
#tb = tb.tb_next
s = "".join(traceback.format_exception(etype, value, tb))
return s
|
import traceback
import sys
import logging
# always print stuff on the screen:
logging.basicConfig(level=logging.INFO)
def log_exception(func):
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except:
logging.info("Exception raised")
etype, value, tb = sys.exc_info()
s = "".join(traceback.format_exception(etype, value, tb))
logging.info(s)
logging.info("-"*40)
raise
return wrapper
class Eval(object):
def __init__(self):
self._namespace = {}
def eval(self, x):
globals = self._namespace
try:
x = x.strip()
y = x.split('\n')
if len(y) == 0:
return ''
s = '\n'.join(y[:-1]) + '\n'
t = y[-1]
try:
z = compile(t + '\n', '', 'eval')
except SyntaxError:
s += '\n' + t
z = None
eval(compile(s, '', 'exec'), globals, globals)
if not z is None:
r = str(eval(z, globals))
else:
r = ''
return r
except:
etype, value, tb = sys.exc_info()
# If we decide in the future to remove the first frame fromt he
# traceback (since it links to our code, so it could be confusing
# to the user), it's easy to do:
#tb = tb.tb_next
s = "".join(traceback.format_exception(etype, value, tb))
return s
Use repr() instead of str() for printingimport traceback
import sys
import logging
# always print stuff on the screen:
logging.basicConfig(level=logging.INFO)
def log_exception(func):
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except:
logging.info("Exception raised")
etype, value, tb = sys.exc_info()
s = "".join(traceback.format_exception(etype, value, tb))
logging.info(s)
logging.info("-"*40)
raise
return wrapper
class Eval(object):
def __init__(self):
self._namespace = {}
def eval(self, x):
globals = self._namespace
try:
x = x.strip()
y = x.split('\n')
if len(y) == 0:
return ''
s = '\n'.join(y[:-1]) + '\n'
t = y[-1]
try:
z = compile(t + '\n', '', 'eval')
except SyntaxError:
s += '\n' + t
z = None
eval(compile(s, '', 'exec'), globals, globals)
if not z is None:
r = repr(eval(z, globals))
else:
r = ''
return r
except:
etype, value, tb = sys.exc_info()
# If we decide in the future to remove the first frame fromt he
# traceback (since it links to our code, so it could be confusing
# to the user), it's easy to do:
#tb = tb.tb_next
s = "".join(traceback.format_exception(etype, value, tb))
return s
|
<commit_before>import traceback
import sys
import logging
# always print stuff on the screen:
logging.basicConfig(level=logging.INFO)
def log_exception(func):
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except:
logging.info("Exception raised")
etype, value, tb = sys.exc_info()
s = "".join(traceback.format_exception(etype, value, tb))
logging.info(s)
logging.info("-"*40)
raise
return wrapper
class Eval(object):
def __init__(self):
self._namespace = {}
def eval(self, x):
globals = self._namespace
try:
x = x.strip()
y = x.split('\n')
if len(y) == 0:
return ''
s = '\n'.join(y[:-1]) + '\n'
t = y[-1]
try:
z = compile(t + '\n', '', 'eval')
except SyntaxError:
s += '\n' + t
z = None
eval(compile(s, '', 'exec'), globals, globals)
if not z is None:
r = str(eval(z, globals))
else:
r = ''
return r
except:
etype, value, tb = sys.exc_info()
# If we decide in the future to remove the first frame fromt he
# traceback (since it links to our code, so it could be confusing
# to the user), it's easy to do:
#tb = tb.tb_next
s = "".join(traceback.format_exception(etype, value, tb))
return s
<commit_msg>Use repr() instead of str() for printing<commit_after>import traceback
import sys
import logging
# always print stuff on the screen:
logging.basicConfig(level=logging.INFO)
def log_exception(func):
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except:
logging.info("Exception raised")
etype, value, tb = sys.exc_info()
s = "".join(traceback.format_exception(etype, value, tb))
logging.info(s)
logging.info("-"*40)
raise
return wrapper
class Eval(object):
def __init__(self):
self._namespace = {}
def eval(self, x):
globals = self._namespace
try:
x = x.strip()
y = x.split('\n')
if len(y) == 0:
return ''
s = '\n'.join(y[:-1]) + '\n'
t = y[-1]
try:
z = compile(t + '\n', '', 'eval')
except SyntaxError:
s += '\n' + t
z = None
eval(compile(s, '', 'exec'), globals, globals)
if not z is None:
r = repr(eval(z, globals))
else:
r = ''
return r
except:
etype, value, tb = sys.exc_info()
# If we decide in the future to remove the first frame fromt he
# traceback (since it links to our code, so it could be confusing
# to the user), it's easy to do:
#tb = tb.tb_next
s = "".join(traceback.format_exception(etype, value, tb))
return s
|
1f863b45e6e3fd5491a59d3d522ddd7ca5a5bfbd
|
ibutton.py
|
ibutton.py
|
import serial
class iButton(object):
def __init__(self, ibutton_address, rfid_address, debug=False):
# self.ibutton_serial = serial.Serial(ibutton_address)
self.rfid_serial = serial.Serial(rfid_address)
self.debug = debug
def read(self):
if self.debug:
with open("ibutton.txt") as ibutton_file:
return ibutton_file.readline().strip()
code = ''
while True:
byte = self.rfid_serial.read()
if len(code)==12:
return code
code += byte
print("Reading ID: %s" % code)
|
import serial
class iButton(object):
def __init__(self, ibutton_address, rfid_address, debug=False):
# self.ibutton_serial = serial.Serial(ibutton_address)
self.rfid_serial = serial.Serial(rfid_address)
self.debug = debug
def read(self):
if self.debug:
with open("ibutton.txt") as ibutton_file:
return ibutton_file.readline().strip()
code = ''
while True:
byte = self.rfid_serial.read()
if byte == ' ':
continue
if len(code)==12:
return code
code += byte
print("Reading ID: %s" % code)
|
Fix RFID reading code to ignore leading space
|
Fix RFID reading code to ignore leading space
|
Python
|
mit
|
stevenmirabito/DrinkTouchClient-2.0,harlanhaskins/DrinkTouchClient-2.0
|
import serial
class iButton(object):
    """Reads 12-character tag IDs from a serial RFID reader.

    In debug mode the ID is read from a local ``ibutton.txt`` file
    instead of the hardware.
    """

    def __init__(self, ibutton_address, rfid_address, debug=False):
        # self.ibutton_serial = serial.Serial(ibutton_address)
        self.rfid_serial = serial.Serial(rfid_address)
        self.debug = debug

    def read(self):
        """Block until a full 12-character tag ID has arrived; return it."""
        if self.debug:
            with open("ibutton.txt") as ibutton_file:
                return ibutton_file.readline().strip()
        code = ''
        while True:
            byte = self.rfid_serial.read()
            # The reader emits a leading space before the ID; skip it so
            # it does not get counted as part of the 12-character code.
            if byte == ' ':
                continue
            if len(code) == 12:
                return code
            code += byte
            print("Reading ID: %s" % code)
Fix RDID reading code to ignore leading space
|
import serial
class iButton(object):
    """Serial RFID reader wrapper; yields 12-character tag codes."""

    def __init__(self, ibutton_address, rfid_address, debug=False):
        # self.ibutton_serial = serial.Serial(ibutton_address)
        self.rfid_serial = serial.Serial(rfid_address)
        self.debug = debug

    def read(self):
        if self.debug:
            # Debug mode: take the ID from a local text file instead.
            with open("ibutton.txt") as ibutton_file:
                return ibutton_file.readline().strip()
        code = ''
        while True:
            char = self.rfid_serial.read()
            if char == ' ':
                # Ignore the leading space the reader sends.
                continue
            if len(code) == 12:
                return code
            code = code + char
            print("Reading ID: %s" % code)
|
<commit_before>import serial
class iButton(object):
def __init__(self, ibutton_address, rfid_address, debug=False):
# self.ibutton_serial = serial.Serial(ibutton_address)
self.rfid_serial = serial.Serial(rfid_address)
self.debug = debug
def read(self):
if self.debug:
with open("ibutton.txt") as ibutton_file:
return ibutton_file.readline().strip()
code = ''
while True:
byte = self.rfid_serial.read()
if len(code)==12:
return code
code += byte
print("Reading ID: %s" % code)
<commit_msg>Fix RDID reading code to ignore leading space<commit_after>
|
import serial
class iButton(object):
def __init__(self, ibutton_address, rfid_address, debug=False):
# self.ibutton_serial = serial.Serial(ibutton_address)
self.rfid_serial = serial.Serial(rfid_address)
self.debug = debug
def read(self):
if self.debug:
with open("ibutton.txt") as ibutton_file:
return ibutton_file.readline().strip()
code = ''
while True:
byte = self.rfid_serial.read()
if byte == ' ':
continue
if len(code)==12:
return code
code += byte
print("Reading ID: %s" % code)
|
import serial
class iButton(object):
def __init__(self, ibutton_address, rfid_address, debug=False):
# self.ibutton_serial = serial.Serial(ibutton_address)
self.rfid_serial = serial.Serial(rfid_address)
self.debug = debug
def read(self):
if self.debug:
with open("ibutton.txt") as ibutton_file:
return ibutton_file.readline().strip()
code = ''
while True:
byte = self.rfid_serial.read()
if len(code)==12:
return code
code += byte
print("Reading ID: %s" % code)
Fix RDID reading code to ignore leading spaceimport serial
class iButton(object):
def __init__(self, ibutton_address, rfid_address, debug=False):
# self.ibutton_serial = serial.Serial(ibutton_address)
self.rfid_serial = serial.Serial(rfid_address)
self.debug = debug
def read(self):
if self.debug:
with open("ibutton.txt") as ibutton_file:
return ibutton_file.readline().strip()
code = ''
while True:
byte = self.rfid_serial.read()
if byte == ' ':
continue
if len(code)==12:
return code
code += byte
print("Reading ID: %s" % code)
|
<commit_before>import serial
class iButton(object):
def __init__(self, ibutton_address, rfid_address, debug=False):
# self.ibutton_serial = serial.Serial(ibutton_address)
self.rfid_serial = serial.Serial(rfid_address)
self.debug = debug
def read(self):
if self.debug:
with open("ibutton.txt") as ibutton_file:
return ibutton_file.readline().strip()
code = ''
while True:
byte = self.rfid_serial.read()
if len(code)==12:
return code
code += byte
print("Reading ID: %s" % code)
<commit_msg>Fix RDID reading code to ignore leading space<commit_after>import serial
class iButton(object):
def __init__(self, ibutton_address, rfid_address, debug=False):
# self.ibutton_serial = serial.Serial(ibutton_address)
self.rfid_serial = serial.Serial(rfid_address)
self.debug = debug
def read(self):
if self.debug:
with open("ibutton.txt") as ibutton_file:
return ibutton_file.readline().strip()
code = ''
while True:
byte = self.rfid_serial.read()
if byte == ' ':
continue
if len(code)==12:
return code
code += byte
print("Reading ID: %s" % code)
|
c3881c1146d5dbe77a332698ac7c292c68c5a420
|
app.py
|
app.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import requests
from flask import Flask, render_template, request
app = Flask(__name__)
@app.route('/')
def index():
session = requests.Session()
session.headers.update({'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux \
x86_64; rv:28.0) Gecko/20100101 Firefox/28.0'})
date = request.args.get('date')
if date:
r = session.get(
'http://news.at.zhihu.com/api/1.2/news/before/{0}'.format(date))
story_list = []
else:
r = session.get('http://news.at.zhihu.com/api/1.2/news/latest')
print 'yes'
story_list = [
[item['title'], item['share_url']] for item in r.json()['top_stories']]
display_date = r.json()['display_date']
date = r.json()["date"]
news_list = [
[item['title'], item['share_url']] for item in r.json()['news']]
lists = story_list + news_list
return render_template("index.html", lists=lists, display_date=display_date, date=date)
if __name__ == '__main__':
app.run(debug=True)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import requests
from flask import Flask, render_template, request
app = Flask(__name__)
@app.route('/')
def index():
session = requests.Session()
session.headers.update({'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux \
x86_64; rv:28.0) Gecko/20100101 Firefox/28.0'})
date = request.args.get('date')
if date:
r = session.get(
'http://news.at.zhihu.com/api/1.2/news/before/{0}'.format(date))
# story_list = []
else:
r = session.get('http://news.at.zhihu.com/api/1.2/news/latest')
print 'yes'
# story_list = [
# [item['title'], item['share_url']] for item in r.json()['top_stories']]
display_date = r.json()['display_date']
date = r.json()["date"]
news_list = [
[item['title'], item['share_url']] for item in r.json()['news']]
# lists = story_list + news_list
return render_template("index.html", lists=news_list,
display_date=display_date, date=date)
if __name__ == '__main__':
app.run(debug=True)
|
Remove the duplicate top stories
|
Remove the duplicate top stories
|
Python
|
mit
|
lord63/zhihudaily,lord63/zhihudaily,lord63/zhihudaily
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import requests
from flask import Flask, render_template, request
app = Flask(__name__)
@app.route('/')
def index():
    """Render the news list for the latest issue, or the one before *date*.

    Query parameter ``date`` (optional): fetch the issue published before
    that day; otherwise the latest issue is fetched.
    """
    session = requests.Session()
    # Zhihu's API rejects the default python-requests User-Agent.
    session.headers.update({'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux \
x86_64; rv:28.0) Gecko/20100101 Firefox/28.0'})
    date = request.args.get('date')
    if date:
        r = session.get(
            'http://news.at.zhihu.com/api/1.2/news/before/{0}'.format(date))
    else:
        r = session.get('http://news.at.zhihu.com/api/1.2/news/latest')
    # Parse the response body once instead of on every field access.
    payload = r.json()
    display_date = payload['display_date']
    date = payload["date"]
    # 'top_stories' duplicates entries already present in 'news', so only
    # the plain news list is rendered.
    news_list = [
        [item['title'], item['share_url']] for item in payload['news']]
    return render_template("index.html", lists=news_list,
                           display_date=display_date, date=date)
if __name__ == '__main__':
app.run(debug=True)
Remove the dumplicate top stories
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import requests
from flask import Flask, render_template, request
app = Flask(__name__)
@app.route('/')
def index():
session = requests.Session()
session.headers.update({'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux \
x86_64; rv:28.0) Gecko/20100101 Firefox/28.0'})
date = request.args.get('date')
if date:
r = session.get(
'http://news.at.zhihu.com/api/1.2/news/before/{0}'.format(date))
# story_list = []
else:
r = session.get('http://news.at.zhihu.com/api/1.2/news/latest')
print 'yes'
# story_list = [
# [item['title'], item['share_url']] for item in r.json()['top_stories']]
display_date = r.json()['display_date']
date = r.json()["date"]
news_list = [
[item['title'], item['share_url']] for item in r.json()['news']]
# lists = story_list + news_list
return render_template("index.html", lists=news_list,
display_date=display_date, date=date)
if __name__ == '__main__':
app.run(debug=True)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import requests
from flask import Flask, render_template, request
app = Flask(__name__)
@app.route('/')
def index():
session = requests.Session()
session.headers.update({'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux \
x86_64; rv:28.0) Gecko/20100101 Firefox/28.0'})
date = request.args.get('date')
if date:
r = session.get(
'http://news.at.zhihu.com/api/1.2/news/before/{0}'.format(date))
story_list = []
else:
r = session.get('http://news.at.zhihu.com/api/1.2/news/latest')
print 'yes'
story_list = [
[item['title'], item['share_url']] for item in r.json()['top_stories']]
display_date = r.json()['display_date']
date = r.json()["date"]
news_list = [
[item['title'], item['share_url']] for item in r.json()['news']]
lists = story_list + news_list
return render_template("index.html", lists=lists, display_date=display_date, date=date)
if __name__ == '__main__':
app.run(debug=True)
<commit_msg>Remove the dumplicate top stories<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import requests
from flask import Flask, render_template, request
app = Flask(__name__)
@app.route('/')
def index():
session = requests.Session()
session.headers.update({'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux \
x86_64; rv:28.0) Gecko/20100101 Firefox/28.0'})
date = request.args.get('date')
if date:
r = session.get(
'http://news.at.zhihu.com/api/1.2/news/before/{0}'.format(date))
# story_list = []
else:
r = session.get('http://news.at.zhihu.com/api/1.2/news/latest')
print 'yes'
# story_list = [
# [item['title'], item['share_url']] for item in r.json()['top_stories']]
display_date = r.json()['display_date']
date = r.json()["date"]
news_list = [
[item['title'], item['share_url']] for item in r.json()['news']]
# lists = story_list + news_list
return render_template("index.html", lists=news_list,
display_date=display_date, date=date)
if __name__ == '__main__':
app.run(debug=True)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import requests
from flask import Flask, render_template, request
app = Flask(__name__)
@app.route('/')
def index():
session = requests.Session()
session.headers.update({'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux \
x86_64; rv:28.0) Gecko/20100101 Firefox/28.0'})
date = request.args.get('date')
if date:
r = session.get(
'http://news.at.zhihu.com/api/1.2/news/before/{0}'.format(date))
story_list = []
else:
r = session.get('http://news.at.zhihu.com/api/1.2/news/latest')
print 'yes'
story_list = [
[item['title'], item['share_url']] for item in r.json()['top_stories']]
display_date = r.json()['display_date']
date = r.json()["date"]
news_list = [
[item['title'], item['share_url']] for item in r.json()['news']]
lists = story_list + news_list
return render_template("index.html", lists=lists, display_date=display_date, date=date)
if __name__ == '__main__':
app.run(debug=True)
Remove the dumplicate top stories#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import requests
from flask import Flask, render_template, request
app = Flask(__name__)
@app.route('/')
def index():
session = requests.Session()
session.headers.update({'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux \
x86_64; rv:28.0) Gecko/20100101 Firefox/28.0'})
date = request.args.get('date')
if date:
r = session.get(
'http://news.at.zhihu.com/api/1.2/news/before/{0}'.format(date))
# story_list = []
else:
r = session.get('http://news.at.zhihu.com/api/1.2/news/latest')
print 'yes'
# story_list = [
# [item['title'], item['share_url']] for item in r.json()['top_stories']]
display_date = r.json()['display_date']
date = r.json()["date"]
news_list = [
[item['title'], item['share_url']] for item in r.json()['news']]
# lists = story_list + news_list
return render_template("index.html", lists=news_list,
display_date=display_date, date=date)
if __name__ == '__main__':
app.run(debug=True)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import requests
from flask import Flask, render_template, request
app = Flask(__name__)
@app.route('/')
def index():
session = requests.Session()
session.headers.update({'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux \
x86_64; rv:28.0) Gecko/20100101 Firefox/28.0'})
date = request.args.get('date')
if date:
r = session.get(
'http://news.at.zhihu.com/api/1.2/news/before/{0}'.format(date))
story_list = []
else:
r = session.get('http://news.at.zhihu.com/api/1.2/news/latest')
print 'yes'
story_list = [
[item['title'], item['share_url']] for item in r.json()['top_stories']]
display_date = r.json()['display_date']
date = r.json()["date"]
news_list = [
[item['title'], item['share_url']] for item in r.json()['news']]
lists = story_list + news_list
return render_template("index.html", lists=lists, display_date=display_date, date=date)
if __name__ == '__main__':
app.run(debug=True)
<commit_msg>Remove the dumplicate top stories<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import requests
from flask import Flask, render_template, request
app = Flask(__name__)
@app.route('/')
def index():
session = requests.Session()
session.headers.update({'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux \
x86_64; rv:28.0) Gecko/20100101 Firefox/28.0'})
date = request.args.get('date')
if date:
r = session.get(
'http://news.at.zhihu.com/api/1.2/news/before/{0}'.format(date))
# story_list = []
else:
r = session.get('http://news.at.zhihu.com/api/1.2/news/latest')
print 'yes'
# story_list = [
# [item['title'], item['share_url']] for item in r.json()['top_stories']]
display_date = r.json()['display_date']
date = r.json()["date"]
news_list = [
[item['title'], item['share_url']] for item in r.json()['news']]
# lists = story_list + news_list
return render_template("index.html", lists=news_list,
display_date=display_date, date=date)
if __name__ == '__main__':
app.run(debug=True)
|
2036e978a22cf980a6bc28a8e7276886fa8857e8
|
activeusers/__init__.py
|
activeusers/__init__.py
|
VERSION = (0, 1, 1)


def get_version():
    """Return the version as a human-format string."""
    return ".".join(map(str, VERSION))
|
VERSION = (0, 1, 2)
def get_version():
"Returns the version as a human-format string."
return '.'.join([str(i) for i in VERSION])
|
Increment version to 0.1.2 to fix pip not updating to edge
|
Increment version to 0.1.2 to fix pip not updating to edge
|
Python
|
mit
|
asavoy/django-activeusers,arteria/django-activeusers
|
VERSION = (0, 1, 1)
def get_version():
"Returns the version as a human-format string."
return '.'.join([str(i) for i in VERSION])
Increment version to 0.1.2 to fix pip not updating to edge
|
VERSION = (0, 1, 2)
def get_version():
"Returns the version as a human-format string."
return '.'.join([str(i) for i in VERSION])
|
<commit_before>
VERSION = (0, 1, 1)
def get_version():
"Returns the version as a human-format string."
return '.'.join([str(i) for i in VERSION])
<commit_msg>Increment version to 0.1.2 to fix pip not updating to edge<commit_after>
|
VERSION = (0, 1, 2)
def get_version():
"Returns the version as a human-format string."
return '.'.join([str(i) for i in VERSION])
|
VERSION = (0, 1, 1)
def get_version():
"Returns the version as a human-format string."
return '.'.join([str(i) for i in VERSION])
Increment version to 0.1.2 to fix pip not updating to edge
VERSION = (0, 1, 2)
def get_version():
"Returns the version as a human-format string."
return '.'.join([str(i) for i in VERSION])
|
<commit_before>
VERSION = (0, 1, 1)
def get_version():
"Returns the version as a human-format string."
return '.'.join([str(i) for i in VERSION])
<commit_msg>Increment version to 0.1.2 to fix pip not updating to edge<commit_after>
VERSION = (0, 1, 2)
def get_version():
"Returns the version as a human-format string."
return '.'.join([str(i) for i in VERSION])
|
989abdc718973551bbb3565859d75ea0408776d0
|
example_project/example_project/urls.py
|
example_project/example_project/urls.py
|
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.static import serve
urlpatterns = [
    # Django admin interface.
    url(r"^admin/", include(admin.site.urls)),
    # Serve uploaded media straight from Django (development only).
    url(r"^media/(?P<path>.*)$", serve, {"document_root": settings.MEDIA_ROOT}),
    # Gallery pages live at the site root.
    url(r"", include("pgallery.urls", namespace="pgallery")),
]
|
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.static import serve
urlpatterns = [
# Examples:
# url(r'^$', 'example_project.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r"^admin/", admin.site.urls),
url(r"^media/(?P<path>.*)$", serve, {"document_root": settings.MEDIA_ROOT}),
url(r"^markitup/", include("markitup.urls")),
url(r"", include("pgallery.urls", namespace="pgallery")),
]
|
Fix URLconf for example project.
|
Fix URLconf for example project.
|
Python
|
mit
|
zsiciarz/django-pgallery,zsiciarz/django-pgallery
|
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.static import serve
urlpatterns = [
# Examples:
# url(r'^$', 'example_project.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r"^admin/", include(admin.site.urls)),
url(r"^media/(?P<path>.*)$", serve, {"document_root": settings.MEDIA_ROOT}),
url(r"", include("pgallery.urls", namespace="pgallery")),
]
Fix URLconf for example project.
|
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.static import serve
urlpatterns = [
# Examples:
# url(r'^$', 'example_project.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r"^admin/", admin.site.urls),
url(r"^media/(?P<path>.*)$", serve, {"document_root": settings.MEDIA_ROOT}),
url(r"^markitup/", include("markitup.urls")),
url(r"", include("pgallery.urls", namespace="pgallery")),
]
|
<commit_before>from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.static import serve
urlpatterns = [
# Examples:
# url(r'^$', 'example_project.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r"^admin/", include(admin.site.urls)),
url(r"^media/(?P<path>.*)$", serve, {"document_root": settings.MEDIA_ROOT}),
url(r"", include("pgallery.urls", namespace="pgallery")),
]
<commit_msg>Fix URLconf for example project.<commit_after>
|
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.static import serve
urlpatterns = [
# Examples:
# url(r'^$', 'example_project.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r"^admin/", admin.site.urls),
url(r"^media/(?P<path>.*)$", serve, {"document_root": settings.MEDIA_ROOT}),
url(r"^markitup/", include("markitup.urls")),
url(r"", include("pgallery.urls", namespace="pgallery")),
]
|
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.static import serve
urlpatterns = [
# Examples:
# url(r'^$', 'example_project.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r"^admin/", include(admin.site.urls)),
url(r"^media/(?P<path>.*)$", serve, {"document_root": settings.MEDIA_ROOT}),
url(r"", include("pgallery.urls", namespace="pgallery")),
]
Fix URLconf for example project.from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.static import serve
urlpatterns = [
# Examples:
# url(r'^$', 'example_project.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r"^admin/", admin.site.urls),
url(r"^media/(?P<path>.*)$", serve, {"document_root": settings.MEDIA_ROOT}),
url(r"^markitup/", include("markitup.urls")),
url(r"", include("pgallery.urls", namespace="pgallery")),
]
|
<commit_before>from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.static import serve
urlpatterns = [
# Examples:
# url(r'^$', 'example_project.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r"^admin/", include(admin.site.urls)),
url(r"^media/(?P<path>.*)$", serve, {"document_root": settings.MEDIA_ROOT}),
url(r"", include("pgallery.urls", namespace="pgallery")),
]
<commit_msg>Fix URLconf for example project.<commit_after>from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.static import serve
urlpatterns = [
# Examples:
# url(r'^$', 'example_project.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r"^admin/", admin.site.urls),
url(r"^media/(?P<path>.*)$", serve, {"document_root": settings.MEDIA_ROOT}),
url(r"^markitup/", include("markitup.urls")),
url(r"", include("pgallery.urls", namespace="pgallery")),
]
|
cd9b2e375587fdf0bc6b2d61a983ca40e6680218
|
osf/migrations/0139_rename_aspredicted_schema.py
|
osf/migrations/0139_rename_aspredicted_schema.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-10-16 20:22
from __future__ import unicode_literals
from django.db import migrations
from osf.models import RegistrationSchema
OLD_NAME = 'AsPredicted Preregistration'
NEW_NAME = 'Preregistration Template from AsPredicted.org'
def rename_schema(from_name, to_name):
try:
schema = RegistrationSchema.objects.get(name=from_name)
except RegistrationSchema.DoesNotExist:
return
schema.name = to_name
schema.schema['name'] = to_name
schema.schema['title'] = to_name
schema.schema['pages'][0]['title'] = to_name
return schema.save()
def rename_aspredicted_schema(*args, **kwargs):
return rename_schema(OLD_NAME, NEW_NAME)
def undo_aspredicted_rename(*args, **kwargs):
return rename_schema(NEW_NAME, OLD_NAME)
class Migration(migrations.Migration):
dependencies = [
('osf', '0138_merge_20181012_1944'),
]
operations = [
migrations.RunPython(rename_aspredicted_schema, undo_aspredicted_rename)
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-10-16 20:22
from __future__ import unicode_literals
from django.db import migrations
OLD_NAME = 'AsPredicted Preregistration'
NEW_NAME = 'Preregistration Template from AsPredicted.org'
def rename_schema(model, from_name, to_name):
try:
schema = model.objects.get(name=from_name)
except model.DoesNotExist:
return
schema.name = to_name
schema.schema['name'] = to_name
schema.schema['title'] = to_name
schema.schema['pages'][0]['title'] = to_name
return schema.save()
def rename_aspredicted_schema(state, schema):
RegistrationSchema = state.get_model('osf.registrationschema')
return rename_schema(RegistrationSchema, OLD_NAME, NEW_NAME)
def undo_aspredicted_rename(state, schema):
RegistrationSchema = state.get_model('osf.registrationschema')
return rename_schema(RegistrationSchema, NEW_NAME, OLD_NAME)
class Migration(migrations.Migration):
dependencies = [
('osf', '0138_merge_20181012_1944'),
]
operations = [
migrations.RunPython(rename_aspredicted_schema, undo_aspredicted_rename)
]
|
Migrate with model from app state - Not from app code
|
Migrate with model from app state
- Not from app code
|
Python
|
apache-2.0
|
adlius/osf.io,mattclark/osf.io,cslzchen/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,felliott/osf.io,pattisdr/osf.io,mfraezz/osf.io,adlius/osf.io,baylee-d/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,Johnetordoff/osf.io,baylee-d/osf.io,saradbowman/osf.io,HalcyonChimera/osf.io,cslzchen/osf.io,cslzchen/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,felliott/osf.io,HalcyonChimera/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,saradbowman/osf.io,cslzchen/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,mfraezz/osf.io,Johnetordoff/osf.io,adlius/osf.io,HalcyonChimera/osf.io,mfraezz/osf.io,felliott/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,baylee-d/osf.io,Johnetordoff/osf.io
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-10-16 20:22
from __future__ import unicode_literals
from django.db import migrations
from osf.models import RegistrationSchema
OLD_NAME = 'AsPredicted Preregistration'
NEW_NAME = 'Preregistration Template from AsPredicted.org'
def rename_schema(from_name, to_name):
try:
schema = RegistrationSchema.objects.get(name=from_name)
except RegistrationSchema.DoesNotExist:
return
schema.name = to_name
schema.schema['name'] = to_name
schema.schema['title'] = to_name
schema.schema['pages'][0]['title'] = to_name
return schema.save()
def rename_aspredicted_schema(*args, **kwargs):
return rename_schema(OLD_NAME, NEW_NAME)
def undo_aspredicted_rename(*args, **kwargs):
return rename_schema(NEW_NAME, OLD_NAME)
class Migration(migrations.Migration):
dependencies = [
('osf', '0138_merge_20181012_1944'),
]
operations = [
migrations.RunPython(rename_aspredicted_schema, undo_aspredicted_rename)
]
Migrate with model from app state
- Not from app code
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-10-16 20:22
from __future__ import unicode_literals
from django.db import migrations
OLD_NAME = 'AsPredicted Preregistration'
NEW_NAME = 'Preregistration Template from AsPredicted.org'
def rename_schema(model, from_name, to_name):
try:
schema = model.objects.get(name=from_name)
except model.DoesNotExist:
return
schema.name = to_name
schema.schema['name'] = to_name
schema.schema['title'] = to_name
schema.schema['pages'][0]['title'] = to_name
return schema.save()
def rename_aspredicted_schema(state, schema):
RegistrationSchema = state.get_model('osf.registrationschema')
return rename_schema(RegistrationSchema, OLD_NAME, NEW_NAME)
def undo_aspredicted_rename(state, schema):
RegistrationSchema = state.get_model('osf.registrationschema')
return rename_schema(RegistrationSchema, NEW_NAME, OLD_NAME)
class Migration(migrations.Migration):
dependencies = [
('osf', '0138_merge_20181012_1944'),
]
operations = [
migrations.RunPython(rename_aspredicted_schema, undo_aspredicted_rename)
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-10-16 20:22
from __future__ import unicode_literals
from django.db import migrations
from osf.models import RegistrationSchema
OLD_NAME = 'AsPredicted Preregistration'
NEW_NAME = 'Preregistration Template from AsPredicted.org'
def rename_schema(from_name, to_name):
try:
schema = RegistrationSchema.objects.get(name=from_name)
except RegistrationSchema.DoesNotExist:
return
schema.name = to_name
schema.schema['name'] = to_name
schema.schema['title'] = to_name
schema.schema['pages'][0]['title'] = to_name
return schema.save()
def rename_aspredicted_schema(*args, **kwargs):
return rename_schema(OLD_NAME, NEW_NAME)
def undo_aspredicted_rename(*args, **kwargs):
return rename_schema(NEW_NAME, OLD_NAME)
class Migration(migrations.Migration):
dependencies = [
('osf', '0138_merge_20181012_1944'),
]
operations = [
migrations.RunPython(rename_aspredicted_schema, undo_aspredicted_rename)
]
<commit_msg>Migrate with model from app state
- Not from app code<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-10-16 20:22
from __future__ import unicode_literals
from django.db import migrations
OLD_NAME = 'AsPredicted Preregistration'
NEW_NAME = 'Preregistration Template from AsPredicted.org'
def rename_schema(model, from_name, to_name):
try:
schema = model.objects.get(name=from_name)
except model.DoesNotExist:
return
schema.name = to_name
schema.schema['name'] = to_name
schema.schema['title'] = to_name
schema.schema['pages'][0]['title'] = to_name
return schema.save()
def rename_aspredicted_schema(state, schema):
RegistrationSchema = state.get_model('osf.registrationschema')
return rename_schema(RegistrationSchema, OLD_NAME, NEW_NAME)
def undo_aspredicted_rename(state, schema):
RegistrationSchema = state.get_model('osf.registrationschema')
return rename_schema(RegistrationSchema, NEW_NAME, OLD_NAME)
class Migration(migrations.Migration):
dependencies = [
('osf', '0138_merge_20181012_1944'),
]
operations = [
migrations.RunPython(rename_aspredicted_schema, undo_aspredicted_rename)
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-10-16 20:22
from __future__ import unicode_literals
from django.db import migrations
from osf.models import RegistrationSchema
OLD_NAME = 'AsPredicted Preregistration'
NEW_NAME = 'Preregistration Template from AsPredicted.org'
def rename_schema(from_name, to_name):
try:
schema = RegistrationSchema.objects.get(name=from_name)
except RegistrationSchema.DoesNotExist:
return
schema.name = to_name
schema.schema['name'] = to_name
schema.schema['title'] = to_name
schema.schema['pages'][0]['title'] = to_name
return schema.save()
def rename_aspredicted_schema(*args, **kwargs):
return rename_schema(OLD_NAME, NEW_NAME)
def undo_aspredicted_rename(*args, **kwargs):
return rename_schema(NEW_NAME, OLD_NAME)
class Migration(migrations.Migration):
dependencies = [
('osf', '0138_merge_20181012_1944'),
]
operations = [
migrations.RunPython(rename_aspredicted_schema, undo_aspredicted_rename)
]
Migrate with model from app state
- Not from app code# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-10-16 20:22
from __future__ import unicode_literals
from django.db import migrations
OLD_NAME = 'AsPredicted Preregistration'
NEW_NAME = 'Preregistration Template from AsPredicted.org'
def rename_schema(model, from_name, to_name):
try:
schema = model.objects.get(name=from_name)
except model.DoesNotExist:
return
schema.name = to_name
schema.schema['name'] = to_name
schema.schema['title'] = to_name
schema.schema['pages'][0]['title'] = to_name
return schema.save()
def rename_aspredicted_schema(state, schema):
RegistrationSchema = state.get_model('osf.registrationschema')
return rename_schema(RegistrationSchema, OLD_NAME, NEW_NAME)
def undo_aspredicted_rename(state, schema):
RegistrationSchema = state.get_model('osf.registrationschema')
return rename_schema(RegistrationSchema, NEW_NAME, OLD_NAME)
class Migration(migrations.Migration):
dependencies = [
('osf', '0138_merge_20181012_1944'),
]
operations = [
migrations.RunPython(rename_aspredicted_schema, undo_aspredicted_rename)
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-10-16 20:22
from __future__ import unicode_literals
from django.db import migrations
from osf.models import RegistrationSchema
OLD_NAME = 'AsPredicted Preregistration'
NEW_NAME = 'Preregistration Template from AsPredicted.org'
def rename_schema(from_name, to_name):
try:
schema = RegistrationSchema.objects.get(name=from_name)
except RegistrationSchema.DoesNotExist:
return
schema.name = to_name
schema.schema['name'] = to_name
schema.schema['title'] = to_name
schema.schema['pages'][0]['title'] = to_name
return schema.save()
def rename_aspredicted_schema(*args, **kwargs):
return rename_schema(OLD_NAME, NEW_NAME)
def undo_aspredicted_rename(*args, **kwargs):
return rename_schema(NEW_NAME, OLD_NAME)
class Migration(migrations.Migration):
dependencies = [
('osf', '0138_merge_20181012_1944'),
]
operations = [
migrations.RunPython(rename_aspredicted_schema, undo_aspredicted_rename)
]
<commit_msg>Migrate with model from app state
- Not from app code<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-10-16 20:22
from __future__ import unicode_literals
from django.db import migrations
OLD_NAME = 'AsPredicted Preregistration'
NEW_NAME = 'Preregistration Template from AsPredicted.org'
def rename_schema(model, from_name, to_name):
try:
schema = model.objects.get(name=from_name)
except model.DoesNotExist:
return
schema.name = to_name
schema.schema['name'] = to_name
schema.schema['title'] = to_name
schema.schema['pages'][0]['title'] = to_name
return schema.save()
def rename_aspredicted_schema(state, schema):
RegistrationSchema = state.get_model('osf.registrationschema')
return rename_schema(RegistrationSchema, OLD_NAME, NEW_NAME)
def undo_aspredicted_rename(state, schema):
RegistrationSchema = state.get_model('osf.registrationschema')
return rename_schema(RegistrationSchema, NEW_NAME, OLD_NAME)
class Migration(migrations.Migration):
dependencies = [
('osf', '0138_merge_20181012_1944'),
]
operations = [
migrations.RunPython(rename_aspredicted_schema, undo_aspredicted_rename)
]
|
a6e14ac538a40fd98db16a98938acfb6a811dc06
|
fabfile.py
|
fabfile.py
|
from fabric.api import (
cd,
env,
put,
sudo,
task
)
PRODUCTION_IP = ''
PROJECT_DIRECTORY = '/home/ubuntu/ztm/'
COMPOSE_FILE = 'compose-production.yml'
@task
def production():
env.run = sudo
env.hosts = [
'ubuntu@' + PRODUCTION_IP + ':22',
]
def create_project_directory():
env.run('mkdir -p ' + PROJECT_DIRECTORY)
def update_compose_file():
put('./' + COMPOSE_FILE, PROJECT_DIRECTORY)
@task
def deploy():
create_project_directory()
update_compose_file()
with cd(PROJECT_DIRECTORY):
env.run('docker-compose -f ' + COMPOSE_FILE + ' pull')
env.run('docker-compose -f ' + COMPOSE_FILE + ' up -d')
|
from fabric.api import (
cd,
env,
put,
run,
sudo,
task
)
PRODUCTION_IP = '54.154.235.243'
PROJECT_DIRECTORY = '/home/ubuntu/ztm/'
COMPOSE_FILE = 'compose-production.yml'
@task
def production():
env.run = sudo
env.hosts = [
'ubuntu@' + PRODUCTION_IP + ':22',
]
def create_project_directory():
run('mkdir -p ' + PROJECT_DIRECTORY)
def update_compose_file():
put('./' + COMPOSE_FILE, PROJECT_DIRECTORY)
@task
def deploy():
create_project_directory()
update_compose_file()
with cd(PROJECT_DIRECTORY):
env.run('docker-compose -f ' + COMPOSE_FILE + ' pull')
env.run('docker-compose -f ' + COMPOSE_FILE + ' up -d')
|
Fix production ID and run command without sudo
|
Fix production ID and run command without sudo
|
Python
|
mit
|
prontotools/zendesk-tickets-machine,prontotools/zendesk-tickets-machine,prontotools/zendesk-tickets-machine,prontotools/zendesk-tickets-machine
|
from fabric.api import (
cd,
env,
put,
sudo,
task
)
PRODUCTION_IP = ''
PROJECT_DIRECTORY = '/home/ubuntu/ztm/'
COMPOSE_FILE = 'compose-production.yml'
@task
def production():
env.run = sudo
env.hosts = [
'ubuntu@' + PRODUCTION_IP + ':22',
]
def create_project_directory():
env.run('mkdir -p ' + PROJECT_DIRECTORY)
def update_compose_file():
put('./' + COMPOSE_FILE, PROJECT_DIRECTORY)
@task
def deploy():
create_project_directory()
update_compose_file()
with cd(PROJECT_DIRECTORY):
env.run('docker-compose -f ' + COMPOSE_FILE + ' pull')
env.run('docker-compose -f ' + COMPOSE_FILE + ' up -d')
Fix production ID and run command without sudo
|
from fabric.api import (
cd,
env,
put,
run,
sudo,
task
)
PRODUCTION_IP = '54.154.235.243'
PROJECT_DIRECTORY = '/home/ubuntu/ztm/'
COMPOSE_FILE = 'compose-production.yml'
@task
def production():
env.run = sudo
env.hosts = [
'ubuntu@' + PRODUCTION_IP + ':22',
]
def create_project_directory():
run('mkdir -p ' + PROJECT_DIRECTORY)
def update_compose_file():
put('./' + COMPOSE_FILE, PROJECT_DIRECTORY)
@task
def deploy():
create_project_directory()
update_compose_file()
with cd(PROJECT_DIRECTORY):
env.run('docker-compose -f ' + COMPOSE_FILE + ' pull')
env.run('docker-compose -f ' + COMPOSE_FILE + ' up -d')
|
<commit_before>from fabric.api import (
cd,
env,
put,
sudo,
task
)
PRODUCTION_IP = ''
PROJECT_DIRECTORY = '/home/ubuntu/ztm/'
COMPOSE_FILE = 'compose-production.yml'
@task
def production():
env.run = sudo
env.hosts = [
'ubuntu@' + PRODUCTION_IP + ':22',
]
def create_project_directory():
env.run('mkdir -p ' + PROJECT_DIRECTORY)
def update_compose_file():
put('./' + COMPOSE_FILE, PROJECT_DIRECTORY)
@task
def deploy():
create_project_directory()
update_compose_file()
with cd(PROJECT_DIRECTORY):
env.run('docker-compose -f ' + COMPOSE_FILE + ' pull')
env.run('docker-compose -f ' + COMPOSE_FILE + ' up -d')
<commit_msg>Fix production ID and run command without sudo<commit_after>
|
from fabric.api import (
cd,
env,
put,
run,
sudo,
task
)
PRODUCTION_IP = '54.154.235.243'
PROJECT_DIRECTORY = '/home/ubuntu/ztm/'
COMPOSE_FILE = 'compose-production.yml'
@task
def production():
env.run = sudo
env.hosts = [
'ubuntu@' + PRODUCTION_IP + ':22',
]
def create_project_directory():
run('mkdir -p ' + PROJECT_DIRECTORY)
def update_compose_file():
put('./' + COMPOSE_FILE, PROJECT_DIRECTORY)
@task
def deploy():
create_project_directory()
update_compose_file()
with cd(PROJECT_DIRECTORY):
env.run('docker-compose -f ' + COMPOSE_FILE + ' pull')
env.run('docker-compose -f ' + COMPOSE_FILE + ' up -d')
|
from fabric.api import (
cd,
env,
put,
sudo,
task
)
PRODUCTION_IP = ''
PROJECT_DIRECTORY = '/home/ubuntu/ztm/'
COMPOSE_FILE = 'compose-production.yml'
@task
def production():
env.run = sudo
env.hosts = [
'ubuntu@' + PRODUCTION_IP + ':22',
]
def create_project_directory():
env.run('mkdir -p ' + PROJECT_DIRECTORY)
def update_compose_file():
put('./' + COMPOSE_FILE, PROJECT_DIRECTORY)
@task
def deploy():
create_project_directory()
update_compose_file()
with cd(PROJECT_DIRECTORY):
env.run('docker-compose -f ' + COMPOSE_FILE + ' pull')
env.run('docker-compose -f ' + COMPOSE_FILE + ' up -d')
Fix production ID and run command without sudofrom fabric.api import (
cd,
env,
put,
run,
sudo,
task
)
PRODUCTION_IP = '54.154.235.243'
PROJECT_DIRECTORY = '/home/ubuntu/ztm/'
COMPOSE_FILE = 'compose-production.yml'
@task
def production():
env.run = sudo
env.hosts = [
'ubuntu@' + PRODUCTION_IP + ':22',
]
def create_project_directory():
run('mkdir -p ' + PROJECT_DIRECTORY)
def update_compose_file():
put('./' + COMPOSE_FILE, PROJECT_DIRECTORY)
@task
def deploy():
create_project_directory()
update_compose_file()
with cd(PROJECT_DIRECTORY):
env.run('docker-compose -f ' + COMPOSE_FILE + ' pull')
env.run('docker-compose -f ' + COMPOSE_FILE + ' up -d')
|
<commit_before>from fabric.api import (
cd,
env,
put,
sudo,
task
)
PRODUCTION_IP = ''
PROJECT_DIRECTORY = '/home/ubuntu/ztm/'
COMPOSE_FILE = 'compose-production.yml'
@task
def production():
env.run = sudo
env.hosts = [
'ubuntu@' + PRODUCTION_IP + ':22',
]
def create_project_directory():
env.run('mkdir -p ' + PROJECT_DIRECTORY)
def update_compose_file():
put('./' + COMPOSE_FILE, PROJECT_DIRECTORY)
@task
def deploy():
create_project_directory()
update_compose_file()
with cd(PROJECT_DIRECTORY):
env.run('docker-compose -f ' + COMPOSE_FILE + ' pull')
env.run('docker-compose -f ' + COMPOSE_FILE + ' up -d')
<commit_msg>Fix production ID and run command without sudo<commit_after>from fabric.api import (
cd,
env,
put,
run,
sudo,
task
)
PRODUCTION_IP = '54.154.235.243'
PROJECT_DIRECTORY = '/home/ubuntu/ztm/'
COMPOSE_FILE = 'compose-production.yml'
@task
def production():
env.run = sudo
env.hosts = [
'ubuntu@' + PRODUCTION_IP + ':22',
]
def create_project_directory():
run('mkdir -p ' + PROJECT_DIRECTORY)
def update_compose_file():
put('./' + COMPOSE_FILE, PROJECT_DIRECTORY)
@task
def deploy():
create_project_directory()
update_compose_file()
with cd(PROJECT_DIRECTORY):
env.run('docker-compose -f ' + COMPOSE_FILE + ' pull')
env.run('docker-compose -f ' + COMPOSE_FILE + ' up -d')
|
82bd8ced41e2aa4a5f0ca9384c902f7e36fd1005
|
fabfile.py
|
fabfile.py
|
import os
from fabric.api import task, run, local, sudo, cd, env
env.hosts = [
os.environ['TWWEB_HOST'],
]
def virtualenv(command, user=None):
run('source /var/www/envs/twweb/bin/activate && ' + command)
@task
def deploy():
local('git push origin development')
local('git checkout master')
local('git merge development')
local('git push origin master')
with cd('/var/www/twweb'):
run('git fetch origin')
run('git merge origin/master')
run('bower install')
run('npm install')
run('ember build --environment=production')
virtualenv('pip install -r /var/www/twweb/requirements.txt')
virtualenv('python manage.py collectstatic --noinput')
virtualenv('python manage.py migrate')
sudo('/bin/chown -R www-data:www-data /var/www/twweb/logs/', shell=False)
sudo('/usr/sbin/service twweb restart', shell=False)
sudo('/usr/sbin/service twweb-status restart', shell=False)
sudo('/usr/sbin/service twweb-celery restart', shell=False)
sudo('/usr/sbin/service twweb-sync-listener restart', shell=False)
sudo('/usr/sbin/service twweb-log-consumer restart', shell=False)
sudo('/bin/chown -R www-data:www-data /var/www/twweb/logs/', shell=False)
local('git checkout development')
|
import os
from fabric.api import task, run, local, sudo, cd, env
env.hosts = [
os.environ['TWWEB_HOST'],
]
def virtualenv(command, user=None):
run('source /var/www/envs/twweb/bin/activate && ' + command)
@task
def deploy():
local('git push origin development')
local('git checkout master')
local('git merge development')
local('git push origin master')
with cd('/var/www/twweb'):
run('git fetch origin')
run('git merge origin/master')
run('bower install')
run('npm install')
run('ember build --environment=production')
virtualenv('pip install -r /var/www/twweb/requirements-frozen.txt')
virtualenv('python manage.py collectstatic --noinput')
virtualenv('python manage.py migrate')
sudo('/bin/chown -R www-data:www-data /var/www/twweb/logs/', shell=False)
sudo('/usr/sbin/service twweb restart', shell=False)
sudo('/usr/sbin/service twweb-status restart', shell=False)
sudo('/usr/sbin/service twweb-celery restart', shell=False)
sudo('/usr/sbin/service twweb-sync-listener restart', shell=False)
sudo('/usr/sbin/service twweb-log-consumer restart', shell=False)
sudo('/bin/chown -R www-data:www-data /var/www/twweb/logs/', shell=False)
local('git checkout development')
|
Update deployment process to use frozen requirements.
|
Update deployment process to use frozen requirements.
|
Python
|
agpl-3.0
|
coddingtonbear/inthe.am,coddingtonbear/inthe.am,coddingtonbear/inthe.am,coddingtonbear/inthe.am,coddingtonbear/inthe.am
|
import os
from fabric.api import task, run, local, sudo, cd, env
env.hosts = [
os.environ['TWWEB_HOST'],
]
def virtualenv(command, user=None):
run('source /var/www/envs/twweb/bin/activate && ' + command)
@task
def deploy():
local('git push origin development')
local('git checkout master')
local('git merge development')
local('git push origin master')
with cd('/var/www/twweb'):
run('git fetch origin')
run('git merge origin/master')
run('bower install')
run('npm install')
run('ember build --environment=production')
virtualenv('pip install -r /var/www/twweb/requirements.txt')
virtualenv('python manage.py collectstatic --noinput')
virtualenv('python manage.py migrate')
sudo('/bin/chown -R www-data:www-data /var/www/twweb/logs/', shell=False)
sudo('/usr/sbin/service twweb restart', shell=False)
sudo('/usr/sbin/service twweb-status restart', shell=False)
sudo('/usr/sbin/service twweb-celery restart', shell=False)
sudo('/usr/sbin/service twweb-sync-listener restart', shell=False)
sudo('/usr/sbin/service twweb-log-consumer restart', shell=False)
sudo('/bin/chown -R www-data:www-data /var/www/twweb/logs/', shell=False)
local('git checkout development')
Update deployment process to use frozen requirements.
|
import os
from fabric.api import task, run, local, sudo, cd, env
env.hosts = [
os.environ['TWWEB_HOST'],
]
def virtualenv(command, user=None):
run('source /var/www/envs/twweb/bin/activate && ' + command)
@task
def deploy():
local('git push origin development')
local('git checkout master')
local('git merge development')
local('git push origin master')
with cd('/var/www/twweb'):
run('git fetch origin')
run('git merge origin/master')
run('bower install')
run('npm install')
run('ember build --environment=production')
virtualenv('pip install -r /var/www/twweb/requirements-frozen.txt')
virtualenv('python manage.py collectstatic --noinput')
virtualenv('python manage.py migrate')
sudo('/bin/chown -R www-data:www-data /var/www/twweb/logs/', shell=False)
sudo('/usr/sbin/service twweb restart', shell=False)
sudo('/usr/sbin/service twweb-status restart', shell=False)
sudo('/usr/sbin/service twweb-celery restart', shell=False)
sudo('/usr/sbin/service twweb-sync-listener restart', shell=False)
sudo('/usr/sbin/service twweb-log-consumer restart', shell=False)
sudo('/bin/chown -R www-data:www-data /var/www/twweb/logs/', shell=False)
local('git checkout development')
|
<commit_before>import os
from fabric.api import task, run, local, sudo, cd, env
env.hosts = [
os.environ['TWWEB_HOST'],
]
def virtualenv(command, user=None):
run('source /var/www/envs/twweb/bin/activate && ' + command)
@task
def deploy():
local('git push origin development')
local('git checkout master')
local('git merge development')
local('git push origin master')
with cd('/var/www/twweb'):
run('git fetch origin')
run('git merge origin/master')
run('bower install')
run('npm install')
run('ember build --environment=production')
virtualenv('pip install -r /var/www/twweb/requirements.txt')
virtualenv('python manage.py collectstatic --noinput')
virtualenv('python manage.py migrate')
sudo('/bin/chown -R www-data:www-data /var/www/twweb/logs/', shell=False)
sudo('/usr/sbin/service twweb restart', shell=False)
sudo('/usr/sbin/service twweb-status restart', shell=False)
sudo('/usr/sbin/service twweb-celery restart', shell=False)
sudo('/usr/sbin/service twweb-sync-listener restart', shell=False)
sudo('/usr/sbin/service twweb-log-consumer restart', shell=False)
sudo('/bin/chown -R www-data:www-data /var/www/twweb/logs/', shell=False)
local('git checkout development')
<commit_msg>Update deployment process to use frozen requirements.<commit_after>
|
import os
from fabric.api import task, run, local, sudo, cd, env
env.hosts = [
os.environ['TWWEB_HOST'],
]
def virtualenv(command, user=None):
run('source /var/www/envs/twweb/bin/activate && ' + command)
@task
def deploy():
local('git push origin development')
local('git checkout master')
local('git merge development')
local('git push origin master')
with cd('/var/www/twweb'):
run('git fetch origin')
run('git merge origin/master')
run('bower install')
run('npm install')
run('ember build --environment=production')
virtualenv('pip install -r /var/www/twweb/requirements-frozen.txt')
virtualenv('python manage.py collectstatic --noinput')
virtualenv('python manage.py migrate')
sudo('/bin/chown -R www-data:www-data /var/www/twweb/logs/', shell=False)
sudo('/usr/sbin/service twweb restart', shell=False)
sudo('/usr/sbin/service twweb-status restart', shell=False)
sudo('/usr/sbin/service twweb-celery restart', shell=False)
sudo('/usr/sbin/service twweb-sync-listener restart', shell=False)
sudo('/usr/sbin/service twweb-log-consumer restart', shell=False)
sudo('/bin/chown -R www-data:www-data /var/www/twweb/logs/', shell=False)
local('git checkout development')
|
import os
from fabric.api import task, run, local, sudo, cd, env
env.hosts = [
os.environ['TWWEB_HOST'],
]
def virtualenv(command, user=None):
run('source /var/www/envs/twweb/bin/activate && ' + command)
@task
def deploy():
local('git push origin development')
local('git checkout master')
local('git merge development')
local('git push origin master')
with cd('/var/www/twweb'):
run('git fetch origin')
run('git merge origin/master')
run('bower install')
run('npm install')
run('ember build --environment=production')
virtualenv('pip install -r /var/www/twweb/requirements.txt')
virtualenv('python manage.py collectstatic --noinput')
virtualenv('python manage.py migrate')
sudo('/bin/chown -R www-data:www-data /var/www/twweb/logs/', shell=False)
sudo('/usr/sbin/service twweb restart', shell=False)
sudo('/usr/sbin/service twweb-status restart', shell=False)
sudo('/usr/sbin/service twweb-celery restart', shell=False)
sudo('/usr/sbin/service twweb-sync-listener restart', shell=False)
sudo('/usr/sbin/service twweb-log-consumer restart', shell=False)
sudo('/bin/chown -R www-data:www-data /var/www/twweb/logs/', shell=False)
local('git checkout development')
Update deployment process to use frozen requirements.import os
from fabric.api import task, run, local, sudo, cd, env
env.hosts = [
os.environ['TWWEB_HOST'],
]
def virtualenv(command, user=None):
run('source /var/www/envs/twweb/bin/activate && ' + command)
@task
def deploy():
local('git push origin development')
local('git checkout master')
local('git merge development')
local('git push origin master')
with cd('/var/www/twweb'):
run('git fetch origin')
run('git merge origin/master')
run('bower install')
run('npm install')
run('ember build --environment=production')
virtualenv('pip install -r /var/www/twweb/requirements-frozen.txt')
virtualenv('python manage.py collectstatic --noinput')
virtualenv('python manage.py migrate')
sudo('/bin/chown -R www-data:www-data /var/www/twweb/logs/', shell=False)
sudo('/usr/sbin/service twweb restart', shell=False)
sudo('/usr/sbin/service twweb-status restart', shell=False)
sudo('/usr/sbin/service twweb-celery restart', shell=False)
sudo('/usr/sbin/service twweb-sync-listener restart', shell=False)
sudo('/usr/sbin/service twweb-log-consumer restart', shell=False)
sudo('/bin/chown -R www-data:www-data /var/www/twweb/logs/', shell=False)
local('git checkout development')
|
<commit_before>import os
from fabric.api import task, run, local, sudo, cd, env
env.hosts = [
os.environ['TWWEB_HOST'],
]
def virtualenv(command, user=None):
run('source /var/www/envs/twweb/bin/activate && ' + command)
@task
def deploy():
local('git push origin development')
local('git checkout master')
local('git merge development')
local('git push origin master')
with cd('/var/www/twweb'):
run('git fetch origin')
run('git merge origin/master')
run('bower install')
run('npm install')
run('ember build --environment=production')
virtualenv('pip install -r /var/www/twweb/requirements.txt')
virtualenv('python manage.py collectstatic --noinput')
virtualenv('python manage.py migrate')
sudo('/bin/chown -R www-data:www-data /var/www/twweb/logs/', shell=False)
sudo('/usr/sbin/service twweb restart', shell=False)
sudo('/usr/sbin/service twweb-status restart', shell=False)
sudo('/usr/sbin/service twweb-celery restart', shell=False)
sudo('/usr/sbin/service twweb-sync-listener restart', shell=False)
sudo('/usr/sbin/service twweb-log-consumer restart', shell=False)
sudo('/bin/chown -R www-data:www-data /var/www/twweb/logs/', shell=False)
local('git checkout development')
<commit_msg>Update deployment process to use frozen requirements.<commit_after>import os
from fabric.api import task, run, local, sudo, cd, env
env.hosts = [
os.environ['TWWEB_HOST'],
]
def virtualenv(command, user=None):
run('source /var/www/envs/twweb/bin/activate && ' + command)
@task
def deploy():
local('git push origin development')
local('git checkout master')
local('git merge development')
local('git push origin master')
with cd('/var/www/twweb'):
run('git fetch origin')
run('git merge origin/master')
run('bower install')
run('npm install')
run('ember build --environment=production')
virtualenv('pip install -r /var/www/twweb/requirements-frozen.txt')
virtualenv('python manage.py collectstatic --noinput')
virtualenv('python manage.py migrate')
sudo('/bin/chown -R www-data:www-data /var/www/twweb/logs/', shell=False)
sudo('/usr/sbin/service twweb restart', shell=False)
sudo('/usr/sbin/service twweb-status restart', shell=False)
sudo('/usr/sbin/service twweb-celery restart', shell=False)
sudo('/usr/sbin/service twweb-sync-listener restart', shell=False)
sudo('/usr/sbin/service twweb-log-consumer restart', shell=False)
sudo('/bin/chown -R www-data:www-data /var/www/twweb/logs/', shell=False)
local('git checkout development')
|
f5143ccb206e5b077f0a80c88555e57064b6acab
|
fabfile.py
|
fabfile.py
|
from fabric.api import *
env.hosts = [
'192.168.1.144'
]
env.user = 'pi'
def prepare_raspberry_pi():
pass
def remote_pull():
with cd('virtualenvs/queen/queen'):
run('git pull')
def deploy():
local('git commit -a')
local('git push origin')
remote_pull()
|
from fabric.api import *
env.hosts = [
'192.168.1.144'
]
env.user = 'pi'
def prepare_raspberry_pi():
pass
def remote_pull():
with cd('virtualenvs/queen/queen'):
run('git pull')
def commit():
local('git commit -a')
def push():
local('git push origin')
def deploy():
commit()
push()
remote_pull()
|
Add fab commands to push and pull
|
Add fab commands to push and pull
|
Python
|
mit
|
kalail/queen,kalail/queen
|
from fabric.api import *
env.hosts = [
'192.168.1.144'
]
env.user = 'pi'
def prepare_raspberry_pi():
pass
def remote_pull():
with cd('virtualenvs/queen/queen'):
run('git pull')
def deploy():
local('git commit -a')
local('git push origin')
remote_pull()
Add fab commands to push and pull
|
from fabric.api import *
env.hosts = [
'192.168.1.144'
]
env.user = 'pi'
def prepare_raspberry_pi():
pass
def remote_pull():
with cd('virtualenvs/queen/queen'):
run('git pull')
def commit():
local('git commit -a')
def push():
local('git push origin')
def deploy():
commit()
push()
remote_pull()
|
<commit_before>from fabric.api import *
env.hosts = [
'192.168.1.144'
]
env.user = 'pi'
def prepare_raspberry_pi():
pass
def remote_pull():
with cd('virtualenvs/queen/queen'):
run('git pull')
def deploy():
local('git commit -a')
local('git push origin')
remote_pull()
<commit_msg>Add fab commands to push and pull<commit_after>
|
from fabric.api import *
env.hosts = [
'192.168.1.144'
]
env.user = 'pi'
def prepare_raspberry_pi():
pass
def remote_pull():
with cd('virtualenvs/queen/queen'):
run('git pull')
def commit():
local('git commit -a')
def push():
local('git push origin')
def deploy():
commit()
push()
remote_pull()
|
from fabric.api import *
env.hosts = [
'192.168.1.144'
]
env.user = 'pi'
def prepare_raspberry_pi():
pass
def remote_pull():
with cd('virtualenvs/queen/queen'):
run('git pull')
def deploy():
local('git commit -a')
local('git push origin')
remote_pull()
Add fab commands to push and pullfrom fabric.api import *
env.hosts = [
'192.168.1.144'
]
env.user = 'pi'
def prepare_raspberry_pi():
pass
def remote_pull():
with cd('virtualenvs/queen/queen'):
run('git pull')
def commit():
local('git commit -a')
def push():
local('git push origin')
def deploy():
commit()
push()
remote_pull()
|
<commit_before>from fabric.api import *
env.hosts = [
'192.168.1.144'
]
env.user = 'pi'
def prepare_raspberry_pi():
pass
def remote_pull():
with cd('virtualenvs/queen/queen'):
run('git pull')
def deploy():
local('git commit -a')
local('git push origin')
remote_pull()
<commit_msg>Add fab commands to push and pull<commit_after>from fabric.api import *
env.hosts = [
'192.168.1.144'
]
env.user = 'pi'
def prepare_raspberry_pi():
pass
def remote_pull():
with cd('virtualenvs/queen/queen'):
run('git pull')
def commit():
local('git commit -a')
def push():
local('git push origin')
def deploy():
commit()
push()
remote_pull()
|
d8db2744d45e4478b8a2c9a5f0fa76cda33c9964
|
opbeat/instrumentation/packages/jinja2.py
|
opbeat/instrumentation/packages/jinja2.py
|
from opbeat.instrumentation.packages.base import AbstractInstrumentedModule
class Jinja2Instrumentation(AbstractInstrumentedModule):
name = 'pylibmc'
instrument_list = [
("jinja2", "Template.render"),
]
def call(self, module, method, wrapped, instance, args, kwargs):
signature = instance.name or instance.filename
with self.client.capture_trace(signature, "template.jinja2"):
return wrapped(*args, **kwargs)
|
from opbeat.instrumentation.packages.base import AbstractInstrumentedModule
class Jinja2Instrumentation(AbstractInstrumentedModule):
name = 'jinja2'
instrument_list = [
("jinja2", "Template.render"),
]
def call(self, module, method, wrapped, instance, args, kwargs):
signature = instance.name or instance.filename
with self.client.capture_trace(signature, "template.jinja2"):
return wrapped(*args, **kwargs)
|
Use the correct "name" for Jinja2 instrumentation.
|
Use the correct "name" for Jinja2 instrumentation.
|
Python
|
bsd-3-clause
|
ticosax/opbeat_python,beniwohli/apm-agent-python,dirtycoder/opbeat_python,beniwohli/apm-agent-python,patrys/opbeat_python,beniwohli/apm-agent-python,ticosax/opbeat_python,dirtycoder/opbeat_python,ticosax/opbeat_python,patrys/opbeat_python,tarkatronic/opbeat_python,tarkatronic/opbeat_python,patrys/opbeat_python,daikeren/opbeat_python,beniwohli/apm-agent-python,daikeren/opbeat_python,patrys/opbeat_python,dirtycoder/opbeat_python,daikeren/opbeat_python,tarkatronic/opbeat_python
|
from opbeat.instrumentation.packages.base import AbstractInstrumentedModule
class Jinja2Instrumentation(AbstractInstrumentedModule):
name = 'pylibmc'
instrument_list = [
("jinja2", "Template.render"),
]
def call(self, module, method, wrapped, instance, args, kwargs):
signature = instance.name or instance.filename
with self.client.capture_trace(signature, "template.jinja2"):
return wrapped(*args, **kwargs)
Use the correct "name" for Jinja2 instrumentation.
|
from opbeat.instrumentation.packages.base import AbstractInstrumentedModule
class Jinja2Instrumentation(AbstractInstrumentedModule):
name = 'jinja2'
instrument_list = [
("jinja2", "Template.render"),
]
def call(self, module, method, wrapped, instance, args, kwargs):
signature = instance.name or instance.filename
with self.client.capture_trace(signature, "template.jinja2"):
return wrapped(*args, **kwargs)
|
<commit_before>from opbeat.instrumentation.packages.base import AbstractInstrumentedModule
class Jinja2Instrumentation(AbstractInstrumentedModule):
name = 'pylibmc'
instrument_list = [
("jinja2", "Template.render"),
]
def call(self, module, method, wrapped, instance, args, kwargs):
signature = instance.name or instance.filename
with self.client.capture_trace(signature, "template.jinja2"):
return wrapped(*args, **kwargs)
<commit_msg>Use the correct "name" for Jinja2 instrumentation.<commit_after>
|
from opbeat.instrumentation.packages.base import AbstractInstrumentedModule
class Jinja2Instrumentation(AbstractInstrumentedModule):
name = 'jinja2'
instrument_list = [
("jinja2", "Template.render"),
]
def call(self, module, method, wrapped, instance, args, kwargs):
signature = instance.name or instance.filename
with self.client.capture_trace(signature, "template.jinja2"):
return wrapped(*args, **kwargs)
|
from opbeat.instrumentation.packages.base import AbstractInstrumentedModule
class Jinja2Instrumentation(AbstractInstrumentedModule):
name = 'pylibmc'
instrument_list = [
("jinja2", "Template.render"),
]
def call(self, module, method, wrapped, instance, args, kwargs):
signature = instance.name or instance.filename
with self.client.capture_trace(signature, "template.jinja2"):
return wrapped(*args, **kwargs)
Use the correct "name" for Jinja2 instrumentation.from opbeat.instrumentation.packages.base import AbstractInstrumentedModule
class Jinja2Instrumentation(AbstractInstrumentedModule):
name = 'jinja2'
instrument_list = [
("jinja2", "Template.render"),
]
def call(self, module, method, wrapped, instance, args, kwargs):
signature = instance.name or instance.filename
with self.client.capture_trace(signature, "template.jinja2"):
return wrapped(*args, **kwargs)
|
<commit_before>from opbeat.instrumentation.packages.base import AbstractInstrumentedModule
class Jinja2Instrumentation(AbstractInstrumentedModule):
name = 'pylibmc'
instrument_list = [
("jinja2", "Template.render"),
]
def call(self, module, method, wrapped, instance, args, kwargs):
signature = instance.name or instance.filename
with self.client.capture_trace(signature, "template.jinja2"):
return wrapped(*args, **kwargs)
<commit_msg>Use the correct "name" for Jinja2 instrumentation.<commit_after>from opbeat.instrumentation.packages.base import AbstractInstrumentedModule
class Jinja2Instrumentation(AbstractInstrumentedModule):
name = 'jinja2'
instrument_list = [
("jinja2", "Template.render"),
]
def call(self, module, method, wrapped, instance, args, kwargs):
signature = instance.name or instance.filename
with self.client.capture_trace(signature, "template.jinja2"):
return wrapped(*args, **kwargs)
|
8b3132f9aec26d71498a153a29ea8d2049f07da6
|
studygroups/admin.py
|
studygroups/admin.py
|
from django.contrib import admin
# Register your models here.
from studygroups.models import Course, StudyGroup, StudyGroupSignup, Application
class CourseAdmin(admin.ModelAdmin):
pass
class StudyGroupSignupInline(admin.TabularInline):
model = StudyGroupSignup
class StudyGroupAdmin(admin.ModelAdmin):
inlines = [StudyGroupSignupInline]
class StudyGroupSignupAdmin(admin.ModelAdmin):
pass
class ApplicationAdmin(admin.ModelAdmin):
pass
admin.site.register(Course, CourseAdmin)
admin.site.register(Application, ApplicationAdmin)
admin.site.register(StudyGroup, StudyGroupAdmin)
admin.site.register(StudyGroupSignup, StudyGroupSignupAdmin)
|
from django.contrib import admin
# Register your models here.
from studygroups.models import Course, StudyGroup, StudyGroupSignup, Application
class StudyGroupSignupInline(admin.TabularInline):
model = StudyGroupSignup
class ApplicationInline(admin.TabularInline):
model = Application.study_groups.through
readonly_fields = ['user_name']
def user_name(self, instance):
return instance.application.name
user_name.short_description = 'user name'
class StudyGroupAdmin(admin.ModelAdmin):
inlines = [
#StudyGroupSignupInline,
ApplicationInline,
]
class StudyGroupSignupAdmin(admin.ModelAdmin):
pass
class CourseAdmin(admin.ModelAdmin):
pass
class ApplicationAdmin(admin.ModelAdmin):
list_display = ('name', 'contact_method', 'created_at')
admin.site.register(Course, CourseAdmin)
admin.site.register(Application, ApplicationAdmin)
admin.site.register(StudyGroup, StudyGroupAdmin)
admin.site.register(StudyGroupSignup, StudyGroupSignupAdmin)
|
Add applications to study groups
|
Add applications to study groups
|
Python
|
mit
|
p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles
|
from django.contrib import admin
# Register your models here.
from studygroups.models import Course, StudyGroup, StudyGroupSignup, Application
class CourseAdmin(admin.ModelAdmin):
pass
class StudyGroupSignupInline(admin.TabularInline):
model = StudyGroupSignup
class StudyGroupAdmin(admin.ModelAdmin):
inlines = [StudyGroupSignupInline]
class StudyGroupSignupAdmin(admin.ModelAdmin):
pass
class ApplicationAdmin(admin.ModelAdmin):
pass
admin.site.register(Course, CourseAdmin)
admin.site.register(Application, ApplicationAdmin)
admin.site.register(StudyGroup, StudyGroupAdmin)
admin.site.register(StudyGroupSignup, StudyGroupSignupAdmin)
Add applications to study groups
|
from django.contrib import admin
# Register your models here.
from studygroups.models import Course, StudyGroup, StudyGroupSignup, Application
class StudyGroupSignupInline(admin.TabularInline):
model = StudyGroupSignup
class ApplicationInline(admin.TabularInline):
model = Application.study_groups.through
readonly_fields = ['user_name']
def user_name(self, instance):
return instance.application.name
user_name.short_description = 'user name'
class StudyGroupAdmin(admin.ModelAdmin):
inlines = [
#StudyGroupSignupInline,
ApplicationInline,
]
class StudyGroupSignupAdmin(admin.ModelAdmin):
pass
class CourseAdmin(admin.ModelAdmin):
pass
class ApplicationAdmin(admin.ModelAdmin):
list_display = ('name', 'contact_method', 'created_at')
admin.site.register(Course, CourseAdmin)
admin.site.register(Application, ApplicationAdmin)
admin.site.register(StudyGroup, StudyGroupAdmin)
admin.site.register(StudyGroupSignup, StudyGroupSignupAdmin)
|
<commit_before>from django.contrib import admin
# Register your models here.
from studygroups.models import Course, StudyGroup, StudyGroupSignup, Application
class CourseAdmin(admin.ModelAdmin):
pass
class StudyGroupSignupInline(admin.TabularInline):
model = StudyGroupSignup
class StudyGroupAdmin(admin.ModelAdmin):
inlines = [StudyGroupSignupInline]
class StudyGroupSignupAdmin(admin.ModelAdmin):
pass
class ApplicationAdmin(admin.ModelAdmin):
pass
admin.site.register(Course, CourseAdmin)
admin.site.register(Application, ApplicationAdmin)
admin.site.register(StudyGroup, StudyGroupAdmin)
admin.site.register(StudyGroupSignup, StudyGroupSignupAdmin)
<commit_msg>Add applications to study groups<commit_after>
|
from django.contrib import admin
# Register your models here.
from studygroups.models import Course, StudyGroup, StudyGroupSignup, Application
class StudyGroupSignupInline(admin.TabularInline):
model = StudyGroupSignup
class ApplicationInline(admin.TabularInline):
model = Application.study_groups.through
readonly_fields = ['user_name']
def user_name(self, instance):
return instance.application.name
user_name.short_description = 'user name'
class StudyGroupAdmin(admin.ModelAdmin):
inlines = [
#StudyGroupSignupInline,
ApplicationInline,
]
class StudyGroupSignupAdmin(admin.ModelAdmin):
pass
class CourseAdmin(admin.ModelAdmin):
pass
class ApplicationAdmin(admin.ModelAdmin):
list_display = ('name', 'contact_method', 'created_at')
admin.site.register(Course, CourseAdmin)
admin.site.register(Application, ApplicationAdmin)
admin.site.register(StudyGroup, StudyGroupAdmin)
admin.site.register(StudyGroupSignup, StudyGroupSignupAdmin)
|
from django.contrib import admin
# Register your models here.
from studygroups.models import Course, StudyGroup, StudyGroupSignup, Application
class CourseAdmin(admin.ModelAdmin):
pass
class StudyGroupSignupInline(admin.TabularInline):
model = StudyGroupSignup
class StudyGroupAdmin(admin.ModelAdmin):
inlines = [StudyGroupSignupInline]
class StudyGroupSignupAdmin(admin.ModelAdmin):
pass
class ApplicationAdmin(admin.ModelAdmin):
pass
admin.site.register(Course, CourseAdmin)
admin.site.register(Application, ApplicationAdmin)
admin.site.register(StudyGroup, StudyGroupAdmin)
admin.site.register(StudyGroupSignup, StudyGroupSignupAdmin)
Add applications to study groupsfrom django.contrib import admin
# Register your models here.
from studygroups.models import Course, StudyGroup, StudyGroupSignup, Application
class StudyGroupSignupInline(admin.TabularInline):
model = StudyGroupSignup
class ApplicationInline(admin.TabularInline):
model = Application.study_groups.through
readonly_fields = ['user_name']
def user_name(self, instance):
return instance.application.name
user_name.short_description = 'user name'
class StudyGroupAdmin(admin.ModelAdmin):
inlines = [
#StudyGroupSignupInline,
ApplicationInline,
]
class StudyGroupSignupAdmin(admin.ModelAdmin):
pass
class CourseAdmin(admin.ModelAdmin):
pass
class ApplicationAdmin(admin.ModelAdmin):
list_display = ('name', 'contact_method', 'created_at')
admin.site.register(Course, CourseAdmin)
admin.site.register(Application, ApplicationAdmin)
admin.site.register(StudyGroup, StudyGroupAdmin)
admin.site.register(StudyGroupSignup, StudyGroupSignupAdmin)
|
<commit_before>from django.contrib import admin
# Register your models here.
from studygroups.models import Course, StudyGroup, StudyGroupSignup, Application
class CourseAdmin(admin.ModelAdmin):
pass
class StudyGroupSignupInline(admin.TabularInline):
model = StudyGroupSignup
class StudyGroupAdmin(admin.ModelAdmin):
inlines = [StudyGroupSignupInline]
class StudyGroupSignupAdmin(admin.ModelAdmin):
pass
class ApplicationAdmin(admin.ModelAdmin):
pass
admin.site.register(Course, CourseAdmin)
admin.site.register(Application, ApplicationAdmin)
admin.site.register(StudyGroup, StudyGroupAdmin)
admin.site.register(StudyGroupSignup, StudyGroupSignupAdmin)
<commit_msg>Add applications to study groups<commit_after>from django.contrib import admin
# Register your models here.
from studygroups.models import Course, StudyGroup, StudyGroupSignup, Application
class StudyGroupSignupInline(admin.TabularInline):
model = StudyGroupSignup
class ApplicationInline(admin.TabularInline):
model = Application.study_groups.through
readonly_fields = ['user_name']
def user_name(self, instance):
return instance.application.name
user_name.short_description = 'user name'
class StudyGroupAdmin(admin.ModelAdmin):
inlines = [
#StudyGroupSignupInline,
ApplicationInline,
]
class StudyGroupSignupAdmin(admin.ModelAdmin):
pass
class CourseAdmin(admin.ModelAdmin):
pass
class ApplicationAdmin(admin.ModelAdmin):
list_display = ('name', 'contact_method', 'created_at')
admin.site.register(Course, CourseAdmin)
admin.site.register(Application, ApplicationAdmin)
admin.site.register(StudyGroup, StudyGroupAdmin)
admin.site.register(StudyGroupSignup, StudyGroupSignupAdmin)
|
42e17593dd9fdbeac02f2b71beebae3b1a7f94c8
|
openhatch-issues/openhatch-issues.py
|
openhatch-issues/openhatch-issues.py
|
# -*- coding: utf-8 -*-
from github3 import login
mytoken = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
gh = login('xxxxx', token=mytoken)
user = gh.user()
print(user.name)
print(user.login)
print(user.followers)
for f in gh.iter_followers():
print(str(f))
print(gh.zen())
|
# -*- coding: utf-8 -*-
from github3 import login
mytoken = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
gh = login('xxxxx', token=mytoken)
user = gh.user()
print(user.name)
print(user.login)
print(user.followers)
for f in gh.iter_followers():
print(str(f))
print(gh.zen())
issue = gh.issue('openhatch', 'oh-mainline', 1403)
print(issue)
print(issue.title)
print(issue.state)
for l in issue.iter_labels():
print(str(l))
if 'stat' in l.name:
print("this is a status label")
if issue.is_closed() and 'resolved' not in l.name:
print("This issue should be marked resolved.")
print("Removing old status")
issue.remove_label(l)
issue.add_labels('stat:resolved')
|
Add hack experiment for issues and labels
|
Add hack experiment for issues and labels
|
Python
|
bsd-3-clause
|
willingc/openhatch-issues,willingc/openhatch-issues
|
# -*- coding: utf-8 -*-
from github3 import login
mytoken = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
gh = login('xxxxx', token=mytoken)
user = gh.user()
print(user.name)
print(user.login)
print(user.followers)
for f in gh.iter_followers():
print(str(f))
print(gh.zen())
Add hack experiment for issues and labels
|
# -*- coding: utf-8 -*-
from github3 import login
mytoken = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
gh = login('xxxxx', token=mytoken)
user = gh.user()
print(user.name)
print(user.login)
print(user.followers)
for f in gh.iter_followers():
print(str(f))
print(gh.zen())
issue = gh.issue('openhatch', 'oh-mainline', 1403)
print(issue)
print(issue.title)
print(issue.state)
for l in issue.iter_labels():
print(str(l))
if 'stat' in l.name:
print("this is a status label")
if issue.is_closed() and 'resolved' not in l.name:
print("This issue should be marked resolved.")
print("Removing old status")
issue.remove_label(l)
issue.add_labels('stat:resolved')
|
<commit_before># -*- coding: utf-8 -*-
from github3 import login
mytoken = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
gh = login('xxxxx', token=mytoken)
user = gh.user()
print(user.name)
print(user.login)
print(user.followers)
for f in gh.iter_followers():
print(str(f))
print(gh.zen())
<commit_msg>Add hack experiment for issues and labels<commit_after>
|
# -*- coding: utf-8 -*-
from github3 import login
mytoken = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
gh = login('xxxxx', token=mytoken)
user = gh.user()
print(user.name)
print(user.login)
print(user.followers)
for f in gh.iter_followers():
print(str(f))
print(gh.zen())
issue = gh.issue('openhatch', 'oh-mainline', 1403)
print(issue)
print(issue.title)
print(issue.state)
for l in issue.iter_labels():
print(str(l))
if 'stat' in l.name:
print("this is a status label")
if issue.is_closed() and 'resolved' not in l.name:
print("This issue should be marked resolved.")
print("Removing old status")
issue.remove_label(l)
issue.add_labels('stat:resolved')
|
# -*- coding: utf-8 -*-
from github3 import login
mytoken = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
gh = login('xxxxx', token=mytoken)
user = gh.user()
print(user.name)
print(user.login)
print(user.followers)
for f in gh.iter_followers():
print(str(f))
print(gh.zen())
Add hack experiment for issues and labels# -*- coding: utf-8 -*-
from github3 import login
mytoken = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
gh = login('xxxxx', token=mytoken)
user = gh.user()
print(user.name)
print(user.login)
print(user.followers)
for f in gh.iter_followers():
print(str(f))
print(gh.zen())
issue = gh.issue('openhatch', 'oh-mainline', 1403)
print(issue)
print(issue.title)
print(issue.state)
for l in issue.iter_labels():
print(str(l))
if 'stat' in l.name:
print("this is a status label")
if issue.is_closed() and 'resolved' not in l.name:
print("This issue should be marked resolved.")
print("Removing old status")
issue.remove_label(l)
issue.add_labels('stat:resolved')
|
<commit_before># -*- coding: utf-8 -*-
from github3 import login
mytoken = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
gh = login('xxxxx', token=mytoken)
user = gh.user()
print(user.name)
print(user.login)
print(user.followers)
for f in gh.iter_followers():
print(str(f))
print(gh.zen())
<commit_msg>Add hack experiment for issues and labels<commit_after># -*- coding: utf-8 -*-
from github3 import login
mytoken = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
gh = login('xxxxx', token=mytoken)
user = gh.user()
print(user.name)
print(user.login)
print(user.followers)
for f in gh.iter_followers():
print(str(f))
print(gh.zen())
issue = gh.issue('openhatch', 'oh-mainline', 1403)
print(issue)
print(issue.title)
print(issue.state)
for l in issue.iter_labels():
print(str(l))
if 'stat' in l.name:
print("this is a status label")
if issue.is_closed() and 'resolved' not in l.name:
print("This issue should be marked resolved.")
print("Removing old status")
issue.remove_label(l)
issue.add_labels('stat:resolved')
|
daf29f52c01d83619585a1f2fa2e6f03a397e1cc
|
tests/test_utils.py
|
tests/test_utils.py
|
# coding: utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
import io
import os
import unittest
import tempfile
try:
from unittest import mock
except ImportError:
import mock
from fs.archive import _utils
class TestUtils(unittest.TestCase):
@unittest.skipUnless(os.name == 'posix', 'POSIX platform needed')
def test_writable_path(self):
self.assertFalse(_utils.writable_path('/'))
self.assertFalse(_utils.writable_path('/root_location'))
self.assertTrue(_utils.writable_path(__file__))
def test_writable_stream(self):
with tempfile.NamedTemporaryFile() as tmp:
self.assertTrue(_utils.writable_stream(tmp))
with open(tmp.name) as tmp2:
self.assertFalse(_utils.writable_stream(tmp2))
buff = io.BytesIO()
self.assertTrue(_utils.writable_stream(buff))
buff = io.BufferedReader(buff)
self.assertFalse(_utils.writable_stream(buff))
buff = mock.MagicMock()
buff.write = mock.MagicMock(side_effect=IOError("not writable"))
self.assertFalse(_utils.writable_stream(buff))
def test_import_from_names(self):
imp = _utils.import_from_names
self.assertIs(imp('os'), os)
self.assertIs(imp('akjhkjhsk', 'os'), os)
self.assertIs(imp('akeskjhk'), None)
|
# coding: utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
import io
import os
import unittest
import tempfile
try:
from unittest import mock
except ImportError:
import mock
from fs.archive import _utils
class TestUtils(unittest.TestCase):
@unittest.skipUnless(os.name == 'posix', 'POSIX platform needed')
def test_writable_path(self):
self.assertFalse(_utils.writable_path('/'))
self.assertFalse(_utils.writable_path('/root_location'))
self.assertTrue(_utils.writable_path(__file__))
def test_writable_stream(self):
with tempfile.NamedTemporaryFile(mode='wb+') as tmp:
self.assertTrue(_utils.writable_stream(tmp))
with open(tmp.name, 'rb') as tmp2:
self.assertFalse(_utils.writable_stream(tmp2))
buff = io.BytesIO()
self.assertTrue(_utils.writable_stream(buff))
buff = io.BufferedReader(buff)
self.assertFalse(_utils.writable_stream(buff))
buff = mock.MagicMock()
buff.write = mock.MagicMock(side_effect=IOError("not writable"))
self.assertFalse(_utils.writable_stream(buff))
def test_import_from_names(self):
imp = _utils.import_from_names
self.assertIs(imp('os'), os)
self.assertIs(imp('akjhkjhsk', 'os'), os)
self.assertIs(imp('akeskjhk'), None)
|
Fix test_writable_stream failing in Python 3.3 and 3.4
|
Fix test_writable_stream failing in Python 3.3 and 3.4
|
Python
|
mit
|
althonos/fs.archive
|
# coding: utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
import io
import os
import unittest
import tempfile
try:
from unittest import mock
except ImportError:
import mock
from fs.archive import _utils
class TestUtils(unittest.TestCase):
@unittest.skipUnless(os.name == 'posix', 'POSIX platform needed')
def test_writable_path(self):
self.assertFalse(_utils.writable_path('/'))
self.assertFalse(_utils.writable_path('/root_location'))
self.assertTrue(_utils.writable_path(__file__))
def test_writable_stream(self):
with tempfile.NamedTemporaryFile() as tmp:
self.assertTrue(_utils.writable_stream(tmp))
with open(tmp.name) as tmp2:
self.assertFalse(_utils.writable_stream(tmp2))
buff = io.BytesIO()
self.assertTrue(_utils.writable_stream(buff))
buff = io.BufferedReader(buff)
self.assertFalse(_utils.writable_stream(buff))
buff = mock.MagicMock()
buff.write = mock.MagicMock(side_effect=IOError("not writable"))
self.assertFalse(_utils.writable_stream(buff))
def test_import_from_names(self):
imp = _utils.import_from_names
self.assertIs(imp('os'), os)
self.assertIs(imp('akjhkjhsk', 'os'), os)
self.assertIs(imp('akeskjhk'), None)
Fix test_writable_stream failing in Python 3.3 and 3.4
|
# coding: utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
import io
import os
import unittest
import tempfile
try:
from unittest import mock
except ImportError:
import mock
from fs.archive import _utils
class TestUtils(unittest.TestCase):
@unittest.skipUnless(os.name == 'posix', 'POSIX platform needed')
def test_writable_path(self):
self.assertFalse(_utils.writable_path('/'))
self.assertFalse(_utils.writable_path('/root_location'))
self.assertTrue(_utils.writable_path(__file__))
def test_writable_stream(self):
with tempfile.NamedTemporaryFile(mode='wb+') as tmp:
self.assertTrue(_utils.writable_stream(tmp))
with open(tmp.name, 'rb') as tmp2:
self.assertFalse(_utils.writable_stream(tmp2))
buff = io.BytesIO()
self.assertTrue(_utils.writable_stream(buff))
buff = io.BufferedReader(buff)
self.assertFalse(_utils.writable_stream(buff))
buff = mock.MagicMock()
buff.write = mock.MagicMock(side_effect=IOError("not writable"))
self.assertFalse(_utils.writable_stream(buff))
def test_import_from_names(self):
imp = _utils.import_from_names
self.assertIs(imp('os'), os)
self.assertIs(imp('akjhkjhsk', 'os'), os)
self.assertIs(imp('akeskjhk'), None)
|
<commit_before># coding: utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
import io
import os
import unittest
import tempfile
try:
from unittest import mock
except ImportError:
import mock
from fs.archive import _utils
class TestUtils(unittest.TestCase):
@unittest.skipUnless(os.name == 'posix', 'POSIX platform needed')
def test_writable_path(self):
self.assertFalse(_utils.writable_path('/'))
self.assertFalse(_utils.writable_path('/root_location'))
self.assertTrue(_utils.writable_path(__file__))
def test_writable_stream(self):
with tempfile.NamedTemporaryFile() as tmp:
self.assertTrue(_utils.writable_stream(tmp))
with open(tmp.name) as tmp2:
self.assertFalse(_utils.writable_stream(tmp2))
buff = io.BytesIO()
self.assertTrue(_utils.writable_stream(buff))
buff = io.BufferedReader(buff)
self.assertFalse(_utils.writable_stream(buff))
buff = mock.MagicMock()
buff.write = mock.MagicMock(side_effect=IOError("not writable"))
self.assertFalse(_utils.writable_stream(buff))
def test_import_from_names(self):
imp = _utils.import_from_names
self.assertIs(imp('os'), os)
self.assertIs(imp('akjhkjhsk', 'os'), os)
self.assertIs(imp('akeskjhk'), None)
<commit_msg>Fix test_writable_stream failing in Python 3.3 and 3.4<commit_after>
|
# coding: utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
import io
import os
import unittest
import tempfile
try:
from unittest import mock
except ImportError:
import mock
from fs.archive import _utils
class TestUtils(unittest.TestCase):
@unittest.skipUnless(os.name == 'posix', 'POSIX platform needed')
def test_writable_path(self):
self.assertFalse(_utils.writable_path('/'))
self.assertFalse(_utils.writable_path('/root_location'))
self.assertTrue(_utils.writable_path(__file__))
def test_writable_stream(self):
with tempfile.NamedTemporaryFile(mode='wb+') as tmp:
self.assertTrue(_utils.writable_stream(tmp))
with open(tmp.name, 'rb') as tmp2:
self.assertFalse(_utils.writable_stream(tmp2))
buff = io.BytesIO()
self.assertTrue(_utils.writable_stream(buff))
buff = io.BufferedReader(buff)
self.assertFalse(_utils.writable_stream(buff))
buff = mock.MagicMock()
buff.write = mock.MagicMock(side_effect=IOError("not writable"))
self.assertFalse(_utils.writable_stream(buff))
def test_import_from_names(self):
imp = _utils.import_from_names
self.assertIs(imp('os'), os)
self.assertIs(imp('akjhkjhsk', 'os'), os)
self.assertIs(imp('akeskjhk'), None)
|
# coding: utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
import io
import os
import unittest
import tempfile
try:
from unittest import mock
except ImportError:
import mock
from fs.archive import _utils
class TestUtils(unittest.TestCase):
@unittest.skipUnless(os.name == 'posix', 'POSIX platform needed')
def test_writable_path(self):
self.assertFalse(_utils.writable_path('/'))
self.assertFalse(_utils.writable_path('/root_location'))
self.assertTrue(_utils.writable_path(__file__))
def test_writable_stream(self):
with tempfile.NamedTemporaryFile() as tmp:
self.assertTrue(_utils.writable_stream(tmp))
with open(tmp.name) as tmp2:
self.assertFalse(_utils.writable_stream(tmp2))
buff = io.BytesIO()
self.assertTrue(_utils.writable_stream(buff))
buff = io.BufferedReader(buff)
self.assertFalse(_utils.writable_stream(buff))
buff = mock.MagicMock()
buff.write = mock.MagicMock(side_effect=IOError("not writable"))
self.assertFalse(_utils.writable_stream(buff))
def test_import_from_names(self):
imp = _utils.import_from_names
self.assertIs(imp('os'), os)
self.assertIs(imp('akjhkjhsk', 'os'), os)
self.assertIs(imp('akeskjhk'), None)
Fix test_writable_stream failing in Python 3.3 and 3.4# coding: utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
import io
import os
import unittest
import tempfile
try:
from unittest import mock
except ImportError:
import mock
from fs.archive import _utils
class TestUtils(unittest.TestCase):
@unittest.skipUnless(os.name == 'posix', 'POSIX platform needed')
def test_writable_path(self):
self.assertFalse(_utils.writable_path('/'))
self.assertFalse(_utils.writable_path('/root_location'))
self.assertTrue(_utils.writable_path(__file__))
def test_writable_stream(self):
with tempfile.NamedTemporaryFile(mode='wb+') as tmp:
self.assertTrue(_utils.writable_stream(tmp))
with open(tmp.name, 'rb') as tmp2:
self.assertFalse(_utils.writable_stream(tmp2))
buff = io.BytesIO()
self.assertTrue(_utils.writable_stream(buff))
buff = io.BufferedReader(buff)
self.assertFalse(_utils.writable_stream(buff))
buff = mock.MagicMock()
buff.write = mock.MagicMock(side_effect=IOError("not writable"))
self.assertFalse(_utils.writable_stream(buff))
def test_import_from_names(self):
imp = _utils.import_from_names
self.assertIs(imp('os'), os)
self.assertIs(imp('akjhkjhsk', 'os'), os)
self.assertIs(imp('akeskjhk'), None)
|
<commit_before># coding: utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
import io
import os
import unittest
import tempfile
try:
from unittest import mock
except ImportError:
import mock
from fs.archive import _utils
class TestUtils(unittest.TestCase):
@unittest.skipUnless(os.name == 'posix', 'POSIX platform needed')
def test_writable_path(self):
self.assertFalse(_utils.writable_path('/'))
self.assertFalse(_utils.writable_path('/root_location'))
self.assertTrue(_utils.writable_path(__file__))
def test_writable_stream(self):
with tempfile.NamedTemporaryFile() as tmp:
self.assertTrue(_utils.writable_stream(tmp))
with open(tmp.name) as tmp2:
self.assertFalse(_utils.writable_stream(tmp2))
buff = io.BytesIO()
self.assertTrue(_utils.writable_stream(buff))
buff = io.BufferedReader(buff)
self.assertFalse(_utils.writable_stream(buff))
buff = mock.MagicMock()
buff.write = mock.MagicMock(side_effect=IOError("not writable"))
self.assertFalse(_utils.writable_stream(buff))
def test_import_from_names(self):
imp = _utils.import_from_names
self.assertIs(imp('os'), os)
self.assertIs(imp('akjhkjhsk', 'os'), os)
self.assertIs(imp('akeskjhk'), None)
<commit_msg>Fix test_writable_stream failing in Python 3.3 and 3.4<commit_after># coding: utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
import io
import os
import unittest
import tempfile
try:
from unittest import mock
except ImportError:
import mock
from fs.archive import _utils
class TestUtils(unittest.TestCase):
@unittest.skipUnless(os.name == 'posix', 'POSIX platform needed')
def test_writable_path(self):
self.assertFalse(_utils.writable_path('/'))
self.assertFalse(_utils.writable_path('/root_location'))
self.assertTrue(_utils.writable_path(__file__))
def test_writable_stream(self):
with tempfile.NamedTemporaryFile(mode='wb+') as tmp:
self.assertTrue(_utils.writable_stream(tmp))
with open(tmp.name, 'rb') as tmp2:
self.assertFalse(_utils.writable_stream(tmp2))
buff = io.BytesIO()
self.assertTrue(_utils.writable_stream(buff))
buff = io.BufferedReader(buff)
self.assertFalse(_utils.writable_stream(buff))
buff = mock.MagicMock()
buff.write = mock.MagicMock(side_effect=IOError("not writable"))
self.assertFalse(_utils.writable_stream(buff))
def test_import_from_names(self):
imp = _utils.import_from_names
self.assertIs(imp('os'), os)
self.assertIs(imp('akjhkjhsk', 'os'), os)
self.assertIs(imp('akeskjhk'), None)
|
3a5dc4332e7f13119563e2190e6ef7d66b464054
|
tests/test_utils.py
|
tests/test_utils.py
|
"""
Tests for the NURBS-Python package
Released under The MIT License. See LICENSE file for details.
Copyright (c) 2018 Onur Rauf Bingol
Tests geomdl.utilities module. Requires "pytest" to run.
"""
from geomdl import utilities
def test_autogen_knotvector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
result = [0.0, 0.0, 0.0, 0.0, 0.0, 0.125, 0.25, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0, 1.0, 1.0, 1.0, 1.0]
assert autogen_kv == result
def test_check_knotvector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
check_result = utilities.check_knot_vector(degree=degree, control_points_size=num_ctrlpts, knot_vector=autogen_kv)
assert check_result == True
|
"""
Tests for the NURBS-Python package
Released under The MIT License. See LICENSE file for details.
Copyright (c) 2018 Onur Rauf Bingol
Tests geomdl.utilities module. Requires "pytest" to run.
"""
from geomdl import utilities
def test_autogen_knot_vector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
result = [0.0, 0.0, 0.0, 0.0, 0.0, 0.125, 0.25, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0, 1.0, 1.0, 1.0, 1.0]
assert autogen_kv == result
def test_check_knot_vector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
check_result = utilities.check_knot_vector(degree=degree, control_points_size=num_ctrlpts, knot_vector=autogen_kv)
assert check_result
def test_normalize_knot_vector():
input_kv = (-5, -5, -3, -2, 2, 3, 5, 5)
output_kv = [0.0, 0.0, 0.2, 0.3, 0.7, 0.8, 1.0, 1.0]
to_check = utilities.normalize_knot_vector(input_kv)
assert to_check == output_kv
|
Add knot vector normalization test
|
Add knot vector normalization test
|
Python
|
mit
|
orbingol/NURBS-Python,orbingol/NURBS-Python
|
"""
Tests for the NURBS-Python package
Released under The MIT License. See LICENSE file for details.
Copyright (c) 2018 Onur Rauf Bingol
Tests geomdl.utilities module. Requires "pytest" to run.
"""
from geomdl import utilities
def test_autogen_knotvector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
result = [0.0, 0.0, 0.0, 0.0, 0.0, 0.125, 0.25, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0, 1.0, 1.0, 1.0, 1.0]
assert autogen_kv == result
def test_check_knotvector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
check_result = utilities.check_knot_vector(degree=degree, control_points_size=num_ctrlpts, knot_vector=autogen_kv)
assert check_result == True
Add knot vector normalization test
|
"""
Tests for the NURBS-Python package
Released under The MIT License. See LICENSE file for details.
Copyright (c) 2018 Onur Rauf Bingol
Tests geomdl.utilities module. Requires "pytest" to run.
"""
from geomdl import utilities
def test_autogen_knot_vector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
result = [0.0, 0.0, 0.0, 0.0, 0.0, 0.125, 0.25, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0, 1.0, 1.0, 1.0, 1.0]
assert autogen_kv == result
def test_check_knot_vector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
check_result = utilities.check_knot_vector(degree=degree, control_points_size=num_ctrlpts, knot_vector=autogen_kv)
assert check_result
def test_normalize_knot_vector():
input_kv = (-5, -5, -3, -2, 2, 3, 5, 5)
output_kv = [0.0, 0.0, 0.2, 0.3, 0.7, 0.8, 1.0, 1.0]
to_check = utilities.normalize_knot_vector(input_kv)
assert to_check == output_kv
|
<commit_before>"""
Tests for the NURBS-Python package
Released under The MIT License. See LICENSE file for details.
Copyright (c) 2018 Onur Rauf Bingol
Tests geomdl.utilities module. Requires "pytest" to run.
"""
from geomdl import utilities
def test_autogen_knotvector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
result = [0.0, 0.0, 0.0, 0.0, 0.0, 0.125, 0.25, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0, 1.0, 1.0, 1.0, 1.0]
assert autogen_kv == result
def test_check_knotvector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
check_result = utilities.check_knot_vector(degree=degree, control_points_size=num_ctrlpts, knot_vector=autogen_kv)
assert check_result == True
<commit_msg>Add knot vector normalization test<commit_after>
|
"""
Tests for the NURBS-Python package
Released under The MIT License. See LICENSE file for details.
Copyright (c) 2018 Onur Rauf Bingol
Tests geomdl.utilities module. Requires "pytest" to run.
"""
from geomdl import utilities
def test_autogen_knot_vector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
result = [0.0, 0.0, 0.0, 0.0, 0.0, 0.125, 0.25, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0, 1.0, 1.0, 1.0, 1.0]
assert autogen_kv == result
def test_check_knot_vector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
check_result = utilities.check_knot_vector(degree=degree, control_points_size=num_ctrlpts, knot_vector=autogen_kv)
assert check_result
def test_normalize_knot_vector():
input_kv = (-5, -5, -3, -2, 2, 3, 5, 5)
output_kv = [0.0, 0.0, 0.2, 0.3, 0.7, 0.8, 1.0, 1.0]
to_check = utilities.normalize_knot_vector(input_kv)
assert to_check == output_kv
|
"""
Tests for the NURBS-Python package
Released under The MIT License. See LICENSE file for details.
Copyright (c) 2018 Onur Rauf Bingol
Tests geomdl.utilities module. Requires "pytest" to run.
"""
from geomdl import utilities
def test_autogen_knotvector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
result = [0.0, 0.0, 0.0, 0.0, 0.0, 0.125, 0.25, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0, 1.0, 1.0, 1.0, 1.0]
assert autogen_kv == result
def test_check_knotvector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
check_result = utilities.check_knot_vector(degree=degree, control_points_size=num_ctrlpts, knot_vector=autogen_kv)
assert check_result == True
Add knot vector normalization test"""
Tests for the NURBS-Python package
Released under The MIT License. See LICENSE file for details.
Copyright (c) 2018 Onur Rauf Bingol
Tests geomdl.utilities module. Requires "pytest" to run.
"""
from geomdl import utilities
def test_autogen_knot_vector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
result = [0.0, 0.0, 0.0, 0.0, 0.0, 0.125, 0.25, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0, 1.0, 1.0, 1.0, 1.0]
assert autogen_kv == result
def test_check_knot_vector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
check_result = utilities.check_knot_vector(degree=degree, control_points_size=num_ctrlpts, knot_vector=autogen_kv)
assert check_result
def test_normalize_knot_vector():
input_kv = (-5, -5, -3, -2, 2, 3, 5, 5)
output_kv = [0.0, 0.0, 0.2, 0.3, 0.7, 0.8, 1.0, 1.0]
to_check = utilities.normalize_knot_vector(input_kv)
assert to_check == output_kv
|
<commit_before>"""
Tests for the NURBS-Python package
Released under The MIT License. See LICENSE file for details.
Copyright (c) 2018 Onur Rauf Bingol
Tests geomdl.utilities module. Requires "pytest" to run.
"""
from geomdl import utilities
def test_autogen_knotvector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
result = [0.0, 0.0, 0.0, 0.0, 0.0, 0.125, 0.25, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0, 1.0, 1.0, 1.0, 1.0]
assert autogen_kv == result
def test_check_knotvector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
check_result = utilities.check_knot_vector(degree=degree, control_points_size=num_ctrlpts, knot_vector=autogen_kv)
assert check_result == True
<commit_msg>Add knot vector normalization test<commit_after>"""
Tests for the NURBS-Python package
Released under The MIT License. See LICENSE file for details.
Copyright (c) 2018 Onur Rauf Bingol
Tests geomdl.utilities module. Requires "pytest" to run.
"""
from geomdl import utilities
def test_autogen_knot_vector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
result = [0.0, 0.0, 0.0, 0.0, 0.0, 0.125, 0.25, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0, 1.0, 1.0, 1.0, 1.0]
assert autogen_kv == result
def test_check_knot_vector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
check_result = utilities.check_knot_vector(degree=degree, control_points_size=num_ctrlpts, knot_vector=autogen_kv)
assert check_result
def test_normalize_knot_vector():
input_kv = (-5, -5, -3, -2, 2, 3, 5, 5)
output_kv = [0.0, 0.0, 0.2, 0.3, 0.7, 0.8, 1.0, 1.0]
to_check = utilities.normalize_knot_vector(input_kv)
assert to_check == output_kv
|
d18f595b771ce68730cc5fea099f57dda6690157
|
piecewise/piecewise/bigquery.py
|
piecewise/piecewise/bigquery.py
|
import httplib2
from apiclient.discovery import build
from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage
from oauth2client import tools
import os
PROJECT_NUMBER = '422648324111'
PARENT_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRETS_FILE = os.path.join(PARENT_PATH, 'client_secrets.json')
CREDENTIALS_FILE = os.path.join(PARENT_PATH, 'bigquery_credentials.dat')
FLOW = flow_from_clientsecrets(SECRETS_FILE, scope='https://www.googleapis.com/auth/bigquery')
storage = Storage(CREDENTIALS_FILE)
credentials = storage.get()
class FlowFlags():
noauth_local_webserver = True
logging_level = 'ERROR'
if credentials is None or credentials.invalid:
# Run oauth2 flow with default arguments.
credentials = tools.run_flow(FLOW, storage, FlowFlags())
http = httplib2.Http()
http = credentials.authorize(http)
bigquery_service = build('bigquery', 'v2', http = http)
|
import httplib2
from apiclient.discovery import build
from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage
from oauth2client import tools
import os
PROJECT_NUMBER = '233384409938'
PARENT_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRETS_FILE = os.path.join(PARENT_PATH, 'client_secrets.json')
CREDENTIALS_FILE = os.path.join(PARENT_PATH, 'bigquery_credentials.dat')
FLOW = flow_from_clientsecrets(SECRETS_FILE, scope='https://www.googleapis.com/auth/bigquery')
storage = Storage(CREDENTIALS_FILE)
credentials = storage.get()
class FlowFlags():
noauth_local_webserver = True
logging_level = 'ERROR'
if credentials is None or credentials.invalid:
# Run oauth2 flow with default arguments.
credentials = tools.run_flow(FLOW, storage, FlowFlags())
http = httplib2.Http()
http = credentials.authorize(http)
bigquery_service = build('bigquery', 'v2', http = http)
|
Use M-Lab project number for BigQuery access.
|
Use M-Lab project number for BigQuery access.
|
Python
|
apache-2.0
|
opentechinstitute/piecewise,critzo/piecewise,critzo/piecewise,opentechinstitute/piecewise,opentechinstitute/piecewise,opentechinstitute/piecewise,critzo/piecewise,critzo/piecewise
|
import httplib2
from apiclient.discovery import build
from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage
from oauth2client import tools
import os
PROJECT_NUMBER = '422648324111'
PARENT_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRETS_FILE = os.path.join(PARENT_PATH, 'client_secrets.json')
CREDENTIALS_FILE = os.path.join(PARENT_PATH, 'bigquery_credentials.dat')
FLOW = flow_from_clientsecrets(SECRETS_FILE, scope='https://www.googleapis.com/auth/bigquery')
storage = Storage(CREDENTIALS_FILE)
credentials = storage.get()
class FlowFlags():
noauth_local_webserver = True
logging_level = 'ERROR'
if credentials is None or credentials.invalid:
# Run oauth2 flow with default arguments.
credentials = tools.run_flow(FLOW, storage, FlowFlags())
http = httplib2.Http()
http = credentials.authorize(http)
bigquery_service = build('bigquery', 'v2', http = http)
Use M-Lab project number for BigQuery access.
|
import httplib2
from apiclient.discovery import build
from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage
from oauth2client import tools
import os
PROJECT_NUMBER = '233384409938'
PARENT_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRETS_FILE = os.path.join(PARENT_PATH, 'client_secrets.json')
CREDENTIALS_FILE = os.path.join(PARENT_PATH, 'bigquery_credentials.dat')
FLOW = flow_from_clientsecrets(SECRETS_FILE, scope='https://www.googleapis.com/auth/bigquery')
storage = Storage(CREDENTIALS_FILE)
credentials = storage.get()
class FlowFlags():
noauth_local_webserver = True
logging_level = 'ERROR'
if credentials is None or credentials.invalid:
# Run oauth2 flow with default arguments.
credentials = tools.run_flow(FLOW, storage, FlowFlags())
http = httplib2.Http()
http = credentials.authorize(http)
bigquery_service = build('bigquery', 'v2', http = http)
|
<commit_before>import httplib2
from apiclient.discovery import build
from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage
from oauth2client import tools
import os
PROJECT_NUMBER = '422648324111'
PARENT_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRETS_FILE = os.path.join(PARENT_PATH, 'client_secrets.json')
CREDENTIALS_FILE = os.path.join(PARENT_PATH, 'bigquery_credentials.dat')
FLOW = flow_from_clientsecrets(SECRETS_FILE, scope='https://www.googleapis.com/auth/bigquery')
storage = Storage(CREDENTIALS_FILE)
credentials = storage.get()
class FlowFlags():
noauth_local_webserver = True
logging_level = 'ERROR'
if credentials is None or credentials.invalid:
# Run oauth2 flow with default arguments.
credentials = tools.run_flow(FLOW, storage, FlowFlags())
http = httplib2.Http()
http = credentials.authorize(http)
bigquery_service = build('bigquery', 'v2', http = http)
<commit_msg>Use M-Lab project number for BigQuery access.<commit_after>
|
import httplib2
from apiclient.discovery import build
from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage
from oauth2client import tools
import os
PROJECT_NUMBER = '233384409938'
PARENT_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRETS_FILE = os.path.join(PARENT_PATH, 'client_secrets.json')
CREDENTIALS_FILE = os.path.join(PARENT_PATH, 'bigquery_credentials.dat')
FLOW = flow_from_clientsecrets(SECRETS_FILE, scope='https://www.googleapis.com/auth/bigquery')
storage = Storage(CREDENTIALS_FILE)
credentials = storage.get()
class FlowFlags():
noauth_local_webserver = True
logging_level = 'ERROR'
if credentials is None or credentials.invalid:
# Run oauth2 flow with default arguments.
credentials = tools.run_flow(FLOW, storage, FlowFlags())
http = httplib2.Http()
http = credentials.authorize(http)
bigquery_service = build('bigquery', 'v2', http = http)
|
import httplib2
from apiclient.discovery import build
from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage
from oauth2client import tools
import os
PROJECT_NUMBER = '422648324111'
PARENT_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRETS_FILE = os.path.join(PARENT_PATH, 'client_secrets.json')
CREDENTIALS_FILE = os.path.join(PARENT_PATH, 'bigquery_credentials.dat')
FLOW = flow_from_clientsecrets(SECRETS_FILE, scope='https://www.googleapis.com/auth/bigquery')
storage = Storage(CREDENTIALS_FILE)
credentials = storage.get()
class FlowFlags():
noauth_local_webserver = True
logging_level = 'ERROR'
if credentials is None or credentials.invalid:
# Run oauth2 flow with default arguments.
credentials = tools.run_flow(FLOW, storage, FlowFlags())
http = httplib2.Http()
http = credentials.authorize(http)
bigquery_service = build('bigquery', 'v2', http = http)
Use M-Lab project number for BigQuery access.import httplib2
from apiclient.discovery import build
from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage
from oauth2client import tools
import os
PROJECT_NUMBER = '233384409938'
PARENT_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRETS_FILE = os.path.join(PARENT_PATH, 'client_secrets.json')
CREDENTIALS_FILE = os.path.join(PARENT_PATH, 'bigquery_credentials.dat')
FLOW = flow_from_clientsecrets(SECRETS_FILE, scope='https://www.googleapis.com/auth/bigquery')
storage = Storage(CREDENTIALS_FILE)
credentials = storage.get()
class FlowFlags():
noauth_local_webserver = True
logging_level = 'ERROR'
if credentials is None or credentials.invalid:
# Run oauth2 flow with default arguments.
credentials = tools.run_flow(FLOW, storage, FlowFlags())
http = httplib2.Http()
http = credentials.authorize(http)
bigquery_service = build('bigquery', 'v2', http = http)
|
<commit_before>import httplib2
from apiclient.discovery import build
from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage
from oauth2client import tools
import os
PROJECT_NUMBER = '422648324111'
PARENT_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRETS_FILE = os.path.join(PARENT_PATH, 'client_secrets.json')
CREDENTIALS_FILE = os.path.join(PARENT_PATH, 'bigquery_credentials.dat')
FLOW = flow_from_clientsecrets(SECRETS_FILE, scope='https://www.googleapis.com/auth/bigquery')
storage = Storage(CREDENTIALS_FILE)
credentials = storage.get()
class FlowFlags():
noauth_local_webserver = True
logging_level = 'ERROR'
if credentials is None or credentials.invalid:
# Run oauth2 flow with default arguments.
credentials = tools.run_flow(FLOW, storage, FlowFlags())
http = httplib2.Http()
http = credentials.authorize(http)
bigquery_service = build('bigquery', 'v2', http = http)
<commit_msg>Use M-Lab project number for BigQuery access.<commit_after>import httplib2
from apiclient.discovery import build
from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage
from oauth2client import tools
import os
PROJECT_NUMBER = '233384409938'
PARENT_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRETS_FILE = os.path.join(PARENT_PATH, 'client_secrets.json')
CREDENTIALS_FILE = os.path.join(PARENT_PATH, 'bigquery_credentials.dat')
FLOW = flow_from_clientsecrets(SECRETS_FILE, scope='https://www.googleapis.com/auth/bigquery')
storage = Storage(CREDENTIALS_FILE)
credentials = storage.get()
class FlowFlags():
noauth_local_webserver = True
logging_level = 'ERROR'
if credentials is None or credentials.invalid:
# Run oauth2 flow with default arguments.
credentials = tools.run_flow(FLOW, storage, FlowFlags())
http = httplib2.Http()
http = credentials.authorize(http)
bigquery_service = build('bigquery', 'v2', http = http)
|
d0613f3e77b87ad8df92730e5aa50aebf651ccc6
|
tests/test_plotting.py
|
tests/test_plotting.py
|
import unittest
import numpy as np
from numpy import pi
import matplotlib.pyplot as plt
from robot_arm import RobotArm
from plotting import path_figure
class TestPlotting(unittest.TestCase):
def setUp(self):
lengths = (3, 2, 2,)
destinations = (
(5, 4, 6, 4, 5),
(0, 2, 0.5, -2, -1),
)
theta = (pi, pi / 2, 0,)
self.robot_arm = RobotArm(
lengths=lengths,
destinations=destinations,
theta=theta
)
n = len(lengths)
s = len(destinations[0])
total_joints = n * s
self.theta_matrix = np.arange(total_joints).reshape((n, s))
def test_plot_pure_functon(self):
# Save values before function invocation
original_destinations = self.robot_arm.destinations.copy()
original_theta_matrix = self.theta_matrix.copy()
# Run the pure function
path_figure(self.theta_matrix, self.robot_arm, show=False)
# Assert that none of the arguments have been changed
np.testing.assert_array_equal(original_destinations, self.robot_arm.destinations)
np.testing.assert_array_equal(original_theta_matrix, self.theta_matrix)
|
import unittest
import numpy as np
from plotting import path_figure
from fixtures import robot_arm1
class TestPlotting(unittest.TestCase):
def setUp(self):
self.robot_arm = robot_arm1
n = len(self.robot_arm.lengths)
s = len(self.robot_arm.destinations[0])
total_joints = n * s
self.theta_matrix = np.arange(total_joints).reshape((n, s))
def test_plot_pure_functon(self):
# Save values before function invocation
original_destinations = self.robot_arm.destinations.copy()
original_theta_matrix = self.theta_matrix.copy()
# Run the pure function
path_figure(self.theta_matrix, self.robot_arm, show=False)
# Assert that none of the arguments have been changed
np.testing.assert_array_equal(original_destinations, self.robot_arm.destinations)
np.testing.assert_array_equal(original_theta_matrix, self.theta_matrix)
|
Use fixture for robot_arm test
|
Use fixture for robot_arm test
|
Python
|
mit
|
JakobGM/robotarm-optimization
|
import unittest
import numpy as np
from numpy import pi
import matplotlib.pyplot as plt
from robot_arm import RobotArm
from plotting import path_figure
class TestPlotting(unittest.TestCase):
def setUp(self):
lengths = (3, 2, 2,)
destinations = (
(5, 4, 6, 4, 5),
(0, 2, 0.5, -2, -1),
)
theta = (pi, pi / 2, 0,)
self.robot_arm = RobotArm(
lengths=lengths,
destinations=destinations,
theta=theta
)
n = len(lengths)
s = len(destinations[0])
total_joints = n * s
self.theta_matrix = np.arange(total_joints).reshape((n, s))
def test_plot_pure_functon(self):
# Save values before function invocation
original_destinations = self.robot_arm.destinations.copy()
original_theta_matrix = self.theta_matrix.copy()
# Run the pure function
path_figure(self.theta_matrix, self.robot_arm, show=False)
# Assert that none of the arguments have been changed
np.testing.assert_array_equal(original_destinations, self.robot_arm.destinations)
np.testing.assert_array_equal(original_theta_matrix, self.theta_matrix)
Use fixture for robot_arm test
|
import unittest
import numpy as np
from plotting import path_figure
from fixtures import robot_arm1
class TestPlotting(unittest.TestCase):
def setUp(self):
self.robot_arm = robot_arm1
n = len(self.robot_arm.lengths)
s = len(self.robot_arm.destinations[0])
total_joints = n * s
self.theta_matrix = np.arange(total_joints).reshape((n, s))
def test_plot_pure_functon(self):
# Save values before function invocation
original_destinations = self.robot_arm.destinations.copy()
original_theta_matrix = self.theta_matrix.copy()
# Run the pure function
path_figure(self.theta_matrix, self.robot_arm, show=False)
# Assert that none of the arguments have been changed
np.testing.assert_array_equal(original_destinations, self.robot_arm.destinations)
np.testing.assert_array_equal(original_theta_matrix, self.theta_matrix)
|
<commit_before>import unittest
import numpy as np
from numpy import pi
import matplotlib.pyplot as plt
from robot_arm import RobotArm
from plotting import path_figure
class TestPlotting(unittest.TestCase):
def setUp(self):
lengths = (3, 2, 2,)
destinations = (
(5, 4, 6, 4, 5),
(0, 2, 0.5, -2, -1),
)
theta = (pi, pi / 2, 0,)
self.robot_arm = RobotArm(
lengths=lengths,
destinations=destinations,
theta=theta
)
n = len(lengths)
s = len(destinations[0])
total_joints = n * s
self.theta_matrix = np.arange(total_joints).reshape((n, s))
def test_plot_pure_functon(self):
# Save values before function invocation
original_destinations = self.robot_arm.destinations.copy()
original_theta_matrix = self.theta_matrix.copy()
# Run the pure function
path_figure(self.theta_matrix, self.robot_arm, show=False)
# Assert that none of the arguments have been changed
np.testing.assert_array_equal(original_destinations, self.robot_arm.destinations)
np.testing.assert_array_equal(original_theta_matrix, self.theta_matrix)
<commit_msg>Use fixture for robot_arm test<commit_after>
|
import unittest
import numpy as np
from plotting import path_figure
from fixtures import robot_arm1
class TestPlotting(unittest.TestCase):
def setUp(self):
self.robot_arm = robot_arm1
n = len(self.robot_arm.lengths)
s = len(self.robot_arm.destinations[0])
total_joints = n * s
self.theta_matrix = np.arange(total_joints).reshape((n, s))
def test_plot_pure_functon(self):
# Save values before function invocation
original_destinations = self.robot_arm.destinations.copy()
original_theta_matrix = self.theta_matrix.copy()
# Run the pure function
path_figure(self.theta_matrix, self.robot_arm, show=False)
# Assert that none of the arguments have been changed
np.testing.assert_array_equal(original_destinations, self.robot_arm.destinations)
np.testing.assert_array_equal(original_theta_matrix, self.theta_matrix)
|
import unittest
import numpy as np
from numpy import pi
import matplotlib.pyplot as plt
from robot_arm import RobotArm
from plotting import path_figure
class TestPlotting(unittest.TestCase):
def setUp(self):
lengths = (3, 2, 2,)
destinations = (
(5, 4, 6, 4, 5),
(0, 2, 0.5, -2, -1),
)
theta = (pi, pi / 2, 0,)
self.robot_arm = RobotArm(
lengths=lengths,
destinations=destinations,
theta=theta
)
n = len(lengths)
s = len(destinations[0])
total_joints = n * s
self.theta_matrix = np.arange(total_joints).reshape((n, s))
def test_plot_pure_functon(self):
# Save values before function invocation
original_destinations = self.robot_arm.destinations.copy()
original_theta_matrix = self.theta_matrix.copy()
# Run the pure function
path_figure(self.theta_matrix, self.robot_arm, show=False)
# Assert that none of the arguments have been changed
np.testing.assert_array_equal(original_destinations, self.robot_arm.destinations)
np.testing.assert_array_equal(original_theta_matrix, self.theta_matrix)
Use fixture for robot_arm testimport unittest
import numpy as np
from plotting import path_figure
from fixtures import robot_arm1
class TestPlotting(unittest.TestCase):
def setUp(self):
self.robot_arm = robot_arm1
n = len(self.robot_arm.lengths)
s = len(self.robot_arm.destinations[0])
total_joints = n * s
self.theta_matrix = np.arange(total_joints).reshape((n, s))
def test_plot_pure_functon(self):
# Save values before function invocation
original_destinations = self.robot_arm.destinations.copy()
original_theta_matrix = self.theta_matrix.copy()
# Run the pure function
path_figure(self.theta_matrix, self.robot_arm, show=False)
# Assert that none of the arguments have been changed
np.testing.assert_array_equal(original_destinations, self.robot_arm.destinations)
np.testing.assert_array_equal(original_theta_matrix, self.theta_matrix)
|
<commit_before>import unittest
import numpy as np
from numpy import pi
import matplotlib.pyplot as plt
from robot_arm import RobotArm
from plotting import path_figure
class TestPlotting(unittest.TestCase):
def setUp(self):
lengths = (3, 2, 2,)
destinations = (
(5, 4, 6, 4, 5),
(0, 2, 0.5, -2, -1),
)
theta = (pi, pi / 2, 0,)
self.robot_arm = RobotArm(
lengths=lengths,
destinations=destinations,
theta=theta
)
n = len(lengths)
s = len(destinations[0])
total_joints = n * s
self.theta_matrix = np.arange(total_joints).reshape((n, s))
def test_plot_pure_functon(self):
# Save values before function invocation
original_destinations = self.robot_arm.destinations.copy()
original_theta_matrix = self.theta_matrix.copy()
# Run the pure function
path_figure(self.theta_matrix, self.robot_arm, show=False)
# Assert that none of the arguments have been changed
np.testing.assert_array_equal(original_destinations, self.robot_arm.destinations)
np.testing.assert_array_equal(original_theta_matrix, self.theta_matrix)
<commit_msg>Use fixture for robot_arm test<commit_after>import unittest
import numpy as np
from plotting import path_figure
from fixtures import robot_arm1
class TestPlotting(unittest.TestCase):
def setUp(self):
self.robot_arm = robot_arm1
n = len(self.robot_arm.lengths)
s = len(self.robot_arm.destinations[0])
total_joints = n * s
self.theta_matrix = np.arange(total_joints).reshape((n, s))
def test_plot_pure_functon(self):
# Save values before function invocation
original_destinations = self.robot_arm.destinations.copy()
original_theta_matrix = self.theta_matrix.copy()
# Run the pure function
path_figure(self.theta_matrix, self.robot_arm, show=False)
# Assert that none of the arguments have been changed
np.testing.assert_array_equal(original_destinations, self.robot_arm.destinations)
np.testing.assert_array_equal(original_theta_matrix, self.theta_matrix)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.