commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
56e11c3df02874867626551534693b488db82fb7
|
example.py
|
example.py
|
import os
import pickle as pkl
from gso import load_up_answers, load_up_questions
#for result in load_up_questions("How to write a bubble sort", "python"):
#print result
#break
#question_url = 'https://stackoverflow.com/questions/895371/bubble-sort-homework'
#with open("html_dump.pkl", 'wb') as myfile:
#pkl.dump(load_up_answers(question_url), myfile)
html_dump = []
with open("html_dump.pkl", 'rb') as myfile:
html_dump = pkl.load(myfile)
print html_dump
|
import os
import pickle as pkl
from lxml import etree
from gso import load_up_answers, load_up_questions
#for result in load_up_questions("How to write a bubble sort", "python"):
#print result
#break
#question_url = 'https://stackoverflow.com/questions/895371/bubble-sort-homework'
#with open("html_dump.pkl", 'wb') as myfile:
#pkl.dump(load_up_answers(question_url), myfile)
html_dump = []
with open("html_dump.pkl", 'rb') as myfile:
html_dump = pkl.load(myfile)
def wrapper_tag(xml_string):
xml_string = "<root>"+xml_string+"</root>"
return xml_string
root = etree.fromstring(wrapper_tag(html_dump[0][1]))
print etree.tostring(root)
|
Create wrapper tag for SO html
|
Create wrapper tag for SO html
|
Python
|
mit
|
mdtmc/gso
|
import os
import pickle as pkl
from gso import load_up_answers, load_up_questions
#for result in load_up_questions("How to write a bubble sort", "python"):
#print result
#break
#question_url = 'https://stackoverflow.com/questions/895371/bubble-sort-homework'
#with open("html_dump.pkl", 'wb') as myfile:
#pkl.dump(load_up_answers(question_url), myfile)
html_dump = []
with open("html_dump.pkl", 'rb') as myfile:
html_dump = pkl.load(myfile)
print html_dump
Create wrapper tag for SO html
|
import os
import pickle as pkl
from lxml import etree
from gso import load_up_answers, load_up_questions
#for result in load_up_questions("How to write a bubble sort", "python"):
#print result
#break
#question_url = 'https://stackoverflow.com/questions/895371/bubble-sort-homework'
#with open("html_dump.pkl", 'wb') as myfile:
#pkl.dump(load_up_answers(question_url), myfile)
html_dump = []
with open("html_dump.pkl", 'rb') as myfile:
html_dump = pkl.load(myfile)
def wrapper_tag(xml_string):
xml_string = "<root>"+xml_string+"</root>"
return xml_string
root = etree.fromstring(wrapper_tag(html_dump[0][1]))
print etree.tostring(root)
|
<commit_before>import os
import pickle as pkl
from gso import load_up_answers, load_up_questions
#for result in load_up_questions("How to write a bubble sort", "python"):
#print result
#break
#question_url = 'https://stackoverflow.com/questions/895371/bubble-sort-homework'
#with open("html_dump.pkl", 'wb') as myfile:
#pkl.dump(load_up_answers(question_url), myfile)
html_dump = []
with open("html_dump.pkl", 'rb') as myfile:
html_dump = pkl.load(myfile)
print html_dump
<commit_msg>Create wrapper tag for SO html<commit_after>
|
import os
import pickle as pkl
from lxml import etree
from gso import load_up_answers, load_up_questions
#for result in load_up_questions("How to write a bubble sort", "python"):
#print result
#break
#question_url = 'https://stackoverflow.com/questions/895371/bubble-sort-homework'
#with open("html_dump.pkl", 'wb') as myfile:
#pkl.dump(load_up_answers(question_url), myfile)
html_dump = []
with open("html_dump.pkl", 'rb') as myfile:
html_dump = pkl.load(myfile)
def wrapper_tag(xml_string):
xml_string = "<root>"+xml_string+"</root>"
return xml_string
root = etree.fromstring(wrapper_tag(html_dump[0][1]))
print etree.tostring(root)
|
import os
import pickle as pkl
from gso import load_up_answers, load_up_questions
#for result in load_up_questions("How to write a bubble sort", "python"):
#print result
#break
#question_url = 'https://stackoverflow.com/questions/895371/bubble-sort-homework'
#with open("html_dump.pkl", 'wb') as myfile:
#pkl.dump(load_up_answers(question_url), myfile)
html_dump = []
with open("html_dump.pkl", 'rb') as myfile:
html_dump = pkl.load(myfile)
print html_dump
Create wrapper tag for SO htmlimport os
import pickle as pkl
from lxml import etree
from gso import load_up_answers, load_up_questions
#for result in load_up_questions("How to write a bubble sort", "python"):
#print result
#break
#question_url = 'https://stackoverflow.com/questions/895371/bubble-sort-homework'
#with open("html_dump.pkl", 'wb') as myfile:
#pkl.dump(load_up_answers(question_url), myfile)
html_dump = []
with open("html_dump.pkl", 'rb') as myfile:
html_dump = pkl.load(myfile)
def wrapper_tag(xml_string):
xml_string = "<root>"+xml_string+"</root>"
return xml_string
root = etree.fromstring(wrapper_tag(html_dump[0][1]))
print etree.tostring(root)
|
<commit_before>import os
import pickle as pkl
from gso import load_up_answers, load_up_questions
#for result in load_up_questions("How to write a bubble sort", "python"):
#print result
#break
#question_url = 'https://stackoverflow.com/questions/895371/bubble-sort-homework'
#with open("html_dump.pkl", 'wb') as myfile:
#pkl.dump(load_up_answers(question_url), myfile)
html_dump = []
with open("html_dump.pkl", 'rb') as myfile:
html_dump = pkl.load(myfile)
print html_dump
<commit_msg>Create wrapper tag for SO html<commit_after>import os
import pickle as pkl
from lxml import etree
from gso import load_up_answers, load_up_questions
#for result in load_up_questions("How to write a bubble sort", "python"):
#print result
#break
#question_url = 'https://stackoverflow.com/questions/895371/bubble-sort-homework'
#with open("html_dump.pkl", 'wb') as myfile:
#pkl.dump(load_up_answers(question_url), myfile)
html_dump = []
with open("html_dump.pkl", 'rb') as myfile:
html_dump = pkl.load(myfile)
def wrapper_tag(xml_string):
xml_string = "<root>"+xml_string+"</root>"
return xml_string
root = etree.fromstring(wrapper_tag(html_dump[0][1]))
print etree.tostring(root)
|
3c0fa80bcdd5a493e7415a49566b4eb7524c534b
|
fabfile.py
|
fabfile.py
|
from __future__ import with_statement
from fabric.api import local, cd, env, run
from fabric.colors import green
env.use_ssh_config = True
env.user = 'ubuntu'
env.hosts = [ 'dhlab-backend' ]
PRODUCTION_DIR = 'backend'
SUPERVISOR_NAME = 'dhlab_backend'
MONGODB_NAME = 'dhlab'
def backup_db():
'''Backup local MongoDB database'''
local( 'mongodump -d %s -o _data/dhlab-backup' % ( MONGODB_NAME ) )
def restore_db():
'''Restore MongoDB database from backup. DELETES DATA'''
local( 'mongorestore --drop _data/dhlab-backup' )
def clean():
'''Clean up project directory.'''
local( "find . -name '*.pyc' -delete" )
def deploy():
'''Deploy the backend to the server'''
print green( 'Deploy to EC2 instance...' )
with cd( PRODUCTION_DIR ):
# Stop all running processes
run( 'supervisorctl stop %s' % ( SUPERVISOR_NAME ) )
# Pull latest code from git
run( 'git pull origin master' )
# Start up all processes again
run( 'supervisorctl start all' )
def test():
print green( 'Running tests...' )
local( 'coverage run manage.py test --settings=settings.test' )
print green( 'Generating coverage report...' )
local( 'coverage html --omit="*.pyenvs*"' )
|
from __future__ import with_statement
from fabric.api import local, cd, env, run
from fabric.colors import green
env.use_ssh_config = True
env.user = 'ubuntu'
env.hosts = [ 'dhlab-backend' ]
PRODUCTION_DIR = 'backend'
SUPERVISOR_NAME = 'dhlab_backend'
MONGODB_NAME = 'dhlab'
def backup_db():
'''Backup local MongoDB database'''
local( 'mongodump -d %s -o _data/dhlab-backup' % ( MONGODB_NAME ) )
def restore_db():
'''Restore MongoDB database from backup. DELETES DATA'''
local( 'mongorestore --drop _data/dhlab-backup' )
def clean():
'''Clean up project directory.'''
local( "find . -name '*.pyc' -delete" )
def deploy():
'''Deploy the backend to the server'''
print green( 'Deploy to EC2 instance...' )
with cd( PRODUCTION_DIR ):
# Stop all running processes
run( 'supervisorctl stop %s' % ( SUPERVISOR_NAME ) )
# Pull latest code from git
run( 'git pull origin master' )
# Ensure we have the latest dependencies
run( 'workon dhlab-backend' )
run( 'pip install -r deps.txt' )
# Start up all processes again
run( 'supervisorctl start all' )
def test():
print green( 'Running tests...' )
local( 'coverage run manage.py test --settings=settings.test' )
print green( 'Generating coverage report...' )
local( 'coverage html --omit="*.pyenvs*"' )
|
Deploy script now checks to see if virtualenv has the latest dependencies
|
Deploy script now checks to see if virtualenv has
the latest dependencies
|
Python
|
mit
|
DHLabs/keep,9929105/KEEP,DHLabs/keep,9929105/KEEP,DHLabs/keep,9929105/KEEP
|
from __future__ import with_statement
from fabric.api import local, cd, env, run
from fabric.colors import green
env.use_ssh_config = True
env.user = 'ubuntu'
env.hosts = [ 'dhlab-backend' ]
PRODUCTION_DIR = 'backend'
SUPERVISOR_NAME = 'dhlab_backend'
MONGODB_NAME = 'dhlab'
def backup_db():
'''Backup local MongoDB database'''
local( 'mongodump -d %s -o _data/dhlab-backup' % ( MONGODB_NAME ) )
def restore_db():
'''Restore MongoDB database from backup. DELETES DATA'''
local( 'mongorestore --drop _data/dhlab-backup' )
def clean():
'''Clean up project directory.'''
local( "find . -name '*.pyc' -delete" )
def deploy():
'''Deploy the backend to the server'''
print green( 'Deploy to EC2 instance...' )
with cd( PRODUCTION_DIR ):
# Stop all running processes
run( 'supervisorctl stop %s' % ( SUPERVISOR_NAME ) )
# Pull latest code from git
run( 'git pull origin master' )
# Start up all processes again
run( 'supervisorctl start all' )
def test():
print green( 'Running tests...' )
local( 'coverage run manage.py test --settings=settings.test' )
print green( 'Generating coverage report...' )
local( 'coverage html --omit="*.pyenvs*"' )
Deploy script now checks to see if virtualenv has
the latest dependencies
|
from __future__ import with_statement
from fabric.api import local, cd, env, run
from fabric.colors import green
env.use_ssh_config = True
env.user = 'ubuntu'
env.hosts = [ 'dhlab-backend' ]
PRODUCTION_DIR = 'backend'
SUPERVISOR_NAME = 'dhlab_backend'
MONGODB_NAME = 'dhlab'
def backup_db():
'''Backup local MongoDB database'''
local( 'mongodump -d %s -o _data/dhlab-backup' % ( MONGODB_NAME ) )
def restore_db():
'''Restore MongoDB database from backup. DELETES DATA'''
local( 'mongorestore --drop _data/dhlab-backup' )
def clean():
'''Clean up project directory.'''
local( "find . -name '*.pyc' -delete" )
def deploy():
'''Deploy the backend to the server'''
print green( 'Deploy to EC2 instance...' )
with cd( PRODUCTION_DIR ):
# Stop all running processes
run( 'supervisorctl stop %s' % ( SUPERVISOR_NAME ) )
# Pull latest code from git
run( 'git pull origin master' )
# Ensure we have the latest dependencies
run( 'workon dhlab-backend' )
run( 'pip install -r deps.txt' )
# Start up all processes again
run( 'supervisorctl start all' )
def test():
print green( 'Running tests...' )
local( 'coverage run manage.py test --settings=settings.test' )
print green( 'Generating coverage report...' )
local( 'coverage html --omit="*.pyenvs*"' )
|
<commit_before>from __future__ import with_statement
from fabric.api import local, cd, env, run
from fabric.colors import green
env.use_ssh_config = True
env.user = 'ubuntu'
env.hosts = [ 'dhlab-backend' ]
PRODUCTION_DIR = 'backend'
SUPERVISOR_NAME = 'dhlab_backend'
MONGODB_NAME = 'dhlab'
def backup_db():
'''Backup local MongoDB database'''
local( 'mongodump -d %s -o _data/dhlab-backup' % ( MONGODB_NAME ) )
def restore_db():
'''Restore MongoDB database from backup. DELETES DATA'''
local( 'mongorestore --drop _data/dhlab-backup' )
def clean():
'''Clean up project directory.'''
local( "find . -name '*.pyc' -delete" )
def deploy():
'''Deploy the backend to the server'''
print green( 'Deploy to EC2 instance...' )
with cd( PRODUCTION_DIR ):
# Stop all running processes
run( 'supervisorctl stop %s' % ( SUPERVISOR_NAME ) )
# Pull latest code from git
run( 'git pull origin master' )
# Start up all processes again
run( 'supervisorctl start all' )
def test():
print green( 'Running tests...' )
local( 'coverage run manage.py test --settings=settings.test' )
print green( 'Generating coverage report...' )
local( 'coverage html --omit="*.pyenvs*"' )
<commit_msg>Deploy script now checks to see if virtualenv has
the latest dependencies<commit_after>
|
from __future__ import with_statement
from fabric.api import local, cd, env, run
from fabric.colors import green
env.use_ssh_config = True
env.user = 'ubuntu'
env.hosts = [ 'dhlab-backend' ]
PRODUCTION_DIR = 'backend'
SUPERVISOR_NAME = 'dhlab_backend'
MONGODB_NAME = 'dhlab'
def backup_db():
'''Backup local MongoDB database'''
local( 'mongodump -d %s -o _data/dhlab-backup' % ( MONGODB_NAME ) )
def restore_db():
'''Restore MongoDB database from backup. DELETES DATA'''
local( 'mongorestore --drop _data/dhlab-backup' )
def clean():
'''Clean up project directory.'''
local( "find . -name '*.pyc' -delete" )
def deploy():
'''Deploy the backend to the server'''
print green( 'Deploy to EC2 instance...' )
with cd( PRODUCTION_DIR ):
# Stop all running processes
run( 'supervisorctl stop %s' % ( SUPERVISOR_NAME ) )
# Pull latest code from git
run( 'git pull origin master' )
# Ensure we have the latest dependencies
run( 'workon dhlab-backend' )
run( 'pip install -r deps.txt' )
# Start up all processes again
run( 'supervisorctl start all' )
def test():
print green( 'Running tests...' )
local( 'coverage run manage.py test --settings=settings.test' )
print green( 'Generating coverage report...' )
local( 'coverage html --omit="*.pyenvs*"' )
|
from __future__ import with_statement
from fabric.api import local, cd, env, run
from fabric.colors import green
env.use_ssh_config = True
env.user = 'ubuntu'
env.hosts = [ 'dhlab-backend' ]
PRODUCTION_DIR = 'backend'
SUPERVISOR_NAME = 'dhlab_backend'
MONGODB_NAME = 'dhlab'
def backup_db():
'''Backup local MongoDB database'''
local( 'mongodump -d %s -o _data/dhlab-backup' % ( MONGODB_NAME ) )
def restore_db():
'''Restore MongoDB database from backup. DELETES DATA'''
local( 'mongorestore --drop _data/dhlab-backup' )
def clean():
'''Clean up project directory.'''
local( "find . -name '*.pyc' -delete" )
def deploy():
'''Deploy the backend to the server'''
print green( 'Deploy to EC2 instance...' )
with cd( PRODUCTION_DIR ):
# Stop all running processes
run( 'supervisorctl stop %s' % ( SUPERVISOR_NAME ) )
# Pull latest code from git
run( 'git pull origin master' )
# Start up all processes again
run( 'supervisorctl start all' )
def test():
print green( 'Running tests...' )
local( 'coverage run manage.py test --settings=settings.test' )
print green( 'Generating coverage report...' )
local( 'coverage html --omit="*.pyenvs*"' )
Deploy script now checks to see if virtualenv has
the latest dependenciesfrom __future__ import with_statement
from fabric.api import local, cd, env, run
from fabric.colors import green
env.use_ssh_config = True
env.user = 'ubuntu'
env.hosts = [ 'dhlab-backend' ]
PRODUCTION_DIR = 'backend'
SUPERVISOR_NAME = 'dhlab_backend'
MONGODB_NAME = 'dhlab'
def backup_db():
'''Backup local MongoDB database'''
local( 'mongodump -d %s -o _data/dhlab-backup' % ( MONGODB_NAME ) )
def restore_db():
'''Restore MongoDB database from backup. DELETES DATA'''
local( 'mongorestore --drop _data/dhlab-backup' )
def clean():
'''Clean up project directory.'''
local( "find . -name '*.pyc' -delete" )
def deploy():
'''Deploy the backend to the server'''
print green( 'Deploy to EC2 instance...' )
with cd( PRODUCTION_DIR ):
# Stop all running processes
run( 'supervisorctl stop %s' % ( SUPERVISOR_NAME ) )
# Pull latest code from git
run( 'git pull origin master' )
# Ensure we have the latest dependencies
run( 'workon dhlab-backend' )
run( 'pip install -r deps.txt' )
# Start up all processes again
run( 'supervisorctl start all' )
def test():
print green( 'Running tests...' )
local( 'coverage run manage.py test --settings=settings.test' )
print green( 'Generating coverage report...' )
local( 'coverage html --omit="*.pyenvs*"' )
|
<commit_before>from __future__ import with_statement
from fabric.api import local, cd, env, run
from fabric.colors import green
env.use_ssh_config = True
env.user = 'ubuntu'
env.hosts = [ 'dhlab-backend' ]
PRODUCTION_DIR = 'backend'
SUPERVISOR_NAME = 'dhlab_backend'
MONGODB_NAME = 'dhlab'
def backup_db():
'''Backup local MongoDB database'''
local( 'mongodump -d %s -o _data/dhlab-backup' % ( MONGODB_NAME ) )
def restore_db():
'''Restore MongoDB database from backup. DELETES DATA'''
local( 'mongorestore --drop _data/dhlab-backup' )
def clean():
'''Clean up project directory.'''
local( "find . -name '*.pyc' -delete" )
def deploy():
'''Deploy the backend to the server'''
print green( 'Deploy to EC2 instance...' )
with cd( PRODUCTION_DIR ):
# Stop all running processes
run( 'supervisorctl stop %s' % ( SUPERVISOR_NAME ) )
# Pull latest code from git
run( 'git pull origin master' )
# Start up all processes again
run( 'supervisorctl start all' )
def test():
print green( 'Running tests...' )
local( 'coverage run manage.py test --settings=settings.test' )
print green( 'Generating coverage report...' )
local( 'coverage html --omit="*.pyenvs*"' )
<commit_msg>Deploy script now checks to see if virtualenv has
the latest dependencies<commit_after>from __future__ import with_statement
from fabric.api import local, cd, env, run
from fabric.colors import green
env.use_ssh_config = True
env.user = 'ubuntu'
env.hosts = [ 'dhlab-backend' ]
PRODUCTION_DIR = 'backend'
SUPERVISOR_NAME = 'dhlab_backend'
MONGODB_NAME = 'dhlab'
def backup_db():
'''Backup local MongoDB database'''
local( 'mongodump -d %s -o _data/dhlab-backup' % ( MONGODB_NAME ) )
def restore_db():
'''Restore MongoDB database from backup. DELETES DATA'''
local( 'mongorestore --drop _data/dhlab-backup' )
def clean():
'''Clean up project directory.'''
local( "find . -name '*.pyc' -delete" )
def deploy():
'''Deploy the backend to the server'''
print green( 'Deploy to EC2 instance...' )
with cd( PRODUCTION_DIR ):
# Stop all running processes
run( 'supervisorctl stop %s' % ( SUPERVISOR_NAME ) )
# Pull latest code from git
run( 'git pull origin master' )
# Ensure we have the latest dependencies
run( 'workon dhlab-backend' )
run( 'pip install -r deps.txt' )
# Start up all processes again
run( 'supervisorctl start all' )
def test():
print green( 'Running tests...' )
local( 'coverage run manage.py test --settings=settings.test' )
print green( 'Generating coverage report...' )
local( 'coverage html --omit="*.pyenvs*"' )
|
423ea9128f01eb74790a3bb5a876c066acc9c2c1
|
firesim.py
|
firesim.py
|
import functools
import signal
import sys
import logging as log
from firesimgui import FireSimGUI
from lib.arguments import parse_args
def sig_handler(app, sig, frame):
log.info("Firesim received signal %d. Shutting down.", sig)
try:
app.quit()
except Exception:
log.exception("Ignoring exception during shutdown request")
def main():
log.basicConfig(level=log.WARN)
log.info("Booting FireSim...")
args = parse_args()
sim = FireSimGUI(args)
signal.signal(signal.SIGINT, functools.partial(sig_handler, sim))
sys.exit(sim.run())
if __name__ == "__main__":
main()
|
#!/usr/bin/env python3
import functools
import signal
import sys
import logging as log
from firesimgui import FireSimGUI
from lib.arguments import parse_args
def sig_handler(app, sig, frame):
log.info("Firesim received signal %d. Shutting down.", sig)
try:
app.quit()
except Exception:
log.exception("Ignoring exception during shutdown request")
def main():
log.basicConfig(level=log.WARN)
log.info("Booting FireSim...")
args = parse_args()
sim = FireSimGUI(args)
signal.signal(signal.SIGINT, functools.partial(sig_handler, sim))
sys.exit(sim.run())
if __name__ == "__main__":
main()
|
Add shebang to main script and switch to Unix line endings
|
Add shebang to main script and switch to Unix line endings
|
Python
|
mit
|
Openlights/firesim
|
import functools
import signal
import sys
import logging as log
from firesimgui import FireSimGUI
from lib.arguments import parse_args
def sig_handler(app, sig, frame):
log.info("Firesim received signal %d. Shutting down.", sig)
try:
app.quit()
except Exception:
log.exception("Ignoring exception during shutdown request")
def main():
log.basicConfig(level=log.WARN)
log.info("Booting FireSim...")
args = parse_args()
sim = FireSimGUI(args)
signal.signal(signal.SIGINT, functools.partial(sig_handler, sim))
sys.exit(sim.run())
if __name__ == "__main__":
main()
Add shebang to main script and switch to Unix line endings
|
#!/usr/bin/env python3
import functools
import signal
import sys
import logging as log
from firesimgui import FireSimGUI
from lib.arguments import parse_args
def sig_handler(app, sig, frame):
log.info("Firesim received signal %d. Shutting down.", sig)
try:
app.quit()
except Exception:
log.exception("Ignoring exception during shutdown request")
def main():
log.basicConfig(level=log.WARN)
log.info("Booting FireSim...")
args = parse_args()
sim = FireSimGUI(args)
signal.signal(signal.SIGINT, functools.partial(sig_handler, sim))
sys.exit(sim.run())
if __name__ == "__main__":
main()
|
<commit_before>import functools
import signal
import sys
import logging as log
from firesimgui import FireSimGUI
from lib.arguments import parse_args
def sig_handler(app, sig, frame):
log.info("Firesim received signal %d. Shutting down.", sig)
try:
app.quit()
except Exception:
log.exception("Ignoring exception during shutdown request")
def main():
log.basicConfig(level=log.WARN)
log.info("Booting FireSim...")
args = parse_args()
sim = FireSimGUI(args)
signal.signal(signal.SIGINT, functools.partial(sig_handler, sim))
sys.exit(sim.run())
if __name__ == "__main__":
main()
<commit_msg>Add shebang to main script and switch to Unix line endings<commit_after>
|
#!/usr/bin/env python3
import functools
import signal
import sys
import logging as log
from firesimgui import FireSimGUI
from lib.arguments import parse_args
def sig_handler(app, sig, frame):
log.info("Firesim received signal %d. Shutting down.", sig)
try:
app.quit()
except Exception:
log.exception("Ignoring exception during shutdown request")
def main():
log.basicConfig(level=log.WARN)
log.info("Booting FireSim...")
args = parse_args()
sim = FireSimGUI(args)
signal.signal(signal.SIGINT, functools.partial(sig_handler, sim))
sys.exit(sim.run())
if __name__ == "__main__":
main()
|
import functools
import signal
import sys
import logging as log
from firesimgui import FireSimGUI
from lib.arguments import parse_args
def sig_handler(app, sig, frame):
log.info("Firesim received signal %d. Shutting down.", sig)
try:
app.quit()
except Exception:
log.exception("Ignoring exception during shutdown request")
def main():
log.basicConfig(level=log.WARN)
log.info("Booting FireSim...")
args = parse_args()
sim = FireSimGUI(args)
signal.signal(signal.SIGINT, functools.partial(sig_handler, sim))
sys.exit(sim.run())
if __name__ == "__main__":
main()
Add shebang to main script and switch to Unix line endings#!/usr/bin/env python3
import functools
import signal
import sys
import logging as log
from firesimgui import FireSimGUI
from lib.arguments import parse_args
def sig_handler(app, sig, frame):
log.info("Firesim received signal %d. Shutting down.", sig)
try:
app.quit()
except Exception:
log.exception("Ignoring exception during shutdown request")
def main():
log.basicConfig(level=log.WARN)
log.info("Booting FireSim...")
args = parse_args()
sim = FireSimGUI(args)
signal.signal(signal.SIGINT, functools.partial(sig_handler, sim))
sys.exit(sim.run())
if __name__ == "__main__":
main()
|
<commit_before>import functools
import signal
import sys
import logging as log
from firesimgui import FireSimGUI
from lib.arguments import parse_args
def sig_handler(app, sig, frame):
log.info("Firesim received signal %d. Shutting down.", sig)
try:
app.quit()
except Exception:
log.exception("Ignoring exception during shutdown request")
def main():
log.basicConfig(level=log.WARN)
log.info("Booting FireSim...")
args = parse_args()
sim = FireSimGUI(args)
signal.signal(signal.SIGINT, functools.partial(sig_handler, sim))
sys.exit(sim.run())
if __name__ == "__main__":
main()
<commit_msg>Add shebang to main script and switch to Unix line endings<commit_after>#!/usr/bin/env python3
import functools
import signal
import sys
import logging as log
from firesimgui import FireSimGUI
from lib.arguments import parse_args
def sig_handler(app, sig, frame):
log.info("Firesim received signal %d. Shutting down.", sig)
try:
app.quit()
except Exception:
log.exception("Ignoring exception during shutdown request")
def main():
log.basicConfig(level=log.WARN)
log.info("Booting FireSim...")
args = parse_args()
sim = FireSimGUI(args)
signal.signal(signal.SIGINT, functools.partial(sig_handler, sim))
sys.exit(sim.run())
if __name__ == "__main__":
main()
|
a99378deee9a802bf107d11e79d2df2f77481495
|
silver/tests/spec/test_plan.py
|
silver/tests/spec/test_plan.py
|
# -*- coding: utf-8 -*-
# vim: ft=python:sw=4:ts=4:sts=4:et:
import json
from silver.models import Plan
from django.test.client import Client
from django.test import TestCase
class PlansSpecificationTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_create_plan(self):
response = self.client.put('/api/plans', json.dumps({
'name': 'Hydrogen',
'interval': 'month',
'interval_count': 1,
'amount': 150,
'currency': 'USD',
'trial_period_days': 15,
'metered_features': [
{
'name': '100k PageViews',
'price_per_unit': 10,
'included_units': 5
}
],
'due_days': 10,
'generate_after': 86400
}), content_type='application/json')
plan = Plan.objects.filter(name='Hydrogen')
self.assertEqual(plan.count(), 1)
self.assertEqual(response.status_code, 201)
|
# -*- coding: utf-8 -*-
# vim: ft=python:sw=4:ts=4:sts=4:et:
import json
from silver.models import Plan
from django.test.client import Client
from django.test import TestCase
class PlansSpecificationTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_create_plan(self):
assert True
# response = self.client.put('/api/plans', json.dumps({
# 'name': 'Hydrogen',
# 'interval': 'month',
# 'interval_count': 1,
# 'amount': 150,
# 'currency': 'USD',
# 'trial_period_days': 15,
# 'metered_features': [
# {
# 'name': '100k PageViews',
# 'price_per_unit': 10,
# 'included_units': 5
# }
# ],
# 'due_days': 10,
# 'generate_after': 86400
# }), content_type='application/json')
# plan = Plan.objects.filter(name='Hydrogen')
# self.assertEqual(plan.count(), 1)
# self.assertEqual(response.status_code, 201)
|
Comment out the failing Plan test
|
Comment out the failing Plan test
|
Python
|
apache-2.0
|
PressLabs/silver,PressLabs/silver,PressLabs/silver
|
# -*- coding: utf-8 -*-
# vim: ft=python:sw=4:ts=4:sts=4:et:
import json
from silver.models import Plan
from django.test.client import Client
from django.test import TestCase
class PlansSpecificationTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_create_plan(self):
response = self.client.put('/api/plans', json.dumps({
'name': 'Hydrogen',
'interval': 'month',
'interval_count': 1,
'amount': 150,
'currency': 'USD',
'trial_period_days': 15,
'metered_features': [
{
'name': '100k PageViews',
'price_per_unit': 10,
'included_units': 5
}
],
'due_days': 10,
'generate_after': 86400
}), content_type='application/json')
plan = Plan.objects.filter(name='Hydrogen')
self.assertEqual(plan.count(), 1)
self.assertEqual(response.status_code, 201)
Comment out the failing Plan test
|
# -*- coding: utf-8 -*-
# vim: ft=python:sw=4:ts=4:sts=4:et:
import json
from silver.models import Plan
from django.test.client import Client
from django.test import TestCase
class PlansSpecificationTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_create_plan(self):
assert True
# response = self.client.put('/api/plans', json.dumps({
# 'name': 'Hydrogen',
# 'interval': 'month',
# 'interval_count': 1,
# 'amount': 150,
# 'currency': 'USD',
# 'trial_period_days': 15,
# 'metered_features': [
# {
# 'name': '100k PageViews',
# 'price_per_unit': 10,
# 'included_units': 5
# }
# ],
# 'due_days': 10,
# 'generate_after': 86400
# }), content_type='application/json')
# plan = Plan.objects.filter(name='Hydrogen')
# self.assertEqual(plan.count(), 1)
# self.assertEqual(response.status_code, 201)
|
<commit_before># -*- coding: utf-8 -*-
# vim: ft=python:sw=4:ts=4:sts=4:et:
import json
from silver.models import Plan
from django.test.client import Client
from django.test import TestCase
class PlansSpecificationTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_create_plan(self):
response = self.client.put('/api/plans', json.dumps({
'name': 'Hydrogen',
'interval': 'month',
'interval_count': 1,
'amount': 150,
'currency': 'USD',
'trial_period_days': 15,
'metered_features': [
{
'name': '100k PageViews',
'price_per_unit': 10,
'included_units': 5
}
],
'due_days': 10,
'generate_after': 86400
}), content_type='application/json')
plan = Plan.objects.filter(name='Hydrogen')
self.assertEqual(plan.count(), 1)
self.assertEqual(response.status_code, 201)
<commit_msg>Comment out the failing Plan test<commit_after>
|
# -*- coding: utf-8 -*-
# vim: ft=python:sw=4:ts=4:sts=4:et:
import json
from silver.models import Plan
from django.test.client import Client
from django.test import TestCase
class PlansSpecificationTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_create_plan(self):
assert True
# response = self.client.put('/api/plans', json.dumps({
# 'name': 'Hydrogen',
# 'interval': 'month',
# 'interval_count': 1,
# 'amount': 150,
# 'currency': 'USD',
# 'trial_period_days': 15,
# 'metered_features': [
# {
# 'name': '100k PageViews',
# 'price_per_unit': 10,
# 'included_units': 5
# }
# ],
# 'due_days': 10,
# 'generate_after': 86400
# }), content_type='application/json')
# plan = Plan.objects.filter(name='Hydrogen')
# self.assertEqual(plan.count(), 1)
# self.assertEqual(response.status_code, 201)
|
# -*- coding: utf-8 -*-
# vim: ft=python:sw=4:ts=4:sts=4:et:
import json
from silver.models import Plan
from django.test.client import Client
from django.test import TestCase
class PlansSpecificationTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_create_plan(self):
response = self.client.put('/api/plans', json.dumps({
'name': 'Hydrogen',
'interval': 'month',
'interval_count': 1,
'amount': 150,
'currency': 'USD',
'trial_period_days': 15,
'metered_features': [
{
'name': '100k PageViews',
'price_per_unit': 10,
'included_units': 5
}
],
'due_days': 10,
'generate_after': 86400
}), content_type='application/json')
plan = Plan.objects.filter(name='Hydrogen')
self.assertEqual(plan.count(), 1)
self.assertEqual(response.status_code, 201)
Comment out the failing Plan test# -*- coding: utf-8 -*-
# vim: ft=python:sw=4:ts=4:sts=4:et:
import json
from silver.models import Plan
from django.test.client import Client
from django.test import TestCase
class PlansSpecificationTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_create_plan(self):
assert True
# response = self.client.put('/api/plans', json.dumps({
# 'name': 'Hydrogen',
# 'interval': 'month',
# 'interval_count': 1,
# 'amount': 150,
# 'currency': 'USD',
# 'trial_period_days': 15,
# 'metered_features': [
# {
# 'name': '100k PageViews',
# 'price_per_unit': 10,
# 'included_units': 5
# }
# ],
# 'due_days': 10,
# 'generate_after': 86400
# }), content_type='application/json')
# plan = Plan.objects.filter(name='Hydrogen')
# self.assertEqual(plan.count(), 1)
# self.assertEqual(response.status_code, 201)
|
<commit_before># -*- coding: utf-8 -*-
# vim: ft=python:sw=4:ts=4:sts=4:et:
import json
from silver.models import Plan
from django.test.client import Client
from django.test import TestCase
class PlansSpecificationTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_create_plan(self):
response = self.client.put('/api/plans', json.dumps({
'name': 'Hydrogen',
'interval': 'month',
'interval_count': 1,
'amount': 150,
'currency': 'USD',
'trial_period_days': 15,
'metered_features': [
{
'name': '100k PageViews',
'price_per_unit': 10,
'included_units': 5
}
],
'due_days': 10,
'generate_after': 86400
}), content_type='application/json')
plan = Plan.objects.filter(name='Hydrogen')
self.assertEqual(plan.count(), 1)
self.assertEqual(response.status_code, 201)
<commit_msg>Comment out the failing Plan test<commit_after># -*- coding: utf-8 -*-
# vim: ft=python:sw=4:ts=4:sts=4:et:
import json
from silver.models import Plan
from django.test.client import Client
from django.test import TestCase
class PlansSpecificationTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_create_plan(self):
assert True
# response = self.client.put('/api/plans', json.dumps({
# 'name': 'Hydrogen',
# 'interval': 'month',
# 'interval_count': 1,
# 'amount': 150,
# 'currency': 'USD',
# 'trial_period_days': 15,
# 'metered_features': [
# {
# 'name': '100k PageViews',
# 'price_per_unit': 10,
# 'included_units': 5
# }
# ],
# 'due_days': 10,
# 'generate_after': 86400
# }), content_type='application/json')
# plan = Plan.objects.filter(name='Hydrogen')
# self.assertEqual(plan.count(), 1)
# self.assertEqual(response.status_code, 201)
|
7726e51f2e3bb028700e5fc61779f6edc53cee36
|
scripts/init_tree.py
|
scripts/init_tree.py
|
import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
print 'Your CWD is', cwd
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
#os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.symlink(os.path.abspath('FRENSIE'), 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('deps/tars')
os.mkdir('build')
shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile')
shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
|
import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
print 'Your CWD is', cwd
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
#os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.symlink(os.path.abspath('FRENSIE'), 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('deps/tars')
os.mkdir('build')
shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile')
shutil.copyfile('src/scripts/lazy.sh', 'deps/lazy.sh')
shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh')
#shutil.copyfile('src/scripts/source_deps.sh', 'build/source_deps.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
|
Update to copy new scripts
|
Update to copy new scripts
|
Python
|
bsd-3-clause
|
lkersting/SCR-2123,lkersting/SCR-2123,lkersting/SCR-2123,lkersting/SCR-2123
|
import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
print 'Your CWD is', cwd
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
#os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.symlink(os.path.abspath('FRENSIE'), 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('deps/tars')
os.mkdir('build')
shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile')
shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
Update to copy new scripts
|
import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
print 'Your CWD is', cwd
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
#os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.symlink(os.path.abspath('FRENSIE'), 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('deps/tars')
os.mkdir('build')
shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile')
shutil.copyfile('src/scripts/lazy.sh', 'deps/lazy.sh')
shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh')
#shutil.copyfile('src/scripts/source_deps.sh', 'build/source_deps.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
|
<commit_before>import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
print 'Your CWD is', cwd
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
#os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.symlink(os.path.abspath('FRENSIE'), 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('deps/tars')
os.mkdir('build')
shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile')
shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
<commit_msg>Update to copy new scripts<commit_after>
|
import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
print 'Your CWD is', cwd
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
#os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.symlink(os.path.abspath('FRENSIE'), 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('deps/tars')
os.mkdir('build')
shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile')
shutil.copyfile('src/scripts/lazy.sh', 'deps/lazy.sh')
shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh')
#shutil.copyfile('src/scripts/source_deps.sh', 'build/source_deps.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
|
import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
print 'Your CWD is', cwd
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
#os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.symlink(os.path.abspath('FRENSIE'), 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('deps/tars')
os.mkdir('build')
shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile')
shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
Update to copy new scriptsimport os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
print 'Your CWD is', cwd
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
#os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.symlink(os.path.abspath('FRENSIE'), 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('deps/tars')
os.mkdir('build')
shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile')
shutil.copyfile('src/scripts/lazy.sh', 'deps/lazy.sh')
shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh')
#shutil.copyfile('src/scripts/source_deps.sh', 'build/source_deps.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
|
<commit_before>import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
print 'Your CWD is', cwd
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
#os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.symlink(os.path.abspath('FRENSIE'), 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('deps/tars')
os.mkdir('build')
shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile')
shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
<commit_msg>Update to copy new scripts<commit_after>import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
print 'Your CWD is', cwd
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
#os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.symlink(os.path.abspath('FRENSIE'), 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('deps/tars')
os.mkdir('build')
shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile')
shutil.copyfile('src/scripts/lazy.sh', 'deps/lazy.sh')
shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh')
#shutil.copyfile('src/scripts/source_deps.sh', 'build/source_deps.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
|
454c3228db731280eeed8d22c6811c2810018222
|
export_layers/pygimplib/lib/__init__.py
|
export_layers/pygimplib/lib/__init__.py
|
#-------------------------------------------------------------------------------
#
# This file is part of pygimplib.
#
# Copyright (C) 2014, 2015 khalim19 <khalim19@gmail.com>
#
# pygimplib is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pygimplib is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pygimplib. If not, see <http://www.gnu.org/licenses/>.
#
#-------------------------------------------------------------------------------
# empty
|
#-------------------------------------------------------------------------------
#
# This file is part of pygimplib.
#
# Copyright (C) 2014, 2015 khalim19 <khalim19@gmail.com>
#
# pygimplib is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pygimplib is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pygimplib. If not, see <http://www.gnu.org/licenses/>.
#
#-------------------------------------------------------------------------------
"""
This package contains external libraries used in the `pygimplib` library.
"""
|
Add description for `lib` package
|
Add description for `lib` package
|
Python
|
bsd-3-clause
|
khalim19/gimp-plugin-export-layers,khalim19/gimp-plugin-export-layers
|
#-------------------------------------------------------------------------------
#
# This file is part of pygimplib.
#
# Copyright (C) 2014, 2015 khalim19 <khalim19@gmail.com>
#
# pygimplib is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pygimplib is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pygimplib. If not, see <http://www.gnu.org/licenses/>.
#
#-------------------------------------------------------------------------------
# empty
Add description for `lib` package
|
#-------------------------------------------------------------------------------
#
# This file is part of pygimplib.
#
# Copyright (C) 2014, 2015 khalim19 <khalim19@gmail.com>
#
# pygimplib is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pygimplib is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pygimplib. If not, see <http://www.gnu.org/licenses/>.
#
#-------------------------------------------------------------------------------
"""
This package contains external libraries used in the `pygimplib` library.
"""
|
<commit_before>#-------------------------------------------------------------------------------
#
# This file is part of pygimplib.
#
# Copyright (C) 2014, 2015 khalim19 <khalim19@gmail.com>
#
# pygimplib is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pygimplib is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pygimplib. If not, see <http://www.gnu.org/licenses/>.
#
#-------------------------------------------------------------------------------
# empty
<commit_msg>Add description for `lib` package<commit_after>
|
#-------------------------------------------------------------------------------
#
# This file is part of pygimplib.
#
# Copyright (C) 2014, 2015 khalim19 <khalim19@gmail.com>
#
# pygimplib is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pygimplib is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pygimplib. If not, see <http://www.gnu.org/licenses/>.
#
#-------------------------------------------------------------------------------
"""
This package contains external libraries used in the `pygimplib` library.
"""
|
#-------------------------------------------------------------------------------
#
# This file is part of pygimplib.
#
# Copyright (C) 2014, 2015 khalim19 <khalim19@gmail.com>
#
# pygimplib is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pygimplib is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pygimplib. If not, see <http://www.gnu.org/licenses/>.
#
#-------------------------------------------------------------------------------
# empty
Add description for `lib` package#-------------------------------------------------------------------------------
#
# This file is part of pygimplib.
#
# Copyright (C) 2014, 2015 khalim19 <khalim19@gmail.com>
#
# pygimplib is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pygimplib is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pygimplib. If not, see <http://www.gnu.org/licenses/>.
#
#-------------------------------------------------------------------------------
"""
This package contains external libraries used in the `pygimplib` library.
"""
|
<commit_before>#-------------------------------------------------------------------------------
#
# This file is part of pygimplib.
#
# Copyright (C) 2014, 2015 khalim19 <khalim19@gmail.com>
#
# pygimplib is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pygimplib is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pygimplib. If not, see <http://www.gnu.org/licenses/>.
#
#-------------------------------------------------------------------------------
# empty
<commit_msg>Add description for `lib` package<commit_after>#-------------------------------------------------------------------------------
#
# This file is part of pygimplib.
#
# Copyright (C) 2014, 2015 khalim19 <khalim19@gmail.com>
#
# pygimplib is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pygimplib is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pygimplib. If not, see <http://www.gnu.org/licenses/>.
#
#-------------------------------------------------------------------------------
"""
This package contains external libraries used in the `pygimplib` library.
"""
|
1b07cb1ec2fbe48af4f38a225c2237846ce8b314
|
pyramid_es/tests/__init__.py
|
pyramid_es/tests/__init__.py
|
import logging
def setUp():
log = logging.getLogger('elasticsearch.trace')
log.setLevel(logging.CRITICAL)
|
import logging
def setUp():
log = logging.getLogger('elasticsearch.trace')
log.addHandler(logging.NullHandler())
|
Use a better method for silencing 'no handlers found' error
|
Use a better method for silencing 'no handlers found' error
|
Python
|
mit
|
storborg/pyramid_es
|
import logging
def setUp():
log = logging.getLogger('elasticsearch.trace')
log.setLevel(logging.CRITICAL)
Use a better method for silencing 'no handlers found' error
|
import logging
def setUp():
log = logging.getLogger('elasticsearch.trace')
log.addHandler(logging.NullHandler())
|
<commit_before>import logging
def setUp():
log = logging.getLogger('elasticsearch.trace')
log.setLevel(logging.CRITICAL)
<commit_msg>Use a better method for silencing 'no handlers found' error<commit_after>
|
import logging
def setUp():
log = logging.getLogger('elasticsearch.trace')
log.addHandler(logging.NullHandler())
|
import logging
def setUp():
log = logging.getLogger('elasticsearch.trace')
log.setLevel(logging.CRITICAL)
Use a better method for silencing 'no handlers found' errorimport logging
def setUp():
log = logging.getLogger('elasticsearch.trace')
log.addHandler(logging.NullHandler())
|
<commit_before>import logging
def setUp():
log = logging.getLogger('elasticsearch.trace')
log.setLevel(logging.CRITICAL)
<commit_msg>Use a better method for silencing 'no handlers found' error<commit_after>import logging
def setUp():
log = logging.getLogger('elasticsearch.trace')
log.addHandler(logging.NullHandler())
|
87955b791e702b67afb61eae0cc7abfbde338993
|
Python/Product/PythonTools/ptvsd/setup.py
|
Python/Product/PythonTools/ptvsd/setup.py
|
#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
setup(name='ptvsd',
version='2.1.0',
description='Python Tools for Visual Studio remote debugging server',
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://pytools.codeplex.com/',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License'],
packages=['ptvsd']
)
|
#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
setup(name='ptvsd',
version='2.2.0b1',
description='Python Tools for Visual Studio remote debugging server',
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://aka.ms/ptvs',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License'],
packages=['ptvsd']
)
|
Update ptvsd version number for 2.2 beta.
|
Update ptvsd version number for 2.2 beta.
|
Python
|
apache-2.0
|
Habatchii/PTVS,DEVSENSE/PTVS,denfromufa/PTVS,alanch-ms/PTVS,zooba/PTVS,MetSystem/PTVS,xNUTs/PTVS,Habatchii/PTVS,DinoV/PTVS,crwilcox/PTVS,bolabola/PTVS,DEVSENSE/PTVS,bolabola/PTVS,bolabola/PTVS,int19h/PTVS,crwilcox/PTVS,dut3062796s/PTVS,zooba/PTVS,juanyaw/PTVS,Microsoft/PTVS,Habatchii/PTVS,msunardi/PTVS,msunardi/PTVS,MetSystem/PTVS,christer155/PTVS,xNUTs/PTVS,denfromufa/PTVS,juanyaw/PTVS,crwilcox/PTVS,bolabola/PTVS,alanch-ms/PTVS,jkorell/PTVS,ChinaQuants/PTVS,mlorbetske/PTVS,dut3062796s/PTVS,int19h/PTVS,bolabola/PTVS,xNUTs/PTVS,fivejjs/PTVS,Microsoft/PTVS,christer155/PTVS,gomiero/PTVS,huguesv/PTVS,christer155/PTVS,Microsoft/PTVS,gilbertw/PTVS,fjxhkj/PTVS,gilbertw/PTVS,Microsoft/PTVS,Habatchii/PTVS,christer155/PTVS,juanyaw/PTVS,fivejjs/PTVS,gilbertw/PTVS,mlorbetske/PTVS,ChinaQuants/PTVS,gilbertw/PTVS,fivejjs/PTVS,juanyaw/PTVS,DinoV/PTVS,gomiero/PTVS,modulexcite/PTVS,fivejjs/PTVS,DinoV/PTVS,denfromufa/PTVS,denfromufa/PTVS,huguesv/PTVS,int19h/PTVS,MetSystem/PTVS,alanch-ms/PTVS,gilbertw/PTVS,gomiero/PTVS,int19h/PTVS,zooba/PTVS,xNUTs/PTVS,christer155/PTVS,mlorbetske/PTVS,fivejjs/PTVS,modulexcite/PTVS,zooba/PTVS,alanch-ms/PTVS,int19h/PTVS,dut3062796s/PTVS,crwilcox/PTVS,mlorbetske/PTVS,DEVSENSE/PTVS,MetSystem/PTVS,gilbertw/PTVS,jkorell/PTVS,gomiero/PTVS,jkorell/PTVS,DEVSENSE/PTVS,christer155/PTVS,fjxhkj/PTVS,fjxhkj/PTVS,MetSystem/PTVS,huguesv/PTVS,dut3062796s/PTVS,modulexcite/PTVS,modulexcite/PTVS,ChinaQuants/PTVS,gomiero/PTVS,denfromufa/PTVS,msunardi/PTVS,Microsoft/PTVS,dut3062796s/PTVS,DinoV/PTVS,DinoV/PTVS,zooba/PTVS,msunardi/PTVS,juanyaw/PTVS,Microsoft/PTVS,alanch-ms/PTVS,mlorbetske/PTVS,xNUTs/PTVS,huguesv/PTVS,Habatchii/PTVS,MetSystem/PTVS,jkorell/PTVS,Habatchii/PTVS,crwilcox/PTVS,msunardi/PTVS,jkorell/PTVS,huguesv/PTVS,jkorell/PTVS,fjxhkj/PTVS,modulexcite/PTVS,fjxhkj/PTVS,dut3062796s/PTVS,xNUTs/PTVS,mlorbetske/PTVS,DEVSENSE/PTVS,msunardi/PTVS,ChinaQuants/PTVS,zooba/PTVS,bolabola/PTVS,modulexcite/PTVS,crwilcox/PTVS,juanyaw/PTVS,DEVSENSE/PTVS,huguesv/PTVS,gomiero/PTVS,fivejjs/PTVS,int19h/PTVS,ChinaQuants/PTVS,denfromufa/PTVS,alanch-ms/PTVS,DinoV/PTVS,fjxhkj/PTVS,ChinaQuants/PTVS
|
#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
setup(name='ptvsd',
version='2.1.0',
description='Python Tools for Visual Studio remote debugging server',
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://pytools.codeplex.com/',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License'],
packages=['ptvsd']
)
Update ptvsd version number for 2.2 beta.
|
#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
setup(name='ptvsd',
version='2.2.0b1',
description='Python Tools for Visual Studio remote debugging server',
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://aka.ms/ptvs',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License'],
packages=['ptvsd']
)
|
<commit_before>#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
setup(name='ptvsd',
version='2.1.0',
description='Python Tools for Visual Studio remote debugging server',
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://pytools.codeplex.com/',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License'],
packages=['ptvsd']
)
<commit_msg>Update ptvsd version number for 2.2 beta.<commit_after>
|
#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
setup(name='ptvsd',
version='2.2.0b1',
description='Python Tools for Visual Studio remote debugging server',
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://aka.ms/ptvs',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License'],
packages=['ptvsd']
)
|
#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
setup(name='ptvsd',
version='2.1.0',
description='Python Tools for Visual Studio remote debugging server',
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://pytools.codeplex.com/',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License'],
packages=['ptvsd']
)
Update ptvsd version number for 2.2 beta.#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
setup(name='ptvsd',
version='2.2.0b1',
description='Python Tools for Visual Studio remote debugging server',
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://aka.ms/ptvs',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License'],
packages=['ptvsd']
)
|
<commit_before>#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
setup(name='ptvsd',
version='2.1.0',
description='Python Tools for Visual Studio remote debugging server',
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://pytools.codeplex.com/',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License'],
packages=['ptvsd']
)
<commit_msg>Update ptvsd version number for 2.2 beta.<commit_after>#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
setup(name='ptvsd',
version='2.2.0b1',
description='Python Tools for Visual Studio remote debugging server',
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://aka.ms/ptvs',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License'],
packages=['ptvsd']
)
|
4148c03ce666f12b8b04be7103ae6a969dd0c022
|
fabfile.py
|
fabfile.py
|
from fabric.api import *
env.hosts = [
'shaperia@lynx.uberspace.de'
]
env.target_directory = './happyman'
def init():
run('git clone -q https://github.com/skyshaper/happyman.git ' + env.target_directory)
with cd(env.target_directory):
run('virtualenv python_virtualenv')
def deploy():
local('git push')
with cd(env.target_directory):
run('git remote update && git reset --hard origin/master')
run('carton install --cached --deployment')
with cd('python_virtualenv'):
run('./bin/pip install -r ../cobe_python_requirements.txt')
execute(restart)
def restart():
run('svc -t ~/service/happyman')
|
from fabric.api import *
env.hosts = [
'shaperia@lynx.uberspace.de'
]
env.target_directory = './happyman'
def init():
run('git clone -q https://github.com/skyshaper/happyman.git ' + env.target_directory)
with cd(env.target_directory):
run('virtualenv python_virtualenv')
def deploy():
local('git push')
with cd(env.target_directory):
run('git remote update && git reset --hard origin/master')
run('./vendor/bin/carton install --cached --deployment')
with cd('python_virtualenv'):
run('./bin/pip install -r ../cobe_python_requirements.txt')
execute(restart)
def restart():
run('svc -t ~/service/happyman')
|
Use included carton executable on deploy
|
Use included carton executable on deploy
|
Python
|
mit
|
skyshaper/happyman,skyshaper/happyman,skyshaper/happyman
|
from fabric.api import *
env.hosts = [
'shaperia@lynx.uberspace.de'
]
env.target_directory = './happyman'
def init():
run('git clone -q https://github.com/skyshaper/happyman.git ' + env.target_directory)
with cd(env.target_directory):
run('virtualenv python_virtualenv')
def deploy():
local('git push')
with cd(env.target_directory):
run('git remote update && git reset --hard origin/master')
run('carton install --cached --deployment')
with cd('python_virtualenv'):
run('./bin/pip install -r ../cobe_python_requirements.txt')
execute(restart)
def restart():
run('svc -t ~/service/happyman')
Use included carton executable on deploy
|
from fabric.api import *
env.hosts = [
'shaperia@lynx.uberspace.de'
]
env.target_directory = './happyman'
def init():
run('git clone -q https://github.com/skyshaper/happyman.git ' + env.target_directory)
with cd(env.target_directory):
run('virtualenv python_virtualenv')
def deploy():
local('git push')
with cd(env.target_directory):
run('git remote update && git reset --hard origin/master')
run('./vendor/bin/carton install --cached --deployment')
with cd('python_virtualenv'):
run('./bin/pip install -r ../cobe_python_requirements.txt')
execute(restart)
def restart():
run('svc -t ~/service/happyman')
|
<commit_before>from fabric.api import *
env.hosts = [
'shaperia@lynx.uberspace.de'
]
env.target_directory = './happyman'
def init():
run('git clone -q https://github.com/skyshaper/happyman.git ' + env.target_directory)
with cd(env.target_directory):
run('virtualenv python_virtualenv')
def deploy():
local('git push')
with cd(env.target_directory):
run('git remote update && git reset --hard origin/master')
run('carton install --cached --deployment')
with cd('python_virtualenv'):
run('./bin/pip install -r ../cobe_python_requirements.txt')
execute(restart)
def restart():
run('svc -t ~/service/happyman')
<commit_msg>Use included carton executable on deploy<commit_after>
|
from fabric.api import *
env.hosts = [
'shaperia@lynx.uberspace.de'
]
env.target_directory = './happyman'
def init():
run('git clone -q https://github.com/skyshaper/happyman.git ' + env.target_directory)
with cd(env.target_directory):
run('virtualenv python_virtualenv')
def deploy():
local('git push')
with cd(env.target_directory):
run('git remote update && git reset --hard origin/master')
run('./vendor/bin/carton install --cached --deployment')
with cd('python_virtualenv'):
run('./bin/pip install -r ../cobe_python_requirements.txt')
execute(restart)
def restart():
run('svc -t ~/service/happyman')
|
from fabric.api import *
env.hosts = [
'shaperia@lynx.uberspace.de'
]
env.target_directory = './happyman'
def init():
run('git clone -q https://github.com/skyshaper/happyman.git ' + env.target_directory)
with cd(env.target_directory):
run('virtualenv python_virtualenv')
def deploy():
local('git push')
with cd(env.target_directory):
run('git remote update && git reset --hard origin/master')
run('carton install --cached --deployment')
with cd('python_virtualenv'):
run('./bin/pip install -r ../cobe_python_requirements.txt')
execute(restart)
def restart():
run('svc -t ~/service/happyman')
Use included carton executable on deployfrom fabric.api import *
env.hosts = [
'shaperia@lynx.uberspace.de'
]
env.target_directory = './happyman'
def init():
run('git clone -q https://github.com/skyshaper/happyman.git ' + env.target_directory)
with cd(env.target_directory):
run('virtualenv python_virtualenv')
def deploy():
local('git push')
with cd(env.target_directory):
run('git remote update && git reset --hard origin/master')
run('./vendor/bin/carton install --cached --deployment')
with cd('python_virtualenv'):
run('./bin/pip install -r ../cobe_python_requirements.txt')
execute(restart)
def restart():
run('svc -t ~/service/happyman')
|
<commit_before>from fabric.api import *
env.hosts = [
'shaperia@lynx.uberspace.de'
]
env.target_directory = './happyman'
def init():
run('git clone -q https://github.com/skyshaper/happyman.git ' + env.target_directory)
with cd(env.target_directory):
run('virtualenv python_virtualenv')
def deploy():
local('git push')
with cd(env.target_directory):
run('git remote update && git reset --hard origin/master')
run('carton install --cached --deployment')
with cd('python_virtualenv'):
run('./bin/pip install -r ../cobe_python_requirements.txt')
execute(restart)
def restart():
run('svc -t ~/service/happyman')
<commit_msg>Use included carton executable on deploy<commit_after>from fabric.api import *
env.hosts = [
'shaperia@lynx.uberspace.de'
]
env.target_directory = './happyman'
def init():
run('git clone -q https://github.com/skyshaper/happyman.git ' + env.target_directory)
with cd(env.target_directory):
run('virtualenv python_virtualenv')
def deploy():
local('git push')
with cd(env.target_directory):
run('git remote update && git reset --hard origin/master')
run('./vendor/bin/carton install --cached --deployment')
with cd('python_virtualenv'):
run('./bin/pip install -r ../cobe_python_requirements.txt')
execute(restart)
def restart():
run('svc -t ~/service/happyman')
|
ddb3bcf4e5d5eb5dc4f8bb74313f333e54c385d6
|
scripts/wall_stop.py
|
scripts/wall_stop.py
|
#!/usr/bin/env python
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues
class WallStop():
def __init__(self):
self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)
self.sensor_values = LightSensorValues()
rospy.Subscriber('/lightsensors', LightSensorValues, self.callback_lightsensors)
def callback_lightsensors(self,messages):
self.sensor_values = messages
def run(self):
rate = rospy.Rate(10)
data = Twist()
while not rospy.is_shutdown():
data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
self.cmd_vel.publish(data)
rate.sleep()
if __name__ == '__main__':
rospy.init_node('wall_stop')
rospy.wait_for_service('/motor_on')
rospy.wait_for_service('/motor_off')
rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
rospy.ServiceProxy('/motor_on',Trigger).call()
WallStop().run()
|
#!/usr/bin/env python
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues
class WallStop():
def __init__(self):
self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)
self.sensor_values = LightSensorValues()
rospy.Subscriber('/lightsensors', LightSensorValues, self.callback)
def callback(self,messages):
self.sensor_values = messages
def run(self):
rate = rospy.Rate(10)
data = Twist()
while not rospy.is_shutdown():
data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
self.cmd_vel.publish(data)
rate.sleep()
if __name__ == '__main__':
rospy.init_node('wall_stop')
rospy.wait_for_service('/motor_on')
rospy.wait_for_service('/motor_off')
rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
rospy.ServiceProxy('/motor_on',Trigger).call()
WallStop().run()
|
Reduce the name of a function
|
Reduce the name of a function
|
Python
|
mit
|
citueda/pimouse_run_corridor,citueda/pimouse_run_corridor
|
#!/usr/bin/env python
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues
class WallStop():
def __init__(self):
self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)
self.sensor_values = LightSensorValues()
rospy.Subscriber('/lightsensors', LightSensorValues, self.callback_lightsensors)
def callback_lightsensors(self,messages):
self.sensor_values = messages
def run(self):
rate = rospy.Rate(10)
data = Twist()
while not rospy.is_shutdown():
data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
self.cmd_vel.publish(data)
rate.sleep()
if __name__ == '__main__':
rospy.init_node('wall_stop')
rospy.wait_for_service('/motor_on')
rospy.wait_for_service('/motor_off')
rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
rospy.ServiceProxy('/motor_on',Trigger).call()
WallStop().run()
Reduce the name of a function
|
#!/usr/bin/env python
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues
class WallStop():
def __init__(self):
self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)
self.sensor_values = LightSensorValues()
rospy.Subscriber('/lightsensors', LightSensorValues, self.callback)
def callback(self,messages):
self.sensor_values = messages
def run(self):
rate = rospy.Rate(10)
data = Twist()
while not rospy.is_shutdown():
data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
self.cmd_vel.publish(data)
rate.sleep()
if __name__ == '__main__':
rospy.init_node('wall_stop')
rospy.wait_for_service('/motor_on')
rospy.wait_for_service('/motor_off')
rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
rospy.ServiceProxy('/motor_on',Trigger).call()
WallStop().run()
|
<commit_before>#!/usr/bin/env python
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues
class WallStop():
def __init__(self):
self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)
self.sensor_values = LightSensorValues()
rospy.Subscriber('/lightsensors', LightSensorValues, self.callback_lightsensors)
def callback_lightsensors(self,messages):
self.sensor_values = messages
def run(self):
rate = rospy.Rate(10)
data = Twist()
while not rospy.is_shutdown():
data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
self.cmd_vel.publish(data)
rate.sleep()
if __name__ == '__main__':
rospy.init_node('wall_stop')
rospy.wait_for_service('/motor_on')
rospy.wait_for_service('/motor_off')
rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
rospy.ServiceProxy('/motor_on',Trigger).call()
WallStop().run()
<commit_msg>Reduce the name of a function<commit_after>
|
#!/usr/bin/env python
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues
class WallStop():
def __init__(self):
self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)
self.sensor_values = LightSensorValues()
rospy.Subscriber('/lightsensors', LightSensorValues, self.callback)
def callback(self,messages):
self.sensor_values = messages
def run(self):
rate = rospy.Rate(10)
data = Twist()
while not rospy.is_shutdown():
data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
self.cmd_vel.publish(data)
rate.sleep()
if __name__ == '__main__':
rospy.init_node('wall_stop')
rospy.wait_for_service('/motor_on')
rospy.wait_for_service('/motor_off')
rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
rospy.ServiceProxy('/motor_on',Trigger).call()
WallStop().run()
|
#!/usr/bin/env python
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues
class WallStop():
def __init__(self):
self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)
self.sensor_values = LightSensorValues()
rospy.Subscriber('/lightsensors', LightSensorValues, self.callback_lightsensors)
def callback_lightsensors(self,messages):
self.sensor_values = messages
def run(self):
rate = rospy.Rate(10)
data = Twist()
while not rospy.is_shutdown():
data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
self.cmd_vel.publish(data)
rate.sleep()
if __name__ == '__main__':
rospy.init_node('wall_stop')
rospy.wait_for_service('/motor_on')
rospy.wait_for_service('/motor_off')
rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
rospy.ServiceProxy('/motor_on',Trigger).call()
WallStop().run()
Reduce the name of a function#!/usr/bin/env python
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues
class WallStop():
def __init__(self):
self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)
self.sensor_values = LightSensorValues()
rospy.Subscriber('/lightsensors', LightSensorValues, self.callback)
def callback(self,messages):
self.sensor_values = messages
def run(self):
rate = rospy.Rate(10)
data = Twist()
while not rospy.is_shutdown():
data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
self.cmd_vel.publish(data)
rate.sleep()
if __name__ == '__main__':
rospy.init_node('wall_stop')
rospy.wait_for_service('/motor_on')
rospy.wait_for_service('/motor_off')
rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
rospy.ServiceProxy('/motor_on',Trigger).call()
WallStop().run()
|
<commit_before>#!/usr/bin/env python
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues
class WallStop():
def __init__(self):
self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)
self.sensor_values = LightSensorValues()
rospy.Subscriber('/lightsensors', LightSensorValues, self.callback_lightsensors)
def callback_lightsensors(self,messages):
self.sensor_values = messages
def run(self):
rate = rospy.Rate(10)
data = Twist()
while not rospy.is_shutdown():
data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
self.cmd_vel.publish(data)
rate.sleep()
if __name__ == '__main__':
rospy.init_node('wall_stop')
rospy.wait_for_service('/motor_on')
rospy.wait_for_service('/motor_off')
rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
rospy.ServiceProxy('/motor_on',Trigger).call()
WallStop().run()
<commit_msg>Reduce the name of a function<commit_after>#!/usr/bin/env python
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues
class WallStop():
def __init__(self):
self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)
self.sensor_values = LightSensorValues()
rospy.Subscriber('/lightsensors', LightSensorValues, self.callback)
def callback(self,messages):
self.sensor_values = messages
def run(self):
rate = rospy.Rate(10)
data = Twist()
while not rospy.is_shutdown():
data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
self.cmd_vel.publish(data)
rate.sleep()
if __name__ == '__main__':
rospy.init_node('wall_stop')
rospy.wait_for_service('/motor_on')
rospy.wait_for_service('/motor_off')
rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
rospy.ServiceProxy('/motor_on',Trigger).call()
WallStop().run()
|
421b2d75f04717dd8acb461bd698ca8355e70480
|
python2.7/music-organizer.py
|
python2.7/music-organizer.py
|
#!/usr/bin/env python2.7
import os
import re
import sys
from mutagen.easyid3 import EasyID3
replaceChars = (
(" ", "-"),
("(", ""),
(")", ""),
(",", ""),
(".", ""),
("'", ""),
("?", "")
)
def toNeat(s):
s = s.lower()
for r in replaceChars: s = s.replace(r[0], r[1])
search = re.search("[^a-z\-]", s)
if search:
print("Error: Unrecognized character in '" + s + "'")
sys.exit(-42)
return s
for dirname, dirnames, filenames in os.walk('.'):
for subdirname in dirnames:
print("subdir:" + str(subdirname))
for filename in filenames:
fullPath = os.path.join(dirname, filename)
print("file: " + str(fullPath))
audio = EasyID3(fullPath)
title = audio['title'][0].decode()
print(" title: " + title)
neatTitle = toNeat(title)
print(" neat-title: " + neatTitle)
ext = os.path.splitext(filename)[1]
newFullPath = os.path.join(dirname, neatTitle + ext)
print(" newFullPath: " + newFullPath)
if newFullPath != fullPath:
if os.path.isfile(newFullPath):
print("Error: File exists: '" + newFullPath + "'")
sys.exit(-42)
os.rename(fullPath, newFullPath)
print("\nComplete!")
|
#!/usr/bin/env python2.7
import os
import re
import sys
from mutagen.easyid3 import EasyID3
replaceChars = (
(" ", "-"),
("(", ""),
(")", ""),
(",", ""),
(".", ""),
("'", ""),
("?", "")
)
def toNeat(s):
s = s.lower()
for r in replaceChars: s = s.replace(r[0], r[1])
search = re.search("[^0-9a-z\-]", s)
if search:
print("Error: Unrecognized character in '" + s + "'")
sys.exit(-42)
return s
for dirname, dirnames, filenames in os.walk('.'):
for subdirname in dirnames:
print("subdir:" + str(subdirname))
for filename in filenames:
fullPath = os.path.join(dirname, filename)
print("file: " + str(fullPath))
audio = EasyID3(fullPath)
title = audio['title'][0].decode()
print(" title: " + title)
neatTitle = toNeat(title)
print(" neat-title: " + neatTitle)
ext = os.path.splitext(filename)[1]
newFullPath = os.path.join(dirname, neatTitle + ext)
print(" newFullPath: " + newFullPath)
if newFullPath != fullPath:
if os.path.isfile(newFullPath):
print("Error: File exists: '" + newFullPath + "'")
sys.exit(-42)
os.rename(fullPath, newFullPath)
print("\nComplete!")
|
Allow 0-9 in song names.
|
Allow 0-9 in song names.
|
Python
|
mit
|
bamos/python-scripts,bamos/python-scripts
|
#!/usr/bin/env python2.7
import os
import re
import sys
from mutagen.easyid3 import EasyID3
replaceChars = (
(" ", "-"),
("(", ""),
(")", ""),
(",", ""),
(".", ""),
("'", ""),
("?", "")
)
def toNeat(s):
s = s.lower()
for r in replaceChars: s = s.replace(r[0], r[1])
search = re.search("[^a-z\-]", s)
if search:
print("Error: Unrecognized character in '" + s + "'")
sys.exit(-42)
return s
for dirname, dirnames, filenames in os.walk('.'):
for subdirname in dirnames:
print("subdir:" + str(subdirname))
for filename in filenames:
fullPath = os.path.join(dirname, filename)
print("file: " + str(fullPath))
audio = EasyID3(fullPath)
title = audio['title'][0].decode()
print(" title: " + title)
neatTitle = toNeat(title)
print(" neat-title: " + neatTitle)
ext = os.path.splitext(filename)[1]
newFullPath = os.path.join(dirname, neatTitle + ext)
print(" newFullPath: " + newFullPath)
if newFullPath != fullPath:
if os.path.isfile(newFullPath):
print("Error: File exists: '" + newFullPath + "'")
sys.exit(-42)
os.rename(fullPath, newFullPath)
print("\nComplete!")
Allow 0-9 in song names.
|
#!/usr/bin/env python2.7
import os
import re
import sys
from mutagen.easyid3 import EasyID3
replaceChars = (
(" ", "-"),
("(", ""),
(")", ""),
(",", ""),
(".", ""),
("'", ""),
("?", "")
)
def toNeat(s):
s = s.lower()
for r in replaceChars: s = s.replace(r[0], r[1])
search = re.search("[^0-9a-z\-]", s)
if search:
print("Error: Unrecognized character in '" + s + "'")
sys.exit(-42)
return s
for dirname, dirnames, filenames in os.walk('.'):
for subdirname in dirnames:
print("subdir:" + str(subdirname))
for filename in filenames:
fullPath = os.path.join(dirname, filename)
print("file: " + str(fullPath))
audio = EasyID3(fullPath)
title = audio['title'][0].decode()
print(" title: " + title)
neatTitle = toNeat(title)
print(" neat-title: " + neatTitle)
ext = os.path.splitext(filename)[1]
newFullPath = os.path.join(dirname, neatTitle + ext)
print(" newFullPath: " + newFullPath)
if newFullPath != fullPath:
if os.path.isfile(newFullPath):
print("Error: File exists: '" + newFullPath + "'")
sys.exit(-42)
os.rename(fullPath, newFullPath)
print("\nComplete!")
|
<commit_before>#!/usr/bin/env python2.7
import os
import re
import sys
from mutagen.easyid3 import EasyID3
replaceChars = (
(" ", "-"),
("(", ""),
(")", ""),
(",", ""),
(".", ""),
("'", ""),
("?", "")
)
def toNeat(s):
s = s.lower()
for r in replaceChars: s = s.replace(r[0], r[1])
search = re.search("[^a-z\-]", s)
if search:
print("Error: Unrecognized character in '" + s + "'")
sys.exit(-42)
return s
for dirname, dirnames, filenames in os.walk('.'):
for subdirname in dirnames:
print("subdir:" + str(subdirname))
for filename in filenames:
fullPath = os.path.join(dirname, filename)
print("file: " + str(fullPath))
audio = EasyID3(fullPath)
title = audio['title'][0].decode()
print(" title: " + title)
neatTitle = toNeat(title)
print(" neat-title: " + neatTitle)
ext = os.path.splitext(filename)[1]
newFullPath = os.path.join(dirname, neatTitle + ext)
print(" newFullPath: " + newFullPath)
if newFullPath != fullPath:
if os.path.isfile(newFullPath):
print("Error: File exists: '" + newFullPath + "'")
sys.exit(-42)
os.rename(fullPath, newFullPath)
print("\nComplete!")
<commit_msg>Allow 0-9 in song names.<commit_after>
|
#!/usr/bin/env python2.7
import os
import re
import sys
from mutagen.easyid3 import EasyID3
replaceChars = (
(" ", "-"),
("(", ""),
(")", ""),
(",", ""),
(".", ""),
("'", ""),
("?", "")
)
def toNeat(s):
s = s.lower()
for r in replaceChars: s = s.replace(r[0], r[1])
search = re.search("[^0-9a-z\-]", s)
if search:
print("Error: Unrecognized character in '" + s + "'")
sys.exit(-42)
return s
for dirname, dirnames, filenames in os.walk('.'):
for subdirname in dirnames:
print("subdir:" + str(subdirname))
for filename in filenames:
fullPath = os.path.join(dirname, filename)
print("file: " + str(fullPath))
audio = EasyID3(fullPath)
title = audio['title'][0].decode()
print(" title: " + title)
neatTitle = toNeat(title)
print(" neat-title: " + neatTitle)
ext = os.path.splitext(filename)[1]
newFullPath = os.path.join(dirname, neatTitle + ext)
print(" newFullPath: " + newFullPath)
if newFullPath != fullPath:
if os.path.isfile(newFullPath):
print("Error: File exists: '" + newFullPath + "'")
sys.exit(-42)
os.rename(fullPath, newFullPath)
print("\nComplete!")
|
#!/usr/bin/env python2.7
import os
import re
import sys
from mutagen.easyid3 import EasyID3
replaceChars = (
(" ", "-"),
("(", ""),
(")", ""),
(",", ""),
(".", ""),
("'", ""),
("?", "")
)
def toNeat(s):
s = s.lower()
for r in replaceChars: s = s.replace(r[0], r[1])
search = re.search("[^a-z\-]", s)
if search:
print("Error: Unrecognized character in '" + s + "'")
sys.exit(-42)
return s
for dirname, dirnames, filenames in os.walk('.'):
for subdirname in dirnames:
print("subdir:" + str(subdirname))
for filename in filenames:
fullPath = os.path.join(dirname, filename)
print("file: " + str(fullPath))
audio = EasyID3(fullPath)
title = audio['title'][0].decode()
print(" title: " + title)
neatTitle = toNeat(title)
print(" neat-title: " + neatTitle)
ext = os.path.splitext(filename)[1]
newFullPath = os.path.join(dirname, neatTitle + ext)
print(" newFullPath: " + newFullPath)
if newFullPath != fullPath:
if os.path.isfile(newFullPath):
print("Error: File exists: '" + newFullPath + "'")
sys.exit(-42)
os.rename(fullPath, newFullPath)
print("\nComplete!")
Allow 0-9 in song names.#!/usr/bin/env python2.7
import os
import re
import sys
from mutagen.easyid3 import EasyID3
replaceChars = (
(" ", "-"),
("(", ""),
(")", ""),
(",", ""),
(".", ""),
("'", ""),
("?", "")
)
def toNeat(s):
s = s.lower()
for r in replaceChars: s = s.replace(r[0], r[1])
search = re.search("[^0-9a-z\-]", s)
if search:
print("Error: Unrecognized character in '" + s + "'")
sys.exit(-42)
return s
for dirname, dirnames, filenames in os.walk('.'):
for subdirname in dirnames:
print("subdir:" + str(subdirname))
for filename in filenames:
fullPath = os.path.join(dirname, filename)
print("file: " + str(fullPath))
audio = EasyID3(fullPath)
title = audio['title'][0].decode()
print(" title: " + title)
neatTitle = toNeat(title)
print(" neat-title: " + neatTitle)
ext = os.path.splitext(filename)[1]
newFullPath = os.path.join(dirname, neatTitle + ext)
print(" newFullPath: " + newFullPath)
if newFullPath != fullPath:
if os.path.isfile(newFullPath):
print("Error: File exists: '" + newFullPath + "'")
sys.exit(-42)
os.rename(fullPath, newFullPath)
print("\nComplete!")
|
<commit_before>#!/usr/bin/env python2.7
import os
import re
import sys
from mutagen.easyid3 import EasyID3
replaceChars = (
(" ", "-"),
("(", ""),
(")", ""),
(",", ""),
(".", ""),
("'", ""),
("?", "")
)
def toNeat(s):
s = s.lower()
for r in replaceChars: s = s.replace(r[0], r[1])
search = re.search("[^a-z\-]", s)
if search:
print("Error: Unrecognized character in '" + s + "'")
sys.exit(-42)
return s
for dirname, dirnames, filenames in os.walk('.'):
for subdirname in dirnames:
print("subdir:" + str(subdirname))
for filename in filenames:
fullPath = os.path.join(dirname, filename)
print("file: " + str(fullPath))
audio = EasyID3(fullPath)
title = audio['title'][0].decode()
print(" title: " + title)
neatTitle = toNeat(title)
print(" neat-title: " + neatTitle)
ext = os.path.splitext(filename)[1]
newFullPath = os.path.join(dirname, neatTitle + ext)
print(" newFullPath: " + newFullPath)
if newFullPath != fullPath:
if os.path.isfile(newFullPath):
print("Error: File exists: '" + newFullPath + "'")
sys.exit(-42)
os.rename(fullPath, newFullPath)
print("\nComplete!")
<commit_msg>Allow 0-9 in song names.<commit_after>#!/usr/bin/env python2.7
import os
import re
import sys
from mutagen.easyid3 import EasyID3
replaceChars = (
(" ", "-"),
("(", ""),
(")", ""),
(",", ""),
(".", ""),
("'", ""),
("?", "")
)
def toNeat(s):
s = s.lower()
for r in replaceChars: s = s.replace(r[0], r[1])
search = re.search("[^0-9a-z\-]", s)
if search:
print("Error: Unrecognized character in '" + s + "'")
sys.exit(-42)
return s
for dirname, dirnames, filenames in os.walk('.'):
for subdirname in dirnames:
print("subdir:" + str(subdirname))
for filename in filenames:
fullPath = os.path.join(dirname, filename)
print("file: " + str(fullPath))
audio = EasyID3(fullPath)
title = audio['title'][0].decode()
print(" title: " + title)
neatTitle = toNeat(title)
print(" neat-title: " + neatTitle)
ext = os.path.splitext(filename)[1]
newFullPath = os.path.join(dirname, neatTitle + ext)
print(" newFullPath: " + newFullPath)
if newFullPath != fullPath:
if os.path.isfile(newFullPath):
print("Error: File exists: '" + newFullPath + "'")
sys.exit(-42)
os.rename(fullPath, newFullPath)
print("\nComplete!")
|
89e22a252adf6494cf59ae2289eb3f9bb1e2a893
|
sandcats/trivial_tests.py
|
sandcats/trivial_tests.py
|
import requests
def register_asheesh():
return requests.post(
'http://localhost:3000/register',
{'rawHostname': 'asheesh',
'email': 'asheesh@asheesh.org',
'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pubkey').read()},
)
|
import requests
def register_asheesh():
return requests.post(
'http://localhost:3000/register',
{'rawHostname': 'asheesh',
'email': 'asheesh@asheesh.org',
'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pubkey').read()},
)
def register_asheesh2_bad_key_type():
return requests.post(
'http://localhost:3000/register',
{'rawHostname': 'asheesh2',
'email': 'asheesh@asheesh.org',
'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pem').read()},
)
|
Add test validating key format validation
|
Add test validating key format validation
|
Python
|
apache-2.0
|
sandstorm-io/sandcats,sandstorm-io/sandcats,sandstorm-io/sandcats,sandstorm-io/sandcats
|
import requests
def register_asheesh():
return requests.post(
'http://localhost:3000/register',
{'rawHostname': 'asheesh',
'email': 'asheesh@asheesh.org',
'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pubkey').read()},
)
Add test validating key format validation
|
import requests
def register_asheesh():
return requests.post(
'http://localhost:3000/register',
{'rawHostname': 'asheesh',
'email': 'asheesh@asheesh.org',
'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pubkey').read()},
)
def register_asheesh2_bad_key_type():
return requests.post(
'http://localhost:3000/register',
{'rawHostname': 'asheesh2',
'email': 'asheesh@asheesh.org',
'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pem').read()},
)
|
<commit_before>import requests
def register_asheesh():
return requests.post(
'http://localhost:3000/register',
{'rawHostname': 'asheesh',
'email': 'asheesh@asheesh.org',
'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pubkey').read()},
)
<commit_msg>Add test validating key format validation<commit_after>
|
import requests
def register_asheesh():
return requests.post(
'http://localhost:3000/register',
{'rawHostname': 'asheesh',
'email': 'asheesh@asheesh.org',
'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pubkey').read()},
)
def register_asheesh2_bad_key_type():
return requests.post(
'http://localhost:3000/register',
{'rawHostname': 'asheesh2',
'email': 'asheesh@asheesh.org',
'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pem').read()},
)
|
import requests
def register_asheesh():
return requests.post(
'http://localhost:3000/register',
{'rawHostname': 'asheesh',
'email': 'asheesh@asheesh.org',
'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pubkey').read()},
)
Add test validating key format validationimport requests
def register_asheesh():
return requests.post(
'http://localhost:3000/register',
{'rawHostname': 'asheesh',
'email': 'asheesh@asheesh.org',
'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pubkey').read()},
)
def register_asheesh2_bad_key_type():
return requests.post(
'http://localhost:3000/register',
{'rawHostname': 'asheesh2',
'email': 'asheesh@asheesh.org',
'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pem').read()},
)
|
<commit_before>import requests
def register_asheesh():
return requests.post(
'http://localhost:3000/register',
{'rawHostname': 'asheesh',
'email': 'asheesh@asheesh.org',
'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pubkey').read()},
)
<commit_msg>Add test validating key format validation<commit_after>import requests
def register_asheesh():
return requests.post(
'http://localhost:3000/register',
{'rawHostname': 'asheesh',
'email': 'asheesh@asheesh.org',
'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pubkey').read()},
)
def register_asheesh2_bad_key_type():
return requests.post(
'http://localhost:3000/register',
{'rawHostname': 'asheesh2',
'email': 'asheesh@asheesh.org',
'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pem').read()},
)
|
b442190966a818338e0e294a6835b30a10753708
|
tests/providers/test_nfsn.py
|
tests/providers/test_nfsn.py
|
# Test for one implementation of the interface
from lexicon.providers.nfsn import Provider
from integration_tests import IntegrationTests
from unittest import TestCase
import pytest
import os
"""
Some small info about running live tests.
NFSN doesn't have trial accounts, so these tests can only
be run by those with an NFSN account. NFSN also requires
you to have an API key. More info here:
https://members.nearlyfreespeech.net/wiki/API/Introduction
You'll need an account to access that page.
Therefore, the following
parameters must be provided:
- LEXICON_NFSN_USERNAME -> Your NFSN username
- LEXICON_NFSN_TOKEN -> Your API Key
- LEXICON_NFSN_DOMAIN -> Domain you want to test with
"""
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class NFSNProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'nfsn'
@property
def domain(self):
_domain = os.environ.get('LEXICON_NFSN_DOMAIN')
if _domain is None:
raise ValueError('LEXICON_NFSN_DOMAIN must be specified.')
return _domain
def _filter_headers(self):
return ['X-NFSN-Authentication']
|
# Test for one implementation of the interface
from lexicon.providers.nfsn import Provider
from integration_tests import IntegrationTests
from unittest import TestCase
import pytest
import os
"""
Some small info about running live tests.
NFSN doesn't have trial accounts, so these tests can only
be run by those with an NFSN account. NFSN also requires
you to have an API key. More info here:
https://members.nearlyfreespeech.net/wiki/API/Introduction
You'll need an account to access that page.
Therefore, the following
parameters must be provided:
- LEXICON_NFSN_USERNAME -> Your NFSN username
- LEXICON_NFSN_TOKEN -> Your API Key
- LEXICON_NFSN_DOMAIN -> Domain you want to test with
"""
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class NFSNProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'nfsn'
default_domain = 'koupia.xyz'
@property
def domain(self):
_domain = os.environ.get('LEXICON_NFSN_DOMAIN')
return _domain or NFSNProviderTests.default_domain
def _filter_headers(self):
return ['X-NFSN-Authentication']
|
Add default NFSN test url
|
Add default NFSN test url
|
Python
|
mit
|
AnalogJ/lexicon,AnalogJ/lexicon
|
# Test for one implementation of the interface
from lexicon.providers.nfsn import Provider
from integration_tests import IntegrationTests
from unittest import TestCase
import pytest
import os
"""
Some small info about running live tests.
NFSN doesn't have trial accounts, so these tests can only
be run by those with an NFSN account. NFSN also requires
you to have an API key. More info here:
https://members.nearlyfreespeech.net/wiki/API/Introduction
You'll need an account to access that page.
Therefore, the following
parameters must be provided:
- LEXICON_NFSN_USERNAME -> Your NFSN username
- LEXICON_NFSN_TOKEN -> Your API Key
- LEXICON_NFSN_DOMAIN -> Domain you want to test with
"""
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class NFSNProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'nfsn'
@property
def domain(self):
_domain = os.environ.get('LEXICON_NFSN_DOMAIN')
if _domain is None:
raise ValueError('LEXICON_NFSN_DOMAIN must be specified.')
return _domain
def _filter_headers(self):
return ['X-NFSN-Authentication']
Add default NFSN test url
|
# Test for one implementation of the interface
from lexicon.providers.nfsn import Provider
from integration_tests import IntegrationTests
from unittest import TestCase
import pytest
import os
"""
Some small info about running live tests.
NFSN doesn't have trial accounts, so these tests can only
be run by those with an NFSN account. NFSN also requires
you to have an API key. More info here:
https://members.nearlyfreespeech.net/wiki/API/Introduction
You'll need an account to access that page.
Therefore, the following
parameters must be provided:
- LEXICON_NFSN_USERNAME -> Your NFSN username
- LEXICON_NFSN_TOKEN -> Your API Key
- LEXICON_NFSN_DOMAIN -> Domain you want to test with
"""
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class NFSNProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'nfsn'
default_domain = 'koupia.xyz'
@property
def domain(self):
_domain = os.environ.get('LEXICON_NFSN_DOMAIN')
return _domain or NFSNProviderTests.default_domain
def _filter_headers(self):
return ['X-NFSN-Authentication']
|
<commit_before># Test for one implementation of the interface
from lexicon.providers.nfsn import Provider
from integration_tests import IntegrationTests
from unittest import TestCase
import pytest
import os
"""
Some small info about running live tests.
NFSN doesn't have trial accounts, so these tests can only
be run by those with an NFSN account. NFSN also requires
you to have an API key. More info here:
https://members.nearlyfreespeech.net/wiki/API/Introduction
You'll need an account to access that page.
Therefore, the following
parameters must be provided:
- LEXICON_NFSN_USERNAME -> Your NFSN username
- LEXICON_NFSN_TOKEN -> Your API Key
- LEXICON_NFSN_DOMAIN -> Domain you want to test with
"""
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class NFSNProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'nfsn'
@property
def domain(self):
_domain = os.environ.get('LEXICON_NFSN_DOMAIN')
if _domain is None:
raise ValueError('LEXICON_NFSN_DOMAIN must be specified.')
return _domain
def _filter_headers(self):
return ['X-NFSN-Authentication']
<commit_msg>Add default NFSN test url<commit_after>
|
# Test for one implementation of the interface
from lexicon.providers.nfsn import Provider
from integration_tests import IntegrationTests
from unittest import TestCase
import pytest
import os
"""
Some small info about running live tests.
NFSN doesn't have trial accounts, so these tests can only
be run by those with an NFSN account. NFSN also requires
you to have an API key. More info here:
https://members.nearlyfreespeech.net/wiki/API/Introduction
You'll need an account to access that page.
Therefore, the following
parameters must be provided:
- LEXICON_NFSN_USERNAME -> Your NFSN username
- LEXICON_NFSN_TOKEN -> Your API Key
- LEXICON_NFSN_DOMAIN -> Domain you want to test with
"""
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class NFSNProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'nfsn'
default_domain = 'koupia.xyz'
@property
def domain(self):
_domain = os.environ.get('LEXICON_NFSN_DOMAIN')
return _domain or NFSNProviderTests.default_domain
def _filter_headers(self):
return ['X-NFSN-Authentication']
|
# Test for one implementation of the interface
from lexicon.providers.nfsn import Provider
from integration_tests import IntegrationTests
from unittest import TestCase
import pytest
import os
"""
Some small info about running live tests.
NFSN doesn't have trial accounts, so these tests can only
be run by those with an NFSN account. NFSN also requires
you to have an API key. More info here:
https://members.nearlyfreespeech.net/wiki/API/Introduction
You'll need an account to access that page.
Therefore, the following
parameters must be provided:
- LEXICON_NFSN_USERNAME -> Your NFSN username
- LEXICON_NFSN_TOKEN -> Your API Key
- LEXICON_NFSN_DOMAIN -> Domain you want to test with
"""
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class NFSNProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'nfsn'
@property
def domain(self):
_domain = os.environ.get('LEXICON_NFSN_DOMAIN')
if _domain is None:
raise ValueError('LEXICON_NFSN_DOMAIN must be specified.')
return _domain
def _filter_headers(self):
return ['X-NFSN-Authentication']
Add default NFSN test url# Test for one implementation of the interface
from lexicon.providers.nfsn import Provider
from integration_tests import IntegrationTests
from unittest import TestCase
import pytest
import os
"""
Some small info about running live tests.
NFSN doesn't have trial accounts, so these tests can only
be run by those with an NFSN account. NFSN also requires
you to have an API key. More info here:
https://members.nearlyfreespeech.net/wiki/API/Introduction
You'll need an account to access that page.
Therefore, the following
parameters must be provided:
- LEXICON_NFSN_USERNAME -> Your NFSN username
- LEXICON_NFSN_TOKEN -> Your API Key
- LEXICON_NFSN_DOMAIN -> Domain you want to test with
"""
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class NFSNProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'nfsn'
default_domain = 'koupia.xyz'
@property
def domain(self):
_domain = os.environ.get('LEXICON_NFSN_DOMAIN')
return _domain or NFSNProviderTests.default_domain
def _filter_headers(self):
return ['X-NFSN-Authentication']
|
<commit_before># Test for one implementation of the interface
from lexicon.providers.nfsn import Provider
from integration_tests import IntegrationTests
from unittest import TestCase
import pytest
import os
"""
Some small info about running live tests.
NFSN doesn't have trial accounts, so these tests can only
be run by those with an NFSN account. NFSN also requires
you to have an API key. More info here:
https://members.nearlyfreespeech.net/wiki/API/Introduction
You'll need an account to access that page.
Therefore, the following
parameters must be provided:
- LEXICON_NFSN_USERNAME -> Your NFSN username
- LEXICON_NFSN_TOKEN -> Your API Key
- LEXICON_NFSN_DOMAIN -> Domain you want to test with
"""
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class NFSNProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'nfsn'
@property
def domain(self):
_domain = os.environ.get('LEXICON_NFSN_DOMAIN')
if _domain is None:
raise ValueError('LEXICON_NFSN_DOMAIN must be specified.')
return _domain
def _filter_headers(self):
return ['X-NFSN-Authentication']
<commit_msg>Add default NFSN test url<commit_after># Test for one implementation of the interface
from lexicon.providers.nfsn import Provider
from integration_tests import IntegrationTests
from unittest import TestCase
import pytest
import os
"""
Some small info about running live tests.
NFSN doesn't have trial accounts, so these tests can only
be run by those with an NFSN account. NFSN also requires
you to have an API key. More info here:
https://members.nearlyfreespeech.net/wiki/API/Introduction
You'll need an account to access that page.
Therefore, the following
parameters must be provided:
- LEXICON_NFSN_USERNAME -> Your NFSN username
- LEXICON_NFSN_TOKEN -> Your API Key
- LEXICON_NFSN_DOMAIN -> Domain you want to test with
"""
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class NFSNProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'nfsn'
default_domain = 'koupia.xyz'
@property
def domain(self):
_domain = os.environ.get('LEXICON_NFSN_DOMAIN')
return _domain or NFSNProviderTests.default_domain
def _filter_headers(self):
return ['X-NFSN-Authentication']
|
78c3589bbb80607321cf2b3e30699cde7df08ed8
|
website/addons/s3/tests/factories.py
|
website/addons/s3/tests/factories.py
|
# -*- coding: utf-8 -*-
"""Factory boy factories for the Box addon."""
import mock
from datetime import datetime
from dateutil.relativedelta import relativedelta
from factory import SubFactory, Sequence
from tests.factories import ModularOdmFactory, UserFactory, ProjectFactory, ExternalAccountFactory
from website.addons.s3.model import (
S3UserSettings,
S3NodeSettings
)
class S3AccountFactory(ExternalAccountFactory):
provider = 's3'
provider_id = Sequence(lambda n: 'id-{0}'.format(n))
oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
oauth_secret = Sequence(lambda n:'secret-{0}'.format(n))
display_name = 'S3 Fake User'
class S3UserSettingsFactory(ModularOdmFactory):
FACTORY_FOR = S3UserSettings
owner = SubFactory(UserFactory)
class S3NodeSettingsFactory(ModularOdmFactory):
FACTORY_FOR = S3NodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(S3UserSettingsFactory)
bucket = 'mock_bucket'
|
# -*- coding: utf-8 -*-
"""Factories for the S3 addon."""
from factory import SubFactory, Sequence
from tests.factories import ModularOdmFactory, UserFactory, ProjectFactory, ExternalAccountFactory
from website.addons.s3.model import (
S3UserSettings,
S3NodeSettings
)
class S3AccountFactory(ExternalAccountFactory):
provider = 's3'
provider_id = Sequence(lambda n: 'id-{0}'.format(n))
oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
oauth_secret = Sequence(lambda n:'secret-{0}'.format(n))
display_name = 'S3 Fake User'
class S3UserSettingsFactory(ModularOdmFactory):
FACTORY_FOR = S3UserSettings
owner = SubFactory(UserFactory)
class S3NodeSettingsFactory(ModularOdmFactory):
FACTORY_FOR = S3NodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(S3UserSettingsFactory)
bucket = 'mock_bucket'
|
Fix docstring, remove unused import
|
Fix docstring, remove unused import
|
Python
|
apache-2.0
|
SSJohns/osf.io,caseyrollins/osf.io,jnayak1/osf.io,acshi/osf.io,felliott/osf.io,monikagrabowska/osf.io,TomBaxter/osf.io,RomanZWang/osf.io,leb2dg/osf.io,wearpants/osf.io,emetsger/osf.io,acshi/osf.io,chennan47/osf.io,zamattiac/osf.io,mluo613/osf.io,alexschiller/osf.io,chennan47/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,mattclark/osf.io,kch8qx/osf.io,zachjanicki/osf.io,monikagrabowska/osf.io,adlius/osf.io,SSJohns/osf.io,sloria/osf.io,leb2dg/osf.io,kwierman/osf.io,zamattiac/osf.io,crcresearch/osf.io,binoculars/osf.io,sloria/osf.io,aaxelb/osf.io,felliott/osf.io,chrisseto/osf.io,DanielSBrown/osf.io,rdhyee/osf.io,erinspace/osf.io,icereval/osf.io,amyshi188/osf.io,CenterForOpenScience/osf.io,wearpants/osf.io,kch8qx/osf.io,kch8qx/osf.io,saradbowman/osf.io,samchrisinger/osf.io,abought/osf.io,Nesiehr/osf.io,amyshi188/osf.io,TomHeatwole/osf.io,laurenrevere/osf.io,wearpants/osf.io,jnayak1/osf.io,sloria/osf.io,cslzchen/osf.io,abought/osf.io,kch8qx/osf.io,Nesiehr/osf.io,abought/osf.io,brianjgeiger/osf.io,leb2dg/osf.io,acshi/osf.io,mattclark/osf.io,felliott/osf.io,doublebits/osf.io,zamattiac/osf.io,HalcyonChimera/osf.io,zachjanicki/osf.io,acshi/osf.io,crcresearch/osf.io,caneruguz/osf.io,Johnetordoff/osf.io,doublebits/osf.io,zachjanicki/osf.io,SSJohns/osf.io,emetsger/osf.io,cwisecarver/osf.io,chrisseto/osf.io,chennan47/osf.io,hmoco/osf.io,jnayak1/osf.io,chrisseto/osf.io,rdhyee/osf.io,DanielSBrown/osf.io,felliott/osf.io,baylee-d/osf.io,icereval/osf.io,mluke93/osf.io,brianjgeiger/osf.io,rdhyee/osf.io,amyshi188/osf.io,pattisdr/osf.io,caseyrollins/osf.io,samchrisinger/osf.io,asanfilippo7/osf.io,emetsger/osf.io,zamattiac/osf.io,alexschiller/osf.io,caneruguz/osf.io,mluo613/osf.io,DanielSBrown/osf.io,laurenrevere/osf.io,laurenrevere/osf.io,monikagrabowska/osf.io,hmoco/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,RomanZWang/osf.io,monikagrabowska/osf.io,samchrisinger/osf.io,Nesiehr/osf.io,binoculars/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,chrisseto/osf.io,TomBaxter/osf.io,adlius/osf.io,cwisecarver/osf.io,cslzchen/osf.io,adlius/osf.io,acshi/osf.io,asanfilippo7/osf.io,TomBaxter/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,kwierman/osf.io,caseyrollins/osf.io,hmoco/osf.io,Johnetordoff/osf.io,cwisecarver/osf.io,mluo613/osf.io,alexschiller/osf.io,crcresearch/osf.io,alexschiller/osf.io,erinspace/osf.io,cwisecarver/osf.io,mluo613/osf.io,cslzchen/osf.io,emetsger/osf.io,doublebits/osf.io,icereval/osf.io,monikagrabowska/osf.io,RomanZWang/osf.io,asanfilippo7/osf.io,wearpants/osf.io,baylee-d/osf.io,pattisdr/osf.io,pattisdr/osf.io,mluke93/osf.io,jnayak1/osf.io,CenterForOpenScience/osf.io,RomanZWang/osf.io,kwierman/osf.io,abought/osf.io,SSJohns/osf.io,Nesiehr/osf.io,mluke93/osf.io,TomHeatwole/osf.io,doublebits/osf.io,aaxelb/osf.io,caneruguz/osf.io,asanfilippo7/osf.io,rdhyee/osf.io,doublebits/osf.io,kwierman/osf.io,RomanZWang/osf.io,zachjanicki/osf.io,DanielSBrown/osf.io,amyshi188/osf.io,mfraezz/osf.io,mluke93/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,TomHeatwole/osf.io,mfraezz/osf.io,mattclark/osf.io,samchrisinger/osf.io,alexschiller/osf.io,hmoco/osf.io,mluo613/osf.io,cslzchen/osf.io,HalcyonChimera/osf.io,mfraezz/osf.io,saradbowman/osf.io,erinspace/osf.io,binoculars/osf.io,brianjgeiger/osf.io,TomHeatwole/osf.io,adlius/osf.io,kch8qx/osf.io,mfraezz/osf.io
|
# -*- coding: utf-8 -*-
"""Factory boy factories for the Box addon."""
import mock
from datetime import datetime
from dateutil.relativedelta import relativedelta
from factory import SubFactory, Sequence
from tests.factories import ModularOdmFactory, UserFactory, ProjectFactory, ExternalAccountFactory
from website.addons.s3.model import (
S3UserSettings,
S3NodeSettings
)
class S3AccountFactory(ExternalAccountFactory):
provider = 's3'
provider_id = Sequence(lambda n: 'id-{0}'.format(n))
oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
oauth_secret = Sequence(lambda n:'secret-{0}'.format(n))
display_name = 'S3 Fake User'
class S3UserSettingsFactory(ModularOdmFactory):
FACTORY_FOR = S3UserSettings
owner = SubFactory(UserFactory)
class S3NodeSettingsFactory(ModularOdmFactory):
FACTORY_FOR = S3NodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(S3UserSettingsFactory)
bucket = 'mock_bucket'
Fix docstring, remove unused import
|
# -*- coding: utf-8 -*-
"""Factories for the S3 addon."""
from factory import SubFactory, Sequence
from tests.factories import ModularOdmFactory, UserFactory, ProjectFactory, ExternalAccountFactory
from website.addons.s3.model import (
S3UserSettings,
S3NodeSettings
)
class S3AccountFactory(ExternalAccountFactory):
provider = 's3'
provider_id = Sequence(lambda n: 'id-{0}'.format(n))
oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
oauth_secret = Sequence(lambda n:'secret-{0}'.format(n))
display_name = 'S3 Fake User'
class S3UserSettingsFactory(ModularOdmFactory):
FACTORY_FOR = S3UserSettings
owner = SubFactory(UserFactory)
class S3NodeSettingsFactory(ModularOdmFactory):
FACTORY_FOR = S3NodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(S3UserSettingsFactory)
bucket = 'mock_bucket'
|
<commit_before># -*- coding: utf-8 -*-
"""Factory boy factories for the Box addon."""
import mock
from datetime import datetime
from dateutil.relativedelta import relativedelta
from factory import SubFactory, Sequence
from tests.factories import ModularOdmFactory, UserFactory, ProjectFactory, ExternalAccountFactory
from website.addons.s3.model import (
S3UserSettings,
S3NodeSettings
)
class S3AccountFactory(ExternalAccountFactory):
provider = 's3'
provider_id = Sequence(lambda n: 'id-{0}'.format(n))
oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
oauth_secret = Sequence(lambda n:'secret-{0}'.format(n))
display_name = 'S3 Fake User'
class S3UserSettingsFactory(ModularOdmFactory):
FACTORY_FOR = S3UserSettings
owner = SubFactory(UserFactory)
class S3NodeSettingsFactory(ModularOdmFactory):
FACTORY_FOR = S3NodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(S3UserSettingsFactory)
bucket = 'mock_bucket'
<commit_msg>Fix docstring, remove unused import<commit_after>
|
# -*- coding: utf-8 -*-
"""Factories for the S3 addon."""
from factory import SubFactory, Sequence
from tests.factories import ModularOdmFactory, UserFactory, ProjectFactory, ExternalAccountFactory
from website.addons.s3.model import (
S3UserSettings,
S3NodeSettings
)
class S3AccountFactory(ExternalAccountFactory):
provider = 's3'
provider_id = Sequence(lambda n: 'id-{0}'.format(n))
oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
oauth_secret = Sequence(lambda n:'secret-{0}'.format(n))
display_name = 'S3 Fake User'
class S3UserSettingsFactory(ModularOdmFactory):
FACTORY_FOR = S3UserSettings
owner = SubFactory(UserFactory)
class S3NodeSettingsFactory(ModularOdmFactory):
FACTORY_FOR = S3NodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(S3UserSettingsFactory)
bucket = 'mock_bucket'
|
# -*- coding: utf-8 -*-
"""Factory boy factories for the Box addon."""
import mock
from datetime import datetime
from dateutil.relativedelta import relativedelta
from factory import SubFactory, Sequence
from tests.factories import ModularOdmFactory, UserFactory, ProjectFactory, ExternalAccountFactory
from website.addons.s3.model import (
S3UserSettings,
S3NodeSettings
)
class S3AccountFactory(ExternalAccountFactory):
provider = 's3'
provider_id = Sequence(lambda n: 'id-{0}'.format(n))
oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
oauth_secret = Sequence(lambda n:'secret-{0}'.format(n))
display_name = 'S3 Fake User'
class S3UserSettingsFactory(ModularOdmFactory):
FACTORY_FOR = S3UserSettings
owner = SubFactory(UserFactory)
class S3NodeSettingsFactory(ModularOdmFactory):
FACTORY_FOR = S3NodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(S3UserSettingsFactory)
bucket = 'mock_bucket'
Fix docstring, remove unused import# -*- coding: utf-8 -*-
"""Factories for the S3 addon."""
from factory import SubFactory, Sequence
from tests.factories import ModularOdmFactory, UserFactory, ProjectFactory, ExternalAccountFactory
from website.addons.s3.model import (
S3UserSettings,
S3NodeSettings
)
class S3AccountFactory(ExternalAccountFactory):
provider = 's3'
provider_id = Sequence(lambda n: 'id-{0}'.format(n))
oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
oauth_secret = Sequence(lambda n:'secret-{0}'.format(n))
display_name = 'S3 Fake User'
class S3UserSettingsFactory(ModularOdmFactory):
FACTORY_FOR = S3UserSettings
owner = SubFactory(UserFactory)
class S3NodeSettingsFactory(ModularOdmFactory):
FACTORY_FOR = S3NodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(S3UserSettingsFactory)
bucket = 'mock_bucket'
|
<commit_before># -*- coding: utf-8 -*-
"""Factory boy factories for the Box addon."""
import mock
from datetime import datetime
from dateutil.relativedelta import relativedelta
from factory import SubFactory, Sequence
from tests.factories import ModularOdmFactory, UserFactory, ProjectFactory, ExternalAccountFactory
from website.addons.s3.model import (
S3UserSettings,
S3NodeSettings
)
class S3AccountFactory(ExternalAccountFactory):
provider = 's3'
provider_id = Sequence(lambda n: 'id-{0}'.format(n))
oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
oauth_secret = Sequence(lambda n:'secret-{0}'.format(n))
display_name = 'S3 Fake User'
class S3UserSettingsFactory(ModularOdmFactory):
FACTORY_FOR = S3UserSettings
owner = SubFactory(UserFactory)
class S3NodeSettingsFactory(ModularOdmFactory):
FACTORY_FOR = S3NodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(S3UserSettingsFactory)
bucket = 'mock_bucket'
<commit_msg>Fix docstring, remove unused import<commit_after># -*- coding: utf-8 -*-
"""Factories for the S3 addon."""
from factory import SubFactory, Sequence
from tests.factories import ModularOdmFactory, UserFactory, ProjectFactory, ExternalAccountFactory
from website.addons.s3.model import (
S3UserSettings,
S3NodeSettings
)
class S3AccountFactory(ExternalAccountFactory):
provider = 's3'
provider_id = Sequence(lambda n: 'id-{0}'.format(n))
oauth_key = Sequence(lambda n: 'key-{0}'.format(n))
oauth_secret = Sequence(lambda n:'secret-{0}'.format(n))
display_name = 'S3 Fake User'
class S3UserSettingsFactory(ModularOdmFactory):
FACTORY_FOR = S3UserSettings
owner = SubFactory(UserFactory)
class S3NodeSettingsFactory(ModularOdmFactory):
FACTORY_FOR = S3NodeSettings
owner = SubFactory(ProjectFactory)
user_settings = SubFactory(S3UserSettingsFactory)
bucket = 'mock_bucket'
|
09e0073a2aec6abc32a639fb2791af19e17eed1c
|
test/588-funicular-monorail.py
|
test/588-funicular-monorail.py
|
# way 93671417
assert_has_feature(
16, 10486, 25367, 'transit',
{ 'kind': 'monorail' })
|
# way 93671417
assert_has_feature(
16, 10486, 25367, 'transit',
{ 'kind': 'monorail' })
# relation 6060405
assert_has_feature(
16, 18201, 24705, 'transit',
{ 'kind': 'funicular' })
|
Add test for funicular feature
|
Add test for funicular feature
|
Python
|
mit
|
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
|
# way 93671417
assert_has_feature(
16, 10486, 25367, 'transit',
{ 'kind': 'monorail' })
Add test for funicular feature
|
# way 93671417
assert_has_feature(
16, 10486, 25367, 'transit',
{ 'kind': 'monorail' })
# relation 6060405
assert_has_feature(
16, 18201, 24705, 'transit',
{ 'kind': 'funicular' })
|
<commit_before># way 93671417
assert_has_feature(
16, 10486, 25367, 'transit',
{ 'kind': 'monorail' })
<commit_msg>Add test for funicular feature<commit_after>
|
# way 93671417
assert_has_feature(
16, 10486, 25367, 'transit',
{ 'kind': 'monorail' })
# relation 6060405
assert_has_feature(
16, 18201, 24705, 'transit',
{ 'kind': 'funicular' })
|
# way 93671417
assert_has_feature(
16, 10486, 25367, 'transit',
{ 'kind': 'monorail' })
Add test for funicular feature# way 93671417
assert_has_feature(
16, 10486, 25367, 'transit',
{ 'kind': 'monorail' })
# relation 6060405
assert_has_feature(
16, 18201, 24705, 'transit',
{ 'kind': 'funicular' })
|
<commit_before># way 93671417
assert_has_feature(
16, 10486, 25367, 'transit',
{ 'kind': 'monorail' })
<commit_msg>Add test for funicular feature<commit_after># way 93671417
assert_has_feature(
16, 10486, 25367, 'transit',
{ 'kind': 'monorail' })
# relation 6060405
assert_has_feature(
16, 18201, 24705, 'transit',
{ 'kind': 'funicular' })
|
293cbd9ac1ad6c8f53e40fa36c3fdce6d9dda7ec
|
ynr/apps/uk_results/views/api.py
|
ynr/apps/uk_results/views/api.py
|
from rest_framework import viewsets
from django_filters import filters, filterset
from api.v09.views import ResultsSetPagination
from ..models import CandidateResult, ResultSet
from ..serializers import CandidateResultSerializer, ResultSetSerializer
class CandidateResultViewSet(viewsets.ModelViewSet):
queryset = CandidateResult.objects.select_related(
"membership__party", "membership__post", "membership__person"
).order_by("id")
serializer_class = CandidateResultSerializer
pagination_class = ResultsSetPagination
class ResultSetFilter(filterset.FilterSet):
election_id = filters.CharFilter(field_name="post_election__election__slug")
election_date = filters.DateFilter(
field_name="post_election__election__election_date"
)
class Meta:
model = ResultSet
fields = ["election_date", "election_id"]
class ResultSetViewSet(viewsets.ModelViewSet):
queryset = ResultSet.objects.select_related(
"post_election__post", "user"
).order_by("id")
serializer_class = ResultSetSerializer
pagination_class = ResultsSetPagination
filterset_class = ResultSetFilter
|
from rest_framework import viewsets
from django_filters import filters, filterset
from django.db.models import Prefetch
from api.v09.views import ResultsSetPagination
from popolo.models import Membership
from ..models import CandidateResult, ResultSet
from ..serializers import CandidateResultSerializer, ResultSetSerializer
class CandidateResultViewSet(viewsets.ModelViewSet):
queryset = CandidateResult.objects.select_related(
"membership__party", "membership__post", "membership__person"
).order_by("id")
serializer_class = CandidateResultSerializer
pagination_class = ResultsSetPagination
class ResultSetFilter(filterset.FilterSet):
election_id = filters.CharFilter(field_name="post_election__election__slug")
election_date = filters.DateFilter(
field_name="post_election__election__election_date"
)
class Meta:
model = ResultSet
fields = ["election_date", "election_id"]
class ResultSetViewSet(viewsets.ModelViewSet):
queryset = ResultSet.objects.prefetch_related(
"post_election__post",
"post_election__election",
"user",
Prefetch(
"candidate_results",
CandidateResult.objects.select_related(
"membership__party",
"membership__post",
"membership__person",
"membership__post_election",
"membership__post_election__post",
"membership__post_election__election",
),
),
).order_by("id")
serializer_class = ResultSetSerializer
pagination_class = ResultsSetPagination
filterset_class = ResultSetFilter
|
Speed up results API view
|
Speed up results API view
|
Python
|
agpl-3.0
|
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
|
from rest_framework import viewsets
from django_filters import filters, filterset
from api.v09.views import ResultsSetPagination
from ..models import CandidateResult, ResultSet
from ..serializers import CandidateResultSerializer, ResultSetSerializer
class CandidateResultViewSet(viewsets.ModelViewSet):
queryset = CandidateResult.objects.select_related(
"membership__party", "membership__post", "membership__person"
).order_by("id")
serializer_class = CandidateResultSerializer
pagination_class = ResultsSetPagination
class ResultSetFilter(filterset.FilterSet):
election_id = filters.CharFilter(field_name="post_election__election__slug")
election_date = filters.DateFilter(
field_name="post_election__election__election_date"
)
class Meta:
model = ResultSet
fields = ["election_date", "election_id"]
class ResultSetViewSet(viewsets.ModelViewSet):
queryset = ResultSet.objects.select_related(
"post_election__post", "user"
).order_by("id")
serializer_class = ResultSetSerializer
pagination_class = ResultsSetPagination
filterset_class = ResultSetFilter
Speed up results API view
|
from rest_framework import viewsets
from django_filters import filters, filterset
from django.db.models import Prefetch
from api.v09.views import ResultsSetPagination
from popolo.models import Membership
from ..models import CandidateResult, ResultSet
from ..serializers import CandidateResultSerializer, ResultSetSerializer
class CandidateResultViewSet(viewsets.ModelViewSet):
queryset = CandidateResult.objects.select_related(
"membership__party", "membership__post", "membership__person"
).order_by("id")
serializer_class = CandidateResultSerializer
pagination_class = ResultsSetPagination
class ResultSetFilter(filterset.FilterSet):
election_id = filters.CharFilter(field_name="post_election__election__slug")
election_date = filters.DateFilter(
field_name="post_election__election__election_date"
)
class Meta:
model = ResultSet
fields = ["election_date", "election_id"]
class ResultSetViewSet(viewsets.ModelViewSet):
queryset = ResultSet.objects.prefetch_related(
"post_election__post",
"post_election__election",
"user",
Prefetch(
"candidate_results",
CandidateResult.objects.select_related(
"membership__party",
"membership__post",
"membership__person",
"membership__post_election",
"membership__post_election__post",
"membership__post_election__election",
),
),
).order_by("id")
serializer_class = ResultSetSerializer
pagination_class = ResultsSetPagination
filterset_class = ResultSetFilter
|
<commit_before>from rest_framework import viewsets
from django_filters import filters, filterset
from api.v09.views import ResultsSetPagination
from ..models import CandidateResult, ResultSet
from ..serializers import CandidateResultSerializer, ResultSetSerializer
class CandidateResultViewSet(viewsets.ModelViewSet):
queryset = CandidateResult.objects.select_related(
"membership__party", "membership__post", "membership__person"
).order_by("id")
serializer_class = CandidateResultSerializer
pagination_class = ResultsSetPagination
class ResultSetFilter(filterset.FilterSet):
election_id = filters.CharFilter(field_name="post_election__election__slug")
election_date = filters.DateFilter(
field_name="post_election__election__election_date"
)
class Meta:
model = ResultSet
fields = ["election_date", "election_id"]
class ResultSetViewSet(viewsets.ModelViewSet):
queryset = ResultSet.objects.select_related(
"post_election__post", "user"
).order_by("id")
serializer_class = ResultSetSerializer
pagination_class = ResultsSetPagination
filterset_class = ResultSetFilter
<commit_msg>Speed up results API view<commit_after>
|
from rest_framework import viewsets
from django_filters import filters, filterset
from django.db.models import Prefetch
from api.v09.views import ResultsSetPagination
from popolo.models import Membership
from ..models import CandidateResult, ResultSet
from ..serializers import CandidateResultSerializer, ResultSetSerializer
class CandidateResultViewSet(viewsets.ModelViewSet):
queryset = CandidateResult.objects.select_related(
"membership__party", "membership__post", "membership__person"
).order_by("id")
serializer_class = CandidateResultSerializer
pagination_class = ResultsSetPagination
class ResultSetFilter(filterset.FilterSet):
election_id = filters.CharFilter(field_name="post_election__election__slug")
election_date = filters.DateFilter(
field_name="post_election__election__election_date"
)
class Meta:
model = ResultSet
fields = ["election_date", "election_id"]
class ResultSetViewSet(viewsets.ModelViewSet):
queryset = ResultSet.objects.prefetch_related(
"post_election__post",
"post_election__election",
"user",
Prefetch(
"candidate_results",
CandidateResult.objects.select_related(
"membership__party",
"membership__post",
"membership__person",
"membership__post_election",
"membership__post_election__post",
"membership__post_election__election",
),
),
).order_by("id")
serializer_class = ResultSetSerializer
pagination_class = ResultsSetPagination
filterset_class = ResultSetFilter
|
from rest_framework import viewsets
from django_filters import filters, filterset
from api.v09.views import ResultsSetPagination
from ..models import CandidateResult, ResultSet
from ..serializers import CandidateResultSerializer, ResultSetSerializer
class CandidateResultViewSet(viewsets.ModelViewSet):
queryset = CandidateResult.objects.select_related(
"membership__party", "membership__post", "membership__person"
).order_by("id")
serializer_class = CandidateResultSerializer
pagination_class = ResultsSetPagination
class ResultSetFilter(filterset.FilterSet):
election_id = filters.CharFilter(field_name="post_election__election__slug")
election_date = filters.DateFilter(
field_name="post_election__election__election_date"
)
class Meta:
model = ResultSet
fields = ["election_date", "election_id"]
class ResultSetViewSet(viewsets.ModelViewSet):
queryset = ResultSet.objects.select_related(
"post_election__post", "user"
).order_by("id")
serializer_class = ResultSetSerializer
pagination_class = ResultsSetPagination
filterset_class = ResultSetFilter
Speed up results API viewfrom rest_framework import viewsets
from django_filters import filters, filterset
from django.db.models import Prefetch
from api.v09.views import ResultsSetPagination
from popolo.models import Membership
from ..models import CandidateResult, ResultSet
from ..serializers import CandidateResultSerializer, ResultSetSerializer
class CandidateResultViewSet(viewsets.ModelViewSet):
queryset = CandidateResult.objects.select_related(
"membership__party", "membership__post", "membership__person"
).order_by("id")
serializer_class = CandidateResultSerializer
pagination_class = ResultsSetPagination
class ResultSetFilter(filterset.FilterSet):
election_id = filters.CharFilter(field_name="post_election__election__slug")
election_date = filters.DateFilter(
field_name="post_election__election__election_date"
)
class Meta:
model = ResultSet
fields = ["election_date", "election_id"]
class ResultSetViewSet(viewsets.ModelViewSet):
queryset = ResultSet.objects.prefetch_related(
"post_election__post",
"post_election__election",
"user",
Prefetch(
"candidate_results",
CandidateResult.objects.select_related(
"membership__party",
"membership__post",
"membership__person",
"membership__post_election",
"membership__post_election__post",
"membership__post_election__election",
),
),
).order_by("id")
serializer_class = ResultSetSerializer
pagination_class = ResultsSetPagination
filterset_class = ResultSetFilter
|
<commit_before>from rest_framework import viewsets
from django_filters import filters, filterset
from api.v09.views import ResultsSetPagination
from ..models import CandidateResult, ResultSet
from ..serializers import CandidateResultSerializer, ResultSetSerializer
class CandidateResultViewSet(viewsets.ModelViewSet):
queryset = CandidateResult.objects.select_related(
"membership__party", "membership__post", "membership__person"
).order_by("id")
serializer_class = CandidateResultSerializer
pagination_class = ResultsSetPagination
class ResultSetFilter(filterset.FilterSet):
election_id = filters.CharFilter(field_name="post_election__election__slug")
election_date = filters.DateFilter(
field_name="post_election__election__election_date"
)
class Meta:
model = ResultSet
fields = ["election_date", "election_id"]
class ResultSetViewSet(viewsets.ModelViewSet):
queryset = ResultSet.objects.select_related(
"post_election__post", "user"
).order_by("id")
serializer_class = ResultSetSerializer
pagination_class = ResultsSetPagination
filterset_class = ResultSetFilter
<commit_msg>Speed up results API view<commit_after>from rest_framework import viewsets
from django_filters import filters, filterset
from django.db.models import Prefetch
from api.v09.views import ResultsSetPagination
from popolo.models import Membership
from ..models import CandidateResult, ResultSet
from ..serializers import CandidateResultSerializer, ResultSetSerializer
class CandidateResultViewSet(viewsets.ModelViewSet):
queryset = CandidateResult.objects.select_related(
"membership__party", "membership__post", "membership__person"
).order_by("id")
serializer_class = CandidateResultSerializer
pagination_class = ResultsSetPagination
class ResultSetFilter(filterset.FilterSet):
election_id = filters.CharFilter(field_name="post_election__election__slug")
election_date = filters.DateFilter(
field_name="post_election__election__election_date"
)
class Meta:
model = ResultSet
fields = ["election_date", "election_id"]
class ResultSetViewSet(viewsets.ModelViewSet):
queryset = ResultSet.objects.prefetch_related(
"post_election__post",
"post_election__election",
"user",
Prefetch(
"candidate_results",
CandidateResult.objects.select_related(
"membership__party",
"membership__post",
"membership__person",
"membership__post_election",
"membership__post_election__post",
"membership__post_election__election",
),
),
).order_by("id")
serializer_class = ResultSetSerializer
pagination_class = ResultsSetPagination
filterset_class = ResultSetFilter
|
f361ee6fb384a3500892a619279e229373a1b35f
|
src/config/svc-monitor/svc_monitor/tests/test_init.py
|
src/config/svc-monitor/svc_monitor/tests/test_init.py
|
import logging
import mock
import unittest
from mock import patch
from svc_monitor.svc_monitor import SvcMonitor
from pysandesh.sandesh_base import Sandesh
class Arguments(object):
def __init__(self):
self.disc_server_ip = None
self.disc_server_port = None
self.collectors = None
self.http_server_port = 0
self.log_local = None
self.log_category = None
self.log_level = None
self.log_file = '/var/log/contrail/svc_monitor.log'
self.use_syslog = False
self.syslog_facility = Sandesh._DEFAULT_SYSLOG_FACILITY
class SvcMonitorInitTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
@patch('pysandesh.sandesh_base.Sandesh')
@patch.object(SvcMonitor, '_cassandra_init')
def test_init_monitor(self, sandesh_mock, cassandra_init_mock):
logging.debug("init")
self._api_client = mock.Mock()
arguments = Arguments()
with patch.object(logging.handlers, 'RotatingFileHandler'):
self._svc_monitor = SvcMonitor(self._api_client, arguments)
|
import logging
import mock
import unittest
from mock import patch
from svc_monitor.svc_monitor import SvcMonitor
from pysandesh.sandesh_base import Sandesh
class Arguments(object):
def __init__(self):
self.disc_server_ip = None
self.disc_server_port = None
self.collectors = None
self.http_server_port = 0
self.log_local = None
self.log_category = None
self.log_level = None
self.log_file = '/var/log/contrail/svc_monitor.log'
self.use_syslog = False
self.syslog_facility = Sandesh._DEFAULT_SYSLOG_FACILITY
self.cluster_id = None
class SvcMonitorInitTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
@patch('pysandesh.sandesh_base.Sandesh')
@patch.object(SvcMonitor, '_cassandra_init')
def test_init_monitor(self, sandesh_mock, cassandra_init_mock):
logging.debug("init")
self._api_client = mock.Mock()
arguments = Arguments()
with patch.object(logging.handlers, 'RotatingFileHandler'):
self._svc_monitor = SvcMonitor(self._api_client, arguments)
|
Fix svc_monitor tests by adding a missing arg
|
Fix svc_monitor tests by adding a missing arg
This commit d318b73fbab8f0c200c71adf642968a624a7db29
introduced a cluster_id arg but this arg is not
initialized in the test file.
Change-Id: I770e5f2c949afd408b8906439e711e7f619afa57
|
Python
|
apache-2.0
|
hthompson6/contrail-controller,tcpcloud/contrail-controller,rombie/contrail-controller,Juniper/contrail-dev-controller,vpramo/contrail-controller,facetothefate/contrail-controller,eonpatapon/contrail-controller,facetothefate/contrail-controller,numansiddique/contrail-controller,tcpcloud/contrail-controller,tcpcloud/contrail-controller,srajag/contrail-controller,DreamLab/contrail-controller,sajuptpm/contrail-controller,sajuptpm/contrail-controller,vpramo/contrail-controller,eonpatapon/contrail-controller,cloudwatt/contrail-controller,sajuptpm/contrail-controller,rombie/contrail-controller,Juniper/contrail-dev-controller,eonpatapon/contrail-controller,varunarya10/contrail-controller,nischalsheth/contrail-controller,hthompson6/contrail-controller,DreamLab/contrail-controller,nischalsheth/contrail-controller,cloudwatt/contrail-controller,facetothefate/contrail-controller,srajag/contrail-controller,cloudwatt/contrail-controller,codilime/contrail-controller,varunarya10/contrail-controller,eonpatapon/contrail-controller,codilime/contrail-controller,hthompson6/contrail-controller,vmahuli/contrail-controller,Juniper/contrail-dev-controller,codilime/contrail-controller,nischalsheth/contrail-controller,reiaaoyama/contrail-controller,nischalsheth/contrail-controller,hthompson6/contrail-controller,tcpcloud/contrail-controller,facetothefate/contrail-controller,vpramo/contrail-controller,numansiddique/contrail-controller,rombie/contrail-controller,nischalsheth/contrail-controller,rombie/contrail-controller,tcpcloud/contrail-controller,vmahuli/contrail-controller,reiaaoyama/contrail-controller,hthompson6/contrail-controller,nischalsheth/contrail-controller,sajuptpm/contrail-controller,varunarya10/contrail-controller,nischalsheth/contrail-controller,numansiddique/contrail-controller,Juniper/contrail-dev-controller,eonpatapon/contrail-controller,vpramo/contrail-controller,cloudwatt/contrail-controller,rombie/contrail-controller,srajag/contrail-controller,cloudwatt/contrail-controller,varunarya10/contrail-controller,varunarya10/contrail-controller,codilime/contrail-controller,DreamLab/contrail-controller,reiaaoyama/contrail-controller,tcpcloud/contrail-controller,reiaaoyama/contrail-controller,DreamLab/contrail-controller,numansiddique/contrail-controller,eonpatapon/contrail-controller,rombie/contrail-controller,vpramo/contrail-controller,eonpatapon/contrail-controller,numansiddique/contrail-controller,reiaaoyama/contrail-controller,codilime/contrail-controller,vmahuli/contrail-controller,vmahuli/contrail-controller,vmahuli/contrail-controller,srajag/contrail-controller,facetothefate/contrail-controller,DreamLab/contrail-controller,codilime/contrail-controller,nischalsheth/contrail-controller,srajag/contrail-controller,sajuptpm/contrail-controller,sajuptpm/contrail-controller,Juniper/contrail-dev-controller,rombie/contrail-controller
|
import logging
import mock
import unittest
from mock import patch
from svc_monitor.svc_monitor import SvcMonitor
from pysandesh.sandesh_base import Sandesh
class Arguments(object):
def __init__(self):
self.disc_server_ip = None
self.disc_server_port = None
self.collectors = None
self.http_server_port = 0
self.log_local = None
self.log_category = None
self.log_level = None
self.log_file = '/var/log/contrail/svc_monitor.log'
self.use_syslog = False
self.syslog_facility = Sandesh._DEFAULT_SYSLOG_FACILITY
class SvcMonitorInitTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
@patch('pysandesh.sandesh_base.Sandesh')
@patch.object(SvcMonitor, '_cassandra_init')
def test_init_monitor(self, sandesh_mock, cassandra_init_mock):
logging.debug("init")
self._api_client = mock.Mock()
arguments = Arguments()
with patch.object(logging.handlers, 'RotatingFileHandler'):
self._svc_monitor = SvcMonitor(self._api_client, arguments)
Fix svc_monitor tests by adding a missing arg
This commit d318b73fbab8f0c200c71adf642968a624a7db29
introduced a cluster_id arg but this arg is not
initialized in the test file.
Change-Id: I770e5f2c949afd408b8906439e711e7f619afa57
|
import logging
import mock
import unittest
from mock import patch
from svc_monitor.svc_monitor import SvcMonitor
from pysandesh.sandesh_base import Sandesh
class Arguments(object):
def __init__(self):
self.disc_server_ip = None
self.disc_server_port = None
self.collectors = None
self.http_server_port = 0
self.log_local = None
self.log_category = None
self.log_level = None
self.log_file = '/var/log/contrail/svc_monitor.log'
self.use_syslog = False
self.syslog_facility = Sandesh._DEFAULT_SYSLOG_FACILITY
self.cluster_id = None
class SvcMonitorInitTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
@patch('pysandesh.sandesh_base.Sandesh')
@patch.object(SvcMonitor, '_cassandra_init')
def test_init_monitor(self, sandesh_mock, cassandra_init_mock):
logging.debug("init")
self._api_client = mock.Mock()
arguments = Arguments()
with patch.object(logging.handlers, 'RotatingFileHandler'):
self._svc_monitor = SvcMonitor(self._api_client, arguments)
|
<commit_before>import logging
import mock
import unittest
from mock import patch
from svc_monitor.svc_monitor import SvcMonitor
from pysandesh.sandesh_base import Sandesh
class Arguments(object):
def __init__(self):
self.disc_server_ip = None
self.disc_server_port = None
self.collectors = None
self.http_server_port = 0
self.log_local = None
self.log_category = None
self.log_level = None
self.log_file = '/var/log/contrail/svc_monitor.log'
self.use_syslog = False
self.syslog_facility = Sandesh._DEFAULT_SYSLOG_FACILITY
class SvcMonitorInitTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
@patch('pysandesh.sandesh_base.Sandesh')
@patch.object(SvcMonitor, '_cassandra_init')
def test_init_monitor(self, sandesh_mock, cassandra_init_mock):
logging.debug("init")
self._api_client = mock.Mock()
arguments = Arguments()
with patch.object(logging.handlers, 'RotatingFileHandler'):
self._svc_monitor = SvcMonitor(self._api_client, arguments)
<commit_msg>Fix svc_monitor tests by adding a missing arg
This commit d318b73fbab8f0c200c71adf642968a624a7db29
introduced a cluster_id arg but this arg is not
initialized in the test file.
Change-Id: I770e5f2c949afd408b8906439e711e7f619afa57<commit_after>
|
import logging
import mock
import unittest
from mock import patch
from svc_monitor.svc_monitor import SvcMonitor
from pysandesh.sandesh_base import Sandesh
class Arguments(object):
def __init__(self):
self.disc_server_ip = None
self.disc_server_port = None
self.collectors = None
self.http_server_port = 0
self.log_local = None
self.log_category = None
self.log_level = None
self.log_file = '/var/log/contrail/svc_monitor.log'
self.use_syslog = False
self.syslog_facility = Sandesh._DEFAULT_SYSLOG_FACILITY
self.cluster_id = None
class SvcMonitorInitTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
@patch('pysandesh.sandesh_base.Sandesh')
@patch.object(SvcMonitor, '_cassandra_init')
def test_init_monitor(self, sandesh_mock, cassandra_init_mock):
logging.debug("init")
self._api_client = mock.Mock()
arguments = Arguments()
with patch.object(logging.handlers, 'RotatingFileHandler'):
self._svc_monitor = SvcMonitor(self._api_client, arguments)
|
import logging
import mock
import unittest
from mock import patch
from svc_monitor.svc_monitor import SvcMonitor
from pysandesh.sandesh_base import Sandesh
class Arguments(object):
def __init__(self):
self.disc_server_ip = None
self.disc_server_port = None
self.collectors = None
self.http_server_port = 0
self.log_local = None
self.log_category = None
self.log_level = None
self.log_file = '/var/log/contrail/svc_monitor.log'
self.use_syslog = False
self.syslog_facility = Sandesh._DEFAULT_SYSLOG_FACILITY
class SvcMonitorInitTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
@patch('pysandesh.sandesh_base.Sandesh')
@patch.object(SvcMonitor, '_cassandra_init')
def test_init_monitor(self, sandesh_mock, cassandra_init_mock):
logging.debug("init")
self._api_client = mock.Mock()
arguments = Arguments()
with patch.object(logging.handlers, 'RotatingFileHandler'):
self._svc_monitor = SvcMonitor(self._api_client, arguments)
Fix svc_monitor tests by adding a missing arg
This commit d318b73fbab8f0c200c71adf642968a624a7db29
introduced a cluster_id arg but this arg is not
initialized in the test file.
Change-Id: I770e5f2c949afd408b8906439e711e7f619afa57import logging
import mock
import unittest
from mock import patch
from svc_monitor.svc_monitor import SvcMonitor
from pysandesh.sandesh_base import Sandesh
class Arguments(object):
def __init__(self):
self.disc_server_ip = None
self.disc_server_port = None
self.collectors = None
self.http_server_port = 0
self.log_local = None
self.log_category = None
self.log_level = None
self.log_file = '/var/log/contrail/svc_monitor.log'
self.use_syslog = False
self.syslog_facility = Sandesh._DEFAULT_SYSLOG_FACILITY
self.cluster_id = None
class SvcMonitorInitTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
@patch('pysandesh.sandesh_base.Sandesh')
@patch.object(SvcMonitor, '_cassandra_init')
def test_init_monitor(self, sandesh_mock, cassandra_init_mock):
logging.debug("init")
self._api_client = mock.Mock()
arguments = Arguments()
with patch.object(logging.handlers, 'RotatingFileHandler'):
self._svc_monitor = SvcMonitor(self._api_client, arguments)
|
<commit_before>import logging
import mock
import unittest
from mock import patch
from svc_monitor.svc_monitor import SvcMonitor
from pysandesh.sandesh_base import Sandesh
class Arguments(object):
def __init__(self):
self.disc_server_ip = None
self.disc_server_port = None
self.collectors = None
self.http_server_port = 0
self.log_local = None
self.log_category = None
self.log_level = None
self.log_file = '/var/log/contrail/svc_monitor.log'
self.use_syslog = False
self.syslog_facility = Sandesh._DEFAULT_SYSLOG_FACILITY
class SvcMonitorInitTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
@patch('pysandesh.sandesh_base.Sandesh')
@patch.object(SvcMonitor, '_cassandra_init')
def test_init_monitor(self, sandesh_mock, cassandra_init_mock):
logging.debug("init")
self._api_client = mock.Mock()
arguments = Arguments()
with patch.object(logging.handlers, 'RotatingFileHandler'):
self._svc_monitor = SvcMonitor(self._api_client, arguments)
<commit_msg>Fix svc_monitor tests by adding a missing arg
This commit d318b73fbab8f0c200c71adf642968a624a7db29
introduced a cluster_id arg but this arg is not
initialized in the test file.
Change-Id: I770e5f2c949afd408b8906439e711e7f619afa57<commit_after>import logging
import mock
import unittest
from mock import patch
from svc_monitor.svc_monitor import SvcMonitor
from pysandesh.sandesh_base import Sandesh
class Arguments(object):
def __init__(self):
self.disc_server_ip = None
self.disc_server_port = None
self.collectors = None
self.http_server_port = 0
self.log_local = None
self.log_category = None
self.log_level = None
self.log_file = '/var/log/contrail/svc_monitor.log'
self.use_syslog = False
self.syslog_facility = Sandesh._DEFAULT_SYSLOG_FACILITY
self.cluster_id = None
class SvcMonitorInitTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
@patch('pysandesh.sandesh_base.Sandesh')
@patch.object(SvcMonitor, '_cassandra_init')
def test_init_monitor(self, sandesh_mock, cassandra_init_mock):
logging.debug("init")
self._api_client = mock.Mock()
arguments = Arguments()
with patch.object(logging.handlers, 'RotatingFileHandler'):
self._svc_monitor = SvcMonitor(self._api_client, arguments)
|
9da50045cc9d67df8d8d075a6e2a2dc7e9f137ee
|
tsa/data/sb5b/tweets.py
|
tsa/data/sb5b/tweets.py
|
#!/usr/bin/env python
import os
from tsa.lib import tabular, html
xlsx_filepath = '%s/ohio/sb5-b.xlsx' % os.getenv('CORPORA', '.')
label_keys = ['For', 'Against', 'Neutral', 'Broken Link', 'Not Applicable']
def read(limit=None):
'''Yields dicts with at least 'Labels' and 'Tweet' fields.'''
for row in tabular.read_xlsx(xlsx_filepath, limit=limit):
for label_key in label_keys:
row[label_key] = bool(row[label_key])
row['Labels'] = [label_key for label_key in label_keys if row[label_key]]
row['Label'] = (row['Labels'] + ['NA'])[0]
row['Tweet'] = html.unescape(row['Tweet'])
yield row
|
#!/usr/bin/env python
import os
from tsa.lib import tabular, html
import logging
logger = logging.getLogger(__name__)
xlsx_filepath = '%s/ohio/sb5-b.xlsx' % os.getenv('CORPORA', '.')
label_keys = ['For', 'Against', 'Neutral', 'Broken Link', 'Not Applicable']
def read(limit=None):
'''Yields dicts with at least 'Labels' and 'Tweet' fields.'''
for row in tabular.read_xlsx(xlsx_filepath, limit=limit):
for label_key in label_keys:
row[label_key] = bool(row[label_key])
row['Labels'] = [label_key for label_key in label_keys if row[label_key]]
row['Label'] = (row['Labels'] + ['NA'])[0]
row['Tweet'] = html.unescape(row['Tweet'])
yield row
def read_cached(limit=None):
import cPickle as pickle
pickle_filepath = '%s.pickled-%s' % (xlsx_filepath, limit or 'all')
if os.path.exists(pickle_filepath):
logger.info('Loading pickled sb5b tweets file from %s', pickle_filepath)
pickle_file = open(pickle_filepath, 'rb')
for item in pickle.load(pickle_file):
yield item
else:
logger.info('Reading fresh sb5b tweets')
items = list(read(limit=limit))
logger.info('Pickling sb5b tweets to %s', pickle_filepath)
pickle_file = open(pickle_filepath, 'wb')
pickle.dump(items, pickle_file)
for item in items:
yield item
|
Add specific iterable-like pickling handler for sb5b tweet data
|
Add specific iterable-like pickling handler for sb5b tweet data
|
Python
|
mit
|
chbrown/tsa,chbrown/tsa,chbrown/tsa
|
#!/usr/bin/env python
import os
from tsa.lib import tabular, html
xlsx_filepath = '%s/ohio/sb5-b.xlsx' % os.getenv('CORPORA', '.')
label_keys = ['For', 'Against', 'Neutral', 'Broken Link', 'Not Applicable']
def read(limit=None):
'''Yields dicts with at least 'Labels' and 'Tweet' fields.'''
for row in tabular.read_xlsx(xlsx_filepath, limit=limit):
for label_key in label_keys:
row[label_key] = bool(row[label_key])
row['Labels'] = [label_key for label_key in label_keys if row[label_key]]
row['Label'] = (row['Labels'] + ['NA'])[0]
row['Tweet'] = html.unescape(row['Tweet'])
yield row
Add specific iterable-like pickling handler for sb5b tweet data
|
#!/usr/bin/env python
import os
from tsa.lib import tabular, html
import logging
logger = logging.getLogger(__name__)
xlsx_filepath = '%s/ohio/sb5-b.xlsx' % os.getenv('CORPORA', '.')
label_keys = ['For', 'Against', 'Neutral', 'Broken Link', 'Not Applicable']
def read(limit=None):
'''Yields dicts with at least 'Labels' and 'Tweet' fields.'''
for row in tabular.read_xlsx(xlsx_filepath, limit=limit):
for label_key in label_keys:
row[label_key] = bool(row[label_key])
row['Labels'] = [label_key for label_key in label_keys if row[label_key]]
row['Label'] = (row['Labels'] + ['NA'])[0]
row['Tweet'] = html.unescape(row['Tweet'])
yield row
def read_cached(limit=None):
import cPickle as pickle
pickle_filepath = '%s.pickled-%s' % (xlsx_filepath, limit or 'all')
if os.path.exists(pickle_filepath):
logger.info('Loading pickled sb5b tweets file from %s', pickle_filepath)
pickle_file = open(pickle_filepath, 'rb')
for item in pickle.load(pickle_file):
yield item
else:
logger.info('Reading fresh sb5b tweets')
items = list(read(limit=limit))
logger.info('Pickling sb5b tweets to %s', pickle_filepath)
pickle_file = open(pickle_filepath, 'wb')
pickle.dump(items, pickle_file)
for item in items:
yield item
|
<commit_before>#!/usr/bin/env python
import os
from tsa.lib import tabular, html
xlsx_filepath = '%s/ohio/sb5-b.xlsx' % os.getenv('CORPORA', '.')
label_keys = ['For', 'Against', 'Neutral', 'Broken Link', 'Not Applicable']
def read(limit=None):
'''Yields dicts with at least 'Labels' and 'Tweet' fields.'''
for row in tabular.read_xlsx(xlsx_filepath, limit=limit):
for label_key in label_keys:
row[label_key] = bool(row[label_key])
row['Labels'] = [label_key for label_key in label_keys if row[label_key]]
row['Label'] = (row['Labels'] + ['NA'])[0]
row['Tweet'] = html.unescape(row['Tweet'])
yield row
<commit_msg>Add specific iterable-like pickling handler for sb5b tweet data<commit_after>
|
#!/usr/bin/env python
import os
from tsa.lib import tabular, html
import logging
logger = logging.getLogger(__name__)
xlsx_filepath = '%s/ohio/sb5-b.xlsx' % os.getenv('CORPORA', '.')
label_keys = ['For', 'Against', 'Neutral', 'Broken Link', 'Not Applicable']
def read(limit=None):
'''Yields dicts with at least 'Labels' and 'Tweet' fields.'''
for row in tabular.read_xlsx(xlsx_filepath, limit=limit):
for label_key in label_keys:
row[label_key] = bool(row[label_key])
row['Labels'] = [label_key for label_key in label_keys if row[label_key]]
row['Label'] = (row['Labels'] + ['NA'])[0]
row['Tweet'] = html.unescape(row['Tweet'])
yield row
def read_cached(limit=None):
import cPickle as pickle
pickle_filepath = '%s.pickled-%s' % (xlsx_filepath, limit or 'all')
if os.path.exists(pickle_filepath):
logger.info('Loading pickled sb5b tweets file from %s', pickle_filepath)
pickle_file = open(pickle_filepath, 'rb')
for item in pickle.load(pickle_file):
yield item
else:
logger.info('Reading fresh sb5b tweets')
items = list(read(limit=limit))
logger.info('Pickling sb5b tweets to %s', pickle_filepath)
pickle_file = open(pickle_filepath, 'wb')
pickle.dump(items, pickle_file)
for item in items:
yield item
|
#!/usr/bin/env python
import os
from tsa.lib import tabular, html
xlsx_filepath = '%s/ohio/sb5-b.xlsx' % os.getenv('CORPORA', '.')
label_keys = ['For', 'Against', 'Neutral', 'Broken Link', 'Not Applicable']
def read(limit=None):
'''Yields dicts with at least 'Labels' and 'Tweet' fields.'''
for row in tabular.read_xlsx(xlsx_filepath, limit=limit):
for label_key in label_keys:
row[label_key] = bool(row[label_key])
row['Labels'] = [label_key for label_key in label_keys if row[label_key]]
row['Label'] = (row['Labels'] + ['NA'])[0]
row['Tweet'] = html.unescape(row['Tweet'])
yield row
Add specific iterable-like pickling handler for sb5b tweet data#!/usr/bin/env python
import os
from tsa.lib import tabular, html
import logging
logger = logging.getLogger(__name__)
xlsx_filepath = '%s/ohio/sb5-b.xlsx' % os.getenv('CORPORA', '.')
label_keys = ['For', 'Against', 'Neutral', 'Broken Link', 'Not Applicable']
def read(limit=None):
'''Yields dicts with at least 'Labels' and 'Tweet' fields.'''
for row in tabular.read_xlsx(xlsx_filepath, limit=limit):
for label_key in label_keys:
row[label_key] = bool(row[label_key])
row['Labels'] = [label_key for label_key in label_keys if row[label_key]]
row['Label'] = (row['Labels'] + ['NA'])[0]
row['Tweet'] = html.unescape(row['Tweet'])
yield row
def read_cached(limit=None):
import cPickle as pickle
pickle_filepath = '%s.pickled-%s' % (xlsx_filepath, limit or 'all')
if os.path.exists(pickle_filepath):
logger.info('Loading pickled sb5b tweets file from %s', pickle_filepath)
pickle_file = open(pickle_filepath, 'rb')
for item in pickle.load(pickle_file):
yield item
else:
logger.info('Reading fresh sb5b tweets')
items = list(read(limit=limit))
logger.info('Pickling sb5b tweets to %s', pickle_filepath)
pickle_file = open(pickle_filepath, 'wb')
pickle.dump(items, pickle_file)
for item in items:
yield item
|
<commit_before>#!/usr/bin/env python
import os
from tsa.lib import tabular, html
xlsx_filepath = '%s/ohio/sb5-b.xlsx' % os.getenv('CORPORA', '.')
label_keys = ['For', 'Against', 'Neutral', 'Broken Link', 'Not Applicable']
def read(limit=None):
'''Yields dicts with at least 'Labels' and 'Tweet' fields.'''
for row in tabular.read_xlsx(xlsx_filepath, limit=limit):
for label_key in label_keys:
row[label_key] = bool(row[label_key])
row['Labels'] = [label_key for label_key in label_keys if row[label_key]]
row['Label'] = (row['Labels'] + ['NA'])[0]
row['Tweet'] = html.unescape(row['Tweet'])
yield row
<commit_msg>Add specific iterable-like pickling handler for sb5b tweet data<commit_after>#!/usr/bin/env python
import os
from tsa.lib import tabular, html
import logging
logger = logging.getLogger(__name__)
xlsx_filepath = '%s/ohio/sb5-b.xlsx' % os.getenv('CORPORA', '.')
label_keys = ['For', 'Against', 'Neutral', 'Broken Link', 'Not Applicable']
def read(limit=None):
'''Yields dicts with at least 'Labels' and 'Tweet' fields.'''
for row in tabular.read_xlsx(xlsx_filepath, limit=limit):
for label_key in label_keys:
row[label_key] = bool(row[label_key])
row['Labels'] = [label_key for label_key in label_keys if row[label_key]]
row['Label'] = (row['Labels'] + ['NA'])[0]
row['Tweet'] = html.unescape(row['Tweet'])
yield row
def read_cached(limit=None):
import cPickle as pickle
pickle_filepath = '%s.pickled-%s' % (xlsx_filepath, limit or 'all')
if os.path.exists(pickle_filepath):
logger.info('Loading pickled sb5b tweets file from %s', pickle_filepath)
pickle_file = open(pickle_filepath, 'rb')
for item in pickle.load(pickle_file):
yield item
else:
logger.info('Reading fresh sb5b tweets')
items = list(read(limit=limit))
logger.info('Pickling sb5b tweets to %s', pickle_filepath)
pickle_file = open(pickle_filepath, 'wb')
pickle.dump(items, pickle_file)
for item in items:
yield item
|
c916ea93fc4bcd0383ae7a95ae73f2418e122e1f
|
Orange/tests/__init__.py
|
Orange/tests/__init__.py
|
import os
import unittest
def suite():
test_dir = os.path.dirname(__file__)
return unittest.TestLoader().discover(test_dir, )
test_suite = suite()
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
import os
import unittest
from Orange.widgets.tests import test_settings, test_setting_provider
def suite():
test_dir = os.path.dirname(__file__)
return unittest.TestSuite([
unittest.TestLoader().discover(test_dir),
unittest.TestLoader().loadTestsFromModule(test_settings),
unittest.TestLoader().loadTestsFromModule(test_setting_provider),
])
test_suite = suite()
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
Test settings when setup.py test is run.
|
Test settings when setup.py test is run.
|
Python
|
bsd-2-clause
|
marinkaz/orange3,marinkaz/orange3,qPCR4vir/orange3,qusp/orange3,cheral/orange3,kwikadi/orange3,cheral/orange3,cheral/orange3,marinkaz/orange3,marinkaz/orange3,kwikadi/orange3,qusp/orange3,qPCR4vir/orange3,kwikadi/orange3,kwikadi/orange3,qPCR4vir/orange3,kwikadi/orange3,kwikadi/orange3,qPCR4vir/orange3,marinkaz/orange3,cheral/orange3,qPCR4vir/orange3,qusp/orange3,qusp/orange3,marinkaz/orange3,cheral/orange3,cheral/orange3,qPCR4vir/orange3
|
import os
import unittest
def suite():
test_dir = os.path.dirname(__file__)
return unittest.TestLoader().discover(test_dir, )
test_suite = suite()
if __name__ == '__main__':
unittest.main(defaultTest='suite')
Test settings when setup.py test is run.
|
import os
import unittest
from Orange.widgets.tests import test_settings, test_setting_provider
def suite():
test_dir = os.path.dirname(__file__)
return unittest.TestSuite([
unittest.TestLoader().discover(test_dir),
unittest.TestLoader().loadTestsFromModule(test_settings),
unittest.TestLoader().loadTestsFromModule(test_setting_provider),
])
test_suite = suite()
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
<commit_before>import os
import unittest
def suite():
test_dir = os.path.dirname(__file__)
return unittest.TestLoader().discover(test_dir, )
test_suite = suite()
if __name__ == '__main__':
unittest.main(defaultTest='suite')
<commit_msg>Test settings when setup.py test is run.<commit_after>
|
import os
import unittest
from Orange.widgets.tests import test_settings, test_setting_provider
def suite():
test_dir = os.path.dirname(__file__)
return unittest.TestSuite([
unittest.TestLoader().discover(test_dir),
unittest.TestLoader().loadTestsFromModule(test_settings),
unittest.TestLoader().loadTestsFromModule(test_setting_provider),
])
test_suite = suite()
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
import os
import unittest
def suite():
test_dir = os.path.dirname(__file__)
return unittest.TestLoader().discover(test_dir, )
test_suite = suite()
if __name__ == '__main__':
unittest.main(defaultTest='suite')
Test settings when setup.py test is run.import os
import unittest
from Orange.widgets.tests import test_settings, test_setting_provider
def suite():
test_dir = os.path.dirname(__file__)
return unittest.TestSuite([
unittest.TestLoader().discover(test_dir),
unittest.TestLoader().loadTestsFromModule(test_settings),
unittest.TestLoader().loadTestsFromModule(test_setting_provider),
])
test_suite = suite()
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
<commit_before>import os
import unittest
def suite():
test_dir = os.path.dirname(__file__)
return unittest.TestLoader().discover(test_dir, )
test_suite = suite()
if __name__ == '__main__':
unittest.main(defaultTest='suite')
<commit_msg>Test settings when setup.py test is run.<commit_after>import os
import unittest
from Orange.widgets.tests import test_settings, test_setting_provider
def suite():
test_dir = os.path.dirname(__file__)
return unittest.TestSuite([
unittest.TestLoader().discover(test_dir),
unittest.TestLoader().loadTestsFromModule(test_settings),
unittest.TestLoader().loadTestsFromModule(test_setting_provider),
])
test_suite = suite()
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
f7d3fa716cd73c5a066aa0e40c337b50880befea
|
lc005_longest_palindromic_substring.py
|
lc005_longest_palindromic_substring.py
|
"""Leetcode 5. Longest Palindromic Substring
Medium
Given a string s, find the longest palindromic substring in s.
You may assume that the maximum length of s is 1000.
Example 1:
Input: "babad"
Output: "bab"
Note: "aba" is also a valid answer.
Example 2:
Input: "cbbd"
Output: "bb"
"""
class Solution(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
"""
pass
def main():
s = 'babad'
print(Solution().longestPalindrome(s))
s = 'cbbd'
print(Solution().longestPalindrome(s))
if __name__ == '__main__':
main()
|
"""Leetcode 5. Longest Palindromic Substring
Medium
Given a string s, find the longest palindromic substring in s.
You may assume that the maximum length of s is 1000.
Example 1:
Input: "babad"
Output: "bab"
Note: "aba" is also a valid answer.
Example 2:
Input: "cbbd"
Output: "bb"
"""
class SolutionNaive(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
Time limit exceeded.
"""
n = len(s)
max_len = 1
max_i = 0
max_j = 0
for i in range(n):
for j in range(i + 1, n):
if s[i:(j + 1)] == s[i:(j + 1)][::-1]:
ij_len = j - i + 1
if ij_len > max_len:
max_len = ij_len
max_i = i
max_j = j
else:
break
return s[max_i:(max_j + 1)]
class SolutionDP(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
"""
pass
def main():
s = 'babad' # Ans: bab.
print(SolutionNaive().longestPalindrome(s))
s = 'cbbd' # Ans: bb.
print(SolutionNaive().longestPalindrome(s))
if __name__ == '__main__':
main()
|
Complete naive longest palindromic substring
|
Complete naive longest palindromic substring
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
"""Leetcode 5. Longest Palindromic Substring
Medium
Given a string s, find the longest palindromic substring in s.
You may assume that the maximum length of s is 1000.
Example 1:
Input: "babad"
Output: "bab"
Note: "aba" is also a valid answer.
Example 2:
Input: "cbbd"
Output: "bb"
"""
class Solution(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
"""
pass
def main():
s = 'babad'
print(Solution().longestPalindrome(s))
s = 'cbbd'
print(Solution().longestPalindrome(s))
if __name__ == '__main__':
main()
Complete naive longest palindromic substring
|
"""Leetcode 5. Longest Palindromic Substring
Medium
Given a string s, find the longest palindromic substring in s.
You may assume that the maximum length of s is 1000.
Example 1:
Input: "babad"
Output: "bab"
Note: "aba" is also a valid answer.
Example 2:
Input: "cbbd"
Output: "bb"
"""
class SolutionNaive(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
Time limit exceeded.
"""
n = len(s)
max_len = 1
max_i = 0
max_j = 0
for i in range(n):
for j in range(i + 1, n):
if s[i:(j + 1)] == s[i:(j + 1)][::-1]:
ij_len = j - i + 1
if ij_len > max_len:
max_len = ij_len
max_i = i
max_j = j
else:
break
return s[max_i:(max_j + 1)]
class SolutionDP(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
"""
pass
def main():
s = 'babad' # Ans: bab.
print(SolutionNaive().longestPalindrome(s))
s = 'cbbd' # Ans: bb.
print(SolutionNaive().longestPalindrome(s))
if __name__ == '__main__':
main()
|
<commit_before>"""Leetcode 5. Longest Palindromic Substring
Medium
Given a string s, find the longest palindromic substring in s.
You may assume that the maximum length of s is 1000.
Example 1:
Input: "babad"
Output: "bab"
Note: "aba" is also a valid answer.
Example 2:
Input: "cbbd"
Output: "bb"
"""
class Solution(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
"""
pass
def main():
s = 'babad'
print(Solution().longestPalindrome(s))
s = 'cbbd'
print(Solution().longestPalindrome(s))
if __name__ == '__main__':
main()
<commit_msg>Complete naive longest palindromic substring<commit_after>
|
"""Leetcode 5. Longest Palindromic Substring
Medium
Given a string s, find the longest palindromic substring in s.
You may assume that the maximum length of s is 1000.
Example 1:
Input: "babad"
Output: "bab"
Note: "aba" is also a valid answer.
Example 2:
Input: "cbbd"
Output: "bb"
"""
class SolutionNaive(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
Time limit exceeded.
"""
n = len(s)
max_len = 1
max_i = 0
max_j = 0
for i in range(n):
for j in range(i + 1, n):
if s[i:(j + 1)] == s[i:(j + 1)][::-1]:
ij_len = j - i + 1
if ij_len > max_len:
max_len = ij_len
max_i = i
max_j = j
else:
break
return s[max_i:(max_j + 1)]
class SolutionDP(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
"""
pass
def main():
s = 'babad' # Ans: bab.
print(SolutionNaive().longestPalindrome(s))
s = 'cbbd' # Ans: bb.
print(SolutionNaive().longestPalindrome(s))
if __name__ == '__main__':
main()
|
"""Leetcode 5. Longest Palindromic Substring
Medium
Given a string s, find the longest palindromic substring in s.
You may assume that the maximum length of s is 1000.
Example 1:
Input: "babad"
Output: "bab"
Note: "aba" is also a valid answer.
Example 2:
Input: "cbbd"
Output: "bb"
"""
class Solution(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
"""
pass
def main():
s = 'babad'
print(Solution().longestPalindrome(s))
s = 'cbbd'
print(Solution().longestPalindrome(s))
if __name__ == '__main__':
main()
Complete naive longest palindromic substring"""Leetcode 5. Longest Palindromic Substring
Medium
Given a string s, find the longest palindromic substring in s.
You may assume that the maximum length of s is 1000.
Example 1:
Input: "babad"
Output: "bab"
Note: "aba" is also a valid answer.
Example 2:
Input: "cbbd"
Output: "bb"
"""
class SolutionNaive(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
Time limit exceeded.
"""
n = len(s)
max_len = 1
max_i = 0
max_j = 0
for i in range(n):
for j in range(i + 1, n):
if s[i:(j + 1)] == s[i:(j + 1)][::-1]:
ij_len = j - i + 1
if ij_len > max_len:
max_len = ij_len
max_i = i
max_j = j
else:
break
return s[max_i:(max_j + 1)]
class SolutionDP(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
"""
pass
def main():
s = 'babad' # Ans: bab.
print(SolutionNaive().longestPalindrome(s))
s = 'cbbd' # Ans: bb.
print(SolutionNaive().longestPalindrome(s))
if __name__ == '__main__':
main()
|
<commit_before>"""Leetcode 5. Longest Palindromic Substring
Medium
Given a string s, find the longest palindromic substring in s.
You may assume that the maximum length of s is 1000.
Example 1:
Input: "babad"
Output: "bab"
Note: "aba" is also a valid answer.
Example 2:
Input: "cbbd"
Output: "bb"
"""
class Solution(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
"""
pass
def main():
s = 'babad'
print(Solution().longestPalindrome(s))
s = 'cbbd'
print(Solution().longestPalindrome(s))
if __name__ == '__main__':
main()
<commit_msg>Complete naive longest palindromic substring<commit_after>"""Leetcode 5. Longest Palindromic Substring
Medium
Given a string s, find the longest palindromic substring in s.
You may assume that the maximum length of s is 1000.
Example 1:
Input: "babad"
Output: "bab"
Note: "aba" is also a valid answer.
Example 2:
Input: "cbbd"
Output: "bb"
"""
class SolutionNaive(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
Time limit exceeded.
"""
n = len(s)
max_len = 1
max_i = 0
max_j = 0
for i in range(n):
for j in range(i + 1, n):
if s[i:(j + 1)] == s[i:(j + 1)][::-1]:
ij_len = j - i + 1
if ij_len > max_len:
max_len = ij_len
max_i = i
max_j = j
else:
break
return s[max_i:(max_j + 1)]
class SolutionDP(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
"""
pass
def main():
s = 'babad' # Ans: bab.
print(SolutionNaive().longestPalindrome(s))
s = 'cbbd' # Ans: bb.
print(SolutionNaive().longestPalindrome(s))
if __name__ == '__main__':
main()
|
4923c2e25cc7547e3b1e1b0ade35a03a931e3f84
|
core/management/commands/run_urlscript.py
|
core/management/commands/run_urlscript.py
|
import urllib
import datetime
import multiprocessing
from django.conf import settings
from django.core.urlresolvers import reverse
from django.contrib.sites.models import Site
from django.core.management.base import BaseCommand, CommandError
from core.models import URL, Cron
def request_url(url):
urllib.urlopen("http://{0}{1}".format(
Site.objects.get_current().domain,
reverse("run_fn", kwargs={"slug": url.slug})
))
class Command(BaseCommand):
help = "Run the url scripts"
can_import_settings = True
def handle(self, *args, **options):
pool = multiprocessing.Pool(multiprocessing.cpu_count())
today = int(datetime.date.today().strftime("%s"))
now = datetime.datetime.now()
curr_time = int(now.strftime("%s")) - now.second
mins_passed = int((curr_time - today) / 60.0)
intervals = Cron.objects.filter(interval__lte=mins_passed)\
.values_list('interval', flat=True).\
order_by('interval').distinct()
request = ""
for interval in intervals:
if mins_passed % interval == 0 or settings.DEBUG:
for cron in Cron.objects.filter(interval=interval):
url = cron.url
pool.apply_async(request_url, (url, ))
pool.close()
pool.join()
|
try:
from urllib.request import urlopen
except ImportError:
from urllib import urlopen
import datetime
import multiprocessing
from django.conf import settings
from django.core.urlresolvers import reverse
from django.contrib.sites.models import Site
from django.core.management.base import BaseCommand, CommandError
from core.models import URL, Cron
def request_url(url):
urlopen("http://{0}{1}".format(
Site.objects.get_current().domain,
reverse("run_fn", kwargs={"slug": url.slug})
))
class Command(BaseCommand):
help = "Run the url scripts"
can_import_settings = True
def handle(self, *args, **options):
pool = multiprocessing.Pool(multiprocessing.cpu_count())
today = int(datetime.date.today().strftime("%s"))
now = datetime.datetime.now()
curr_time = int(now.strftime("%s")) - now.second
mins_passed = int((curr_time - today) / 60.0)
intervals = Cron.objects.filter(interval__lte=mins_passed)\
.values_list('interval', flat=True).\
order_by('interval').distinct()
for interval in intervals:
if mins_passed % interval == 0 or settings.DEBUG:
for cron in Cron.objects.filter(interval=interval):
url = cron.url
pool.apply_async(request_url, (url, ))
pool.close()
pool.join()
|
Fix a python3 import .
|
Fix a python3 import .
|
Python
|
mit
|
theju/urlscript
|
import urllib
import datetime
import multiprocessing
from django.conf import settings
from django.core.urlresolvers import reverse
from django.contrib.sites.models import Site
from django.core.management.base import BaseCommand, CommandError
from core.models import URL, Cron
def request_url(url):
urllib.urlopen("http://{0}{1}".format(
Site.objects.get_current().domain,
reverse("run_fn", kwargs={"slug": url.slug})
))
class Command(BaseCommand):
help = "Run the url scripts"
can_import_settings = True
def handle(self, *args, **options):
pool = multiprocessing.Pool(multiprocessing.cpu_count())
today = int(datetime.date.today().strftime("%s"))
now = datetime.datetime.now()
curr_time = int(now.strftime("%s")) - now.second
mins_passed = int((curr_time - today) / 60.0)
intervals = Cron.objects.filter(interval__lte=mins_passed)\
.values_list('interval', flat=True).\
order_by('interval').distinct()
request = ""
for interval in intervals:
if mins_passed % interval == 0 or settings.DEBUG:
for cron in Cron.objects.filter(interval=interval):
url = cron.url
pool.apply_async(request_url, (url, ))
pool.close()
pool.join()
Fix a python3 import .
|
try:
from urllib.request import urlopen
except ImportError:
from urllib import urlopen
import datetime
import multiprocessing
from django.conf import settings
from django.core.urlresolvers import reverse
from django.contrib.sites.models import Site
from django.core.management.base import BaseCommand, CommandError
from core.models import URL, Cron
def request_url(url):
urlopen("http://{0}{1}".format(
Site.objects.get_current().domain,
reverse("run_fn", kwargs={"slug": url.slug})
))
class Command(BaseCommand):
help = "Run the url scripts"
can_import_settings = True
def handle(self, *args, **options):
pool = multiprocessing.Pool(multiprocessing.cpu_count())
today = int(datetime.date.today().strftime("%s"))
now = datetime.datetime.now()
curr_time = int(now.strftime("%s")) - now.second
mins_passed = int((curr_time - today) / 60.0)
intervals = Cron.objects.filter(interval__lte=mins_passed)\
.values_list('interval', flat=True).\
order_by('interval').distinct()
for interval in intervals:
if mins_passed % interval == 0 or settings.DEBUG:
for cron in Cron.objects.filter(interval=interval):
url = cron.url
pool.apply_async(request_url, (url, ))
pool.close()
pool.join()
|
<commit_before>import urllib
import datetime
import multiprocessing
from django.conf import settings
from django.core.urlresolvers import reverse
from django.contrib.sites.models import Site
from django.core.management.base import BaseCommand, CommandError
from core.models import URL, Cron
def request_url(url):
urllib.urlopen("http://{0}{1}".format(
Site.objects.get_current().domain,
reverse("run_fn", kwargs={"slug": url.slug})
))
class Command(BaseCommand):
help = "Run the url scripts"
can_import_settings = True
def handle(self, *args, **options):
pool = multiprocessing.Pool(multiprocessing.cpu_count())
today = int(datetime.date.today().strftime("%s"))
now = datetime.datetime.now()
curr_time = int(now.strftime("%s")) - now.second
mins_passed = int((curr_time - today) / 60.0)
intervals = Cron.objects.filter(interval__lte=mins_passed)\
.values_list('interval', flat=True).\
order_by('interval').distinct()
request = ""
for interval in intervals:
if mins_passed % interval == 0 or settings.DEBUG:
for cron in Cron.objects.filter(interval=interval):
url = cron.url
pool.apply_async(request_url, (url, ))
pool.close()
pool.join()
<commit_msg>Fix a python3 import .<commit_after>
|
try:
from urllib.request import urlopen
except ImportError:
from urllib import urlopen
import datetime
import multiprocessing
from django.conf import settings
from django.core.urlresolvers import reverse
from django.contrib.sites.models import Site
from django.core.management.base import BaseCommand, CommandError
from core.models import URL, Cron
def request_url(url):
urlopen("http://{0}{1}".format(
Site.objects.get_current().domain,
reverse("run_fn", kwargs={"slug": url.slug})
))
class Command(BaseCommand):
help = "Run the url scripts"
can_import_settings = True
def handle(self, *args, **options):
pool = multiprocessing.Pool(multiprocessing.cpu_count())
today = int(datetime.date.today().strftime("%s"))
now = datetime.datetime.now()
curr_time = int(now.strftime("%s")) - now.second
mins_passed = int((curr_time - today) / 60.0)
intervals = Cron.objects.filter(interval__lte=mins_passed)\
.values_list('interval', flat=True).\
order_by('interval').distinct()
for interval in intervals:
if mins_passed % interval == 0 or settings.DEBUG:
for cron in Cron.objects.filter(interval=interval):
url = cron.url
pool.apply_async(request_url, (url, ))
pool.close()
pool.join()
|
import urllib
import datetime
import multiprocessing
from django.conf import settings
from django.core.urlresolvers import reverse
from django.contrib.sites.models import Site
from django.core.management.base import BaseCommand, CommandError
from core.models import URL, Cron
def request_url(url):
urllib.urlopen("http://{0}{1}".format(
Site.objects.get_current().domain,
reverse("run_fn", kwargs={"slug": url.slug})
))
class Command(BaseCommand):
help = "Run the url scripts"
can_import_settings = True
def handle(self, *args, **options):
pool = multiprocessing.Pool(multiprocessing.cpu_count())
today = int(datetime.date.today().strftime("%s"))
now = datetime.datetime.now()
curr_time = int(now.strftime("%s")) - now.second
mins_passed = int((curr_time - today) / 60.0)
intervals = Cron.objects.filter(interval__lte=mins_passed)\
.values_list('interval', flat=True).\
order_by('interval').distinct()
request = ""
for interval in intervals:
if mins_passed % interval == 0 or settings.DEBUG:
for cron in Cron.objects.filter(interval=interval):
url = cron.url
pool.apply_async(request_url, (url, ))
pool.close()
pool.join()
Fix a python3 import .try:
from urllib.request import urlopen
except ImportError:
from urllib import urlopen
import datetime
import multiprocessing
from django.conf import settings
from django.core.urlresolvers import reverse
from django.contrib.sites.models import Site
from django.core.management.base import BaseCommand, CommandError
from core.models import URL, Cron
def request_url(url):
urlopen("http://{0}{1}".format(
Site.objects.get_current().domain,
reverse("run_fn", kwargs={"slug": url.slug})
))
class Command(BaseCommand):
help = "Run the url scripts"
can_import_settings = True
def handle(self, *args, **options):
pool = multiprocessing.Pool(multiprocessing.cpu_count())
today = int(datetime.date.today().strftime("%s"))
now = datetime.datetime.now()
curr_time = int(now.strftime("%s")) - now.second
mins_passed = int((curr_time - today) / 60.0)
intervals = Cron.objects.filter(interval__lte=mins_passed)\
.values_list('interval', flat=True).\
order_by('interval').distinct()
for interval in intervals:
if mins_passed % interval == 0 or settings.DEBUG:
for cron in Cron.objects.filter(interval=interval):
url = cron.url
pool.apply_async(request_url, (url, ))
pool.close()
pool.join()
|
<commit_before>import urllib
import datetime
import multiprocessing
from django.conf import settings
from django.core.urlresolvers import reverse
from django.contrib.sites.models import Site
from django.core.management.base import BaseCommand, CommandError
from core.models import URL, Cron
def request_url(url):
urllib.urlopen("http://{0}{1}".format(
Site.objects.get_current().domain,
reverse("run_fn", kwargs={"slug": url.slug})
))
class Command(BaseCommand):
help = "Run the url scripts"
can_import_settings = True
def handle(self, *args, **options):
pool = multiprocessing.Pool(multiprocessing.cpu_count())
today = int(datetime.date.today().strftime("%s"))
now = datetime.datetime.now()
curr_time = int(now.strftime("%s")) - now.second
mins_passed = int((curr_time - today) / 60.0)
intervals = Cron.objects.filter(interval__lte=mins_passed)\
.values_list('interval', flat=True).\
order_by('interval').distinct()
request = ""
for interval in intervals:
if mins_passed % interval == 0 or settings.DEBUG:
for cron in Cron.objects.filter(interval=interval):
url = cron.url
pool.apply_async(request_url, (url, ))
pool.close()
pool.join()
<commit_msg>Fix a python3 import .<commit_after>try:
from urllib.request import urlopen
except ImportError:
from urllib import urlopen
import datetime
import multiprocessing
from django.conf import settings
from django.core.urlresolvers import reverse
from django.contrib.sites.models import Site
from django.core.management.base import BaseCommand, CommandError
from core.models import URL, Cron
def request_url(url):
urlopen("http://{0}{1}".format(
Site.objects.get_current().domain,
reverse("run_fn", kwargs={"slug": url.slug})
))
class Command(BaseCommand):
help = "Run the url scripts"
can_import_settings = True
def handle(self, *args, **options):
pool = multiprocessing.Pool(multiprocessing.cpu_count())
today = int(datetime.date.today().strftime("%s"))
now = datetime.datetime.now()
curr_time = int(now.strftime("%s")) - now.second
mins_passed = int((curr_time - today) / 60.0)
intervals = Cron.objects.filter(interval__lte=mins_passed)\
.values_list('interval', flat=True).\
order_by('interval').distinct()
for interval in intervals:
if mins_passed % interval == 0 or settings.DEBUG:
for cron in Cron.objects.filter(interval=interval):
url = cron.url
pool.apply_async(request_url, (url, ))
pool.close()
pool.join()
|
eab249a092da21d47b07fd9918d4b28dcbc6089b
|
server/dummy/dummy_server.py
|
server/dummy/dummy_server.py
|
#!/usr/bin/env python
import BaseHTTPServer
ServerClass = BaseHTTPServer.HTTPServer
RequestHandlerClass = BaseHTTPServer.BaseHTTPRequestHandler
SERVER_NAME = ''
SERVER_PORT = 9000
class JsonPostResponder(RequestHandlerClass):
def _get_content_from_stream(self, length, stream):
return stream.read(length)
def do_POST(self):
content_length = int(self.headers['Content-Length'])
content = self._get_content_from_stream(content_length, self.rfile)
print '\n---> dummy server: got post!'
print 'command:', self.command
print 'path:', self.path
print 'headers:\n\n', self.headers
print 'content:\n\n', content, '\n'
self.send_response(200)
self.end_headers()
server_address = (SERVER_NAME, SERVER_PORT)
httpd = ServerClass(server_address, JsonPostResponder)
httpd.serve_forever()
|
#!/usr/bin/env python
import BaseHTTPServer
ServerClass = BaseHTTPServer.HTTPServer
RequestHandlerClass = BaseHTTPServer.BaseHTTPRequestHandler
SERVER_NAME = ''
SERVER_PORT = 9000
class JsonPostResponder(RequestHandlerClass):
def _get_content_from_stream(self, length, stream):
return stream.read(length)
def do_POST(self):
content_length = int(self.headers['Content-Length'])
content = self._get_content_from_stream(content_length, self.rfile)
print('\n--- %s%s\n%s' % (self.command, self.path, self.headers))
print content, '\n'
self.send_response(200)
self.end_headers()
server_address = (SERVER_NAME, SERVER_PORT)
httpd = ServerClass(server_address, JsonPostResponder)
httpd.serve_forever()
|
Clean up content and header output
|
Clean up content and header output
|
Python
|
mit
|
jonspeicher/Puddle,jonspeicher/Puddle,jonspeicher/Puddle
|
#!/usr/bin/env python
import BaseHTTPServer
ServerClass = BaseHTTPServer.HTTPServer
RequestHandlerClass = BaseHTTPServer.BaseHTTPRequestHandler
SERVER_NAME = ''
SERVER_PORT = 9000
class JsonPostResponder(RequestHandlerClass):
def _get_content_from_stream(self, length, stream):
return stream.read(length)
def do_POST(self):
content_length = int(self.headers['Content-Length'])
content = self._get_content_from_stream(content_length, self.rfile)
print '\n---> dummy server: got post!'
print 'command:', self.command
print 'path:', self.path
print 'headers:\n\n', self.headers
print 'content:\n\n', content, '\n'
self.send_response(200)
self.end_headers()
server_address = (SERVER_NAME, SERVER_PORT)
httpd = ServerClass(server_address, JsonPostResponder)
httpd.serve_forever()
Clean up content and header output
|
#!/usr/bin/env python
import BaseHTTPServer
ServerClass = BaseHTTPServer.HTTPServer
RequestHandlerClass = BaseHTTPServer.BaseHTTPRequestHandler
SERVER_NAME = ''
SERVER_PORT = 9000
class JsonPostResponder(RequestHandlerClass):
def _get_content_from_stream(self, length, stream):
return stream.read(length)
def do_POST(self):
content_length = int(self.headers['Content-Length'])
content = self._get_content_from_stream(content_length, self.rfile)
print('\n--- %s%s\n%s' % (self.command, self.path, self.headers))
print content, '\n'
self.send_response(200)
self.end_headers()
server_address = (SERVER_NAME, SERVER_PORT)
httpd = ServerClass(server_address, JsonPostResponder)
httpd.serve_forever()
|
<commit_before>#!/usr/bin/env python
import BaseHTTPServer
ServerClass = BaseHTTPServer.HTTPServer
RequestHandlerClass = BaseHTTPServer.BaseHTTPRequestHandler
SERVER_NAME = ''
SERVER_PORT = 9000
class JsonPostResponder(RequestHandlerClass):
def _get_content_from_stream(self, length, stream):
return stream.read(length)
def do_POST(self):
content_length = int(self.headers['Content-Length'])
content = self._get_content_from_stream(content_length, self.rfile)
print '\n---> dummy server: got post!'
print 'command:', self.command
print 'path:', self.path
print 'headers:\n\n', self.headers
print 'content:\n\n', content, '\n'
self.send_response(200)
self.end_headers()
server_address = (SERVER_NAME, SERVER_PORT)
httpd = ServerClass(server_address, JsonPostResponder)
httpd.serve_forever()
<commit_msg>Clean up content and header output<commit_after>
|
#!/usr/bin/env python
import BaseHTTPServer
ServerClass = BaseHTTPServer.HTTPServer
RequestHandlerClass = BaseHTTPServer.BaseHTTPRequestHandler
SERVER_NAME = ''
SERVER_PORT = 9000
class JsonPostResponder(RequestHandlerClass):
def _get_content_from_stream(self, length, stream):
return stream.read(length)
def do_POST(self):
content_length = int(self.headers['Content-Length'])
content = self._get_content_from_stream(content_length, self.rfile)
print('\n--- %s%s\n%s' % (self.command, self.path, self.headers))
print content, '\n'
self.send_response(200)
self.end_headers()
server_address = (SERVER_NAME, SERVER_PORT)
httpd = ServerClass(server_address, JsonPostResponder)
httpd.serve_forever()
|
#!/usr/bin/env python
import BaseHTTPServer
ServerClass = BaseHTTPServer.HTTPServer
RequestHandlerClass = BaseHTTPServer.BaseHTTPRequestHandler
SERVER_NAME = ''
SERVER_PORT = 9000
class JsonPostResponder(RequestHandlerClass):
def _get_content_from_stream(self, length, stream):
return stream.read(length)
def do_POST(self):
content_length = int(self.headers['Content-Length'])
content = self._get_content_from_stream(content_length, self.rfile)
print '\n---> dummy server: got post!'
print 'command:', self.command
print 'path:', self.path
print 'headers:\n\n', self.headers
print 'content:\n\n', content, '\n'
self.send_response(200)
self.end_headers()
server_address = (SERVER_NAME, SERVER_PORT)
httpd = ServerClass(server_address, JsonPostResponder)
httpd.serve_forever()
Clean up content and header output#!/usr/bin/env python
import BaseHTTPServer
ServerClass = BaseHTTPServer.HTTPServer
RequestHandlerClass = BaseHTTPServer.BaseHTTPRequestHandler
SERVER_NAME = ''
SERVER_PORT = 9000
class JsonPostResponder(RequestHandlerClass):
def _get_content_from_stream(self, length, stream):
return stream.read(length)
def do_POST(self):
content_length = int(self.headers['Content-Length'])
content = self._get_content_from_stream(content_length, self.rfile)
print('\n--- %s%s\n%s' % (self.command, self.path, self.headers))
print content, '\n'
self.send_response(200)
self.end_headers()
server_address = (SERVER_NAME, SERVER_PORT)
httpd = ServerClass(server_address, JsonPostResponder)
httpd.serve_forever()
|
<commit_before>#!/usr/bin/env python
import BaseHTTPServer
ServerClass = BaseHTTPServer.HTTPServer
RequestHandlerClass = BaseHTTPServer.BaseHTTPRequestHandler
SERVER_NAME = ''
SERVER_PORT = 9000
class JsonPostResponder(RequestHandlerClass):
def _get_content_from_stream(self, length, stream):
return stream.read(length)
def do_POST(self):
content_length = int(self.headers['Content-Length'])
content = self._get_content_from_stream(content_length, self.rfile)
print '\n---> dummy server: got post!'
print 'command:', self.command
print 'path:', self.path
print 'headers:\n\n', self.headers
print 'content:\n\n', content, '\n'
self.send_response(200)
self.end_headers()
server_address = (SERVER_NAME, SERVER_PORT)
httpd = ServerClass(server_address, JsonPostResponder)
httpd.serve_forever()
<commit_msg>Clean up content and header output<commit_after>#!/usr/bin/env python
import BaseHTTPServer
ServerClass = BaseHTTPServer.HTTPServer
RequestHandlerClass = BaseHTTPServer.BaseHTTPRequestHandler
SERVER_NAME = ''
SERVER_PORT = 9000
class JsonPostResponder(RequestHandlerClass):
def _get_content_from_stream(self, length, stream):
return stream.read(length)
def do_POST(self):
content_length = int(self.headers['Content-Length'])
content = self._get_content_from_stream(content_length, self.rfile)
print('\n--- %s%s\n%s' % (self.command, self.path, self.headers))
print content, '\n'
self.send_response(200)
self.end_headers()
server_address = (SERVER_NAME, SERVER_PORT)
httpd = ServerClass(server_address, JsonPostResponder)
httpd.serve_forever()
|
fad3aa04d54d8804984a9c66bfde79f0f5cd8871
|
app/views.py
|
app/views.py
|
import random
from flask import render_template, session, request
from app import app
import config
from app import request_logger
from app import questions
@app.route('/', methods=['GET', 'POST'])
def q():
# NOTE: this will break if questions are answered in wrong order
# TODO: make sure that is is not possible
possible_questions = list(request.form.keys())
if len(possible_questions) == 1:
question = possible_questions[0]
if questions.is_such_question(question):
session['answers'].update({question: request.form[question]})
session['qnumber'] += 1
question_and_answers = questions.get_question_and_answers_by_number(session['qnumber'])
if question_and_answers is None:
return render_template('ready.html')
question = question_and_answers['question']
answers = question_and_answers['answers']
selected = None
description = question_and_answers.get('description')
# shuffle answers if not stated otherwise
if question_and_answers.get('shuffle', True):
random.shuffle(answers)
if question in session['answers']:
selected = session['answers'][question]
return render_template('question.html', question=question, answers=answers, selected=selected, description=description)
@app.route('/uudestaan')
def again():
session['qnumber'] = 0
return q()
|
import random
from flask import render_template, session, request
from app import app
import config
from app import request_logger
from app import questions
@app.route('/', methods=['GET', 'POST'])
def q():
# NOTE: this will break if questions are answered in wrong order
# TODO: make sure that is is not possible
possible_questions = list(request.form.keys())
if len(possible_questions) == 1:
question = possible_questions[0]
if questions.is_such_question(question):
session['answers'].update({question: request.form[question]})
session['qnumber'] += 1
question_and_answers = questions.get_question_and_answers_by_number(session['qnumber'])
if question_and_answers is None:
return render_template('ready.html')
question = question_and_answers['question']
answers = question_and_answers['answers']
selected = None
description = question_and_answers.get('description')
# shuffle answers if not stated otherwise
if question_and_answers.get('shuffle', True):
random.shuffle(answers)
if question in session['answers']:
selected = session['answers'][question]
return render_template('question.html', question=question, answers=answers, selected=selected, description=description)
@app.route('/uudestaan')
def again():
session['qnumber'] = 0
return q()
@app.route('/id')
def id():
return str(session['id'])
|
Add debug page that tells id
|
Add debug page that tells id
|
Python
|
mit
|
felixbade/visa
|
import random
from flask import render_template, session, request
from app import app
import config
from app import request_logger
from app import questions
@app.route('/', methods=['GET', 'POST'])
def q():
# NOTE: this will break if questions are answered in wrong order
# TODO: make sure that is is not possible
possible_questions = list(request.form.keys())
if len(possible_questions) == 1:
question = possible_questions[0]
if questions.is_such_question(question):
session['answers'].update({question: request.form[question]})
session['qnumber'] += 1
question_and_answers = questions.get_question_and_answers_by_number(session['qnumber'])
if question_and_answers is None:
return render_template('ready.html')
question = question_and_answers['question']
answers = question_and_answers['answers']
selected = None
description = question_and_answers.get('description')
# shuffle answers if not stated otherwise
if question_and_answers.get('shuffle', True):
random.shuffle(answers)
if question in session['answers']:
selected = session['answers'][question]
return render_template('question.html', question=question, answers=answers, selected=selected, description=description)
@app.route('/uudestaan')
def again():
session['qnumber'] = 0
return q()
Add debug page that tells id
|
import random
from flask import render_template, session, request
from app import app
import config
from app import request_logger
from app import questions
@app.route('/', methods=['GET', 'POST'])
def q():
# NOTE: this will break if questions are answered in wrong order
# TODO: make sure that is is not possible
possible_questions = list(request.form.keys())
if len(possible_questions) == 1:
question = possible_questions[0]
if questions.is_such_question(question):
session['answers'].update({question: request.form[question]})
session['qnumber'] += 1
question_and_answers = questions.get_question_and_answers_by_number(session['qnumber'])
if question_and_answers is None:
return render_template('ready.html')
question = question_and_answers['question']
answers = question_and_answers['answers']
selected = None
description = question_and_answers.get('description')
# shuffle answers if not stated otherwise
if question_and_answers.get('shuffle', True):
random.shuffle(answers)
if question in session['answers']:
selected = session['answers'][question]
return render_template('question.html', question=question, answers=answers, selected=selected, description=description)
@app.route('/uudestaan')
def again():
session['qnumber'] = 0
return q()
@app.route('/id')
def id():
return str(session['id'])
|
<commit_before>import random
from flask import render_template, session, request
from app import app
import config
from app import request_logger
from app import questions
@app.route('/', methods=['GET', 'POST'])
def q():
# NOTE: this will break if questions are answered in wrong order
# TODO: make sure that is is not possible
possible_questions = list(request.form.keys())
if len(possible_questions) == 1:
question = possible_questions[0]
if questions.is_such_question(question):
session['answers'].update({question: request.form[question]})
session['qnumber'] += 1
question_and_answers = questions.get_question_and_answers_by_number(session['qnumber'])
if question_and_answers is None:
return render_template('ready.html')
question = question_and_answers['question']
answers = question_and_answers['answers']
selected = None
description = question_and_answers.get('description')
# shuffle answers if not stated otherwise
if question_and_answers.get('shuffle', True):
random.shuffle(answers)
if question in session['answers']:
selected = session['answers'][question]
return render_template('question.html', question=question, answers=answers, selected=selected, description=description)
@app.route('/uudestaan')
def again():
session['qnumber'] = 0
return q()
<commit_msg>Add debug page that tells id<commit_after>
|
import random
from flask import render_template, session, request
from app import app
import config
from app import request_logger
from app import questions
@app.route('/', methods=['GET', 'POST'])
def q():
# NOTE: this will break if questions are answered in wrong order
# TODO: make sure that is is not possible
possible_questions = list(request.form.keys())
if len(possible_questions) == 1:
question = possible_questions[0]
if questions.is_such_question(question):
session['answers'].update({question: request.form[question]})
session['qnumber'] += 1
question_and_answers = questions.get_question_and_answers_by_number(session['qnumber'])
if question_and_answers is None:
return render_template('ready.html')
question = question_and_answers['question']
answers = question_and_answers['answers']
selected = None
description = question_and_answers.get('description')
# shuffle answers if not stated otherwise
if question_and_answers.get('shuffle', True):
random.shuffle(answers)
if question in session['answers']:
selected = session['answers'][question]
return render_template('question.html', question=question, answers=answers, selected=selected, description=description)
@app.route('/uudestaan')
def again():
session['qnumber'] = 0
return q()
@app.route('/id')
def id():
return str(session['id'])
|
import random
from flask import render_template, session, request
from app import app
import config
from app import request_logger
from app import questions
@app.route('/', methods=['GET', 'POST'])
def q():
# NOTE: this will break if questions are answered in wrong order
# TODO: make sure that is is not possible
possible_questions = list(request.form.keys())
if len(possible_questions) == 1:
question = possible_questions[0]
if questions.is_such_question(question):
session['answers'].update({question: request.form[question]})
session['qnumber'] += 1
question_and_answers = questions.get_question_and_answers_by_number(session['qnumber'])
if question_and_answers is None:
return render_template('ready.html')
question = question_and_answers['question']
answers = question_and_answers['answers']
selected = None
description = question_and_answers.get('description')
# shuffle answers if not stated otherwise
if question_and_answers.get('shuffle', True):
random.shuffle(answers)
if question in session['answers']:
selected = session['answers'][question]
return render_template('question.html', question=question, answers=answers, selected=selected, description=description)
@app.route('/uudestaan')
def again():
session['qnumber'] = 0
return q()
Add debug page that tells idimport random
from flask import render_template, session, request
from app import app
import config
from app import request_logger
from app import questions
@app.route('/', methods=['GET', 'POST'])
def q():
# NOTE: this will break if questions are answered in wrong order
# TODO: make sure that is is not possible
possible_questions = list(request.form.keys())
if len(possible_questions) == 1:
question = possible_questions[0]
if questions.is_such_question(question):
session['answers'].update({question: request.form[question]})
session['qnumber'] += 1
question_and_answers = questions.get_question_and_answers_by_number(session['qnumber'])
if question_and_answers is None:
return render_template('ready.html')
question = question_and_answers['question']
answers = question_and_answers['answers']
selected = None
description = question_and_answers.get('description')
# shuffle answers if not stated otherwise
if question_and_answers.get('shuffle', True):
random.shuffle(answers)
if question in session['answers']:
selected = session['answers'][question]
return render_template('question.html', question=question, answers=answers, selected=selected, description=description)
@app.route('/uudestaan')
def again():
session['qnumber'] = 0
return q()
@app.route('/id')
def id():
return str(session['id'])
|
<commit_before>import random
from flask import render_template, session, request
from app import app
import config
from app import request_logger
from app import questions
@app.route('/', methods=['GET', 'POST'])
def q():
# NOTE: this will break if questions are answered in wrong order
# TODO: make sure that is is not possible
possible_questions = list(request.form.keys())
if len(possible_questions) == 1:
question = possible_questions[0]
if questions.is_such_question(question):
session['answers'].update({question: request.form[question]})
session['qnumber'] += 1
question_and_answers = questions.get_question_and_answers_by_number(session['qnumber'])
if question_and_answers is None:
return render_template('ready.html')
question = question_and_answers['question']
answers = question_and_answers['answers']
selected = None
description = question_and_answers.get('description')
# shuffle answers if not stated otherwise
if question_and_answers.get('shuffle', True):
random.shuffle(answers)
if question in session['answers']:
selected = session['answers'][question]
return render_template('question.html', question=question, answers=answers, selected=selected, description=description)
@app.route('/uudestaan')
def again():
session['qnumber'] = 0
return q()
<commit_msg>Add debug page that tells id<commit_after>import random
from flask import render_template, session, request
from app import app
import config
from app import request_logger
from app import questions
@app.route('/', methods=['GET', 'POST'])
def q():
# NOTE: this will break if questions are answered in wrong order
# TODO: make sure that is is not possible
possible_questions = list(request.form.keys())
if len(possible_questions) == 1:
question = possible_questions[0]
if questions.is_such_question(question):
session['answers'].update({question: request.form[question]})
session['qnumber'] += 1
question_and_answers = questions.get_question_and_answers_by_number(session['qnumber'])
if question_and_answers is None:
return render_template('ready.html')
question = question_and_answers['question']
answers = question_and_answers['answers']
selected = None
description = question_and_answers.get('description')
# shuffle answers if not stated otherwise
if question_and_answers.get('shuffle', True):
random.shuffle(answers)
if question in session['answers']:
selected = session['answers'][question]
return render_template('question.html', question=question, answers=answers, selected=selected, description=description)
@app.route('/uudestaan')
def again():
session['qnumber'] = 0
return q()
@app.route('/id')
def id():
return str(session['id'])
|
c788398c2c89a7afcbbf899e7ed4d51fccf114b5
|
php_coverage/command.py
|
php_coverage/command.py
|
import sublime_plugin
from php_coverage.finder import CoverageFinder
class CoverageCommand(sublime_plugin.TextCommand):
"""
Base class for a text command which has a coverage file.
"""
def __init__(self, view, coverage_finder=None):
super(CoverageCommand, self).__init__(view)
self.coverage_finder = coverage_finder
def get_coverage_finder(self):
"""
Gets the coverage finder for the command. If none is set, it
instantiates an instance of the default CoverageFinder class.
"""
if not self.coverage_finder:
self.coverage_finder = CoverageFinder()
return self.coverage_finder
def coverage(self):
"""
Finds the coverage file which contains coverage data for the
file open in the view which is running this command.
"""
return self.get_coverage_finder().find(self.view.file_name())
|
import sublime_plugin
from php_coverage.data import CoverageDataFactory
from php_coverage.finder import CoverageFinder
class CoverageCommand(sublime_plugin.TextCommand):
"""
Base class for a text command which has a coverage file.
"""
def __init__(self, view, coverage_finder=None):
super(CoverageCommand, self).__init__(view)
self.coverage_finder = coverage_finder
def get_coverage_finder(self):
"""
Gets the coverage finder for the command. If none is set, it
instantiates an instance of the default CoverageFinder class.
"""
if not self.coverage_finder:
self.coverage_finder = CoverageFinder()
return self.coverage_finder
def coverage(self):
"""
Loads coverage data for the file open in the view which is
running this command.
"""
filename = self.view.file_name()
coverage_file = self.get_coverage_finder().find(filename)
if (coverage_file):
return CoverageDataFactory().factory(coverage_file)
return None
|
Return coverage data in CoverageCommand::coverage()
|
Return coverage data in CoverageCommand::coverage()
|
Python
|
mit
|
bradfeehan/SublimePHPCoverage,bradfeehan/SublimePHPCoverage
|
import sublime_plugin
from php_coverage.finder import CoverageFinder
class CoverageCommand(sublime_plugin.TextCommand):
"""
Base class for a text command which has a coverage file.
"""
def __init__(self, view, coverage_finder=None):
super(CoverageCommand, self).__init__(view)
self.coverage_finder = coverage_finder
def get_coverage_finder(self):
"""
Gets the coverage finder for the command. If none is set, it
instantiates an instance of the default CoverageFinder class.
"""
if not self.coverage_finder:
self.coverage_finder = CoverageFinder()
return self.coverage_finder
def coverage(self):
"""
Finds the coverage file which contains coverage data for the
file open in the view which is running this command.
"""
return self.get_coverage_finder().find(self.view.file_name())
Return coverage data in CoverageCommand::coverage()
|
import sublime_plugin
from php_coverage.data import CoverageDataFactory
from php_coverage.finder import CoverageFinder
class CoverageCommand(sublime_plugin.TextCommand):
"""
Base class for a text command which has a coverage file.
"""
def __init__(self, view, coverage_finder=None):
super(CoverageCommand, self).__init__(view)
self.coverage_finder = coverage_finder
def get_coverage_finder(self):
"""
Gets the coverage finder for the command. If none is set, it
instantiates an instance of the default CoverageFinder class.
"""
if not self.coverage_finder:
self.coverage_finder = CoverageFinder()
return self.coverage_finder
def coverage(self):
"""
Loads coverage data for the file open in the view which is
running this command.
"""
filename = self.view.file_name()
coverage_file = self.get_coverage_finder().find(filename)
if (coverage_file):
return CoverageDataFactory().factory(coverage_file)
return None
|
<commit_before>import sublime_plugin
from php_coverage.finder import CoverageFinder
class CoverageCommand(sublime_plugin.TextCommand):
"""
Base class for a text command which has a coverage file.
"""
def __init__(self, view, coverage_finder=None):
super(CoverageCommand, self).__init__(view)
self.coverage_finder = coverage_finder
def get_coverage_finder(self):
"""
Gets the coverage finder for the command. If none is set, it
instantiates an instance of the default CoverageFinder class.
"""
if not self.coverage_finder:
self.coverage_finder = CoverageFinder()
return self.coverage_finder
def coverage(self):
"""
Finds the coverage file which contains coverage data for the
file open in the view which is running this command.
"""
return self.get_coverage_finder().find(self.view.file_name())
<commit_msg>Return coverage data in CoverageCommand::coverage()<commit_after>
|
import sublime_plugin
from php_coverage.data import CoverageDataFactory
from php_coverage.finder import CoverageFinder
class CoverageCommand(sublime_plugin.TextCommand):
"""
Base class for a text command which has a coverage file.
"""
def __init__(self, view, coverage_finder=None):
super(CoverageCommand, self).__init__(view)
self.coverage_finder = coverage_finder
def get_coverage_finder(self):
"""
Gets the coverage finder for the command. If none is set, it
instantiates an instance of the default CoverageFinder class.
"""
if not self.coverage_finder:
self.coverage_finder = CoverageFinder()
return self.coverage_finder
def coverage(self):
"""
Loads coverage data for the file open in the view which is
running this command.
"""
filename = self.view.file_name()
coverage_file = self.get_coverage_finder().find(filename)
if (coverage_file):
return CoverageDataFactory().factory(coverage_file)
return None
|
import sublime_plugin
from php_coverage.finder import CoverageFinder
class CoverageCommand(sublime_plugin.TextCommand):
"""
Base class for a text command which has a coverage file.
"""
def __init__(self, view, coverage_finder=None):
super(CoverageCommand, self).__init__(view)
self.coverage_finder = coverage_finder
def get_coverage_finder(self):
"""
Gets the coverage finder for the command. If none is set, it
instantiates an instance of the default CoverageFinder class.
"""
if not self.coverage_finder:
self.coverage_finder = CoverageFinder()
return self.coverage_finder
def coverage(self):
"""
Finds the coverage file which contains coverage data for the
file open in the view which is running this command.
"""
return self.get_coverage_finder().find(self.view.file_name())
Return coverage data in CoverageCommand::coverage()import sublime_plugin
from php_coverage.data import CoverageDataFactory
from php_coverage.finder import CoverageFinder
class CoverageCommand(sublime_plugin.TextCommand):
"""
Base class for a text command which has a coverage file.
"""
def __init__(self, view, coverage_finder=None):
super(CoverageCommand, self).__init__(view)
self.coverage_finder = coverage_finder
def get_coverage_finder(self):
"""
Gets the coverage finder for the command. If none is set, it
instantiates an instance of the default CoverageFinder class.
"""
if not self.coverage_finder:
self.coverage_finder = CoverageFinder()
return self.coverage_finder
def coverage(self):
"""
Loads coverage data for the file open in the view which is
running this command.
"""
filename = self.view.file_name()
coverage_file = self.get_coverage_finder().find(filename)
if (coverage_file):
return CoverageDataFactory().factory(coverage_file)
return None
|
<commit_before>import sublime_plugin
from php_coverage.finder import CoverageFinder
class CoverageCommand(sublime_plugin.TextCommand):
"""
Base class for a text command which has a coverage file.
"""
def __init__(self, view, coverage_finder=None):
super(CoverageCommand, self).__init__(view)
self.coverage_finder = coverage_finder
def get_coverage_finder(self):
"""
Gets the coverage finder for the command. If none is set, it
instantiates an instance of the default CoverageFinder class.
"""
if not self.coverage_finder:
self.coverage_finder = CoverageFinder()
return self.coverage_finder
def coverage(self):
"""
Finds the coverage file which contains coverage data for the
file open in the view which is running this command.
"""
return self.get_coverage_finder().find(self.view.file_name())
<commit_msg>Return coverage data in CoverageCommand::coverage()<commit_after>import sublime_plugin
from php_coverage.data import CoverageDataFactory
from php_coverage.finder import CoverageFinder
class CoverageCommand(sublime_plugin.TextCommand):
"""
Base class for a text command which has a coverage file.
"""
def __init__(self, view, coverage_finder=None):
super(CoverageCommand, self).__init__(view)
self.coverage_finder = coverage_finder
def get_coverage_finder(self):
"""
Gets the coverage finder for the command. If none is set, it
instantiates an instance of the default CoverageFinder class.
"""
if not self.coverage_finder:
self.coverage_finder = CoverageFinder()
return self.coverage_finder
def coverage(self):
"""
Loads coverage data for the file open in the view which is
running this command.
"""
filename = self.view.file_name()
coverage_file = self.get_coverage_finder().find(filename)
if (coverage_file):
return CoverageDataFactory().factory(coverage_file)
return None
|
35a5e8717df9a5bcb60593700aa7e2f291816b0f
|
test/test_extensions/test_analytics.py
|
test/test_extensions/test_analytics.py
|
# encoding: utf-8
import time
from web.core.context import Context
from web.ext.analytics import AnalyticsExtension
def test_analytics_extension():
ctx = Context(response=Context(headers=dict()))
ext = AnalyticsExtension()
assert not hasattr(ctx, '_start_time')
ext.prepare(ctx)
assert hasattr(ctx, '_start_time')
ext.before(ctx)
time.sleep(0.1)
ext.after(ctx)
assert 0.1 <= float(ctx.response.headers['X-Generation-Time']) <= 0.2
|
# encoding: utf-8
import time
import pytest
from webob import Request
from web.core import Application
from web.core.context import Context
from web.ext.analytics import AnalyticsExtension
def endpoint(context):
time.sleep(0.1)
return "Hi."
sample = Application(endpoint, extensions=[AnalyticsExtension()])
def test_analytics_extension():
ctx = Context(response=Context(headers=dict()))
ext = AnalyticsExtension()
assert not hasattr(ctx, '_start_time')
ext.prepare(ctx)
assert hasattr(ctx, '_start_time')
ext.before(ctx)
time.sleep(0.1)
ext.after(ctx)
assert 0.1 <= float(ctx.response.headers['X-Generation-Time']) <= 0.2
def test_analytics_extension_in_context():
try:
__import__('web.dispatch.object')
except ImportError:
pytest.skip("web.dispatch.object not installed")
resp = Request.blank('/').get_response(sample)
assert 0.1 <= float(resp.headers['X-Generation-Time']) <= 0.2
|
Add test for full processing pipeline.
|
Add test for full processing pipeline.
|
Python
|
mit
|
marrow/WebCore,marrow/WebCore
|
# encoding: utf-8
import time
from web.core.context import Context
from web.ext.analytics import AnalyticsExtension
def test_analytics_extension():
ctx = Context(response=Context(headers=dict()))
ext = AnalyticsExtension()
assert not hasattr(ctx, '_start_time')
ext.prepare(ctx)
assert hasattr(ctx, '_start_time')
ext.before(ctx)
time.sleep(0.1)
ext.after(ctx)
assert 0.1 <= float(ctx.response.headers['X-Generation-Time']) <= 0.2
Add test for full processing pipeline.
|
# encoding: utf-8
import time
import pytest
from webob import Request
from web.core import Application
from web.core.context import Context
from web.ext.analytics import AnalyticsExtension
def endpoint(context):
time.sleep(0.1)
return "Hi."
sample = Application(endpoint, extensions=[AnalyticsExtension()])
def test_analytics_extension():
ctx = Context(response=Context(headers=dict()))
ext = AnalyticsExtension()
assert not hasattr(ctx, '_start_time')
ext.prepare(ctx)
assert hasattr(ctx, '_start_time')
ext.before(ctx)
time.sleep(0.1)
ext.after(ctx)
assert 0.1 <= float(ctx.response.headers['X-Generation-Time']) <= 0.2
def test_analytics_extension_in_context():
try:
__import__('web.dispatch.object')
except ImportError:
pytest.skip("web.dispatch.object not installed")
resp = Request.blank('/').get_response(sample)
assert 0.1 <= float(resp.headers['X-Generation-Time']) <= 0.2
|
<commit_before># encoding: utf-8
import time
from web.core.context import Context
from web.ext.analytics import AnalyticsExtension
def test_analytics_extension():
ctx = Context(response=Context(headers=dict()))
ext = AnalyticsExtension()
assert not hasattr(ctx, '_start_time')
ext.prepare(ctx)
assert hasattr(ctx, '_start_time')
ext.before(ctx)
time.sleep(0.1)
ext.after(ctx)
assert 0.1 <= float(ctx.response.headers['X-Generation-Time']) <= 0.2
<commit_msg>Add test for full processing pipeline.<commit_after>
|
# encoding: utf-8
import time
import pytest
from webob import Request
from web.core import Application
from web.core.context import Context
from web.ext.analytics import AnalyticsExtension
def endpoint(context):
time.sleep(0.1)
return "Hi."
sample = Application(endpoint, extensions=[AnalyticsExtension()])
def test_analytics_extension():
ctx = Context(response=Context(headers=dict()))
ext = AnalyticsExtension()
assert not hasattr(ctx, '_start_time')
ext.prepare(ctx)
assert hasattr(ctx, '_start_time')
ext.before(ctx)
time.sleep(0.1)
ext.after(ctx)
assert 0.1 <= float(ctx.response.headers['X-Generation-Time']) <= 0.2
def test_analytics_extension_in_context():
try:
__import__('web.dispatch.object')
except ImportError:
pytest.skip("web.dispatch.object not installed")
resp = Request.blank('/').get_response(sample)
assert 0.1 <= float(resp.headers['X-Generation-Time']) <= 0.2
|
# encoding: utf-8
import time
from web.core.context import Context
from web.ext.analytics import AnalyticsExtension
def test_analytics_extension():
ctx = Context(response=Context(headers=dict()))
ext = AnalyticsExtension()
assert not hasattr(ctx, '_start_time')
ext.prepare(ctx)
assert hasattr(ctx, '_start_time')
ext.before(ctx)
time.sleep(0.1)
ext.after(ctx)
assert 0.1 <= float(ctx.response.headers['X-Generation-Time']) <= 0.2
Add test for full processing pipeline.# encoding: utf-8
import time
import pytest
from webob import Request
from web.core import Application
from web.core.context import Context
from web.ext.analytics import AnalyticsExtension
def endpoint(context):
time.sleep(0.1)
return "Hi."
sample = Application(endpoint, extensions=[AnalyticsExtension()])
def test_analytics_extension():
ctx = Context(response=Context(headers=dict()))
ext = AnalyticsExtension()
assert not hasattr(ctx, '_start_time')
ext.prepare(ctx)
assert hasattr(ctx, '_start_time')
ext.before(ctx)
time.sleep(0.1)
ext.after(ctx)
assert 0.1 <= float(ctx.response.headers['X-Generation-Time']) <= 0.2
def test_analytics_extension_in_context():
try:
__import__('web.dispatch.object')
except ImportError:
pytest.skip("web.dispatch.object not installed")
resp = Request.blank('/').get_response(sample)
assert 0.1 <= float(resp.headers['X-Generation-Time']) <= 0.2
|
<commit_before># encoding: utf-8
import time
from web.core.context import Context
from web.ext.analytics import AnalyticsExtension
def test_analytics_extension():
ctx = Context(response=Context(headers=dict()))
ext = AnalyticsExtension()
assert not hasattr(ctx, '_start_time')
ext.prepare(ctx)
assert hasattr(ctx, '_start_time')
ext.before(ctx)
time.sleep(0.1)
ext.after(ctx)
assert 0.1 <= float(ctx.response.headers['X-Generation-Time']) <= 0.2
<commit_msg>Add test for full processing pipeline.<commit_after># encoding: utf-8
import time
import pytest
from webob import Request
from web.core import Application
from web.core.context import Context
from web.ext.analytics import AnalyticsExtension
def endpoint(context):
time.sleep(0.1)
return "Hi."
sample = Application(endpoint, extensions=[AnalyticsExtension()])
def test_analytics_extension():
ctx = Context(response=Context(headers=dict()))
ext = AnalyticsExtension()
assert not hasattr(ctx, '_start_time')
ext.prepare(ctx)
assert hasattr(ctx, '_start_time')
ext.before(ctx)
time.sleep(0.1)
ext.after(ctx)
assert 0.1 <= float(ctx.response.headers['X-Generation-Time']) <= 0.2
def test_analytics_extension_in_context():
try:
__import__('web.dispatch.object')
except ImportError:
pytest.skip("web.dispatch.object not installed")
resp = Request.blank('/').get_response(sample)
assert 0.1 <= float(resp.headers['X-Generation-Time']) <= 0.2
|
92d5e7078c86b50c0682e70e115271355442cea2
|
pixie/utils/__init__.py
|
pixie/utils/__init__.py
|
# Standard library imports
import json
# Our setup file
with open('../../pixie/setup.json') as file:
setup_file = json.load(file)
# Our user agent
user_agent = "Pixie (https://github.com/GetRektByMe/Pixie)"
# A function to use with checks to check for owner
def is_owner(ctx):
return ctx.message.author.id == setup_file["discord"]['owner_id']
# Sanitises roles so when functions that print roles are used it doesn't ping @everyone
def safe_roles(roles: list):
names = []
for role in roles:
if role.name == "@everyone":
names.append("@\u200beveryone") # \u200b is an invisible space
else:
names.append(role.name)
return names
|
# Standard library imports
import json
# Our setup file
with open('../setup.json') as file:
setup_file = json.load(file)
# Our user agent
user_agent = "Pixie (https://github.com/GetRektByMe/Pixie)"
# A function to use with checks to check for owner
def is_owner(ctx):
return ctx.message.author.id == setup_file["discord"]['owner_id']
# Sanitises roles so when functions that print roles are used it doesn't ping @everyone
def safe_roles(roles: list):
names = []
for role in roles:
if role.name == "@everyone":
names.append("@\u200beveryone") # \u200b is an invisible space
else:
names.append(role.name)
return names
|
Change method of search for utils setup_file
|
Change method of search for utils setup_file
|
Python
|
mit
|
GetRektByMe/Pixie
|
# Standard library imports
import json
# Our setup file
with open('../../pixie/setup.json') as file:
setup_file = json.load(file)
# Our user agent
user_agent = "Pixie (https://github.com/GetRektByMe/Pixie)"
# A function to use with checks to check for owner
def is_owner(ctx):
return ctx.message.author.id == setup_file["discord"]['owner_id']
# Sanitises roles so when functions that print roles are used it doesn't ping @everyone
def safe_roles(roles: list):
names = []
for role in roles:
if role.name == "@everyone":
names.append("@\u200beveryone") # \u200b is an invisible space
else:
names.append(role.name)
return names
Change method of search for utils setup_file
|
# Standard library imports
import json
# Our setup file
with open('../setup.json') as file:
setup_file = json.load(file)
# Our user agent
user_agent = "Pixie (https://github.com/GetRektByMe/Pixie)"
# A function to use with checks to check for owner
def is_owner(ctx):
return ctx.message.author.id == setup_file["discord"]['owner_id']
# Sanitises roles so when functions that print roles are used it doesn't ping @everyone
def safe_roles(roles: list):
names = []
for role in roles:
if role.name == "@everyone":
names.append("@\u200beveryone") # \u200b is an invisible space
else:
names.append(role.name)
return names
|
<commit_before># Standard library imports
import json
# Our setup file
with open('../../pixie/setup.json') as file:
setup_file = json.load(file)
# Our user agent
user_agent = "Pixie (https://github.com/GetRektByMe/Pixie)"
# A function to use with checks to check for owner
def is_owner(ctx):
return ctx.message.author.id == setup_file["discord"]['owner_id']
# Sanitises roles so when functions that print roles are used it doesn't ping @everyone
def safe_roles(roles: list):
names = []
for role in roles:
if role.name == "@everyone":
names.append("@\u200beveryone") # \u200b is an invisible space
else:
names.append(role.name)
return names
<commit_msg>Change method of search for utils setup_file<commit_after>
|
# Standard library imports
import json
# Our setup file
with open('../setup.json') as file:
setup_file = json.load(file)
# Our user agent
user_agent = "Pixie (https://github.com/GetRektByMe/Pixie)"
# A function to use with checks to check for owner
def is_owner(ctx):
return ctx.message.author.id == setup_file["discord"]['owner_id']
# Sanitises roles so when functions that print roles are used it doesn't ping @everyone
def safe_roles(roles: list):
names = []
for role in roles:
if role.name == "@everyone":
names.append("@\u200beveryone") # \u200b is an invisible space
else:
names.append(role.name)
return names
|
# Standard library imports
import json
# Our setup file
with open('../../pixie/setup.json') as file:
setup_file = json.load(file)
# Our user agent
user_agent = "Pixie (https://github.com/GetRektByMe/Pixie)"
# A function to use with checks to check for owner
def is_owner(ctx):
return ctx.message.author.id == setup_file["discord"]['owner_id']
# Sanitises roles so when functions that print roles are used it doesn't ping @everyone
def safe_roles(roles: list):
names = []
for role in roles:
if role.name == "@everyone":
names.append("@\u200beveryone") # \u200b is an invisible space
else:
names.append(role.name)
return names
Change method of search for utils setup_file# Standard library imports
import json
# Our setup file
with open('../setup.json') as file:
setup_file = json.load(file)
# Our user agent
user_agent = "Pixie (https://github.com/GetRektByMe/Pixie)"
# A function to use with checks to check for owner
def is_owner(ctx):
return ctx.message.author.id == setup_file["discord"]['owner_id']
# Sanitises roles so when functions that print roles are used it doesn't ping @everyone
def safe_roles(roles: list):
names = []
for role in roles:
if role.name == "@everyone":
names.append("@\u200beveryone") # \u200b is an invisible space
else:
names.append(role.name)
return names
|
<commit_before># Standard library imports
import json
# Our setup file
with open('../../pixie/setup.json') as file:
setup_file = json.load(file)
# Our user agent
user_agent = "Pixie (https://github.com/GetRektByMe/Pixie)"
# A function to use with checks to check for owner
def is_owner(ctx):
return ctx.message.author.id == setup_file["discord"]['owner_id']
# Sanitises roles so when functions that print roles are used it doesn't ping @everyone
def safe_roles(roles: list):
names = []
for role in roles:
if role.name == "@everyone":
names.append("@\u200beveryone") # \u200b is an invisible space
else:
names.append(role.name)
return names
<commit_msg>Change method of search for utils setup_file<commit_after># Standard library imports
import json
# Our setup file
with open('../setup.json') as file:
setup_file = json.load(file)
# Our user agent
user_agent = "Pixie (https://github.com/GetRektByMe/Pixie)"
# A function to use with checks to check for owner
def is_owner(ctx):
return ctx.message.author.id == setup_file["discord"]['owner_id']
# Sanitises roles so when functions that print roles are used it doesn't ping @everyone
def safe_roles(roles: list):
names = []
for role in roles:
if role.name == "@everyone":
names.append("@\u200beveryone") # \u200b is an invisible space
else:
names.append(role.name)
return names
|
01a832d1c761eda01ad94f29709c8e76bd7e82fe
|
project/models.py
|
project/models.py
|
import datetime
from project import db, bcrypt
class User(db.Model):
__tablename__ = "users"
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String, unique=True, nullable=False)
password = db.Column(db.String, nullable=False)
registered_on = db.Column(db.DateTime, nullable=False)
admin = db.Column(db.Boolean, nullable=False, default=False)
confirmed = db.Column(db.Boolean, nullable=False, default=False)
confirmed_on = db.Column(db.DateTime, nullable=True)
def __init__(self, email, password, confirmed,
paid=False, admin=False, confirmed_on=None):
self.email = email
self.password = bcrypt.generate_password_hash(password)
self.registered_on = datetime.datetime.now()
self.admin = admin
self.confirmed = confirmed
self.confirmed_on = confirmed_on
def is_authenticated(self):
return True
def is_active(self):
return True
def is_anonymous(self):
return False
def get_id(self):
return self.id
def __repr__(self):
return '<email {}'.format(self.email)
|
import datetime
from project import db, bcrypt
class User(db.Model):
__tablename__ = "users"
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String, unique=True, nullable=False)
password = db.Column(db.String, nullable=False)
registered_on = db.Column(db.DateTime, nullable=False)
admin = db.Column(db.Boolean, nullable=False, default=False)
confirmed = db.Column(db.Boolean, nullable=False, default=False)
confirmed_on = db.Column(db.DateTime, nullable=True)
rating = db.Column(db.Float, nullable=True)
def __init__(self, email, password, confirmed,
paid=False, admin=False, confirmed_on=None):
self.email = email
self.password = bcrypt.generate_password_hash(password)
self.registered_on = datetime.datetime.now()
self.admin = admin
self.confirmed = confirmed
self.confirmed_on = confirmed_on
def is_authenticated(self):
return True
def is_active(self):
return True
def is_anonymous(self):
return False
def get_id(self):
return self.id
def __repr__(self):
return '<email {}'.format(self.email)
|
Add rating field to User model
|
Add rating field to User model
|
Python
|
mit
|
dylanshine/streamschool,dylanshine/streamschool
|
import datetime
from project import db, bcrypt
class User(db.Model):
__tablename__ = "users"
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String, unique=True, nullable=False)
password = db.Column(db.String, nullable=False)
registered_on = db.Column(db.DateTime, nullable=False)
admin = db.Column(db.Boolean, nullable=False, default=False)
confirmed = db.Column(db.Boolean, nullable=False, default=False)
confirmed_on = db.Column(db.DateTime, nullable=True)
def __init__(self, email, password, confirmed,
paid=False, admin=False, confirmed_on=None):
self.email = email
self.password = bcrypt.generate_password_hash(password)
self.registered_on = datetime.datetime.now()
self.admin = admin
self.confirmed = confirmed
self.confirmed_on = confirmed_on
def is_authenticated(self):
return True
def is_active(self):
return True
def is_anonymous(self):
return False
def get_id(self):
return self.id
def __repr__(self):
return '<email {}'.format(self.email)
Add rating field to User model
|
import datetime
from project import db, bcrypt
class User(db.Model):
__tablename__ = "users"
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String, unique=True, nullable=False)
password = db.Column(db.String, nullable=False)
registered_on = db.Column(db.DateTime, nullable=False)
admin = db.Column(db.Boolean, nullable=False, default=False)
confirmed = db.Column(db.Boolean, nullable=False, default=False)
confirmed_on = db.Column(db.DateTime, nullable=True)
rating = db.Column(db.Float, nullable=True)
def __init__(self, email, password, confirmed,
paid=False, admin=False, confirmed_on=None):
self.email = email
self.password = bcrypt.generate_password_hash(password)
self.registered_on = datetime.datetime.now()
self.admin = admin
self.confirmed = confirmed
self.confirmed_on = confirmed_on
def is_authenticated(self):
return True
def is_active(self):
return True
def is_anonymous(self):
return False
def get_id(self):
return self.id
def __repr__(self):
return '<email {}'.format(self.email)
|
<commit_before>import datetime
from project import db, bcrypt
class User(db.Model):
__tablename__ = "users"
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String, unique=True, nullable=False)
password = db.Column(db.String, nullable=False)
registered_on = db.Column(db.DateTime, nullable=False)
admin = db.Column(db.Boolean, nullable=False, default=False)
confirmed = db.Column(db.Boolean, nullable=False, default=False)
confirmed_on = db.Column(db.DateTime, nullable=True)
def __init__(self, email, password, confirmed,
paid=False, admin=False, confirmed_on=None):
self.email = email
self.password = bcrypt.generate_password_hash(password)
self.registered_on = datetime.datetime.now()
self.admin = admin
self.confirmed = confirmed
self.confirmed_on = confirmed_on
def is_authenticated(self):
return True
def is_active(self):
return True
def is_anonymous(self):
return False
def get_id(self):
return self.id
def __repr__(self):
return '<email {}'.format(self.email)
<commit_msg>Add rating field to User model<commit_after>
|
import datetime
from project import db, bcrypt
class User(db.Model):
__tablename__ = "users"
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String, unique=True, nullable=False)
password = db.Column(db.String, nullable=False)
registered_on = db.Column(db.DateTime, nullable=False)
admin = db.Column(db.Boolean, nullable=False, default=False)
confirmed = db.Column(db.Boolean, nullable=False, default=False)
confirmed_on = db.Column(db.DateTime, nullable=True)
rating = db.Column(db.Float, nullable=True)
def __init__(self, email, password, confirmed,
paid=False, admin=False, confirmed_on=None):
self.email = email
self.password = bcrypt.generate_password_hash(password)
self.registered_on = datetime.datetime.now()
self.admin = admin
self.confirmed = confirmed
self.confirmed_on = confirmed_on
def is_authenticated(self):
return True
def is_active(self):
return True
def is_anonymous(self):
return False
def get_id(self):
return self.id
def __repr__(self):
return '<email {}'.format(self.email)
|
import datetime
from project import db, bcrypt
class User(db.Model):
__tablename__ = "users"
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String, unique=True, nullable=False)
password = db.Column(db.String, nullable=False)
registered_on = db.Column(db.DateTime, nullable=False)
admin = db.Column(db.Boolean, nullable=False, default=False)
confirmed = db.Column(db.Boolean, nullable=False, default=False)
confirmed_on = db.Column(db.DateTime, nullable=True)
def __init__(self, email, password, confirmed,
paid=False, admin=False, confirmed_on=None):
self.email = email
self.password = bcrypt.generate_password_hash(password)
self.registered_on = datetime.datetime.now()
self.admin = admin
self.confirmed = confirmed
self.confirmed_on = confirmed_on
def is_authenticated(self):
return True
def is_active(self):
return True
def is_anonymous(self):
return False
def get_id(self):
return self.id
def __repr__(self):
return '<email {}'.format(self.email)
Add rating field to User modelimport datetime
from project import db, bcrypt
class User(db.Model):
__tablename__ = "users"
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String, unique=True, nullable=False)
password = db.Column(db.String, nullable=False)
registered_on = db.Column(db.DateTime, nullable=False)
admin = db.Column(db.Boolean, nullable=False, default=False)
confirmed = db.Column(db.Boolean, nullable=False, default=False)
confirmed_on = db.Column(db.DateTime, nullable=True)
rating = db.Column(db.Float, nullable=True)
def __init__(self, email, password, confirmed,
paid=False, admin=False, confirmed_on=None):
self.email = email
self.password = bcrypt.generate_password_hash(password)
self.registered_on = datetime.datetime.now()
self.admin = admin
self.confirmed = confirmed
self.confirmed_on = confirmed_on
def is_authenticated(self):
return True
def is_active(self):
return True
def is_anonymous(self):
return False
def get_id(self):
return self.id
def __repr__(self):
return '<email {}'.format(self.email)
|
<commit_before>import datetime
from project import db, bcrypt
class User(db.Model):
__tablename__ = "users"
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String, unique=True, nullable=False)
password = db.Column(db.String, nullable=False)
registered_on = db.Column(db.DateTime, nullable=False)
admin = db.Column(db.Boolean, nullable=False, default=False)
confirmed = db.Column(db.Boolean, nullable=False, default=False)
confirmed_on = db.Column(db.DateTime, nullable=True)
def __init__(self, email, password, confirmed,
paid=False, admin=False, confirmed_on=None):
self.email = email
self.password = bcrypt.generate_password_hash(password)
self.registered_on = datetime.datetime.now()
self.admin = admin
self.confirmed = confirmed
self.confirmed_on = confirmed_on
def is_authenticated(self):
return True
def is_active(self):
return True
def is_anonymous(self):
return False
def get_id(self):
return self.id
def __repr__(self):
return '<email {}'.format(self.email)
<commit_msg>Add rating field to User model<commit_after>import datetime
from project import db, bcrypt
class User(db.Model):
__tablename__ = "users"
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String, unique=True, nullable=False)
password = db.Column(db.String, nullable=False)
registered_on = db.Column(db.DateTime, nullable=False)
admin = db.Column(db.Boolean, nullable=False, default=False)
confirmed = db.Column(db.Boolean, nullable=False, default=False)
confirmed_on = db.Column(db.DateTime, nullable=True)
rating = db.Column(db.Float, nullable=True)
def __init__(self, email, password, confirmed,
paid=False, admin=False, confirmed_on=None):
self.email = email
self.password = bcrypt.generate_password_hash(password)
self.registered_on = datetime.datetime.now()
self.admin = admin
self.confirmed = confirmed
self.confirmed_on = confirmed_on
def is_authenticated(self):
return True
def is_active(self):
return True
def is_anonymous(self):
return False
def get_id(self):
return self.id
def __repr__(self):
return '<email {}'.format(self.email)
|
a013cdbe690271c4ec9bc172c994ff5f6e5808c4
|
test/test_assetstore_model_override.py
|
test/test_assetstore_model_override.py
|
import pytest
from girder.models.file import File
from girder.models.model_base import Model
from girder.utility import assetstore_utilities
from girder.utility.model_importer import ModelImporter
from girder.utility.abstract_assetstore_adapter import AbstractAssetstoreAdapter
class Fake(Model):
def initialize(self):
self.name = 'fake_collection'
def validate(self, doc):
return doc
class FakeAdapter(AbstractAssetstoreAdapter):
def __init__(self, assetstore):
self.the_assetstore = assetstore
@pytest.fixture
def fakeModel(db):
ModelImporter.registerModel('fake', Fake(), plugin='fake_plugin')
yield Fake().save({
'foo': 'bar',
'type': 'fake'
})
ModelImporter.unregisterModel('fake', plugin='fake_plugin')
@pytest.fixture
def fakeAdapter(db):
assetstore_utilities.setAssetstoreAdapter('fake', FakeAdapter)
yield
assetstore_utilities.removeAssetstoreAdapter('fake')
def testAssetstoreModelOverride(fakeModel, fakeAdapter, admin):
file = File().createFile(
creator=admin, item=None, name='a.out', size=0, assetstore=fakeModel,
assetstoreModel='fake', assetstoreModelPlugin='fake_plugin')
adapter = File().getAssetstoreAdapter(file)
assert adapter.the_assetstore == fakeModel
|
import pytest
from girder.models.file import File
from girder.models.model_base import Model
from girder.utility import assetstore_utilities
from girder.utility.model_importer import ModelImporter
from girder.utility.abstract_assetstore_adapter import AbstractAssetstoreAdapter
class Fake(Model):
def initialize(self):
self.name = 'fake_collection'
def validate(self, doc):
return doc
class FakeAdapter(AbstractAssetstoreAdapter):
def __init__(self, assetstore):
self.the_assetstore = assetstore
@pytest.fixture
def fakeModel(db):
ModelImporter.registerModel('fake', Fake(), plugin='fake_plugin')
yield Fake
ModelImporter.unregisterModel('fake', plugin='fake_plugin')
@pytest.fixture
def fakeAdapter(db):
assetstore_utilities.setAssetstoreAdapter('fake', FakeAdapter)
yield
assetstore_utilities.removeAssetstoreAdapter('fake')
def testAssetstoreModelOverride(fakeModel, fakeAdapter, admin):
fakeAssetstore = fakeModel().save({
'foo': 'bar',
'type': 'fake'
})
file = File().createFile(
creator=admin, item=None, name='a.out', size=0, assetstore=fakeAssetstore,
assetstoreModel='fake', assetstoreModelPlugin='fake_plugin')
adapter = File().getAssetstoreAdapter(file)
assert isinstance(adapter, FakeAdapter)
assert adapter.the_assetstore == fakeAssetstore
|
Improve clarity of fake assetstore model fixture
|
Improve clarity of fake assetstore model fixture
|
Python
|
apache-2.0
|
data-exp-lab/girder,girder/girder,manthey/girder,kotfic/girder,Xarthisius/girder,jbeezley/girder,girder/girder,manthey/girder,kotfic/girder,RafaelPalomar/girder,girder/girder,data-exp-lab/girder,girder/girder,manthey/girder,Xarthisius/girder,RafaelPalomar/girder,RafaelPalomar/girder,RafaelPalomar/girder,Kitware/girder,jbeezley/girder,data-exp-lab/girder,data-exp-lab/girder,data-exp-lab/girder,RafaelPalomar/girder,kotfic/girder,Kitware/girder,Xarthisius/girder,Xarthisius/girder,jbeezley/girder,Kitware/girder,Kitware/girder,manthey/girder,kotfic/girder,jbeezley/girder,Xarthisius/girder,kotfic/girder
|
import pytest
from girder.models.file import File
from girder.models.model_base import Model
from girder.utility import assetstore_utilities
from girder.utility.model_importer import ModelImporter
from girder.utility.abstract_assetstore_adapter import AbstractAssetstoreAdapter
class Fake(Model):
def initialize(self):
self.name = 'fake_collection'
def validate(self, doc):
return doc
class FakeAdapter(AbstractAssetstoreAdapter):
def __init__(self, assetstore):
self.the_assetstore = assetstore
@pytest.fixture
def fakeModel(db):
ModelImporter.registerModel('fake', Fake(), plugin='fake_plugin')
yield Fake().save({
'foo': 'bar',
'type': 'fake'
})
ModelImporter.unregisterModel('fake', plugin='fake_plugin')
@pytest.fixture
def fakeAdapter(db):
assetstore_utilities.setAssetstoreAdapter('fake', FakeAdapter)
yield
assetstore_utilities.removeAssetstoreAdapter('fake')
def testAssetstoreModelOverride(fakeModel, fakeAdapter, admin):
file = File().createFile(
creator=admin, item=None, name='a.out', size=0, assetstore=fakeModel,
assetstoreModel='fake', assetstoreModelPlugin='fake_plugin')
adapter = File().getAssetstoreAdapter(file)
assert adapter.the_assetstore == fakeModel
Improve clarity of fake assetstore model fixture
|
import pytest
from girder.models.file import File
from girder.models.model_base import Model
from girder.utility import assetstore_utilities
from girder.utility.model_importer import ModelImporter
from girder.utility.abstract_assetstore_adapter import AbstractAssetstoreAdapter
class Fake(Model):
def initialize(self):
self.name = 'fake_collection'
def validate(self, doc):
return doc
class FakeAdapter(AbstractAssetstoreAdapter):
def __init__(self, assetstore):
self.the_assetstore = assetstore
@pytest.fixture
def fakeModel(db):
ModelImporter.registerModel('fake', Fake(), plugin='fake_plugin')
yield Fake
ModelImporter.unregisterModel('fake', plugin='fake_plugin')
@pytest.fixture
def fakeAdapter(db):
assetstore_utilities.setAssetstoreAdapter('fake', FakeAdapter)
yield
assetstore_utilities.removeAssetstoreAdapter('fake')
def testAssetstoreModelOverride(fakeModel, fakeAdapter, admin):
fakeAssetstore = fakeModel().save({
'foo': 'bar',
'type': 'fake'
})
file = File().createFile(
creator=admin, item=None, name='a.out', size=0, assetstore=fakeAssetstore,
assetstoreModel='fake', assetstoreModelPlugin='fake_plugin')
adapter = File().getAssetstoreAdapter(file)
assert isinstance(adapter, FakeAdapter)
assert adapter.the_assetstore == fakeAssetstore
|
<commit_before>import pytest
from girder.models.file import File
from girder.models.model_base import Model
from girder.utility import assetstore_utilities
from girder.utility.model_importer import ModelImporter
from girder.utility.abstract_assetstore_adapter import AbstractAssetstoreAdapter
class Fake(Model):
def initialize(self):
self.name = 'fake_collection'
def validate(self, doc):
return doc
class FakeAdapter(AbstractAssetstoreAdapter):
def __init__(self, assetstore):
self.the_assetstore = assetstore
@pytest.fixture
def fakeModel(db):
ModelImporter.registerModel('fake', Fake(), plugin='fake_plugin')
yield Fake().save({
'foo': 'bar',
'type': 'fake'
})
ModelImporter.unregisterModel('fake', plugin='fake_plugin')
@pytest.fixture
def fakeAdapter(db):
assetstore_utilities.setAssetstoreAdapter('fake', FakeAdapter)
yield
assetstore_utilities.removeAssetstoreAdapter('fake')
def testAssetstoreModelOverride(fakeModel, fakeAdapter, admin):
file = File().createFile(
creator=admin, item=None, name='a.out', size=0, assetstore=fakeModel,
assetstoreModel='fake', assetstoreModelPlugin='fake_plugin')
adapter = File().getAssetstoreAdapter(file)
assert adapter.the_assetstore == fakeModel
<commit_msg>Improve clarity of fake assetstore model fixture<commit_after>
|
import pytest
from girder.models.file import File
from girder.models.model_base import Model
from girder.utility import assetstore_utilities
from girder.utility.model_importer import ModelImporter
from girder.utility.abstract_assetstore_adapter import AbstractAssetstoreAdapter
class Fake(Model):
def initialize(self):
self.name = 'fake_collection'
def validate(self, doc):
return doc
class FakeAdapter(AbstractAssetstoreAdapter):
def __init__(self, assetstore):
self.the_assetstore = assetstore
@pytest.fixture
def fakeModel(db):
ModelImporter.registerModel('fake', Fake(), plugin='fake_plugin')
yield Fake
ModelImporter.unregisterModel('fake', plugin='fake_plugin')
@pytest.fixture
def fakeAdapter(db):
assetstore_utilities.setAssetstoreAdapter('fake', FakeAdapter)
yield
assetstore_utilities.removeAssetstoreAdapter('fake')
def testAssetstoreModelOverride(fakeModel, fakeAdapter, admin):
fakeAssetstore = fakeModel().save({
'foo': 'bar',
'type': 'fake'
})
file = File().createFile(
creator=admin, item=None, name='a.out', size=0, assetstore=fakeAssetstore,
assetstoreModel='fake', assetstoreModelPlugin='fake_plugin')
adapter = File().getAssetstoreAdapter(file)
assert isinstance(adapter, FakeAdapter)
assert adapter.the_assetstore == fakeAssetstore
|
import pytest
from girder.models.file import File
from girder.models.model_base import Model
from girder.utility import assetstore_utilities
from girder.utility.model_importer import ModelImporter
from girder.utility.abstract_assetstore_adapter import AbstractAssetstoreAdapter
class Fake(Model):
def initialize(self):
self.name = 'fake_collection'
def validate(self, doc):
return doc
class FakeAdapter(AbstractAssetstoreAdapter):
def __init__(self, assetstore):
self.the_assetstore = assetstore
@pytest.fixture
def fakeModel(db):
ModelImporter.registerModel('fake', Fake(), plugin='fake_plugin')
yield Fake().save({
'foo': 'bar',
'type': 'fake'
})
ModelImporter.unregisterModel('fake', plugin='fake_plugin')
@pytest.fixture
def fakeAdapter(db):
assetstore_utilities.setAssetstoreAdapter('fake', FakeAdapter)
yield
assetstore_utilities.removeAssetstoreAdapter('fake')
def testAssetstoreModelOverride(fakeModel, fakeAdapter, admin):
file = File().createFile(
creator=admin, item=None, name='a.out', size=0, assetstore=fakeModel,
assetstoreModel='fake', assetstoreModelPlugin='fake_plugin')
adapter = File().getAssetstoreAdapter(file)
assert adapter.the_assetstore == fakeModel
Improve clarity of fake assetstore model fixtureimport pytest
from girder.models.file import File
from girder.models.model_base import Model
from girder.utility import assetstore_utilities
from girder.utility.model_importer import ModelImporter
from girder.utility.abstract_assetstore_adapter import AbstractAssetstoreAdapter
class Fake(Model):
def initialize(self):
self.name = 'fake_collection'
def validate(self, doc):
return doc
class FakeAdapter(AbstractAssetstoreAdapter):
def __init__(self, assetstore):
self.the_assetstore = assetstore
@pytest.fixture
def fakeModel(db):
ModelImporter.registerModel('fake', Fake(), plugin='fake_plugin')
yield Fake
ModelImporter.unregisterModel('fake', plugin='fake_plugin')
@pytest.fixture
def fakeAdapter(db):
assetstore_utilities.setAssetstoreAdapter('fake', FakeAdapter)
yield
assetstore_utilities.removeAssetstoreAdapter('fake')
def testAssetstoreModelOverride(fakeModel, fakeAdapter, admin):
fakeAssetstore = fakeModel().save({
'foo': 'bar',
'type': 'fake'
})
file = File().createFile(
creator=admin, item=None, name='a.out', size=0, assetstore=fakeAssetstore,
assetstoreModel='fake', assetstoreModelPlugin='fake_plugin')
adapter = File().getAssetstoreAdapter(file)
assert isinstance(adapter, FakeAdapter)
assert adapter.the_assetstore == fakeAssetstore
|
<commit_before>import pytest
from girder.models.file import File
from girder.models.model_base import Model
from girder.utility import assetstore_utilities
from girder.utility.model_importer import ModelImporter
from girder.utility.abstract_assetstore_adapter import AbstractAssetstoreAdapter
class Fake(Model):
def initialize(self):
self.name = 'fake_collection'
def validate(self, doc):
return doc
class FakeAdapter(AbstractAssetstoreAdapter):
def __init__(self, assetstore):
self.the_assetstore = assetstore
@pytest.fixture
def fakeModel(db):
ModelImporter.registerModel('fake', Fake(), plugin='fake_plugin')
yield Fake().save({
'foo': 'bar',
'type': 'fake'
})
ModelImporter.unregisterModel('fake', plugin='fake_plugin')
@pytest.fixture
def fakeAdapter(db):
assetstore_utilities.setAssetstoreAdapter('fake', FakeAdapter)
yield
assetstore_utilities.removeAssetstoreAdapter('fake')
def testAssetstoreModelOverride(fakeModel, fakeAdapter, admin):
file = File().createFile(
creator=admin, item=None, name='a.out', size=0, assetstore=fakeModel,
assetstoreModel='fake', assetstoreModelPlugin='fake_plugin')
adapter = File().getAssetstoreAdapter(file)
assert adapter.the_assetstore == fakeModel
<commit_msg>Improve clarity of fake assetstore model fixture<commit_after>import pytest
from girder.models.file import File
from girder.models.model_base import Model
from girder.utility import assetstore_utilities
from girder.utility.model_importer import ModelImporter
from girder.utility.abstract_assetstore_adapter import AbstractAssetstoreAdapter
class Fake(Model):
def initialize(self):
self.name = 'fake_collection'
def validate(self, doc):
return doc
class FakeAdapter(AbstractAssetstoreAdapter):
def __init__(self, assetstore):
self.the_assetstore = assetstore
@pytest.fixture
def fakeModel(db):
ModelImporter.registerModel('fake', Fake(), plugin='fake_plugin')
yield Fake
ModelImporter.unregisterModel('fake', plugin='fake_plugin')
@pytest.fixture
def fakeAdapter(db):
assetstore_utilities.setAssetstoreAdapter('fake', FakeAdapter)
yield
assetstore_utilities.removeAssetstoreAdapter('fake')
def testAssetstoreModelOverride(fakeModel, fakeAdapter, admin):
fakeAssetstore = fakeModel().save({
'foo': 'bar',
'type': 'fake'
})
file = File().createFile(
creator=admin, item=None, name='a.out', size=0, assetstore=fakeAssetstore,
assetstoreModel='fake', assetstoreModelPlugin='fake_plugin')
adapter = File().getAssetstoreAdapter(file)
assert isinstance(adapter, FakeAdapter)
assert adapter.the_assetstore == fakeAssetstore
|
817d9c78f939de2b01ff518356ed0414178aaa6d
|
avalonstar/apps/api/serializers.py
|
avalonstar/apps/api/serializers.py
|
# -*- coding: utf-8 -*-
from rest_framework import serializers
from apps.broadcasts.models import Broadcast, Series
from apps.games.models import Game
class BroadcastSerializer(serializers.ModelSerializer):
class Meta:
depth = 1
model = Broadcast
class SeriesSerializer(serializers.ModelSerializer):
class Meta:
model = Series
class GameSerializer(serializers.ModelSerializer):
class Meta:
model = Game
|
# -*- coding: utf-8 -*-
from rest_framework import serializers
from apps.broadcasts.models import Broadcast, Raid, Series
from apps.games.models import Game
class BroadcastSerializer(serializers.ModelSerializer):
class Meta:
depth = 1
model = Broadcast
class RaidSerializer(serializers.ModelSerializer):
class Meta:
model = Raid
class SeriesSerializer(serializers.ModelSerializer):
class Meta:
model = Series
class GameSerializer(serializers.ModelSerializer):
class Meta:
model = Game
|
Add Raid to the API.
|
Add Raid to the API.
|
Python
|
apache-2.0
|
bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv
|
# -*- coding: utf-8 -*-
from rest_framework import serializers
from apps.broadcasts.models import Broadcast, Series
from apps.games.models import Game
class BroadcastSerializer(serializers.ModelSerializer):
class Meta:
depth = 1
model = Broadcast
class SeriesSerializer(serializers.ModelSerializer):
class Meta:
model = Series
class GameSerializer(serializers.ModelSerializer):
class Meta:
model = Game
Add Raid to the API.
|
# -*- coding: utf-8 -*-
from rest_framework import serializers
from apps.broadcasts.models import Broadcast, Raid, Series
from apps.games.models import Game
class BroadcastSerializer(serializers.ModelSerializer):
class Meta:
depth = 1
model = Broadcast
class RaidSerializer(serializers.ModelSerializer):
class Meta:
model = Raid
class SeriesSerializer(serializers.ModelSerializer):
class Meta:
model = Series
class GameSerializer(serializers.ModelSerializer):
class Meta:
model = Game
|
<commit_before># -*- coding: utf-8 -*-
from rest_framework import serializers
from apps.broadcasts.models import Broadcast, Series
from apps.games.models import Game
class BroadcastSerializer(serializers.ModelSerializer):
class Meta:
depth = 1
model = Broadcast
class SeriesSerializer(serializers.ModelSerializer):
class Meta:
model = Series
class GameSerializer(serializers.ModelSerializer):
class Meta:
model = Game
<commit_msg>Add Raid to the API.<commit_after>
|
# -*- coding: utf-8 -*-
from rest_framework import serializers
from apps.broadcasts.models import Broadcast, Raid, Series
from apps.games.models import Game
class BroadcastSerializer(serializers.ModelSerializer):
class Meta:
depth = 1
model = Broadcast
class RaidSerializer(serializers.ModelSerializer):
class Meta:
model = Raid
class SeriesSerializer(serializers.ModelSerializer):
class Meta:
model = Series
class GameSerializer(serializers.ModelSerializer):
class Meta:
model = Game
|
# -*- coding: utf-8 -*-
from rest_framework import serializers
from apps.broadcasts.models import Broadcast, Series
from apps.games.models import Game
class BroadcastSerializer(serializers.ModelSerializer):
class Meta:
depth = 1
model = Broadcast
class SeriesSerializer(serializers.ModelSerializer):
class Meta:
model = Series
class GameSerializer(serializers.ModelSerializer):
class Meta:
model = Game
Add Raid to the API.# -*- coding: utf-8 -*-
from rest_framework import serializers
from apps.broadcasts.models import Broadcast, Raid, Series
from apps.games.models import Game
class BroadcastSerializer(serializers.ModelSerializer):
class Meta:
depth = 1
model = Broadcast
class RaidSerializer(serializers.ModelSerializer):
class Meta:
model = Raid
class SeriesSerializer(serializers.ModelSerializer):
class Meta:
model = Series
class GameSerializer(serializers.ModelSerializer):
class Meta:
model = Game
|
<commit_before># -*- coding: utf-8 -*-
from rest_framework import serializers
from apps.broadcasts.models import Broadcast, Series
from apps.games.models import Game
class BroadcastSerializer(serializers.ModelSerializer):
class Meta:
depth = 1
model = Broadcast
class SeriesSerializer(serializers.ModelSerializer):
class Meta:
model = Series
class GameSerializer(serializers.ModelSerializer):
class Meta:
model = Game
<commit_msg>Add Raid to the API.<commit_after># -*- coding: utf-8 -*-
from rest_framework import serializers
from apps.broadcasts.models import Broadcast, Raid, Series
from apps.games.models import Game
class BroadcastSerializer(serializers.ModelSerializer):
class Meta:
depth = 1
model = Broadcast
class RaidSerializer(serializers.ModelSerializer):
class Meta:
model = Raid
class SeriesSerializer(serializers.ModelSerializer):
class Meta:
model = Series
class GameSerializer(serializers.ModelSerializer):
class Meta:
model = Game
|
1275fec0e485deef75a4e12956acb919a9fb7439
|
tests/modules/myInitialPythonModule.py
|
tests/modules/myInitialPythonModule.py
|
from jtapi import *
import os
import sys
import re
import numpy as np
from scipy import misc
mfilename = re.search('(.*).py', os.path.basename(__file__)).group(1)
#########
# input #
#########
print('jt - %s:' % mfilename)
handles_stream = sys.stdin
handles = gethandles(handles_stream)
input_args = readinputargs(handles)
input_args = checkinputargs(input_args)
##############
# processing #
##############
myImageFilename = input_args['myImageFilename']
myImage = np.array(misc.imread(myImageFilename), dtype='float64')
print('>>>>> "myImage" has type "%s" and dimensions "%s".' %
(str(myImage.dtype), str(myImage.shape)))
print '>>>>> position [1, 2] (0-based): %d' % myImage[1, 2]
data = dict()
output_args = dict()
output_args['OutputVar'] = myImage
##########
# output #
##########
writedata(handles, data)
writeoutputargs(handles, output_args)
|
from jtapi import *
import os
import sys
import re
import numpy as np
from scipy import misc
mfilename = re.search('(.*).py', os.path.basename(__file__)).group(1)
#########
# input #
#########
print('jt - %s:' % mfilename)
handles_stream = sys.stdin
handles = gethandles(handles_stream)
input_args = readinputargs(handles)
input_args = checkinputargs(input_args)
##############
# processing #
##############
myImageFilename = input_args['myImageFilename']
print '>>>>> loading "myImage" from "%s"' % myImageFilename
myImage = np.array(misc.imread(myImageFilename), dtype='float64')
print('>>>>> "myImage" has type "%s" and dimensions "%s".' %
(str(myImage.dtype), str(myImage.shape)))
print '>>>>> position [1, 2] (0-based): %d' % myImage[1, 2]
data = dict()
output_args = dict()
output_args['OutputVar'] = myImage
##########
# output #
##########
writedata(handles, data)
writeoutputargs(handles, output_args)
|
Add more detailed print in first module
|
Add more detailed print in first module
|
Python
|
mit
|
brainy-minds/Jterator,brainy-minds/Jterator,brainy-minds/Jterator,brainy-minds/Jterator
|
from jtapi import *
import os
import sys
import re
import numpy as np
from scipy import misc
mfilename = re.search('(.*).py', os.path.basename(__file__)).group(1)
#########
# input #
#########
print('jt - %s:' % mfilename)
handles_stream = sys.stdin
handles = gethandles(handles_stream)
input_args = readinputargs(handles)
input_args = checkinputargs(input_args)
##############
# processing #
##############
myImageFilename = input_args['myImageFilename']
myImage = np.array(misc.imread(myImageFilename), dtype='float64')
print('>>>>> "myImage" has type "%s" and dimensions "%s".' %
(str(myImage.dtype), str(myImage.shape)))
print '>>>>> position [1, 2] (0-based): %d' % myImage[1, 2]
data = dict()
output_args = dict()
output_args['OutputVar'] = myImage
##########
# output #
##########
writedata(handles, data)
writeoutputargs(handles, output_args)
Add more detailed print in first module
|
from jtapi import *
import os
import sys
import re
import numpy as np
from scipy import misc
mfilename = re.search('(.*).py', os.path.basename(__file__)).group(1)
#########
# input #
#########
print('jt - %s:' % mfilename)
handles_stream = sys.stdin
handles = gethandles(handles_stream)
input_args = readinputargs(handles)
input_args = checkinputargs(input_args)
##############
# processing #
##############
myImageFilename = input_args['myImageFilename']
print '>>>>> loading "myImage" from "%s"' % myImageFilename
myImage = np.array(misc.imread(myImageFilename), dtype='float64')
print('>>>>> "myImage" has type "%s" and dimensions "%s".' %
(str(myImage.dtype), str(myImage.shape)))
print '>>>>> position [1, 2] (0-based): %d' % myImage[1, 2]
data = dict()
output_args = dict()
output_args['OutputVar'] = myImage
##########
# output #
##########
writedata(handles, data)
writeoutputargs(handles, output_args)
|
<commit_before>from jtapi import *
import os
import sys
import re
import numpy as np
from scipy import misc
mfilename = re.search('(.*).py', os.path.basename(__file__)).group(1)
#########
# input #
#########
print('jt - %s:' % mfilename)
handles_stream = sys.stdin
handles = gethandles(handles_stream)
input_args = readinputargs(handles)
input_args = checkinputargs(input_args)
##############
# processing #
##############
myImageFilename = input_args['myImageFilename']
myImage = np.array(misc.imread(myImageFilename), dtype='float64')
print('>>>>> "myImage" has type "%s" and dimensions "%s".' %
(str(myImage.dtype), str(myImage.shape)))
print '>>>>> position [1, 2] (0-based): %d' % myImage[1, 2]
data = dict()
output_args = dict()
output_args['OutputVar'] = myImage
##########
# output #
##########
writedata(handles, data)
writeoutputargs(handles, output_args)
<commit_msg>Add more detailed print in first module<commit_after>
|
from jtapi import *
import os
import sys
import re
import numpy as np
from scipy import misc
mfilename = re.search('(.*).py', os.path.basename(__file__)).group(1)
#########
# input #
#########
print('jt - %s:' % mfilename)
handles_stream = sys.stdin
handles = gethandles(handles_stream)
input_args = readinputargs(handles)
input_args = checkinputargs(input_args)
##############
# processing #
##############
myImageFilename = input_args['myImageFilename']
print '>>>>> loading "myImage" from "%s"' % myImageFilename
myImage = np.array(misc.imread(myImageFilename), dtype='float64')
print('>>>>> "myImage" has type "%s" and dimensions "%s".' %
(str(myImage.dtype), str(myImage.shape)))
print '>>>>> position [1, 2] (0-based): %d' % myImage[1, 2]
data = dict()
output_args = dict()
output_args['OutputVar'] = myImage
##########
# output #
##########
writedata(handles, data)
writeoutputargs(handles, output_args)
|
from jtapi import *
import os
import sys
import re
import numpy as np
from scipy import misc
mfilename = re.search('(.*).py', os.path.basename(__file__)).group(1)
#########
# input #
#########
print('jt - %s:' % mfilename)
handles_stream = sys.stdin
handles = gethandles(handles_stream)
input_args = readinputargs(handles)
input_args = checkinputargs(input_args)
##############
# processing #
##############
myImageFilename = input_args['myImageFilename']
myImage = np.array(misc.imread(myImageFilename), dtype='float64')
print('>>>>> "myImage" has type "%s" and dimensions "%s".' %
(str(myImage.dtype), str(myImage.shape)))
print '>>>>> position [1, 2] (0-based): %d' % myImage[1, 2]
data = dict()
output_args = dict()
output_args['OutputVar'] = myImage
##########
# output #
##########
writedata(handles, data)
writeoutputargs(handles, output_args)
Add more detailed print in first modulefrom jtapi import *
import os
import sys
import re
import numpy as np
from scipy import misc
mfilename = re.search('(.*).py', os.path.basename(__file__)).group(1)
#########
# input #
#########
print('jt - %s:' % mfilename)
handles_stream = sys.stdin
handles = gethandles(handles_stream)
input_args = readinputargs(handles)
input_args = checkinputargs(input_args)
##############
# processing #
##############
myImageFilename = input_args['myImageFilename']
print '>>>>> loading "myImage" from "%s"' % myImageFilename
myImage = np.array(misc.imread(myImageFilename), dtype='float64')
print('>>>>> "myImage" has type "%s" and dimensions "%s".' %
(str(myImage.dtype), str(myImage.shape)))
print '>>>>> position [1, 2] (0-based): %d' % myImage[1, 2]
data = dict()
output_args = dict()
output_args['OutputVar'] = myImage
##########
# output #
##########
writedata(handles, data)
writeoutputargs(handles, output_args)
|
<commit_before>from jtapi import *
import os
import sys
import re
import numpy as np
from scipy import misc
mfilename = re.search('(.*).py', os.path.basename(__file__)).group(1)
#########
# input #
#########
print('jt - %s:' % mfilename)
handles_stream = sys.stdin
handles = gethandles(handles_stream)
input_args = readinputargs(handles)
input_args = checkinputargs(input_args)
##############
# processing #
##############
myImageFilename = input_args['myImageFilename']
myImage = np.array(misc.imread(myImageFilename), dtype='float64')
print('>>>>> "myImage" has type "%s" and dimensions "%s".' %
(str(myImage.dtype), str(myImage.shape)))
print '>>>>> position [1, 2] (0-based): %d' % myImage[1, 2]
data = dict()
output_args = dict()
output_args['OutputVar'] = myImage
##########
# output #
##########
writedata(handles, data)
writeoutputargs(handles, output_args)
<commit_msg>Add more detailed print in first module<commit_after>from jtapi import *
import os
import sys
import re
import numpy as np
from scipy import misc
mfilename = re.search('(.*).py', os.path.basename(__file__)).group(1)
#########
# input #
#########
print('jt - %s:' % mfilename)
handles_stream = sys.stdin
handles = gethandles(handles_stream)
input_args = readinputargs(handles)
input_args = checkinputargs(input_args)
##############
# processing #
##############
myImageFilename = input_args['myImageFilename']
print '>>>>> loading "myImage" from "%s"' % myImageFilename
myImage = np.array(misc.imread(myImageFilename), dtype='float64')
print('>>>>> "myImage" has type "%s" and dimensions "%s".' %
(str(myImage.dtype), str(myImage.shape)))
print '>>>>> position [1, 2] (0-based): %d' % myImage[1, 2]
data = dict()
output_args = dict()
output_args['OutputVar'] = myImage
##########
# output #
##########
writedata(handles, data)
writeoutputargs(handles, output_args)
|
6deebdc7e5c93d5f61cad97870cea7fb445bb860
|
onitu/utils.py
|
onitu/utils.py
|
import time
import redis
def connect_to_redis(*args, **kwargs):
client = redis.Redis(*args, unix_socket_path='redis/redis.sock', **kwargs)
while True:
try:
assert client.ping()
except (redis.exceptions.ConnectionError, AssertionError):
time.sleep(0.5)
else:
return client
|
import time
import redis
def connect_to_redis(*args, **kwargs):
client = redis.Redis(
*args,
unix_socket_path='redis/redis.sock',
decode_responses=True,
**kwargs
)
while True:
try:
assert client.ping()
except (redis.exceptions.ConnectionError, AssertionError):
time.sleep(0.5)
else:
return client
|
Convert Redis keys and values to str
|
Convert Redis keys and values to str
|
Python
|
mit
|
onitu/onitu,onitu/onitu,onitu/onitu
|
import time
import redis
def connect_to_redis(*args, **kwargs):
client = redis.Redis(*args, unix_socket_path='redis/redis.sock', **kwargs)
while True:
try:
assert client.ping()
except (redis.exceptions.ConnectionError, AssertionError):
time.sleep(0.5)
else:
return client
Convert Redis keys and values to str
|
import time
import redis
def connect_to_redis(*args, **kwargs):
client = redis.Redis(
*args,
unix_socket_path='redis/redis.sock',
decode_responses=True,
**kwargs
)
while True:
try:
assert client.ping()
except (redis.exceptions.ConnectionError, AssertionError):
time.sleep(0.5)
else:
return client
|
<commit_before>import time
import redis
def connect_to_redis(*args, **kwargs):
client = redis.Redis(*args, unix_socket_path='redis/redis.sock', **kwargs)
while True:
try:
assert client.ping()
except (redis.exceptions.ConnectionError, AssertionError):
time.sleep(0.5)
else:
return client
<commit_msg>Convert Redis keys and values to str<commit_after>
|
import time
import redis
def connect_to_redis(*args, **kwargs):
client = redis.Redis(
*args,
unix_socket_path='redis/redis.sock',
decode_responses=True,
**kwargs
)
while True:
try:
assert client.ping()
except (redis.exceptions.ConnectionError, AssertionError):
time.sleep(0.5)
else:
return client
|
import time
import redis
def connect_to_redis(*args, **kwargs):
client = redis.Redis(*args, unix_socket_path='redis/redis.sock', **kwargs)
while True:
try:
assert client.ping()
except (redis.exceptions.ConnectionError, AssertionError):
time.sleep(0.5)
else:
return client
Convert Redis keys and values to strimport time
import redis
def connect_to_redis(*args, **kwargs):
client = redis.Redis(
*args,
unix_socket_path='redis/redis.sock',
decode_responses=True,
**kwargs
)
while True:
try:
assert client.ping()
except (redis.exceptions.ConnectionError, AssertionError):
time.sleep(0.5)
else:
return client
|
<commit_before>import time
import redis
def connect_to_redis(*args, **kwargs):
client = redis.Redis(*args, unix_socket_path='redis/redis.sock', **kwargs)
while True:
try:
assert client.ping()
except (redis.exceptions.ConnectionError, AssertionError):
time.sleep(0.5)
else:
return client
<commit_msg>Convert Redis keys and values to str<commit_after>import time
import redis
def connect_to_redis(*args, **kwargs):
client = redis.Redis(
*args,
unix_socket_path='redis/redis.sock',
decode_responses=True,
**kwargs
)
while True:
try:
assert client.ping()
except (redis.exceptions.ConnectionError, AssertionError):
time.sleep(0.5)
else:
return client
|
2049dbe3f672041b7b0e93b0b444a6ebb47f723a
|
streak-podium/read.py
|
streak-podium/read.py
|
import requests
def input_file(filename):
"""
Read a file and return list of usernames.
Assumes one username per line and ignores blank lines.
"""
with open(filename, 'r') as f:
return list(line.strip() for line in f if line.strip())
def org_members(org_name):
"""
Query Github API and return list of members from a Github organization.
"""
# TODO: Return github org members, not a placeholder
return ['supermitch', 'Jollyra']
def svg_data(username):
"""
Returns the contribution streak SVG file contents from Github
for a specific username.
"""
url = 'https://github.com/users/{}/contributions'.format(username)
try:
r = requests.get(url)
except requests.exceptions.ConnectionError:
logging.warn('Connection error trying to get url: [{}]'.format(url))
return None
return r.text
|
import logging
import requests
def input_file(filename):
"""
Read a file and return list of usernames.
Assumes one username per line and ignores blank lines.
"""
with open(filename, 'r') as f:
return list(line.strip() for line in f if line.strip())
def org_members(org_name):
"""
Query Github API and return list of members from a Github organization.
"""
# TODO: Return github org members, not a placeholder
return ['supermitch', 'Jollyra']
def svg_data(username):
"""
Returns the contribution streak SVG file contents from Github
for a specific username.
"""
url = 'https://github.com/users/{}/contributions'.format(username)
try:
r = requests.get(url)
except requests.exceptions.ConnectionError:
logging.warn('Connection error trying to get url: [{}]'.format(url))
return None
return r.text
|
Add missing 'logging' module import
|
Add missing 'logging' module import
|
Python
|
mit
|
jollyra/hubot-streak-podium,jollyra/hubot-commit-streak,supermitch/streak-podium,supermitch/streak-podium,jollyra/hubot-streak-podium,jollyra/hubot-commit-streak
|
import requests
def input_file(filename):
"""
Read a file and return list of usernames.
Assumes one username per line and ignores blank lines.
"""
with open(filename, 'r') as f:
return list(line.strip() for line in f if line.strip())
def org_members(org_name):
"""
Query Github API and return list of members from a Github organization.
"""
# TODO: Return github org members, not a placeholder
return ['supermitch', 'Jollyra']
def svg_data(username):
"""
Returns the contribution streak SVG file contents from Github
for a specific username.
"""
url = 'https://github.com/users/{}/contributions'.format(username)
try:
r = requests.get(url)
except requests.exceptions.ConnectionError:
logging.warn('Connection error trying to get url: [{}]'.format(url))
return None
return r.text
Add missing 'logging' module import
|
import logging
import requests
def input_file(filename):
"""
Read a file and return list of usernames.
Assumes one username per line and ignores blank lines.
"""
with open(filename, 'r') as f:
return list(line.strip() for line in f if line.strip())
def org_members(org_name):
"""
Query Github API and return list of members from a Github organization.
"""
# TODO: Return github org members, not a placeholder
return ['supermitch', 'Jollyra']
def svg_data(username):
"""
Returns the contribution streak SVG file contents from Github
for a specific username.
"""
url = 'https://github.com/users/{}/contributions'.format(username)
try:
r = requests.get(url)
except requests.exceptions.ConnectionError:
logging.warn('Connection error trying to get url: [{}]'.format(url))
return None
return r.text
|
<commit_before>import requests
def input_file(filename):
"""
Read a file and return list of usernames.
Assumes one username per line and ignores blank lines.
"""
with open(filename, 'r') as f:
return list(line.strip() for line in f if line.strip())
def org_members(org_name):
"""
Query Github API and return list of members from a Github organization.
"""
# TODO: Return github org members, not a placeholder
return ['supermitch', 'Jollyra']
def svg_data(username):
"""
Returns the contribution streak SVG file contents from Github
for a specific username.
"""
url = 'https://github.com/users/{}/contributions'.format(username)
try:
r = requests.get(url)
except requests.exceptions.ConnectionError:
logging.warn('Connection error trying to get url: [{}]'.format(url))
return None
return r.text
<commit_msg>Add missing 'logging' module import<commit_after>
|
import logging
import requests
def input_file(filename):
"""
Read a file and return list of usernames.
Assumes one username per line and ignores blank lines.
"""
with open(filename, 'r') as f:
return list(line.strip() for line in f if line.strip())
def org_members(org_name):
"""
Query Github API and return list of members from a Github organization.
"""
# TODO: Return github org members, not a placeholder
return ['supermitch', 'Jollyra']
def svg_data(username):
"""
Returns the contribution streak SVG file contents from Github
for a specific username.
"""
url = 'https://github.com/users/{}/contributions'.format(username)
try:
r = requests.get(url)
except requests.exceptions.ConnectionError:
logging.warn('Connection error trying to get url: [{}]'.format(url))
return None
return r.text
|
import requests
def input_file(filename):
"""
Read a file and return list of usernames.
Assumes one username per line and ignores blank lines.
"""
with open(filename, 'r') as f:
return list(line.strip() for line in f if line.strip())
def org_members(org_name):
"""
Query Github API and return list of members from a Github organization.
"""
# TODO: Return github org members, not a placeholder
return ['supermitch', 'Jollyra']
def svg_data(username):
"""
Returns the contribution streak SVG file contents from Github
for a specific username.
"""
url = 'https://github.com/users/{}/contributions'.format(username)
try:
r = requests.get(url)
except requests.exceptions.ConnectionError:
logging.warn('Connection error trying to get url: [{}]'.format(url))
return None
return r.text
Add missing 'logging' module importimport logging
import requests
def input_file(filename):
"""
Read a file and return list of usernames.
Assumes one username per line and ignores blank lines.
"""
with open(filename, 'r') as f:
return list(line.strip() for line in f if line.strip())
def org_members(org_name):
"""
Query Github API and return list of members from a Github organization.
"""
# TODO: Return github org members, not a placeholder
return ['supermitch', 'Jollyra']
def svg_data(username):
"""
Returns the contribution streak SVG file contents from Github
for a specific username.
"""
url = 'https://github.com/users/{}/contributions'.format(username)
try:
r = requests.get(url)
except requests.exceptions.ConnectionError:
logging.warn('Connection error trying to get url: [{}]'.format(url))
return None
return r.text
|
<commit_before>import requests
def input_file(filename):
"""
Read a file and return list of usernames.
Assumes one username per line and ignores blank lines.
"""
with open(filename, 'r') as f:
return list(line.strip() for line in f if line.strip())
def org_members(org_name):
"""
Query Github API and return list of members from a Github organization.
"""
# TODO: Return github org members, not a placeholder
return ['supermitch', 'Jollyra']
def svg_data(username):
"""
Returns the contribution streak SVG file contents from Github
for a specific username.
"""
url = 'https://github.com/users/{}/contributions'.format(username)
try:
r = requests.get(url)
except requests.exceptions.ConnectionError:
logging.warn('Connection error trying to get url: [{}]'.format(url))
return None
return r.text
<commit_msg>Add missing 'logging' module import<commit_after>import logging
import requests
def input_file(filename):
"""
Read a file and return list of usernames.
Assumes one username per line and ignores blank lines.
"""
with open(filename, 'r') as f:
return list(line.strip() for line in f if line.strip())
def org_members(org_name):
"""
Query Github API and return list of members from a Github organization.
"""
# TODO: Return github org members, not a placeholder
return ['supermitch', 'Jollyra']
def svg_data(username):
"""
Returns the contribution streak SVG file contents from Github
for a specific username.
"""
url = 'https://github.com/users/{}/contributions'.format(username)
try:
r = requests.get(url)
except requests.exceptions.ConnectionError:
logging.warn('Connection error trying to get url: [{}]'.format(url))
return None
return r.text
|
4d410dec85fc944717a6537e9eef2585a53159b6
|
python_logging_rabbitmq/formatters.py
|
python_logging_rabbitmq/formatters.py
|
# coding: utf-8
import logging
from socket import gethostname
from .compat import json, text_type
class JSONFormatter(logging.Formatter):
"""
Formatter to convert LogRecord into JSON.
Thanks to: https://github.com/lobziik/rlog
"""
def __init__(self, *args, **kwargs):
include = kwargs.pop('include', None)
exclude = kwargs.pop('exclude', None)
super().__init__(*args, **kwargs)
self.include = include
self.exclude = exclude
def format(self, record):
data = record.__dict__.copy()
if record.args:
msg = record.msg % record.args
else:
msg = record.msg
data.update(
host=gethostname(),
msg=msg,
args=tuple(text_type(arg) for arg in record.args)
)
if 'exc_info' in data and data['exc_info']:
data['exc_info'] = self.formatException(data['exc_info'])
if self.include:
data = {f: data[f] for f in self.include}
elif self.exclude:
for f in self.exclude:
del data[f]
return json.dumps(data)
|
# coding: utf-8
import logging
from socket import gethostname
from django.core.serializers.json import DjangoJSONEncoder
from .compat import json, text_type
class JSONFormatter(logging.Formatter):
"""
Formatter to convert LogRecord into JSON.
Thanks to: https://github.com/lobziik/rlog
"""
def __init__(self, *args, **kwargs):
include = kwargs.pop('include', None)
exclude = kwargs.pop('exclude', None)
super().__init__(*args, **kwargs)
self.include = include
self.exclude = exclude
def format(self, record):
data = record.__dict__.copy()
if record.args:
msg = record.msg % record.args
else:
msg = record.msg
data.update(
host=gethostname(),
msg=msg,
args=tuple(text_type(arg) for arg in record.args)
)
if 'exc_info' in data and data['exc_info']:
data['exc_info'] = self.formatException(data['exc_info'])
if self.include:
data = {f: data[f] for f in self.include}
elif self.exclude:
for f in self.exclude:
if f in data:
del data[f]
return json.dumps(data, cls=DjangoJSONEncoder)
|
Use DjangoJSONEncoder for JSON serialization
|
Use DjangoJSONEncoder for JSON serialization
|
Python
|
mit
|
albertomr86/python-logging-rabbitmq
|
# coding: utf-8
import logging
from socket import gethostname
from .compat import json, text_type
class JSONFormatter(logging.Formatter):
"""
Formatter to convert LogRecord into JSON.
Thanks to: https://github.com/lobziik/rlog
"""
def __init__(self, *args, **kwargs):
include = kwargs.pop('include', None)
exclude = kwargs.pop('exclude', None)
super().__init__(*args, **kwargs)
self.include = include
self.exclude = exclude
def format(self, record):
data = record.__dict__.copy()
if record.args:
msg = record.msg % record.args
else:
msg = record.msg
data.update(
host=gethostname(),
msg=msg,
args=tuple(text_type(arg) for arg in record.args)
)
if 'exc_info' in data and data['exc_info']:
data['exc_info'] = self.formatException(data['exc_info'])
if self.include:
data = {f: data[f] for f in self.include}
elif self.exclude:
for f in self.exclude:
del data[f]
return json.dumps(data)
Use DjangoJSONEncoder for JSON serialization
|
# coding: utf-8
import logging
from socket import gethostname
from django.core.serializers.json import DjangoJSONEncoder
from .compat import json, text_type
class JSONFormatter(logging.Formatter):
"""
Formatter to convert LogRecord into JSON.
Thanks to: https://github.com/lobziik/rlog
"""
def __init__(self, *args, **kwargs):
include = kwargs.pop('include', None)
exclude = kwargs.pop('exclude', None)
super().__init__(*args, **kwargs)
self.include = include
self.exclude = exclude
def format(self, record):
data = record.__dict__.copy()
if record.args:
msg = record.msg % record.args
else:
msg = record.msg
data.update(
host=gethostname(),
msg=msg,
args=tuple(text_type(arg) for arg in record.args)
)
if 'exc_info' in data and data['exc_info']:
data['exc_info'] = self.formatException(data['exc_info'])
if self.include:
data = {f: data[f] for f in self.include}
elif self.exclude:
for f in self.exclude:
if f in data:
del data[f]
return json.dumps(data, cls=DjangoJSONEncoder)
|
<commit_before># coding: utf-8
import logging
from socket import gethostname
from .compat import json, text_type
class JSONFormatter(logging.Formatter):
"""
Formatter to convert LogRecord into JSON.
Thanks to: https://github.com/lobziik/rlog
"""
def __init__(self, *args, **kwargs):
include = kwargs.pop('include', None)
exclude = kwargs.pop('exclude', None)
super().__init__(*args, **kwargs)
self.include = include
self.exclude = exclude
def format(self, record):
data = record.__dict__.copy()
if record.args:
msg = record.msg % record.args
else:
msg = record.msg
data.update(
host=gethostname(),
msg=msg,
args=tuple(text_type(arg) for arg in record.args)
)
if 'exc_info' in data and data['exc_info']:
data['exc_info'] = self.formatException(data['exc_info'])
if self.include:
data = {f: data[f] for f in self.include}
elif self.exclude:
for f in self.exclude:
del data[f]
return json.dumps(data)
<commit_msg>Use DjangoJSONEncoder for JSON serialization<commit_after>
|
# coding: utf-8
import logging
from socket import gethostname
from django.core.serializers.json import DjangoJSONEncoder
from .compat import json, text_type
class JSONFormatter(logging.Formatter):
"""
Formatter to convert LogRecord into JSON.
Thanks to: https://github.com/lobziik/rlog
"""
def __init__(self, *args, **kwargs):
include = kwargs.pop('include', None)
exclude = kwargs.pop('exclude', None)
super().__init__(*args, **kwargs)
self.include = include
self.exclude = exclude
def format(self, record):
data = record.__dict__.copy()
if record.args:
msg = record.msg % record.args
else:
msg = record.msg
data.update(
host=gethostname(),
msg=msg,
args=tuple(text_type(arg) for arg in record.args)
)
if 'exc_info' in data and data['exc_info']:
data['exc_info'] = self.formatException(data['exc_info'])
if self.include:
data = {f: data[f] for f in self.include}
elif self.exclude:
for f in self.exclude:
if f in data:
del data[f]
return json.dumps(data, cls=DjangoJSONEncoder)
|
# coding: utf-8
import logging
from socket import gethostname
from .compat import json, text_type
class JSONFormatter(logging.Formatter):
"""
Formatter to convert LogRecord into JSON.
Thanks to: https://github.com/lobziik/rlog
"""
def __init__(self, *args, **kwargs):
include = kwargs.pop('include', None)
exclude = kwargs.pop('exclude', None)
super().__init__(*args, **kwargs)
self.include = include
self.exclude = exclude
def format(self, record):
data = record.__dict__.copy()
if record.args:
msg = record.msg % record.args
else:
msg = record.msg
data.update(
host=gethostname(),
msg=msg,
args=tuple(text_type(arg) for arg in record.args)
)
if 'exc_info' in data and data['exc_info']:
data['exc_info'] = self.formatException(data['exc_info'])
if self.include:
data = {f: data[f] for f in self.include}
elif self.exclude:
for f in self.exclude:
del data[f]
return json.dumps(data)
Use DjangoJSONEncoder for JSON serialization# coding: utf-8
import logging
from socket import gethostname
from django.core.serializers.json import DjangoJSONEncoder
from .compat import json, text_type
class JSONFormatter(logging.Formatter):
"""
Formatter to convert LogRecord into JSON.
Thanks to: https://github.com/lobziik/rlog
"""
def __init__(self, *args, **kwargs):
include = kwargs.pop('include', None)
exclude = kwargs.pop('exclude', None)
super().__init__(*args, **kwargs)
self.include = include
self.exclude = exclude
def format(self, record):
data = record.__dict__.copy()
if record.args:
msg = record.msg % record.args
else:
msg = record.msg
data.update(
host=gethostname(),
msg=msg,
args=tuple(text_type(arg) for arg in record.args)
)
if 'exc_info' in data and data['exc_info']:
data['exc_info'] = self.formatException(data['exc_info'])
if self.include:
data = {f: data[f] for f in self.include}
elif self.exclude:
for f in self.exclude:
if f in data:
del data[f]
return json.dumps(data, cls=DjangoJSONEncoder)
|
<commit_before># coding: utf-8
import logging
from socket import gethostname
from .compat import json, text_type
class JSONFormatter(logging.Formatter):
"""
Formatter to convert LogRecord into JSON.
Thanks to: https://github.com/lobziik/rlog
"""
def __init__(self, *args, **kwargs):
include = kwargs.pop('include', None)
exclude = kwargs.pop('exclude', None)
super().__init__(*args, **kwargs)
self.include = include
self.exclude = exclude
def format(self, record):
data = record.__dict__.copy()
if record.args:
msg = record.msg % record.args
else:
msg = record.msg
data.update(
host=gethostname(),
msg=msg,
args=tuple(text_type(arg) for arg in record.args)
)
if 'exc_info' in data and data['exc_info']:
data['exc_info'] = self.formatException(data['exc_info'])
if self.include:
data = {f: data[f] for f in self.include}
elif self.exclude:
for f in self.exclude:
del data[f]
return json.dumps(data)
<commit_msg>Use DjangoJSONEncoder for JSON serialization<commit_after># coding: utf-8
import logging
from socket import gethostname
from django.core.serializers.json import DjangoJSONEncoder
from .compat import json, text_type
class JSONFormatter(logging.Formatter):
"""
Formatter to convert LogRecord into JSON.
Thanks to: https://github.com/lobziik/rlog
"""
def __init__(self, *args, **kwargs):
include = kwargs.pop('include', None)
exclude = kwargs.pop('exclude', None)
super().__init__(*args, **kwargs)
self.include = include
self.exclude = exclude
def format(self, record):
data = record.__dict__.copy()
if record.args:
msg = record.msg % record.args
else:
msg = record.msg
data.update(
host=gethostname(),
msg=msg,
args=tuple(text_type(arg) for arg in record.args)
)
if 'exc_info' in data and data['exc_info']:
data['exc_info'] = self.formatException(data['exc_info'])
if self.include:
data = {f: data[f] for f in self.include}
elif self.exclude:
for f in self.exclude:
if f in data:
del data[f]
return json.dumps(data, cls=DjangoJSONEncoder)
|
bce093df2bbcf12d8eec8f812408a0ea88521d10
|
squid_url_cleaner.py
|
squid_url_cleaner.py
|
#!/usr/bin/python
import sys
from url_cleaner import removeBlackListedParameters
while True:
line = sys.stdin.readline().strip()
urlList = line.split(' ')
urlInput = urlList[0]
newUrl = removeBlackListedParameters(urlInput)
sys.stdout.write('%s%s' % (newUrl, '\n'))
sys.stdout.flush()
|
#!/usr/bin/python
import sys
import signal
from url_cleaner import removeBlackListedParameters
def sig_handle(signal, frame):
sys.exit(0)
while True:
signal.signal(signal.SIGINT, sig_handle)
signal.signal(signal.SIGTERM, sig_handle)
try:
line = sys.stdin.readline().strip()
urlList = line.split()
urlInput = urlList[0]
newUrl = removeBlackListedParameters(urlInput)
sys.stdout.write('%s%s'.format(newUrl, '\n'))
sys.stdout.flush()
except Exception:
continue
|
Handle signals for daemon processes, removed deprecated python var sub
|
Handle signals for daemon processes, removed deprecated python var sub
|
Python
|
mit
|
Ladoo/url_cleaner
|
#!/usr/bin/python
import sys
from url_cleaner import removeBlackListedParameters
while True:
line = sys.stdin.readline().strip()
urlList = line.split(' ')
urlInput = urlList[0]
newUrl = removeBlackListedParameters(urlInput)
sys.stdout.write('%s%s' % (newUrl, '\n'))
sys.stdout.flush()
Handle signals for daemon processes, removed deprecated python var sub
|
#!/usr/bin/python
import sys
import signal
from url_cleaner import removeBlackListedParameters
def sig_handle(signal, frame):
sys.exit(0)
while True:
signal.signal(signal.SIGINT, sig_handle)
signal.signal(signal.SIGTERM, sig_handle)
try:
line = sys.stdin.readline().strip()
urlList = line.split()
urlInput = urlList[0]
newUrl = removeBlackListedParameters(urlInput)
sys.stdout.write('%s%s'.format(newUrl, '\n'))
sys.stdout.flush()
except Exception:
continue
|
<commit_before>#!/usr/bin/python
import sys
from url_cleaner import removeBlackListedParameters
while True:
line = sys.stdin.readline().strip()
urlList = line.split(' ')
urlInput = urlList[0]
newUrl = removeBlackListedParameters(urlInput)
sys.stdout.write('%s%s' % (newUrl, '\n'))
sys.stdout.flush()
<commit_msg>Handle signals for daemon processes, removed deprecated python var sub<commit_after>
|
#!/usr/bin/python
import sys
import signal
from url_cleaner import removeBlackListedParameters
def sig_handle(signal, frame):
sys.exit(0)
while True:
signal.signal(signal.SIGINT, sig_handle)
signal.signal(signal.SIGTERM, sig_handle)
try:
line = sys.stdin.readline().strip()
urlList = line.split()
urlInput = urlList[0]
newUrl = removeBlackListedParameters(urlInput)
sys.stdout.write('%s%s'.format(newUrl, '\n'))
sys.stdout.flush()
except Exception:
continue
|
#!/usr/bin/python
import sys
from url_cleaner import removeBlackListedParameters
while True:
line = sys.stdin.readline().strip()
urlList = line.split(' ')
urlInput = urlList[0]
newUrl = removeBlackListedParameters(urlInput)
sys.stdout.write('%s%s' % (newUrl, '\n'))
sys.stdout.flush()
Handle signals for daemon processes, removed deprecated python var sub#!/usr/bin/python
import sys
import signal
from url_cleaner import removeBlackListedParameters
def sig_handle(signal, frame):
sys.exit(0)
while True:
signal.signal(signal.SIGINT, sig_handle)
signal.signal(signal.SIGTERM, sig_handle)
try:
line = sys.stdin.readline().strip()
urlList = line.split()
urlInput = urlList[0]
newUrl = removeBlackListedParameters(urlInput)
sys.stdout.write('%s%s'.format(newUrl, '\n'))
sys.stdout.flush()
except Exception:
continue
|
<commit_before>#!/usr/bin/python
import sys
from url_cleaner import removeBlackListedParameters
while True:
line = sys.stdin.readline().strip()
urlList = line.split(' ')
urlInput = urlList[0]
newUrl = removeBlackListedParameters(urlInput)
sys.stdout.write('%s%s' % (newUrl, '\n'))
sys.stdout.flush()
<commit_msg>Handle signals for daemon processes, removed deprecated python var sub<commit_after>#!/usr/bin/python
import sys
import signal
from url_cleaner import removeBlackListedParameters
def sig_handle(signal, frame):
sys.exit(0)
while True:
signal.signal(signal.SIGINT, sig_handle)
signal.signal(signal.SIGTERM, sig_handle)
try:
line = sys.stdin.readline().strip()
urlList = line.split()
urlInput = urlList[0]
newUrl = removeBlackListedParameters(urlInput)
sys.stdout.write('%s%s'.format(newUrl, '\n'))
sys.stdout.flush()
except Exception:
continue
|
e71e42ec8b7ee80937a983a80db61f4e450fb764
|
tests/__init__.py
|
tests/__init__.py
|
from json import loads
from os import close, unlink
from tempfile import mkstemp
from unittest import TestCase
from cunhajacaiu import app
class FlaskTestCase(TestCase):
def setUp(self):
# set a test db
self.db_handler, self.db_path = mkstemp()
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + self.db_path
# set a testing app
app.config['TESTING'] = True
app.config['REQUESTS_CACHE_BACKEND'] = 'memory'
self.app = app.test_client()
def tearDown(self):
close(self.db_handler)
unlink(self.db_path)
class MockJsonNewsResponse:
@staticmethod
def json():
with open('tests/news.json') as file_handler:
return loads(file_handler.read())
@property
def status_code(self):
return 200
|
from json import loads
from os import close, unlink
from tempfile import mkstemp
from unittest import TestCase
from cunhajacaiu import app
class FlaskTestCase(TestCase):
def setUp(self):
# set a test db
self.db_handler, self.db_path = mkstemp()
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + self.db_path
# set a testing app
app.config['TESTING'] = True
app.config['REQUESTS_CACHE_BACKEND'] = 'memory'
self.app = app.test_client()
def tearDown(self):
close(self.db_handler)
unlink(self.db_path)
class MockJsonNewsResponse:
HTTP_STATUS_CODE = (500, 200)
COUNT = 0
@staticmethod
def json():
with open('tests/news.json') as file_handler:
return loads(file_handler.read())
@property
def status_code(self):
self.COUNT += 1
return self.HTTP_STATUS_CODE[self.COUNT - 1]
|
Test the only untested line
|
Test the only untested line
|
Python
|
mit
|
cuducos/cunhajacaiu,cuducos/cunhajacaiu,cuducos/cunhajacaiu
|
from json import loads
from os import close, unlink
from tempfile import mkstemp
from unittest import TestCase
from cunhajacaiu import app
class FlaskTestCase(TestCase):
def setUp(self):
# set a test db
self.db_handler, self.db_path = mkstemp()
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + self.db_path
# set a testing app
app.config['TESTING'] = True
app.config['REQUESTS_CACHE_BACKEND'] = 'memory'
self.app = app.test_client()
def tearDown(self):
close(self.db_handler)
unlink(self.db_path)
class MockJsonNewsResponse:
@staticmethod
def json():
with open('tests/news.json') as file_handler:
return loads(file_handler.read())
@property
def status_code(self):
return 200
Test the only untested line
|
from json import loads
from os import close, unlink
from tempfile import mkstemp
from unittest import TestCase
from cunhajacaiu import app
class FlaskTestCase(TestCase):
def setUp(self):
# set a test db
self.db_handler, self.db_path = mkstemp()
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + self.db_path
# set a testing app
app.config['TESTING'] = True
app.config['REQUESTS_CACHE_BACKEND'] = 'memory'
self.app = app.test_client()
def tearDown(self):
close(self.db_handler)
unlink(self.db_path)
class MockJsonNewsResponse:
HTTP_STATUS_CODE = (500, 200)
COUNT = 0
@staticmethod
def json():
with open('tests/news.json') as file_handler:
return loads(file_handler.read())
@property
def status_code(self):
self.COUNT += 1
return self.HTTP_STATUS_CODE[self.COUNT - 1]
|
<commit_before>from json import loads
from os import close, unlink
from tempfile import mkstemp
from unittest import TestCase
from cunhajacaiu import app
class FlaskTestCase(TestCase):
def setUp(self):
# set a test db
self.db_handler, self.db_path = mkstemp()
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + self.db_path
# set a testing app
app.config['TESTING'] = True
app.config['REQUESTS_CACHE_BACKEND'] = 'memory'
self.app = app.test_client()
def tearDown(self):
close(self.db_handler)
unlink(self.db_path)
class MockJsonNewsResponse:
@staticmethod
def json():
with open('tests/news.json') as file_handler:
return loads(file_handler.read())
@property
def status_code(self):
return 200
<commit_msg>Test the only untested line<commit_after>
|
from json import loads
from os import close, unlink
from tempfile import mkstemp
from unittest import TestCase
from cunhajacaiu import app
class FlaskTestCase(TestCase):
def setUp(self):
# set a test db
self.db_handler, self.db_path = mkstemp()
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + self.db_path
# set a testing app
app.config['TESTING'] = True
app.config['REQUESTS_CACHE_BACKEND'] = 'memory'
self.app = app.test_client()
def tearDown(self):
close(self.db_handler)
unlink(self.db_path)
class MockJsonNewsResponse:
HTTP_STATUS_CODE = (500, 200)
COUNT = 0
@staticmethod
def json():
with open('tests/news.json') as file_handler:
return loads(file_handler.read())
@property
def status_code(self):
self.COUNT += 1
return self.HTTP_STATUS_CODE[self.COUNT - 1]
|
from json import loads
from os import close, unlink
from tempfile import mkstemp
from unittest import TestCase
from cunhajacaiu import app
class FlaskTestCase(TestCase):
def setUp(self):
# set a test db
self.db_handler, self.db_path = mkstemp()
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + self.db_path
# set a testing app
app.config['TESTING'] = True
app.config['REQUESTS_CACHE_BACKEND'] = 'memory'
self.app = app.test_client()
def tearDown(self):
close(self.db_handler)
unlink(self.db_path)
class MockJsonNewsResponse:
@staticmethod
def json():
with open('tests/news.json') as file_handler:
return loads(file_handler.read())
@property
def status_code(self):
return 200
Test the only untested linefrom json import loads
from os import close, unlink
from tempfile import mkstemp
from unittest import TestCase
from cunhajacaiu import app
class FlaskTestCase(TestCase):
def setUp(self):
# set a test db
self.db_handler, self.db_path = mkstemp()
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + self.db_path
# set a testing app
app.config['TESTING'] = True
app.config['REQUESTS_CACHE_BACKEND'] = 'memory'
self.app = app.test_client()
def tearDown(self):
close(self.db_handler)
unlink(self.db_path)
class MockJsonNewsResponse:
HTTP_STATUS_CODE = (500, 200)
COUNT = 0
@staticmethod
def json():
with open('tests/news.json') as file_handler:
return loads(file_handler.read())
@property
def status_code(self):
self.COUNT += 1
return self.HTTP_STATUS_CODE[self.COUNT - 1]
|
<commit_before>from json import loads
from os import close, unlink
from tempfile import mkstemp
from unittest import TestCase
from cunhajacaiu import app
class FlaskTestCase(TestCase):
def setUp(self):
# set a test db
self.db_handler, self.db_path = mkstemp()
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + self.db_path
# set a testing app
app.config['TESTING'] = True
app.config['REQUESTS_CACHE_BACKEND'] = 'memory'
self.app = app.test_client()
def tearDown(self):
close(self.db_handler)
unlink(self.db_path)
class MockJsonNewsResponse:
@staticmethod
def json():
with open('tests/news.json') as file_handler:
return loads(file_handler.read())
@property
def status_code(self):
return 200
<commit_msg>Test the only untested line<commit_after>from json import loads
from os import close, unlink
from tempfile import mkstemp
from unittest import TestCase
from cunhajacaiu import app
class FlaskTestCase(TestCase):
def setUp(self):
# set a test db
self.db_handler, self.db_path = mkstemp()
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + self.db_path
# set a testing app
app.config['TESTING'] = True
app.config['REQUESTS_CACHE_BACKEND'] = 'memory'
self.app = app.test_client()
def tearDown(self):
close(self.db_handler)
unlink(self.db_path)
class MockJsonNewsResponse:
HTTP_STATUS_CODE = (500, 200)
COUNT = 0
@staticmethod
def json():
with open('tests/news.json') as file_handler:
return loads(file_handler.read())
@property
def status_code(self):
self.COUNT += 1
return self.HTTP_STATUS_CODE[self.COUNT - 1]
|
d773b01721ab090021139fb9a9397cddd89bd487
|
tests/conftest.py
|
tests/conftest.py
|
#!/usr/bin/env python
# encoding: utf8
from __future__ import (absolute_import, print_function, division, unicode_literals)
# REV - This has no effect - http://stackoverflow.com/q/18558666/656912
def pytest_report_header(config):
return "Testing Enigma functionality"
|
#!/usr/bin/env python
# encoding: utf8
from __future__ import (absolute_import, print_function, division, unicode_literals)
from crypto_enigma import __version__
def pytest_report_header(config):
return "version: {}".format(__version__)
|
Add logging of tested package version
|
Add logging of tested package version
|
Python
|
bsd-3-clause
|
orome/crypto-enigma-py
|
#!/usr/bin/env python
# encoding: utf8
from __future__ import (absolute_import, print_function, division, unicode_literals)
# REV - This has no effect - http://stackoverflow.com/q/18558666/656912
def pytest_report_header(config):
return "Testing Enigma functionality"
Add logging of tested package version
|
#!/usr/bin/env python
# encoding: utf8
from __future__ import (absolute_import, print_function, division, unicode_literals)
from crypto_enigma import __version__
def pytest_report_header(config):
return "version: {}".format(__version__)
|
<commit_before>#!/usr/bin/env python
# encoding: utf8
from __future__ import (absolute_import, print_function, division, unicode_literals)
# REV - This has no effect - http://stackoverflow.com/q/18558666/656912
def pytest_report_header(config):
return "Testing Enigma functionality"
<commit_msg>Add logging of tested package version<commit_after>
|
#!/usr/bin/env python
# encoding: utf8
from __future__ import (absolute_import, print_function, division, unicode_literals)
from crypto_enigma import __version__
def pytest_report_header(config):
return "version: {}".format(__version__)
|
#!/usr/bin/env python
# encoding: utf8
from __future__ import (absolute_import, print_function, division, unicode_literals)
# REV - This has no effect - http://stackoverflow.com/q/18558666/656912
def pytest_report_header(config):
return "Testing Enigma functionality"
Add logging of tested package version#!/usr/bin/env python
# encoding: utf8
from __future__ import (absolute_import, print_function, division, unicode_literals)
from crypto_enigma import __version__
def pytest_report_header(config):
return "version: {}".format(__version__)
|
<commit_before>#!/usr/bin/env python
# encoding: utf8
from __future__ import (absolute_import, print_function, division, unicode_literals)
# REV - This has no effect - http://stackoverflow.com/q/18558666/656912
def pytest_report_header(config):
return "Testing Enigma functionality"
<commit_msg>Add logging of tested package version<commit_after>#!/usr/bin/env python
# encoding: utf8
from __future__ import (absolute_import, print_function, division, unicode_literals)
from crypto_enigma import __version__
def pytest_report_header(config):
return "version: {}".format(__version__)
|
c2731d22adbf2abc29d73f5759d5d9f0fa124f5f
|
tests/fixtures.py
|
tests/fixtures.py
|
from . import uuid
def task_crud(self, shotgun, trigger_poll=lambda: None):
name = uuid(8)
a = shotgun.create('Task', {'content': name})
trigger_poll()
b = self.cached.find_one('Task', [('id', 'is', a['id'])], ['content'])
self.assertSameEntity(a, b)
name += '-2'
shotgun.update('Task', a['id'], {'content': name})
trigger_poll()
c = self.cached.find_one('Task', [('id', 'is', a['id'])], ['content'])
self.assertEqual(c['content'], name)
shotgun.delete('Task', a['id'])
trigger_poll()
d = self.cached.find_one('Task', [('id', 'is', a['id'])], ['content'])
self.assertIs(d, None)
|
from . import uuid
def task_crud(self, shotgun, trigger_poll=lambda: None):
shot_name = uuid(8)
shot = shotgun.create('Shot', {'code': shot_name})
name = uuid(8)
task = shotgun.create('Task', {'content': name, 'entity': shot})
trigger_poll()
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'])
self.assertSameEntity(task, x)
# entity field
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['entity'])
self.assertSameEntity(shot, x['entity'])
# return through entity field
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['entity.Shot.code'])
self.assertEqual(shot_name, x['entity.Shot.code'])
# Updates
name += '-2'
shotgun.update('Task', task['id'], {'content': name})
trigger_poll()
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'])
self.assertEqual(x['content'], name)
# Delete
shotgun.delete('Task', task['id'])
trigger_poll()
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'])
self.assertIs(x, None)
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'], retired_only=True)
self.assertSameEntity(task, x)
|
Add entity link to basic crud tests
|
Add entity link to basic crud tests
|
Python
|
bsd-3-clause
|
westernx/sgcache,westernx/sgcache
|
from . import uuid
def task_crud(self, shotgun, trigger_poll=lambda: None):
name = uuid(8)
a = shotgun.create('Task', {'content': name})
trigger_poll()
b = self.cached.find_one('Task', [('id', 'is', a['id'])], ['content'])
self.assertSameEntity(a, b)
name += '-2'
shotgun.update('Task', a['id'], {'content': name})
trigger_poll()
c = self.cached.find_one('Task', [('id', 'is', a['id'])], ['content'])
self.assertEqual(c['content'], name)
shotgun.delete('Task', a['id'])
trigger_poll()
d = self.cached.find_one('Task', [('id', 'is', a['id'])], ['content'])
self.assertIs(d, None)
Add entity link to basic crud tests
|
from . import uuid
def task_crud(self, shotgun, trigger_poll=lambda: None):
shot_name = uuid(8)
shot = shotgun.create('Shot', {'code': shot_name})
name = uuid(8)
task = shotgun.create('Task', {'content': name, 'entity': shot})
trigger_poll()
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'])
self.assertSameEntity(task, x)
# entity field
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['entity'])
self.assertSameEntity(shot, x['entity'])
# return through entity field
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['entity.Shot.code'])
self.assertEqual(shot_name, x['entity.Shot.code'])
# Updates
name += '-2'
shotgun.update('Task', task['id'], {'content': name})
trigger_poll()
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'])
self.assertEqual(x['content'], name)
# Delete
shotgun.delete('Task', task['id'])
trigger_poll()
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'])
self.assertIs(x, None)
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'], retired_only=True)
self.assertSameEntity(task, x)
|
<commit_before>from . import uuid
def task_crud(self, shotgun, trigger_poll=lambda: None):
name = uuid(8)
a = shotgun.create('Task', {'content': name})
trigger_poll()
b = self.cached.find_one('Task', [('id', 'is', a['id'])], ['content'])
self.assertSameEntity(a, b)
name += '-2'
shotgun.update('Task', a['id'], {'content': name})
trigger_poll()
c = self.cached.find_one('Task', [('id', 'is', a['id'])], ['content'])
self.assertEqual(c['content'], name)
shotgun.delete('Task', a['id'])
trigger_poll()
d = self.cached.find_one('Task', [('id', 'is', a['id'])], ['content'])
self.assertIs(d, None)
<commit_msg>Add entity link to basic crud tests<commit_after>
|
from . import uuid
def task_crud(self, shotgun, trigger_poll=lambda: None):
shot_name = uuid(8)
shot = shotgun.create('Shot', {'code': shot_name})
name = uuid(8)
task = shotgun.create('Task', {'content': name, 'entity': shot})
trigger_poll()
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'])
self.assertSameEntity(task, x)
# entity field
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['entity'])
self.assertSameEntity(shot, x['entity'])
# return through entity field
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['entity.Shot.code'])
self.assertEqual(shot_name, x['entity.Shot.code'])
# Updates
name += '-2'
shotgun.update('Task', task['id'], {'content': name})
trigger_poll()
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'])
self.assertEqual(x['content'], name)
# Delete
shotgun.delete('Task', task['id'])
trigger_poll()
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'])
self.assertIs(x, None)
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'], retired_only=True)
self.assertSameEntity(task, x)
|
from . import uuid
def task_crud(self, shotgun, trigger_poll=lambda: None):
name = uuid(8)
a = shotgun.create('Task', {'content': name})
trigger_poll()
b = self.cached.find_one('Task', [('id', 'is', a['id'])], ['content'])
self.assertSameEntity(a, b)
name += '-2'
shotgun.update('Task', a['id'], {'content': name})
trigger_poll()
c = self.cached.find_one('Task', [('id', 'is', a['id'])], ['content'])
self.assertEqual(c['content'], name)
shotgun.delete('Task', a['id'])
trigger_poll()
d = self.cached.find_one('Task', [('id', 'is', a['id'])], ['content'])
self.assertIs(d, None)
Add entity link to basic crud testsfrom . import uuid
def task_crud(self, shotgun, trigger_poll=lambda: None):
shot_name = uuid(8)
shot = shotgun.create('Shot', {'code': shot_name})
name = uuid(8)
task = shotgun.create('Task', {'content': name, 'entity': shot})
trigger_poll()
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'])
self.assertSameEntity(task, x)
# entity field
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['entity'])
self.assertSameEntity(shot, x['entity'])
# return through entity field
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['entity.Shot.code'])
self.assertEqual(shot_name, x['entity.Shot.code'])
# Updates
name += '-2'
shotgun.update('Task', task['id'], {'content': name})
trigger_poll()
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'])
self.assertEqual(x['content'], name)
# Delete
shotgun.delete('Task', task['id'])
trigger_poll()
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'])
self.assertIs(x, None)
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'], retired_only=True)
self.assertSameEntity(task, x)
|
<commit_before>from . import uuid
def task_crud(self, shotgun, trigger_poll=lambda: None):
name = uuid(8)
a = shotgun.create('Task', {'content': name})
trigger_poll()
b = self.cached.find_one('Task', [('id', 'is', a['id'])], ['content'])
self.assertSameEntity(a, b)
name += '-2'
shotgun.update('Task', a['id'], {'content': name})
trigger_poll()
c = self.cached.find_one('Task', [('id', 'is', a['id'])], ['content'])
self.assertEqual(c['content'], name)
shotgun.delete('Task', a['id'])
trigger_poll()
d = self.cached.find_one('Task', [('id', 'is', a['id'])], ['content'])
self.assertIs(d, None)
<commit_msg>Add entity link to basic crud tests<commit_after>from . import uuid
def task_crud(self, shotgun, trigger_poll=lambda: None):
shot_name = uuid(8)
shot = shotgun.create('Shot', {'code': shot_name})
name = uuid(8)
task = shotgun.create('Task', {'content': name, 'entity': shot})
trigger_poll()
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'])
self.assertSameEntity(task, x)
# entity field
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['entity'])
self.assertSameEntity(shot, x['entity'])
# return through entity field
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['entity.Shot.code'])
self.assertEqual(shot_name, x['entity.Shot.code'])
# Updates
name += '-2'
shotgun.update('Task', task['id'], {'content': name})
trigger_poll()
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'])
self.assertEqual(x['content'], name)
# Delete
shotgun.delete('Task', task['id'])
trigger_poll()
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'])
self.assertIs(x, None)
x = self.cached.find_one('Task', [('id', 'is', task['id'])], ['content'], retired_only=True)
self.assertSameEntity(task, x)
|
85b94f0d9caef0b1d22763371b1279ae2f433944
|
pyinfra_cli/__main__.py
|
pyinfra_cli/__main__.py
|
import os
import signal
import sys
import click
import gevent
import pyinfra
from .legacy import run_main_with_legacy_arguments
from .main import cli, main
# Set CLI mode
pyinfra.is_cli = True
# Don't write out deploy.pyc/config.pyc etc
sys.dont_write_bytecode = True
# Make sure imported files (deploy.py/etc) behave as if imported from the cwd
sys.path.append('.')
# Shut it click
click.disable_unicode_literals_warning = True # noqa
# Force line buffering
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 1)
sys.stderr = os.fdopen(sys.stderr.fileno(), 'w', 1)
def _handle_interrupt(signum, frame):
click.echo('Exiting upon user request!')
sys.exit(0)
gevent.signal_handler(signal.SIGINT, gevent.kill) # kill any greenlets on ctrl+c
signal.signal(signal.SIGINT, _handle_interrupt) # print the message and exit main
def execute_pyinfra():
# Legacy support for pyinfra <0.4 using docopt
if '-i' in sys.argv:
run_main_with_legacy_arguments(main)
else:
cli()
if __name__ == 'pyinfra_cli.__main__':
execute_pyinfra()
|
import os
import signal
import sys
import click
import gevent
import pyinfra
from .legacy import run_main_with_legacy_arguments
from .main import cli, main
# Set CLI mode
pyinfra.is_cli = True
# Don't write out deploy.pyc/config.pyc etc
sys.dont_write_bytecode = True
# Make sure imported files (deploy.py/etc) behave as if imported from the cwd
sys.path.append('.')
# Shut it click
click.disable_unicode_literals_warning = True # noqa
# Force line buffering
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 1)
sys.stderr = os.fdopen(sys.stderr.fileno(), 'w', 1)
def _handle_interrupt(signum, frame):
click.echo('Exiting upon user request!')
sys.exit(0)
try:
# Kill any greenlets on ctrl+c
gevent.signal_handler(signal.SIGINT, gevent.kill)
except AttributeError:
# Legacy (gevent <1.2) support
gevent.signal(signal.SIGINT, gevent.kill)
signal.signal(signal.SIGINT, _handle_interrupt) # print the message and exit main
def execute_pyinfra():
# Legacy support for pyinfra <0.4 using docopt
if '-i' in sys.argv:
run_main_with_legacy_arguments(main)
else:
cli()
if __name__ == 'pyinfra_cli.__main__':
execute_pyinfra()
|
Fix support for older gevent versions.
|
Fix support for older gevent versions.
Gevent 1.5 removed the `gevent.signal` alias, but some older versions
do not have the new `signal_handler` function.
|
Python
|
mit
|
Fizzadar/pyinfra,Fizzadar/pyinfra
|
import os
import signal
import sys
import click
import gevent
import pyinfra
from .legacy import run_main_with_legacy_arguments
from .main import cli, main
# Set CLI mode
pyinfra.is_cli = True
# Don't write out deploy.pyc/config.pyc etc
sys.dont_write_bytecode = True
# Make sure imported files (deploy.py/etc) behave as if imported from the cwd
sys.path.append('.')
# Shut it click
click.disable_unicode_literals_warning = True # noqa
# Force line buffering
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 1)
sys.stderr = os.fdopen(sys.stderr.fileno(), 'w', 1)
def _handle_interrupt(signum, frame):
click.echo('Exiting upon user request!')
sys.exit(0)
gevent.signal_handler(signal.SIGINT, gevent.kill) # kill any greenlets on ctrl+c
signal.signal(signal.SIGINT, _handle_interrupt) # print the message and exit main
def execute_pyinfra():
# Legacy support for pyinfra <0.4 using docopt
if '-i' in sys.argv:
run_main_with_legacy_arguments(main)
else:
cli()
if __name__ == 'pyinfra_cli.__main__':
execute_pyinfra()
Fix support for older gevent versions.
Gevent 1.5 removed the `gevent.signal` alias, but some older versions
do not have the new `signal_handler` function.
|
import os
import signal
import sys
import click
import gevent
import pyinfra
from .legacy import run_main_with_legacy_arguments
from .main import cli, main
# Set CLI mode
pyinfra.is_cli = True
# Don't write out deploy.pyc/config.pyc etc
sys.dont_write_bytecode = True
# Make sure imported files (deploy.py/etc) behave as if imported from the cwd
sys.path.append('.')
# Shut it click
click.disable_unicode_literals_warning = True # noqa
# Force line buffering
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 1)
sys.stderr = os.fdopen(sys.stderr.fileno(), 'w', 1)
def _handle_interrupt(signum, frame):
click.echo('Exiting upon user request!')
sys.exit(0)
try:
# Kill any greenlets on ctrl+c
gevent.signal_handler(signal.SIGINT, gevent.kill)
except AttributeError:
# Legacy (gevent <1.2) support
gevent.signal(signal.SIGINT, gevent.kill)
signal.signal(signal.SIGINT, _handle_interrupt) # print the message and exit main
def execute_pyinfra():
# Legacy support for pyinfra <0.4 using docopt
if '-i' in sys.argv:
run_main_with_legacy_arguments(main)
else:
cli()
if __name__ == 'pyinfra_cli.__main__':
execute_pyinfra()
|
<commit_before>import os
import signal
import sys
import click
import gevent
import pyinfra
from .legacy import run_main_with_legacy_arguments
from .main import cli, main
# Set CLI mode
pyinfra.is_cli = True
# Don't write out deploy.pyc/config.pyc etc
sys.dont_write_bytecode = True
# Make sure imported files (deploy.py/etc) behave as if imported from the cwd
sys.path.append('.')
# Shut it click
click.disable_unicode_literals_warning = True # noqa
# Force line buffering
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 1)
sys.stderr = os.fdopen(sys.stderr.fileno(), 'w', 1)
def _handle_interrupt(signum, frame):
click.echo('Exiting upon user request!')
sys.exit(0)
gevent.signal_handler(signal.SIGINT, gevent.kill) # kill any greenlets on ctrl+c
signal.signal(signal.SIGINT, _handle_interrupt) # print the message and exit main
def execute_pyinfra():
# Legacy support for pyinfra <0.4 using docopt
if '-i' in sys.argv:
run_main_with_legacy_arguments(main)
else:
cli()
if __name__ == 'pyinfra_cli.__main__':
execute_pyinfra()
<commit_msg>Fix support for older gevent versions.
Gevent 1.5 removed the `gevent.signal` alias, but some older versions
do not have the new `signal_handler` function.<commit_after>
|
import os
import signal
import sys
import click
import gevent
import pyinfra
from .legacy import run_main_with_legacy_arguments
from .main import cli, main
# Set CLI mode
pyinfra.is_cli = True
# Don't write out deploy.pyc/config.pyc etc
sys.dont_write_bytecode = True
# Make sure imported files (deploy.py/etc) behave as if imported from the cwd
sys.path.append('.')
# Shut it click
click.disable_unicode_literals_warning = True # noqa
# Force line buffering
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 1)
sys.stderr = os.fdopen(sys.stderr.fileno(), 'w', 1)
def _handle_interrupt(signum, frame):
click.echo('Exiting upon user request!')
sys.exit(0)
try:
# Kill any greenlets on ctrl+c
gevent.signal_handler(signal.SIGINT, gevent.kill)
except AttributeError:
# Legacy (gevent <1.2) support
gevent.signal(signal.SIGINT, gevent.kill)
signal.signal(signal.SIGINT, _handle_interrupt) # print the message and exit main
def execute_pyinfra():
# Legacy support for pyinfra <0.4 using docopt
if '-i' in sys.argv:
run_main_with_legacy_arguments(main)
else:
cli()
if __name__ == 'pyinfra_cli.__main__':
execute_pyinfra()
|
import os
import signal
import sys
import click
import gevent
import pyinfra
from .legacy import run_main_with_legacy_arguments
from .main import cli, main
# Set CLI mode
pyinfra.is_cli = True
# Don't write out deploy.pyc/config.pyc etc
sys.dont_write_bytecode = True
# Make sure imported files (deploy.py/etc) behave as if imported from the cwd
sys.path.append('.')
# Shut it click
click.disable_unicode_literals_warning = True # noqa
# Force line buffering
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 1)
sys.stderr = os.fdopen(sys.stderr.fileno(), 'w', 1)
def _handle_interrupt(signum, frame):
click.echo('Exiting upon user request!')
sys.exit(0)
gevent.signal_handler(signal.SIGINT, gevent.kill) # kill any greenlets on ctrl+c
signal.signal(signal.SIGINT, _handle_interrupt) # print the message and exit main
def execute_pyinfra():
# Legacy support for pyinfra <0.4 using docopt
if '-i' in sys.argv:
run_main_with_legacy_arguments(main)
else:
cli()
if __name__ == 'pyinfra_cli.__main__':
execute_pyinfra()
Fix support for older gevent versions.
Gevent 1.5 removed the `gevent.signal` alias, but some older versions
do not have the new `signal_handler` function.import os
import signal
import sys
import click
import gevent
import pyinfra
from .legacy import run_main_with_legacy_arguments
from .main import cli, main
# Set CLI mode
pyinfra.is_cli = True
# Don't write out deploy.pyc/config.pyc etc
sys.dont_write_bytecode = True
# Make sure imported files (deploy.py/etc) behave as if imported from the cwd
sys.path.append('.')
# Shut it click
click.disable_unicode_literals_warning = True # noqa
# Force line buffering
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 1)
sys.stderr = os.fdopen(sys.stderr.fileno(), 'w', 1)
def _handle_interrupt(signum, frame):
click.echo('Exiting upon user request!')
sys.exit(0)
try:
# Kill any greenlets on ctrl+c
gevent.signal_handler(signal.SIGINT, gevent.kill)
except AttributeError:
# Legacy (gevent <1.2) support
gevent.signal(signal.SIGINT, gevent.kill)
signal.signal(signal.SIGINT, _handle_interrupt) # print the message and exit main
def execute_pyinfra():
# Legacy support for pyinfra <0.4 using docopt
if '-i' in sys.argv:
run_main_with_legacy_arguments(main)
else:
cli()
if __name__ == 'pyinfra_cli.__main__':
execute_pyinfra()
|
<commit_before>import os
import signal
import sys
import click
import gevent
import pyinfra
from .legacy import run_main_with_legacy_arguments
from .main import cli, main
# Set CLI mode
pyinfra.is_cli = True
# Don't write out deploy.pyc/config.pyc etc
sys.dont_write_bytecode = True
# Make sure imported files (deploy.py/etc) behave as if imported from the cwd
sys.path.append('.')
# Shut it click
click.disable_unicode_literals_warning = True # noqa
# Force line buffering
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 1)
sys.stderr = os.fdopen(sys.stderr.fileno(), 'w', 1)
def _handle_interrupt(signum, frame):
click.echo('Exiting upon user request!')
sys.exit(0)
gevent.signal_handler(signal.SIGINT, gevent.kill) # kill any greenlets on ctrl+c
signal.signal(signal.SIGINT, _handle_interrupt) # print the message and exit main
def execute_pyinfra():
# Legacy support for pyinfra <0.4 using docopt
if '-i' in sys.argv:
run_main_with_legacy_arguments(main)
else:
cli()
if __name__ == 'pyinfra_cli.__main__':
execute_pyinfra()
<commit_msg>Fix support for older gevent versions.
Gevent 1.5 removed the `gevent.signal` alias, but some older versions
do not have the new `signal_handler` function.<commit_after>import os
import signal
import sys
import click
import gevent
import pyinfra
from .legacy import run_main_with_legacy_arguments
from .main import cli, main
# Set CLI mode
pyinfra.is_cli = True
# Don't write out deploy.pyc/config.pyc etc
sys.dont_write_bytecode = True
# Make sure imported files (deploy.py/etc) behave as if imported from the cwd
sys.path.append('.')
# Shut it click
click.disable_unicode_literals_warning = True # noqa
# Force line buffering
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 1)
sys.stderr = os.fdopen(sys.stderr.fileno(), 'w', 1)
def _handle_interrupt(signum, frame):
click.echo('Exiting upon user request!')
sys.exit(0)
try:
# Kill any greenlets on ctrl+c
gevent.signal_handler(signal.SIGINT, gevent.kill)
except AttributeError:
# Legacy (gevent <1.2) support
gevent.signal(signal.SIGINT, gevent.kill)
signal.signal(signal.SIGINT, _handle_interrupt) # print the message and exit main
def execute_pyinfra():
# Legacy support for pyinfra <0.4 using docopt
if '-i' in sys.argv:
run_main_with_legacy_arguments(main)
else:
cli()
if __name__ == 'pyinfra_cli.__main__':
execute_pyinfra()
|
90132a3e4f9a0a251d9d1738703e6e927a0e23af
|
pytest_pipeline/utils.py
|
pytest_pipeline/utils.py
|
# -*- coding: utf-8 -*-
"""
pytest_pipeline.utils
~~~~~~~~~~~~~~~~~~~~~
General utilities.
:copyright: (c) 2014 Wibowo Arindrarto <bow@bow.web.id>
:license: BSD
"""
import gzip
import hashlib
import os
def file_md5sum(fname, unzip=False, mode="r", blocksize=65536):
if unzip:
opener = gzip.open
else:
opener = open
hasher = hashlib.md5()
with opener(fname, mode) as src:
buf = src.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = src.read(blocksize)
return hasher.hexdigest()
def isexecfile(fname):
return os.path.isfile(fname) and os.access(fname, os.X_OK)
def which(program):
# can not do anything meaningful without PATH
if "PATH" not in os.environ:
return
for possible in os.environ["PATH"].split(":"):
qualname = os.path.join(possible, program)
if isexecfile(qualname):
return qualname
return
|
# -*- coding: utf-8 -*-
"""
pytest_pipeline.utils
~~~~~~~~~~~~~~~~~~~~~
General utilities.
:copyright: (c) 2014 Wibowo Arindrarto <bow@bow.web.id>
:license: BSD
"""
import gzip
import hashlib
import os
def file_md5sum(fname, unzip=False, blocksize=65536, encoding="utf-8"):
if unzip:
opener = gzip.open
else:
opener = open
hasher = hashlib.md5()
with opener(fname, "rb") as src:
buf = src.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = src.read(blocksize)
return hasher.hexdigest()
def isexecfile(fname):
return os.path.isfile(fname) and os.access(fname, os.X_OK)
def which(program):
# can not do anything meaningful without PATH
if "PATH" not in os.environ:
return
for possible in os.environ["PATH"].split(":"):
qualname = os.path.join(possible, program)
if isexecfile(qualname):
return qualname
return
|
Use 'rb' mode explicitly in file_md5sum and allow for custom encoding
|
Use 'rb' mode explicitly in file_md5sum and allow for custom encoding
|
Python
|
bsd-3-clause
|
bow/pytest-pipeline
|
# -*- coding: utf-8 -*-
"""
pytest_pipeline.utils
~~~~~~~~~~~~~~~~~~~~~
General utilities.
:copyright: (c) 2014 Wibowo Arindrarto <bow@bow.web.id>
:license: BSD
"""
import gzip
import hashlib
import os
def file_md5sum(fname, unzip=False, mode="r", blocksize=65536):
if unzip:
opener = gzip.open
else:
opener = open
hasher = hashlib.md5()
with opener(fname, mode) as src:
buf = src.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = src.read(blocksize)
return hasher.hexdigest()
def isexecfile(fname):
return os.path.isfile(fname) and os.access(fname, os.X_OK)
def which(program):
# can not do anything meaningful without PATH
if "PATH" not in os.environ:
return
for possible in os.environ["PATH"].split(":"):
qualname = os.path.join(possible, program)
if isexecfile(qualname):
return qualname
return
Use 'rb' mode explicitly in file_md5sum and allow for custom encoding
|
# -*- coding: utf-8 -*-
"""
pytest_pipeline.utils
~~~~~~~~~~~~~~~~~~~~~
General utilities.
:copyright: (c) 2014 Wibowo Arindrarto <bow@bow.web.id>
:license: BSD
"""
import gzip
import hashlib
import os
def file_md5sum(fname, unzip=False, blocksize=65536, encoding="utf-8"):
if unzip:
opener = gzip.open
else:
opener = open
hasher = hashlib.md5()
with opener(fname, "rb") as src:
buf = src.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = src.read(blocksize)
return hasher.hexdigest()
def isexecfile(fname):
return os.path.isfile(fname) and os.access(fname, os.X_OK)
def which(program):
# can not do anything meaningful without PATH
if "PATH" not in os.environ:
return
for possible in os.environ["PATH"].split(":"):
qualname = os.path.join(possible, program)
if isexecfile(qualname):
return qualname
return
|
<commit_before># -*- coding: utf-8 -*-
"""
pytest_pipeline.utils
~~~~~~~~~~~~~~~~~~~~~
General utilities.
:copyright: (c) 2014 Wibowo Arindrarto <bow@bow.web.id>
:license: BSD
"""
import gzip
import hashlib
import os
def file_md5sum(fname, unzip=False, mode="r", blocksize=65536):
if unzip:
opener = gzip.open
else:
opener = open
hasher = hashlib.md5()
with opener(fname, mode) as src:
buf = src.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = src.read(blocksize)
return hasher.hexdigest()
def isexecfile(fname):
return os.path.isfile(fname) and os.access(fname, os.X_OK)
def which(program):
# can not do anything meaningful without PATH
if "PATH" not in os.environ:
return
for possible in os.environ["PATH"].split(":"):
qualname = os.path.join(possible, program)
if isexecfile(qualname):
return qualname
return
<commit_msg>Use 'rb' mode explicitly in file_md5sum and allow for custom encoding<commit_after>
|
# -*- coding: utf-8 -*-
"""
pytest_pipeline.utils
~~~~~~~~~~~~~~~~~~~~~
General utilities.
:copyright: (c) 2014 Wibowo Arindrarto <bow@bow.web.id>
:license: BSD
"""
import gzip
import hashlib
import os
def file_md5sum(fname, unzip=False, blocksize=65536, encoding="utf-8"):
if unzip:
opener = gzip.open
else:
opener = open
hasher = hashlib.md5()
with opener(fname, "rb") as src:
buf = src.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = src.read(blocksize)
return hasher.hexdigest()
def isexecfile(fname):
return os.path.isfile(fname) and os.access(fname, os.X_OK)
def which(program):
# can not do anything meaningful without PATH
if "PATH" not in os.environ:
return
for possible in os.environ["PATH"].split(":"):
qualname = os.path.join(possible, program)
if isexecfile(qualname):
return qualname
return
|
# -*- coding: utf-8 -*-
"""
pytest_pipeline.utils
~~~~~~~~~~~~~~~~~~~~~
General utilities.
:copyright: (c) 2014 Wibowo Arindrarto <bow@bow.web.id>
:license: BSD
"""
import gzip
import hashlib
import os
def file_md5sum(fname, unzip=False, mode="r", blocksize=65536):
if unzip:
opener = gzip.open
else:
opener = open
hasher = hashlib.md5()
with opener(fname, mode) as src:
buf = src.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = src.read(blocksize)
return hasher.hexdigest()
def isexecfile(fname):
return os.path.isfile(fname) and os.access(fname, os.X_OK)
def which(program):
# can not do anything meaningful without PATH
if "PATH" not in os.environ:
return
for possible in os.environ["PATH"].split(":"):
qualname = os.path.join(possible, program)
if isexecfile(qualname):
return qualname
return
Use 'rb' mode explicitly in file_md5sum and allow for custom encoding# -*- coding: utf-8 -*-
"""
pytest_pipeline.utils
~~~~~~~~~~~~~~~~~~~~~
General utilities.
:copyright: (c) 2014 Wibowo Arindrarto <bow@bow.web.id>
:license: BSD
"""
import gzip
import hashlib
import os
def file_md5sum(fname, unzip=False, blocksize=65536, encoding="utf-8"):
if unzip:
opener = gzip.open
else:
opener = open
hasher = hashlib.md5()
with opener(fname, "rb") as src:
buf = src.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = src.read(blocksize)
return hasher.hexdigest()
def isexecfile(fname):
return os.path.isfile(fname) and os.access(fname, os.X_OK)
def which(program):
# can not do anything meaningful without PATH
if "PATH" not in os.environ:
return
for possible in os.environ["PATH"].split(":"):
qualname = os.path.join(possible, program)
if isexecfile(qualname):
return qualname
return
|
<commit_before># -*- coding: utf-8 -*-
"""
pytest_pipeline.utils
~~~~~~~~~~~~~~~~~~~~~
General utilities.
:copyright: (c) 2014 Wibowo Arindrarto <bow@bow.web.id>
:license: BSD
"""
import gzip
import hashlib
import os
def file_md5sum(fname, unzip=False, mode="r", blocksize=65536):
if unzip:
opener = gzip.open
else:
opener = open
hasher = hashlib.md5()
with opener(fname, mode) as src:
buf = src.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = src.read(blocksize)
return hasher.hexdigest()
def isexecfile(fname):
return os.path.isfile(fname) and os.access(fname, os.X_OK)
def which(program):
# can not do anything meaningful without PATH
if "PATH" not in os.environ:
return
for possible in os.environ["PATH"].split(":"):
qualname = os.path.join(possible, program)
if isexecfile(qualname):
return qualname
return
<commit_msg>Use 'rb' mode explicitly in file_md5sum and allow for custom encoding<commit_after># -*- coding: utf-8 -*-
"""
pytest_pipeline.utils
~~~~~~~~~~~~~~~~~~~~~
General utilities.
:copyright: (c) 2014 Wibowo Arindrarto <bow@bow.web.id>
:license: BSD
"""
import gzip
import hashlib
import os
def file_md5sum(fname, unzip=False, blocksize=65536, encoding="utf-8"):
if unzip:
opener = gzip.open
else:
opener = open
hasher = hashlib.md5()
with opener(fname, "rb") as src:
buf = src.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = src.read(blocksize)
return hasher.hexdigest()
def isexecfile(fname):
return os.path.isfile(fname) and os.access(fname, os.X_OK)
def which(program):
# can not do anything meaningful without PATH
if "PATH" not in os.environ:
return
for possible in os.environ["PATH"].split(":"):
qualname = os.path.join(possible, program)
if isexecfile(qualname):
return qualname
return
|
bc16915aa3c4a7cef456da4193bdcdc34117eab0
|
tests/test_classes.py
|
tests/test_classes.py
|
import unittest
import os
import gzip
import bs4
import logging
from classes import (
NbaTeam
)
class MockRequests:
def get(self, url):
pass
class TestNbaTeamPage(unittest.TestCase):
# read html file and ungzip
@classmethod
def setUpClass(cls):
requester = MockRequests()
# file_path = os.path.join(os.path.dirname(__file__), 'mock_data/nba_roster_lakers.htm.gz')
# f = gzip.open(file_path)
# content = f.read()
cls.nba_team = NbaTeam('okc', requester, bs4)
cls.roster_text = content
def test_get_page(self):
team_page = self.nba_team.get_page(self.nba_team.url)
self.assertFalse(self.nba_team.failed)
if __name__ == '__main__':
unittest.main()
|
import unittest
import os
import gzip
import bs4
import logging
from classes import (
NbaTeam
)
logger = logging.getLogger()
logger.setLevel(logging.INFO)
logger.addHandler(logging.StreamHandler())
class MockRequests:
def get(self, url):
pass
class TestNbaTeamPage(unittest.TestCase):
# read html file and ungzip
@classmethod
def setUpClass(cls):
file_path = os.path.join(os.path.dirname(__file__), 'mock_data/nba_roster_lakers.htm.gz')
cls.roster_text = gzip.open(file_path).read()
cls.requester = MockRequests()
@classmethod
def setUp(cls):
cls.nba_team = NbaTeam('okc', cls.requester, bs4)
cls.parsed = cls.nba_team.convert_page(cls.roster_text)
def test_get_page_should_not_fail(self):
team_page = self.nba_team.get_page(self.nba_team.url)
self.assertFalse(self.nba_team.failed)
def test_convert_page_should_not_fail(self):
parsed_page = self.nba_team.convert_page(self.roster_text)
self.assertFalse(self.nba_team.failed)
def test_parse_roster_should_return_player_ids(self):
expected = ['5383', '4285', '5357', '3824', '5329', '5601', '4794', '5487', '5762',
'5318', '5011', '5433', '3339', '4294', '5663']
player_ids = self.nba_team.parse_roster(self.parsed)
self.assertEqual(expected, player_ids)
if __name__ == '__main__':
unittest.main()
|
Add more NbaTeam class tests
|
Add more NbaTeam class tests
|
Python
|
mit
|
arosenberg01/asdata
|
import unittest
import os
import gzip
import bs4
import logging
from classes import (
NbaTeam
)
class MockRequests:
def get(self, url):
pass
class TestNbaTeamPage(unittest.TestCase):
# read html file and ungzip
@classmethod
def setUpClass(cls):
requester = MockRequests()
# file_path = os.path.join(os.path.dirname(__file__), 'mock_data/nba_roster_lakers.htm.gz')
# f = gzip.open(file_path)
# content = f.read()
cls.nba_team = NbaTeam('okc', requester, bs4)
cls.roster_text = content
def test_get_page(self):
team_page = self.nba_team.get_page(self.nba_team.url)
self.assertFalse(self.nba_team.failed)
if __name__ == '__main__':
unittest.main()Add more NbaTeam class tests
|
import unittest
import os
import gzip
import bs4
import logging
from classes import (
NbaTeam
)
logger = logging.getLogger()
logger.setLevel(logging.INFO)
logger.addHandler(logging.StreamHandler())
class MockRequests:
def get(self, url):
pass
class TestNbaTeamPage(unittest.TestCase):
# read html file and ungzip
@classmethod
def setUpClass(cls):
file_path = os.path.join(os.path.dirname(__file__), 'mock_data/nba_roster_lakers.htm.gz')
cls.roster_text = gzip.open(file_path).read()
cls.requester = MockRequests()
@classmethod
def setUp(cls):
cls.nba_team = NbaTeam('okc', cls.requester, bs4)
cls.parsed = cls.nba_team.convert_page(cls.roster_text)
def test_get_page_should_not_fail(self):
team_page = self.nba_team.get_page(self.nba_team.url)
self.assertFalse(self.nba_team.failed)
def test_convert_page_should_not_fail(self):
parsed_page = self.nba_team.convert_page(self.roster_text)
self.assertFalse(self.nba_team.failed)
def test_parse_roster_should_return_player_ids(self):
expected = ['5383', '4285', '5357', '3824', '5329', '5601', '4794', '5487', '5762',
'5318', '5011', '5433', '3339', '4294', '5663']
player_ids = self.nba_team.parse_roster(self.parsed)
self.assertEqual(expected, player_ids)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
import os
import gzip
import bs4
import logging
from classes import (
NbaTeam
)
class MockRequests:
def get(self, url):
pass
class TestNbaTeamPage(unittest.TestCase):
# read html file and ungzip
@classmethod
def setUpClass(cls):
requester = MockRequests()
# file_path = os.path.join(os.path.dirname(__file__), 'mock_data/nba_roster_lakers.htm.gz')
# f = gzip.open(file_path)
# content = f.read()
cls.nba_team = NbaTeam('okc', requester, bs4)
cls.roster_text = content
def test_get_page(self):
team_page = self.nba_team.get_page(self.nba_team.url)
self.assertFalse(self.nba_team.failed)
if __name__ == '__main__':
unittest.main()<commit_msg>Add more NbaTeam class tests<commit_after>
|
import unittest
import os
import gzip
import bs4
import logging
from classes import (
NbaTeam
)
logger = logging.getLogger()
logger.setLevel(logging.INFO)
logger.addHandler(logging.StreamHandler())
class MockRequests:
def get(self, url):
pass
class TestNbaTeamPage(unittest.TestCase):
# read html file and ungzip
@classmethod
def setUpClass(cls):
file_path = os.path.join(os.path.dirname(__file__), 'mock_data/nba_roster_lakers.htm.gz')
cls.roster_text = gzip.open(file_path).read()
cls.requester = MockRequests()
@classmethod
def setUp(cls):
cls.nba_team = NbaTeam('okc', cls.requester, bs4)
cls.parsed = cls.nba_team.convert_page(cls.roster_text)
def test_get_page_should_not_fail(self):
team_page = self.nba_team.get_page(self.nba_team.url)
self.assertFalse(self.nba_team.failed)
def test_convert_page_should_not_fail(self):
parsed_page = self.nba_team.convert_page(self.roster_text)
self.assertFalse(self.nba_team.failed)
def test_parse_roster_should_return_player_ids(self):
expected = ['5383', '4285', '5357', '3824', '5329', '5601', '4794', '5487', '5762',
'5318', '5011', '5433', '3339', '4294', '5663']
player_ids = self.nba_team.parse_roster(self.parsed)
self.assertEqual(expected, player_ids)
if __name__ == '__main__':
unittest.main()
|
import unittest
import os
import gzip
import bs4
import logging
from classes import (
NbaTeam
)
class MockRequests:
def get(self, url):
pass
class TestNbaTeamPage(unittest.TestCase):
# read html file and ungzip
@classmethod
def setUpClass(cls):
requester = MockRequests()
# file_path = os.path.join(os.path.dirname(__file__), 'mock_data/nba_roster_lakers.htm.gz')
# f = gzip.open(file_path)
# content = f.read()
cls.nba_team = NbaTeam('okc', requester, bs4)
cls.roster_text = content
def test_get_page(self):
team_page = self.nba_team.get_page(self.nba_team.url)
self.assertFalse(self.nba_team.failed)
if __name__ == '__main__':
unittest.main()Add more NbaTeam class testsimport unittest
import os
import gzip
import bs4
import logging
from classes import (
NbaTeam
)
logger = logging.getLogger()
logger.setLevel(logging.INFO)
logger.addHandler(logging.StreamHandler())
class MockRequests:
def get(self, url):
pass
class TestNbaTeamPage(unittest.TestCase):
# read html file and ungzip
@classmethod
def setUpClass(cls):
file_path = os.path.join(os.path.dirname(__file__), 'mock_data/nba_roster_lakers.htm.gz')
cls.roster_text = gzip.open(file_path).read()
cls.requester = MockRequests()
@classmethod
def setUp(cls):
cls.nba_team = NbaTeam('okc', cls.requester, bs4)
cls.parsed = cls.nba_team.convert_page(cls.roster_text)
def test_get_page_should_not_fail(self):
team_page = self.nba_team.get_page(self.nba_team.url)
self.assertFalse(self.nba_team.failed)
def test_convert_page_should_not_fail(self):
parsed_page = self.nba_team.convert_page(self.roster_text)
self.assertFalse(self.nba_team.failed)
def test_parse_roster_should_return_player_ids(self):
expected = ['5383', '4285', '5357', '3824', '5329', '5601', '4794', '5487', '5762',
'5318', '5011', '5433', '3339', '4294', '5663']
player_ids = self.nba_team.parse_roster(self.parsed)
self.assertEqual(expected, player_ids)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
import os
import gzip
import bs4
import logging
from classes import (
NbaTeam
)
class MockRequests:
def get(self, url):
pass
class TestNbaTeamPage(unittest.TestCase):
# read html file and ungzip
@classmethod
def setUpClass(cls):
requester = MockRequests()
# file_path = os.path.join(os.path.dirname(__file__), 'mock_data/nba_roster_lakers.htm.gz')
# f = gzip.open(file_path)
# content = f.read()
cls.nba_team = NbaTeam('okc', requester, bs4)
cls.roster_text = content
def test_get_page(self):
team_page = self.nba_team.get_page(self.nba_team.url)
self.assertFalse(self.nba_team.failed)
if __name__ == '__main__':
unittest.main()<commit_msg>Add more NbaTeam class tests<commit_after>import unittest
import os
import gzip
import bs4
import logging
from classes import (
NbaTeam
)
logger = logging.getLogger()
logger.setLevel(logging.INFO)
logger.addHandler(logging.StreamHandler())
class MockRequests:
def get(self, url):
pass
class TestNbaTeamPage(unittest.TestCase):
# read html file and ungzip
@classmethod
def setUpClass(cls):
file_path = os.path.join(os.path.dirname(__file__), 'mock_data/nba_roster_lakers.htm.gz')
cls.roster_text = gzip.open(file_path).read()
cls.requester = MockRequests()
@classmethod
def setUp(cls):
cls.nba_team = NbaTeam('okc', cls.requester, bs4)
cls.parsed = cls.nba_team.convert_page(cls.roster_text)
def test_get_page_should_not_fail(self):
team_page = self.nba_team.get_page(self.nba_team.url)
self.assertFalse(self.nba_team.failed)
def test_convert_page_should_not_fail(self):
parsed_page = self.nba_team.convert_page(self.roster_text)
self.assertFalse(self.nba_team.failed)
def test_parse_roster_should_return_player_ids(self):
expected = ['5383', '4285', '5357', '3824', '5329', '5601', '4794', '5487', '5762',
'5318', '5011', '5433', '3339', '4294', '5663']
player_ids = self.nba_team.parse_roster(self.parsed)
self.assertEqual(expected, player_ids)
if __name__ == '__main__':
unittest.main()
|
29baa0a57fe49c790d4ef5dcdde1e744fc83efde
|
boundary/alarm_create.py
|
boundary/alarm_create.py
|
#
# Copyright 2015 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import AlarmModify
class AlarmCreate(AlarmModify):
def __init__(self, **kwargs):
AlarmModify.__init__(self, False)
self._kwargs = kwargs
self.method = "POST"
self._alarm_result = None
def add_arguments(self):
self.parser.add_argument('-n', '--alarm-name', dest='alarm_name', action='store', required=True,
metavar='alarm_name', help='Name of the alarm')
AlarmModify.add_arguments(self)
def get_arguments(self):
"""
Extracts the specific arguments of this CLI
"""
AlarmModify.get_arguments(self)
def get_description(self):
return 'Creates an alarm definition in an {0} account'.format(self.product_name)
def get_api_parameters(self):
AlarmModify.get_api_parameters(self)
self.path = 'v1/alarms'
|
#
# Copyright 2015 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import AlarmModify
class AlarmCreate(AlarmModify):
def __init__(self, **kwargs):
AlarmModify.__init__(self, False)
self._kwargs = kwargs
self.method = "POST"
self._alarm_result = None
def add_arguments(self):
self.parser.add_argument('-n', '--alarm-name', dest='alarm_name', action='store', required=True,
metavar='alarm_name', help='Name of the alarm')
AlarmModify.add_arguments(self)
def get_arguments(self):
"""
Extracts the specific arguments of this CLI
"""
AlarmModify.get_arguments(self)
def get_description(self):
return 'Creates an alarm definition in an {0} account'.format(self.product_name)
|
Remove no needed to duplicate parent behaviour
|
Remove no needed to duplicate parent behaviour
|
Python
|
apache-2.0
|
boundary/pulse-api-cli,jdgwartney/boundary-api-cli,boundary/boundary-api-cli,jdgwartney/pulse-api-cli,boundary/pulse-api-cli,jdgwartney/pulse-api-cli,jdgwartney/boundary-api-cli,boundary/boundary-api-cli
|
#
# Copyright 2015 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import AlarmModify
class AlarmCreate(AlarmModify):
def __init__(self, **kwargs):
AlarmModify.__init__(self, False)
self._kwargs = kwargs
self.method = "POST"
self._alarm_result = None
def add_arguments(self):
self.parser.add_argument('-n', '--alarm-name', dest='alarm_name', action='store', required=True,
metavar='alarm_name', help='Name of the alarm')
AlarmModify.add_arguments(self)
def get_arguments(self):
"""
Extracts the specific arguments of this CLI
"""
AlarmModify.get_arguments(self)
def get_description(self):
return 'Creates an alarm definition in an {0} account'.format(self.product_name)
def get_api_parameters(self):
AlarmModify.get_api_parameters(self)
self.path = 'v1/alarms'
Remove no needed to duplicate parent behaviour
|
#
# Copyright 2015 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import AlarmModify
class AlarmCreate(AlarmModify):
def __init__(self, **kwargs):
AlarmModify.__init__(self, False)
self._kwargs = kwargs
self.method = "POST"
self._alarm_result = None
def add_arguments(self):
self.parser.add_argument('-n', '--alarm-name', dest='alarm_name', action='store', required=True,
metavar='alarm_name', help='Name of the alarm')
AlarmModify.add_arguments(self)
def get_arguments(self):
"""
Extracts the specific arguments of this CLI
"""
AlarmModify.get_arguments(self)
def get_description(self):
return 'Creates an alarm definition in an {0} account'.format(self.product_name)
|
<commit_before>#
# Copyright 2015 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import AlarmModify
class AlarmCreate(AlarmModify):
def __init__(self, **kwargs):
AlarmModify.__init__(self, False)
self._kwargs = kwargs
self.method = "POST"
self._alarm_result = None
def add_arguments(self):
self.parser.add_argument('-n', '--alarm-name', dest='alarm_name', action='store', required=True,
metavar='alarm_name', help='Name of the alarm')
AlarmModify.add_arguments(self)
def get_arguments(self):
"""
Extracts the specific arguments of this CLI
"""
AlarmModify.get_arguments(self)
def get_description(self):
return 'Creates an alarm definition in an {0} account'.format(self.product_name)
def get_api_parameters(self):
AlarmModify.get_api_parameters(self)
self.path = 'v1/alarms'
<commit_msg>Remove no needed to duplicate parent behaviour<commit_after>
|
#
# Copyright 2015 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import AlarmModify
class AlarmCreate(AlarmModify):
def __init__(self, **kwargs):
AlarmModify.__init__(self, False)
self._kwargs = kwargs
self.method = "POST"
self._alarm_result = None
def add_arguments(self):
self.parser.add_argument('-n', '--alarm-name', dest='alarm_name', action='store', required=True,
metavar='alarm_name', help='Name of the alarm')
AlarmModify.add_arguments(self)
def get_arguments(self):
"""
Extracts the specific arguments of this CLI
"""
AlarmModify.get_arguments(self)
def get_description(self):
return 'Creates an alarm definition in an {0} account'.format(self.product_name)
|
#
# Copyright 2015 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import AlarmModify
class AlarmCreate(AlarmModify):
def __init__(self, **kwargs):
AlarmModify.__init__(self, False)
self._kwargs = kwargs
self.method = "POST"
self._alarm_result = None
def add_arguments(self):
self.parser.add_argument('-n', '--alarm-name', dest='alarm_name', action='store', required=True,
metavar='alarm_name', help='Name of the alarm')
AlarmModify.add_arguments(self)
def get_arguments(self):
"""
Extracts the specific arguments of this CLI
"""
AlarmModify.get_arguments(self)
def get_description(self):
return 'Creates an alarm definition in an {0} account'.format(self.product_name)
def get_api_parameters(self):
AlarmModify.get_api_parameters(self)
self.path = 'v1/alarms'
Remove no needed to duplicate parent behaviour#
# Copyright 2015 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import AlarmModify
class AlarmCreate(AlarmModify):
def __init__(self, **kwargs):
AlarmModify.__init__(self, False)
self._kwargs = kwargs
self.method = "POST"
self._alarm_result = None
def add_arguments(self):
self.parser.add_argument('-n', '--alarm-name', dest='alarm_name', action='store', required=True,
metavar='alarm_name', help='Name of the alarm')
AlarmModify.add_arguments(self)
def get_arguments(self):
"""
Extracts the specific arguments of this CLI
"""
AlarmModify.get_arguments(self)
def get_description(self):
return 'Creates an alarm definition in an {0} account'.format(self.product_name)
|
<commit_before>#
# Copyright 2015 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import AlarmModify
class AlarmCreate(AlarmModify):
def __init__(self, **kwargs):
AlarmModify.__init__(self, False)
self._kwargs = kwargs
self.method = "POST"
self._alarm_result = None
def add_arguments(self):
self.parser.add_argument('-n', '--alarm-name', dest='alarm_name', action='store', required=True,
metavar='alarm_name', help='Name of the alarm')
AlarmModify.add_arguments(self)
def get_arguments(self):
"""
Extracts the specific arguments of this CLI
"""
AlarmModify.get_arguments(self)
def get_description(self):
return 'Creates an alarm definition in an {0} account'.format(self.product_name)
def get_api_parameters(self):
AlarmModify.get_api_parameters(self)
self.path = 'v1/alarms'
<commit_msg>Remove no needed to duplicate parent behaviour<commit_after>#
# Copyright 2015 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import AlarmModify
class AlarmCreate(AlarmModify):
def __init__(self, **kwargs):
AlarmModify.__init__(self, False)
self._kwargs = kwargs
self.method = "POST"
self._alarm_result = None
def add_arguments(self):
self.parser.add_argument('-n', '--alarm-name', dest='alarm_name', action='store', required=True,
metavar='alarm_name', help='Name of the alarm')
AlarmModify.add_arguments(self)
def get_arguments(self):
"""
Extracts the specific arguments of this CLI
"""
AlarmModify.get_arguments(self)
def get_description(self):
return 'Creates an alarm definition in an {0} account'.format(self.product_name)
|
8d7b2597e73ca82e016e635fe0db840070b7bd7a
|
semillas_backend/users/serializers.py
|
semillas_backend/users/serializers.py
|
#from phonenumber_field.serializerfields import PhoneNumberField
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
|
#from phonenumber_field.serializerfields import PhoneNumberField
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
uuid = serializers.CharField(read_only=True)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email', 'uuid')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
|
Add uuid to update user serializer
|
Add uuid to update user serializer
|
Python
|
mit
|
Semillas/semillas_backend,Semillas/semillas_platform,Semillas/semillas_backend,Semillas/semillas_platform,Semillas/semillas_backend,Semillas/semillas_platform,Semillas/semillas_platform,Semillas/semillas_backend
|
#from phonenumber_field.serializerfields import PhoneNumberField
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
Add uuid to update user serializer
|
#from phonenumber_field.serializerfields import PhoneNumberField
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
uuid = serializers.CharField(read_only=True)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email', 'uuid')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
|
<commit_before>#from phonenumber_field.serializerfields import PhoneNumberField
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
<commit_msg>Add uuid to update user serializer<commit_after>
|
#from phonenumber_field.serializerfields import PhoneNumberField
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
uuid = serializers.CharField(read_only=True)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email', 'uuid')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
|
#from phonenumber_field.serializerfields import PhoneNumberField
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
Add uuid to update user serializer#from phonenumber_field.serializerfields import PhoneNumberField
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
uuid = serializers.CharField(read_only=True)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email', 'uuid')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
|
<commit_before>#from phonenumber_field.serializerfields import PhoneNumberField
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
<commit_msg>Add uuid to update user serializer<commit_after>#from phonenumber_field.serializerfields import PhoneNumberField
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
uuid = serializers.CharField(read_only=True)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email', 'uuid')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
|
5bc3e6a3fb112b529f738142850860dd98a9d428
|
tests/runtests.py
|
tests/runtests.py
|
import glob
import os
import unittest
def build_test_suite():
suite = unittest.TestSuite()
for test_case in glob.glob('tests/test_*.py'):
modname = os.path.splitext(test_case)[0]
modname = modname.replace('/', '.')
module = __import__(modname, {}, {}, ['1'])
suite.addTest(unittest.TestLoader().loadTestsFromModule(module))
return suite
if __name__ == "__main__":
suite = build_test_suite()
runner = unittest.TextTestRunner()
runner.run(suite)
|
import glob
import os
import unittest
import sys
def build_test_suite():
suite = unittest.TestSuite()
for test_case in glob.glob('tests/test_*.py'):
modname = os.path.splitext(test_case)[0]
modname = modname.replace('/', '.')
module = __import__(modname, {}, {}, ['1'])
suite.addTest(unittest.TestLoader().loadTestsFromModule(module))
return suite
if __name__ == "__main__":
suite = build_test_suite()
runner = unittest.TextTestRunner()
result = runner.run(suite)
sys.exit(not result.wasSuccessful())
|
Make unittest return exit code 1 on failure
|
Make unittest return exit code 1 on failure
This is to allow travis to catch test failures
|
Python
|
bsd-3-clause
|
jorgecarleitao/pyglet-gui
|
import glob
import os
import unittest
def build_test_suite():
suite = unittest.TestSuite()
for test_case in glob.glob('tests/test_*.py'):
modname = os.path.splitext(test_case)[0]
modname = modname.replace('/', '.')
module = __import__(modname, {}, {}, ['1'])
suite.addTest(unittest.TestLoader().loadTestsFromModule(module))
return suite
if __name__ == "__main__":
suite = build_test_suite()
runner = unittest.TextTestRunner()
runner.run(suite)
Make unittest return exit code 1 on failure
This is to allow travis to catch test failures
|
import glob
import os
import unittest
import sys
def build_test_suite():
suite = unittest.TestSuite()
for test_case in glob.glob('tests/test_*.py'):
modname = os.path.splitext(test_case)[0]
modname = modname.replace('/', '.')
module = __import__(modname, {}, {}, ['1'])
suite.addTest(unittest.TestLoader().loadTestsFromModule(module))
return suite
if __name__ == "__main__":
suite = build_test_suite()
runner = unittest.TextTestRunner()
result = runner.run(suite)
sys.exit(not result.wasSuccessful())
|
<commit_before>import glob
import os
import unittest
def build_test_suite():
suite = unittest.TestSuite()
for test_case in glob.glob('tests/test_*.py'):
modname = os.path.splitext(test_case)[0]
modname = modname.replace('/', '.')
module = __import__(modname, {}, {}, ['1'])
suite.addTest(unittest.TestLoader().loadTestsFromModule(module))
return suite
if __name__ == "__main__":
suite = build_test_suite()
runner = unittest.TextTestRunner()
runner.run(suite)
<commit_msg>Make unittest return exit code 1 on failure
This is to allow travis to catch test failures<commit_after>
|
import glob
import os
import unittest
import sys
def build_test_suite():
suite = unittest.TestSuite()
for test_case in glob.glob('tests/test_*.py'):
modname = os.path.splitext(test_case)[0]
modname = modname.replace('/', '.')
module = __import__(modname, {}, {}, ['1'])
suite.addTest(unittest.TestLoader().loadTestsFromModule(module))
return suite
if __name__ == "__main__":
suite = build_test_suite()
runner = unittest.TextTestRunner()
result = runner.run(suite)
sys.exit(not result.wasSuccessful())
|
import glob
import os
import unittest
def build_test_suite():
suite = unittest.TestSuite()
for test_case in glob.glob('tests/test_*.py'):
modname = os.path.splitext(test_case)[0]
modname = modname.replace('/', '.')
module = __import__(modname, {}, {}, ['1'])
suite.addTest(unittest.TestLoader().loadTestsFromModule(module))
return suite
if __name__ == "__main__":
suite = build_test_suite()
runner = unittest.TextTestRunner()
runner.run(suite)
Make unittest return exit code 1 on failure
This is to allow travis to catch test failuresimport glob
import os
import unittest
import sys
def build_test_suite():
suite = unittest.TestSuite()
for test_case in glob.glob('tests/test_*.py'):
modname = os.path.splitext(test_case)[0]
modname = modname.replace('/', '.')
module = __import__(modname, {}, {}, ['1'])
suite.addTest(unittest.TestLoader().loadTestsFromModule(module))
return suite
if __name__ == "__main__":
suite = build_test_suite()
runner = unittest.TextTestRunner()
result = runner.run(suite)
sys.exit(not result.wasSuccessful())
|
<commit_before>import glob
import os
import unittest
def build_test_suite():
suite = unittest.TestSuite()
for test_case in glob.glob('tests/test_*.py'):
modname = os.path.splitext(test_case)[0]
modname = modname.replace('/', '.')
module = __import__(modname, {}, {}, ['1'])
suite.addTest(unittest.TestLoader().loadTestsFromModule(module))
return suite
if __name__ == "__main__":
suite = build_test_suite()
runner = unittest.TextTestRunner()
runner.run(suite)
<commit_msg>Make unittest return exit code 1 on failure
This is to allow travis to catch test failures<commit_after>import glob
import os
import unittest
import sys
def build_test_suite():
suite = unittest.TestSuite()
for test_case in glob.glob('tests/test_*.py'):
modname = os.path.splitext(test_case)[0]
modname = modname.replace('/', '.')
module = __import__(modname, {}, {}, ['1'])
suite.addTest(unittest.TestLoader().loadTestsFromModule(module))
return suite
if __name__ == "__main__":
suite = build_test_suite()
runner = unittest.TextTestRunner()
result = runner.run(suite)
sys.exit(not result.wasSuccessful())
|
ecb3e8e9bea6388d2368fefdd24037f933a78dfe
|
tests/settings.py
|
tests/settings.py
|
"""Settings file for the Django project used for tests."""
import os
from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS
PROJECT_NAME = 'project'
# Base paths.
ROOT = os.path.abspath(os.path.dirname(__file__))
PROJECT = os.path.join(ROOT, PROJECT_NAME)
# Django configuration.
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
DEBUG = TEMPLATE_DEBUG = True
INSTALLED_APPS = (
'django.contrib.staticfiles',
'endless_pagination',
PROJECT_NAME,
)
ROOT_URLCONF = PROJECT_NAME + '.urls'
SECRET_KEY = 'secret'
SITE_ID = 1
STATIC_ROOT = os.path.join(PROJECT, 'static')
STATIC_URL = '/static/'
TEMPLATE_CONTEXT_PROCESSORS += (
'django.core.context_processors.request',
PROJECT_NAME + '.context_processors.navbar',
PROJECT_NAME + '.context_processors.versions',
)
TEMPLATE_DIRS = os.path.join(PROJECT, 'templates')
# Testing.
NOSE_ARGS = (
'--verbosity=2',
'--with-coverage',
'--cover-package=endless_pagination',
)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
|
"""Settings file for the Django project used for tests."""
import os
from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS
PROJECT_NAME = 'project'
# Base paths.
ROOT = os.path.abspath(os.path.dirname(__file__))
PROJECT = os.path.join(ROOT, PROJECT_NAME)
# Django configuration.
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
DEBUG = TEMPLATE_DEBUG = True
INSTALLED_APPS = (
'django.contrib.staticfiles',
'endless_pagination',
PROJECT_NAME,
)
ROOT_URLCONF = PROJECT_NAME + '.urls'
SECRET_KEY = os.getenv('ENDLESS_PAGINATION_SECRET_KEY', 'secret')
SITE_ID = 1
STATIC_ROOT = os.path.join(PROJECT, 'static')
STATIC_URL = '/static/'
TEMPLATE_CONTEXT_PROCESSORS += (
'django.core.context_processors.request',
PROJECT_NAME + '.context_processors.navbar',
PROJECT_NAME + '.context_processors.versions',
)
TEMPLATE_DIRS = os.path.join(PROJECT, 'templates')
# Testing.
NOSE_ARGS = (
'--verbosity=2',
'--with-coverage',
'--cover-package=endless_pagination',
)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
|
Support providing SECRET_KEY as environment variable.
|
Support providing SECRET_KEY as environment variable.
|
Python
|
mit
|
poswald/django-endless-pagination,poswald/django-endless-pagination,catalpainternational/django-endless-pagination,igorkramaric/django-endless-pagination,poswald/django-endless-pagination,kjefes/django-endless-pagination,igorkramaric/django-endless-pagination,catalpainternational/django-endless-pagination,catalpainternational/django-endless-pagination,suchanlee/django-endless-pagination,frankban/django-endless-pagination,poswald/django-endless-pagination,frankban/django-endless-pagination,suchanlee/django-endless-pagination,kjefes/django-endless-pagination,kjefes/django-endless-pagination,igorkramaric/django-endless-pagination,suchanlee/django-endless-pagination,kjefes/django-endless-pagination,catalpainternational/django-endless-pagination,igorkramaric/django-endless-pagination,frankban/django-endless-pagination
|
"""Settings file for the Django project used for tests."""
import os
from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS
PROJECT_NAME = 'project'
# Base paths.
ROOT = os.path.abspath(os.path.dirname(__file__))
PROJECT = os.path.join(ROOT, PROJECT_NAME)
# Django configuration.
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
DEBUG = TEMPLATE_DEBUG = True
INSTALLED_APPS = (
'django.contrib.staticfiles',
'endless_pagination',
PROJECT_NAME,
)
ROOT_URLCONF = PROJECT_NAME + '.urls'
SECRET_KEY = 'secret'
SITE_ID = 1
STATIC_ROOT = os.path.join(PROJECT, 'static')
STATIC_URL = '/static/'
TEMPLATE_CONTEXT_PROCESSORS += (
'django.core.context_processors.request',
PROJECT_NAME + '.context_processors.navbar',
PROJECT_NAME + '.context_processors.versions',
)
TEMPLATE_DIRS = os.path.join(PROJECT, 'templates')
# Testing.
NOSE_ARGS = (
'--verbosity=2',
'--with-coverage',
'--cover-package=endless_pagination',
)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
Support providing SECRET_KEY as environment variable.
|
"""Settings file for the Django project used for tests."""
import os
from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS
PROJECT_NAME = 'project'
# Base paths.
ROOT = os.path.abspath(os.path.dirname(__file__))
PROJECT = os.path.join(ROOT, PROJECT_NAME)
# Django configuration.
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
DEBUG = TEMPLATE_DEBUG = True
INSTALLED_APPS = (
'django.contrib.staticfiles',
'endless_pagination',
PROJECT_NAME,
)
ROOT_URLCONF = PROJECT_NAME + '.urls'
SECRET_KEY = os.getenv('ENDLESS_PAGINATION_SECRET_KEY', 'secret')
SITE_ID = 1
STATIC_ROOT = os.path.join(PROJECT, 'static')
STATIC_URL = '/static/'
TEMPLATE_CONTEXT_PROCESSORS += (
'django.core.context_processors.request',
PROJECT_NAME + '.context_processors.navbar',
PROJECT_NAME + '.context_processors.versions',
)
TEMPLATE_DIRS = os.path.join(PROJECT, 'templates')
# Testing.
NOSE_ARGS = (
'--verbosity=2',
'--with-coverage',
'--cover-package=endless_pagination',
)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
|
<commit_before>"""Settings file for the Django project used for tests."""
import os
from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS
PROJECT_NAME = 'project'
# Base paths.
ROOT = os.path.abspath(os.path.dirname(__file__))
PROJECT = os.path.join(ROOT, PROJECT_NAME)
# Django configuration.
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
DEBUG = TEMPLATE_DEBUG = True
INSTALLED_APPS = (
'django.contrib.staticfiles',
'endless_pagination',
PROJECT_NAME,
)
ROOT_URLCONF = PROJECT_NAME + '.urls'
SECRET_KEY = 'secret'
SITE_ID = 1
STATIC_ROOT = os.path.join(PROJECT, 'static')
STATIC_URL = '/static/'
TEMPLATE_CONTEXT_PROCESSORS += (
'django.core.context_processors.request',
PROJECT_NAME + '.context_processors.navbar',
PROJECT_NAME + '.context_processors.versions',
)
TEMPLATE_DIRS = os.path.join(PROJECT, 'templates')
# Testing.
NOSE_ARGS = (
'--verbosity=2',
'--with-coverage',
'--cover-package=endless_pagination',
)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
<commit_msg>Support providing SECRET_KEY as environment variable.<commit_after>
|
"""Settings file for the Django project used for tests."""
import os
from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS
PROJECT_NAME = 'project'
# Base paths.
ROOT = os.path.abspath(os.path.dirname(__file__))
PROJECT = os.path.join(ROOT, PROJECT_NAME)
# Django configuration.
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
DEBUG = TEMPLATE_DEBUG = True
INSTALLED_APPS = (
'django.contrib.staticfiles',
'endless_pagination',
PROJECT_NAME,
)
ROOT_URLCONF = PROJECT_NAME + '.urls'
SECRET_KEY = os.getenv('ENDLESS_PAGINATION_SECRET_KEY', 'secret')
SITE_ID = 1
STATIC_ROOT = os.path.join(PROJECT, 'static')
STATIC_URL = '/static/'
TEMPLATE_CONTEXT_PROCESSORS += (
'django.core.context_processors.request',
PROJECT_NAME + '.context_processors.navbar',
PROJECT_NAME + '.context_processors.versions',
)
TEMPLATE_DIRS = os.path.join(PROJECT, 'templates')
# Testing.
NOSE_ARGS = (
'--verbosity=2',
'--with-coverage',
'--cover-package=endless_pagination',
)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
|
"""Settings file for the Django project used for tests."""
import os
from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS
PROJECT_NAME = 'project'
# Base paths.
ROOT = os.path.abspath(os.path.dirname(__file__))
PROJECT = os.path.join(ROOT, PROJECT_NAME)
# Django configuration.
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
DEBUG = TEMPLATE_DEBUG = True
INSTALLED_APPS = (
'django.contrib.staticfiles',
'endless_pagination',
PROJECT_NAME,
)
ROOT_URLCONF = PROJECT_NAME + '.urls'
SECRET_KEY = 'secret'
SITE_ID = 1
STATIC_ROOT = os.path.join(PROJECT, 'static')
STATIC_URL = '/static/'
TEMPLATE_CONTEXT_PROCESSORS += (
'django.core.context_processors.request',
PROJECT_NAME + '.context_processors.navbar',
PROJECT_NAME + '.context_processors.versions',
)
TEMPLATE_DIRS = os.path.join(PROJECT, 'templates')
# Testing.
NOSE_ARGS = (
'--verbosity=2',
'--with-coverage',
'--cover-package=endless_pagination',
)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
Support providing SECRET_KEY as environment variable."""Settings file for the Django project used for tests."""
import os
from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS
PROJECT_NAME = 'project'
# Base paths.
ROOT = os.path.abspath(os.path.dirname(__file__))
PROJECT = os.path.join(ROOT, PROJECT_NAME)
# Django configuration.
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
DEBUG = TEMPLATE_DEBUG = True
INSTALLED_APPS = (
'django.contrib.staticfiles',
'endless_pagination',
PROJECT_NAME,
)
ROOT_URLCONF = PROJECT_NAME + '.urls'
SECRET_KEY = os.getenv('ENDLESS_PAGINATION_SECRET_KEY', 'secret')
SITE_ID = 1
STATIC_ROOT = os.path.join(PROJECT, 'static')
STATIC_URL = '/static/'
TEMPLATE_CONTEXT_PROCESSORS += (
'django.core.context_processors.request',
PROJECT_NAME + '.context_processors.navbar',
PROJECT_NAME + '.context_processors.versions',
)
TEMPLATE_DIRS = os.path.join(PROJECT, 'templates')
# Testing.
NOSE_ARGS = (
'--verbosity=2',
'--with-coverage',
'--cover-package=endless_pagination',
)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
|
<commit_before>"""Settings file for the Django project used for tests."""
import os
from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS
PROJECT_NAME = 'project'
# Base paths.
ROOT = os.path.abspath(os.path.dirname(__file__))
PROJECT = os.path.join(ROOT, PROJECT_NAME)
# Django configuration.
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
DEBUG = TEMPLATE_DEBUG = True
INSTALLED_APPS = (
'django.contrib.staticfiles',
'endless_pagination',
PROJECT_NAME,
)
ROOT_URLCONF = PROJECT_NAME + '.urls'
SECRET_KEY = 'secret'
SITE_ID = 1
STATIC_ROOT = os.path.join(PROJECT, 'static')
STATIC_URL = '/static/'
TEMPLATE_CONTEXT_PROCESSORS += (
'django.core.context_processors.request',
PROJECT_NAME + '.context_processors.navbar',
PROJECT_NAME + '.context_processors.versions',
)
TEMPLATE_DIRS = os.path.join(PROJECT, 'templates')
# Testing.
NOSE_ARGS = (
'--verbosity=2',
'--with-coverage',
'--cover-package=endless_pagination',
)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
<commit_msg>Support providing SECRET_KEY as environment variable.<commit_after>"""Settings file for the Django project used for tests."""
import os
from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS
PROJECT_NAME = 'project'
# Base paths.
ROOT = os.path.abspath(os.path.dirname(__file__))
PROJECT = os.path.join(ROOT, PROJECT_NAME)
# Django configuration.
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
DEBUG = TEMPLATE_DEBUG = True
INSTALLED_APPS = (
'django.contrib.staticfiles',
'endless_pagination',
PROJECT_NAME,
)
ROOT_URLCONF = PROJECT_NAME + '.urls'
SECRET_KEY = os.getenv('ENDLESS_PAGINATION_SECRET_KEY', 'secret')
SITE_ID = 1
STATIC_ROOT = os.path.join(PROJECT, 'static')
STATIC_URL = '/static/'
TEMPLATE_CONTEXT_PROCESSORS += (
'django.core.context_processors.request',
PROJECT_NAME + '.context_processors.navbar',
PROJECT_NAME + '.context_processors.versions',
)
TEMPLATE_DIRS = os.path.join(PROJECT, 'templates')
# Testing.
NOSE_ARGS = (
'--verbosity=2',
'--with-coverage',
'--cover-package=endless_pagination',
)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
|
d4412f8573dbfc1b06f2a298cc5c3042c6c468e6
|
tests/test_api.py
|
tests/test_api.py
|
from django.test import TestCase
from django_snooze import apis
class APITestCase(TestCase):
def setUp(self):
"""Sets up an API object to play with.
:returns: None
"""
self.api = apis.api
self.api.discover_models()
def test_apps(self):
"""Test if the right apps are present.
:returns: None
"""
self.assertIn('tests', self.api._resources.keys())
self.assertIn('auth', self.api._resources.keys())
|
from django.test import TestCase
from django_snooze import apis
class APITestCase(TestCase):
def setUp(self):
"""Sets up an API object to play with.
:returns: None
"""
self.api = apis.api
self.api.discover_models()
def test_apps(self):
"""Test if the right apps are present.
:returns: None
"""
self.assertIn('tests', self.api._resources.keys())
self.assertIn('auth', self.api._resources.keys())
tests_models = [x.model_name for x in self.api._resources['tests']]
self.assertNotIn('abstract', tests_models)
|
Test to see if abstract classes sneak in.
|
Test to see if abstract classes sneak in.
Now that get_models has been found to skip abstract classes, we want to test
for this in case this behaviour ever changes.
|
Python
|
bsd-3-clause
|
ainmosni/django-snooze,ainmosni/django-snooze
|
from django.test import TestCase
from django_snooze import apis
class APITestCase(TestCase):
def setUp(self):
"""Sets up an API object to play with.
:returns: None
"""
self.api = apis.api
self.api.discover_models()
def test_apps(self):
"""Test if the right apps are present.
:returns: None
"""
self.assertIn('tests', self.api._resources.keys())
self.assertIn('auth', self.api._resources.keys())
Test to see if abstract classes sneak in.
Now that get_models has been found to skip abstract classes, we want to test
for this in case this behaviour ever changes.
|
from django.test import TestCase
from django_snooze import apis
class APITestCase(TestCase):
def setUp(self):
"""Sets up an API object to play with.
:returns: None
"""
self.api = apis.api
self.api.discover_models()
def test_apps(self):
"""Test if the right apps are present.
:returns: None
"""
self.assertIn('tests', self.api._resources.keys())
self.assertIn('auth', self.api._resources.keys())
tests_models = [x.model_name for x in self.api._resources['tests']]
self.assertNotIn('abstract', tests_models)
|
<commit_before>from django.test import TestCase
from django_snooze import apis
class APITestCase(TestCase):
def setUp(self):
"""Sets up an API object to play with.
:returns: None
"""
self.api = apis.api
self.api.discover_models()
def test_apps(self):
"""Test if the right apps are present.
:returns: None
"""
self.assertIn('tests', self.api._resources.keys())
self.assertIn('auth', self.api._resources.keys())
<commit_msg>Test to see if abstract classes sneak in.
Now that get_models has been found to skip abstract classes, we want to test
for this in case this behaviour ever changes.<commit_after>
|
from django.test import TestCase
from django_snooze import apis
class APITestCase(TestCase):
def setUp(self):
"""Sets up an API object to play with.
:returns: None
"""
self.api = apis.api
self.api.discover_models()
def test_apps(self):
"""Test if the right apps are present.
:returns: None
"""
self.assertIn('tests', self.api._resources.keys())
self.assertIn('auth', self.api._resources.keys())
tests_models = [x.model_name for x in self.api._resources['tests']]
self.assertNotIn('abstract', tests_models)
|
from django.test import TestCase
from django_snooze import apis
class APITestCase(TestCase):
def setUp(self):
"""Sets up an API object to play with.
:returns: None
"""
self.api = apis.api
self.api.discover_models()
def test_apps(self):
"""Test if the right apps are present.
:returns: None
"""
self.assertIn('tests', self.api._resources.keys())
self.assertIn('auth', self.api._resources.keys())
Test to see if abstract classes sneak in.
Now that get_models has been found to skip abstract classes, we want to test
for this in case this behaviour ever changes.from django.test import TestCase
from django_snooze import apis
class APITestCase(TestCase):
def setUp(self):
"""Sets up an API object to play with.
:returns: None
"""
self.api = apis.api
self.api.discover_models()
def test_apps(self):
"""Test if the right apps are present.
:returns: None
"""
self.assertIn('tests', self.api._resources.keys())
self.assertIn('auth', self.api._resources.keys())
tests_models = [x.model_name for x in self.api._resources['tests']]
self.assertNotIn('abstract', tests_models)
|
<commit_before>from django.test import TestCase
from django_snooze import apis
class APITestCase(TestCase):
def setUp(self):
"""Sets up an API object to play with.
:returns: None
"""
self.api = apis.api
self.api.discover_models()
def test_apps(self):
"""Test if the right apps are present.
:returns: None
"""
self.assertIn('tests', self.api._resources.keys())
self.assertIn('auth', self.api._resources.keys())
<commit_msg>Test to see if abstract classes sneak in.
Now that get_models has been found to skip abstract classes, we want to test
for this in case this behaviour ever changes.<commit_after>from django.test import TestCase
from django_snooze import apis
class APITestCase(TestCase):
def setUp(self):
"""Sets up an API object to play with.
:returns: None
"""
self.api = apis.api
self.api.discover_models()
def test_apps(self):
"""Test if the right apps are present.
:returns: None
"""
self.assertIn('tests', self.api._resources.keys())
self.assertIn('auth', self.api._resources.keys())
tests_models = [x.model_name for x in self.api._resources['tests']]
self.assertNotIn('abstract', tests_models)
|
4a9f0f909abb955ca579b3abec7c6ffef83429af
|
cli_tests.py
|
cli_tests.py
|
import unittest
from unittest.mock import patch, call
from crc import main
class CliTests(unittest.TestCase):
def test_cli_no_arguments_provided(self):
expected_exit_code = -1
argv = []
with patch('sys.exit') as exit_mock:
main(argv)
self.assertTrue(exit_mock.called)
self.assertEqual(exit_mock.call_args, (call(expected_exit_code)))
def test_table_subcommand_with_no_additional_arguments(self):
expected_exit_code = -1
argv = ['table']
with patch('sys.exit') as exit_mock:
main(argv)
self.assertTrue(exit_mock.called)
self.assertEqual(exit_mock.call_args, (call(expected_exit_code)))
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python3
#
# Copyright (c) 2021, Nicola Coretti
# All rights reserved.
import unittest
from unittest.mock import patch, call
from crc import main
class CliTests(unittest.TestCase):
def test_cli_no_arguments_provided(self):
expected_exit_code = -1
argv = []
with patch('sys.exit') as exit_mock:
main(argv)
self.assertTrue(exit_mock.called)
self.assertEqual(exit_mock.call_args, (call(expected_exit_code)))
def test_table_subcommand_with_no_additional_arguments(self):
expected_exit_code = -1
argv = ['table']
with patch('sys.exit') as exit_mock:
main(argv)
self.assertTrue(exit_mock.called)
self.assertEqual(exit_mock.call_args, (call(expected_exit_code)))
if __name__ == '__main__':
unittest.main()
|
Add shebang to cli_tets.py module
|
Add shebang to cli_tets.py module
|
Python
|
bsd-2-clause
|
Nicoretti/crc
|
import unittest
from unittest.mock import patch, call
from crc import main
class CliTests(unittest.TestCase):
def test_cli_no_arguments_provided(self):
expected_exit_code = -1
argv = []
with patch('sys.exit') as exit_mock:
main(argv)
self.assertTrue(exit_mock.called)
self.assertEqual(exit_mock.call_args, (call(expected_exit_code)))
def test_table_subcommand_with_no_additional_arguments(self):
expected_exit_code = -1
argv = ['table']
with patch('sys.exit') as exit_mock:
main(argv)
self.assertTrue(exit_mock.called)
self.assertEqual(exit_mock.call_args, (call(expected_exit_code)))
if __name__ == '__main__':
unittest.main()
Add shebang to cli_tets.py module
|
#!/usr/bin/env python3
#
# Copyright (c) 2021, Nicola Coretti
# All rights reserved.
import unittest
from unittest.mock import patch, call
from crc import main
class CliTests(unittest.TestCase):
def test_cli_no_arguments_provided(self):
expected_exit_code = -1
argv = []
with patch('sys.exit') as exit_mock:
main(argv)
self.assertTrue(exit_mock.called)
self.assertEqual(exit_mock.call_args, (call(expected_exit_code)))
def test_table_subcommand_with_no_additional_arguments(self):
expected_exit_code = -1
argv = ['table']
with patch('sys.exit') as exit_mock:
main(argv)
self.assertTrue(exit_mock.called)
self.assertEqual(exit_mock.call_args, (call(expected_exit_code)))
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from unittest.mock import patch, call
from crc import main
class CliTests(unittest.TestCase):
def test_cli_no_arguments_provided(self):
expected_exit_code = -1
argv = []
with patch('sys.exit') as exit_mock:
main(argv)
self.assertTrue(exit_mock.called)
self.assertEqual(exit_mock.call_args, (call(expected_exit_code)))
def test_table_subcommand_with_no_additional_arguments(self):
expected_exit_code = -1
argv = ['table']
with patch('sys.exit') as exit_mock:
main(argv)
self.assertTrue(exit_mock.called)
self.assertEqual(exit_mock.call_args, (call(expected_exit_code)))
if __name__ == '__main__':
unittest.main()
<commit_msg>Add shebang to cli_tets.py module<commit_after>
|
#!/usr/bin/env python3
#
# Copyright (c) 2021, Nicola Coretti
# All rights reserved.
import unittest
from unittest.mock import patch, call
from crc import main
class CliTests(unittest.TestCase):
def test_cli_no_arguments_provided(self):
expected_exit_code = -1
argv = []
with patch('sys.exit') as exit_mock:
main(argv)
self.assertTrue(exit_mock.called)
self.assertEqual(exit_mock.call_args, (call(expected_exit_code)))
def test_table_subcommand_with_no_additional_arguments(self):
expected_exit_code = -1
argv = ['table']
with patch('sys.exit') as exit_mock:
main(argv)
self.assertTrue(exit_mock.called)
self.assertEqual(exit_mock.call_args, (call(expected_exit_code)))
if __name__ == '__main__':
unittest.main()
|
import unittest
from unittest.mock import patch, call
from crc import main
class CliTests(unittest.TestCase):
def test_cli_no_arguments_provided(self):
expected_exit_code = -1
argv = []
with patch('sys.exit') as exit_mock:
main(argv)
self.assertTrue(exit_mock.called)
self.assertEqual(exit_mock.call_args, (call(expected_exit_code)))
def test_table_subcommand_with_no_additional_arguments(self):
expected_exit_code = -1
argv = ['table']
with patch('sys.exit') as exit_mock:
main(argv)
self.assertTrue(exit_mock.called)
self.assertEqual(exit_mock.call_args, (call(expected_exit_code)))
if __name__ == '__main__':
unittest.main()
Add shebang to cli_tets.py module#!/usr/bin/env python3
#
# Copyright (c) 2021, Nicola Coretti
# All rights reserved.
import unittest
from unittest.mock import patch, call
from crc import main
class CliTests(unittest.TestCase):
def test_cli_no_arguments_provided(self):
expected_exit_code = -1
argv = []
with patch('sys.exit') as exit_mock:
main(argv)
self.assertTrue(exit_mock.called)
self.assertEqual(exit_mock.call_args, (call(expected_exit_code)))
def test_table_subcommand_with_no_additional_arguments(self):
expected_exit_code = -1
argv = ['table']
with patch('sys.exit') as exit_mock:
main(argv)
self.assertTrue(exit_mock.called)
self.assertEqual(exit_mock.call_args, (call(expected_exit_code)))
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from unittest.mock import patch, call
from crc import main
class CliTests(unittest.TestCase):
def test_cli_no_arguments_provided(self):
expected_exit_code = -1
argv = []
with patch('sys.exit') as exit_mock:
main(argv)
self.assertTrue(exit_mock.called)
self.assertEqual(exit_mock.call_args, (call(expected_exit_code)))
def test_table_subcommand_with_no_additional_arguments(self):
expected_exit_code = -1
argv = ['table']
with patch('sys.exit') as exit_mock:
main(argv)
self.assertTrue(exit_mock.called)
self.assertEqual(exit_mock.call_args, (call(expected_exit_code)))
if __name__ == '__main__':
unittest.main()
<commit_msg>Add shebang to cli_tets.py module<commit_after>#!/usr/bin/env python3
#
# Copyright (c) 2021, Nicola Coretti
# All rights reserved.
import unittest
from unittest.mock import patch, call
from crc import main
class CliTests(unittest.TestCase):
def test_cli_no_arguments_provided(self):
expected_exit_code = -1
argv = []
with patch('sys.exit') as exit_mock:
main(argv)
self.assertTrue(exit_mock.called)
self.assertEqual(exit_mock.call_args, (call(expected_exit_code)))
def test_table_subcommand_with_no_additional_arguments(self):
expected_exit_code = -1
argv = ['table']
with patch('sys.exit') as exit_mock:
main(argv)
self.assertTrue(exit_mock.called)
self.assertEqual(exit_mock.call_args, (call(expected_exit_code)))
if __name__ == '__main__':
unittest.main()
|
1a5583fdba626059e5481e6099b14b8988316dfe
|
server/superdesk/locators/__init__.py
|
server/superdesk/locators/__init__.py
|
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import json
import os
def _load_json(file_path):
"""
Reads JSON string from the file located in file_path.
:param file_path: path of the file having JSON string.
:return: JSON Object
"""
with open(file_path, 'r') as f:
return json.load(f)
_dir_name = os.path.dirname(os.path.realpath(__file__))
_locators_file_path = os.path.join(_dir_name, 'data', 'locators.json')
locators = _load_json(_locators_file_path)
|
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import json
import os
def _load_json(file_path):
"""
Reads JSON string from the file located in file_path.
:param file_path: path of the file having JSON string.
:return: JSON Object
"""
with open(file_path, 'r', encoding='utf-8') as f:
return json.load(f)
_dir_name = os.path.dirname(os.path.realpath(__file__))
_locators_file_path = os.path.join(_dir_name, 'data', 'locators.json')
locators = _load_json(_locators_file_path)
|
Fix locators reading on ubuntu
|
Fix locators reading on ubuntu
|
Python
|
agpl-3.0
|
thnkloud9/superdesk,superdesk/superdesk,marwoodandrew/superdesk-aap,ancafarcas/superdesk,ioanpocol/superdesk-ntb,gbbr/superdesk,liveblog/superdesk,plamut/superdesk,pavlovicnemanja92/superdesk,akintolga/superdesk,pavlovicnemanja92/superdesk,pavlovicnemanja/superdesk,verifiedpixel/superdesk,amagdas/superdesk,akintolga/superdesk-aap,petrjasek/superdesk,akintolga/superdesk,amagdas/superdesk,darconny/superdesk,plamut/superdesk,mdhaman/superdesk,ioanpocol/superdesk-ntb,marwoodandrew/superdesk,mugurrus/superdesk,plamut/superdesk,Aca-jov/superdesk,gbbr/superdesk,sivakuna-aap/superdesk,Aca-jov/superdesk,marwoodandrew/superdesk,mdhaman/superdesk-aap,petrjasek/superdesk-ntb,mdhaman/superdesk,ioanpocol/superdesk,hlmnrmr/superdesk,plamut/superdesk,verifiedpixel/superdesk,marwoodandrew/superdesk,liveblog/superdesk,superdesk/superdesk,pavlovicnemanja/superdesk,petrjasek/superdesk-ntb,petrjasek/superdesk-ntb,mdhaman/superdesk-aap,ancafarcas/superdesk,mdhaman/superdesk-aap,sivakuna-aap/superdesk,hlmnrmr/superdesk,akintolga/superdesk-aap,marwoodandrew/superdesk-aap,petrjasek/superdesk,superdesk/superdesk-aap,pavlovicnemanja92/superdesk,darconny/superdesk,liveblog/superdesk,superdesk/superdesk,verifiedpixel/superdesk,amagdas/superdesk,petrjasek/superdesk,superdesk/superdesk-ntb,Aca-jov/superdesk,superdesk/superdesk,pavlovicnemanja92/superdesk,akintolga/superdesk,marwoodandrew/superdesk,akintolga/superdesk-aap,fritzSF/superdesk,superdesk/superdesk-aap,superdesk/superdesk-ntb,superdesk/superdesk-ntb,hlmnrmr/superdesk,ioanpocol/superdesk,ioanpocol/superdesk-ntb,fritzSF/superdesk,superdesk/superdesk-ntb,sjunaid/superdesk,pavlovicnemanja/superdesk,sivakuna-aap/superdesk,liveblog/superdesk,amagdas/superdesk,thnkloud9/superdesk,akintolga/superdesk,sjunaid/superdesk,marwoodandrew/superdesk-aap,superdesk/superdesk-aap,sivakuna-aap/superdesk,mdhaman/superdesk,pavlovicnemanja/superdesk,akintolga/superdesk,fritzSF/superdesk,sjunaid/superdesk,verifiedpixel/superdesk,ioanpocol/superdesk,plamut/superdesk,petrjasek/superdesk,mugurrus/superdesk,mdhaman/superdesk-aap,ancafarcas/superdesk,akintolga/superdesk-aap,darconny/superdesk,petrjasek/superdesk-ntb,amagdas/superdesk,sivakuna-aap/superdesk,thnkloud9/superdesk,marwoodandrew/superdesk,verifiedpixel/superdesk,fritzSF/superdesk,marwoodandrew/superdesk-aap,pavlovicnemanja92/superdesk,fritzSF/superdesk,liveblog/superdesk,gbbr/superdesk,superdesk/superdesk-aap,mugurrus/superdesk
|
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import json
import os
def _load_json(file_path):
"""
Reads JSON string from the file located in file_path.
:param file_path: path of the file having JSON string.
:return: JSON Object
"""
with open(file_path, 'r') as f:
return json.load(f)
_dir_name = os.path.dirname(os.path.realpath(__file__))
_locators_file_path = os.path.join(_dir_name, 'data', 'locators.json')
locators = _load_json(_locators_file_path)
Fix locators reading on ubuntu
|
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import json
import os
def _load_json(file_path):
"""
Reads JSON string from the file located in file_path.
:param file_path: path of the file having JSON string.
:return: JSON Object
"""
with open(file_path, 'r', encoding='utf-8') as f:
return json.load(f)
_dir_name = os.path.dirname(os.path.realpath(__file__))
_locators_file_path = os.path.join(_dir_name, 'data', 'locators.json')
locators = _load_json(_locators_file_path)
|
<commit_before># -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import json
import os
def _load_json(file_path):
"""
Reads JSON string from the file located in file_path.
:param file_path: path of the file having JSON string.
:return: JSON Object
"""
with open(file_path, 'r') as f:
return json.load(f)
_dir_name = os.path.dirname(os.path.realpath(__file__))
_locators_file_path = os.path.join(_dir_name, 'data', 'locators.json')
locators = _load_json(_locators_file_path)
<commit_msg>Fix locators reading on ubuntu<commit_after>
|
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import json
import os
def _load_json(file_path):
"""
Reads JSON string from the file located in file_path.
:param file_path: path of the file having JSON string.
:return: JSON Object
"""
with open(file_path, 'r', encoding='utf-8') as f:
return json.load(f)
_dir_name = os.path.dirname(os.path.realpath(__file__))
_locators_file_path = os.path.join(_dir_name, 'data', 'locators.json')
locators = _load_json(_locators_file_path)
|
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import json
import os
def _load_json(file_path):
"""
Reads JSON string from the file located in file_path.
:param file_path: path of the file having JSON string.
:return: JSON Object
"""
with open(file_path, 'r') as f:
return json.load(f)
_dir_name = os.path.dirname(os.path.realpath(__file__))
_locators_file_path = os.path.join(_dir_name, 'data', 'locators.json')
locators = _load_json(_locators_file_path)
Fix locators reading on ubuntu# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import json
import os
def _load_json(file_path):
"""
Reads JSON string from the file located in file_path.
:param file_path: path of the file having JSON string.
:return: JSON Object
"""
with open(file_path, 'r', encoding='utf-8') as f:
return json.load(f)
_dir_name = os.path.dirname(os.path.realpath(__file__))
_locators_file_path = os.path.join(_dir_name, 'data', 'locators.json')
locators = _load_json(_locators_file_path)
|
<commit_before># -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import json
import os
def _load_json(file_path):
"""
Reads JSON string from the file located in file_path.
:param file_path: path of the file having JSON string.
:return: JSON Object
"""
with open(file_path, 'r') as f:
return json.load(f)
_dir_name = os.path.dirname(os.path.realpath(__file__))
_locators_file_path = os.path.join(_dir_name, 'data', 'locators.json')
locators = _load_json(_locators_file_path)
<commit_msg>Fix locators reading on ubuntu<commit_after># -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import json
import os
def _load_json(file_path):
"""
Reads JSON string from the file located in file_path.
:param file_path: path of the file having JSON string.
:return: JSON Object
"""
with open(file_path, 'r', encoding='utf-8') as f:
return json.load(f)
_dir_name = os.path.dirname(os.path.realpath(__file__))
_locators_file_path = os.path.join(_dir_name, 'data', 'locators.json')
locators = _load_json(_locators_file_path)
|
cffaea8986aa300a632d3a0d39219431efe80f9e
|
rever/__init__.py
|
rever/__init__.py
|
import builtins
# setup xonsh ctx and execer
builtins.__xonsh_ctx__ = {}
from xonsh.execer import Execer
builtins.__xonsh_execer__ = Execer(xonsh_ctx=builtins.__xonsh_ctx__)
from xonsh.shell import Shell
builtins.__xonsh_shell__ = Shell(builtins.__xonsh_execer__,
ctx=builtins.__xonsh_ctx__,
shell_type='none')
# setup import hooks
import xonsh.imphooks
xonsh.imphooks.install_import_hooks()
__version__ = '0.0.0'
|
import builtins
# setup xonsh ctx and execer
builtins.__xonsh_ctx__ = {}
from xonsh.execer import Execer
builtins.__xonsh_execer__ = Execer(xonsh_ctx=builtins.__xonsh_ctx__)
from xonsh.shell import Shell
builtins.__xonsh_shell__ = Shell(builtins.__xonsh_execer__,
ctx=builtins.__xonsh_ctx__,
shell_type='none')
builtins.__xonsh_env__['RAISE_SUBPROC_ERROR'] = True
# setup import hooks
import xonsh.imphooks
xonsh.imphooks.install_import_hooks()
__version__ = '0.0.0'
|
Raise on subproc error everywhere in rever
|
Raise on subproc error everywhere in rever
|
Python
|
bsd-3-clause
|
ergs/rever,scopatz/rever
|
import builtins
# setup xonsh ctx and execer
builtins.__xonsh_ctx__ = {}
from xonsh.execer import Execer
builtins.__xonsh_execer__ = Execer(xonsh_ctx=builtins.__xonsh_ctx__)
from xonsh.shell import Shell
builtins.__xonsh_shell__ = Shell(builtins.__xonsh_execer__,
ctx=builtins.__xonsh_ctx__,
shell_type='none')
# setup import hooks
import xonsh.imphooks
xonsh.imphooks.install_import_hooks()
__version__ = '0.0.0'Raise on subproc error everywhere in rever
|
import builtins
# setup xonsh ctx and execer
builtins.__xonsh_ctx__ = {}
from xonsh.execer import Execer
builtins.__xonsh_execer__ = Execer(xonsh_ctx=builtins.__xonsh_ctx__)
from xonsh.shell import Shell
builtins.__xonsh_shell__ = Shell(builtins.__xonsh_execer__,
ctx=builtins.__xonsh_ctx__,
shell_type='none')
builtins.__xonsh_env__['RAISE_SUBPROC_ERROR'] = True
# setup import hooks
import xonsh.imphooks
xonsh.imphooks.install_import_hooks()
__version__ = '0.0.0'
|
<commit_before>import builtins
# setup xonsh ctx and execer
builtins.__xonsh_ctx__ = {}
from xonsh.execer import Execer
builtins.__xonsh_execer__ = Execer(xonsh_ctx=builtins.__xonsh_ctx__)
from xonsh.shell import Shell
builtins.__xonsh_shell__ = Shell(builtins.__xonsh_execer__,
ctx=builtins.__xonsh_ctx__,
shell_type='none')
# setup import hooks
import xonsh.imphooks
xonsh.imphooks.install_import_hooks()
__version__ = '0.0.0'<commit_msg>Raise on subproc error everywhere in rever<commit_after>
|
import builtins
# setup xonsh ctx and execer
builtins.__xonsh_ctx__ = {}
from xonsh.execer import Execer
builtins.__xonsh_execer__ = Execer(xonsh_ctx=builtins.__xonsh_ctx__)
from xonsh.shell import Shell
builtins.__xonsh_shell__ = Shell(builtins.__xonsh_execer__,
ctx=builtins.__xonsh_ctx__,
shell_type='none')
builtins.__xonsh_env__['RAISE_SUBPROC_ERROR'] = True
# setup import hooks
import xonsh.imphooks
xonsh.imphooks.install_import_hooks()
__version__ = '0.0.0'
|
import builtins
# setup xonsh ctx and execer
builtins.__xonsh_ctx__ = {}
from xonsh.execer import Execer
builtins.__xonsh_execer__ = Execer(xonsh_ctx=builtins.__xonsh_ctx__)
from xonsh.shell import Shell
builtins.__xonsh_shell__ = Shell(builtins.__xonsh_execer__,
ctx=builtins.__xonsh_ctx__,
shell_type='none')
# setup import hooks
import xonsh.imphooks
xonsh.imphooks.install_import_hooks()
__version__ = '0.0.0'Raise on subproc error everywhere in reverimport builtins
# setup xonsh ctx and execer
builtins.__xonsh_ctx__ = {}
from xonsh.execer import Execer
builtins.__xonsh_execer__ = Execer(xonsh_ctx=builtins.__xonsh_ctx__)
from xonsh.shell import Shell
builtins.__xonsh_shell__ = Shell(builtins.__xonsh_execer__,
ctx=builtins.__xonsh_ctx__,
shell_type='none')
builtins.__xonsh_env__['RAISE_SUBPROC_ERROR'] = True
# setup import hooks
import xonsh.imphooks
xonsh.imphooks.install_import_hooks()
__version__ = '0.0.0'
|
<commit_before>import builtins
# setup xonsh ctx and execer
builtins.__xonsh_ctx__ = {}
from xonsh.execer import Execer
builtins.__xonsh_execer__ = Execer(xonsh_ctx=builtins.__xonsh_ctx__)
from xonsh.shell import Shell
builtins.__xonsh_shell__ = Shell(builtins.__xonsh_execer__,
ctx=builtins.__xonsh_ctx__,
shell_type='none')
# setup import hooks
import xonsh.imphooks
xonsh.imphooks.install_import_hooks()
__version__ = '0.0.0'<commit_msg>Raise on subproc error everywhere in rever<commit_after>import builtins
# setup xonsh ctx and execer
builtins.__xonsh_ctx__ = {}
from xonsh.execer import Execer
builtins.__xonsh_execer__ = Execer(xonsh_ctx=builtins.__xonsh_ctx__)
from xonsh.shell import Shell
builtins.__xonsh_shell__ = Shell(builtins.__xonsh_execer__,
ctx=builtins.__xonsh_ctx__,
shell_type='none')
builtins.__xonsh_env__['RAISE_SUBPROC_ERROR'] = True
# setup import hooks
import xonsh.imphooks
xonsh.imphooks.install_import_hooks()
__version__ = '0.0.0'
|
1b75e25746305ec47a72874e854744c395cceec6
|
src/ocspdash/constants.py
|
src/ocspdash/constants.py
|
import os
import requests.utils
from . import __name__, __version__
OCSPDASH_API_VERSION = 'v0'
OCSPDASH_DIRECTORY = os.path.join(os.path.expanduser('~'), '.ocspdash')
if not os.path.exists(OCSPDASH_DIRECTORY):
os.makedirs(OCSPDASH_DIRECTORY)
OCSPDASH_DATABASE_PATH = os.path.join(OCSPDASH_DIRECTORY, 'ocspdash.db')
OCSPDASH_DATABASE_CONNECTION = 'sqlite:///' + OCSPDASH_DATABASE_PATH
CENSYS_RATE_LIMIT = 0.2 # max requests per second
OCSPDASH_USER_AGENT = ' '.join([requests.utils.default_user_agent(), f'{__name__}/{__version__}'])
|
import os
import requests.utils
from . import __name__, __version__
OCSPDASH_API_VERSION = 'v0'
OCSPDASH_DIRECTORY = os.environ.get('OCSPDASH_DIRECTORY', os.path.join(os.path.expanduser('~'), '.ocspdash'))
if not os.path.exists(OCSPDASH_DIRECTORY):
os.makedirs(OCSPDASH_DIRECTORY)
OCSPDASH_DATABASE_CONNECTION = 'sqlite:///' + os.path.join(OCSPDASH_DIRECTORY, 'ocspdash.db')
CENSYS_RATE_LIMIT = float(os.environ.get('OCSPDASH_RATE', 0.2)) # max requests per second
OCSPDASH_USER_AGENT = ' '.join([requests.utils.default_user_agent(), f'{__name__}/{__version__}'])
|
Allow config to be set from environment
|
Allow config to be set from environment
|
Python
|
mit
|
scolby33/OCSPdash,scolby33/OCSPdash,scolby33/OCSPdash
|
import os
import requests.utils
from . import __name__, __version__
OCSPDASH_API_VERSION = 'v0'
OCSPDASH_DIRECTORY = os.path.join(os.path.expanduser('~'), '.ocspdash')
if not os.path.exists(OCSPDASH_DIRECTORY):
os.makedirs(OCSPDASH_DIRECTORY)
OCSPDASH_DATABASE_PATH = os.path.join(OCSPDASH_DIRECTORY, 'ocspdash.db')
OCSPDASH_DATABASE_CONNECTION = 'sqlite:///' + OCSPDASH_DATABASE_PATH
CENSYS_RATE_LIMIT = 0.2 # max requests per second
OCSPDASH_USER_AGENT = ' '.join([requests.utils.default_user_agent(), f'{__name__}/{__version__}'])
Allow config to be set from environment
|
import os
import requests.utils
from . import __name__, __version__
OCSPDASH_API_VERSION = 'v0'
OCSPDASH_DIRECTORY = os.environ.get('OCSPDASH_DIRECTORY', os.path.join(os.path.expanduser('~'), '.ocspdash'))
if not os.path.exists(OCSPDASH_DIRECTORY):
os.makedirs(OCSPDASH_DIRECTORY)
OCSPDASH_DATABASE_CONNECTION = 'sqlite:///' + os.path.join(OCSPDASH_DIRECTORY, 'ocspdash.db')
CENSYS_RATE_LIMIT = float(os.environ.get('OCSPDASH_RATE', 0.2)) # max requests per second
OCSPDASH_USER_AGENT = ' '.join([requests.utils.default_user_agent(), f'{__name__}/{__version__}'])
|
<commit_before>import os
import requests.utils
from . import __name__, __version__
OCSPDASH_API_VERSION = 'v0'
OCSPDASH_DIRECTORY = os.path.join(os.path.expanduser('~'), '.ocspdash')
if not os.path.exists(OCSPDASH_DIRECTORY):
os.makedirs(OCSPDASH_DIRECTORY)
OCSPDASH_DATABASE_PATH = os.path.join(OCSPDASH_DIRECTORY, 'ocspdash.db')
OCSPDASH_DATABASE_CONNECTION = 'sqlite:///' + OCSPDASH_DATABASE_PATH
CENSYS_RATE_LIMIT = 0.2 # max requests per second
OCSPDASH_USER_AGENT = ' '.join([requests.utils.default_user_agent(), f'{__name__}/{__version__}'])
<commit_msg>Allow config to be set from environment<commit_after>
|
import os
import requests.utils
from . import __name__, __version__
OCSPDASH_API_VERSION = 'v0'
OCSPDASH_DIRECTORY = os.environ.get('OCSPDASH_DIRECTORY', os.path.join(os.path.expanduser('~'), '.ocspdash'))
if not os.path.exists(OCSPDASH_DIRECTORY):
os.makedirs(OCSPDASH_DIRECTORY)
OCSPDASH_DATABASE_CONNECTION = 'sqlite:///' + os.path.join(OCSPDASH_DIRECTORY, 'ocspdash.db')
CENSYS_RATE_LIMIT = float(os.environ.get('OCSPDASH_RATE', 0.2)) # max requests per second
OCSPDASH_USER_AGENT = ' '.join([requests.utils.default_user_agent(), f'{__name__}/{__version__}'])
|
import os
import requests.utils
from . import __name__, __version__
OCSPDASH_API_VERSION = 'v0'
OCSPDASH_DIRECTORY = os.path.join(os.path.expanduser('~'), '.ocspdash')
if not os.path.exists(OCSPDASH_DIRECTORY):
os.makedirs(OCSPDASH_DIRECTORY)
OCSPDASH_DATABASE_PATH = os.path.join(OCSPDASH_DIRECTORY, 'ocspdash.db')
OCSPDASH_DATABASE_CONNECTION = 'sqlite:///' + OCSPDASH_DATABASE_PATH
CENSYS_RATE_LIMIT = 0.2 # max requests per second
OCSPDASH_USER_AGENT = ' '.join([requests.utils.default_user_agent(), f'{__name__}/{__version__}'])
Allow config to be set from environmentimport os
import requests.utils
from . import __name__, __version__
OCSPDASH_API_VERSION = 'v0'
OCSPDASH_DIRECTORY = os.environ.get('OCSPDASH_DIRECTORY', os.path.join(os.path.expanduser('~'), '.ocspdash'))
if not os.path.exists(OCSPDASH_DIRECTORY):
os.makedirs(OCSPDASH_DIRECTORY)
OCSPDASH_DATABASE_CONNECTION = 'sqlite:///' + os.path.join(OCSPDASH_DIRECTORY, 'ocspdash.db')
CENSYS_RATE_LIMIT = float(os.environ.get('OCSPDASH_RATE', 0.2)) # max requests per second
OCSPDASH_USER_AGENT = ' '.join([requests.utils.default_user_agent(), f'{__name__}/{__version__}'])
|
<commit_before>import os
import requests.utils
from . import __name__, __version__
OCSPDASH_API_VERSION = 'v0'
OCSPDASH_DIRECTORY = os.path.join(os.path.expanduser('~'), '.ocspdash')
if not os.path.exists(OCSPDASH_DIRECTORY):
os.makedirs(OCSPDASH_DIRECTORY)
OCSPDASH_DATABASE_PATH = os.path.join(OCSPDASH_DIRECTORY, 'ocspdash.db')
OCSPDASH_DATABASE_CONNECTION = 'sqlite:///' + OCSPDASH_DATABASE_PATH
CENSYS_RATE_LIMIT = 0.2 # max requests per second
OCSPDASH_USER_AGENT = ' '.join([requests.utils.default_user_agent(), f'{__name__}/{__version__}'])
<commit_msg>Allow config to be set from environment<commit_after>import os
import requests.utils
from . import __name__, __version__
OCSPDASH_API_VERSION = 'v0'
OCSPDASH_DIRECTORY = os.environ.get('OCSPDASH_DIRECTORY', os.path.join(os.path.expanduser('~'), '.ocspdash'))
if not os.path.exists(OCSPDASH_DIRECTORY):
os.makedirs(OCSPDASH_DIRECTORY)
OCSPDASH_DATABASE_CONNECTION = 'sqlite:///' + os.path.join(OCSPDASH_DIRECTORY, 'ocspdash.db')
CENSYS_RATE_LIMIT = float(os.environ.get('OCSPDASH_RATE', 0.2)) # max requests per second
OCSPDASH_USER_AGENT = ' '.join([requests.utils.default_user_agent(), f'{__name__}/{__version__}'])
|
255ef7b16258c67586d14e6c8d8d531a3553cd3e
|
bot/games/tests/test_game_queryset.py
|
bot/games/tests/test_game_queryset.py
|
from django.test import TestCase
from ..models import Game
class QuerySetTests(TestCase):
def test_get_by_name(self):
gta_v = Game.objects.create(name='GTA V')
Game.objects.create(name='Grand Theft Auto V', alias_for=gta_v)
game = Game.objects.get_by_name('gta V')
self.assertEqual(game, gta_v)
game2 = Game.objects.get_by_name('Grand Theft Auto V')
self.assertEqual(game2, gta_v)
# non-existing game should be created
overwatch = Game.objects.get_by_name('Overwatch')
self.assertIsNotNone(overwatch.pk)
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from ..models import Game
class QuerySetTests(TestCase):
def test_get_by_name(self):
gta_v = Game.objects.create(name='GTA V')
Game.objects.create(name='Grand Theft Auto V', alias_for=gta_v)
game = Game.objects.get_by_name('gta V')
self.assertEqual(game, gta_v)
game2 = Game.objects.get_by_name('Grand Theft Auto V')
self.assertEqual(game2, gta_v)
# non-existing game should be created
overwatch = Game.objects.get_by_name('Overwatch')
self.assertIsNotNone(overwatch.pk)
def test_get_by_name_distinct(self):
bf1 = Game.objects.create(name='Battlefield 1')
Game.objects.create(name='Battlefield™ 1 Open Beta', alias_for=bf1)
Game.objects.create(name='Battlefield™ 1', alias_for=bf1)
game = Game.objects.get_by_name('Battlefield 1')
self.assertEqual(bf1, game)
|
Add extra test for regression
|
Add extra test for regression
|
Python
|
mit
|
sergei-maertens/discord-bot,sergei-maertens/discord-bot,sergei-maertens/discord-bot
|
from django.test import TestCase
from ..models import Game
class QuerySetTests(TestCase):
def test_get_by_name(self):
gta_v = Game.objects.create(name='GTA V')
Game.objects.create(name='Grand Theft Auto V', alias_for=gta_v)
game = Game.objects.get_by_name('gta V')
self.assertEqual(game, gta_v)
game2 = Game.objects.get_by_name('Grand Theft Auto V')
self.assertEqual(game2, gta_v)
# non-existing game should be created
overwatch = Game.objects.get_by_name('Overwatch')
self.assertIsNotNone(overwatch.pk)
Add extra test for regression
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from ..models import Game
class QuerySetTests(TestCase):
def test_get_by_name(self):
gta_v = Game.objects.create(name='GTA V')
Game.objects.create(name='Grand Theft Auto V', alias_for=gta_v)
game = Game.objects.get_by_name('gta V')
self.assertEqual(game, gta_v)
game2 = Game.objects.get_by_name('Grand Theft Auto V')
self.assertEqual(game2, gta_v)
# non-existing game should be created
overwatch = Game.objects.get_by_name('Overwatch')
self.assertIsNotNone(overwatch.pk)
def test_get_by_name_distinct(self):
bf1 = Game.objects.create(name='Battlefield 1')
Game.objects.create(name='Battlefield™ 1 Open Beta', alias_for=bf1)
Game.objects.create(name='Battlefield™ 1', alias_for=bf1)
game = Game.objects.get_by_name('Battlefield 1')
self.assertEqual(bf1, game)
|
<commit_before>from django.test import TestCase
from ..models import Game
class QuerySetTests(TestCase):
def test_get_by_name(self):
gta_v = Game.objects.create(name='GTA V')
Game.objects.create(name='Grand Theft Auto V', alias_for=gta_v)
game = Game.objects.get_by_name('gta V')
self.assertEqual(game, gta_v)
game2 = Game.objects.get_by_name('Grand Theft Auto V')
self.assertEqual(game2, gta_v)
# non-existing game should be created
overwatch = Game.objects.get_by_name('Overwatch')
self.assertIsNotNone(overwatch.pk)
<commit_msg>Add extra test for regression<commit_after>
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from ..models import Game
class QuerySetTests(TestCase):
def test_get_by_name(self):
gta_v = Game.objects.create(name='GTA V')
Game.objects.create(name='Grand Theft Auto V', alias_for=gta_v)
game = Game.objects.get_by_name('gta V')
self.assertEqual(game, gta_v)
game2 = Game.objects.get_by_name('Grand Theft Auto V')
self.assertEqual(game2, gta_v)
# non-existing game should be created
overwatch = Game.objects.get_by_name('Overwatch')
self.assertIsNotNone(overwatch.pk)
def test_get_by_name_distinct(self):
bf1 = Game.objects.create(name='Battlefield 1')
Game.objects.create(name='Battlefield™ 1 Open Beta', alias_for=bf1)
Game.objects.create(name='Battlefield™ 1', alias_for=bf1)
game = Game.objects.get_by_name('Battlefield 1')
self.assertEqual(bf1, game)
|
from django.test import TestCase
from ..models import Game
class QuerySetTests(TestCase):
def test_get_by_name(self):
gta_v = Game.objects.create(name='GTA V')
Game.objects.create(name='Grand Theft Auto V', alias_for=gta_v)
game = Game.objects.get_by_name('gta V')
self.assertEqual(game, gta_v)
game2 = Game.objects.get_by_name('Grand Theft Auto V')
self.assertEqual(game2, gta_v)
# non-existing game should be created
overwatch = Game.objects.get_by_name('Overwatch')
self.assertIsNotNone(overwatch.pk)
Add extra test for regression# -*- coding: utf-8 -*-
from django.test import TestCase
from ..models import Game
class QuerySetTests(TestCase):
def test_get_by_name(self):
gta_v = Game.objects.create(name='GTA V')
Game.objects.create(name='Grand Theft Auto V', alias_for=gta_v)
game = Game.objects.get_by_name('gta V')
self.assertEqual(game, gta_v)
game2 = Game.objects.get_by_name('Grand Theft Auto V')
self.assertEqual(game2, gta_v)
# non-existing game should be created
overwatch = Game.objects.get_by_name('Overwatch')
self.assertIsNotNone(overwatch.pk)
def test_get_by_name_distinct(self):
bf1 = Game.objects.create(name='Battlefield 1')
Game.objects.create(name='Battlefield™ 1 Open Beta', alias_for=bf1)
Game.objects.create(name='Battlefield™ 1', alias_for=bf1)
game = Game.objects.get_by_name('Battlefield 1')
self.assertEqual(bf1, game)
|
<commit_before>from django.test import TestCase
from ..models import Game
class QuerySetTests(TestCase):
def test_get_by_name(self):
gta_v = Game.objects.create(name='GTA V')
Game.objects.create(name='Grand Theft Auto V', alias_for=gta_v)
game = Game.objects.get_by_name('gta V')
self.assertEqual(game, gta_v)
game2 = Game.objects.get_by_name('Grand Theft Auto V')
self.assertEqual(game2, gta_v)
# non-existing game should be created
overwatch = Game.objects.get_by_name('Overwatch')
self.assertIsNotNone(overwatch.pk)
<commit_msg>Add extra test for regression<commit_after># -*- coding: utf-8 -*-
from django.test import TestCase
from ..models import Game
class QuerySetTests(TestCase):
def test_get_by_name(self):
gta_v = Game.objects.create(name='GTA V')
Game.objects.create(name='Grand Theft Auto V', alias_for=gta_v)
game = Game.objects.get_by_name('gta V')
self.assertEqual(game, gta_v)
game2 = Game.objects.get_by_name('Grand Theft Auto V')
self.assertEqual(game2, gta_v)
# non-existing game should be created
overwatch = Game.objects.get_by_name('Overwatch')
self.assertIsNotNone(overwatch.pk)
def test_get_by_name_distinct(self):
bf1 = Game.objects.create(name='Battlefield 1')
Game.objects.create(name='Battlefield™ 1 Open Beta', alias_for=bf1)
Game.objects.create(name='Battlefield™ 1', alias_for=bf1)
game = Game.objects.get_by_name('Battlefield 1')
self.assertEqual(bf1, game)
|
b0b8483b6ff7085585a480308d553d2dd4c84c8b
|
pi/cli.py
|
pi/cli.py
|
import logging
import pkgutil
import pi
import pi.commands
commands = {}
for imp_importer, name, ispkg in pkgutil.iter_modules(pi.commands.__path__):
fullname = pi.commands.__name__ + '.' + name
# if fullname not in sys.modules:
imp_loader = imp_importer.find_module(fullname)
module = imp_loader.load_module(fullname)
commands[name] = module
def main():
import argparse
parser = argparse.ArgumentParser(description='Python package manipulation',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('command', choices=commands, help='Command to run')
parser.add_argument('--version', action='version', version=pi.__version__)
parser.add_argument('--verbose', action='store_true', help='Print extra information')
opts, _ = parser.parse_known_args()
loglevel = logging.DEBUG if opts.verbose else logging.INFO
# logging.basicConfig(format='%(levelname)s: %(message)s', level=loglevel)
logging.basicConfig(level=loglevel)
commands[opts.command].cli(parser)
if __name__ == '__main__':
main()
|
import logging
import pkgutil
import pi
import pi.commands
commands = {}
for imp_importer, name, ispkg in pkgutil.iter_modules(pi.commands.__path__):
fullname = pi.commands.__name__ + '.' + name
# if fullname not in sys.modules:
imp_loader = imp_importer.find_module(fullname)
module = imp_loader.load_module(fullname)
commands[name] = module
def main():
import argparse
parser = argparse.ArgumentParser(description='Python package manipulation',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('command', choices=commands, help='Command to run')
parser.add_argument('-V', '--version', action='version', version=pi.__version__)
parser.add_argument('-v', '--verbose', action='store_true', help='Print extra information')
opts, _ = parser.parse_known_args()
loglevel = logging.DEBUG if opts.verbose else logging.INFO
# logging.basicConfig(format='%(levelname)s: %(message)s', level=loglevel)
logging.basicConfig(level=loglevel)
commands[opts.command].cli(parser)
if __name__ == '__main__':
main()
|
Add short flags for version and verbose to match 'python' command
|
Add short flags for version and verbose to match 'python' command
|
Python
|
mit
|
chbrown/pi
|
import logging
import pkgutil
import pi
import pi.commands
commands = {}
for imp_importer, name, ispkg in pkgutil.iter_modules(pi.commands.__path__):
fullname = pi.commands.__name__ + '.' + name
# if fullname not in sys.modules:
imp_loader = imp_importer.find_module(fullname)
module = imp_loader.load_module(fullname)
commands[name] = module
def main():
import argparse
parser = argparse.ArgumentParser(description='Python package manipulation',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('command', choices=commands, help='Command to run')
parser.add_argument('--version', action='version', version=pi.__version__)
parser.add_argument('--verbose', action='store_true', help='Print extra information')
opts, _ = parser.parse_known_args()
loglevel = logging.DEBUG if opts.verbose else logging.INFO
# logging.basicConfig(format='%(levelname)s: %(message)s', level=loglevel)
logging.basicConfig(level=loglevel)
commands[opts.command].cli(parser)
if __name__ == '__main__':
main()
Add short flags for version and verbose to match 'python' command
|
import logging
import pkgutil
import pi
import pi.commands
commands = {}
for imp_importer, name, ispkg in pkgutil.iter_modules(pi.commands.__path__):
fullname = pi.commands.__name__ + '.' + name
# if fullname not in sys.modules:
imp_loader = imp_importer.find_module(fullname)
module = imp_loader.load_module(fullname)
commands[name] = module
def main():
import argparse
parser = argparse.ArgumentParser(description='Python package manipulation',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('command', choices=commands, help='Command to run')
parser.add_argument('-V', '--version', action='version', version=pi.__version__)
parser.add_argument('-v', '--verbose', action='store_true', help='Print extra information')
opts, _ = parser.parse_known_args()
loglevel = logging.DEBUG if opts.verbose else logging.INFO
# logging.basicConfig(format='%(levelname)s: %(message)s', level=loglevel)
logging.basicConfig(level=loglevel)
commands[opts.command].cli(parser)
if __name__ == '__main__':
main()
|
<commit_before>import logging
import pkgutil
import pi
import pi.commands
commands = {}
for imp_importer, name, ispkg in pkgutil.iter_modules(pi.commands.__path__):
fullname = pi.commands.__name__ + '.' + name
# if fullname not in sys.modules:
imp_loader = imp_importer.find_module(fullname)
module = imp_loader.load_module(fullname)
commands[name] = module
def main():
import argparse
parser = argparse.ArgumentParser(description='Python package manipulation',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('command', choices=commands, help='Command to run')
parser.add_argument('--version', action='version', version=pi.__version__)
parser.add_argument('--verbose', action='store_true', help='Print extra information')
opts, _ = parser.parse_known_args()
loglevel = logging.DEBUG if opts.verbose else logging.INFO
# logging.basicConfig(format='%(levelname)s: %(message)s', level=loglevel)
logging.basicConfig(level=loglevel)
commands[opts.command].cli(parser)
if __name__ == '__main__':
main()
<commit_msg>Add short flags for version and verbose to match 'python' command<commit_after>
|
import logging
import pkgutil
import pi
import pi.commands
commands = {}
for imp_importer, name, ispkg in pkgutil.iter_modules(pi.commands.__path__):
fullname = pi.commands.__name__ + '.' + name
# if fullname not in sys.modules:
imp_loader = imp_importer.find_module(fullname)
module = imp_loader.load_module(fullname)
commands[name] = module
def main():
import argparse
parser = argparse.ArgumentParser(description='Python package manipulation',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('command', choices=commands, help='Command to run')
parser.add_argument('-V', '--version', action='version', version=pi.__version__)
parser.add_argument('-v', '--verbose', action='store_true', help='Print extra information')
opts, _ = parser.parse_known_args()
loglevel = logging.DEBUG if opts.verbose else logging.INFO
# logging.basicConfig(format='%(levelname)s: %(message)s', level=loglevel)
logging.basicConfig(level=loglevel)
commands[opts.command].cli(parser)
if __name__ == '__main__':
main()
|
import logging
import pkgutil
import pi
import pi.commands
commands = {}
for imp_importer, name, ispkg in pkgutil.iter_modules(pi.commands.__path__):
fullname = pi.commands.__name__ + '.' + name
# if fullname not in sys.modules:
imp_loader = imp_importer.find_module(fullname)
module = imp_loader.load_module(fullname)
commands[name] = module
def main():
import argparse
parser = argparse.ArgumentParser(description='Python package manipulation',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('command', choices=commands, help='Command to run')
parser.add_argument('--version', action='version', version=pi.__version__)
parser.add_argument('--verbose', action='store_true', help='Print extra information')
opts, _ = parser.parse_known_args()
loglevel = logging.DEBUG if opts.verbose else logging.INFO
# logging.basicConfig(format='%(levelname)s: %(message)s', level=loglevel)
logging.basicConfig(level=loglevel)
commands[opts.command].cli(parser)
if __name__ == '__main__':
main()
Add short flags for version and verbose to match 'python' commandimport logging
import pkgutil
import pi
import pi.commands
commands = {}
for imp_importer, name, ispkg in pkgutil.iter_modules(pi.commands.__path__):
fullname = pi.commands.__name__ + '.' + name
# if fullname not in sys.modules:
imp_loader = imp_importer.find_module(fullname)
module = imp_loader.load_module(fullname)
commands[name] = module
def main():
import argparse
parser = argparse.ArgumentParser(description='Python package manipulation',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('command', choices=commands, help='Command to run')
parser.add_argument('-V', '--version', action='version', version=pi.__version__)
parser.add_argument('-v', '--verbose', action='store_true', help='Print extra information')
opts, _ = parser.parse_known_args()
loglevel = logging.DEBUG if opts.verbose else logging.INFO
# logging.basicConfig(format='%(levelname)s: %(message)s', level=loglevel)
logging.basicConfig(level=loglevel)
commands[opts.command].cli(parser)
if __name__ == '__main__':
main()
|
<commit_before>import logging
import pkgutil
import pi
import pi.commands
commands = {}
for imp_importer, name, ispkg in pkgutil.iter_modules(pi.commands.__path__):
fullname = pi.commands.__name__ + '.' + name
# if fullname not in sys.modules:
imp_loader = imp_importer.find_module(fullname)
module = imp_loader.load_module(fullname)
commands[name] = module
def main():
import argparse
parser = argparse.ArgumentParser(description='Python package manipulation',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('command', choices=commands, help='Command to run')
parser.add_argument('--version', action='version', version=pi.__version__)
parser.add_argument('--verbose', action='store_true', help='Print extra information')
opts, _ = parser.parse_known_args()
loglevel = logging.DEBUG if opts.verbose else logging.INFO
# logging.basicConfig(format='%(levelname)s: %(message)s', level=loglevel)
logging.basicConfig(level=loglevel)
commands[opts.command].cli(parser)
if __name__ == '__main__':
main()
<commit_msg>Add short flags for version and verbose to match 'python' command<commit_after>import logging
import pkgutil
import pi
import pi.commands
commands = {}
for imp_importer, name, ispkg in pkgutil.iter_modules(pi.commands.__path__):
fullname = pi.commands.__name__ + '.' + name
# if fullname not in sys.modules:
imp_loader = imp_importer.find_module(fullname)
module = imp_loader.load_module(fullname)
commands[name] = module
def main():
import argparse
parser = argparse.ArgumentParser(description='Python package manipulation',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('command', choices=commands, help='Command to run')
parser.add_argument('-V', '--version', action='version', version=pi.__version__)
parser.add_argument('-v', '--verbose', action='store_true', help='Print extra information')
opts, _ = parser.parse_known_args()
loglevel = logging.DEBUG if opts.verbose else logging.INFO
# logging.basicConfig(format='%(levelname)s: %(message)s', level=loglevel)
logging.basicConfig(level=loglevel)
commands[opts.command].cli(parser)
if __name__ == '__main__':
main()
|
8e131a0382bac04aa8e04a4aeb3f9cf31d36671f
|
stock_move_description/__openerp__.py
|
stock_move_description/__openerp__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': "Stock move description",
'version': '1.0',
'category': 'Warehouse Management',
'author': "Agile Business Group, Odoo Community Association (OCA)",
'website': 'http://www.agilebg.com',
'license': 'AGPL-3',
'depends': [
'stock_account',
'delivery',
],
'data': [
'security/stock_security.xml',
'stock_config_settings_view.xml',
'stock_move_view.xml',
],
'test': [
'test/stock_move_description.yml',
],
'installable': True
}
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': "Stock move description",
'version': '1.0',
'category': 'Warehouse Management',
'author': "Agile Business Group, Odoo Community Association (OCA)",
'website': 'http://www.agilebg.com',
'license': 'AGPL-3',
'depends': [
'stock_account',
],
'data': [
'security/stock_security.xml',
'stock_config_settings_view.xml',
'stock_move_view.xml',
],
'test': [
'test/stock_move_description.yml',
],
'installable': True
}
|
Remove delivery from depends as useless
|
Remove delivery from depends as useless
|
Python
|
agpl-3.0
|
open-synergy/stock-logistics-workflow,gurneyalex/stock-logistics-workflow,brain-tec/stock-logistics-workflow,Antiun/stock-logistics-workflow,BT-jmichaud/stock-logistics-workflow,Eficent/stock-logistics-workflow,gurneyalex/stock-logistics-workflow,archetipo/stock-logistics-workflow,acsone/stock-logistics-workflow,BT-fgarbely/stock-logistics-workflow,vrenaville/stock-logistics-workflow,xpansa/stock-logistics-workflow,akretion/stock-logistics-workflow,raycarnes/stock-logistics-workflow,Endika/stock-logistics-workflow,xpansa/stock-logistics-workflow,acsone/stock-logistics-workflow,brain-tec/stock-logistics-workflow,open-synergy/stock-logistics-workflow,Eficent/stock-logistics-workflow,damdam-s/stock-logistics-workflow,OpenCode/stock-logistics-workflow,akretion/stock-logistics-workflow
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': "Stock move description",
'version': '1.0',
'category': 'Warehouse Management',
'author': "Agile Business Group, Odoo Community Association (OCA)",
'website': 'http://www.agilebg.com',
'license': 'AGPL-3',
'depends': [
'stock_account',
'delivery',
],
'data': [
'security/stock_security.xml',
'stock_config_settings_view.xml',
'stock_move_view.xml',
],
'test': [
'test/stock_move_description.yml',
],
'installable': True
}
Remove delivery from depends as useless
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': "Stock move description",
'version': '1.0',
'category': 'Warehouse Management',
'author': "Agile Business Group, Odoo Community Association (OCA)",
'website': 'http://www.agilebg.com',
'license': 'AGPL-3',
'depends': [
'stock_account',
],
'data': [
'security/stock_security.xml',
'stock_config_settings_view.xml',
'stock_move_view.xml',
],
'test': [
'test/stock_move_description.yml',
],
'installable': True
}
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': "Stock move description",
'version': '1.0',
'category': 'Warehouse Management',
'author': "Agile Business Group, Odoo Community Association (OCA)",
'website': 'http://www.agilebg.com',
'license': 'AGPL-3',
'depends': [
'stock_account',
'delivery',
],
'data': [
'security/stock_security.xml',
'stock_config_settings_view.xml',
'stock_move_view.xml',
],
'test': [
'test/stock_move_description.yml',
],
'installable': True
}
<commit_msg>Remove delivery from depends as useless<commit_after>
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': "Stock move description",
'version': '1.0',
'category': 'Warehouse Management',
'author': "Agile Business Group, Odoo Community Association (OCA)",
'website': 'http://www.agilebg.com',
'license': 'AGPL-3',
'depends': [
'stock_account',
],
'data': [
'security/stock_security.xml',
'stock_config_settings_view.xml',
'stock_move_view.xml',
],
'test': [
'test/stock_move_description.yml',
],
'installable': True
}
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': "Stock move description",
'version': '1.0',
'category': 'Warehouse Management',
'author': "Agile Business Group, Odoo Community Association (OCA)",
'website': 'http://www.agilebg.com',
'license': 'AGPL-3',
'depends': [
'stock_account',
'delivery',
],
'data': [
'security/stock_security.xml',
'stock_config_settings_view.xml',
'stock_move_view.xml',
],
'test': [
'test/stock_move_description.yml',
],
'installable': True
}
Remove delivery from depends as useless# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': "Stock move description",
'version': '1.0',
'category': 'Warehouse Management',
'author': "Agile Business Group, Odoo Community Association (OCA)",
'website': 'http://www.agilebg.com',
'license': 'AGPL-3',
'depends': [
'stock_account',
],
'data': [
'security/stock_security.xml',
'stock_config_settings_view.xml',
'stock_move_view.xml',
],
'test': [
'test/stock_move_description.yml',
],
'installable': True
}
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': "Stock move description",
'version': '1.0',
'category': 'Warehouse Management',
'author': "Agile Business Group, Odoo Community Association (OCA)",
'website': 'http://www.agilebg.com',
'license': 'AGPL-3',
'depends': [
'stock_account',
'delivery',
],
'data': [
'security/stock_security.xml',
'stock_config_settings_view.xml',
'stock_move_view.xml',
],
'test': [
'test/stock_move_description.yml',
],
'installable': True
}
<commit_msg>Remove delivery from depends as useless<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': "Stock move description",
'version': '1.0',
'category': 'Warehouse Management',
'author': "Agile Business Group, Odoo Community Association (OCA)",
'website': 'http://www.agilebg.com',
'license': 'AGPL-3',
'depends': [
'stock_account',
],
'data': [
'security/stock_security.xml',
'stock_config_settings_view.xml',
'stock_move_view.xml',
],
'test': [
'test/stock_move_description.yml',
],
'installable': True
}
|
d4dd06558287c655477ce9da9542f748d0261695
|
notebooks/computer_vision/track_meta.py
|
notebooks/computer_vision/track_meta.py
|
# See also examples/example_track/track_meta.py for a longer, commented example
track = dict(
author_username='ryanholbrook',
course_name='computer_vision',
course_url='https://www.kaggle.com/ryanholbrook/computer-vision'
)
lessons = [
dict(
# By convention, this should be a lowercase noun-phrase.
topic='Testing',
),
]
notebooks = [
dict(
filename='test.ipynb',
lesson_idx=0,
type='exercise',
scriptid=1,
),
]
for nb in notebooks:
nb['dataset_sources'] = ["ryanholbrook/stanford-cars-for-learn"]
|
# See also examples/example_track/track_meta.py for a longer, commented example
track = dict(
author_username='ryanholbrook',
course_name='Computer Vision',
course_url='https://www.kaggle.com/ryanholbrook/computer-vision'
)
lessons = [
{'topic': topic_name} for topic_name in
[
'The Convolutional Classifier',
'Convnet Architecture',
'Filter, Detect, Condense',
# 'Convolution and Pooling',
# 'Exploring Convnets',
# 'Transfer Learning',
# 'Data Augmentation',
]
]
notebooks = [
dict(
filename='tut1.ipynb',
lesson_idx=0,
type='tutorial',
),
dict(
filename='tut2.ipynb',
lesson_idx=1,
type='tutorial',
),
dict(
filename='tut3.ipynb',
lesson_idx=2,
type='tutorial',
),
]
for nb in notebooks:
nb['dataset_sources'] = [
"ryanholbrook/stanford-cars-for-learn",
"ryanholbrook/saved-models",
]
|
Add tracking for lessons 1, 2, 3
|
Add tracking for lessons 1, 2, 3
|
Python
|
apache-2.0
|
Kaggle/learntools,Kaggle/learntools
|
# See also examples/example_track/track_meta.py for a longer, commented example
track = dict(
author_username='ryanholbrook',
course_name='computer_vision',
course_url='https://www.kaggle.com/ryanholbrook/computer-vision'
)
lessons = [
dict(
# By convention, this should be a lowercase noun-phrase.
topic='Testing',
),
]
notebooks = [
dict(
filename='test.ipynb',
lesson_idx=0,
type='exercise',
scriptid=1,
),
]
for nb in notebooks:
nb['dataset_sources'] = ["ryanholbrook/stanford-cars-for-learn"]
Add tracking for lessons 1, 2, 3
|
# See also examples/example_track/track_meta.py for a longer, commented example
track = dict(
author_username='ryanholbrook',
course_name='Computer Vision',
course_url='https://www.kaggle.com/ryanholbrook/computer-vision'
)
lessons = [
{'topic': topic_name} for topic_name in
[
'The Convolutional Classifier',
'Convnet Architecture',
'Filter, Detect, Condense',
# 'Convolution and Pooling',
# 'Exploring Convnets',
# 'Transfer Learning',
# 'Data Augmentation',
]
]
notebooks = [
dict(
filename='tut1.ipynb',
lesson_idx=0,
type='tutorial',
),
dict(
filename='tut2.ipynb',
lesson_idx=1,
type='tutorial',
),
dict(
filename='tut3.ipynb',
lesson_idx=2,
type='tutorial',
),
]
for nb in notebooks:
nb['dataset_sources'] = [
"ryanholbrook/stanford-cars-for-learn",
"ryanholbrook/saved-models",
]
|
<commit_before># See also examples/example_track/track_meta.py for a longer, commented example
track = dict(
author_username='ryanholbrook',
course_name='computer_vision',
course_url='https://www.kaggle.com/ryanholbrook/computer-vision'
)
lessons = [
dict(
# By convention, this should be a lowercase noun-phrase.
topic='Testing',
),
]
notebooks = [
dict(
filename='test.ipynb',
lesson_idx=0,
type='exercise',
scriptid=1,
),
]
for nb in notebooks:
nb['dataset_sources'] = ["ryanholbrook/stanford-cars-for-learn"]
<commit_msg>Add tracking for lessons 1, 2, 3<commit_after>
|
# See also examples/example_track/track_meta.py for a longer, commented example
track = dict(
author_username='ryanholbrook',
course_name='Computer Vision',
course_url='https://www.kaggle.com/ryanholbrook/computer-vision'
)
lessons = [
{'topic': topic_name} for topic_name in
[
'The Convolutional Classifier',
'Convnet Architecture',
'Filter, Detect, Condense',
# 'Convolution and Pooling',
# 'Exploring Convnets',
# 'Transfer Learning',
# 'Data Augmentation',
]
]
notebooks = [
dict(
filename='tut1.ipynb',
lesson_idx=0,
type='tutorial',
),
dict(
filename='tut2.ipynb',
lesson_idx=1,
type='tutorial',
),
dict(
filename='tut3.ipynb',
lesson_idx=2,
type='tutorial',
),
]
for nb in notebooks:
nb['dataset_sources'] = [
"ryanholbrook/stanford-cars-for-learn",
"ryanholbrook/saved-models",
]
|
# See also examples/example_track/track_meta.py for a longer, commented example
track = dict(
author_username='ryanholbrook',
course_name='computer_vision',
course_url='https://www.kaggle.com/ryanholbrook/computer-vision'
)
lessons = [
dict(
# By convention, this should be a lowercase noun-phrase.
topic='Testing',
),
]
notebooks = [
dict(
filename='test.ipynb',
lesson_idx=0,
type='exercise',
scriptid=1,
),
]
for nb in notebooks:
nb['dataset_sources'] = ["ryanholbrook/stanford-cars-for-learn"]
Add tracking for lessons 1, 2, 3# See also examples/example_track/track_meta.py for a longer, commented example
track = dict(
author_username='ryanholbrook',
course_name='Computer Vision',
course_url='https://www.kaggle.com/ryanholbrook/computer-vision'
)
lessons = [
{'topic': topic_name} for topic_name in
[
'The Convolutional Classifier',
'Convnet Architecture',
'Filter, Detect, Condense',
# 'Convolution and Pooling',
# 'Exploring Convnets',
# 'Transfer Learning',
# 'Data Augmentation',
]
]
notebooks = [
dict(
filename='tut1.ipynb',
lesson_idx=0,
type='tutorial',
),
dict(
filename='tut2.ipynb',
lesson_idx=1,
type='tutorial',
),
dict(
filename='tut3.ipynb',
lesson_idx=2,
type='tutorial',
),
]
for nb in notebooks:
nb['dataset_sources'] = [
"ryanholbrook/stanford-cars-for-learn",
"ryanholbrook/saved-models",
]
|
<commit_before># See also examples/example_track/track_meta.py for a longer, commented example
track = dict(
author_username='ryanholbrook',
course_name='computer_vision',
course_url='https://www.kaggle.com/ryanholbrook/computer-vision'
)
lessons = [
dict(
# By convention, this should be a lowercase noun-phrase.
topic='Testing',
),
]
notebooks = [
dict(
filename='test.ipynb',
lesson_idx=0,
type='exercise',
scriptid=1,
),
]
for nb in notebooks:
nb['dataset_sources'] = ["ryanholbrook/stanford-cars-for-learn"]
<commit_msg>Add tracking for lessons 1, 2, 3<commit_after># See also examples/example_track/track_meta.py for a longer, commented example
track = dict(
author_username='ryanholbrook',
course_name='Computer Vision',
course_url='https://www.kaggle.com/ryanholbrook/computer-vision'
)
lessons = [
{'topic': topic_name} for topic_name in
[
'The Convolutional Classifier',
'Convnet Architecture',
'Filter, Detect, Condense',
# 'Convolution and Pooling',
# 'Exploring Convnets',
# 'Transfer Learning',
# 'Data Augmentation',
]
]
notebooks = [
dict(
filename='tut1.ipynb',
lesson_idx=0,
type='tutorial',
),
dict(
filename='tut2.ipynb',
lesson_idx=1,
type='tutorial',
),
dict(
filename='tut3.ipynb',
lesson_idx=2,
type='tutorial',
),
]
for nb in notebooks:
nb['dataset_sources'] = [
"ryanholbrook/stanford-cars-for-learn",
"ryanholbrook/saved-models",
]
|
2e63438deb6f733e7e905f4ea299aa0bdce88b3c
|
changes/api/author_build_index.py
|
changes/api/author_build_index.py
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return
return Author.query.filter_by(email=user.email).first()
return Author.query.get(author_id)
def get(self, author_id):
if author_id == 'me' and not get_current_user():
return '', 401
author = self._get_author(author_id)
if not author:
return self.respond([])
queryset = Build.query.options(
joinedload('project'),
joinedload('author'),
joinedload('source').joinedload('revision'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from uuid import UUID
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return None
return Author.query.filter_by(email=user.email).first()
try:
author_id = UUID(author_id)
except ValueError:
return None
return Author.query.get(author_id)
def get(self, author_id):
if author_id == 'me' and not get_current_user():
return '', 401
author = self._get_author(author_id)
if not author:
return '', 404
queryset = Build.query.options(
joinedload('project'),
joinedload('author'),
joinedload('source').joinedload('revision'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
|
Validate author_id and return 404 for missing data
|
Validate author_id and return 404 for missing data
|
Python
|
apache-2.0
|
wfxiang08/changes,dropbox/changes,dropbox/changes,bowlofstew/changes,bowlofstew/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,bowlofstew/changes,wfxiang08/changes,bowlofstew/changes,dropbox/changes
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return
return Author.query.filter_by(email=user.email).first()
return Author.query.get(author_id)
def get(self, author_id):
if author_id == 'me' and not get_current_user():
return '', 401
author = self._get_author(author_id)
if not author:
return self.respond([])
queryset = Build.query.options(
joinedload('project'),
joinedload('author'),
joinedload('source').joinedload('revision'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
Validate author_id and return 404 for missing data
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from uuid import UUID
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return None
return Author.query.filter_by(email=user.email).first()
try:
author_id = UUID(author_id)
except ValueError:
return None
return Author.query.get(author_id)
def get(self, author_id):
if author_id == 'me' and not get_current_user():
return '', 401
author = self._get_author(author_id)
if not author:
return '', 404
queryset = Build.query.options(
joinedload('project'),
joinedload('author'),
joinedload('source').joinedload('revision'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
|
<commit_before>from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return
return Author.query.filter_by(email=user.email).first()
return Author.query.get(author_id)
def get(self, author_id):
if author_id == 'me' and not get_current_user():
return '', 401
author = self._get_author(author_id)
if not author:
return self.respond([])
queryset = Build.query.options(
joinedload('project'),
joinedload('author'),
joinedload('source').joinedload('revision'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
<commit_msg>Validate author_id and return 404 for missing data<commit_after>
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from uuid import UUID
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return None
return Author.query.filter_by(email=user.email).first()
try:
author_id = UUID(author_id)
except ValueError:
return None
return Author.query.get(author_id)
def get(self, author_id):
if author_id == 'me' and not get_current_user():
return '', 401
author = self._get_author(author_id)
if not author:
return '', 404
queryset = Build.query.options(
joinedload('project'),
joinedload('author'),
joinedload('source').joinedload('revision'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return
return Author.query.filter_by(email=user.email).first()
return Author.query.get(author_id)
def get(self, author_id):
if author_id == 'me' and not get_current_user():
return '', 401
author = self._get_author(author_id)
if not author:
return self.respond([])
queryset = Build.query.options(
joinedload('project'),
joinedload('author'),
joinedload('source').joinedload('revision'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
Validate author_id and return 404 for missing datafrom __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from uuid import UUID
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return None
return Author.query.filter_by(email=user.email).first()
try:
author_id = UUID(author_id)
except ValueError:
return None
return Author.query.get(author_id)
def get(self, author_id):
if author_id == 'me' and not get_current_user():
return '', 401
author = self._get_author(author_id)
if not author:
return '', 404
queryset = Build.query.options(
joinedload('project'),
joinedload('author'),
joinedload('source').joinedload('revision'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
|
<commit_before>from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return
return Author.query.filter_by(email=user.email).first()
return Author.query.get(author_id)
def get(self, author_id):
if author_id == 'me' and not get_current_user():
return '', 401
author = self._get_author(author_id)
if not author:
return self.respond([])
queryset = Build.query.options(
joinedload('project'),
joinedload('author'),
joinedload('source').joinedload('revision'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
<commit_msg>Validate author_id and return 404 for missing data<commit_after>from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from uuid import UUID
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return None
return Author.query.filter_by(email=user.email).first()
try:
author_id = UUID(author_id)
except ValueError:
return None
return Author.query.get(author_id)
def get(self, author_id):
if author_id == 'me' and not get_current_user():
return '', 401
author = self._get_author(author_id)
if not author:
return '', 404
queryset = Build.query.options(
joinedload('project'),
joinedload('author'),
joinedload('source').joinedload('revision'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
|
00435d8f0cc906878cd6084c78c17cbc5a49b66e
|
spacy/tests/parser/test_beam_parse.py
|
spacy/tests/parser/test_beam_parse.py
|
# coding: utf8
from __future__ import unicode_literals
import pytest
@pytest.mark.models('en')
def test_beam_parse(EN):
doc = EN(u'Australia is a country', disable=['ner'])
ents = EN.entity(doc, beam_width=2)
print(ents)
|
# coding: utf8
from __future__ import unicode_literals
import pytest
from ...language import Language
from ...pipeline import DependencyParser
@pytest.mark.models('en')
def test_beam_parse_en(EN):
doc = EN(u'Australia is a country', disable=['ner'])
ents = EN.entity(doc, beam_width=2)
print(ents)
def test_beam_parse():
nlp = Language()
nlp.add_pipe(DependencyParser(nlp.vocab), name='parser')
nlp.parser.add_label('nsubj')
nlp.begin_training()
doc = nlp.make_doc(u'Australia is a country')
nlp.parser(doc, beam_width=2)
|
Add extra beam parsing test
|
Add extra beam parsing test
|
Python
|
mit
|
aikramer2/spaCy,aikramer2/spaCy,aikramer2/spaCy,explosion/spaCy,honnibal/spaCy,honnibal/spaCy,recognai/spaCy,aikramer2/spaCy,explosion/spaCy,honnibal/spaCy,aikramer2/spaCy,spacy-io/spaCy,honnibal/spaCy,recognai/spaCy,spacy-io/spaCy,explosion/spaCy,recognai/spaCy,explosion/spaCy,recognai/spaCy,recognai/spaCy,spacy-io/spaCy,explosion/spaCy,spacy-io/spaCy,explosion/spaCy,spacy-io/spaCy,recognai/spaCy,spacy-io/spaCy,aikramer2/spaCy
|
# coding: utf8
from __future__ import unicode_literals
import pytest
@pytest.mark.models('en')
def test_beam_parse(EN):
doc = EN(u'Australia is a country', disable=['ner'])
ents = EN.entity(doc, beam_width=2)
print(ents)
Add extra beam parsing test
|
# coding: utf8
from __future__ import unicode_literals
import pytest
from ...language import Language
from ...pipeline import DependencyParser
@pytest.mark.models('en')
def test_beam_parse_en(EN):
doc = EN(u'Australia is a country', disable=['ner'])
ents = EN.entity(doc, beam_width=2)
print(ents)
def test_beam_parse():
nlp = Language()
nlp.add_pipe(DependencyParser(nlp.vocab), name='parser')
nlp.parser.add_label('nsubj')
nlp.begin_training()
doc = nlp.make_doc(u'Australia is a country')
nlp.parser(doc, beam_width=2)
|
<commit_before># coding: utf8
from __future__ import unicode_literals
import pytest
@pytest.mark.models('en')
def test_beam_parse(EN):
doc = EN(u'Australia is a country', disable=['ner'])
ents = EN.entity(doc, beam_width=2)
print(ents)
<commit_msg>Add extra beam parsing test<commit_after>
|
# coding: utf8
from __future__ import unicode_literals
import pytest
from ...language import Language
from ...pipeline import DependencyParser
@pytest.mark.models('en')
def test_beam_parse_en(EN):
doc = EN(u'Australia is a country', disable=['ner'])
ents = EN.entity(doc, beam_width=2)
print(ents)
def test_beam_parse():
nlp = Language()
nlp.add_pipe(DependencyParser(nlp.vocab), name='parser')
nlp.parser.add_label('nsubj')
nlp.begin_training()
doc = nlp.make_doc(u'Australia is a country')
nlp.parser(doc, beam_width=2)
|
# coding: utf8
from __future__ import unicode_literals
import pytest
@pytest.mark.models('en')
def test_beam_parse(EN):
doc = EN(u'Australia is a country', disable=['ner'])
ents = EN.entity(doc, beam_width=2)
print(ents)
Add extra beam parsing test# coding: utf8
from __future__ import unicode_literals
import pytest
from ...language import Language
from ...pipeline import DependencyParser
@pytest.mark.models('en')
def test_beam_parse_en(EN):
doc = EN(u'Australia is a country', disable=['ner'])
ents = EN.entity(doc, beam_width=2)
print(ents)
def test_beam_parse():
nlp = Language()
nlp.add_pipe(DependencyParser(nlp.vocab), name='parser')
nlp.parser.add_label('nsubj')
nlp.begin_training()
doc = nlp.make_doc(u'Australia is a country')
nlp.parser(doc, beam_width=2)
|
<commit_before># coding: utf8
from __future__ import unicode_literals
import pytest
@pytest.mark.models('en')
def test_beam_parse(EN):
doc = EN(u'Australia is a country', disable=['ner'])
ents = EN.entity(doc, beam_width=2)
print(ents)
<commit_msg>Add extra beam parsing test<commit_after># coding: utf8
from __future__ import unicode_literals
import pytest
from ...language import Language
from ...pipeline import DependencyParser
@pytest.mark.models('en')
def test_beam_parse_en(EN):
doc = EN(u'Australia is a country', disable=['ner'])
ents = EN.entity(doc, beam_width=2)
print(ents)
def test_beam_parse():
nlp = Language()
nlp.add_pipe(DependencyParser(nlp.vocab), name='parser')
nlp.parser.add_label('nsubj')
nlp.begin_training()
doc = nlp.make_doc(u'Australia is a country')
nlp.parser(doc, beam_width=2)
|
c03b731d9fcd64a0989c6b73245578eafc099b4f
|
greenquote.py
|
greenquote.py
|
import sys
import os
from threading import Thread
from flask import Flask
import pandas as pd
sys.path.insert(0, "../financialScraper")
from financialScraper import getqf
from sqlalchemy import create_engine
app = Flask(__name__)
app.config['DATABASE'] = os.environ.get(
'HEROKU_POSTGRESQL_GOLD_URL', ''
)
engine = create_engine(app.config['DATABASE'])
display_val = "Loading data..."
# def load_data():
# dfdict = getqf.scraper()
# df = dfdict['nsdqct.csv']
# df.to_sql(name='entries', con = engine, if_exists = 'replace')
# output = pd.read_sql_query('SELECT * FROM entries', engine)
# mean = output[[2]].mean()
# display_val = u"The mean is :" + str(mean)
# thread1 = Thread(target = load_data)
# thread1.start()
@app.route('/')
def hello():
return display_val
if __name__ == "__main__":
app.run(debug=True)
|
import sys
import os
from threading import Thread
from flask import Flask
import pandas as pd
sys.path.insert(0, "../financialScraper")
from financialScraper import getqf
from sqlalchemy import create_engine
app = Flask(__name__)
# app.config['DATABASE'] = os.environ.get(
# 'HEROKU_POSTGRESQL_GOLD_URL', ''
# )
# engine = create_engine(app.config['DATABASE'])
display_val = "Loading data..."
# def load_data():
# dfdict = getqf.scraper()
# df = dfdict['nsdqct.csv']
# df.to_sql(name='entries', con = engine, if_exists = 'replace')
# output = pd.read_sql_query('SELECT * FROM entries', engine)
# mean = output[[2]].mean()
# display_val = u"The mean is :" + str(mean)
# thread1 = Thread(target = load_data)
# thread1.start()
@app.route('/')
def hello():
return display_val
if __name__ == "__main__":
app.run(debug=True)
|
Comment more to test heroku deployment.
|
Comment more to test heroku deployment.
|
Python
|
mit
|
caseymacphee/green_quote,caseymacphee/green_quote
|
import sys
import os
from threading import Thread
from flask import Flask
import pandas as pd
sys.path.insert(0, "../financialScraper")
from financialScraper import getqf
from sqlalchemy import create_engine
app = Flask(__name__)
app.config['DATABASE'] = os.environ.get(
'HEROKU_POSTGRESQL_GOLD_URL', ''
)
engine = create_engine(app.config['DATABASE'])
display_val = "Loading data..."
# def load_data():
# dfdict = getqf.scraper()
# df = dfdict['nsdqct.csv']
# df.to_sql(name='entries', con = engine, if_exists = 'replace')
# output = pd.read_sql_query('SELECT * FROM entries', engine)
# mean = output[[2]].mean()
# display_val = u"The mean is :" + str(mean)
# thread1 = Thread(target = load_data)
# thread1.start()
@app.route('/')
def hello():
return display_val
if __name__ == "__main__":
app.run(debug=True)
Comment more to test heroku deployment.
|
import sys
import os
from threading import Thread
from flask import Flask
import pandas as pd
sys.path.insert(0, "../financialScraper")
from financialScraper import getqf
from sqlalchemy import create_engine
app = Flask(__name__)
# app.config['DATABASE'] = os.environ.get(
# 'HEROKU_POSTGRESQL_GOLD_URL', ''
# )
# engine = create_engine(app.config['DATABASE'])
display_val = "Loading data..."
# def load_data():
# dfdict = getqf.scraper()
# df = dfdict['nsdqct.csv']
# df.to_sql(name='entries', con = engine, if_exists = 'replace')
# output = pd.read_sql_query('SELECT * FROM entries', engine)
# mean = output[[2]].mean()
# display_val = u"The mean is :" + str(mean)
# thread1 = Thread(target = load_data)
# thread1.start()
@app.route('/')
def hello():
return display_val
if __name__ == "__main__":
app.run(debug=True)
|
<commit_before>import sys
import os
from threading import Thread
from flask import Flask
import pandas as pd
sys.path.insert(0, "../financialScraper")
from financialScraper import getqf
from sqlalchemy import create_engine
app = Flask(__name__)
app.config['DATABASE'] = os.environ.get(
'HEROKU_POSTGRESQL_GOLD_URL', ''
)
engine = create_engine(app.config['DATABASE'])
display_val = "Loading data..."
# def load_data():
# dfdict = getqf.scraper()
# df = dfdict['nsdqct.csv']
# df.to_sql(name='entries', con = engine, if_exists = 'replace')
# output = pd.read_sql_query('SELECT * FROM entries', engine)
# mean = output[[2]].mean()
# display_val = u"The mean is :" + str(mean)
# thread1 = Thread(target = load_data)
# thread1.start()
@app.route('/')
def hello():
return display_val
if __name__ == "__main__":
app.run(debug=True)
<commit_msg>Comment more to test heroku deployment.<commit_after>
|
import sys
import os
from threading import Thread
from flask import Flask
import pandas as pd
sys.path.insert(0, "../financialScraper")
from financialScraper import getqf
from sqlalchemy import create_engine
app = Flask(__name__)
# app.config['DATABASE'] = os.environ.get(
# 'HEROKU_POSTGRESQL_GOLD_URL', ''
# )
# engine = create_engine(app.config['DATABASE'])
display_val = "Loading data..."
# def load_data():
# dfdict = getqf.scraper()
# df = dfdict['nsdqct.csv']
# df.to_sql(name='entries', con = engine, if_exists = 'replace')
# output = pd.read_sql_query('SELECT * FROM entries', engine)
# mean = output[[2]].mean()
# display_val = u"The mean is :" + str(mean)
# thread1 = Thread(target = load_data)
# thread1.start()
@app.route('/')
def hello():
return display_val
if __name__ == "__main__":
app.run(debug=True)
|
import sys
import os
from threading import Thread
from flask import Flask
import pandas as pd
sys.path.insert(0, "../financialScraper")
from financialScraper import getqf
from sqlalchemy import create_engine
app = Flask(__name__)
app.config['DATABASE'] = os.environ.get(
'HEROKU_POSTGRESQL_GOLD_URL', ''
)
engine = create_engine(app.config['DATABASE'])
display_val = "Loading data..."
# def load_data():
# dfdict = getqf.scraper()
# df = dfdict['nsdqct.csv']
# df.to_sql(name='entries', con = engine, if_exists = 'replace')
# output = pd.read_sql_query('SELECT * FROM entries', engine)
# mean = output[[2]].mean()
# display_val = u"The mean is :" + str(mean)
# thread1 = Thread(target = load_data)
# thread1.start()
@app.route('/')
def hello():
return display_val
if __name__ == "__main__":
app.run(debug=True)
Comment more to test heroku deployment.import sys
import os
from threading import Thread
from flask import Flask
import pandas as pd
sys.path.insert(0, "../financialScraper")
from financialScraper import getqf
from sqlalchemy import create_engine
app = Flask(__name__)
# app.config['DATABASE'] = os.environ.get(
# 'HEROKU_POSTGRESQL_GOLD_URL', ''
# )
# engine = create_engine(app.config['DATABASE'])
display_val = "Loading data..."
# def load_data():
# dfdict = getqf.scraper()
# df = dfdict['nsdqct.csv']
# df.to_sql(name='entries', con = engine, if_exists = 'replace')
# output = pd.read_sql_query('SELECT * FROM entries', engine)
# mean = output[[2]].mean()
# display_val = u"The mean is :" + str(mean)
# thread1 = Thread(target = load_data)
# thread1.start()
@app.route('/')
def hello():
return display_val
if __name__ == "__main__":
app.run(debug=True)
|
<commit_before>import sys
import os
from threading import Thread
from flask import Flask
import pandas as pd
sys.path.insert(0, "../financialScraper")
from financialScraper import getqf
from sqlalchemy import create_engine
app = Flask(__name__)
app.config['DATABASE'] = os.environ.get(
'HEROKU_POSTGRESQL_GOLD_URL', ''
)
engine = create_engine(app.config['DATABASE'])
display_val = "Loading data..."
# def load_data():
# dfdict = getqf.scraper()
# df = dfdict['nsdqct.csv']
# df.to_sql(name='entries', con = engine, if_exists = 'replace')
# output = pd.read_sql_query('SELECT * FROM entries', engine)
# mean = output[[2]].mean()
# display_val = u"The mean is :" + str(mean)
# thread1 = Thread(target = load_data)
# thread1.start()
@app.route('/')
def hello():
return display_val
if __name__ == "__main__":
app.run(debug=True)
<commit_msg>Comment more to test heroku deployment.<commit_after>import sys
import os
from threading import Thread
from flask import Flask
import pandas as pd
sys.path.insert(0, "../financialScraper")
from financialScraper import getqf
from sqlalchemy import create_engine
app = Flask(__name__)
# app.config['DATABASE'] = os.environ.get(
# 'HEROKU_POSTGRESQL_GOLD_URL', ''
# )
# engine = create_engine(app.config['DATABASE'])
display_val = "Loading data..."
# def load_data():
# dfdict = getqf.scraper()
# df = dfdict['nsdqct.csv']
# df.to_sql(name='entries', con = engine, if_exists = 'replace')
# output = pd.read_sql_query('SELECT * FROM entries', engine)
# mean = output[[2]].mean()
# display_val = u"The mean is :" + str(mean)
# thread1 = Thread(target = load_data)
# thread1.start()
@app.route('/')
def hello():
return display_val
if __name__ == "__main__":
app.run(debug=True)
|
671aeff6fbdab93945a7b8a8f242bff9afc6a613
|
src/odin/fields/future.py
|
src/odin/fields/future.py
|
from __future__ import absolute_import
from enum import Enum
from typing import TypeVar, Optional, Any, Type # noqa
from odin.exceptions import ValidationError
from . import Field
__all__ = ("EnumField",)
ET = TypeVar("ET", Enum, Enum)
class EnumField(Field):
"""
Field for handling Python enums.
"""
data_type_name = "Enum"
def __init__(self, enum, **options):
# type: (Type[ET], **Any) -> None
# Generate choices structure from choices
choices = options.pop("choices", None)
options["choices"] = tuple((e, e.name) for e in choices or enum)
super(EnumField, self).__init__(**options)
self.enum = enum
@property
def choices_doc_text(self):
"""
Choices converted for documentation purposes.
"""
return tuple((v.value, n) for v, n in self.choices)
def to_python(self, value):
# type: (Any) -> Optional[ET]
if value is None:
return
# Attempt to convert
try:
return self.enum(value)
except ValueError:
# If value is an empty string return None
# Do this check here to support enums that define an option using
# an empty string.
if value is "":
return
raise ValidationError(self.error_messages["invalid_choice"] % value)
def prepare(self, value):
# type: (Optional[ET]) -> Any
if value in self.enum:
return value.value
|
from __future__ import absolute_import
from enum import Enum
from typing import TypeVar, Optional, Any, Type # noqa
from odin.exceptions import ValidationError
from . import Field
__all__ = ("EnumField",)
ET = TypeVar("ET", Enum, Enum)
class EnumField(Field):
"""
Field for handling Python enums.
"""
data_type_name = "Enum"
def __init__(self, enum, **options):
# type: (Type[ET], **Any) -> None
# Generate choices structure from choices
choices = options.pop("choices", None)
options["choices"] = tuple((e, e.name) for e in choices or enum)
super(EnumField, self).__init__(**options)
self.enum = enum
@property
def choices_doc_text(self):
"""
Choices converted for documentation purposes.
"""
return tuple((v.value, n) for v, n in self.choices)
def to_python(self, value):
# type: (Any) -> Optional[ET]
if value is None:
return
# Attempt to convert
try:
return self.enum(value)
except ValueError:
# If value is an empty string return None
# Do this check here to support enums that define an option using
# an empty string.
if value == "":
return
raise ValidationError(self.error_messages["invalid_choice"] % value)
def prepare(self, value):
# type: (Optional[ET]) -> Any
if (value is not None) and (value in self.enum):
return value.value
|
Fix value is "" hand value being None in prepare
|
Fix value is "" hand value being None in prepare
|
Python
|
bsd-3-clause
|
python-odin/odin
|
from __future__ import absolute_import
from enum import Enum
from typing import TypeVar, Optional, Any, Type # noqa
from odin.exceptions import ValidationError
from . import Field
__all__ = ("EnumField",)
ET = TypeVar("ET", Enum, Enum)
class EnumField(Field):
"""
Field for handling Python enums.
"""
data_type_name = "Enum"
def __init__(self, enum, **options):
# type: (Type[ET], **Any) -> None
# Generate choices structure from choices
choices = options.pop("choices", None)
options["choices"] = tuple((e, e.name) for e in choices or enum)
super(EnumField, self).__init__(**options)
self.enum = enum
@property
def choices_doc_text(self):
"""
Choices converted for documentation purposes.
"""
return tuple((v.value, n) for v, n in self.choices)
def to_python(self, value):
# type: (Any) -> Optional[ET]
if value is None:
return
# Attempt to convert
try:
return self.enum(value)
except ValueError:
# If value is an empty string return None
# Do this check here to support enums that define an option using
# an empty string.
if value is "":
return
raise ValidationError(self.error_messages["invalid_choice"] % value)
def prepare(self, value):
# type: (Optional[ET]) -> Any
if value in self.enum:
return value.value
Fix value is "" hand value being None in prepare
|
from __future__ import absolute_import
from enum import Enum
from typing import TypeVar, Optional, Any, Type # noqa
from odin.exceptions import ValidationError
from . import Field
__all__ = ("EnumField",)
ET = TypeVar("ET", Enum, Enum)
class EnumField(Field):
"""
Field for handling Python enums.
"""
data_type_name = "Enum"
def __init__(self, enum, **options):
# type: (Type[ET], **Any) -> None
# Generate choices structure from choices
choices = options.pop("choices", None)
options["choices"] = tuple((e, e.name) for e in choices or enum)
super(EnumField, self).__init__(**options)
self.enum = enum
@property
def choices_doc_text(self):
"""
Choices converted for documentation purposes.
"""
return tuple((v.value, n) for v, n in self.choices)
def to_python(self, value):
# type: (Any) -> Optional[ET]
if value is None:
return
# Attempt to convert
try:
return self.enum(value)
except ValueError:
# If value is an empty string return None
# Do this check here to support enums that define an option using
# an empty string.
if value == "":
return
raise ValidationError(self.error_messages["invalid_choice"] % value)
def prepare(self, value):
# type: (Optional[ET]) -> Any
if (value is not None) and (value in self.enum):
return value.value
|
<commit_before>from __future__ import absolute_import
from enum import Enum
from typing import TypeVar, Optional, Any, Type # noqa
from odin.exceptions import ValidationError
from . import Field
__all__ = ("EnumField",)
ET = TypeVar("ET", Enum, Enum)
class EnumField(Field):
"""
Field for handling Python enums.
"""
data_type_name = "Enum"
def __init__(self, enum, **options):
# type: (Type[ET], **Any) -> None
# Generate choices structure from choices
choices = options.pop("choices", None)
options["choices"] = tuple((e, e.name) for e in choices or enum)
super(EnumField, self).__init__(**options)
self.enum = enum
@property
def choices_doc_text(self):
"""
Choices converted for documentation purposes.
"""
return tuple((v.value, n) for v, n in self.choices)
def to_python(self, value):
# type: (Any) -> Optional[ET]
if value is None:
return
# Attempt to convert
try:
return self.enum(value)
except ValueError:
# If value is an empty string return None
# Do this check here to support enums that define an option using
# an empty string.
if value is "":
return
raise ValidationError(self.error_messages["invalid_choice"] % value)
def prepare(self, value):
# type: (Optional[ET]) -> Any
if value in self.enum:
return value.value
<commit_msg>Fix value is "" hand value being None in prepare<commit_after>
|
from __future__ import absolute_import
from enum import Enum
from typing import TypeVar, Optional, Any, Type # noqa
from odin.exceptions import ValidationError
from . import Field
__all__ = ("EnumField",)
ET = TypeVar("ET", Enum, Enum)
class EnumField(Field):
"""
Field for handling Python enums.
"""
data_type_name = "Enum"
def __init__(self, enum, **options):
# type: (Type[ET], **Any) -> None
# Generate choices structure from choices
choices = options.pop("choices", None)
options["choices"] = tuple((e, e.name) for e in choices or enum)
super(EnumField, self).__init__(**options)
self.enum = enum
@property
def choices_doc_text(self):
"""
Choices converted for documentation purposes.
"""
return tuple((v.value, n) for v, n in self.choices)
def to_python(self, value):
# type: (Any) -> Optional[ET]
if value is None:
return
# Attempt to convert
try:
return self.enum(value)
except ValueError:
# If value is an empty string return None
# Do this check here to support enums that define an option using
# an empty string.
if value == "":
return
raise ValidationError(self.error_messages["invalid_choice"] % value)
def prepare(self, value):
# type: (Optional[ET]) -> Any
if (value is not None) and (value in self.enum):
return value.value
|
from __future__ import absolute_import
from enum import Enum
from typing import TypeVar, Optional, Any, Type # noqa
from odin.exceptions import ValidationError
from . import Field
__all__ = ("EnumField",)
ET = TypeVar("ET", Enum, Enum)
class EnumField(Field):
"""
Field for handling Python enums.
"""
data_type_name = "Enum"
def __init__(self, enum, **options):
# type: (Type[ET], **Any) -> None
# Generate choices structure from choices
choices = options.pop("choices", None)
options["choices"] = tuple((e, e.name) for e in choices or enum)
super(EnumField, self).__init__(**options)
self.enum = enum
@property
def choices_doc_text(self):
"""
Choices converted for documentation purposes.
"""
return tuple((v.value, n) for v, n in self.choices)
def to_python(self, value):
# type: (Any) -> Optional[ET]
if value is None:
return
# Attempt to convert
try:
return self.enum(value)
except ValueError:
# If value is an empty string return None
# Do this check here to support enums that define an option using
# an empty string.
if value is "":
return
raise ValidationError(self.error_messages["invalid_choice"] % value)
def prepare(self, value):
# type: (Optional[ET]) -> Any
if value in self.enum:
return value.value
Fix value is "" hand value being None in preparefrom __future__ import absolute_import
from enum import Enum
from typing import TypeVar, Optional, Any, Type # noqa
from odin.exceptions import ValidationError
from . import Field
__all__ = ("EnumField",)
ET = TypeVar("ET", Enum, Enum)
class EnumField(Field):
"""
Field for handling Python enums.
"""
data_type_name = "Enum"
def __init__(self, enum, **options):
# type: (Type[ET], **Any) -> None
# Generate choices structure from choices
choices = options.pop("choices", None)
options["choices"] = tuple((e, e.name) for e in choices or enum)
super(EnumField, self).__init__(**options)
self.enum = enum
@property
def choices_doc_text(self):
"""
Choices converted for documentation purposes.
"""
return tuple((v.value, n) for v, n in self.choices)
def to_python(self, value):
# type: (Any) -> Optional[ET]
if value is None:
return
# Attempt to convert
try:
return self.enum(value)
except ValueError:
# If value is an empty string return None
# Do this check here to support enums that define an option using
# an empty string.
if value == "":
return
raise ValidationError(self.error_messages["invalid_choice"] % value)
def prepare(self, value):
# type: (Optional[ET]) -> Any
if (value is not None) and (value in self.enum):
return value.value
|
<commit_before>from __future__ import absolute_import
from enum import Enum
from typing import TypeVar, Optional, Any, Type # noqa
from odin.exceptions import ValidationError
from . import Field
__all__ = ("EnumField",)
ET = TypeVar("ET", Enum, Enum)
class EnumField(Field):
"""
Field for handling Python enums.
"""
data_type_name = "Enum"
def __init__(self, enum, **options):
# type: (Type[ET], **Any) -> None
# Generate choices structure from choices
choices = options.pop("choices", None)
options["choices"] = tuple((e, e.name) for e in choices or enum)
super(EnumField, self).__init__(**options)
self.enum = enum
@property
def choices_doc_text(self):
"""
Choices converted for documentation purposes.
"""
return tuple((v.value, n) for v, n in self.choices)
def to_python(self, value):
# type: (Any) -> Optional[ET]
if value is None:
return
# Attempt to convert
try:
return self.enum(value)
except ValueError:
# If value is an empty string return None
# Do this check here to support enums that define an option using
# an empty string.
if value is "":
return
raise ValidationError(self.error_messages["invalid_choice"] % value)
def prepare(self, value):
# type: (Optional[ET]) -> Any
if value in self.enum:
return value.value
<commit_msg>Fix value is "" hand value being None in prepare<commit_after>from __future__ import absolute_import
from enum import Enum
from typing import TypeVar, Optional, Any, Type # noqa
from odin.exceptions import ValidationError
from . import Field
__all__ = ("EnumField",)
ET = TypeVar("ET", Enum, Enum)
class EnumField(Field):
"""
Field for handling Python enums.
"""
data_type_name = "Enum"
def __init__(self, enum, **options):
# type: (Type[ET], **Any) -> None
# Generate choices structure from choices
choices = options.pop("choices", None)
options["choices"] = tuple((e, e.name) for e in choices or enum)
super(EnumField, self).__init__(**options)
self.enum = enum
@property
def choices_doc_text(self):
"""
Choices converted for documentation purposes.
"""
return tuple((v.value, n) for v, n in self.choices)
def to_python(self, value):
# type: (Any) -> Optional[ET]
if value is None:
return
# Attempt to convert
try:
return self.enum(value)
except ValueError:
# If value is an empty string return None
# Do this check here to support enums that define an option using
# an empty string.
if value == "":
return
raise ValidationError(self.error_messages["invalid_choice"] % value)
def prepare(self, value):
# type: (Optional[ET]) -> Any
if (value is not None) and (value in self.enum):
return value.value
|
c8c3227cba90a931edb9ae7ee89c5318258a2f25
|
todoist/managers/live_notifications.py
|
todoist/managers/live_notifications.py
|
# -*- coding: utf-8 -*-
from .generic import Manager, GetByIdMixin, AllMixin, SyncMixin
class LiveNotificationsManager(Manager, GetByIdMixin, AllMixin, SyncMixin):
state_name = 'live_notifications'
object_type = None # there is no object type associated
def set_last_read(self, id):
"""
Sets in the local state the last notification read.
"""
cmd = {
'type': 'live_notifications_set_last_read',
'uuid': self.api.generate_uuid(),
'args': {
'id': id,
},
}
self.queue.append(cmd)
|
# -*- coding: utf-8 -*-
from .generic import Manager, GetByIdMixin, AllMixin, SyncMixin
class LiveNotificationsManager(Manager, GetByIdMixin, AllMixin, SyncMixin):
state_name = 'live_notifications'
object_type = None # there is no object type associated
def set_last_read(self, id):
"""
Sets the last known notification.
"""
cmd = {
'type': 'live_notifications_set_last_read',
'uuid': self.api.generate_uuid(),
'args': {
'id': id,
},
}
self.queue.append(cmd)
def mark_read(self, id):
"""
Marks notification as read.
"""
cmd = {
'type': 'live_notifications_mark_read',
'uuid': self.api.generate_uuid(),
'args': {
'id': id,
},
}
self.queue.append(cmd)
def mark_read_all(self):
"""
Marks all notifications as read.
"""
cmd = {
'type': 'live_notifications_mark_read_all',
'uuid': self.api.generate_uuid(),
}
self.queue.append(cmd)
def mark_unread(self, id):
"""
Marks notification as unread.
"""
cmd = {
'type': 'live_notifications_mark_unread',
'uuid': self.api.generate_uuid(),
'args': {
'id': id,
},
}
self.queue.append(cmd)
|
Add support for new is_unread live notification state.
|
Add support for new is_unread live notification state.
|
Python
|
mit
|
Doist/todoist-python
|
# -*- coding: utf-8 -*-
from .generic import Manager, GetByIdMixin, AllMixin, SyncMixin
class LiveNotificationsManager(Manager, GetByIdMixin, AllMixin, SyncMixin):
state_name = 'live_notifications'
object_type = None # there is no object type associated
def set_last_read(self, id):
"""
Sets in the local state the last notification read.
"""
cmd = {
'type': 'live_notifications_set_last_read',
'uuid': self.api.generate_uuid(),
'args': {
'id': id,
},
}
self.queue.append(cmd)
Add support for new is_unread live notification state.
|
# -*- coding: utf-8 -*-
from .generic import Manager, GetByIdMixin, AllMixin, SyncMixin
class LiveNotificationsManager(Manager, GetByIdMixin, AllMixin, SyncMixin):
state_name = 'live_notifications'
object_type = None # there is no object type associated
def set_last_read(self, id):
"""
Sets the last known notification.
"""
cmd = {
'type': 'live_notifications_set_last_read',
'uuid': self.api.generate_uuid(),
'args': {
'id': id,
},
}
self.queue.append(cmd)
def mark_read(self, id):
"""
Marks notification as read.
"""
cmd = {
'type': 'live_notifications_mark_read',
'uuid': self.api.generate_uuid(),
'args': {
'id': id,
},
}
self.queue.append(cmd)
def mark_read_all(self):
"""
Marks all notifications as read.
"""
cmd = {
'type': 'live_notifications_mark_read_all',
'uuid': self.api.generate_uuid(),
}
self.queue.append(cmd)
def mark_unread(self, id):
"""
Marks notification as unread.
"""
cmd = {
'type': 'live_notifications_mark_unread',
'uuid': self.api.generate_uuid(),
'args': {
'id': id,
},
}
self.queue.append(cmd)
|
<commit_before># -*- coding: utf-8 -*-
from .generic import Manager, GetByIdMixin, AllMixin, SyncMixin
class LiveNotificationsManager(Manager, GetByIdMixin, AllMixin, SyncMixin):
state_name = 'live_notifications'
object_type = None # there is no object type associated
def set_last_read(self, id):
"""
Sets in the local state the last notification read.
"""
cmd = {
'type': 'live_notifications_set_last_read',
'uuid': self.api.generate_uuid(),
'args': {
'id': id,
},
}
self.queue.append(cmd)
<commit_msg>Add support for new is_unread live notification state.<commit_after>
|
# -*- coding: utf-8 -*-
from .generic import Manager, GetByIdMixin, AllMixin, SyncMixin
class LiveNotificationsManager(Manager, GetByIdMixin, AllMixin, SyncMixin):
state_name = 'live_notifications'
object_type = None # there is no object type associated
def set_last_read(self, id):
"""
Sets the last known notification.
"""
cmd = {
'type': 'live_notifications_set_last_read',
'uuid': self.api.generate_uuid(),
'args': {
'id': id,
},
}
self.queue.append(cmd)
def mark_read(self, id):
"""
Marks notification as read.
"""
cmd = {
'type': 'live_notifications_mark_read',
'uuid': self.api.generate_uuid(),
'args': {
'id': id,
},
}
self.queue.append(cmd)
def mark_read_all(self):
"""
Marks all notifications as read.
"""
cmd = {
'type': 'live_notifications_mark_read_all',
'uuid': self.api.generate_uuid(),
}
self.queue.append(cmd)
def mark_unread(self, id):
"""
Marks notification as unread.
"""
cmd = {
'type': 'live_notifications_mark_unread',
'uuid': self.api.generate_uuid(),
'args': {
'id': id,
},
}
self.queue.append(cmd)
|
# -*- coding: utf-8 -*-
from .generic import Manager, GetByIdMixin, AllMixin, SyncMixin
class LiveNotificationsManager(Manager, GetByIdMixin, AllMixin, SyncMixin):
state_name = 'live_notifications'
object_type = None # there is no object type associated
def set_last_read(self, id):
"""
Sets in the local state the last notification read.
"""
cmd = {
'type': 'live_notifications_set_last_read',
'uuid': self.api.generate_uuid(),
'args': {
'id': id,
},
}
self.queue.append(cmd)
Add support for new is_unread live notification state.# -*- coding: utf-8 -*-
from .generic import Manager, GetByIdMixin, AllMixin, SyncMixin
class LiveNotificationsManager(Manager, GetByIdMixin, AllMixin, SyncMixin):
state_name = 'live_notifications'
object_type = None # there is no object type associated
def set_last_read(self, id):
"""
Sets the last known notification.
"""
cmd = {
'type': 'live_notifications_set_last_read',
'uuid': self.api.generate_uuid(),
'args': {
'id': id,
},
}
self.queue.append(cmd)
def mark_read(self, id):
"""
Marks notification as read.
"""
cmd = {
'type': 'live_notifications_mark_read',
'uuid': self.api.generate_uuid(),
'args': {
'id': id,
},
}
self.queue.append(cmd)
def mark_read_all(self):
"""
Marks all notifications as read.
"""
cmd = {
'type': 'live_notifications_mark_read_all',
'uuid': self.api.generate_uuid(),
}
self.queue.append(cmd)
def mark_unread(self, id):
"""
Marks notification as unread.
"""
cmd = {
'type': 'live_notifications_mark_unread',
'uuid': self.api.generate_uuid(),
'args': {
'id': id,
},
}
self.queue.append(cmd)
|
<commit_before># -*- coding: utf-8 -*-
from .generic import Manager, GetByIdMixin, AllMixin, SyncMixin
class LiveNotificationsManager(Manager, GetByIdMixin, AllMixin, SyncMixin):
state_name = 'live_notifications'
object_type = None # there is no object type associated
def set_last_read(self, id):
"""
Sets in the local state the last notification read.
"""
cmd = {
'type': 'live_notifications_set_last_read',
'uuid': self.api.generate_uuid(),
'args': {
'id': id,
},
}
self.queue.append(cmd)
<commit_msg>Add support for new is_unread live notification state.<commit_after># -*- coding: utf-8 -*-
from .generic import Manager, GetByIdMixin, AllMixin, SyncMixin
class LiveNotificationsManager(Manager, GetByIdMixin, AllMixin, SyncMixin):
state_name = 'live_notifications'
object_type = None # there is no object type associated
def set_last_read(self, id):
"""
Sets the last known notification.
"""
cmd = {
'type': 'live_notifications_set_last_read',
'uuid': self.api.generate_uuid(),
'args': {
'id': id,
},
}
self.queue.append(cmd)
def mark_read(self, id):
"""
Marks notification as read.
"""
cmd = {
'type': 'live_notifications_mark_read',
'uuid': self.api.generate_uuid(),
'args': {
'id': id,
},
}
self.queue.append(cmd)
def mark_read_all(self):
"""
Marks all notifications as read.
"""
cmd = {
'type': 'live_notifications_mark_read_all',
'uuid': self.api.generate_uuid(),
}
self.queue.append(cmd)
def mark_unread(self, id):
"""
Marks notification as unread.
"""
cmd = {
'type': 'live_notifications_mark_unread',
'uuid': self.api.generate_uuid(),
'args': {
'id': id,
},
}
self.queue.append(cmd)
|
0fb16c44b13ca467fb8ede67bdc93450712cb2bb
|
test/tiles/hitile_test.py
|
test/tiles/hitile_test.py
|
import dask.array as da
import h5py
import clodius.tiles.hitile as hghi
import numpy as np
import os.path as op
import tempfile
def test_hitile():
array_size = int(1e6)
chunk_size = 2**19
data = da.from_array(np.random.random((array_size,)), chunks=(chunk_size,))
with tempfile.TemporaryDirectory() as td:
output_file = op.join(td, 'blah.hitile')
hghi.array_to_hitile(data, output_file, zoom_step=6)
with h5py.File(output_file, 'r') as f:
(means, mins, maxs) = hghi.get_data(f, 0, 0)
# print("means, mins:", means[:10], mins[:10], maxs[:10])
|
import dask.array as da
import h5py
import clodius.tiles.hitile as hghi
import numpy as np
import os.path as op
import tempfile
def test_hitile():
array_size = int(1e6)
chunk_size = 2**19
data = np.random.random((array_size,))
with tempfile.TemporaryDirectory() as td:
output_file = op.join(td, 'blah.hitile')
hghi.array_to_hitile(
data, output_file, zoom_step=6, chunks=(chunk_size,)
)
with h5py.File(output_file, 'r') as f:
(means, mins, maxs) = hghi.get_data(f, 0, 0)
# print("means, mins:", means[:10], mins[:10], maxs[:10])
|
Fix error of applying dask twice
|
Fix error of applying dask twice
|
Python
|
mit
|
hms-dbmi/clodius,hms-dbmi/clodius
|
import dask.array as da
import h5py
import clodius.tiles.hitile as hghi
import numpy as np
import os.path as op
import tempfile
def test_hitile():
array_size = int(1e6)
chunk_size = 2**19
data = da.from_array(np.random.random((array_size,)), chunks=(chunk_size,))
with tempfile.TemporaryDirectory() as td:
output_file = op.join(td, 'blah.hitile')
hghi.array_to_hitile(data, output_file, zoom_step=6)
with h5py.File(output_file, 'r') as f:
(means, mins, maxs) = hghi.get_data(f, 0, 0)
# print("means, mins:", means[:10], mins[:10], maxs[:10])
Fix error of applying dask twice
|
import dask.array as da
import h5py
import clodius.tiles.hitile as hghi
import numpy as np
import os.path as op
import tempfile
def test_hitile():
array_size = int(1e6)
chunk_size = 2**19
data = np.random.random((array_size,))
with tempfile.TemporaryDirectory() as td:
output_file = op.join(td, 'blah.hitile')
hghi.array_to_hitile(
data, output_file, zoom_step=6, chunks=(chunk_size,)
)
with h5py.File(output_file, 'r') as f:
(means, mins, maxs) = hghi.get_data(f, 0, 0)
# print("means, mins:", means[:10], mins[:10], maxs[:10])
|
<commit_before>import dask.array as da
import h5py
import clodius.tiles.hitile as hghi
import numpy as np
import os.path as op
import tempfile
def test_hitile():
array_size = int(1e6)
chunk_size = 2**19
data = da.from_array(np.random.random((array_size,)), chunks=(chunk_size,))
with tempfile.TemporaryDirectory() as td:
output_file = op.join(td, 'blah.hitile')
hghi.array_to_hitile(data, output_file, zoom_step=6)
with h5py.File(output_file, 'r') as f:
(means, mins, maxs) = hghi.get_data(f, 0, 0)
# print("means, mins:", means[:10], mins[:10], maxs[:10])
<commit_msg>Fix error of applying dask twice<commit_after>
|
import dask.array as da
import h5py
import clodius.tiles.hitile as hghi
import numpy as np
import os.path as op
import tempfile
def test_hitile():
array_size = int(1e6)
chunk_size = 2**19
data = np.random.random((array_size,))
with tempfile.TemporaryDirectory() as td:
output_file = op.join(td, 'blah.hitile')
hghi.array_to_hitile(
data, output_file, zoom_step=6, chunks=(chunk_size,)
)
with h5py.File(output_file, 'r') as f:
(means, mins, maxs) = hghi.get_data(f, 0, 0)
# print("means, mins:", means[:10], mins[:10], maxs[:10])
|
import dask.array as da
import h5py
import clodius.tiles.hitile as hghi
import numpy as np
import os.path as op
import tempfile
def test_hitile():
array_size = int(1e6)
chunk_size = 2**19
data = da.from_array(np.random.random((array_size,)), chunks=(chunk_size,))
with tempfile.TemporaryDirectory() as td:
output_file = op.join(td, 'blah.hitile')
hghi.array_to_hitile(data, output_file, zoom_step=6)
with h5py.File(output_file, 'r') as f:
(means, mins, maxs) = hghi.get_data(f, 0, 0)
# print("means, mins:", means[:10], mins[:10], maxs[:10])
Fix error of applying dask twiceimport dask.array as da
import h5py
import clodius.tiles.hitile as hghi
import numpy as np
import os.path as op
import tempfile
def test_hitile():
array_size = int(1e6)
chunk_size = 2**19
data = np.random.random((array_size,))
with tempfile.TemporaryDirectory() as td:
output_file = op.join(td, 'blah.hitile')
hghi.array_to_hitile(
data, output_file, zoom_step=6, chunks=(chunk_size,)
)
with h5py.File(output_file, 'r') as f:
(means, mins, maxs) = hghi.get_data(f, 0, 0)
# print("means, mins:", means[:10], mins[:10], maxs[:10])
|
<commit_before>import dask.array as da
import h5py
import clodius.tiles.hitile as hghi
import numpy as np
import os.path as op
import tempfile
def test_hitile():
array_size = int(1e6)
chunk_size = 2**19
data = da.from_array(np.random.random((array_size,)), chunks=(chunk_size,))
with tempfile.TemporaryDirectory() as td:
output_file = op.join(td, 'blah.hitile')
hghi.array_to_hitile(data, output_file, zoom_step=6)
with h5py.File(output_file, 'r') as f:
(means, mins, maxs) = hghi.get_data(f, 0, 0)
# print("means, mins:", means[:10], mins[:10], maxs[:10])
<commit_msg>Fix error of applying dask twice<commit_after>import dask.array as da
import h5py
import clodius.tiles.hitile as hghi
import numpy as np
import os.path as op
import tempfile
def test_hitile():
array_size = int(1e6)
chunk_size = 2**19
data = np.random.random((array_size,))
with tempfile.TemporaryDirectory() as td:
output_file = op.join(td, 'blah.hitile')
hghi.array_to_hitile(
data, output_file, zoom_step=6, chunks=(chunk_size,)
)
with h5py.File(output_file, 'r') as f:
(means, mins, maxs) = hghi.get_data(f, 0, 0)
# print("means, mins:", means[:10], mins[:10], maxs[:10])
|
dabc4eb0ad59599a0e801a3af5423861c7dd2105
|
test_valid_object_file.py
|
test_valid_object_file.py
|
from astropy.table import Table
TABLE_NAME = 'feder_object_list.csv'
def test_table_can_be_read():
objs = Table.read(TABLE_NAME, format='ascii', delimiter=',')
columns = ['object', 'ra', 'dec']
for col in columns:
assert col in objs.colnames
|
from astropy.table import Table
from astropy.coordinates import ICRS, name_resolve
from astropy import units as u
TABLE_NAME = 'feder_object_list.csv'
MAX_SEP = 5 # arcsec
def test_table_can_be_read_and_coords_good():
objs = Table.read(TABLE_NAME, format='ascii', delimiter=',')
columns = ['object', 'ra', 'dec']
for col in columns:
assert col in objs.colnames
for row in objs:
try:
simbad_pos = ICRS.from_name(row['object'])
except name_resolve.NameResolveError:
continue
table_pos = ICRS(row['ra'], row['dec'], unit=(u.hour, u.degree))
# CHANGE ASSERT TO IF/THEN, print name then assert 0
sep = table_pos.separation(simbad_pos).arcsec
warn = ''
if sep > MAX_SEP:
warn = ('Bad RA/Dec for object {}, '
'separation is {} arcsec'.format(row['object'], sep))
print (warn)
assert len(warn) == 0
|
Add test that object coordinates are accurate
|
Add test that object coordinates are accurate
Skips over any cases where simbad cannot resolve the name, so it is not perfect...
|
Python
|
bsd-2-clause
|
mwcraig/feder-object-list
|
from astropy.table import Table
TABLE_NAME = 'feder_object_list.csv'
def test_table_can_be_read():
objs = Table.read(TABLE_NAME, format='ascii', delimiter=',')
columns = ['object', 'ra', 'dec']
for col in columns:
assert col in objs.colnames
Add test that object coordinates are accurate
Skips over any cases where simbad cannot resolve the name, so it is not perfect...
|
from astropy.table import Table
from astropy.coordinates import ICRS, name_resolve
from astropy import units as u
TABLE_NAME = 'feder_object_list.csv'
MAX_SEP = 5 # arcsec
def test_table_can_be_read_and_coords_good():
objs = Table.read(TABLE_NAME, format='ascii', delimiter=',')
columns = ['object', 'ra', 'dec']
for col in columns:
assert col in objs.colnames
for row in objs:
try:
simbad_pos = ICRS.from_name(row['object'])
except name_resolve.NameResolveError:
continue
table_pos = ICRS(row['ra'], row['dec'], unit=(u.hour, u.degree))
# CHANGE ASSERT TO IF/THEN, print name then assert 0
sep = table_pos.separation(simbad_pos).arcsec
warn = ''
if sep > MAX_SEP:
warn = ('Bad RA/Dec for object {}, '
'separation is {} arcsec'.format(row['object'], sep))
print (warn)
assert len(warn) == 0
|
<commit_before>from astropy.table import Table
TABLE_NAME = 'feder_object_list.csv'
def test_table_can_be_read():
objs = Table.read(TABLE_NAME, format='ascii', delimiter=',')
columns = ['object', 'ra', 'dec']
for col in columns:
assert col in objs.colnames
<commit_msg>Add test that object coordinates are accurate
Skips over any cases where simbad cannot resolve the name, so it is not perfect...<commit_after>
|
from astropy.table import Table
from astropy.coordinates import ICRS, name_resolve
from astropy import units as u
TABLE_NAME = 'feder_object_list.csv'
MAX_SEP = 5 # arcsec
def test_table_can_be_read_and_coords_good():
objs = Table.read(TABLE_NAME, format='ascii', delimiter=',')
columns = ['object', 'ra', 'dec']
for col in columns:
assert col in objs.colnames
for row in objs:
try:
simbad_pos = ICRS.from_name(row['object'])
except name_resolve.NameResolveError:
continue
table_pos = ICRS(row['ra'], row['dec'], unit=(u.hour, u.degree))
# CHANGE ASSERT TO IF/THEN, print name then assert 0
sep = table_pos.separation(simbad_pos).arcsec
warn = ''
if sep > MAX_SEP:
warn = ('Bad RA/Dec for object {}, '
'separation is {} arcsec'.format(row['object'], sep))
print (warn)
assert len(warn) == 0
|
from astropy.table import Table
TABLE_NAME = 'feder_object_list.csv'
def test_table_can_be_read():
objs = Table.read(TABLE_NAME, format='ascii', delimiter=',')
columns = ['object', 'ra', 'dec']
for col in columns:
assert col in objs.colnames
Add test that object coordinates are accurate
Skips over any cases where simbad cannot resolve the name, so it is not perfect...from astropy.table import Table
from astropy.coordinates import ICRS, name_resolve
from astropy import units as u
TABLE_NAME = 'feder_object_list.csv'
MAX_SEP = 5 # arcsec
def test_table_can_be_read_and_coords_good():
objs = Table.read(TABLE_NAME, format='ascii', delimiter=',')
columns = ['object', 'ra', 'dec']
for col in columns:
assert col in objs.colnames
for row in objs:
try:
simbad_pos = ICRS.from_name(row['object'])
except name_resolve.NameResolveError:
continue
table_pos = ICRS(row['ra'], row['dec'], unit=(u.hour, u.degree))
# CHANGE ASSERT TO IF/THEN, print name then assert 0
sep = table_pos.separation(simbad_pos).arcsec
warn = ''
if sep > MAX_SEP:
warn = ('Bad RA/Dec for object {}, '
'separation is {} arcsec'.format(row['object'], sep))
print (warn)
assert len(warn) == 0
|
<commit_before>from astropy.table import Table
TABLE_NAME = 'feder_object_list.csv'
def test_table_can_be_read():
objs = Table.read(TABLE_NAME, format='ascii', delimiter=',')
columns = ['object', 'ra', 'dec']
for col in columns:
assert col in objs.colnames
<commit_msg>Add test that object coordinates are accurate
Skips over any cases where simbad cannot resolve the name, so it is not perfect...<commit_after>from astropy.table import Table
from astropy.coordinates import ICRS, name_resolve
from astropy import units as u
TABLE_NAME = 'feder_object_list.csv'
MAX_SEP = 5 # arcsec
def test_table_can_be_read_and_coords_good():
objs = Table.read(TABLE_NAME, format='ascii', delimiter=',')
columns = ['object', 'ra', 'dec']
for col in columns:
assert col in objs.colnames
for row in objs:
try:
simbad_pos = ICRS.from_name(row['object'])
except name_resolve.NameResolveError:
continue
table_pos = ICRS(row['ra'], row['dec'], unit=(u.hour, u.degree))
# CHANGE ASSERT TO IF/THEN, print name then assert 0
sep = table_pos.separation(simbad_pos).arcsec
warn = ''
if sep > MAX_SEP:
warn = ('Bad RA/Dec for object {}, '
'separation is {} arcsec'.format(row['object'], sep))
print (warn)
assert len(warn) == 0
|
358dc8e31477c27da8f286f19daa736489625035
|
tests/integ/test_basic.py
|
tests/integ/test_basic.py
|
"""Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
engine.bind(User)
user = User(email="user@domain.com", username="user", profile="first")
engine.save(user)
same_user = User(email=user.email, username=user.username)
engine.load(same_user)
assert user.profile == same_user.profile
same_user.profile = "second"
engine.save(same_user)
engine.load(user)
assert user.profile == same_user.profile
engine.delete(user)
with pytest.raises(MissingObjects) as excinfo:
engine.load(same_user)
assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
class Model(BaseModel):
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
engine.bind(Model)
|
"""Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
engine.bind(User)
user = User(email="user@domain.com", username="user", profile="first")
engine.save(user)
same_user = User(email=user.email, username=user.username)
engine.load(same_user)
assert user.profile == same_user.profile
same_user.profile = "second"
engine.save(same_user)
engine.load(user, consistent=True)
assert user.profile == same_user.profile
engine.delete(user)
with pytest.raises(MissingObjects) as excinfo:
engine.load(same_user, consistent=True)
assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
class Model(BaseModel):
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
engine.bind(Model)
|
Use consistent load for integ test stability
|
Use consistent load for integ test stability
|
Python
|
mit
|
numberoverzero/bloop,numberoverzero/bloop
|
"""Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
engine.bind(User)
user = User(email="user@domain.com", username="user", profile="first")
engine.save(user)
same_user = User(email=user.email, username=user.username)
engine.load(same_user)
assert user.profile == same_user.profile
same_user.profile = "second"
engine.save(same_user)
engine.load(user)
assert user.profile == same_user.profile
engine.delete(user)
with pytest.raises(MissingObjects) as excinfo:
engine.load(same_user)
assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
class Model(BaseModel):
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
engine.bind(Model)
Use consistent load for integ test stability
|
"""Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
engine.bind(User)
user = User(email="user@domain.com", username="user", profile="first")
engine.save(user)
same_user = User(email=user.email, username=user.username)
engine.load(same_user)
assert user.profile == same_user.profile
same_user.profile = "second"
engine.save(same_user)
engine.load(user, consistent=True)
assert user.profile == same_user.profile
engine.delete(user)
with pytest.raises(MissingObjects) as excinfo:
engine.load(same_user, consistent=True)
assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
class Model(BaseModel):
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
engine.bind(Model)
|
<commit_before>"""Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
engine.bind(User)
user = User(email="user@domain.com", username="user", profile="first")
engine.save(user)
same_user = User(email=user.email, username=user.username)
engine.load(same_user)
assert user.profile == same_user.profile
same_user.profile = "second"
engine.save(same_user)
engine.load(user)
assert user.profile == same_user.profile
engine.delete(user)
with pytest.raises(MissingObjects) as excinfo:
engine.load(same_user)
assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
class Model(BaseModel):
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
engine.bind(Model)
<commit_msg>Use consistent load for integ test stability<commit_after>
|
"""Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
engine.bind(User)
user = User(email="user@domain.com", username="user", profile="first")
engine.save(user)
same_user = User(email=user.email, username=user.username)
engine.load(same_user)
assert user.profile == same_user.profile
same_user.profile = "second"
engine.save(same_user)
engine.load(user, consistent=True)
assert user.profile == same_user.profile
engine.delete(user)
with pytest.raises(MissingObjects) as excinfo:
engine.load(same_user, consistent=True)
assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
class Model(BaseModel):
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
engine.bind(Model)
|
"""Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
engine.bind(User)
user = User(email="user@domain.com", username="user", profile="first")
engine.save(user)
same_user = User(email=user.email, username=user.username)
engine.load(same_user)
assert user.profile == same_user.profile
same_user.profile = "second"
engine.save(same_user)
engine.load(user)
assert user.profile == same_user.profile
engine.delete(user)
with pytest.raises(MissingObjects) as excinfo:
engine.load(same_user)
assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
class Model(BaseModel):
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
engine.bind(Model)
Use consistent load for integ test stability"""Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
engine.bind(User)
user = User(email="user@domain.com", username="user", profile="first")
engine.save(user)
same_user = User(email=user.email, username=user.username)
engine.load(same_user)
assert user.profile == same_user.profile
same_user.profile = "second"
engine.save(same_user)
engine.load(user, consistent=True)
assert user.profile == same_user.profile
engine.delete(user)
with pytest.raises(MissingObjects) as excinfo:
engine.load(same_user, consistent=True)
assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
class Model(BaseModel):
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
engine.bind(Model)
|
<commit_before>"""Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
engine.bind(User)
user = User(email="user@domain.com", username="user", profile="first")
engine.save(user)
same_user = User(email=user.email, username=user.username)
engine.load(same_user)
assert user.profile == same_user.profile
same_user.profile = "second"
engine.save(same_user)
engine.load(user)
assert user.profile == same_user.profile
engine.delete(user)
with pytest.raises(MissingObjects) as excinfo:
engine.load(same_user)
assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
class Model(BaseModel):
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
engine.bind(Model)
<commit_msg>Use consistent load for integ test stability<commit_after>"""Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
engine.bind(User)
user = User(email="user@domain.com", username="user", profile="first")
engine.save(user)
same_user = User(email=user.email, username=user.username)
engine.load(same_user)
assert user.profile == same_user.profile
same_user.profile = "second"
engine.save(same_user)
engine.load(user, consistent=True)
assert user.profile == same_user.profile
engine.delete(user)
with pytest.raises(MissingObjects) as excinfo:
engine.load(same_user, consistent=True)
assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
class Model(BaseModel):
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
engine.bind(Model)
|
a2b2e6b79ac28d886b3bb682beeadab06018de66
|
test/copies/gyptest-attribs.py
|
test/copies/gyptest-attribs.py
|
#!/usr/bin/env python
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that copying files preserves file attributes.
"""
import TestGyp
import os
import stat
import sys
def check_attribs(path, expected_exec_bit):
out_path = test.built_file_path(path, chdir='src')
in_stat = os.stat(os.path.join('src', path))
out_stat = os.stat(out_path)
if out_stat.st_mode & stat.S_IXUSR != expected_exec_bit:
test.fail_test()
test = TestGyp.TestGyp()
test.run_gyp('copies-attribs.gyp', chdir='src')
test.build('copies-attribs.gyp', chdir='src')
if sys.platform != 'win32':
out_path = test.built_file_path('executable-file.sh', chdir='src')
test.must_contain(out_path,
'#!/bin/bash\n'
'\n'
'echo echo echo echo cho ho o o\n')
check_attribs('executable-file.sh', expected_exec_bit=stat.S_IXUSR)
test.pass_test()
|
#!/usr/bin/env python
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that copying files preserves file attributes.
"""
import TestGyp
import os
import stat
import sys
def check_attribs(path, expected_exec_bit):
out_path = test.built_file_path(path, chdir='src')
in_stat = os.stat(os.path.join('src', path))
out_stat = os.stat(out_path)
if out_stat.st_mode & stat.S_IXUSR != expected_exec_bit:
test.fail_test()
# Doesn't pass with the android generator, see gyp bug 379.
test = TestGyp.TestGyp(formats=['!android'])
test.run_gyp('copies-attribs.gyp', chdir='src')
test.build('copies-attribs.gyp', chdir='src')
if sys.platform != 'win32':
out_path = test.built_file_path('executable-file.sh', chdir='src')
test.must_contain(out_path,
'#!/bin/bash\n'
'\n'
'echo echo echo echo cho ho o o\n')
check_attribs('executable-file.sh', expected_exec_bit=stat.S_IXUSR)
test.pass_test()
|
Disable new test from r1779 for the android generator.
|
Disable new test from r1779 for the android generator.
BUG=gyp:379
TBR=torne@chromium.org
Review URL: https://codereview.chromium.org/68333002
|
Python
|
bsd-3-clause
|
witwall/gyp,witwall/gyp,witwall/gyp,witwall/gyp,witwall/gyp
|
#!/usr/bin/env python
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that copying files preserves file attributes.
"""
import TestGyp
import os
import stat
import sys
def check_attribs(path, expected_exec_bit):
out_path = test.built_file_path(path, chdir='src')
in_stat = os.stat(os.path.join('src', path))
out_stat = os.stat(out_path)
if out_stat.st_mode & stat.S_IXUSR != expected_exec_bit:
test.fail_test()
test = TestGyp.TestGyp()
test.run_gyp('copies-attribs.gyp', chdir='src')
test.build('copies-attribs.gyp', chdir='src')
if sys.platform != 'win32':
out_path = test.built_file_path('executable-file.sh', chdir='src')
test.must_contain(out_path,
'#!/bin/bash\n'
'\n'
'echo echo echo echo cho ho o o\n')
check_attribs('executable-file.sh', expected_exec_bit=stat.S_IXUSR)
test.pass_test()
Disable new test from r1779 for the android generator.
BUG=gyp:379
TBR=torne@chromium.org
Review URL: https://codereview.chromium.org/68333002
|
#!/usr/bin/env python
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that copying files preserves file attributes.
"""
import TestGyp
import os
import stat
import sys
def check_attribs(path, expected_exec_bit):
out_path = test.built_file_path(path, chdir='src')
in_stat = os.stat(os.path.join('src', path))
out_stat = os.stat(out_path)
if out_stat.st_mode & stat.S_IXUSR != expected_exec_bit:
test.fail_test()
# Doesn't pass with the android generator, see gyp bug 379.
test = TestGyp.TestGyp(formats=['!android'])
test.run_gyp('copies-attribs.gyp', chdir='src')
test.build('copies-attribs.gyp', chdir='src')
if sys.platform != 'win32':
out_path = test.built_file_path('executable-file.sh', chdir='src')
test.must_contain(out_path,
'#!/bin/bash\n'
'\n'
'echo echo echo echo cho ho o o\n')
check_attribs('executable-file.sh', expected_exec_bit=stat.S_IXUSR)
test.pass_test()
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that copying files preserves file attributes.
"""
import TestGyp
import os
import stat
import sys
def check_attribs(path, expected_exec_bit):
out_path = test.built_file_path(path, chdir='src')
in_stat = os.stat(os.path.join('src', path))
out_stat = os.stat(out_path)
if out_stat.st_mode & stat.S_IXUSR != expected_exec_bit:
test.fail_test()
test = TestGyp.TestGyp()
test.run_gyp('copies-attribs.gyp', chdir='src')
test.build('copies-attribs.gyp', chdir='src')
if sys.platform != 'win32':
out_path = test.built_file_path('executable-file.sh', chdir='src')
test.must_contain(out_path,
'#!/bin/bash\n'
'\n'
'echo echo echo echo cho ho o o\n')
check_attribs('executable-file.sh', expected_exec_bit=stat.S_IXUSR)
test.pass_test()
<commit_msg>Disable new test from r1779 for the android generator.
BUG=gyp:379
TBR=torne@chromium.org
Review URL: https://codereview.chromium.org/68333002<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that copying files preserves file attributes.
"""
import TestGyp
import os
import stat
import sys
def check_attribs(path, expected_exec_bit):
out_path = test.built_file_path(path, chdir='src')
in_stat = os.stat(os.path.join('src', path))
out_stat = os.stat(out_path)
if out_stat.st_mode & stat.S_IXUSR != expected_exec_bit:
test.fail_test()
# Doesn't pass with the android generator, see gyp bug 379.
test = TestGyp.TestGyp(formats=['!android'])
test.run_gyp('copies-attribs.gyp', chdir='src')
test.build('copies-attribs.gyp', chdir='src')
if sys.platform != 'win32':
out_path = test.built_file_path('executable-file.sh', chdir='src')
test.must_contain(out_path,
'#!/bin/bash\n'
'\n'
'echo echo echo echo cho ho o o\n')
check_attribs('executable-file.sh', expected_exec_bit=stat.S_IXUSR)
test.pass_test()
|
#!/usr/bin/env python
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that copying files preserves file attributes.
"""
import TestGyp
import os
import stat
import sys
def check_attribs(path, expected_exec_bit):
out_path = test.built_file_path(path, chdir='src')
in_stat = os.stat(os.path.join('src', path))
out_stat = os.stat(out_path)
if out_stat.st_mode & stat.S_IXUSR != expected_exec_bit:
test.fail_test()
test = TestGyp.TestGyp()
test.run_gyp('copies-attribs.gyp', chdir='src')
test.build('copies-attribs.gyp', chdir='src')
if sys.platform != 'win32':
out_path = test.built_file_path('executable-file.sh', chdir='src')
test.must_contain(out_path,
'#!/bin/bash\n'
'\n'
'echo echo echo echo cho ho o o\n')
check_attribs('executable-file.sh', expected_exec_bit=stat.S_IXUSR)
test.pass_test()
Disable new test from r1779 for the android generator.
BUG=gyp:379
TBR=torne@chromium.org
Review URL: https://codereview.chromium.org/68333002#!/usr/bin/env python
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that copying files preserves file attributes.
"""
import TestGyp
import os
import stat
import sys
def check_attribs(path, expected_exec_bit):
out_path = test.built_file_path(path, chdir='src')
in_stat = os.stat(os.path.join('src', path))
out_stat = os.stat(out_path)
if out_stat.st_mode & stat.S_IXUSR != expected_exec_bit:
test.fail_test()
# Doesn't pass with the android generator, see gyp bug 379.
test = TestGyp.TestGyp(formats=['!android'])
test.run_gyp('copies-attribs.gyp', chdir='src')
test.build('copies-attribs.gyp', chdir='src')
if sys.platform != 'win32':
out_path = test.built_file_path('executable-file.sh', chdir='src')
test.must_contain(out_path,
'#!/bin/bash\n'
'\n'
'echo echo echo echo cho ho o o\n')
check_attribs('executable-file.sh', expected_exec_bit=stat.S_IXUSR)
test.pass_test()
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that copying files preserves file attributes.
"""
import TestGyp
import os
import stat
import sys
def check_attribs(path, expected_exec_bit):
out_path = test.built_file_path(path, chdir='src')
in_stat = os.stat(os.path.join('src', path))
out_stat = os.stat(out_path)
if out_stat.st_mode & stat.S_IXUSR != expected_exec_bit:
test.fail_test()
test = TestGyp.TestGyp()
test.run_gyp('copies-attribs.gyp', chdir='src')
test.build('copies-attribs.gyp', chdir='src')
if sys.platform != 'win32':
out_path = test.built_file_path('executable-file.sh', chdir='src')
test.must_contain(out_path,
'#!/bin/bash\n'
'\n'
'echo echo echo echo cho ho o o\n')
check_attribs('executable-file.sh', expected_exec_bit=stat.S_IXUSR)
test.pass_test()
<commit_msg>Disable new test from r1779 for the android generator.
BUG=gyp:379
TBR=torne@chromium.org
Review URL: https://codereview.chromium.org/68333002<commit_after>#!/usr/bin/env python
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that copying files preserves file attributes.
"""
import TestGyp
import os
import stat
import sys
def check_attribs(path, expected_exec_bit):
out_path = test.built_file_path(path, chdir='src')
in_stat = os.stat(os.path.join('src', path))
out_stat = os.stat(out_path)
if out_stat.st_mode & stat.S_IXUSR != expected_exec_bit:
test.fail_test()
# Doesn't pass with the android generator, see gyp bug 379.
test = TestGyp.TestGyp(formats=['!android'])
test.run_gyp('copies-attribs.gyp', chdir='src')
test.build('copies-attribs.gyp', chdir='src')
if sys.platform != 'win32':
out_path = test.built_file_path('executable-file.sh', chdir='src')
test.must_contain(out_path,
'#!/bin/bash\n'
'\n'
'echo echo echo echo cho ho o o\n')
check_attribs('executable-file.sh', expected_exec_bit=stat.S_IXUSR)
test.pass_test()
|
2c86118cfa2c75787fea22909aaec767e432151e
|
tests/test_add_language/decorators.py
|
tests/test_add_language/decorators.py
|
# tests.decorators
import sys
from functools import wraps
from StringIO import StringIO
from mock import patch
def redirect_stdout(func):
"""temporarily redirect stdout to new output stream"""
@wraps(func)
def wrapper(*args, **kwargs):
original_stdout = sys.stdout
out = StringIO()
try:
sys.stdout = out
return func(out, *args, **kwargs)
finally:
sys.stdout = original_stdout
return wrapper
def use_user_prefs(user_prefs):
"""temporarily use the given values for user preferences"""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
with patch('yvs.shared.get_user_prefs', return_value=user_prefs):
return func(*args, **kwargs)
return wrapper
return decorator
|
# tests.decorators
import sys
from functools import wraps
from StringIO import StringIO
def redirect_stdout(func):
"""temporarily redirect stdout to new output stream"""
@wraps(func)
def wrapper(*args, **kwargs):
original_stdout = sys.stdout
out = StringIO()
try:
sys.stdout = out
return func(out, *args, **kwargs)
finally:
sys.stdout = original_stdout
return wrapper
|
Remove use_user_prefs decorator for add_language
|
Remove use_user_prefs decorator for add_language
|
Python
|
mit
|
caleb531/youversion-suggest,caleb531/youversion-suggest
|
# tests.decorators
import sys
from functools import wraps
from StringIO import StringIO
from mock import patch
def redirect_stdout(func):
"""temporarily redirect stdout to new output stream"""
@wraps(func)
def wrapper(*args, **kwargs):
original_stdout = sys.stdout
out = StringIO()
try:
sys.stdout = out
return func(out, *args, **kwargs)
finally:
sys.stdout = original_stdout
return wrapper
def use_user_prefs(user_prefs):
"""temporarily use the given values for user preferences"""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
with patch('yvs.shared.get_user_prefs', return_value=user_prefs):
return func(*args, **kwargs)
return wrapper
return decorator
Remove use_user_prefs decorator for add_language
|
# tests.decorators
import sys
from functools import wraps
from StringIO import StringIO
def redirect_stdout(func):
"""temporarily redirect stdout to new output stream"""
@wraps(func)
def wrapper(*args, **kwargs):
original_stdout = sys.stdout
out = StringIO()
try:
sys.stdout = out
return func(out, *args, **kwargs)
finally:
sys.stdout = original_stdout
return wrapper
|
<commit_before># tests.decorators
import sys
from functools import wraps
from StringIO import StringIO
from mock import patch
def redirect_stdout(func):
"""temporarily redirect stdout to new output stream"""
@wraps(func)
def wrapper(*args, **kwargs):
original_stdout = sys.stdout
out = StringIO()
try:
sys.stdout = out
return func(out, *args, **kwargs)
finally:
sys.stdout = original_stdout
return wrapper
def use_user_prefs(user_prefs):
"""temporarily use the given values for user preferences"""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
with patch('yvs.shared.get_user_prefs', return_value=user_prefs):
return func(*args, **kwargs)
return wrapper
return decorator
<commit_msg>Remove use_user_prefs decorator for add_language<commit_after>
|
# tests.decorators
import sys
from functools import wraps
from StringIO import StringIO
def redirect_stdout(func):
"""temporarily redirect stdout to new output stream"""
@wraps(func)
def wrapper(*args, **kwargs):
original_stdout = sys.stdout
out = StringIO()
try:
sys.stdout = out
return func(out, *args, **kwargs)
finally:
sys.stdout = original_stdout
return wrapper
|
# tests.decorators
import sys
from functools import wraps
from StringIO import StringIO
from mock import patch
def redirect_stdout(func):
"""temporarily redirect stdout to new output stream"""
@wraps(func)
def wrapper(*args, **kwargs):
original_stdout = sys.stdout
out = StringIO()
try:
sys.stdout = out
return func(out, *args, **kwargs)
finally:
sys.stdout = original_stdout
return wrapper
def use_user_prefs(user_prefs):
"""temporarily use the given values for user preferences"""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
with patch('yvs.shared.get_user_prefs', return_value=user_prefs):
return func(*args, **kwargs)
return wrapper
return decorator
Remove use_user_prefs decorator for add_language# tests.decorators
import sys
from functools import wraps
from StringIO import StringIO
def redirect_stdout(func):
"""temporarily redirect stdout to new output stream"""
@wraps(func)
def wrapper(*args, **kwargs):
original_stdout = sys.stdout
out = StringIO()
try:
sys.stdout = out
return func(out, *args, **kwargs)
finally:
sys.stdout = original_stdout
return wrapper
|
<commit_before># tests.decorators
import sys
from functools import wraps
from StringIO import StringIO
from mock import patch
def redirect_stdout(func):
"""temporarily redirect stdout to new output stream"""
@wraps(func)
def wrapper(*args, **kwargs):
original_stdout = sys.stdout
out = StringIO()
try:
sys.stdout = out
return func(out, *args, **kwargs)
finally:
sys.stdout = original_stdout
return wrapper
def use_user_prefs(user_prefs):
"""temporarily use the given values for user preferences"""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
with patch('yvs.shared.get_user_prefs', return_value=user_prefs):
return func(*args, **kwargs)
return wrapper
return decorator
<commit_msg>Remove use_user_prefs decorator for add_language<commit_after># tests.decorators
import sys
from functools import wraps
from StringIO import StringIO
def redirect_stdout(func):
"""temporarily redirect stdout to new output stream"""
@wraps(func)
def wrapper(*args, **kwargs):
original_stdout = sys.stdout
out = StringIO()
try:
sys.stdout = out
return func(out, *args, **kwargs)
finally:
sys.stdout = original_stdout
return wrapper
|
bc005622a6fcce2ec53bf93a9b6519f923904a61
|
turbustat/statistics/stats_warnings.py
|
turbustat/statistics/stats_warnings.py
|
# Licensed under an MIT open source license - see LICENSE
from __future__ import print_function, absolute_import, division
class TurbuStatTestingWarning(Warning):
'''
Turbustat.statistics warning for untested methods.
'''
|
# Licensed under an MIT open source license - see LICENSE
from __future__ import print_function, absolute_import, division
class TurbuStatTestingWarning(Warning):
'''
Turbustat.statistics warning for untested methods.
'''
class TurbuStatMetricWarning(Warning):
'''
Turbustat.statistics warning for misusing a distance metric.
'''
|
Add warning for where a distance metric is being misused
|
Add warning for where a distance metric is being misused
|
Python
|
mit
|
Astroua/TurbuStat,e-koch/TurbuStat
|
# Licensed under an MIT open source license - see LICENSE
from __future__ import print_function, absolute_import, division
class TurbuStatTestingWarning(Warning):
'''
Turbustat.statistics warning for untested methods.
'''Add warning for where a distance metric is being misused
|
# Licensed under an MIT open source license - see LICENSE
from __future__ import print_function, absolute_import, division
class TurbuStatTestingWarning(Warning):
'''
Turbustat.statistics warning for untested methods.
'''
class TurbuStatMetricWarning(Warning):
'''
Turbustat.statistics warning for misusing a distance metric.
'''
|
<commit_before># Licensed under an MIT open source license - see LICENSE
from __future__ import print_function, absolute_import, division
class TurbuStatTestingWarning(Warning):
'''
Turbustat.statistics warning for untested methods.
'''<commit_msg>Add warning for where a distance metric is being misused<commit_after>
|
# Licensed under an MIT open source license - see LICENSE
from __future__ import print_function, absolute_import, division
class TurbuStatTestingWarning(Warning):
'''
Turbustat.statistics warning for untested methods.
'''
class TurbuStatMetricWarning(Warning):
'''
Turbustat.statistics warning for misusing a distance metric.
'''
|
# Licensed under an MIT open source license - see LICENSE
from __future__ import print_function, absolute_import, division
class TurbuStatTestingWarning(Warning):
'''
Turbustat.statistics warning for untested methods.
'''Add warning for where a distance metric is being misused# Licensed under an MIT open source license - see LICENSE
from __future__ import print_function, absolute_import, division
class TurbuStatTestingWarning(Warning):
'''
Turbustat.statistics warning for untested methods.
'''
class TurbuStatMetricWarning(Warning):
'''
Turbustat.statistics warning for misusing a distance metric.
'''
|
<commit_before># Licensed under an MIT open source license - see LICENSE
from __future__ import print_function, absolute_import, division
class TurbuStatTestingWarning(Warning):
'''
Turbustat.statistics warning for untested methods.
'''<commit_msg>Add warning for where a distance metric is being misused<commit_after># Licensed under an MIT open source license - see LICENSE
from __future__ import print_function, absolute_import, division
class TurbuStatTestingWarning(Warning):
'''
Turbustat.statistics warning for untested methods.
'''
class TurbuStatMetricWarning(Warning):
'''
Turbustat.statistics warning for misusing a distance metric.
'''
|
dfd02ec10a904c5ce52162fa512e0850c789ce32
|
language_explorer/staging_settings.py
|
language_explorer/staging_settings.py
|
# Prod-like, but with resources in different locations
# Data sources
LANGUAGE_EXPLORER_DB_URL = 'postgresql://esteele@/language_explorer'
JPHARVEST_DB_URL = 'postgresql://esteele@/jpharvest'
WALS_DB_URL = 'postgresql://esteele@/wals2013'
SIL_RCEM_TSV_SOURCE = '/home/esteele/lex_data_bundle/iso-639-3_Retirements.tab'
CENSUS_CSV_SOURCE = '/home/esteele/lex_data_bundle/census_2011_LANP_ENGLP.csv'
CACHE_ROOT = "/home/esteele/lex_data_bundle/cache"
TEST_CACHE_ROOT = CACHE_ROOT # For the moment
|
# Prod-like, but with resources in different locations
# Data sources
LANGUAGE_EXPLORER_DB_URL = 'postgresql://esteele@/language_explorer'
JPHARVEST_DB_URL = 'postgresql://esteele@/jpharvest'
WALS_DB_URL = 'postgresql://esteele@/wals2013'
SIL_RCEM_TSV_SOURCE = '/home/esteele/lex_data_bundle/iso-639-3_Retirements.tab'
CENSUS_CSV_SOURCE = '/Users/esteele/Code/language_explorer/data/census_2011_LANP_ENGLP.csv'
CACHE_ROOT = "/home/esteele/lex_data_bundle/cache"
TEST_CACHE_ROOT = CACHE_ROOT # For the moment
|
Use staging for creating a static copy, so refer to in-place assets, not deployed assets
|
Use staging for creating a static copy, so refer to in-place assets, not deployed assets
|
Python
|
mit
|
edwinsteele/language_explorer,edwinsteele/language_explorer,edwinsteele/language_explorer
|
# Prod-like, but with resources in different locations
# Data sources
LANGUAGE_EXPLORER_DB_URL = 'postgresql://esteele@/language_explorer'
JPHARVEST_DB_URL = 'postgresql://esteele@/jpharvest'
WALS_DB_URL = 'postgresql://esteele@/wals2013'
SIL_RCEM_TSV_SOURCE = '/home/esteele/lex_data_bundle/iso-639-3_Retirements.tab'
CENSUS_CSV_SOURCE = '/home/esteele/lex_data_bundle/census_2011_LANP_ENGLP.csv'
CACHE_ROOT = "/home/esteele/lex_data_bundle/cache"
TEST_CACHE_ROOT = CACHE_ROOT # For the moment
Use staging for creating a static copy, so refer to in-place assets, not deployed assets
|
# Prod-like, but with resources in different locations
# Data sources
LANGUAGE_EXPLORER_DB_URL = 'postgresql://esteele@/language_explorer'
JPHARVEST_DB_URL = 'postgresql://esteele@/jpharvest'
WALS_DB_URL = 'postgresql://esteele@/wals2013'
SIL_RCEM_TSV_SOURCE = '/home/esteele/lex_data_bundle/iso-639-3_Retirements.tab'
CENSUS_CSV_SOURCE = '/Users/esteele/Code/language_explorer/data/census_2011_LANP_ENGLP.csv'
CACHE_ROOT = "/home/esteele/lex_data_bundle/cache"
TEST_CACHE_ROOT = CACHE_ROOT # For the moment
|
<commit_before># Prod-like, but with resources in different locations
# Data sources
LANGUAGE_EXPLORER_DB_URL = 'postgresql://esteele@/language_explorer'
JPHARVEST_DB_URL = 'postgresql://esteele@/jpharvest'
WALS_DB_URL = 'postgresql://esteele@/wals2013'
SIL_RCEM_TSV_SOURCE = '/home/esteele/lex_data_bundle/iso-639-3_Retirements.tab'
CENSUS_CSV_SOURCE = '/home/esteele/lex_data_bundle/census_2011_LANP_ENGLP.csv'
CACHE_ROOT = "/home/esteele/lex_data_bundle/cache"
TEST_CACHE_ROOT = CACHE_ROOT # For the moment
<commit_msg>Use staging for creating a static copy, so refer to in-place assets, not deployed assets<commit_after>
|
# Prod-like, but with resources in different locations
# Data sources
LANGUAGE_EXPLORER_DB_URL = 'postgresql://esteele@/language_explorer'
JPHARVEST_DB_URL = 'postgresql://esteele@/jpharvest'
WALS_DB_URL = 'postgresql://esteele@/wals2013'
SIL_RCEM_TSV_SOURCE = '/home/esteele/lex_data_bundle/iso-639-3_Retirements.tab'
CENSUS_CSV_SOURCE = '/Users/esteele/Code/language_explorer/data/census_2011_LANP_ENGLP.csv'
CACHE_ROOT = "/home/esteele/lex_data_bundle/cache"
TEST_CACHE_ROOT = CACHE_ROOT # For the moment
|
# Prod-like, but with resources in different locations
# Data sources
LANGUAGE_EXPLORER_DB_URL = 'postgresql://esteele@/language_explorer'
JPHARVEST_DB_URL = 'postgresql://esteele@/jpharvest'
WALS_DB_URL = 'postgresql://esteele@/wals2013'
SIL_RCEM_TSV_SOURCE = '/home/esteele/lex_data_bundle/iso-639-3_Retirements.tab'
CENSUS_CSV_SOURCE = '/home/esteele/lex_data_bundle/census_2011_LANP_ENGLP.csv'
CACHE_ROOT = "/home/esteele/lex_data_bundle/cache"
TEST_CACHE_ROOT = CACHE_ROOT # For the moment
Use staging for creating a static copy, so refer to in-place assets, not deployed assets# Prod-like, but with resources in different locations
# Data sources
LANGUAGE_EXPLORER_DB_URL = 'postgresql://esteele@/language_explorer'
JPHARVEST_DB_URL = 'postgresql://esteele@/jpharvest'
WALS_DB_URL = 'postgresql://esteele@/wals2013'
SIL_RCEM_TSV_SOURCE = '/home/esteele/lex_data_bundle/iso-639-3_Retirements.tab'
CENSUS_CSV_SOURCE = '/Users/esteele/Code/language_explorer/data/census_2011_LANP_ENGLP.csv'
CACHE_ROOT = "/home/esteele/lex_data_bundle/cache"
TEST_CACHE_ROOT = CACHE_ROOT # For the moment
|
<commit_before># Prod-like, but with resources in different locations
# Data sources
LANGUAGE_EXPLORER_DB_URL = 'postgresql://esteele@/language_explorer'
JPHARVEST_DB_URL = 'postgresql://esteele@/jpharvest'
WALS_DB_URL = 'postgresql://esteele@/wals2013'
SIL_RCEM_TSV_SOURCE = '/home/esteele/lex_data_bundle/iso-639-3_Retirements.tab'
CENSUS_CSV_SOURCE = '/home/esteele/lex_data_bundle/census_2011_LANP_ENGLP.csv'
CACHE_ROOT = "/home/esteele/lex_data_bundle/cache"
TEST_CACHE_ROOT = CACHE_ROOT # For the moment
<commit_msg>Use staging for creating a static copy, so refer to in-place assets, not deployed assets<commit_after># Prod-like, but with resources in different locations
# Data sources
LANGUAGE_EXPLORER_DB_URL = 'postgresql://esteele@/language_explorer'
JPHARVEST_DB_URL = 'postgresql://esteele@/jpharvest'
WALS_DB_URL = 'postgresql://esteele@/wals2013'
SIL_RCEM_TSV_SOURCE = '/home/esteele/lex_data_bundle/iso-639-3_Retirements.tab'
CENSUS_CSV_SOURCE = '/Users/esteele/Code/language_explorer/data/census_2011_LANP_ENGLP.csv'
CACHE_ROOT = "/home/esteele/lex_data_bundle/cache"
TEST_CACHE_ROOT = CACHE_ROOT # For the moment
|
db8524c1085c16552e548dc7c702f80747804814
|
unittesting/helpers/view_test_case.py
|
unittesting/helpers/view_test_case.py
|
import sublime
from unittest import TestCase
class ViewTestCase(TestCase):
def setUp(self):
self.view = sublime.active_window().new_file()
settings = self.view.settings()
default_settings = getattr(self.__class__, 'view_settings', {})
for key, value in default_settings.items():
settings.set(key, value)
def tearDown(self):
if self.view:
self.view.set_scratch(True)
self.view.window().focus_view(self.view)
self.view.window().run_command("close_file")
def _viewContents(self):
return self.view.substr(sublime.Region(0, self.view.size()))
def assertViewContentsEqual(self, text):
self.assertEqual(self._viewContents(), text)
|
import sublime
from unittest import TestCase
class ViewTestCase(TestCase):
def setUp(self):
self.view = sublime.active_window().new_file()
settings = self.view.settings()
default_settings = getattr(self.__class__, 'view_settings', {})
for key, value in default_settings.items():
settings.set(key, value)
def tearDown(self):
if self.view:
self.view.set_scratch(True)
self.view.close()
def _viewContents(self):
return self.view.substr(sublime.Region(0, self.view.size()))
def assertViewContentsEqual(self, text):
self.assertEqual(self._viewContents(), text)
|
Use view.close() to close view.
|
Use view.close() to close view.
|
Python
|
mit
|
randy3k/UnitTesting,randy3k/UnitTesting,randy3k/UnitTesting,randy3k/UnitTesting
|
import sublime
from unittest import TestCase
class ViewTestCase(TestCase):
def setUp(self):
self.view = sublime.active_window().new_file()
settings = self.view.settings()
default_settings = getattr(self.__class__, 'view_settings', {})
for key, value in default_settings.items():
settings.set(key, value)
def tearDown(self):
if self.view:
self.view.set_scratch(True)
self.view.window().focus_view(self.view)
self.view.window().run_command("close_file")
def _viewContents(self):
return self.view.substr(sublime.Region(0, self.view.size()))
def assertViewContentsEqual(self, text):
self.assertEqual(self._viewContents(), text)
Use view.close() to close view.
|
import sublime
from unittest import TestCase
class ViewTestCase(TestCase):
def setUp(self):
self.view = sublime.active_window().new_file()
settings = self.view.settings()
default_settings = getattr(self.__class__, 'view_settings', {})
for key, value in default_settings.items():
settings.set(key, value)
def tearDown(self):
if self.view:
self.view.set_scratch(True)
self.view.close()
def _viewContents(self):
return self.view.substr(sublime.Region(0, self.view.size()))
def assertViewContentsEqual(self, text):
self.assertEqual(self._viewContents(), text)
|
<commit_before>import sublime
from unittest import TestCase
class ViewTestCase(TestCase):
def setUp(self):
self.view = sublime.active_window().new_file()
settings = self.view.settings()
default_settings = getattr(self.__class__, 'view_settings', {})
for key, value in default_settings.items():
settings.set(key, value)
def tearDown(self):
if self.view:
self.view.set_scratch(True)
self.view.window().focus_view(self.view)
self.view.window().run_command("close_file")
def _viewContents(self):
return self.view.substr(sublime.Region(0, self.view.size()))
def assertViewContentsEqual(self, text):
self.assertEqual(self._viewContents(), text)
<commit_msg>Use view.close() to close view.<commit_after>
|
import sublime
from unittest import TestCase
class ViewTestCase(TestCase):
def setUp(self):
self.view = sublime.active_window().new_file()
settings = self.view.settings()
default_settings = getattr(self.__class__, 'view_settings', {})
for key, value in default_settings.items():
settings.set(key, value)
def tearDown(self):
if self.view:
self.view.set_scratch(True)
self.view.close()
def _viewContents(self):
return self.view.substr(sublime.Region(0, self.view.size()))
def assertViewContentsEqual(self, text):
self.assertEqual(self._viewContents(), text)
|
import sublime
from unittest import TestCase
class ViewTestCase(TestCase):
def setUp(self):
self.view = sublime.active_window().new_file()
settings = self.view.settings()
default_settings = getattr(self.__class__, 'view_settings', {})
for key, value in default_settings.items():
settings.set(key, value)
def tearDown(self):
if self.view:
self.view.set_scratch(True)
self.view.window().focus_view(self.view)
self.view.window().run_command("close_file")
def _viewContents(self):
return self.view.substr(sublime.Region(0, self.view.size()))
def assertViewContentsEqual(self, text):
self.assertEqual(self._viewContents(), text)
Use view.close() to close view.import sublime
from unittest import TestCase
class ViewTestCase(TestCase):
def setUp(self):
self.view = sublime.active_window().new_file()
settings = self.view.settings()
default_settings = getattr(self.__class__, 'view_settings', {})
for key, value in default_settings.items():
settings.set(key, value)
def tearDown(self):
if self.view:
self.view.set_scratch(True)
self.view.close()
def _viewContents(self):
return self.view.substr(sublime.Region(0, self.view.size()))
def assertViewContentsEqual(self, text):
self.assertEqual(self._viewContents(), text)
|
<commit_before>import sublime
from unittest import TestCase
class ViewTestCase(TestCase):
def setUp(self):
self.view = sublime.active_window().new_file()
settings = self.view.settings()
default_settings = getattr(self.__class__, 'view_settings', {})
for key, value in default_settings.items():
settings.set(key, value)
def tearDown(self):
if self.view:
self.view.set_scratch(True)
self.view.window().focus_view(self.view)
self.view.window().run_command("close_file")
def _viewContents(self):
return self.view.substr(sublime.Region(0, self.view.size()))
def assertViewContentsEqual(self, text):
self.assertEqual(self._viewContents(), text)
<commit_msg>Use view.close() to close view.<commit_after>import sublime
from unittest import TestCase
class ViewTestCase(TestCase):
def setUp(self):
self.view = sublime.active_window().new_file()
settings = self.view.settings()
default_settings = getattr(self.__class__, 'view_settings', {})
for key, value in default_settings.items():
settings.set(key, value)
def tearDown(self):
if self.view:
self.view.set_scratch(True)
self.view.close()
def _viewContents(self):
return self.view.substr(sublime.Region(0, self.view.size()))
def assertViewContentsEqual(self, text):
self.assertEqual(self._viewContents(), text)
|
c23acde7428d968016af760afe9624c138fc3074
|
test/library/gyptest-shared-obj-install-path.py
|
test/library/gyptest-shared-obj-install-path.py
|
#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('shared_dependency.gyp',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
with open('relocate/src/Makefile') as makefile:
make_contents = makefile.read()
# If we remove the code to generate lib1, Make should still be able
# to build lib2 since lib1.so already exists.
make_contents = make_contents.replace('include lib1.target.mk', '')
with open('relocate/src/Makefile', 'w') as makefile:
makefile.write(make_contents)
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
test.pass_test()
|
#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
# Python 2.5 needs this for the with statement.
from __future__ import with_statement
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('shared_dependency.gyp',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
with open('relocate/src/Makefile') as makefile:
make_contents = makefile.read()
# If we remove the code to generate lib1, Make should still be able
# to build lib2 since lib1.so already exists.
make_contents = make_contents.replace('include lib1.target.mk', '')
with open('relocate/src/Makefile', 'w') as makefile:
makefile.write(make_contents)
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
test.pass_test()
|
Add with_statement import for python2.5.
|
Add with_statement import for python2.5.
See http://www.python.org/dev/peps/pep-0343/ which describes
the with statement.
Review URL: http://codereview.chromium.org/5690003
|
Python
|
bsd-3-clause
|
csulmone/gyp,csulmone/gyp,csulmone/gyp,csulmone/gyp
|
#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('shared_dependency.gyp',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
with open('relocate/src/Makefile') as makefile:
make_contents = makefile.read()
# If we remove the code to generate lib1, Make should still be able
# to build lib2 since lib1.so already exists.
make_contents = make_contents.replace('include lib1.target.mk', '')
with open('relocate/src/Makefile', 'w') as makefile:
makefile.write(make_contents)
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
test.pass_test()
Add with_statement import for python2.5.
See http://www.python.org/dev/peps/pep-0343/ which describes
the with statement.
Review URL: http://codereview.chromium.org/5690003
|
#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
# Python 2.5 needs this for the with statement.
from __future__ import with_statement
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('shared_dependency.gyp',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
with open('relocate/src/Makefile') as makefile:
make_contents = makefile.read()
# If we remove the code to generate lib1, Make should still be able
# to build lib2 since lib1.so already exists.
make_contents = make_contents.replace('include lib1.target.mk', '')
with open('relocate/src/Makefile', 'w') as makefile:
makefile.write(make_contents)
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
test.pass_test()
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('shared_dependency.gyp',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
with open('relocate/src/Makefile') as makefile:
make_contents = makefile.read()
# If we remove the code to generate lib1, Make should still be able
# to build lib2 since lib1.so already exists.
make_contents = make_contents.replace('include lib1.target.mk', '')
with open('relocate/src/Makefile', 'w') as makefile:
makefile.write(make_contents)
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
test.pass_test()
<commit_msg>Add with_statement import for python2.5.
See http://www.python.org/dev/peps/pep-0343/ which describes
the with statement.
Review URL: http://codereview.chromium.org/5690003<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
# Python 2.5 needs this for the with statement.
from __future__ import with_statement
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('shared_dependency.gyp',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
with open('relocate/src/Makefile') as makefile:
make_contents = makefile.read()
# If we remove the code to generate lib1, Make should still be able
# to build lib2 since lib1.so already exists.
make_contents = make_contents.replace('include lib1.target.mk', '')
with open('relocate/src/Makefile', 'w') as makefile:
makefile.write(make_contents)
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
test.pass_test()
|
#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('shared_dependency.gyp',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
with open('relocate/src/Makefile') as makefile:
make_contents = makefile.read()
# If we remove the code to generate lib1, Make should still be able
# to build lib2 since lib1.so already exists.
make_contents = make_contents.replace('include lib1.target.mk', '')
with open('relocate/src/Makefile', 'w') as makefile:
makefile.write(make_contents)
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
test.pass_test()
Add with_statement import for python2.5.
See http://www.python.org/dev/peps/pep-0343/ which describes
the with statement.
Review URL: http://codereview.chromium.org/5690003#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
# Python 2.5 needs this for the with statement.
from __future__ import with_statement
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('shared_dependency.gyp',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
with open('relocate/src/Makefile') as makefile:
make_contents = makefile.read()
# If we remove the code to generate lib1, Make should still be able
# to build lib2 since lib1.so already exists.
make_contents = make_contents.replace('include lib1.target.mk', '')
with open('relocate/src/Makefile', 'w') as makefile:
makefile.write(make_contents)
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
test.pass_test()
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('shared_dependency.gyp',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
with open('relocate/src/Makefile') as makefile:
make_contents = makefile.read()
# If we remove the code to generate lib1, Make should still be able
# to build lib2 since lib1.so already exists.
make_contents = make_contents.replace('include lib1.target.mk', '')
with open('relocate/src/Makefile', 'w') as makefile:
makefile.write(make_contents)
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
test.pass_test()
<commit_msg>Add with_statement import for python2.5.
See http://www.python.org/dev/peps/pep-0343/ which describes
the with statement.
Review URL: http://codereview.chromium.org/5690003<commit_after>#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .so files that are order only dependencies are specified by
their install location rather than by their alias.
"""
# Python 2.5 needs this for the with statement.
from __future__ import with_statement
import os
import TestGyp
test = TestGyp.TestGyp(formats=['make'])
test.run_gyp('shared_dependency.gyp',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
with open('relocate/src/Makefile') as makefile:
make_contents = makefile.read()
# If we remove the code to generate lib1, Make should still be able
# to build lib2 since lib1.so already exists.
make_contents = make_contents.replace('include lib1.target.mk', '')
with open('relocate/src/Makefile', 'w') as makefile:
makefile.write(make_contents)
test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
test.pass_test()
|
5a4317a22f84355de98a09bba408bfba6d895507
|
examples/g/modulegen.py
|
examples/g/modulegen.py
|
#! /usr/bin/env python
import sys
import pybindgen
from pybindgen import ReturnValue, Parameter, Module, Function, FileCodeSink
def my_module_gen(out_file):
mod = Module('g')
mod.add_include('"g.h"')
mod.add_function('GDoA', None, [])
G = mod.add_cpp_namespace("G")
G.add_function('GDoB', None, [])
GInner = G.add_cpp_namespace("GInner")
GInner.add_function('GDoC', None, [])
mod.generate(FileCodeSink(out_file))
if __name__ == '__main__':
my_module_gen(sys.stdout)
|
#! /usr/bin/env python
import sys
import pybindgen
from pybindgen import ReturnValue, Parameter, Module, Function, FileCodeSink
def my_module_gen(out_file):
mod = Module('g')
mod.add_include('"g.h"')
mod.add_function('GDoA', None, [])
G = mod.add_cpp_namespace("G")
G.add_function('GDoB', None, [])
GInner = G.add_cpp_namespace("GInner")
GInner.add_function('GDoC', None, [])
G.add_include('<fstream>')
ofstream = G.add_class('ofstream', foreign_cpp_namespace='::std')
ofstream.add_enum('openmode', [
('app', 'std::ios_base::app'),
('ate', 'std::ios_base::ate'),
('binary', 'std::ios_base::binary'),
('in', 'std::ios_base::in'),
('out', 'std::ios_base::out'),
('trunc', 'std::ios_base::trunc'),
])
ofstream.add_constructor([Parameter.new("const char *", 'filename'),
Parameter.new("::std::ofstream::openmode", 'mode', default_value="std::ios_base::out")])
ofstream.add_method('close', None, [])
mod.generate(FileCodeSink(out_file))
if __name__ == '__main__':
my_module_gen(sys.stdout)
|
Add wrapping of std::ofstream to the example
|
Add wrapping of std::ofstream to the example
|
Python
|
lgpl-2.1
|
gjcarneiro/pybindgen,gjcarneiro/pybindgen,cawka/pybindgen-old,cawka/pybindgen-old,ftalbrecht/pybindgen,cawka/pybindgen-old,ftalbrecht/pybindgen,ftalbrecht/pybindgen,gjcarneiro/pybindgen,gjcarneiro/pybindgen,ftalbrecht/pybindgen,cawka/pybindgen-old
|
#! /usr/bin/env python
import sys
import pybindgen
from pybindgen import ReturnValue, Parameter, Module, Function, FileCodeSink
def my_module_gen(out_file):
mod = Module('g')
mod.add_include('"g.h"')
mod.add_function('GDoA', None, [])
G = mod.add_cpp_namespace("G")
G.add_function('GDoB', None, [])
GInner = G.add_cpp_namespace("GInner")
GInner.add_function('GDoC', None, [])
mod.generate(FileCodeSink(out_file))
if __name__ == '__main__':
my_module_gen(sys.stdout)
Add wrapping of std::ofstream to the example
|
#! /usr/bin/env python
import sys
import pybindgen
from pybindgen import ReturnValue, Parameter, Module, Function, FileCodeSink
def my_module_gen(out_file):
mod = Module('g')
mod.add_include('"g.h"')
mod.add_function('GDoA', None, [])
G = mod.add_cpp_namespace("G")
G.add_function('GDoB', None, [])
GInner = G.add_cpp_namespace("GInner")
GInner.add_function('GDoC', None, [])
G.add_include('<fstream>')
ofstream = G.add_class('ofstream', foreign_cpp_namespace='::std')
ofstream.add_enum('openmode', [
('app', 'std::ios_base::app'),
('ate', 'std::ios_base::ate'),
('binary', 'std::ios_base::binary'),
('in', 'std::ios_base::in'),
('out', 'std::ios_base::out'),
('trunc', 'std::ios_base::trunc'),
])
ofstream.add_constructor([Parameter.new("const char *", 'filename'),
Parameter.new("::std::ofstream::openmode", 'mode', default_value="std::ios_base::out")])
ofstream.add_method('close', None, [])
mod.generate(FileCodeSink(out_file))
if __name__ == '__main__':
my_module_gen(sys.stdout)
|
<commit_before>#! /usr/bin/env python
import sys
import pybindgen
from pybindgen import ReturnValue, Parameter, Module, Function, FileCodeSink
def my_module_gen(out_file):
mod = Module('g')
mod.add_include('"g.h"')
mod.add_function('GDoA', None, [])
G = mod.add_cpp_namespace("G")
G.add_function('GDoB', None, [])
GInner = G.add_cpp_namespace("GInner")
GInner.add_function('GDoC', None, [])
mod.generate(FileCodeSink(out_file))
if __name__ == '__main__':
my_module_gen(sys.stdout)
<commit_msg>Add wrapping of std::ofstream to the example<commit_after>
|
#! /usr/bin/env python
import sys
import pybindgen
from pybindgen import ReturnValue, Parameter, Module, Function, FileCodeSink
def my_module_gen(out_file):
mod = Module('g')
mod.add_include('"g.h"')
mod.add_function('GDoA', None, [])
G = mod.add_cpp_namespace("G")
G.add_function('GDoB', None, [])
GInner = G.add_cpp_namespace("GInner")
GInner.add_function('GDoC', None, [])
G.add_include('<fstream>')
ofstream = G.add_class('ofstream', foreign_cpp_namespace='::std')
ofstream.add_enum('openmode', [
('app', 'std::ios_base::app'),
('ate', 'std::ios_base::ate'),
('binary', 'std::ios_base::binary'),
('in', 'std::ios_base::in'),
('out', 'std::ios_base::out'),
('trunc', 'std::ios_base::trunc'),
])
ofstream.add_constructor([Parameter.new("const char *", 'filename'),
Parameter.new("::std::ofstream::openmode", 'mode', default_value="std::ios_base::out")])
ofstream.add_method('close', None, [])
mod.generate(FileCodeSink(out_file))
if __name__ == '__main__':
my_module_gen(sys.stdout)
|
#! /usr/bin/env python
import sys
import pybindgen
from pybindgen import ReturnValue, Parameter, Module, Function, FileCodeSink
def my_module_gen(out_file):
mod = Module('g')
mod.add_include('"g.h"')
mod.add_function('GDoA', None, [])
G = mod.add_cpp_namespace("G")
G.add_function('GDoB', None, [])
GInner = G.add_cpp_namespace("GInner")
GInner.add_function('GDoC', None, [])
mod.generate(FileCodeSink(out_file))
if __name__ == '__main__':
my_module_gen(sys.stdout)
Add wrapping of std::ofstream to the example#! /usr/bin/env python
import sys
import pybindgen
from pybindgen import ReturnValue, Parameter, Module, Function, FileCodeSink
def my_module_gen(out_file):
mod = Module('g')
mod.add_include('"g.h"')
mod.add_function('GDoA', None, [])
G = mod.add_cpp_namespace("G")
G.add_function('GDoB', None, [])
GInner = G.add_cpp_namespace("GInner")
GInner.add_function('GDoC', None, [])
G.add_include('<fstream>')
ofstream = G.add_class('ofstream', foreign_cpp_namespace='::std')
ofstream.add_enum('openmode', [
('app', 'std::ios_base::app'),
('ate', 'std::ios_base::ate'),
('binary', 'std::ios_base::binary'),
('in', 'std::ios_base::in'),
('out', 'std::ios_base::out'),
('trunc', 'std::ios_base::trunc'),
])
ofstream.add_constructor([Parameter.new("const char *", 'filename'),
Parameter.new("::std::ofstream::openmode", 'mode', default_value="std::ios_base::out")])
ofstream.add_method('close', None, [])
mod.generate(FileCodeSink(out_file))
if __name__ == '__main__':
my_module_gen(sys.stdout)
|
<commit_before>#! /usr/bin/env python
import sys
import pybindgen
from pybindgen import ReturnValue, Parameter, Module, Function, FileCodeSink
def my_module_gen(out_file):
mod = Module('g')
mod.add_include('"g.h"')
mod.add_function('GDoA', None, [])
G = mod.add_cpp_namespace("G")
G.add_function('GDoB', None, [])
GInner = G.add_cpp_namespace("GInner")
GInner.add_function('GDoC', None, [])
mod.generate(FileCodeSink(out_file))
if __name__ == '__main__':
my_module_gen(sys.stdout)
<commit_msg>Add wrapping of std::ofstream to the example<commit_after>#! /usr/bin/env python
import sys
import pybindgen
from pybindgen import ReturnValue, Parameter, Module, Function, FileCodeSink
def my_module_gen(out_file):
mod = Module('g')
mod.add_include('"g.h"')
mod.add_function('GDoA', None, [])
G = mod.add_cpp_namespace("G")
G.add_function('GDoB', None, [])
GInner = G.add_cpp_namespace("GInner")
GInner.add_function('GDoC', None, [])
G.add_include('<fstream>')
ofstream = G.add_class('ofstream', foreign_cpp_namespace='::std')
ofstream.add_enum('openmode', [
('app', 'std::ios_base::app'),
('ate', 'std::ios_base::ate'),
('binary', 'std::ios_base::binary'),
('in', 'std::ios_base::in'),
('out', 'std::ios_base::out'),
('trunc', 'std::ios_base::trunc'),
])
ofstream.add_constructor([Parameter.new("const char *", 'filename'),
Parameter.new("::std::ofstream::openmode", 'mode', default_value="std::ios_base::out")])
ofstream.add_method('close', None, [])
mod.generate(FileCodeSink(out_file))
if __name__ == '__main__':
my_module_gen(sys.stdout)
|
88210804900c48a895c6ed90ae20dd08dc32e162
|
alfred_listener/views.py
|
alfred_listener/views.py
|
from flask import Blueprint, request, json
from alfred_db.models import Repository, Commit
from .database import db
from .helpers import parse_hook_data
webhooks = Blueprint('webhooks', __name__)
@webhooks.route('/', methods=['POST'])
def handler():
payload = request.form.get('payload', '')
try:
payload_data = json.loads(payload)
except ValueError:
return 'Bad request', 400
hook_data = parse_hook_data(payload_data)
repository = db.session.query(Repository).filter_by(
name=hook_data['repo_name'], user=hook_data['repo_user']
).first()
if repository is None:
repository = Repository(
name=hook_data['repo_name'],
user=hook_data['repo_user'],
url=hook_data['repo_url']
)
db.session.add(repository)
db.session.commit()
commit = db.session.query(Commit).filter_by(
hash=hook_data['hash'], repository_id=repository.id
).first()
if commit is None:
commit = Commit(
repository_id=repository.id,
hash=hook_data['hash'],
ref=hook_data['ref'],
compare_url=hook_data['compare_url'],
committer_name=hook_data['committer_name'],
committer_email=hook_data['committer_email'],
message=hook_data['message']
)
db.session.add(commit)
db.session.commit()
return 'OK'
|
from flask import Blueprint, request, json
from alfred_db.models import Repository, Commit
from .database import db
from .helpers import parse_hook_data
webhooks = Blueprint('webhooks', __name__)
@webhooks.route('/', methods=['POST'])
def handler():
payload = request.form.get('payload')
try:
payload_data = json.loads(payload)
except (ValueError, TypeError):
return 'Bad request', 400
hook_data = parse_hook_data(payload_data)
repository = db.session.query(Repository).filter_by(
name=hook_data['repo_name'], user=hook_data['repo_user']
).first()
if repository is None:
repository = Repository(
name=hook_data['repo_name'],
user=hook_data['repo_user'],
url=hook_data['repo_url']
)
db.session.add(repository)
db.session.commit()
commit = db.session.query(Commit).filter_by(
hash=hook_data['hash'], repository_id=repository.id
).first()
if commit is None:
commit = Commit(
repository_id=repository.id,
hash=hook_data['hash'],
ref=hook_data['ref'],
compare_url=hook_data['compare_url'],
committer_name=hook_data['committer_name'],
committer_email=hook_data['committer_email'],
message=hook_data['message']
)
db.session.add(commit)
db.session.commit()
return 'OK'
|
Improve loading of payload from json
|
Improve loading of payload from json
|
Python
|
isc
|
alfredhq/alfred-listener
|
from flask import Blueprint, request, json
from alfred_db.models import Repository, Commit
from .database import db
from .helpers import parse_hook_data
webhooks = Blueprint('webhooks', __name__)
@webhooks.route('/', methods=['POST'])
def handler():
payload = request.form.get('payload', '')
try:
payload_data = json.loads(payload)
except ValueError:
return 'Bad request', 400
hook_data = parse_hook_data(payload_data)
repository = db.session.query(Repository).filter_by(
name=hook_data['repo_name'], user=hook_data['repo_user']
).first()
if repository is None:
repository = Repository(
name=hook_data['repo_name'],
user=hook_data['repo_user'],
url=hook_data['repo_url']
)
db.session.add(repository)
db.session.commit()
commit = db.session.query(Commit).filter_by(
hash=hook_data['hash'], repository_id=repository.id
).first()
if commit is None:
commit = Commit(
repository_id=repository.id,
hash=hook_data['hash'],
ref=hook_data['ref'],
compare_url=hook_data['compare_url'],
committer_name=hook_data['committer_name'],
committer_email=hook_data['committer_email'],
message=hook_data['message']
)
db.session.add(commit)
db.session.commit()
return 'OK'
Improve loading of payload from json
|
from flask import Blueprint, request, json
from alfred_db.models import Repository, Commit
from .database import db
from .helpers import parse_hook_data
webhooks = Blueprint('webhooks', __name__)
@webhooks.route('/', methods=['POST'])
def handler():
payload = request.form.get('payload')
try:
payload_data = json.loads(payload)
except (ValueError, TypeError):
return 'Bad request', 400
hook_data = parse_hook_data(payload_data)
repository = db.session.query(Repository).filter_by(
name=hook_data['repo_name'], user=hook_data['repo_user']
).first()
if repository is None:
repository = Repository(
name=hook_data['repo_name'],
user=hook_data['repo_user'],
url=hook_data['repo_url']
)
db.session.add(repository)
db.session.commit()
commit = db.session.query(Commit).filter_by(
hash=hook_data['hash'], repository_id=repository.id
).first()
if commit is None:
commit = Commit(
repository_id=repository.id,
hash=hook_data['hash'],
ref=hook_data['ref'],
compare_url=hook_data['compare_url'],
committer_name=hook_data['committer_name'],
committer_email=hook_data['committer_email'],
message=hook_data['message']
)
db.session.add(commit)
db.session.commit()
return 'OK'
|
<commit_before>from flask import Blueprint, request, json
from alfred_db.models import Repository, Commit
from .database import db
from .helpers import parse_hook_data
webhooks = Blueprint('webhooks', __name__)
@webhooks.route('/', methods=['POST'])
def handler():
payload = request.form.get('payload', '')
try:
payload_data = json.loads(payload)
except ValueError:
return 'Bad request', 400
hook_data = parse_hook_data(payload_data)
repository = db.session.query(Repository).filter_by(
name=hook_data['repo_name'], user=hook_data['repo_user']
).first()
if repository is None:
repository = Repository(
name=hook_data['repo_name'],
user=hook_data['repo_user'],
url=hook_data['repo_url']
)
db.session.add(repository)
db.session.commit()
commit = db.session.query(Commit).filter_by(
hash=hook_data['hash'], repository_id=repository.id
).first()
if commit is None:
commit = Commit(
repository_id=repository.id,
hash=hook_data['hash'],
ref=hook_data['ref'],
compare_url=hook_data['compare_url'],
committer_name=hook_data['committer_name'],
committer_email=hook_data['committer_email'],
message=hook_data['message']
)
db.session.add(commit)
db.session.commit()
return 'OK'
<commit_msg>Improve loading of payload from json<commit_after>
|
from flask import Blueprint, request, json
from alfred_db.models import Repository, Commit
from .database import db
from .helpers import parse_hook_data
webhooks = Blueprint('webhooks', __name__)
@webhooks.route('/', methods=['POST'])
def handler():
payload = request.form.get('payload')
try:
payload_data = json.loads(payload)
except (ValueError, TypeError):
return 'Bad request', 400
hook_data = parse_hook_data(payload_data)
repository = db.session.query(Repository).filter_by(
name=hook_data['repo_name'], user=hook_data['repo_user']
).first()
if repository is None:
repository = Repository(
name=hook_data['repo_name'],
user=hook_data['repo_user'],
url=hook_data['repo_url']
)
db.session.add(repository)
db.session.commit()
commit = db.session.query(Commit).filter_by(
hash=hook_data['hash'], repository_id=repository.id
).first()
if commit is None:
commit = Commit(
repository_id=repository.id,
hash=hook_data['hash'],
ref=hook_data['ref'],
compare_url=hook_data['compare_url'],
committer_name=hook_data['committer_name'],
committer_email=hook_data['committer_email'],
message=hook_data['message']
)
db.session.add(commit)
db.session.commit()
return 'OK'
|
from flask import Blueprint, request, json
from alfred_db.models import Repository, Commit
from .database import db
from .helpers import parse_hook_data
webhooks = Blueprint('webhooks', __name__)
@webhooks.route('/', methods=['POST'])
def handler():
payload = request.form.get('payload', '')
try:
payload_data = json.loads(payload)
except ValueError:
return 'Bad request', 400
hook_data = parse_hook_data(payload_data)
repository = db.session.query(Repository).filter_by(
name=hook_data['repo_name'], user=hook_data['repo_user']
).first()
if repository is None:
repository = Repository(
name=hook_data['repo_name'],
user=hook_data['repo_user'],
url=hook_data['repo_url']
)
db.session.add(repository)
db.session.commit()
commit = db.session.query(Commit).filter_by(
hash=hook_data['hash'], repository_id=repository.id
).first()
if commit is None:
commit = Commit(
repository_id=repository.id,
hash=hook_data['hash'],
ref=hook_data['ref'],
compare_url=hook_data['compare_url'],
committer_name=hook_data['committer_name'],
committer_email=hook_data['committer_email'],
message=hook_data['message']
)
db.session.add(commit)
db.session.commit()
return 'OK'
Improve loading of payload from jsonfrom flask import Blueprint, request, json
from alfred_db.models import Repository, Commit
from .database import db
from .helpers import parse_hook_data
webhooks = Blueprint('webhooks', __name__)
@webhooks.route('/', methods=['POST'])
def handler():
payload = request.form.get('payload')
try:
payload_data = json.loads(payload)
except (ValueError, TypeError):
return 'Bad request', 400
hook_data = parse_hook_data(payload_data)
repository = db.session.query(Repository).filter_by(
name=hook_data['repo_name'], user=hook_data['repo_user']
).first()
if repository is None:
repository = Repository(
name=hook_data['repo_name'],
user=hook_data['repo_user'],
url=hook_data['repo_url']
)
db.session.add(repository)
db.session.commit()
commit = db.session.query(Commit).filter_by(
hash=hook_data['hash'], repository_id=repository.id
).first()
if commit is None:
commit = Commit(
repository_id=repository.id,
hash=hook_data['hash'],
ref=hook_data['ref'],
compare_url=hook_data['compare_url'],
committer_name=hook_data['committer_name'],
committer_email=hook_data['committer_email'],
message=hook_data['message']
)
db.session.add(commit)
db.session.commit()
return 'OK'
|
<commit_before>from flask import Blueprint, request, json
from alfred_db.models import Repository, Commit
from .database import db
from .helpers import parse_hook_data
webhooks = Blueprint('webhooks', __name__)
@webhooks.route('/', methods=['POST'])
def handler():
payload = request.form.get('payload', '')
try:
payload_data = json.loads(payload)
except ValueError:
return 'Bad request', 400
hook_data = parse_hook_data(payload_data)
repository = db.session.query(Repository).filter_by(
name=hook_data['repo_name'], user=hook_data['repo_user']
).first()
if repository is None:
repository = Repository(
name=hook_data['repo_name'],
user=hook_data['repo_user'],
url=hook_data['repo_url']
)
db.session.add(repository)
db.session.commit()
commit = db.session.query(Commit).filter_by(
hash=hook_data['hash'], repository_id=repository.id
).first()
if commit is None:
commit = Commit(
repository_id=repository.id,
hash=hook_data['hash'],
ref=hook_data['ref'],
compare_url=hook_data['compare_url'],
committer_name=hook_data['committer_name'],
committer_email=hook_data['committer_email'],
message=hook_data['message']
)
db.session.add(commit)
db.session.commit()
return 'OK'
<commit_msg>Improve loading of payload from json<commit_after>from flask import Blueprint, request, json
from alfred_db.models import Repository, Commit
from .database import db
from .helpers import parse_hook_data
webhooks = Blueprint('webhooks', __name__)
@webhooks.route('/', methods=['POST'])
def handler():
payload = request.form.get('payload')
try:
payload_data = json.loads(payload)
except (ValueError, TypeError):
return 'Bad request', 400
hook_data = parse_hook_data(payload_data)
repository = db.session.query(Repository).filter_by(
name=hook_data['repo_name'], user=hook_data['repo_user']
).first()
if repository is None:
repository = Repository(
name=hook_data['repo_name'],
user=hook_data['repo_user'],
url=hook_data['repo_url']
)
db.session.add(repository)
db.session.commit()
commit = db.session.query(Commit).filter_by(
hash=hook_data['hash'], repository_id=repository.id
).first()
if commit is None:
commit = Commit(
repository_id=repository.id,
hash=hook_data['hash'],
ref=hook_data['ref'],
compare_url=hook_data['compare_url'],
committer_name=hook_data['committer_name'],
committer_email=hook_data['committer_email'],
message=hook_data['message']
)
db.session.add(commit)
db.session.commit()
return 'OK'
|
c86c32453e241543317509495357e05c73b57047
|
django_tenant_templates/middleware.py
|
django_tenant_templates/middleware.py
|
"""
Middleware!
"""
from django_tenant_templates import local
class TenantMiddleware(object):
"""Middleware for enabling tenant-aware template loading."""
slug_property_name = 'tenant_slug'
def process_request(self, request):
local.tenant_slug = getattr(request, self.slug_property_name, None)
|
"""
Middleware!
"""
from django_tenant_templates import local
class TenantMiddleware(object):
"""Middleware for enabling tenant-aware template loading."""
slug_property_name = 'tenant_slug'
def process_request(self, request):
local.tenant_slug = getattr(request, self.slug_property_name, None)
def process_exception(self, request, exception):
try:
del local.tenant_slug
except AttributeError:
pass
|
Remove the thread local on exceptions
|
Remove the thread local on exceptions
|
Python
|
mit
|
grampajoe/django-tenant-templates
|
"""
Middleware!
"""
from django_tenant_templates import local
class TenantMiddleware(object):
"""Middleware for enabling tenant-aware template loading."""
slug_property_name = 'tenant_slug'
def process_request(self, request):
local.tenant_slug = getattr(request, self.slug_property_name, None)
Remove the thread local on exceptions
|
"""
Middleware!
"""
from django_tenant_templates import local
class TenantMiddleware(object):
"""Middleware for enabling tenant-aware template loading."""
slug_property_name = 'tenant_slug'
def process_request(self, request):
local.tenant_slug = getattr(request, self.slug_property_name, None)
def process_exception(self, request, exception):
try:
del local.tenant_slug
except AttributeError:
pass
|
<commit_before>"""
Middleware!
"""
from django_tenant_templates import local
class TenantMiddleware(object):
"""Middleware for enabling tenant-aware template loading."""
slug_property_name = 'tenant_slug'
def process_request(self, request):
local.tenant_slug = getattr(request, self.slug_property_name, None)
<commit_msg>Remove the thread local on exceptions<commit_after>
|
"""
Middleware!
"""
from django_tenant_templates import local
class TenantMiddleware(object):
"""Middleware for enabling tenant-aware template loading."""
slug_property_name = 'tenant_slug'
def process_request(self, request):
local.tenant_slug = getattr(request, self.slug_property_name, None)
def process_exception(self, request, exception):
try:
del local.tenant_slug
except AttributeError:
pass
|
"""
Middleware!
"""
from django_tenant_templates import local
class TenantMiddleware(object):
"""Middleware for enabling tenant-aware template loading."""
slug_property_name = 'tenant_slug'
def process_request(self, request):
local.tenant_slug = getattr(request, self.slug_property_name, None)
Remove the thread local on exceptions"""
Middleware!
"""
from django_tenant_templates import local
class TenantMiddleware(object):
"""Middleware for enabling tenant-aware template loading."""
slug_property_name = 'tenant_slug'
def process_request(self, request):
local.tenant_slug = getattr(request, self.slug_property_name, None)
def process_exception(self, request, exception):
try:
del local.tenant_slug
except AttributeError:
pass
|
<commit_before>"""
Middleware!
"""
from django_tenant_templates import local
class TenantMiddleware(object):
"""Middleware for enabling tenant-aware template loading."""
slug_property_name = 'tenant_slug'
def process_request(self, request):
local.tenant_slug = getattr(request, self.slug_property_name, None)
<commit_msg>Remove the thread local on exceptions<commit_after>"""
Middleware!
"""
from django_tenant_templates import local
class TenantMiddleware(object):
"""Middleware for enabling tenant-aware template loading."""
slug_property_name = 'tenant_slug'
def process_request(self, request):
local.tenant_slug = getattr(request, self.slug_property_name, None)
def process_exception(self, request, exception):
try:
del local.tenant_slug
except AttributeError:
pass
|
cd5e52c8e1d481c8e1bf1e7a71b0c421e53c93c9
|
featureflow/__init__.py
|
featureflow/__init__.py
|
__version__ = '1.16.14'
from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature, ZipWrapper, iter_zip
from data import \
IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder
from lmdbstore import LmdbDatabase
from objectstore import ObjectStoreDatabase
from persistence import PersistenceSettings
from iteratornode import IteratorNode
try:
from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
BaseNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
pass
|
__version__ = '1.16.14'
from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature, ZipWrapper, iter_zip
from data import \
IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder
from lmdbstore import LmdbDatabase
from objectstore import ObjectStoreDatabase
from persistence import PersistenceSettings
from iteratornode import IteratorNode
from eventlog import EventLog, RedisChannel
try:
from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
BaseNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
pass
|
Add EventLog stuff to package-level exports
|
Add EventLog stuff to package-level exports
|
Python
|
mit
|
JohnVinyard/featureflow,JohnVinyard/featureflow
|
__version__ = '1.16.14'
from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature, ZipWrapper, iter_zip
from data import \
IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder
from lmdbstore import LmdbDatabase
from objectstore import ObjectStoreDatabase
from persistence import PersistenceSettings
from iteratornode import IteratorNode
try:
from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
BaseNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
pass
Add EventLog stuff to package-level exports
|
__version__ = '1.16.14'
from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature, ZipWrapper, iter_zip
from data import \
IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder
from lmdbstore import LmdbDatabase
from objectstore import ObjectStoreDatabase
from persistence import PersistenceSettings
from iteratornode import IteratorNode
from eventlog import EventLog, RedisChannel
try:
from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
BaseNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
pass
|
<commit_before>__version__ = '1.16.14'
from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature, ZipWrapper, iter_zip
from data import \
IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder
from lmdbstore import LmdbDatabase
from objectstore import ObjectStoreDatabase
from persistence import PersistenceSettings
from iteratornode import IteratorNode
try:
from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
BaseNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
pass
<commit_msg>Add EventLog stuff to package-level exports<commit_after>
|
__version__ = '1.16.14'
from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature, ZipWrapper, iter_zip
from data import \
IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder
from lmdbstore import LmdbDatabase
from objectstore import ObjectStoreDatabase
from persistence import PersistenceSettings
from iteratornode import IteratorNode
from eventlog import EventLog, RedisChannel
try:
from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
BaseNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
pass
|
__version__ = '1.16.14'
from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature, ZipWrapper, iter_zip
from data import \
IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder
from lmdbstore import LmdbDatabase
from objectstore import ObjectStoreDatabase
from persistence import PersistenceSettings
from iteratornode import IteratorNode
try:
from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
BaseNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
pass
Add EventLog stuff to package-level exports__version__ = '1.16.14'
from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature, ZipWrapper, iter_zip
from data import \
IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder
from lmdbstore import LmdbDatabase
from objectstore import ObjectStoreDatabase
from persistence import PersistenceSettings
from iteratornode import IteratorNode
from eventlog import EventLog, RedisChannel
try:
from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
BaseNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
pass
|
<commit_before>__version__ = '1.16.14'
from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature, ZipWrapper, iter_zip
from data import \
IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder
from lmdbstore import LmdbDatabase
from objectstore import ObjectStoreDatabase
from persistence import PersistenceSettings
from iteratornode import IteratorNode
try:
from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
BaseNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
pass
<commit_msg>Add EventLog stuff to package-level exports<commit_after>__version__ = '1.16.14'
from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature, ZipWrapper, iter_zip
from data import \
IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder
from lmdbstore import LmdbDatabase
from objectstore import ObjectStoreDatabase
from persistence import PersistenceSettings
from iteratornode import IteratorNode
from eventlog import EventLog, RedisChannel
try:
from nmpy import NumpyEncoder, PackedNumpyEncoder, StreamingNumpyDecoder, \
BaseNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
pass
|
e56e50cdecafd9de67255afe4567bc6c41cf2474
|
skyfield/tests/test_against_horizons.py
|
skyfield/tests/test_against_horizons.py
|
"""Tests against HORIZONS numbers."""
from skyfield import api
# see the top-level project ./horizons/ directory for where the
# following numbers come from; soon, we should automate the fetching of
# such numbers and their injection into test cases, as we do for results
# from NOVAS.
"""
Date__(UT)__HR:MN hEcl-Lon hEcl-Lat r rdot
********************************************************************
$$SOE
1980-Jan-01 00:00 151.3229 1.0130 5.378949180806 0.4314383
$$EOE
"""
def test_ecliptic_latlon():
astrometric = api.sun(utc=(1980, 1, 1)).observe(api.jupiter)
lat, lon, distance = astrometric.ecliptic_latlon()
assert '{:.4f}'.format(lat.degrees) == '1.0130'
assert '{:.4f}'.format(lon.degrees) == '151.3227'
# That last value should really be '151.3227' according to HORIZONS
# but we are just getting started here so the tiny difference is
# being filed away as something to look at later!
|
"""Tests against HORIZONS numbers."""
from skyfield import api
# see the top-level project ./horizons/ directory for where the
# following numbers come from; soon, we should automate the fetching of
# such numbers and their injection into test cases, as we do for results
# from NOVAS.
"""
Date__(UT)__HR:MN hEcl-Lon hEcl-Lat r rdot
********************************************************************
$$SOE
1980-Jan-01 00:00 151.3229 1.0130 5.378949180806 0.4314383
$$EOE
"""
def test_ecliptic_latlon():
astrometric = api.sun(utc=(1980, 1, 1)).observe(api.jupiter)
lat, lon, distance = astrometric.ecliptic_latlon()
assert '{0:.4f}'.format(lat.degrees) == '1.0130'
assert '{0:.4f}'.format(lon.degrees) == '151.3227'
# That last value should really be '151.3227' according to HORIZONS
# but we are just getting started here so the tiny difference is
# being filed away as something to look at later!
|
Fix .format() patterns in test for Python 2.6
|
Fix .format() patterns in test for Python 2.6
|
Python
|
mit
|
GuidoBR/python-skyfield,ozialien/python-skyfield,skyfielders/python-skyfield,skyfielders/python-skyfield,exoanalytic/python-skyfield,GuidoBR/python-skyfield,ozialien/python-skyfield,exoanalytic/python-skyfield
|
"""Tests against HORIZONS numbers."""
from skyfield import api
# see the top-level project ./horizons/ directory for where the
# following numbers come from; soon, we should automate the fetching of
# such numbers and their injection into test cases, as we do for results
# from NOVAS.
"""
Date__(UT)__HR:MN hEcl-Lon hEcl-Lat r rdot
********************************************************************
$$SOE
1980-Jan-01 00:00 151.3229 1.0130 5.378949180806 0.4314383
$$EOE
"""
def test_ecliptic_latlon():
astrometric = api.sun(utc=(1980, 1, 1)).observe(api.jupiter)
lat, lon, distance = astrometric.ecliptic_latlon()
assert '{:.4f}'.format(lat.degrees) == '1.0130'
assert '{:.4f}'.format(lon.degrees) == '151.3227'
# That last value should really be '151.3227' according to HORIZONS
# but we are just getting started here so the tiny difference is
# being filed away as something to look at later!
Fix .format() patterns in test for Python 2.6
|
"""Tests against HORIZONS numbers."""
from skyfield import api
# see the top-level project ./horizons/ directory for where the
# following numbers come from; soon, we should automate the fetching of
# such numbers and their injection into test cases, as we do for results
# from NOVAS.
"""
Date__(UT)__HR:MN hEcl-Lon hEcl-Lat r rdot
********************************************************************
$$SOE
1980-Jan-01 00:00 151.3229 1.0130 5.378949180806 0.4314383
$$EOE
"""
def test_ecliptic_latlon():
astrometric = api.sun(utc=(1980, 1, 1)).observe(api.jupiter)
lat, lon, distance = astrometric.ecliptic_latlon()
assert '{0:.4f}'.format(lat.degrees) == '1.0130'
assert '{0:.4f}'.format(lon.degrees) == '151.3227'
# That last value should really be '151.3227' according to HORIZONS
# but we are just getting started here so the tiny difference is
# being filed away as something to look at later!
|
<commit_before>"""Tests against HORIZONS numbers."""
from skyfield import api
# see the top-level project ./horizons/ directory for where the
# following numbers come from; soon, we should automate the fetching of
# such numbers and their injection into test cases, as we do for results
# from NOVAS.
"""
Date__(UT)__HR:MN hEcl-Lon hEcl-Lat r rdot
********************************************************************
$$SOE
1980-Jan-01 00:00 151.3229 1.0130 5.378949180806 0.4314383
$$EOE
"""
def test_ecliptic_latlon():
astrometric = api.sun(utc=(1980, 1, 1)).observe(api.jupiter)
lat, lon, distance = astrometric.ecliptic_latlon()
assert '{:.4f}'.format(lat.degrees) == '1.0130'
assert '{:.4f}'.format(lon.degrees) == '151.3227'
# That last value should really be '151.3227' according to HORIZONS
# but we are just getting started here so the tiny difference is
# being filed away as something to look at later!
<commit_msg>Fix .format() patterns in test for Python 2.6<commit_after>
|
"""Tests against HORIZONS numbers."""
from skyfield import api
# see the top-level project ./horizons/ directory for where the
# following numbers come from; soon, we should automate the fetching of
# such numbers and their injection into test cases, as we do for results
# from NOVAS.
"""
Date__(UT)__HR:MN hEcl-Lon hEcl-Lat r rdot
********************************************************************
$$SOE
1980-Jan-01 00:00 151.3229 1.0130 5.378949180806 0.4314383
$$EOE
"""
def test_ecliptic_latlon():
astrometric = api.sun(utc=(1980, 1, 1)).observe(api.jupiter)
lat, lon, distance = astrometric.ecliptic_latlon()
assert '{0:.4f}'.format(lat.degrees) == '1.0130'
assert '{0:.4f}'.format(lon.degrees) == '151.3227'
# That last value should really be '151.3227' according to HORIZONS
# but we are just getting started here so the tiny difference is
# being filed away as something to look at later!
|
"""Tests against HORIZONS numbers."""
from skyfield import api
# see the top-level project ./horizons/ directory for where the
# following numbers come from; soon, we should automate the fetching of
# such numbers and their injection into test cases, as we do for results
# from NOVAS.
"""
Date__(UT)__HR:MN hEcl-Lon hEcl-Lat r rdot
********************************************************************
$$SOE
1980-Jan-01 00:00 151.3229 1.0130 5.378949180806 0.4314383
$$EOE
"""
def test_ecliptic_latlon():
astrometric = api.sun(utc=(1980, 1, 1)).observe(api.jupiter)
lat, lon, distance = astrometric.ecliptic_latlon()
assert '{:.4f}'.format(lat.degrees) == '1.0130'
assert '{:.4f}'.format(lon.degrees) == '151.3227'
# That last value should really be '151.3227' according to HORIZONS
# but we are just getting started here so the tiny difference is
# being filed away as something to look at later!
Fix .format() patterns in test for Python 2.6"""Tests against HORIZONS numbers."""
from skyfield import api
# see the top-level project ./horizons/ directory for where the
# following numbers come from; soon, we should automate the fetching of
# such numbers and their injection into test cases, as we do for results
# from NOVAS.
"""
Date__(UT)__HR:MN hEcl-Lon hEcl-Lat r rdot
********************************************************************
$$SOE
1980-Jan-01 00:00 151.3229 1.0130 5.378949180806 0.4314383
$$EOE
"""
def test_ecliptic_latlon():
astrometric = api.sun(utc=(1980, 1, 1)).observe(api.jupiter)
lat, lon, distance = astrometric.ecliptic_latlon()
assert '{0:.4f}'.format(lat.degrees) == '1.0130'
assert '{0:.4f}'.format(lon.degrees) == '151.3227'
# That last value should really be '151.3227' according to HORIZONS
# but we are just getting started here so the tiny difference is
# being filed away as something to look at later!
|
<commit_before>"""Tests against HORIZONS numbers."""
from skyfield import api
# see the top-level project ./horizons/ directory for where the
# following numbers come from; soon, we should automate the fetching of
# such numbers and their injection into test cases, as we do for results
# from NOVAS.
"""
Date__(UT)__HR:MN hEcl-Lon hEcl-Lat r rdot
********************************************************************
$$SOE
1980-Jan-01 00:00 151.3229 1.0130 5.378949180806 0.4314383
$$EOE
"""
def test_ecliptic_latlon():
astrometric = api.sun(utc=(1980, 1, 1)).observe(api.jupiter)
lat, lon, distance = astrometric.ecliptic_latlon()
assert '{:.4f}'.format(lat.degrees) == '1.0130'
assert '{:.4f}'.format(lon.degrees) == '151.3227'
# That last value should really be '151.3227' according to HORIZONS
# but we are just getting started here so the tiny difference is
# being filed away as something to look at later!
<commit_msg>Fix .format() patterns in test for Python 2.6<commit_after>"""Tests against HORIZONS numbers."""
from skyfield import api
# see the top-level project ./horizons/ directory for where the
# following numbers come from; soon, we should automate the fetching of
# such numbers and their injection into test cases, as we do for results
# from NOVAS.
"""
Date__(UT)__HR:MN hEcl-Lon hEcl-Lat r rdot
********************************************************************
$$SOE
1980-Jan-01 00:00 151.3229 1.0130 5.378949180806 0.4314383
$$EOE
"""
def test_ecliptic_latlon():
astrometric = api.sun(utc=(1980, 1, 1)).observe(api.jupiter)
lat, lon, distance = astrometric.ecliptic_latlon()
assert '{0:.4f}'.format(lat.degrees) == '1.0130'
assert '{0:.4f}'.format(lon.degrees) == '151.3227'
# That last value should really be '151.3227' according to HORIZONS
# but we are just getting started here so the tiny difference is
# being filed away as something to look at later!
|
fca7ad2068dfec30ad210964234957b46e6436bc
|
tests/test_client.py
|
tests/test_client.py
|
import unittest
from bluesnap.client import Client
class ClientTestCase(unittest.TestCase):
DUMMY_CREDENTIALS = {
'username': 'username',
'password': 'password',
'default_store_id': '1',
'seller_id': '1',
}
def setUp(self):
self.client = Client(env='live', **self.DUMMY_CREDENTIALS)
def test_env(self):
self.assertEqual(Client.ENDPOINTS.keys(), ['live', 'sandbox'])
for env, endpoint_url in Client.ENDPOINTS.iteritems():
client = Client(env=env, **self.DUMMY_CREDENTIALS)
self.assertEqual(client.endpoint_url, endpoint_url)
|
import unittest
from bluesnap.client import Client
class ClientTestCase(unittest.TestCase):
DUMMY_CREDENTIALS = {
'username': 'username',
'password': 'password',
'default_store_id': '1',
'seller_id': '1',
'default_currency': 'GBP'
}
def setUp(self):
self.client = Client(env='live', **self.DUMMY_CREDENTIALS)
def test_env(self):
self.assertEqual(Client.ENDPOINTS.keys(), ['live', 'sandbox'])
for env, endpoint_url in Client.ENDPOINTS.iteritems():
client = Client(env=env, **self.DUMMY_CREDENTIALS)
self.assertEqual(client.endpoint_url, endpoint_url)
|
Send default_currency in Client init on client test
|
Send default_currency in Client init on client test
|
Python
|
mit
|
kowito/bluesnap-python,kowito/bluesnap-python,justyoyo/bluesnap-python,justyoyo/bluesnap-python
|
import unittest
from bluesnap.client import Client
class ClientTestCase(unittest.TestCase):
DUMMY_CREDENTIALS = {
'username': 'username',
'password': 'password',
'default_store_id': '1',
'seller_id': '1',
}
def setUp(self):
self.client = Client(env='live', **self.DUMMY_CREDENTIALS)
def test_env(self):
self.assertEqual(Client.ENDPOINTS.keys(), ['live', 'sandbox'])
for env, endpoint_url in Client.ENDPOINTS.iteritems():
client = Client(env=env, **self.DUMMY_CREDENTIALS)
self.assertEqual(client.endpoint_url, endpoint_url)
Send default_currency in Client init on client test
|
import unittest
from bluesnap.client import Client
class ClientTestCase(unittest.TestCase):
DUMMY_CREDENTIALS = {
'username': 'username',
'password': 'password',
'default_store_id': '1',
'seller_id': '1',
'default_currency': 'GBP'
}
def setUp(self):
self.client = Client(env='live', **self.DUMMY_CREDENTIALS)
def test_env(self):
self.assertEqual(Client.ENDPOINTS.keys(), ['live', 'sandbox'])
for env, endpoint_url in Client.ENDPOINTS.iteritems():
client = Client(env=env, **self.DUMMY_CREDENTIALS)
self.assertEqual(client.endpoint_url, endpoint_url)
|
<commit_before>import unittest
from bluesnap.client import Client
class ClientTestCase(unittest.TestCase):
DUMMY_CREDENTIALS = {
'username': 'username',
'password': 'password',
'default_store_id': '1',
'seller_id': '1',
}
def setUp(self):
self.client = Client(env='live', **self.DUMMY_CREDENTIALS)
def test_env(self):
self.assertEqual(Client.ENDPOINTS.keys(), ['live', 'sandbox'])
for env, endpoint_url in Client.ENDPOINTS.iteritems():
client = Client(env=env, **self.DUMMY_CREDENTIALS)
self.assertEqual(client.endpoint_url, endpoint_url)
<commit_msg>Send default_currency in Client init on client test<commit_after>
|
import unittest
from bluesnap.client import Client
class ClientTestCase(unittest.TestCase):
DUMMY_CREDENTIALS = {
'username': 'username',
'password': 'password',
'default_store_id': '1',
'seller_id': '1',
'default_currency': 'GBP'
}
def setUp(self):
self.client = Client(env='live', **self.DUMMY_CREDENTIALS)
def test_env(self):
self.assertEqual(Client.ENDPOINTS.keys(), ['live', 'sandbox'])
for env, endpoint_url in Client.ENDPOINTS.iteritems():
client = Client(env=env, **self.DUMMY_CREDENTIALS)
self.assertEqual(client.endpoint_url, endpoint_url)
|
import unittest
from bluesnap.client import Client
class ClientTestCase(unittest.TestCase):
DUMMY_CREDENTIALS = {
'username': 'username',
'password': 'password',
'default_store_id': '1',
'seller_id': '1',
}
def setUp(self):
self.client = Client(env='live', **self.DUMMY_CREDENTIALS)
def test_env(self):
self.assertEqual(Client.ENDPOINTS.keys(), ['live', 'sandbox'])
for env, endpoint_url in Client.ENDPOINTS.iteritems():
client = Client(env=env, **self.DUMMY_CREDENTIALS)
self.assertEqual(client.endpoint_url, endpoint_url)
Send default_currency in Client init on client testimport unittest
from bluesnap.client import Client
class ClientTestCase(unittest.TestCase):
DUMMY_CREDENTIALS = {
'username': 'username',
'password': 'password',
'default_store_id': '1',
'seller_id': '1',
'default_currency': 'GBP'
}
def setUp(self):
self.client = Client(env='live', **self.DUMMY_CREDENTIALS)
def test_env(self):
self.assertEqual(Client.ENDPOINTS.keys(), ['live', 'sandbox'])
for env, endpoint_url in Client.ENDPOINTS.iteritems():
client = Client(env=env, **self.DUMMY_CREDENTIALS)
self.assertEqual(client.endpoint_url, endpoint_url)
|
<commit_before>import unittest
from bluesnap.client import Client
class ClientTestCase(unittest.TestCase):
DUMMY_CREDENTIALS = {
'username': 'username',
'password': 'password',
'default_store_id': '1',
'seller_id': '1',
}
def setUp(self):
self.client = Client(env='live', **self.DUMMY_CREDENTIALS)
def test_env(self):
self.assertEqual(Client.ENDPOINTS.keys(), ['live', 'sandbox'])
for env, endpoint_url in Client.ENDPOINTS.iteritems():
client = Client(env=env, **self.DUMMY_CREDENTIALS)
self.assertEqual(client.endpoint_url, endpoint_url)
<commit_msg>Send default_currency in Client init on client test<commit_after>import unittest
from bluesnap.client import Client
class ClientTestCase(unittest.TestCase):
DUMMY_CREDENTIALS = {
'username': 'username',
'password': 'password',
'default_store_id': '1',
'seller_id': '1',
'default_currency': 'GBP'
}
def setUp(self):
self.client = Client(env='live', **self.DUMMY_CREDENTIALS)
def test_env(self):
self.assertEqual(Client.ENDPOINTS.keys(), ['live', 'sandbox'])
for env, endpoint_url in Client.ENDPOINTS.iteritems():
client = Client(env=env, **self.DUMMY_CREDENTIALS)
self.assertEqual(client.endpoint_url, endpoint_url)
|
c83e0134104d4ee6de9a3e5b7d0e34be2a684daa
|
tests/test_shared.py
|
tests/test_shared.py
|
# -*- coding: utf-8 -*-
from flask.ext.testing import TestCase
import os
import tempfile
import shutil
import websmash
class ModelTestCase(TestCase):
def create_app(self):
self.app = websmash.app
self.dl = websmash.dl
self.app.config['TESTING'] = True
self.app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite://"
websmash.mail.suppress = True
return self.app
def setUp(self):
self.db = websmash.db
self.db.create_all()
def tearDown(self):
self.db.session.remove()
self.db.drop_all()
class WebsmashTestCase(ModelTestCase):
def create_app(self):
return super(WebsmashTestCase, self).create_app()
def setUp(self):
super(WebsmashTestCase, self).setUp()
self.tmpdir = tempfile.mkdtemp()
(fd, self.tmp_name) = tempfile.mkstemp(dir=self.tmpdir, suffix='.fa')
self.tmp_file = os.fdopen(fd, 'w+b')
self.tmp_file.write('>test\nATGACCGAGAGTACATAG\n')
self.app.config['RESULTS_PATH'] = self.tmpdir
def tearDown(self):
super(WebsmashTestCase, self).tearDown()
self.tmp_file.close()
shutil.rmtree(self.tmpdir)
|
# -*- coding: utf-8 -*-
from flask.ext.testing import TestCase
import os
import tempfile
import shutil
import websmash
class ModelTestCase(TestCase):
def create_app(self):
self.app = websmash.app
self.dl = websmash.dl
self.app.config['TESTING'] = True
self.app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite://"
websmash.mail.suppress = True
return self.app
def setUp(self):
self.db = websmash.db
self.db.create_all()
def tearDown(self):
self.db.session.remove()
self.db.drop_all()
class WebsmashTestCase(ModelTestCase):
def create_app(self):
return super(WebsmashTestCase, self).create_app()
def setUp(self):
super(WebsmashTestCase, self).setUp()
self.tmpdir = tempfile.mkdtemp()
(fd, self.tmp_name) = tempfile.mkstemp(dir=self.tmpdir, suffix='.fa')
tmp_file = os.fdopen(fd, 'w+b')
tmp_file.write('>test\nATGACCGAGAGTACATAG\n')
tmp_file.close()
self.tmp_file = open(self.tmp_name, 'r')
self.app.config['RESULTS_PATH'] = self.tmpdir
def tearDown(self):
super(WebsmashTestCase, self).tearDown()
self.tmp_file.close()
shutil.rmtree(self.tmpdir)
|
Fix the temp file initialization
|
tests: Fix the temp file initialization
Signed-off-by: Kai Blin <94ddc6985b47aef772521e302594241f46a8f665@biotech.uni-tuebingen.de>
|
Python
|
agpl-3.0
|
antismash/ps-web,antismash/ps-web,antismash/websmash,antismash/ps-web
|
# -*- coding: utf-8 -*-
from flask.ext.testing import TestCase
import os
import tempfile
import shutil
import websmash
class ModelTestCase(TestCase):
def create_app(self):
self.app = websmash.app
self.dl = websmash.dl
self.app.config['TESTING'] = True
self.app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite://"
websmash.mail.suppress = True
return self.app
def setUp(self):
self.db = websmash.db
self.db.create_all()
def tearDown(self):
self.db.session.remove()
self.db.drop_all()
class WebsmashTestCase(ModelTestCase):
def create_app(self):
return super(WebsmashTestCase, self).create_app()
def setUp(self):
super(WebsmashTestCase, self).setUp()
self.tmpdir = tempfile.mkdtemp()
(fd, self.tmp_name) = tempfile.mkstemp(dir=self.tmpdir, suffix='.fa')
self.tmp_file = os.fdopen(fd, 'w+b')
self.tmp_file.write('>test\nATGACCGAGAGTACATAG\n')
self.app.config['RESULTS_PATH'] = self.tmpdir
def tearDown(self):
super(WebsmashTestCase, self).tearDown()
self.tmp_file.close()
shutil.rmtree(self.tmpdir)
tests: Fix the temp file initialization
Signed-off-by: Kai Blin <94ddc6985b47aef772521e302594241f46a8f665@biotech.uni-tuebingen.de>
|
# -*- coding: utf-8 -*-
from flask.ext.testing import TestCase
import os
import tempfile
import shutil
import websmash
class ModelTestCase(TestCase):
def create_app(self):
self.app = websmash.app
self.dl = websmash.dl
self.app.config['TESTING'] = True
self.app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite://"
websmash.mail.suppress = True
return self.app
def setUp(self):
self.db = websmash.db
self.db.create_all()
def tearDown(self):
self.db.session.remove()
self.db.drop_all()
class WebsmashTestCase(ModelTestCase):
def create_app(self):
return super(WebsmashTestCase, self).create_app()
def setUp(self):
super(WebsmashTestCase, self).setUp()
self.tmpdir = tempfile.mkdtemp()
(fd, self.tmp_name) = tempfile.mkstemp(dir=self.tmpdir, suffix='.fa')
tmp_file = os.fdopen(fd, 'w+b')
tmp_file.write('>test\nATGACCGAGAGTACATAG\n')
tmp_file.close()
self.tmp_file = open(self.tmp_name, 'r')
self.app.config['RESULTS_PATH'] = self.tmpdir
def tearDown(self):
super(WebsmashTestCase, self).tearDown()
self.tmp_file.close()
shutil.rmtree(self.tmpdir)
|
<commit_before># -*- coding: utf-8 -*-
from flask.ext.testing import TestCase
import os
import tempfile
import shutil
import websmash
class ModelTestCase(TestCase):
def create_app(self):
self.app = websmash.app
self.dl = websmash.dl
self.app.config['TESTING'] = True
self.app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite://"
websmash.mail.suppress = True
return self.app
def setUp(self):
self.db = websmash.db
self.db.create_all()
def tearDown(self):
self.db.session.remove()
self.db.drop_all()
class WebsmashTestCase(ModelTestCase):
def create_app(self):
return super(WebsmashTestCase, self).create_app()
def setUp(self):
super(WebsmashTestCase, self).setUp()
self.tmpdir = tempfile.mkdtemp()
(fd, self.tmp_name) = tempfile.mkstemp(dir=self.tmpdir, suffix='.fa')
self.tmp_file = os.fdopen(fd, 'w+b')
self.tmp_file.write('>test\nATGACCGAGAGTACATAG\n')
self.app.config['RESULTS_PATH'] = self.tmpdir
def tearDown(self):
super(WebsmashTestCase, self).tearDown()
self.tmp_file.close()
shutil.rmtree(self.tmpdir)
<commit_msg>tests: Fix the temp file initialization
Signed-off-by: Kai Blin <94ddc6985b47aef772521e302594241f46a8f665@biotech.uni-tuebingen.de><commit_after>
|
# -*- coding: utf-8 -*-
from flask.ext.testing import TestCase
import os
import tempfile
import shutil
import websmash
class ModelTestCase(TestCase):
def create_app(self):
self.app = websmash.app
self.dl = websmash.dl
self.app.config['TESTING'] = True
self.app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite://"
websmash.mail.suppress = True
return self.app
def setUp(self):
self.db = websmash.db
self.db.create_all()
def tearDown(self):
self.db.session.remove()
self.db.drop_all()
class WebsmashTestCase(ModelTestCase):
def create_app(self):
return super(WebsmashTestCase, self).create_app()
def setUp(self):
super(WebsmashTestCase, self).setUp()
self.tmpdir = tempfile.mkdtemp()
(fd, self.tmp_name) = tempfile.mkstemp(dir=self.tmpdir, suffix='.fa')
tmp_file = os.fdopen(fd, 'w+b')
tmp_file.write('>test\nATGACCGAGAGTACATAG\n')
tmp_file.close()
self.tmp_file = open(self.tmp_name, 'r')
self.app.config['RESULTS_PATH'] = self.tmpdir
def tearDown(self):
super(WebsmashTestCase, self).tearDown()
self.tmp_file.close()
shutil.rmtree(self.tmpdir)
|
# -*- coding: utf-8 -*-
from flask.ext.testing import TestCase
import os
import tempfile
import shutil
import websmash
class ModelTestCase(TestCase):
def create_app(self):
self.app = websmash.app
self.dl = websmash.dl
self.app.config['TESTING'] = True
self.app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite://"
websmash.mail.suppress = True
return self.app
def setUp(self):
self.db = websmash.db
self.db.create_all()
def tearDown(self):
self.db.session.remove()
self.db.drop_all()
class WebsmashTestCase(ModelTestCase):
def create_app(self):
return super(WebsmashTestCase, self).create_app()
def setUp(self):
super(WebsmashTestCase, self).setUp()
self.tmpdir = tempfile.mkdtemp()
(fd, self.tmp_name) = tempfile.mkstemp(dir=self.tmpdir, suffix='.fa')
self.tmp_file = os.fdopen(fd, 'w+b')
self.tmp_file.write('>test\nATGACCGAGAGTACATAG\n')
self.app.config['RESULTS_PATH'] = self.tmpdir
def tearDown(self):
super(WebsmashTestCase, self).tearDown()
self.tmp_file.close()
shutil.rmtree(self.tmpdir)
tests: Fix the temp file initialization
Signed-off-by: Kai Blin <94ddc6985b47aef772521e302594241f46a8f665@biotech.uni-tuebingen.de># -*- coding: utf-8 -*-
from flask.ext.testing import TestCase
import os
import tempfile
import shutil
import websmash
class ModelTestCase(TestCase):
def create_app(self):
self.app = websmash.app
self.dl = websmash.dl
self.app.config['TESTING'] = True
self.app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite://"
websmash.mail.suppress = True
return self.app
def setUp(self):
self.db = websmash.db
self.db.create_all()
def tearDown(self):
self.db.session.remove()
self.db.drop_all()
class WebsmashTestCase(ModelTestCase):
def create_app(self):
return super(WebsmashTestCase, self).create_app()
def setUp(self):
super(WebsmashTestCase, self).setUp()
self.tmpdir = tempfile.mkdtemp()
(fd, self.tmp_name) = tempfile.mkstemp(dir=self.tmpdir, suffix='.fa')
tmp_file = os.fdopen(fd, 'w+b')
tmp_file.write('>test\nATGACCGAGAGTACATAG\n')
tmp_file.close()
self.tmp_file = open(self.tmp_name, 'r')
self.app.config['RESULTS_PATH'] = self.tmpdir
def tearDown(self):
super(WebsmashTestCase, self).tearDown()
self.tmp_file.close()
shutil.rmtree(self.tmpdir)
|
<commit_before># -*- coding: utf-8 -*-
from flask.ext.testing import TestCase
import os
import tempfile
import shutil
import websmash
class ModelTestCase(TestCase):
def create_app(self):
self.app = websmash.app
self.dl = websmash.dl
self.app.config['TESTING'] = True
self.app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite://"
websmash.mail.suppress = True
return self.app
def setUp(self):
self.db = websmash.db
self.db.create_all()
def tearDown(self):
self.db.session.remove()
self.db.drop_all()
class WebsmashTestCase(ModelTestCase):
def create_app(self):
return super(WebsmashTestCase, self).create_app()
def setUp(self):
super(WebsmashTestCase, self).setUp()
self.tmpdir = tempfile.mkdtemp()
(fd, self.tmp_name) = tempfile.mkstemp(dir=self.tmpdir, suffix='.fa')
self.tmp_file = os.fdopen(fd, 'w+b')
self.tmp_file.write('>test\nATGACCGAGAGTACATAG\n')
self.app.config['RESULTS_PATH'] = self.tmpdir
def tearDown(self):
super(WebsmashTestCase, self).tearDown()
self.tmp_file.close()
shutil.rmtree(self.tmpdir)
<commit_msg>tests: Fix the temp file initialization
Signed-off-by: Kai Blin <94ddc6985b47aef772521e302594241f46a8f665@biotech.uni-tuebingen.de><commit_after># -*- coding: utf-8 -*-
from flask.ext.testing import TestCase
import os
import tempfile
import shutil
import websmash
class ModelTestCase(TestCase):
def create_app(self):
self.app = websmash.app
self.dl = websmash.dl
self.app.config['TESTING'] = True
self.app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite://"
websmash.mail.suppress = True
return self.app
def setUp(self):
self.db = websmash.db
self.db.create_all()
def tearDown(self):
self.db.session.remove()
self.db.drop_all()
class WebsmashTestCase(ModelTestCase):
def create_app(self):
return super(WebsmashTestCase, self).create_app()
def setUp(self):
super(WebsmashTestCase, self).setUp()
self.tmpdir = tempfile.mkdtemp()
(fd, self.tmp_name) = tempfile.mkstemp(dir=self.tmpdir, suffix='.fa')
tmp_file = os.fdopen(fd, 'w+b')
tmp_file.write('>test\nATGACCGAGAGTACATAG\n')
tmp_file.close()
self.tmp_file = open(self.tmp_name, 'r')
self.app.config['RESULTS_PATH'] = self.tmpdir
def tearDown(self):
super(WebsmashTestCase, self).tearDown()
self.tmp_file.close()
shutil.rmtree(self.tmpdir)
|
184d31de904fad249c618766c715fef94ed4f369
|
tools/upload_pending_delete.py
|
tools/upload_pending_delete.py
|
#!/usr/bin/env python
import sys
import os
import random
import urllib
import urllib2
import re
# POST_URL = 'http://localhost:8000/domains/'
POST_URL = 'http://scoretool.appspot.com/domains/'
TOP_LEVEL_DOMAINS = 'com net org'.split()
NAMES_PER_REQUEST = 200
def upload(filename):
date, tld, ext = os.path.basename(filename).split('.')
names = []
for line in open(filename):
names.extend(line.split())
while names:
data = {
'names': ' '.join(names[:NAMES_PER_REQUEST]),
'com_expiration': '',
'net_expiration': '',
'org_expiration': '',
'submit_names': 'submit'}
data['%s_expiration' % tld] = date
print data
response = urllib2.urlopen(POST_URL, urllib.urlencode(data))
if len(names) > NAMES_PER_REQUEST:
names = names[NAMES_PER_REQUEST:]
else:
break
if __name__ == '__main__':
for filename in sys.argv[1:]:
upload(filename)
|
#!/usr/bin/env python
import sys
import os
import random
import urllib
import urllib2
import re
# POST_URL = 'http://localhost:8000/domains/'
POST_URL = 'http://scoretool.appspot.com/domains/'
TOP_LEVEL_DOMAINS = 'com net org'.split()
NAMES_PER_REQUEST = 200
def upload(filename):
date, tld, ext = os.path.basename(filename).split('.', 2)
names = []
for line in open(filename):
names.extend(line.split())
while names:
data = {
'names': ' '.join(names[:NAMES_PER_REQUEST]),
'com_expiration': '',
'net_expiration': '',
'org_expiration': '',
'submit_names': 'submit'}
data['%s_expiration' % tld] = date
print data
response = urllib2.urlopen(POST_URL, urllib.urlencode(data))
if len(names) > NAMES_PER_REQUEST:
names = names[NAMES_PER_REQUEST:]
else:
break
if __name__ == '__main__':
for filename in sys.argv[1:]:
upload(filename)
|
Split filename only twice (allow ext to contain dots).
|
Split filename only twice (allow ext to contain dots).
|
Python
|
mit
|
jcrocholl/nxdom,jcrocholl/nxdom
|
#!/usr/bin/env python
import sys
import os
import random
import urllib
import urllib2
import re
# POST_URL = 'http://localhost:8000/domains/'
POST_URL = 'http://scoretool.appspot.com/domains/'
TOP_LEVEL_DOMAINS = 'com net org'.split()
NAMES_PER_REQUEST = 200
def upload(filename):
date, tld, ext = os.path.basename(filename).split('.')
names = []
for line in open(filename):
names.extend(line.split())
while names:
data = {
'names': ' '.join(names[:NAMES_PER_REQUEST]),
'com_expiration': '',
'net_expiration': '',
'org_expiration': '',
'submit_names': 'submit'}
data['%s_expiration' % tld] = date
print data
response = urllib2.urlopen(POST_URL, urllib.urlencode(data))
if len(names) > NAMES_PER_REQUEST:
names = names[NAMES_PER_REQUEST:]
else:
break
if __name__ == '__main__':
for filename in sys.argv[1:]:
upload(filename)
Split filename only twice (allow ext to contain dots).
|
#!/usr/bin/env python
import sys
import os
import random
import urllib
import urllib2
import re
# POST_URL = 'http://localhost:8000/domains/'
POST_URL = 'http://scoretool.appspot.com/domains/'
TOP_LEVEL_DOMAINS = 'com net org'.split()
NAMES_PER_REQUEST = 200
def upload(filename):
date, tld, ext = os.path.basename(filename).split('.', 2)
names = []
for line in open(filename):
names.extend(line.split())
while names:
data = {
'names': ' '.join(names[:NAMES_PER_REQUEST]),
'com_expiration': '',
'net_expiration': '',
'org_expiration': '',
'submit_names': 'submit'}
data['%s_expiration' % tld] = date
print data
response = urllib2.urlopen(POST_URL, urllib.urlencode(data))
if len(names) > NAMES_PER_REQUEST:
names = names[NAMES_PER_REQUEST:]
else:
break
if __name__ == '__main__':
for filename in sys.argv[1:]:
upload(filename)
|
<commit_before>#!/usr/bin/env python
import sys
import os
import random
import urllib
import urllib2
import re
# POST_URL = 'http://localhost:8000/domains/'
POST_URL = 'http://scoretool.appspot.com/domains/'
TOP_LEVEL_DOMAINS = 'com net org'.split()
NAMES_PER_REQUEST = 200
def upload(filename):
date, tld, ext = os.path.basename(filename).split('.')
names = []
for line in open(filename):
names.extend(line.split())
while names:
data = {
'names': ' '.join(names[:NAMES_PER_REQUEST]),
'com_expiration': '',
'net_expiration': '',
'org_expiration': '',
'submit_names': 'submit'}
data['%s_expiration' % tld] = date
print data
response = urllib2.urlopen(POST_URL, urllib.urlencode(data))
if len(names) > NAMES_PER_REQUEST:
names = names[NAMES_PER_REQUEST:]
else:
break
if __name__ == '__main__':
for filename in sys.argv[1:]:
upload(filename)
<commit_msg>Split filename only twice (allow ext to contain dots).<commit_after>
|
#!/usr/bin/env python
import sys
import os
import random
import urllib
import urllib2
import re
# POST_URL = 'http://localhost:8000/domains/'
POST_URL = 'http://scoretool.appspot.com/domains/'
TOP_LEVEL_DOMAINS = 'com net org'.split()
NAMES_PER_REQUEST = 200
def upload(filename):
date, tld, ext = os.path.basename(filename).split('.', 2)
names = []
for line in open(filename):
names.extend(line.split())
while names:
data = {
'names': ' '.join(names[:NAMES_PER_REQUEST]),
'com_expiration': '',
'net_expiration': '',
'org_expiration': '',
'submit_names': 'submit'}
data['%s_expiration' % tld] = date
print data
response = urllib2.urlopen(POST_URL, urllib.urlencode(data))
if len(names) > NAMES_PER_REQUEST:
names = names[NAMES_PER_REQUEST:]
else:
break
if __name__ == '__main__':
for filename in sys.argv[1:]:
upload(filename)
|
#!/usr/bin/env python
import sys
import os
import random
import urllib
import urllib2
import re
# POST_URL = 'http://localhost:8000/domains/'
POST_URL = 'http://scoretool.appspot.com/domains/'
TOP_LEVEL_DOMAINS = 'com net org'.split()
NAMES_PER_REQUEST = 200
def upload(filename):
date, tld, ext = os.path.basename(filename).split('.')
names = []
for line in open(filename):
names.extend(line.split())
while names:
data = {
'names': ' '.join(names[:NAMES_PER_REQUEST]),
'com_expiration': '',
'net_expiration': '',
'org_expiration': '',
'submit_names': 'submit'}
data['%s_expiration' % tld] = date
print data
response = urllib2.urlopen(POST_URL, urllib.urlencode(data))
if len(names) > NAMES_PER_REQUEST:
names = names[NAMES_PER_REQUEST:]
else:
break
if __name__ == '__main__':
for filename in sys.argv[1:]:
upload(filename)
Split filename only twice (allow ext to contain dots).#!/usr/bin/env python
import sys
import os
import random
import urllib
import urllib2
import re
# POST_URL = 'http://localhost:8000/domains/'
POST_URL = 'http://scoretool.appspot.com/domains/'
TOP_LEVEL_DOMAINS = 'com net org'.split()
NAMES_PER_REQUEST = 200
def upload(filename):
date, tld, ext = os.path.basename(filename).split('.', 2)
names = []
for line in open(filename):
names.extend(line.split())
while names:
data = {
'names': ' '.join(names[:NAMES_PER_REQUEST]),
'com_expiration': '',
'net_expiration': '',
'org_expiration': '',
'submit_names': 'submit'}
data['%s_expiration' % tld] = date
print data
response = urllib2.urlopen(POST_URL, urllib.urlencode(data))
if len(names) > NAMES_PER_REQUEST:
names = names[NAMES_PER_REQUEST:]
else:
break
if __name__ == '__main__':
for filename in sys.argv[1:]:
upload(filename)
|
<commit_before>#!/usr/bin/env python
import sys
import os
import random
import urllib
import urllib2
import re
# POST_URL = 'http://localhost:8000/domains/'
POST_URL = 'http://scoretool.appspot.com/domains/'
TOP_LEVEL_DOMAINS = 'com net org'.split()
NAMES_PER_REQUEST = 200
def upload(filename):
date, tld, ext = os.path.basename(filename).split('.')
names = []
for line in open(filename):
names.extend(line.split())
while names:
data = {
'names': ' '.join(names[:NAMES_PER_REQUEST]),
'com_expiration': '',
'net_expiration': '',
'org_expiration': '',
'submit_names': 'submit'}
data['%s_expiration' % tld] = date
print data
response = urllib2.urlopen(POST_URL, urllib.urlencode(data))
if len(names) > NAMES_PER_REQUEST:
names = names[NAMES_PER_REQUEST:]
else:
break
if __name__ == '__main__':
for filename in sys.argv[1:]:
upload(filename)
<commit_msg>Split filename only twice (allow ext to contain dots).<commit_after>#!/usr/bin/env python
import sys
import os
import random
import urllib
import urllib2
import re
# POST_URL = 'http://localhost:8000/domains/'
POST_URL = 'http://scoretool.appspot.com/domains/'
TOP_LEVEL_DOMAINS = 'com net org'.split()
NAMES_PER_REQUEST = 200
def upload(filename):
date, tld, ext = os.path.basename(filename).split('.', 2)
names = []
for line in open(filename):
names.extend(line.split())
while names:
data = {
'names': ' '.join(names[:NAMES_PER_REQUEST]),
'com_expiration': '',
'net_expiration': '',
'org_expiration': '',
'submit_names': 'submit'}
data['%s_expiration' % tld] = date
print data
response = urllib2.urlopen(POST_URL, urllib.urlencode(data))
if len(names) > NAMES_PER_REQUEST:
names = names[NAMES_PER_REQUEST:]
else:
break
if __name__ == '__main__':
for filename in sys.argv[1:]:
upload(filename)
|
5547b0bfcd3903d7786be91c136366ada9c3ebae
|
detection.py
|
detection.py
|
import os
import sys
import datetime
from django.utils import timezone
from datetime import timedelta
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "madapp.settings")
from django.core.management import execute_from_command_line
from django.db.models import Count, Avg
from madapp import settings
from madapp.mad.models import *
import time
INTERVAL = 0.1
while True:
flows = TemporaryFlows.objects.all()
for flow in flows:
collectedflows =StatsTable(id_switch = flow.id_switch, switchport = flow.switchport, ip_src = flow.ip_src, ip_dst = flow.ip_dst, src_port = flow.src_port, dst_port = flow.dst_port, timestamp = timezone.now())
collectedflows.save()
TemporaryFlows.objects.all().delete()
time.sleep(60)
|
import os
import sys
import datetime
import django
import commands
from django.utils import timezone
from datetime import timedelta
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "madapp.settings")
from django.core.management import execute_from_command_line
from django.db.models import Count, Avg
import django.db.models.query
from madapp import settings
from madapp.mad.models import *
import time
django.setup()
INTERVAL = 0.1
while True:
flows = TemporaryFlows.objects.all()
# for flow in flows:
# collectedflows =StatsTable(id_switch = flow.id_switch, switchport = flow.switchport, ip_src = flow.ip_src, ip_dst = flow.ip_dst, src_port = flow.src_port, dst_port = flow.dst_port, timestamp = timezone.now())
# collectedflows.save()
dl_temp = TemporaryFlows.objects.all().delete()
time.sleep(60)
|
Change in the Flows storage
|
Change in the Flows storage
|
Python
|
apache-2.0
|
gilneidp/TADD,gilneidp/TADD,gilneidp/TADD,gilneidp/TADD,gilneidp/TADD
|
import os
import sys
import datetime
from django.utils import timezone
from datetime import timedelta
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "madapp.settings")
from django.core.management import execute_from_command_line
from django.db.models import Count, Avg
from madapp import settings
from madapp.mad.models import *
import time
INTERVAL = 0.1
while True:
flows = TemporaryFlows.objects.all()
for flow in flows:
collectedflows =StatsTable(id_switch = flow.id_switch, switchport = flow.switchport, ip_src = flow.ip_src, ip_dst = flow.ip_dst, src_port = flow.src_port, dst_port = flow.dst_port, timestamp = timezone.now())
collectedflows.save()
TemporaryFlows.objects.all().delete()
time.sleep(60)
Change in the Flows storage
|
import os
import sys
import datetime
import django
import commands
from django.utils import timezone
from datetime import timedelta
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "madapp.settings")
from django.core.management import execute_from_command_line
from django.db.models import Count, Avg
import django.db.models.query
from madapp import settings
from madapp.mad.models import *
import time
django.setup()
INTERVAL = 0.1
while True:
flows = TemporaryFlows.objects.all()
# for flow in flows:
# collectedflows =StatsTable(id_switch = flow.id_switch, switchport = flow.switchport, ip_src = flow.ip_src, ip_dst = flow.ip_dst, src_port = flow.src_port, dst_port = flow.dst_port, timestamp = timezone.now())
# collectedflows.save()
dl_temp = TemporaryFlows.objects.all().delete()
time.sleep(60)
|
<commit_before>import os
import sys
import datetime
from django.utils import timezone
from datetime import timedelta
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "madapp.settings")
from django.core.management import execute_from_command_line
from django.db.models import Count, Avg
from madapp import settings
from madapp.mad.models import *
import time
INTERVAL = 0.1
while True:
flows = TemporaryFlows.objects.all()
for flow in flows:
collectedflows =StatsTable(id_switch = flow.id_switch, switchport = flow.switchport, ip_src = flow.ip_src, ip_dst = flow.ip_dst, src_port = flow.src_port, dst_port = flow.dst_port, timestamp = timezone.now())
collectedflows.save()
TemporaryFlows.objects.all().delete()
time.sleep(60)
<commit_msg>Change in the Flows storage<commit_after>
|
import os
import sys
import datetime
import django
import commands
from django.utils import timezone
from datetime import timedelta
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "madapp.settings")
from django.core.management import execute_from_command_line
from django.db.models import Count, Avg
import django.db.models.query
from madapp import settings
from madapp.mad.models import *
import time
django.setup()
INTERVAL = 0.1
while True:
flows = TemporaryFlows.objects.all()
# for flow in flows:
# collectedflows =StatsTable(id_switch = flow.id_switch, switchport = flow.switchport, ip_src = flow.ip_src, ip_dst = flow.ip_dst, src_port = flow.src_port, dst_port = flow.dst_port, timestamp = timezone.now())
# collectedflows.save()
dl_temp = TemporaryFlows.objects.all().delete()
time.sleep(60)
|
import os
import sys
import datetime
from django.utils import timezone
from datetime import timedelta
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "madapp.settings")
from django.core.management import execute_from_command_line
from django.db.models import Count, Avg
from madapp import settings
from madapp.mad.models import *
import time
INTERVAL = 0.1
while True:
flows = TemporaryFlows.objects.all()
for flow in flows:
collectedflows =StatsTable(id_switch = flow.id_switch, switchport = flow.switchport, ip_src = flow.ip_src, ip_dst = flow.ip_dst, src_port = flow.src_port, dst_port = flow.dst_port, timestamp = timezone.now())
collectedflows.save()
TemporaryFlows.objects.all().delete()
time.sleep(60)
Change in the Flows storageimport os
import sys
import datetime
import django
import commands
from django.utils import timezone
from datetime import timedelta
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "madapp.settings")
from django.core.management import execute_from_command_line
from django.db.models import Count, Avg
import django.db.models.query
from madapp import settings
from madapp.mad.models import *
import time
django.setup()
INTERVAL = 0.1
while True:
flows = TemporaryFlows.objects.all()
# for flow in flows:
# collectedflows =StatsTable(id_switch = flow.id_switch, switchport = flow.switchport, ip_src = flow.ip_src, ip_dst = flow.ip_dst, src_port = flow.src_port, dst_port = flow.dst_port, timestamp = timezone.now())
# collectedflows.save()
dl_temp = TemporaryFlows.objects.all().delete()
time.sleep(60)
|
<commit_before>import os
import sys
import datetime
from django.utils import timezone
from datetime import timedelta
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "madapp.settings")
from django.core.management import execute_from_command_line
from django.db.models import Count, Avg
from madapp import settings
from madapp.mad.models import *
import time
INTERVAL = 0.1
while True:
flows = TemporaryFlows.objects.all()
for flow in flows:
collectedflows =StatsTable(id_switch = flow.id_switch, switchport = flow.switchport, ip_src = flow.ip_src, ip_dst = flow.ip_dst, src_port = flow.src_port, dst_port = flow.dst_port, timestamp = timezone.now())
collectedflows.save()
TemporaryFlows.objects.all().delete()
time.sleep(60)
<commit_msg>Change in the Flows storage<commit_after>import os
import sys
import datetime
import django
import commands
from django.utils import timezone
from datetime import timedelta
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "madapp.settings")
from django.core.management import execute_from_command_line
from django.db.models import Count, Avg
import django.db.models.query
from madapp import settings
from madapp.mad.models import *
import time
django.setup()
INTERVAL = 0.1
while True:
flows = TemporaryFlows.objects.all()
# for flow in flows:
# collectedflows =StatsTable(id_switch = flow.id_switch, switchport = flow.switchport, ip_src = flow.ip_src, ip_dst = flow.ip_dst, src_port = flow.src_port, dst_port = flow.dst_port, timestamp = timezone.now())
# collectedflows.save()
dl_temp = TemporaryFlows.objects.all().delete()
time.sleep(60)
|
474ba4b8983c0f0f40e7a9a7e045cec79dc6845f
|
SigmaPi/Secure/models.py
|
SigmaPi/Secure/models.py
|
from django.db import models
from django.contrib.auth.models import Group
class CalendarKey(models.Model):
# The group which has access to this key.
group = models.ForeignKey(Group, related_name="calendar_key", default=1)
# The calendar key.
key = models.CharField(max_length=100)
|
from django.db import models
from django.contrib.auth.models import Group
class CalendarKey(models.Model):
# The group which has access to this key.
group = models.ForeignKey(Group, related_name="calendar_key", default=1)
# The calendar key.
key = models.CharField(max_length=100)
def __unicode__(self):
return self.group + " " + self.key
|
Add __unicode__ method to CalendarKey model.
|
Add __unicode__ method to CalendarKey model.
|
Python
|
mit
|
sigmapi-gammaiota/sigmapi-web,sigmapi-gammaiota/sigmapi-web,sigmapi-gammaiota/sigmapi-web,sigmapi-gammaiota/sigmapi-web
|
from django.db import models
from django.contrib.auth.models import Group
class CalendarKey(models.Model):
# The group which has access to this key.
group = models.ForeignKey(Group, related_name="calendar_key", default=1)
# The calendar key.
key = models.CharField(max_length=100)
Add __unicode__ method to CalendarKey model.
|
from django.db import models
from django.contrib.auth.models import Group
class CalendarKey(models.Model):
# The group which has access to this key.
group = models.ForeignKey(Group, related_name="calendar_key", default=1)
# The calendar key.
key = models.CharField(max_length=100)
def __unicode__(self):
return self.group + " " + self.key
|
<commit_before>from django.db import models
from django.contrib.auth.models import Group
class CalendarKey(models.Model):
# The group which has access to this key.
group = models.ForeignKey(Group, related_name="calendar_key", default=1)
# The calendar key.
key = models.CharField(max_length=100)
<commit_msg>Add __unicode__ method to CalendarKey model.<commit_after>
|
from django.db import models
from django.contrib.auth.models import Group
class CalendarKey(models.Model):
# The group which has access to this key.
group = models.ForeignKey(Group, related_name="calendar_key", default=1)
# The calendar key.
key = models.CharField(max_length=100)
def __unicode__(self):
return self.group + " " + self.key
|
from django.db import models
from django.contrib.auth.models import Group
class CalendarKey(models.Model):
# The group which has access to this key.
group = models.ForeignKey(Group, related_name="calendar_key", default=1)
# The calendar key.
key = models.CharField(max_length=100)
Add __unicode__ method to CalendarKey model.from django.db import models
from django.contrib.auth.models import Group
class CalendarKey(models.Model):
# The group which has access to this key.
group = models.ForeignKey(Group, related_name="calendar_key", default=1)
# The calendar key.
key = models.CharField(max_length=100)
def __unicode__(self):
return self.group + " " + self.key
|
<commit_before>from django.db import models
from django.contrib.auth.models import Group
class CalendarKey(models.Model):
# The group which has access to this key.
group = models.ForeignKey(Group, related_name="calendar_key", default=1)
# The calendar key.
key = models.CharField(max_length=100)
<commit_msg>Add __unicode__ method to CalendarKey model.<commit_after>from django.db import models
from django.contrib.auth.models import Group
class CalendarKey(models.Model):
# The group which has access to this key.
group = models.ForeignKey(Group, related_name="calendar_key", default=1)
# The calendar key.
key = models.CharField(max_length=100)
def __unicode__(self):
return self.group + " " + self.key
|
60f88e2e90ff411f121236a0e44100ca2022f9bb
|
test_sequencer.py
|
test_sequencer.py
|
def run(tests):
print '=> Going to run', len(tests), 'tests'
ok = []
fail = []
for number, test in enumerate(tests):
print '\t-> [' + str(number) + '/' + str(len(tests)) + ']', test.__doc__
error = test()
if error is None:
ok.append((number, test))
else:
fail.append((number, test, error))
print ''
print 'RESULTS'
print '\tOK: ' + str(len(ok))
print '\tFAILED: ' + str(len(fail))
if len(fail) > 0:
print ''
print '--- Failures ---'
for number, test, error in fail:
print 'Test ' + str(number) + ' - ' + test.__name__ + ' (' + test.__doc__ + '):'
print str(error)
def ask(question):
answer = None
while True:
print '\t\t-?', question, '[Y/N]',
answer = raw_input()
if answer.strip().upper() == 'Y' or answer.strip().upper() == 'N':
break
return True if answer.strip().upper() == 'Y' else False
|
import sys
# "Test" is a function. It takes no arguments and returns any encountered errors.
# If there is no error, test should return 'None'. Tests shouldn't have any dependencies
# amongst themselves.
def run(tests):
"""If no arguments (sys.argv) are given, runs tests. If there are any arguments they are
interpreted as names of tests to actually run, it will skip other tests"""
filter = set(sys.argv[1:])
if len(filter) > 0:
to_run = []
for test in tests:
if test.__name__ in filter:
to_run.append(test)
tests = to_run
print '=> Going to run {0} tests'.format(len(tests))
ok = []
fail = []
for number, test in enumerate(tests):
print '\t-> [{0}/{1}] {2} ({3})'.format(number, len(tests), test.__name__, test.__doc__)
error = test()
if error is None:
ok.append((number, test))
else:
fail.append((number, test, error))
print ''
print 'RESULTS'
print '\tOK: {0}'.format(len(ok))
print '\tFAILED: {0}'.format(len(fail))
if len(fail) > 0:
print ''
print '--- Failures ---'
for number, test, error in fail:
print 'Test {0} - {1} ({2})\n{3}'.format(number, test.__name__, test.__doc__, error)
def ask(question):
answer = None
while True:
print '\t\t-? {0} [Y/N]'.format(question),
answer = raw_input()
if answer.strip().upper() == 'Y' or answer.strip().upper() == 'N':
break
return True if answer.strip().upper() == 'Y' else False
|
Use formatted strings, add tests filter
|
Use formatted strings, add tests filter
|
Python
|
mit
|
fmfi-svt-deadlock/hw-testing,fmfi-svt-deadlock/hw-testing
|
def run(tests):
print '=> Going to run', len(tests), 'tests'
ok = []
fail = []
for number, test in enumerate(tests):
print '\t-> [' + str(number) + '/' + str(len(tests)) + ']', test.__doc__
error = test()
if error is None:
ok.append((number, test))
else:
fail.append((number, test, error))
print ''
print 'RESULTS'
print '\tOK: ' + str(len(ok))
print '\tFAILED: ' + str(len(fail))
if len(fail) > 0:
print ''
print '--- Failures ---'
for number, test, error in fail:
print 'Test ' + str(number) + ' - ' + test.__name__ + ' (' + test.__doc__ + '):'
print str(error)
def ask(question):
answer = None
while True:
print '\t\t-?', question, '[Y/N]',
answer = raw_input()
if answer.strip().upper() == 'Y' or answer.strip().upper() == 'N':
break
return True if answer.strip().upper() == 'Y' else False
Use formatted strings, add tests filter
|
import sys
# "Test" is a function. It takes no arguments and returns any encountered errors.
# If there is no error, test should return 'None'. Tests shouldn't have any dependencies
# amongst themselves.
def run(tests):
"""If no arguments (sys.argv) are given, runs tests. If there are any arguments they are
interpreted as names of tests to actually run, it will skip other tests"""
filter = set(sys.argv[1:])
if len(filter) > 0:
to_run = []
for test in tests:
if test.__name__ in filter:
to_run.append(test)
tests = to_run
print '=> Going to run {0} tests'.format(len(tests))
ok = []
fail = []
for number, test in enumerate(tests):
print '\t-> [{0}/{1}] {2} ({3})'.format(number, len(tests), test.__name__, test.__doc__)
error = test()
if error is None:
ok.append((number, test))
else:
fail.append((number, test, error))
print ''
print 'RESULTS'
print '\tOK: {0}'.format(len(ok))
print '\tFAILED: {0}'.format(len(fail))
if len(fail) > 0:
print ''
print '--- Failures ---'
for number, test, error in fail:
print 'Test {0} - {1} ({2})\n{3}'.format(number, test.__name__, test.__doc__, error)
def ask(question):
answer = None
while True:
print '\t\t-? {0} [Y/N]'.format(question),
answer = raw_input()
if answer.strip().upper() == 'Y' or answer.strip().upper() == 'N':
break
return True if answer.strip().upper() == 'Y' else False
|
<commit_before>def run(tests):
print '=> Going to run', len(tests), 'tests'
ok = []
fail = []
for number, test in enumerate(tests):
print '\t-> [' + str(number) + '/' + str(len(tests)) + ']', test.__doc__
error = test()
if error is None:
ok.append((number, test))
else:
fail.append((number, test, error))
print ''
print 'RESULTS'
print '\tOK: ' + str(len(ok))
print '\tFAILED: ' + str(len(fail))
if len(fail) > 0:
print ''
print '--- Failures ---'
for number, test, error in fail:
print 'Test ' + str(number) + ' - ' + test.__name__ + ' (' + test.__doc__ + '):'
print str(error)
def ask(question):
answer = None
while True:
print '\t\t-?', question, '[Y/N]',
answer = raw_input()
if answer.strip().upper() == 'Y' or answer.strip().upper() == 'N':
break
return True if answer.strip().upper() == 'Y' else False
<commit_msg>Use formatted strings, add tests filter<commit_after>
|
import sys
# "Test" is a function. It takes no arguments and returns any encountered errors.
# If there is no error, test should return 'None'. Tests shouldn't have any dependencies
# amongst themselves.
def run(tests):
"""If no arguments (sys.argv) are given, runs tests. If there are any arguments they are
interpreted as names of tests to actually run, it will skip other tests"""
filter = set(sys.argv[1:])
if len(filter) > 0:
to_run = []
for test in tests:
if test.__name__ in filter:
to_run.append(test)
tests = to_run
print '=> Going to run {0} tests'.format(len(tests))
ok = []
fail = []
for number, test in enumerate(tests):
print '\t-> [{0}/{1}] {2} ({3})'.format(number, len(tests), test.__name__, test.__doc__)
error = test()
if error is None:
ok.append((number, test))
else:
fail.append((number, test, error))
print ''
print 'RESULTS'
print '\tOK: {0}'.format(len(ok))
print '\tFAILED: {0}'.format(len(fail))
if len(fail) > 0:
print ''
print '--- Failures ---'
for number, test, error in fail:
print 'Test {0} - {1} ({2})\n{3}'.format(number, test.__name__, test.__doc__, error)
def ask(question):
answer = None
while True:
print '\t\t-? {0} [Y/N]'.format(question),
answer = raw_input()
if answer.strip().upper() == 'Y' or answer.strip().upper() == 'N':
break
return True if answer.strip().upper() == 'Y' else False
|
def run(tests):
print '=> Going to run', len(tests), 'tests'
ok = []
fail = []
for number, test in enumerate(tests):
print '\t-> [' + str(number) + '/' + str(len(tests)) + ']', test.__doc__
error = test()
if error is None:
ok.append((number, test))
else:
fail.append((number, test, error))
print ''
print 'RESULTS'
print '\tOK: ' + str(len(ok))
print '\tFAILED: ' + str(len(fail))
if len(fail) > 0:
print ''
print '--- Failures ---'
for number, test, error in fail:
print 'Test ' + str(number) + ' - ' + test.__name__ + ' (' + test.__doc__ + '):'
print str(error)
def ask(question):
answer = None
while True:
print '\t\t-?', question, '[Y/N]',
answer = raw_input()
if answer.strip().upper() == 'Y' or answer.strip().upper() == 'N':
break
return True if answer.strip().upper() == 'Y' else False
Use formatted strings, add tests filterimport sys
# "Test" is a function. It takes no arguments and returns any encountered errors.
# If there is no error, test should return 'None'. Tests shouldn't have any dependencies
# amongst themselves.
def run(tests):
"""If no arguments (sys.argv) are given, runs tests. If there are any arguments they are
interpreted as names of tests to actually run, it will skip other tests"""
filter = set(sys.argv[1:])
if len(filter) > 0:
to_run = []
for test in tests:
if test.__name__ in filter:
to_run.append(test)
tests = to_run
print '=> Going to run {0} tests'.format(len(tests))
ok = []
fail = []
for number, test in enumerate(tests):
print '\t-> [{0}/{1}] {2} ({3})'.format(number, len(tests), test.__name__, test.__doc__)
error = test()
if error is None:
ok.append((number, test))
else:
fail.append((number, test, error))
print ''
print 'RESULTS'
print '\tOK: {0}'.format(len(ok))
print '\tFAILED: {0}'.format(len(fail))
if len(fail) > 0:
print ''
print '--- Failures ---'
for number, test, error in fail:
print 'Test {0} - {1} ({2})\n{3}'.format(number, test.__name__, test.__doc__, error)
def ask(question):
answer = None
while True:
print '\t\t-? {0} [Y/N]'.format(question),
answer = raw_input()
if answer.strip().upper() == 'Y' or answer.strip().upper() == 'N':
break
return True if answer.strip().upper() == 'Y' else False
|
<commit_before>def run(tests):
print '=> Going to run', len(tests), 'tests'
ok = []
fail = []
for number, test in enumerate(tests):
print '\t-> [' + str(number) + '/' + str(len(tests)) + ']', test.__doc__
error = test()
if error is None:
ok.append((number, test))
else:
fail.append((number, test, error))
print ''
print 'RESULTS'
print '\tOK: ' + str(len(ok))
print '\tFAILED: ' + str(len(fail))
if len(fail) > 0:
print ''
print '--- Failures ---'
for number, test, error in fail:
print 'Test ' + str(number) + ' - ' + test.__name__ + ' (' + test.__doc__ + '):'
print str(error)
def ask(question):
answer = None
while True:
print '\t\t-?', question, '[Y/N]',
answer = raw_input()
if answer.strip().upper() == 'Y' or answer.strip().upper() == 'N':
break
return True if answer.strip().upper() == 'Y' else False
<commit_msg>Use formatted strings, add tests filter<commit_after>import sys
# "Test" is a function. It takes no arguments and returns any encountered errors.
# If there is no error, test should return 'None'. Tests shouldn't have any dependencies
# amongst themselves.
def run(tests):
"""If no arguments (sys.argv) are given, runs tests. If there are any arguments they are
interpreted as names of tests to actually run, it will skip other tests"""
filter = set(sys.argv[1:])
if len(filter) > 0:
to_run = []
for test in tests:
if test.__name__ in filter:
to_run.append(test)
tests = to_run
print '=> Going to run {0} tests'.format(len(tests))
ok = []
fail = []
for number, test in enumerate(tests):
print '\t-> [{0}/{1}] {2} ({3})'.format(number, len(tests), test.__name__, test.__doc__)
error = test()
if error is None:
ok.append((number, test))
else:
fail.append((number, test, error))
print ''
print 'RESULTS'
print '\tOK: {0}'.format(len(ok))
print '\tFAILED: {0}'.format(len(fail))
if len(fail) > 0:
print ''
print '--- Failures ---'
for number, test, error in fail:
print 'Test {0} - {1} ({2})\n{3}'.format(number, test.__name__, test.__doc__, error)
def ask(question):
answer = None
while True:
print '\t\t-? {0} [Y/N]'.format(question),
answer = raw_input()
if answer.strip().upper() == 'Y' or answer.strip().upper() == 'N':
break
return True if answer.strip().upper() == 'Y' else False
|
9c42a7925d4e872a6245301ef68b2b9aa1f0aa7b
|
tests/__init__.py
|
tests/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <yesudeep@gmail.com>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <yesudeep@gmail.com>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sys import version_info
from os import name as OS_NAME
__all__= ['unittest', 'skipIfNtMove']
if version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
skipIfNtMove = unittest.skipIf(OS_NAME == 'nt', "windows can not detect moves")
|
Declare unittest lib used within python version
|
Declare unittest lib used within python version
|
Python
|
apache-2.0
|
glorizen/watchdog,ymero/watchdog,javrasya/watchdog,mconstantin/watchdog,teleyinex/watchdog,gorakhargosh/watchdog,javrasya/watchdog,ymero/watchdog,javrasya/watchdog,mconstantin/watchdog,teleyinex/watchdog,teleyinex/watchdog,glorizen/watchdog,glorizen/watchdog,gorakhargosh/watchdog,mconstantin/watchdog,ymero/watchdog
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <yesudeep@gmail.com>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Declare unittest lib used within python version
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <yesudeep@gmail.com>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sys import version_info
from os import name as OS_NAME
__all__= ['unittest', 'skipIfNtMove']
if version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
skipIfNtMove = unittest.skipIf(OS_NAME == 'nt', "windows can not detect moves")
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <yesudeep@gmail.com>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
<commit_msg>Declare unittest lib used within python version<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <yesudeep@gmail.com>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sys import version_info
from os import name as OS_NAME
__all__= ['unittest', 'skipIfNtMove']
if version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
skipIfNtMove = unittest.skipIf(OS_NAME == 'nt', "windows can not detect moves")
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <yesudeep@gmail.com>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Declare unittest lib used within python version#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <yesudeep@gmail.com>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sys import version_info
from os import name as OS_NAME
__all__= ['unittest', 'skipIfNtMove']
if version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
skipIfNtMove = unittest.skipIf(OS_NAME == 'nt', "windows can not detect moves")
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <yesudeep@gmail.com>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
<commit_msg>Declare unittest lib used within python version<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <yesudeep@gmail.com>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sys import version_info
from os import name as OS_NAME
__all__= ['unittest', 'skipIfNtMove']
if version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
skipIfNtMove = unittest.skipIf(OS_NAME == 'nt', "windows can not detect moves")
|
c68c88c0d90512bf315312b137b9a10ec5eee03e
|
tests/__init__.py
|
tests/__init__.py
|
import sys
# The unittest module got a significant overhaul
# in 2.7, so if we're in 2.6 we can use the backported
# version unittest2.
if sys.version_info[:2] == (2, 6):
import unittest2 as unittest
else:
import unittest
|
Add the compatiblity for py2.6
|
Add the compatiblity for py2.6
|
Python
|
apache-2.0
|
henrysher/kamboo,henrysher/kamboo
|
Add the compatiblity for py2.6
|
import sys
# The unittest module got a significant overhaul
# in 2.7, so if we're in 2.6 we can use the backported
# version unittest2.
if sys.version_info[:2] == (2, 6):
import unittest2 as unittest
else:
import unittest
|
<commit_before><commit_msg>Add the compatiblity for py2.6<commit_after>
|
import sys
# The unittest module got a significant overhaul
# in 2.7, so if we're in 2.6 we can use the backported
# version unittest2.
if sys.version_info[:2] == (2, 6):
import unittest2 as unittest
else:
import unittest
|
Add the compatiblity for py2.6import sys
# The unittest module got a significant overhaul
# in 2.7, so if we're in 2.6 we can use the backported
# version unittest2.
if sys.version_info[:2] == (2, 6):
import unittest2 as unittest
else:
import unittest
|
<commit_before><commit_msg>Add the compatiblity for py2.6<commit_after>import sys
# The unittest module got a significant overhaul
# in 2.7, so if we're in 2.6 we can use the backported
# version unittest2.
if sys.version_info[:2] == (2, 6):
import unittest2 as unittest
else:
import unittest
|
|
3ae6c0f4c4f13207386dbf0fa2004655e9f2c8d6
|
UM/View/CompositePass.py
|
UM/View/CompositePass.py
|
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from UM.Application import Application
from UM.Resources import Resources
from UM.Math.Matrix import Matrix
from UM.View.RenderPass import RenderPass
from UM.View.GL.OpenGL import OpenGL
class CompositePass(RenderPass):
def __init__(self, width, height):
super().__init__("composite", width, height)
self._shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "composite.shader"))
self._gl = OpenGL.getInstance().getBindingsObject()
self._renderer = Application.getInstance().getRenderer()
def setCompositeShader(self, shader):
self._shader = shader
def renderContents(self):
pass
def renderOutput(self):
self._shader.bind()
texture_unit = 0
for render_pass in self._renderer.getRenderPasses():
self._gl.glActiveTexture(texture_unit)
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, render_pass.getTextureId())
texture_unit += 1
self._renderer.renderQuad(self._shader)
for i in range(texture_unit):
self._gl.glActiveTexture(texture_unit)
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, 0)
self._shader.release()
|
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from UM.Application import Application
from UM.Resources import Resources
from UM.Math.Matrix import Matrix
from UM.View.RenderPass import RenderPass
from UM.View.GL.OpenGL import OpenGL
class CompositePass(RenderPass):
def __init__(self, width, height):
super().__init__("composite", width, height, 999)
self._shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "composite.shader"))
self._gl = OpenGL.getInstance().getBindingsObject()
self._renderer = Application.getInstance().getRenderer()
self._layer_bindings = [ "default", "selection" ]
def setCompositeShader(self, shader):
self._shader = shader
def setLayerBindings(self, bindings):
self._layer_bindings = bindings
def render(self):
self._shader.bind()
texture_unit = 0
for binding in self._layer_bindings:
render_pass = self._renderer.getRenderPass(binding)
if not render_pass:
continue
self._gl.glActiveTexture(getattr(self._gl, "GL_TEXTURE{0}".format(texture_unit)))
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, render_pass.getTextureId())
texture_unit += 1
self._renderer.renderFullScreenQuad(self._shader)
for i in range(texture_unit):
self._gl.glActiveTexture(getattr(self._gl, "GL_TEXTURE{0}".format(i)))
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, 0)
self._shader.release()
|
Add explicit render layer binding instead of assuming all render passes can be used for compositing
|
Add explicit render layer binding instead of assuming all render passes can be used for compositing
|
Python
|
agpl-3.0
|
onitake/Uranium,onitake/Uranium
|
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from UM.Application import Application
from UM.Resources import Resources
from UM.Math.Matrix import Matrix
from UM.View.RenderPass import RenderPass
from UM.View.GL.OpenGL import OpenGL
class CompositePass(RenderPass):
def __init__(self, width, height):
super().__init__("composite", width, height)
self._shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "composite.shader"))
self._gl = OpenGL.getInstance().getBindingsObject()
self._renderer = Application.getInstance().getRenderer()
def setCompositeShader(self, shader):
self._shader = shader
def renderContents(self):
pass
def renderOutput(self):
self._shader.bind()
texture_unit = 0
for render_pass in self._renderer.getRenderPasses():
self._gl.glActiveTexture(texture_unit)
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, render_pass.getTextureId())
texture_unit += 1
self._renderer.renderQuad(self._shader)
for i in range(texture_unit):
self._gl.glActiveTexture(texture_unit)
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, 0)
self._shader.release()
Add explicit render layer binding instead of assuming all render passes can be used for compositing
|
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from UM.Application import Application
from UM.Resources import Resources
from UM.Math.Matrix import Matrix
from UM.View.RenderPass import RenderPass
from UM.View.GL.OpenGL import OpenGL
class CompositePass(RenderPass):
def __init__(self, width, height):
super().__init__("composite", width, height, 999)
self._shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "composite.shader"))
self._gl = OpenGL.getInstance().getBindingsObject()
self._renderer = Application.getInstance().getRenderer()
self._layer_bindings = [ "default", "selection" ]
def setCompositeShader(self, shader):
self._shader = shader
def setLayerBindings(self, bindings):
self._layer_bindings = bindings
def render(self):
self._shader.bind()
texture_unit = 0
for binding in self._layer_bindings:
render_pass = self._renderer.getRenderPass(binding)
if not render_pass:
continue
self._gl.glActiveTexture(getattr(self._gl, "GL_TEXTURE{0}".format(texture_unit)))
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, render_pass.getTextureId())
texture_unit += 1
self._renderer.renderFullScreenQuad(self._shader)
for i in range(texture_unit):
self._gl.glActiveTexture(getattr(self._gl, "GL_TEXTURE{0}".format(i)))
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, 0)
self._shader.release()
|
<commit_before># Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from UM.Application import Application
from UM.Resources import Resources
from UM.Math.Matrix import Matrix
from UM.View.RenderPass import RenderPass
from UM.View.GL.OpenGL import OpenGL
class CompositePass(RenderPass):
def __init__(self, width, height):
super().__init__("composite", width, height)
self._shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "composite.shader"))
self._gl = OpenGL.getInstance().getBindingsObject()
self._renderer = Application.getInstance().getRenderer()
def setCompositeShader(self, shader):
self._shader = shader
def renderContents(self):
pass
def renderOutput(self):
self._shader.bind()
texture_unit = 0
for render_pass in self._renderer.getRenderPasses():
self._gl.glActiveTexture(texture_unit)
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, render_pass.getTextureId())
texture_unit += 1
self._renderer.renderQuad(self._shader)
for i in range(texture_unit):
self._gl.glActiveTexture(texture_unit)
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, 0)
self._shader.release()
<commit_msg>Add explicit render layer binding instead of assuming all render passes can be used for compositing<commit_after>
|
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from UM.Application import Application
from UM.Resources import Resources
from UM.Math.Matrix import Matrix
from UM.View.RenderPass import RenderPass
from UM.View.GL.OpenGL import OpenGL
class CompositePass(RenderPass):
def __init__(self, width, height):
super().__init__("composite", width, height, 999)
self._shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "composite.shader"))
self._gl = OpenGL.getInstance().getBindingsObject()
self._renderer = Application.getInstance().getRenderer()
self._layer_bindings = [ "default", "selection" ]
def setCompositeShader(self, shader):
self._shader = shader
def setLayerBindings(self, bindings):
self._layer_bindings = bindings
def render(self):
self._shader.bind()
texture_unit = 0
for binding in self._layer_bindings:
render_pass = self._renderer.getRenderPass(binding)
if not render_pass:
continue
self._gl.glActiveTexture(getattr(self._gl, "GL_TEXTURE{0}".format(texture_unit)))
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, render_pass.getTextureId())
texture_unit += 1
self._renderer.renderFullScreenQuad(self._shader)
for i in range(texture_unit):
self._gl.glActiveTexture(getattr(self._gl, "GL_TEXTURE{0}".format(i)))
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, 0)
self._shader.release()
|
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from UM.Application import Application
from UM.Resources import Resources
from UM.Math.Matrix import Matrix
from UM.View.RenderPass import RenderPass
from UM.View.GL.OpenGL import OpenGL
class CompositePass(RenderPass):
def __init__(self, width, height):
super().__init__("composite", width, height)
self._shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "composite.shader"))
self._gl = OpenGL.getInstance().getBindingsObject()
self._renderer = Application.getInstance().getRenderer()
def setCompositeShader(self, shader):
self._shader = shader
def renderContents(self):
pass
def renderOutput(self):
self._shader.bind()
texture_unit = 0
for render_pass in self._renderer.getRenderPasses():
self._gl.glActiveTexture(texture_unit)
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, render_pass.getTextureId())
texture_unit += 1
self._renderer.renderQuad(self._shader)
for i in range(texture_unit):
self._gl.glActiveTexture(texture_unit)
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, 0)
self._shader.release()
Add explicit render layer binding instead of assuming all render passes can be used for compositing# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from UM.Application import Application
from UM.Resources import Resources
from UM.Math.Matrix import Matrix
from UM.View.RenderPass import RenderPass
from UM.View.GL.OpenGL import OpenGL
class CompositePass(RenderPass):
def __init__(self, width, height):
super().__init__("composite", width, height, 999)
self._shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "composite.shader"))
self._gl = OpenGL.getInstance().getBindingsObject()
self._renderer = Application.getInstance().getRenderer()
self._layer_bindings = [ "default", "selection" ]
def setCompositeShader(self, shader):
self._shader = shader
def setLayerBindings(self, bindings):
self._layer_bindings = bindings
def render(self):
self._shader.bind()
texture_unit = 0
for binding in self._layer_bindings:
render_pass = self._renderer.getRenderPass(binding)
if not render_pass:
continue
self._gl.glActiveTexture(getattr(self._gl, "GL_TEXTURE{0}".format(texture_unit)))
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, render_pass.getTextureId())
texture_unit += 1
self._renderer.renderFullScreenQuad(self._shader)
for i in range(texture_unit):
self._gl.glActiveTexture(getattr(self._gl, "GL_TEXTURE{0}".format(i)))
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, 0)
self._shader.release()
|
<commit_before># Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from UM.Application import Application
from UM.Resources import Resources
from UM.Math.Matrix import Matrix
from UM.View.RenderPass import RenderPass
from UM.View.GL.OpenGL import OpenGL
class CompositePass(RenderPass):
def __init__(self, width, height):
super().__init__("composite", width, height)
self._shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "composite.shader"))
self._gl = OpenGL.getInstance().getBindingsObject()
self._renderer = Application.getInstance().getRenderer()
def setCompositeShader(self, shader):
self._shader = shader
def renderContents(self):
pass
def renderOutput(self):
self._shader.bind()
texture_unit = 0
for render_pass in self._renderer.getRenderPasses():
self._gl.glActiveTexture(texture_unit)
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, render_pass.getTextureId())
texture_unit += 1
self._renderer.renderQuad(self._shader)
for i in range(texture_unit):
self._gl.glActiveTexture(texture_unit)
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, 0)
self._shader.release()
<commit_msg>Add explicit render layer binding instead of assuming all render passes can be used for compositing<commit_after># Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from UM.Application import Application
from UM.Resources import Resources
from UM.Math.Matrix import Matrix
from UM.View.RenderPass import RenderPass
from UM.View.GL.OpenGL import OpenGL
class CompositePass(RenderPass):
def __init__(self, width, height):
super().__init__("composite", width, height, 999)
self._shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "composite.shader"))
self._gl = OpenGL.getInstance().getBindingsObject()
self._renderer = Application.getInstance().getRenderer()
self._layer_bindings = [ "default", "selection" ]
def setCompositeShader(self, shader):
self._shader = shader
def setLayerBindings(self, bindings):
self._layer_bindings = bindings
def render(self):
self._shader.bind()
texture_unit = 0
for binding in self._layer_bindings:
render_pass = self._renderer.getRenderPass(binding)
if not render_pass:
continue
self._gl.glActiveTexture(getattr(self._gl, "GL_TEXTURE{0}".format(texture_unit)))
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, render_pass.getTextureId())
texture_unit += 1
self._renderer.renderFullScreenQuad(self._shader)
for i in range(texture_unit):
self._gl.glActiveTexture(getattr(self._gl, "GL_TEXTURE{0}".format(i)))
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, 0)
self._shader.release()
|
46566c568b20a037006cf7bbebdc70353e163bb2
|
paintings/processors.py
|
paintings/processors.py
|
from random import randrange
from paintings.models import Gallery, Painting
def get_Galleries(request):
return ( {'galleries' : Gallery.objects.all()} )
def get_random_canvasOilPainting(request):
paintings = Painting.objects.filter(surface='canvas', material='oil').extra(where=['width > height'])
rand_painting = paintings[ randrange( paintings.__len__() ) ]
return ( {'rand_painting' : rand_painting} )
|
from random import randrange
from paintings.models import Gallery, Painting
def get_Galleries(request):
return ( {'galleries' : Gallery.objects.all()} )
def get_random_canvasOilPainting(request):
try:
paintings = Painting.objects.filter(surface='canvas', material='oil').extra(where=['width > height'])
rand_painting = paintings[ randrange( paintings.__len__() ) ]
except:
rand_painting = None
return ( {'rand_painting' : rand_painting} )
|
Fix bug with empty db
|
Fix bug with empty db
|
Python
|
mit
|
hombit/olgart,hombit/olgart,hombit/olgart,hombit/olgart
|
from random import randrange
from paintings.models import Gallery, Painting
def get_Galleries(request):
return ( {'galleries' : Gallery.objects.all()} )
def get_random_canvasOilPainting(request):
paintings = Painting.objects.filter(surface='canvas', material='oil').extra(where=['width > height'])
rand_painting = paintings[ randrange( paintings.__len__() ) ]
return ( {'rand_painting' : rand_painting} )
Fix bug with empty db
|
from random import randrange
from paintings.models import Gallery, Painting
def get_Galleries(request):
return ( {'galleries' : Gallery.objects.all()} )
def get_random_canvasOilPainting(request):
try:
paintings = Painting.objects.filter(surface='canvas', material='oil').extra(where=['width > height'])
rand_painting = paintings[ randrange( paintings.__len__() ) ]
except:
rand_painting = None
return ( {'rand_painting' : rand_painting} )
|
<commit_before>from random import randrange
from paintings.models import Gallery, Painting
def get_Galleries(request):
return ( {'galleries' : Gallery.objects.all()} )
def get_random_canvasOilPainting(request):
paintings = Painting.objects.filter(surface='canvas', material='oil').extra(where=['width > height'])
rand_painting = paintings[ randrange( paintings.__len__() ) ]
return ( {'rand_painting' : rand_painting} )
<commit_msg>Fix bug with empty db<commit_after>
|
from random import randrange
from paintings.models import Gallery, Painting
def get_Galleries(request):
return ( {'galleries' : Gallery.objects.all()} )
def get_random_canvasOilPainting(request):
try:
paintings = Painting.objects.filter(surface='canvas', material='oil').extra(where=['width > height'])
rand_painting = paintings[ randrange( paintings.__len__() ) ]
except:
rand_painting = None
return ( {'rand_painting' : rand_painting} )
|
from random import randrange
from paintings.models import Gallery, Painting
def get_Galleries(request):
return ( {'galleries' : Gallery.objects.all()} )
def get_random_canvasOilPainting(request):
paintings = Painting.objects.filter(surface='canvas', material='oil').extra(where=['width > height'])
rand_painting = paintings[ randrange( paintings.__len__() ) ]
return ( {'rand_painting' : rand_painting} )
Fix bug with empty dbfrom random import randrange
from paintings.models import Gallery, Painting
def get_Galleries(request):
return ( {'galleries' : Gallery.objects.all()} )
def get_random_canvasOilPainting(request):
try:
paintings = Painting.objects.filter(surface='canvas', material='oil').extra(where=['width > height'])
rand_painting = paintings[ randrange( paintings.__len__() ) ]
except:
rand_painting = None
return ( {'rand_painting' : rand_painting} )
|
<commit_before>from random import randrange
from paintings.models import Gallery, Painting
def get_Galleries(request):
return ( {'galleries' : Gallery.objects.all()} )
def get_random_canvasOilPainting(request):
paintings = Painting.objects.filter(surface='canvas', material='oil').extra(where=['width > height'])
rand_painting = paintings[ randrange( paintings.__len__() ) ]
return ( {'rand_painting' : rand_painting} )
<commit_msg>Fix bug with empty db<commit_after>from random import randrange
from paintings.models import Gallery, Painting
def get_Galleries(request):
return ( {'galleries' : Gallery.objects.all()} )
def get_random_canvasOilPainting(request):
try:
paintings = Painting.objects.filter(surface='canvas', material='oil').extra(where=['width > height'])
rand_painting = paintings[ randrange( paintings.__len__() ) ]
except:
rand_painting = None
return ( {'rand_painting' : rand_painting} )
|
5b4c710df7149b0654fc731979978a9a561614a3
|
wluopensource/osl_flatpages/models.py
|
wluopensource/osl_flatpages/models.py
|
from django.db import models
import markdown
class Flatpage(models.Model):
page_name = models.CharField(max_length=100, primary_key=True, unique=True)
title = models.CharField(blank=True, max_length=100)
description = models.CharField(blank=True, max_length=255)
markdown_content = models.TextField('content')
content = models.TextField(editable=False)
def __unicode__(self):
return self.page_name
def save(self, force_insert=False, force_update=False):
self.content = markdown.markdown(self.markdown_content)
super(Flatpage, self).save(force_insert, force_update)
|
from django.db import models
import markdown
class Flatpage(models.Model):
page_name = models.CharField(max_length=100, primary_key=True, unique=True)
title = models.CharField(blank=True, max_length=100)
description = models.CharField(blank=True, max_length=255)
markdown_content = models.TextField('content')
content = models.TextField(editable=False)
class Meta:
ordering = ['page_name']
def __unicode__(self):
return self.page_name
def save(self, force_insert=False, force_update=False):
self.content = markdown.markdown(self.markdown_content)
super(Flatpage, self).save(force_insert, force_update)
|
Change flatpage ordering to order by page name ascending
|
Change flatpage ordering to order by page name ascending
|
Python
|
bsd-3-clause
|
jeffcharles/Open-Source-at-Laurier-Website,jeffcharles/Open-Source-at-Laurier-Website,jeffcharles/Open-Source-at-Laurier-Website,jeffcharles/Open-Source-at-Laurier-Website
|
from django.db import models
import markdown
class Flatpage(models.Model):
page_name = models.CharField(max_length=100, primary_key=True, unique=True)
title = models.CharField(blank=True, max_length=100)
description = models.CharField(blank=True, max_length=255)
markdown_content = models.TextField('content')
content = models.TextField(editable=False)
def __unicode__(self):
return self.page_name
def save(self, force_insert=False, force_update=False):
self.content = markdown.markdown(self.markdown_content)
super(Flatpage, self).save(force_insert, force_update)
Change flatpage ordering to order by page name ascending
|
from django.db import models
import markdown
class Flatpage(models.Model):
page_name = models.CharField(max_length=100, primary_key=True, unique=True)
title = models.CharField(blank=True, max_length=100)
description = models.CharField(blank=True, max_length=255)
markdown_content = models.TextField('content')
content = models.TextField(editable=False)
class Meta:
ordering = ['page_name']
def __unicode__(self):
return self.page_name
def save(self, force_insert=False, force_update=False):
self.content = markdown.markdown(self.markdown_content)
super(Flatpage, self).save(force_insert, force_update)
|
<commit_before>from django.db import models
import markdown
class Flatpage(models.Model):
page_name = models.CharField(max_length=100, primary_key=True, unique=True)
title = models.CharField(blank=True, max_length=100)
description = models.CharField(blank=True, max_length=255)
markdown_content = models.TextField('content')
content = models.TextField(editable=False)
def __unicode__(self):
return self.page_name
def save(self, force_insert=False, force_update=False):
self.content = markdown.markdown(self.markdown_content)
super(Flatpage, self).save(force_insert, force_update)
<commit_msg>Change flatpage ordering to order by page name ascending<commit_after>
|
from django.db import models
import markdown
class Flatpage(models.Model):
page_name = models.CharField(max_length=100, primary_key=True, unique=True)
title = models.CharField(blank=True, max_length=100)
description = models.CharField(blank=True, max_length=255)
markdown_content = models.TextField('content')
content = models.TextField(editable=False)
class Meta:
ordering = ['page_name']
def __unicode__(self):
return self.page_name
def save(self, force_insert=False, force_update=False):
self.content = markdown.markdown(self.markdown_content)
super(Flatpage, self).save(force_insert, force_update)
|
from django.db import models
import markdown
class Flatpage(models.Model):
page_name = models.CharField(max_length=100, primary_key=True, unique=True)
title = models.CharField(blank=True, max_length=100)
description = models.CharField(blank=True, max_length=255)
markdown_content = models.TextField('content')
content = models.TextField(editable=False)
def __unicode__(self):
return self.page_name
def save(self, force_insert=False, force_update=False):
self.content = markdown.markdown(self.markdown_content)
super(Flatpage, self).save(force_insert, force_update)
Change flatpage ordering to order by page name ascendingfrom django.db import models
import markdown
class Flatpage(models.Model):
page_name = models.CharField(max_length=100, primary_key=True, unique=True)
title = models.CharField(blank=True, max_length=100)
description = models.CharField(blank=True, max_length=255)
markdown_content = models.TextField('content')
content = models.TextField(editable=False)
class Meta:
ordering = ['page_name']
def __unicode__(self):
return self.page_name
def save(self, force_insert=False, force_update=False):
self.content = markdown.markdown(self.markdown_content)
super(Flatpage, self).save(force_insert, force_update)
|
<commit_before>from django.db import models
import markdown
class Flatpage(models.Model):
page_name = models.CharField(max_length=100, primary_key=True, unique=True)
title = models.CharField(blank=True, max_length=100)
description = models.CharField(blank=True, max_length=255)
markdown_content = models.TextField('content')
content = models.TextField(editable=False)
def __unicode__(self):
return self.page_name
def save(self, force_insert=False, force_update=False):
self.content = markdown.markdown(self.markdown_content)
super(Flatpage, self).save(force_insert, force_update)
<commit_msg>Change flatpage ordering to order by page name ascending<commit_after>from django.db import models
import markdown
class Flatpage(models.Model):
page_name = models.CharField(max_length=100, primary_key=True, unique=True)
title = models.CharField(blank=True, max_length=100)
description = models.CharField(blank=True, max_length=255)
markdown_content = models.TextField('content')
content = models.TextField(editable=False)
class Meta:
ordering = ['page_name']
def __unicode__(self):
return self.page_name
def save(self, force_insert=False, force_update=False):
self.content = markdown.markdown(self.markdown_content)
super(Flatpage, self).save(force_insert, force_update)
|
b6833a9ee7da9a59e50710c0bd4d3ad0b83439ab
|
fabfile.py
|
fabfile.py
|
import unipath
from fabric.api import *
from fabric.contrib import files
# Fab settings
env.hosts = ['ve.djangoproject.com']
# Deployment environment paths and settings and such.
env.deploy_base = unipath.Path('/home/buildbot')
env.virtualenv = env.deploy_base
env.code_dir = env.deploy_base.child('master')
env.git_url = 'git://github.com/jacobian/django-buildmaster.git'
# FIXME: make a deploy branch in this repo to deploy against.
env.default_deploy_ref = 'HEAD'
def deploy():
"""
Full deploy: new code, update dependencies, migrate, and restart services.
"""
deploy_code()
update_dependencies()
restart()
def restart():
sudo('service buildbot restart')
def deploy_code(ref=None):
"""
Update code on the servers from Git.
"""
ref = ref or env.default_deploy_ref
puts("Deploying %s" % ref)
if not files.exists(env.code_dir):
sudo('git clone %s %s' % (env.git_url, env.code_dir))
with cd(env.code_dir):
sudo('git fetch && git reset --hard %s' % ref)
def update_dependencies():
"""
Update dependencies in the virtualenv.
"""
pip = env.virtualenv.child('bin', 'pip')
reqs = env.code_dir.child('requirements.txt')
sudo('%s -q install -r %s' % (pip, reqs))
|
import unipath
from fabric.api import *
from fabric.contrib import files
# Fab settings
env.hosts = ['ve.djangoproject.com']
env.user = "buildbot"
# Deployment environment paths and settings and such.
env.deploy_base = unipath.Path('/home/buildbot')
env.virtualenv = env.deploy_base
env.code_dir = env.deploy_base.child('master')
env.git_url = 'git://github.com/jacobian/django-buildmaster.git'
# FIXME: make a deploy branch in this repo to deploy against.
env.default_deploy_ref = 'HEAD'
def deploy():
"""
Full deploy: new code, update dependencies, migrate, and restart services.
"""
deploy_code()
update_dependencies()
restart()
def restart():
pass #sudo('service buildbot restart')
def deploy_code(ref=None):
"""
Update code on the servers from Git.
"""
ref = ref or env.default_deploy_ref
puts("Deploying %s" % ref)
if not files.exists(env.code_dir):
run('git clone %s %s' % (env.git_url, env.code_dir))
with cd(env.code_dir):
run('git fetch && git reset --hard %s' % ref)
def update_dependencies():
"""
Update dependencies in the virtualenv.
"""
pip = env.virtualenv.child('bin', 'pip')
reqs = env.code_dir.child('deploy-requirements.txt')
run('%s -q install -r %s' % (pip, reqs))
|
Deploy as the buildbot user, not root.
|
Deploy as the buildbot user, not root.
|
Python
|
bsd-3-clause
|
jacobian-archive/django-buildmaster,hochanh/django-buildmaster
|
import unipath
from fabric.api import *
from fabric.contrib import files
# Fab settings
env.hosts = ['ve.djangoproject.com']
# Deployment environment paths and settings and such.
env.deploy_base = unipath.Path('/home/buildbot')
env.virtualenv = env.deploy_base
env.code_dir = env.deploy_base.child('master')
env.git_url = 'git://github.com/jacobian/django-buildmaster.git'
# FIXME: make a deploy branch in this repo to deploy against.
env.default_deploy_ref = 'HEAD'
def deploy():
"""
Full deploy: new code, update dependencies, migrate, and restart services.
"""
deploy_code()
update_dependencies()
restart()
def restart():
sudo('service buildbot restart')
def deploy_code(ref=None):
"""
Update code on the servers from Git.
"""
ref = ref or env.default_deploy_ref
puts("Deploying %s" % ref)
if not files.exists(env.code_dir):
sudo('git clone %s %s' % (env.git_url, env.code_dir))
with cd(env.code_dir):
sudo('git fetch && git reset --hard %s' % ref)
def update_dependencies():
"""
Update dependencies in the virtualenv.
"""
pip = env.virtualenv.child('bin', 'pip')
reqs = env.code_dir.child('requirements.txt')
sudo('%s -q install -r %s' % (pip, reqs))Deploy as the buildbot user, not root.
|
import unipath
from fabric.api import *
from fabric.contrib import files
# Fab settings
env.hosts = ['ve.djangoproject.com']
env.user = "buildbot"
# Deployment environment paths and settings and such.
env.deploy_base = unipath.Path('/home/buildbot')
env.virtualenv = env.deploy_base
env.code_dir = env.deploy_base.child('master')
env.git_url = 'git://github.com/jacobian/django-buildmaster.git'
# FIXME: make a deploy branch in this repo to deploy against.
env.default_deploy_ref = 'HEAD'
def deploy():
"""
Full deploy: new code, update dependencies, migrate, and restart services.
"""
deploy_code()
update_dependencies()
restart()
def restart():
pass #sudo('service buildbot restart')
def deploy_code(ref=None):
"""
Update code on the servers from Git.
"""
ref = ref or env.default_deploy_ref
puts("Deploying %s" % ref)
if not files.exists(env.code_dir):
run('git clone %s %s' % (env.git_url, env.code_dir))
with cd(env.code_dir):
run('git fetch && git reset --hard %s' % ref)
def update_dependencies():
"""
Update dependencies in the virtualenv.
"""
pip = env.virtualenv.child('bin', 'pip')
reqs = env.code_dir.child('deploy-requirements.txt')
run('%s -q install -r %s' % (pip, reqs))
|
<commit_before>import unipath
from fabric.api import *
from fabric.contrib import files
# Fab settings
env.hosts = ['ve.djangoproject.com']
# Deployment environment paths and settings and such.
env.deploy_base = unipath.Path('/home/buildbot')
env.virtualenv = env.deploy_base
env.code_dir = env.deploy_base.child('master')
env.git_url = 'git://github.com/jacobian/django-buildmaster.git'
# FIXME: make a deploy branch in this repo to deploy against.
env.default_deploy_ref = 'HEAD'
def deploy():
"""
Full deploy: new code, update dependencies, migrate, and restart services.
"""
deploy_code()
update_dependencies()
restart()
def restart():
sudo('service buildbot restart')
def deploy_code(ref=None):
"""
Update code on the servers from Git.
"""
ref = ref or env.default_deploy_ref
puts("Deploying %s" % ref)
if not files.exists(env.code_dir):
sudo('git clone %s %s' % (env.git_url, env.code_dir))
with cd(env.code_dir):
sudo('git fetch && git reset --hard %s' % ref)
def update_dependencies():
"""
Update dependencies in the virtualenv.
"""
pip = env.virtualenv.child('bin', 'pip')
reqs = env.code_dir.child('requirements.txt')
sudo('%s -q install -r %s' % (pip, reqs))<commit_msg>Deploy as the buildbot user, not root.<commit_after>
|
import unipath
from fabric.api import *
from fabric.contrib import files
# Fab settings
env.hosts = ['ve.djangoproject.com']
env.user = "buildbot"
# Deployment environment paths and settings and such.
env.deploy_base = unipath.Path('/home/buildbot')
env.virtualenv = env.deploy_base
env.code_dir = env.deploy_base.child('master')
env.git_url = 'git://github.com/jacobian/django-buildmaster.git'
# FIXME: make a deploy branch in this repo to deploy against.
env.default_deploy_ref = 'HEAD'
def deploy():
"""
Full deploy: new code, update dependencies, migrate, and restart services.
"""
deploy_code()
update_dependencies()
restart()
def restart():
pass #sudo('service buildbot restart')
def deploy_code(ref=None):
"""
Update code on the servers from Git.
"""
ref = ref or env.default_deploy_ref
puts("Deploying %s" % ref)
if not files.exists(env.code_dir):
run('git clone %s %s' % (env.git_url, env.code_dir))
with cd(env.code_dir):
run('git fetch && git reset --hard %s' % ref)
def update_dependencies():
"""
Update dependencies in the virtualenv.
"""
pip = env.virtualenv.child('bin', 'pip')
reqs = env.code_dir.child('deploy-requirements.txt')
run('%s -q install -r %s' % (pip, reqs))
|
import unipath
from fabric.api import *
from fabric.contrib import files
# Fab settings
env.hosts = ['ve.djangoproject.com']
# Deployment environment paths and settings and such.
env.deploy_base = unipath.Path('/home/buildbot')
env.virtualenv = env.deploy_base
env.code_dir = env.deploy_base.child('master')
env.git_url = 'git://github.com/jacobian/django-buildmaster.git'
# FIXME: make a deploy branch in this repo to deploy against.
env.default_deploy_ref = 'HEAD'
def deploy():
"""
Full deploy: new code, update dependencies, migrate, and restart services.
"""
deploy_code()
update_dependencies()
restart()
def restart():
sudo('service buildbot restart')
def deploy_code(ref=None):
"""
Update code on the servers from Git.
"""
ref = ref or env.default_deploy_ref
puts("Deploying %s" % ref)
if not files.exists(env.code_dir):
sudo('git clone %s %s' % (env.git_url, env.code_dir))
with cd(env.code_dir):
sudo('git fetch && git reset --hard %s' % ref)
def update_dependencies():
"""
Update dependencies in the virtualenv.
"""
pip = env.virtualenv.child('bin', 'pip')
reqs = env.code_dir.child('requirements.txt')
sudo('%s -q install -r %s' % (pip, reqs))Deploy as the buildbot user, not root.import unipath
from fabric.api import *
from fabric.contrib import files
# Fab settings
env.hosts = ['ve.djangoproject.com']
env.user = "buildbot"
# Deployment environment paths and settings and such.
env.deploy_base = unipath.Path('/home/buildbot')
env.virtualenv = env.deploy_base
env.code_dir = env.deploy_base.child('master')
env.git_url = 'git://github.com/jacobian/django-buildmaster.git'
# FIXME: make a deploy branch in this repo to deploy against.
env.default_deploy_ref = 'HEAD'
def deploy():
"""
Full deploy: new code, update dependencies, migrate, and restart services.
"""
deploy_code()
update_dependencies()
restart()
def restart():
pass #sudo('service buildbot restart')
def deploy_code(ref=None):
"""
Update code on the servers from Git.
"""
ref = ref or env.default_deploy_ref
puts("Deploying %s" % ref)
if not files.exists(env.code_dir):
run('git clone %s %s' % (env.git_url, env.code_dir))
with cd(env.code_dir):
run('git fetch && git reset --hard %s' % ref)
def update_dependencies():
"""
Update dependencies in the virtualenv.
"""
pip = env.virtualenv.child('bin', 'pip')
reqs = env.code_dir.child('deploy-requirements.txt')
run('%s -q install -r %s' % (pip, reqs))
|
<commit_before>import unipath
from fabric.api import *
from fabric.contrib import files
# Fab settings
env.hosts = ['ve.djangoproject.com']
# Deployment environment paths and settings and such.
env.deploy_base = unipath.Path('/home/buildbot')
env.virtualenv = env.deploy_base
env.code_dir = env.deploy_base.child('master')
env.git_url = 'git://github.com/jacobian/django-buildmaster.git'
# FIXME: make a deploy branch in this repo to deploy against.
env.default_deploy_ref = 'HEAD'
def deploy():
"""
Full deploy: new code, update dependencies, migrate, and restart services.
"""
deploy_code()
update_dependencies()
restart()
def restart():
sudo('service buildbot restart')
def deploy_code(ref=None):
"""
Update code on the servers from Git.
"""
ref = ref or env.default_deploy_ref
puts("Deploying %s" % ref)
if not files.exists(env.code_dir):
sudo('git clone %s %s' % (env.git_url, env.code_dir))
with cd(env.code_dir):
sudo('git fetch && git reset --hard %s' % ref)
def update_dependencies():
"""
Update dependencies in the virtualenv.
"""
pip = env.virtualenv.child('bin', 'pip')
reqs = env.code_dir.child('requirements.txt')
sudo('%s -q install -r %s' % (pip, reqs))<commit_msg>Deploy as the buildbot user, not root.<commit_after>import unipath
from fabric.api import *
from fabric.contrib import files
# Fab settings
env.hosts = ['ve.djangoproject.com']
env.user = "buildbot"
# Deployment environment paths and settings and such.
env.deploy_base = unipath.Path('/home/buildbot')
env.virtualenv = env.deploy_base
env.code_dir = env.deploy_base.child('master')
env.git_url = 'git://github.com/jacobian/django-buildmaster.git'
# FIXME: make a deploy branch in this repo to deploy against.
env.default_deploy_ref = 'HEAD'
def deploy():
"""
Full deploy: new code, update dependencies, migrate, and restart services.
"""
deploy_code()
update_dependencies()
restart()
def restart():
pass #sudo('service buildbot restart')
def deploy_code(ref=None):
"""
Update code on the servers from Git.
"""
ref = ref or env.default_deploy_ref
puts("Deploying %s" % ref)
if not files.exists(env.code_dir):
run('git clone %s %s' % (env.git_url, env.code_dir))
with cd(env.code_dir):
run('git fetch && git reset --hard %s' % ref)
def update_dependencies():
"""
Update dependencies in the virtualenv.
"""
pip = env.virtualenv.child('bin', 'pip')
reqs = env.code_dir.child('deploy-requirements.txt')
run('%s -q install -r %s' % (pip, reqs))
|
b9b095a2a66f79e36bbad1affaeb57b38e20803b
|
cwod_site/cwod/models.py
|
cwod_site/cwod/models.py
|
from django.db import models
# Create your models here.
class CongressionalRecordVolume(models.Model):
congress = models.IntegerField(db_index=True)
session = models.CharField(max_length=10, db_index=True)
volume = models.IntegerField()
|
from django.db import models
# Create your models here.
class CongressionalRecordVolume(models.Model):
congress = models.IntegerField(db_index=True)
session = models.CharField(max_length=10, db_index=True)
volume = models.IntegerField()
class NgramDateCount(models.Model):
"""Storing the total number of ngrams per date
allows us to show the percentage of a given ngram
on a given date, mainly for graphing purposes.
"""
n = models.IntegerField(db_index=True)
date = models.DateField(db_index=True)
count = models.IntegerField()
class Meta:
unique_together = (('n', 'date', ), )
|
Create model for storing total n-gram counts by date
|
Create model for storing total n-gram counts by date
|
Python
|
bsd-3-clause
|
sunlightlabs/Capitol-Words,sunlightlabs/Capitol-Words,sunlightlabs/Capitol-Words,sunlightlabs/Capitol-Words,sunlightlabs/Capitol-Words,propublica/Capitol-Words,propublica/Capitol-Words,sunlightlabs/Capitol-Words,sunlightlabs/Capitol-Words,propublica/Capitol-Words,propublica/Capitol-Words
|
from django.db import models
# Create your models here.
class CongressionalRecordVolume(models.Model):
congress = models.IntegerField(db_index=True)
session = models.CharField(max_length=10, db_index=True)
volume = models.IntegerField()
Create model for storing total n-gram counts by date
|
from django.db import models
# Create your models here.
class CongressionalRecordVolume(models.Model):
congress = models.IntegerField(db_index=True)
session = models.CharField(max_length=10, db_index=True)
volume = models.IntegerField()
class NgramDateCount(models.Model):
"""Storing the total number of ngrams per date
allows us to show the percentage of a given ngram
on a given date, mainly for graphing purposes.
"""
n = models.IntegerField(db_index=True)
date = models.DateField(db_index=True)
count = models.IntegerField()
class Meta:
unique_together = (('n', 'date', ), )
|
<commit_before>from django.db import models
# Create your models here.
class CongressionalRecordVolume(models.Model):
congress = models.IntegerField(db_index=True)
session = models.CharField(max_length=10, db_index=True)
volume = models.IntegerField()
<commit_msg>Create model for storing total n-gram counts by date<commit_after>
|
from django.db import models
# Create your models here.
class CongressionalRecordVolume(models.Model):
congress = models.IntegerField(db_index=True)
session = models.CharField(max_length=10, db_index=True)
volume = models.IntegerField()
class NgramDateCount(models.Model):
"""Storing the total number of ngrams per date
allows us to show the percentage of a given ngram
on a given date, mainly for graphing purposes.
"""
n = models.IntegerField(db_index=True)
date = models.DateField(db_index=True)
count = models.IntegerField()
class Meta:
unique_together = (('n', 'date', ), )
|
from django.db import models
# Create your models here.
class CongressionalRecordVolume(models.Model):
congress = models.IntegerField(db_index=True)
session = models.CharField(max_length=10, db_index=True)
volume = models.IntegerField()
Create model for storing total n-gram counts by datefrom django.db import models
# Create your models here.
class CongressionalRecordVolume(models.Model):
congress = models.IntegerField(db_index=True)
session = models.CharField(max_length=10, db_index=True)
volume = models.IntegerField()
class NgramDateCount(models.Model):
"""Storing the total number of ngrams per date
allows us to show the percentage of a given ngram
on a given date, mainly for graphing purposes.
"""
n = models.IntegerField(db_index=True)
date = models.DateField(db_index=True)
count = models.IntegerField()
class Meta:
unique_together = (('n', 'date', ), )
|
<commit_before>from django.db import models
# Create your models here.
class CongressionalRecordVolume(models.Model):
congress = models.IntegerField(db_index=True)
session = models.CharField(max_length=10, db_index=True)
volume = models.IntegerField()
<commit_msg>Create model for storing total n-gram counts by date<commit_after>from django.db import models
# Create your models here.
class CongressionalRecordVolume(models.Model):
congress = models.IntegerField(db_index=True)
session = models.CharField(max_length=10, db_index=True)
volume = models.IntegerField()
class NgramDateCount(models.Model):
"""Storing the total number of ngrams per date
allows us to show the percentage of a given ngram
on a given date, mainly for graphing purposes.
"""
n = models.IntegerField(db_index=True)
date = models.DateField(db_index=True)
count = models.IntegerField()
class Meta:
unique_together = (('n', 'date', ), )
|
906c71ed59a6349aed83cd18248dfe8463e3a028
|
src/integrate_tool.py
|
src/integrate_tool.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from bioblend import galaxy
from bioblend import toolshed
if __name__ == '__main__':
gi_url = "http://172.21.23.6:8080/"
ts_url = "http://172.21.23.6:9009/"
name = "qiime"
owner = "iuc"
tool_panel_section_id = "qiime_rRNA_taxonomic_assignation"
gi = galaxy.GalaxyInstance(url=gi_url, key='8a099e97b0a83c73ead9f5b0fe19f4be')
ts = toolshed.ToolShedInstance(url=ts_url)
changeset_revision = str(ts.repositories.get_ordered_installable_revisions(name,
owner)[-1])
gi.toolShed.install_repository_revision(ts_url, name, owner, changeset_revision,
install_tool_dependencies=True, install_repository_dependencies=True,
tool_panel_section_id=tool_panel_section_id)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import argparse
import re
from bioblend import galaxy
from bioblend import toolshed
def retrieve_changeset_revision(ts_url, name, owner):
ts = toolshed.ToolShedInstance(url=ts_url)
ts_repositories = ts.repositories.get_repositories()
ts_id = None
for repo in ts_repositories:
if str(repo['name']) == name and str(repo['owner']) == owner:
ts_id = repo['id']
if ts_id == None:
string = "No repository found for " + name + " (" + owner + ")"
string += " in toolshed at " + ts_url
raise ValueError(string)
return ts.repositories.show_repository_revision(ts_id)['changeset_revision']
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--gi_url', required=True)
parser.add_argument('--ts_url', required=True)
parser.add_argument('--api_key', required=True)
parser.add_argument('--tool_owner', required=True)
parser.add_argument('--tool_name', required=True)
parser.add_argument('--tool_panel_section_id', required=True)
args = parser.parse_args()
gi = galaxy.GalaxyInstance(url=args.gi_url, key=args.api_key)
changeset_revision = retrieve_changeset_revision(args.ts_url, args.tool_name,
args.tool_owner)
print changeset_revision
#gi.toolShed.install_repository_revision(ts_url, args.tool_name, args.tool_owner,
# changeset_revision, install_tool_dependencies=True,
# install_repository_dependencies=True,
# tool_panel_section_id=args.tool_panel_section_id)
|
Improve integrate tool wrapper with arguments
|
Improve integrate tool wrapper with arguments
|
Python
|
apache-2.0
|
ASaiM/framework,ASaiM/framework
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from bioblend import galaxy
from bioblend import toolshed
if __name__ == '__main__':
gi_url = "http://172.21.23.6:8080/"
ts_url = "http://172.21.23.6:9009/"
name = "qiime"
owner = "iuc"
tool_panel_section_id = "qiime_rRNA_taxonomic_assignation"
gi = galaxy.GalaxyInstance(url=gi_url, key='8a099e97b0a83c73ead9f5b0fe19f4be')
ts = toolshed.ToolShedInstance(url=ts_url)
changeset_revision = str(ts.repositories.get_ordered_installable_revisions(name,
owner)[-1])
gi.toolShed.install_repository_revision(ts_url, name, owner, changeset_revision,
install_tool_dependencies=True, install_repository_dependencies=True,
tool_panel_section_id=tool_panel_section_id)
Improve integrate tool wrapper with arguments
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import argparse
import re
from bioblend import galaxy
from bioblend import toolshed
def retrieve_changeset_revision(ts_url, name, owner):
ts = toolshed.ToolShedInstance(url=ts_url)
ts_repositories = ts.repositories.get_repositories()
ts_id = None
for repo in ts_repositories:
if str(repo['name']) == name and str(repo['owner']) == owner:
ts_id = repo['id']
if ts_id == None:
string = "No repository found for " + name + " (" + owner + ")"
string += " in toolshed at " + ts_url
raise ValueError(string)
return ts.repositories.show_repository_revision(ts_id)['changeset_revision']
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--gi_url', required=True)
parser.add_argument('--ts_url', required=True)
parser.add_argument('--api_key', required=True)
parser.add_argument('--tool_owner', required=True)
parser.add_argument('--tool_name', required=True)
parser.add_argument('--tool_panel_section_id', required=True)
args = parser.parse_args()
gi = galaxy.GalaxyInstance(url=args.gi_url, key=args.api_key)
changeset_revision = retrieve_changeset_revision(args.ts_url, args.tool_name,
args.tool_owner)
print changeset_revision
#gi.toolShed.install_repository_revision(ts_url, args.tool_name, args.tool_owner,
# changeset_revision, install_tool_dependencies=True,
# install_repository_dependencies=True,
# tool_panel_section_id=args.tool_panel_section_id)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from bioblend import galaxy
from bioblend import toolshed
if __name__ == '__main__':
gi_url = "http://172.21.23.6:8080/"
ts_url = "http://172.21.23.6:9009/"
name = "qiime"
owner = "iuc"
tool_panel_section_id = "qiime_rRNA_taxonomic_assignation"
gi = galaxy.GalaxyInstance(url=gi_url, key='8a099e97b0a83c73ead9f5b0fe19f4be')
ts = toolshed.ToolShedInstance(url=ts_url)
changeset_revision = str(ts.repositories.get_ordered_installable_revisions(name,
owner)[-1])
gi.toolShed.install_repository_revision(ts_url, name, owner, changeset_revision,
install_tool_dependencies=True, install_repository_dependencies=True,
tool_panel_section_id=tool_panel_section_id)
<commit_msg>Improve integrate tool wrapper with arguments<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import argparse
import re
from bioblend import galaxy
from bioblend import toolshed
def retrieve_changeset_revision(ts_url, name, owner):
ts = toolshed.ToolShedInstance(url=ts_url)
ts_repositories = ts.repositories.get_repositories()
ts_id = None
for repo in ts_repositories:
if str(repo['name']) == name and str(repo['owner']) == owner:
ts_id = repo['id']
if ts_id == None:
string = "No repository found for " + name + " (" + owner + ")"
string += " in toolshed at " + ts_url
raise ValueError(string)
return ts.repositories.show_repository_revision(ts_id)['changeset_revision']
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--gi_url', required=True)
parser.add_argument('--ts_url', required=True)
parser.add_argument('--api_key', required=True)
parser.add_argument('--tool_owner', required=True)
parser.add_argument('--tool_name', required=True)
parser.add_argument('--tool_panel_section_id', required=True)
args = parser.parse_args()
gi = galaxy.GalaxyInstance(url=args.gi_url, key=args.api_key)
changeset_revision = retrieve_changeset_revision(args.ts_url, args.tool_name,
args.tool_owner)
print changeset_revision
#gi.toolShed.install_repository_revision(ts_url, args.tool_name, args.tool_owner,
# changeset_revision, install_tool_dependencies=True,
# install_repository_dependencies=True,
# tool_panel_section_id=args.tool_panel_section_id)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from bioblend import galaxy
from bioblend import toolshed
if __name__ == '__main__':
gi_url = "http://172.21.23.6:8080/"
ts_url = "http://172.21.23.6:9009/"
name = "qiime"
owner = "iuc"
tool_panel_section_id = "qiime_rRNA_taxonomic_assignation"
gi = galaxy.GalaxyInstance(url=gi_url, key='8a099e97b0a83c73ead9f5b0fe19f4be')
ts = toolshed.ToolShedInstance(url=ts_url)
changeset_revision = str(ts.repositories.get_ordered_installable_revisions(name,
owner)[-1])
gi.toolShed.install_repository_revision(ts_url, name, owner, changeset_revision,
install_tool_dependencies=True, install_repository_dependencies=True,
tool_panel_section_id=tool_panel_section_id)
Improve integrate tool wrapper with arguments#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import argparse
import re
from bioblend import galaxy
from bioblend import toolshed
def retrieve_changeset_revision(ts_url, name, owner):
ts = toolshed.ToolShedInstance(url=ts_url)
ts_repositories = ts.repositories.get_repositories()
ts_id = None
for repo in ts_repositories:
if str(repo['name']) == name and str(repo['owner']) == owner:
ts_id = repo['id']
if ts_id == None:
string = "No repository found for " + name + " (" + owner + ")"
string += " in toolshed at " + ts_url
raise ValueError(string)
return ts.repositories.show_repository_revision(ts_id)['changeset_revision']
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--gi_url', required=True)
parser.add_argument('--ts_url', required=True)
parser.add_argument('--api_key', required=True)
parser.add_argument('--tool_owner', required=True)
parser.add_argument('--tool_name', required=True)
parser.add_argument('--tool_panel_section_id', required=True)
args = parser.parse_args()
gi = galaxy.GalaxyInstance(url=args.gi_url, key=args.api_key)
changeset_revision = retrieve_changeset_revision(args.ts_url, args.tool_name,
args.tool_owner)
print changeset_revision
#gi.toolShed.install_repository_revision(ts_url, args.tool_name, args.tool_owner,
# changeset_revision, install_tool_dependencies=True,
# install_repository_dependencies=True,
# tool_panel_section_id=args.tool_panel_section_id)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from bioblend import galaxy
from bioblend import toolshed
if __name__ == '__main__':
gi_url = "http://172.21.23.6:8080/"
ts_url = "http://172.21.23.6:9009/"
name = "qiime"
owner = "iuc"
tool_panel_section_id = "qiime_rRNA_taxonomic_assignation"
gi = galaxy.GalaxyInstance(url=gi_url, key='8a099e97b0a83c73ead9f5b0fe19f4be')
ts = toolshed.ToolShedInstance(url=ts_url)
changeset_revision = str(ts.repositories.get_ordered_installable_revisions(name,
owner)[-1])
gi.toolShed.install_repository_revision(ts_url, name, owner, changeset_revision,
install_tool_dependencies=True, install_repository_dependencies=True,
tool_panel_section_id=tool_panel_section_id)
<commit_msg>Improve integrate tool wrapper with arguments<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import argparse
import re
from bioblend import galaxy
from bioblend import toolshed
def retrieve_changeset_revision(ts_url, name, owner):
ts = toolshed.ToolShedInstance(url=ts_url)
ts_repositories = ts.repositories.get_repositories()
ts_id = None
for repo in ts_repositories:
if str(repo['name']) == name and str(repo['owner']) == owner:
ts_id = repo['id']
if ts_id == None:
string = "No repository found for " + name + " (" + owner + ")"
string += " in toolshed at " + ts_url
raise ValueError(string)
return ts.repositories.show_repository_revision(ts_id)['changeset_revision']
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--gi_url', required=True)
parser.add_argument('--ts_url', required=True)
parser.add_argument('--api_key', required=True)
parser.add_argument('--tool_owner', required=True)
parser.add_argument('--tool_name', required=True)
parser.add_argument('--tool_panel_section_id', required=True)
args = parser.parse_args()
gi = galaxy.GalaxyInstance(url=args.gi_url, key=args.api_key)
changeset_revision = retrieve_changeset_revision(args.ts_url, args.tool_name,
args.tool_owner)
print changeset_revision
#gi.toolShed.install_repository_revision(ts_url, args.tool_name, args.tool_owner,
# changeset_revision, install_tool_dependencies=True,
# install_repository_dependencies=True,
# tool_panel_section_id=args.tool_panel_section_id)
|
310d7043666726d503dc80894b072d3a7ae29f16
|
html_snapshots/utils.py
|
html_snapshots/utils.py
|
import rmc.shared.constants as c
import rmc.models as m
import mongoengine as me
import os
FILE_DIR = os.path.dirname(os.path.realpath(__file__))
HTML_DIR = os.path.join(FILE_DIR, 'html')
me.connect(c.MONGO_DB_RMC, host=c.MONGO_HOST, port=c.MONGO_PORT)
def write(file_path, content):
ensure_dir(file_path)
with open(file_path, 'w') as f:
f.write(content)
def ensure_dir(file_path):
d = os.path.dirname(file_path)
if not os.path.exists(d):
os.makedirs(d)
def generate_urls():
urls = []
# Home page
urls.append('/')
# Course pages
for course in m.Course.objects:
course_id = course.id
urls.append('course/' + course_id)
return urls
|
import rmc.shared.constants as c
import rmc.models as m
import mongoengine as me
import os
FILE_DIR = os.path.dirname(os.path.realpath(__file__))
HTML_DIR = os.path.join(FILE_DIR, 'html')
me.connect(c.MONGO_DB_RMC, host=c.MONGO_HOST, port=c.MONGO_PORT)
def write(file_path, content):
ensure_dir(file_path)
with open(file_path, 'w') as f:
f.write(content)
def ensure_dir(file_path):
d = os.path.dirname(file_path)
if not os.path.exists(d):
os.makedirs(d)
def generate_urls():
urls = []
# Home page
urls.append('')
# Course pages
for course in m.Course.objects:
course_id = course.id
urls.append('course/' + course_id)
return urls
|
Fix url path for homepage for sitemap
|
Fix url path for homepage for sitemap
|
Python
|
mit
|
rageandqq/rmc,ccqi/rmc,rageandqq/rmc,sachdevs/rmc,UWFlow/rmc,shakilkanji/rmc,sachdevs/rmc,duaayousif/rmc,MichalKononenko/rmc,duaayousif/rmc,sachdevs/rmc,JGulbronson/rmc,shakilkanji/rmc,UWFlow/rmc,JGulbronson/rmc,rageandqq/rmc,sachdevs/rmc,UWFlow/rmc,rageandqq/rmc,shakilkanji/rmc,shakilkanji/rmc,UWFlow/rmc,UWFlow/rmc,JGulbronson/rmc,MichalKononenko/rmc,JGulbronson/rmc,duaayousif/rmc,MichalKononenko/rmc,ccqi/rmc,ccqi/rmc,JGulbronson/rmc,ccqi/rmc,duaayousif/rmc,MichalKononenko/rmc,ccqi/rmc,sachdevs/rmc,rageandqq/rmc,shakilkanji/rmc,duaayousif/rmc,MichalKononenko/rmc
|
import rmc.shared.constants as c
import rmc.models as m
import mongoengine as me
import os
FILE_DIR = os.path.dirname(os.path.realpath(__file__))
HTML_DIR = os.path.join(FILE_DIR, 'html')
me.connect(c.MONGO_DB_RMC, host=c.MONGO_HOST, port=c.MONGO_PORT)
def write(file_path, content):
ensure_dir(file_path)
with open(file_path, 'w') as f:
f.write(content)
def ensure_dir(file_path):
d = os.path.dirname(file_path)
if not os.path.exists(d):
os.makedirs(d)
def generate_urls():
urls = []
# Home page
urls.append('/')
# Course pages
for course in m.Course.objects:
course_id = course.id
urls.append('course/' + course_id)
return urls
Fix url path for homepage for sitemap
|
import rmc.shared.constants as c
import rmc.models as m
import mongoengine as me
import os
FILE_DIR = os.path.dirname(os.path.realpath(__file__))
HTML_DIR = os.path.join(FILE_DIR, 'html')
me.connect(c.MONGO_DB_RMC, host=c.MONGO_HOST, port=c.MONGO_PORT)
def write(file_path, content):
ensure_dir(file_path)
with open(file_path, 'w') as f:
f.write(content)
def ensure_dir(file_path):
d = os.path.dirname(file_path)
if not os.path.exists(d):
os.makedirs(d)
def generate_urls():
urls = []
# Home page
urls.append('')
# Course pages
for course in m.Course.objects:
course_id = course.id
urls.append('course/' + course_id)
return urls
|
<commit_before>import rmc.shared.constants as c
import rmc.models as m
import mongoengine as me
import os
FILE_DIR = os.path.dirname(os.path.realpath(__file__))
HTML_DIR = os.path.join(FILE_DIR, 'html')
me.connect(c.MONGO_DB_RMC, host=c.MONGO_HOST, port=c.MONGO_PORT)
def write(file_path, content):
ensure_dir(file_path)
with open(file_path, 'w') as f:
f.write(content)
def ensure_dir(file_path):
d = os.path.dirname(file_path)
if not os.path.exists(d):
os.makedirs(d)
def generate_urls():
urls = []
# Home page
urls.append('/')
# Course pages
for course in m.Course.objects:
course_id = course.id
urls.append('course/' + course_id)
return urls
<commit_msg>Fix url path for homepage for sitemap<commit_after>
|
import rmc.shared.constants as c
import rmc.models as m
import mongoengine as me
import os
FILE_DIR = os.path.dirname(os.path.realpath(__file__))
HTML_DIR = os.path.join(FILE_DIR, 'html')
me.connect(c.MONGO_DB_RMC, host=c.MONGO_HOST, port=c.MONGO_PORT)
def write(file_path, content):
ensure_dir(file_path)
with open(file_path, 'w') as f:
f.write(content)
def ensure_dir(file_path):
d = os.path.dirname(file_path)
if not os.path.exists(d):
os.makedirs(d)
def generate_urls():
urls = []
# Home page
urls.append('')
# Course pages
for course in m.Course.objects:
course_id = course.id
urls.append('course/' + course_id)
return urls
|
import rmc.shared.constants as c
import rmc.models as m
import mongoengine as me
import os
FILE_DIR = os.path.dirname(os.path.realpath(__file__))
HTML_DIR = os.path.join(FILE_DIR, 'html')
me.connect(c.MONGO_DB_RMC, host=c.MONGO_HOST, port=c.MONGO_PORT)
def write(file_path, content):
ensure_dir(file_path)
with open(file_path, 'w') as f:
f.write(content)
def ensure_dir(file_path):
d = os.path.dirname(file_path)
if not os.path.exists(d):
os.makedirs(d)
def generate_urls():
urls = []
# Home page
urls.append('/')
# Course pages
for course in m.Course.objects:
course_id = course.id
urls.append('course/' + course_id)
return urls
Fix url path for homepage for sitemapimport rmc.shared.constants as c
import rmc.models as m
import mongoengine as me
import os
FILE_DIR = os.path.dirname(os.path.realpath(__file__))
HTML_DIR = os.path.join(FILE_DIR, 'html')
me.connect(c.MONGO_DB_RMC, host=c.MONGO_HOST, port=c.MONGO_PORT)
def write(file_path, content):
ensure_dir(file_path)
with open(file_path, 'w') as f:
f.write(content)
def ensure_dir(file_path):
d = os.path.dirname(file_path)
if not os.path.exists(d):
os.makedirs(d)
def generate_urls():
urls = []
# Home page
urls.append('')
# Course pages
for course in m.Course.objects:
course_id = course.id
urls.append('course/' + course_id)
return urls
|
<commit_before>import rmc.shared.constants as c
import rmc.models as m
import mongoengine as me
import os
FILE_DIR = os.path.dirname(os.path.realpath(__file__))
HTML_DIR = os.path.join(FILE_DIR, 'html')
me.connect(c.MONGO_DB_RMC, host=c.MONGO_HOST, port=c.MONGO_PORT)
def write(file_path, content):
ensure_dir(file_path)
with open(file_path, 'w') as f:
f.write(content)
def ensure_dir(file_path):
d = os.path.dirname(file_path)
if not os.path.exists(d):
os.makedirs(d)
def generate_urls():
urls = []
# Home page
urls.append('/')
# Course pages
for course in m.Course.objects:
course_id = course.id
urls.append('course/' + course_id)
return urls
<commit_msg>Fix url path for homepage for sitemap<commit_after>import rmc.shared.constants as c
import rmc.models as m
import mongoengine as me
import os
FILE_DIR = os.path.dirname(os.path.realpath(__file__))
HTML_DIR = os.path.join(FILE_DIR, 'html')
me.connect(c.MONGO_DB_RMC, host=c.MONGO_HOST, port=c.MONGO_PORT)
def write(file_path, content):
ensure_dir(file_path)
with open(file_path, 'w') as f:
f.write(content)
def ensure_dir(file_path):
d = os.path.dirname(file_path)
if not os.path.exists(d):
os.makedirs(d)
def generate_urls():
urls = []
# Home page
urls.append('')
# Course pages
for course in m.Course.objects:
course_id = course.id
urls.append('course/' + course_id)
return urls
|
90fa23d1d1b2497d65507b7930323b118f512a25
|
disco_aws_automation/disco_acm.py
|
disco_aws_automation/disco_acm.py
|
"""
Some code to manage the Amazon Certificate Service.
"""
import logging
import boto3
import botocore
class DiscoACM(object):
"""
A class to manage the Amazon Certificate Service
"""
def __init__(self, connection=None):
self._acm = connection
@property
def acm(self):
"""
Lazily creates ACM connection
NOTE!!! As of 2016-02-11 ACM is not supported outside the us-east-1 region.
Return None if service does not exist in current region
"""
if not self._acm:
try:
self._acm = boto3.client('acm', region_name='us-east-1')
except Exception:
logging.warning("ACM service does not exist in current region")
return None
return self._acm
def get_certificate_arn(self, dns_name):
"""Returns a Certificate ARN from the Amazon Certificate Service given the DNS name"""
if not self.acm:
return None
try:
certs = self.acm.list_certificates()["CertificateSummaryList"]
cert = [cert['CertificateArn'] for cert in certs if cert['DomainName'] == dns_name]
return cert[0] if cert else None
except (botocore.exceptions.EndpointConnectionError, botocore.vendored.requests.exceptions.ConnectionError):
# some versions of botocore(1.3.26) will try to connect to acm even if outside us-east-1
return None
|
"""
Some code to manage the Amazon Certificate Service.
"""
import logging
import boto3
import botocore
class DiscoACM(object):
"""
A class to manage the Amazon Certificate Service
"""
def __init__(self, connection=None):
self._acm = connection
@property
def acm(self):
"""
Lazily creates ACM connection
NOTE!!! As of 2016-02-11 ACM is not supported outside the us-east-1 region.
Return None if service does not exist in current region
"""
if not self._acm:
try:
self._acm = boto3.client('acm', region_name='us-east-1')
except Exception:
logging.warning("ACM service does not exist in current region")
return None
return self._acm
def get_certificate_arn(self, dns_name):
"""Returns a Certificate ARN from the Amazon Certificate Service given the DNS name"""
if not self.acm:
return None
try:
certs = self.acm.list_certificates()["CertificateSummaryList"]
cert = [cert['CertificateArn'] for cert in certs if cert['DomainName'] == dns_name]
return cert[0] if cert else None
except botocore.exceptions.EndpointConnectionError:
# some versions of botocore(1.3.26) will try to connect to acm even if outside us-east-1
return None
|
Revert "Swallow proxy exception from requests"
|
Revert "Swallow proxy exception from requests"
This reverts commit 8d9ccbb2bbde7c2f8dbe60b90f730d87b924d86e.
|
Python
|
bsd-2-clause
|
amplifylitco/asiaq,amplifylitco/asiaq,amplifylitco/asiaq
|
"""
Some code to manage the Amazon Certificate Service.
"""
import logging
import boto3
import botocore
class DiscoACM(object):
"""
A class to manage the Amazon Certificate Service
"""
def __init__(self, connection=None):
self._acm = connection
@property
def acm(self):
"""
Lazily creates ACM connection
NOTE!!! As of 2016-02-11 ACM is not supported outside the us-east-1 region.
Return None if service does not exist in current region
"""
if not self._acm:
try:
self._acm = boto3.client('acm', region_name='us-east-1')
except Exception:
logging.warning("ACM service does not exist in current region")
return None
return self._acm
def get_certificate_arn(self, dns_name):
"""Returns a Certificate ARN from the Amazon Certificate Service given the DNS name"""
if not self.acm:
return None
try:
certs = self.acm.list_certificates()["CertificateSummaryList"]
cert = [cert['CertificateArn'] for cert in certs if cert['DomainName'] == dns_name]
return cert[0] if cert else None
except (botocore.exceptions.EndpointConnectionError, botocore.vendored.requests.exceptions.ConnectionError):
# some versions of botocore(1.3.26) will try to connect to acm even if outside us-east-1
return None
Revert "Swallow proxy exception from requests"
This reverts commit 8d9ccbb2bbde7c2f8dbe60b90f730d87b924d86e.
|
"""
Some code to manage the Amazon Certificate Service.
"""
import logging
import boto3
import botocore
class DiscoACM(object):
"""
A class to manage the Amazon Certificate Service
"""
def __init__(self, connection=None):
self._acm = connection
@property
def acm(self):
"""
Lazily creates ACM connection
NOTE!!! As of 2016-02-11 ACM is not supported outside the us-east-1 region.
Return None if service does not exist in current region
"""
if not self._acm:
try:
self._acm = boto3.client('acm', region_name='us-east-1')
except Exception:
logging.warning("ACM service does not exist in current region")
return None
return self._acm
def get_certificate_arn(self, dns_name):
"""Returns a Certificate ARN from the Amazon Certificate Service given the DNS name"""
if not self.acm:
return None
try:
certs = self.acm.list_certificates()["CertificateSummaryList"]
cert = [cert['CertificateArn'] for cert in certs if cert['DomainName'] == dns_name]
return cert[0] if cert else None
except botocore.exceptions.EndpointConnectionError:
# some versions of botocore(1.3.26) will try to connect to acm even if outside us-east-1
return None
|
<commit_before>"""
Some code to manage the Amazon Certificate Service.
"""
import logging
import boto3
import botocore
class DiscoACM(object):
"""
A class to manage the Amazon Certificate Service
"""
def __init__(self, connection=None):
self._acm = connection
@property
def acm(self):
"""
Lazily creates ACM connection
NOTE!!! As of 2016-02-11 ACM is not supported outside the us-east-1 region.
Return None if service does not exist in current region
"""
if not self._acm:
try:
self._acm = boto3.client('acm', region_name='us-east-1')
except Exception:
logging.warning("ACM service does not exist in current region")
return None
return self._acm
def get_certificate_arn(self, dns_name):
"""Returns a Certificate ARN from the Amazon Certificate Service given the DNS name"""
if not self.acm:
return None
try:
certs = self.acm.list_certificates()["CertificateSummaryList"]
cert = [cert['CertificateArn'] for cert in certs if cert['DomainName'] == dns_name]
return cert[0] if cert else None
except (botocore.exceptions.EndpointConnectionError, botocore.vendored.requests.exceptions.ConnectionError):
# some versions of botocore(1.3.26) will try to connect to acm even if outside us-east-1
return None
<commit_msg>Revert "Swallow proxy exception from requests"
This reverts commit 8d9ccbb2bbde7c2f8dbe60b90f730d87b924d86e.<commit_after>
|
"""
Some code to manage the Amazon Certificate Service.
"""
import logging
import boto3
import botocore
class DiscoACM(object):
"""
A class to manage the Amazon Certificate Service
"""
def __init__(self, connection=None):
self._acm = connection
@property
def acm(self):
"""
Lazily creates ACM connection
NOTE!!! As of 2016-02-11 ACM is not supported outside the us-east-1 region.
Return None if service does not exist in current region
"""
if not self._acm:
try:
self._acm = boto3.client('acm', region_name='us-east-1')
except Exception:
logging.warning("ACM service does not exist in current region")
return None
return self._acm
def get_certificate_arn(self, dns_name):
"""Returns a Certificate ARN from the Amazon Certificate Service given the DNS name"""
if not self.acm:
return None
try:
certs = self.acm.list_certificates()["CertificateSummaryList"]
cert = [cert['CertificateArn'] for cert in certs if cert['DomainName'] == dns_name]
return cert[0] if cert else None
except botocore.exceptions.EndpointConnectionError:
# some versions of botocore(1.3.26) will try to connect to acm even if outside us-east-1
return None
|
"""
Some code to manage the Amazon Certificate Service.
"""
import logging
import boto3
import botocore
class DiscoACM(object):
"""
A class to manage the Amazon Certificate Service
"""
def __init__(self, connection=None):
self._acm = connection
@property
def acm(self):
"""
Lazily creates ACM connection
NOTE!!! As of 2016-02-11 ACM is not supported outside the us-east-1 region.
Return None if service does not exist in current region
"""
if not self._acm:
try:
self._acm = boto3.client('acm', region_name='us-east-1')
except Exception:
logging.warning("ACM service does not exist in current region")
return None
return self._acm
def get_certificate_arn(self, dns_name):
"""Returns a Certificate ARN from the Amazon Certificate Service given the DNS name"""
if not self.acm:
return None
try:
certs = self.acm.list_certificates()["CertificateSummaryList"]
cert = [cert['CertificateArn'] for cert in certs if cert['DomainName'] == dns_name]
return cert[0] if cert else None
except (botocore.exceptions.EndpointConnectionError, botocore.vendored.requests.exceptions.ConnectionError):
# some versions of botocore(1.3.26) will try to connect to acm even if outside us-east-1
return None
Revert "Swallow proxy exception from requests"
This reverts commit 8d9ccbb2bbde7c2f8dbe60b90f730d87b924d86e."""
Some code to manage the Amazon Certificate Service.
"""
import logging
import boto3
import botocore
class DiscoACM(object):
"""
A class to manage the Amazon Certificate Service
"""
def __init__(self, connection=None):
self._acm = connection
@property
def acm(self):
"""
Lazily creates ACM connection
NOTE!!! As of 2016-02-11 ACM is not supported outside the us-east-1 region.
Return None if service does not exist in current region
"""
if not self._acm:
try:
self._acm = boto3.client('acm', region_name='us-east-1')
except Exception:
logging.warning("ACM service does not exist in current region")
return None
return self._acm
def get_certificate_arn(self, dns_name):
"""Returns a Certificate ARN from the Amazon Certificate Service given the DNS name"""
if not self.acm:
return None
try:
certs = self.acm.list_certificates()["CertificateSummaryList"]
cert = [cert['CertificateArn'] for cert in certs if cert['DomainName'] == dns_name]
return cert[0] if cert else None
except botocore.exceptions.EndpointConnectionError:
# some versions of botocore(1.3.26) will try to connect to acm even if outside us-east-1
return None
|
<commit_before>"""
Some code to manage the Amazon Certificate Service.
"""
import logging
import boto3
import botocore
class DiscoACM(object):
"""
A class to manage the Amazon Certificate Service
"""
def __init__(self, connection=None):
self._acm = connection
@property
def acm(self):
"""
Lazily creates ACM connection
NOTE!!! As of 2016-02-11 ACM is not supported outside the us-east-1 region.
Return None if service does not exist in current region
"""
if not self._acm:
try:
self._acm = boto3.client('acm', region_name='us-east-1')
except Exception:
logging.warning("ACM service does not exist in current region")
return None
return self._acm
def get_certificate_arn(self, dns_name):
"""Returns a Certificate ARN from the Amazon Certificate Service given the DNS name"""
if not self.acm:
return None
try:
certs = self.acm.list_certificates()["CertificateSummaryList"]
cert = [cert['CertificateArn'] for cert in certs if cert['DomainName'] == dns_name]
return cert[0] if cert else None
except (botocore.exceptions.EndpointConnectionError, botocore.vendored.requests.exceptions.ConnectionError):
# some versions of botocore(1.3.26) will try to connect to acm even if outside us-east-1
return None
<commit_msg>Revert "Swallow proxy exception from requests"
This reverts commit 8d9ccbb2bbde7c2f8dbe60b90f730d87b924d86e.<commit_after>"""
Some code to manage the Amazon Certificate Service.
"""
import logging
import boto3
import botocore
class DiscoACM(object):
"""
A class to manage the Amazon Certificate Service
"""
def __init__(self, connection=None):
self._acm = connection
@property
def acm(self):
"""
Lazily creates ACM connection
NOTE!!! As of 2016-02-11 ACM is not supported outside the us-east-1 region.
Return None if service does not exist in current region
"""
if not self._acm:
try:
self._acm = boto3.client('acm', region_name='us-east-1')
except Exception:
logging.warning("ACM service does not exist in current region")
return None
return self._acm
def get_certificate_arn(self, dns_name):
"""Returns a Certificate ARN from the Amazon Certificate Service given the DNS name"""
if not self.acm:
return None
try:
certs = self.acm.list_certificates()["CertificateSummaryList"]
cert = [cert['CertificateArn'] for cert in certs if cert['DomainName'] == dns_name]
return cert[0] if cert else None
except botocore.exceptions.EndpointConnectionError:
# some versions of botocore(1.3.26) will try to connect to acm even if outside us-east-1
return None
|
e3c12bd54e143086dd332a51195e4eb3f7305201
|
exercise3.py
|
exercise3.py
|
#!/usr/bin/env python
""" Assignment 1, Exercise 3, INF1340, Fall, 2015. Troubleshooting Car Issues.
This module contains one function diagnose_car(). It is an expert system to
interactive diagnose car issues.
"""
__author__ = 'Susan Sim'
__email__ = "ses@drsusansim.org"
__copyright__ = "2015 Susan Sim"
__license__ = "MIT License"
def diagnose_car():
"""
Interactively queries the user with yes/no questions to identify a
possible issue with a car.
Inputs:
Expected Outputs:
Errors:
"""
diagnose_car()
|
#!/usr/bin/env python
""" Assignment 1, Exercise 3, INF1340, Fall, 2015. Troubleshooting Car Issues.
This module contains one function diagnose_car(). It is an expert system to
interactive diagnose car issues.
"""
__author__ = 'Susan Sim'
__email__ = "ses@drsusansim.org"
__copyright__ = "2015 Susan Sim"
__license__ = "MIT License"
def diagnose_car():
"""
Interactively queries the user with yes/no questions to identify a
possible issue with a car.
Inputs:
Expected Outputs:
Errors:
"""
# First Trouble shooting Question
print("Troubleshooting Car Issues")
print("For all questions answer y for Yes or n for No")
# First Question is Yes
question1 = raw_input("Is the car silent when you turn the key?")
if question1 == "y":
question2 = raw_input("Are the battery terminals corroded?")
if question2 == "y":
print("Clean terminals and try starting again")
elif question2 == "n":
print("Replace cables and try again!")
else:
print("Please select y or n only, Try again!")
diagnose_car()
|
Update - First question yes, small coding done
|
Update - First question yes, small coding done
|
Python
|
mit
|
xueshen3/inf1340_2015_asst1
|
#!/usr/bin/env python
""" Assignment 1, Exercise 3, INF1340, Fall, 2015. Troubleshooting Car Issues.
This module contains one function diagnose_car(). It is an expert system to
interactive diagnose car issues.
"""
__author__ = 'Susan Sim'
__email__ = "ses@drsusansim.org"
__copyright__ = "2015 Susan Sim"
__license__ = "MIT License"
def diagnose_car():
"""
Interactively queries the user with yes/no questions to identify a
possible issue with a car.
Inputs:
Expected Outputs:
Errors:
"""
diagnose_car()Update - First question yes, small coding done
|
#!/usr/bin/env python
""" Assignment 1, Exercise 3, INF1340, Fall, 2015. Troubleshooting Car Issues.
This module contains one function diagnose_car(). It is an expert system to
interactive diagnose car issues.
"""
__author__ = 'Susan Sim'
__email__ = "ses@drsusansim.org"
__copyright__ = "2015 Susan Sim"
__license__ = "MIT License"
def diagnose_car():
"""
Interactively queries the user with yes/no questions to identify a
possible issue with a car.
Inputs:
Expected Outputs:
Errors:
"""
# First Trouble shooting Question
print("Troubleshooting Car Issues")
print("For all questions answer y for Yes or n for No")
# First Question is Yes
question1 = raw_input("Is the car silent when you turn the key?")
if question1 == "y":
question2 = raw_input("Are the battery terminals corroded?")
if question2 == "y":
print("Clean terminals and try starting again")
elif question2 == "n":
print("Replace cables and try again!")
else:
print("Please select y or n only, Try again!")
diagnose_car()
|
<commit_before>#!/usr/bin/env python
""" Assignment 1, Exercise 3, INF1340, Fall, 2015. Troubleshooting Car Issues.
This module contains one function diagnose_car(). It is an expert system to
interactive diagnose car issues.
"""
__author__ = 'Susan Sim'
__email__ = "ses@drsusansim.org"
__copyright__ = "2015 Susan Sim"
__license__ = "MIT License"
def diagnose_car():
"""
Interactively queries the user with yes/no questions to identify a
possible issue with a car.
Inputs:
Expected Outputs:
Errors:
"""
diagnose_car()<commit_msg>Update - First question yes, small coding done<commit_after>
|
#!/usr/bin/env python
""" Assignment 1, Exercise 3, INF1340, Fall, 2015. Troubleshooting Car Issues.
This module contains one function diagnose_car(). It is an expert system to
interactive diagnose car issues.
"""
__author__ = 'Susan Sim'
__email__ = "ses@drsusansim.org"
__copyright__ = "2015 Susan Sim"
__license__ = "MIT License"
def diagnose_car():
"""
Interactively queries the user with yes/no questions to identify a
possible issue with a car.
Inputs:
Expected Outputs:
Errors:
"""
# First Trouble shooting Question
print("Troubleshooting Car Issues")
print("For all questions answer y for Yes or n for No")
# First Question is Yes
question1 = raw_input("Is the car silent when you turn the key?")
if question1 == "y":
question2 = raw_input("Are the battery terminals corroded?")
if question2 == "y":
print("Clean terminals and try starting again")
elif question2 == "n":
print("Replace cables and try again!")
else:
print("Please select y or n only, Try again!")
diagnose_car()
|
#!/usr/bin/env python
""" Assignment 1, Exercise 3, INF1340, Fall, 2015. Troubleshooting Car Issues.
This module contains one function diagnose_car(). It is an expert system to
interactive diagnose car issues.
"""
__author__ = 'Susan Sim'
__email__ = "ses@drsusansim.org"
__copyright__ = "2015 Susan Sim"
__license__ = "MIT License"
def diagnose_car():
"""
Interactively queries the user with yes/no questions to identify a
possible issue with a car.
Inputs:
Expected Outputs:
Errors:
"""
diagnose_car()Update - First question yes, small coding done#!/usr/bin/env python
""" Assignment 1, Exercise 3, INF1340, Fall, 2015. Troubleshooting Car Issues.
This module contains one function diagnose_car(). It is an expert system to
interactive diagnose car issues.
"""
__author__ = 'Susan Sim'
__email__ = "ses@drsusansim.org"
__copyright__ = "2015 Susan Sim"
__license__ = "MIT License"
def diagnose_car():
"""
Interactively queries the user with yes/no questions to identify a
possible issue with a car.
Inputs:
Expected Outputs:
Errors:
"""
# First Trouble shooting Question
print("Troubleshooting Car Issues")
print("For all questions answer y for Yes or n for No")
# First Question is Yes
question1 = raw_input("Is the car silent when you turn the key?")
if question1 == "y":
question2 = raw_input("Are the battery terminals corroded?")
if question2 == "y":
print("Clean terminals and try starting again")
elif question2 == "n":
print("Replace cables and try again!")
else:
print("Please select y or n only, Try again!")
diagnose_car()
|
<commit_before>#!/usr/bin/env python
""" Assignment 1, Exercise 3, INF1340, Fall, 2015. Troubleshooting Car Issues.
This module contains one function diagnose_car(). It is an expert system to
interactive diagnose car issues.
"""
__author__ = 'Susan Sim'
__email__ = "ses@drsusansim.org"
__copyright__ = "2015 Susan Sim"
__license__ = "MIT License"
def diagnose_car():
"""
Interactively queries the user with yes/no questions to identify a
possible issue with a car.
Inputs:
Expected Outputs:
Errors:
"""
diagnose_car()<commit_msg>Update - First question yes, small coding done<commit_after>#!/usr/bin/env python
""" Assignment 1, Exercise 3, INF1340, Fall, 2015. Troubleshooting Car Issues.
This module contains one function diagnose_car(). It is an expert system to
interactive diagnose car issues.
"""
__author__ = 'Susan Sim'
__email__ = "ses@drsusansim.org"
__copyright__ = "2015 Susan Sim"
__license__ = "MIT License"
def diagnose_car():
"""
Interactively queries the user with yes/no questions to identify a
possible issue with a car.
Inputs:
Expected Outputs:
Errors:
"""
# First Trouble shooting Question
print("Troubleshooting Car Issues")
print("For all questions answer y for Yes or n for No")
# First Question is Yes
question1 = raw_input("Is the car silent when you turn the key?")
if question1 == "y":
question2 = raw_input("Are the battery terminals corroded?")
if question2 == "y":
print("Clean terminals and try starting again")
elif question2 == "n":
print("Replace cables and try again!")
else:
print("Please select y or n only, Try again!")
diagnose_car()
|
b6afc5f1db5c416fde43567623161bbe2244897b
|
docs/conf.py
|
docs/conf.py
|
#!/usr/bin/env python3
project = "dependencies"
copyright = "2016-2018, Artem Malyshev"
author = "Artem Malyshev"
version = "0.15"
release = "0.15"
templates_path = ["templates"]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["_build"]
pygments_style = "sphinx"
html_theme = "alabaster"
html_static_path = ["static"]
html_sidebars = {
"**": [
"sidebarlogo.html",
"stats.html",
"globaltoc.html",
"relations.html",
"updates.html",
"links.html",
"searchbox.html",
"gitter_sidecar.html",
]
}
html_theme_options = {
"show_powered_by": False,
"show_related": True,
"description": "Dependency Injection for Humans. It provides a simple low-impact implementation of an IoC container and resolution support for your classes.",
"github_user": "dry-python",
"github_repo": "dependencies",
"github_type": "star",
"github_count": True,
"github_banner": True,
}
|
#!/usr/bin/env python3
project = "dependencies"
copyright = "2016-2018, Artem Malyshev"
author = "Artem Malyshev"
version = "0.15"
release = "0.15"
templates_path = ["templates"]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["_build"]
pygments_style = "sphinx"
html_theme = "alabaster"
html_static_path = ["static"]
html_sidebars = {
"**": [
"sidebarlogo.html",
"stats.html",
"globaltoc.html",
"relations.html",
"updates.html",
"links.html",
"searchbox.html",
"gitter_sidecar.html",
]
}
html_theme_options = {
"show_powered_by": False,
"show_related": True,
"show_relbars": True,
"description": "Dependency Injection for Humans. It provides a simple low-impact implementation of an IoC container and resolution support for your classes.",
"github_user": "dry-python",
"github_repo": "dependencies",
"github_type": "star",
"github_count": True,
"github_banner": True,
}
|
Add Next/Previous page links to the docs.
|
Add Next/Previous page links to the docs.
|
Python
|
bsd-2-clause
|
proofit404/dependencies,proofit404/dependencies,proofit404/dependencies,proofit404/dependencies
|
#!/usr/bin/env python3
project = "dependencies"
copyright = "2016-2018, Artem Malyshev"
author = "Artem Malyshev"
version = "0.15"
release = "0.15"
templates_path = ["templates"]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["_build"]
pygments_style = "sphinx"
html_theme = "alabaster"
html_static_path = ["static"]
html_sidebars = {
"**": [
"sidebarlogo.html",
"stats.html",
"globaltoc.html",
"relations.html",
"updates.html",
"links.html",
"searchbox.html",
"gitter_sidecar.html",
]
}
html_theme_options = {
"show_powered_by": False,
"show_related": True,
"description": "Dependency Injection for Humans. It provides a simple low-impact implementation of an IoC container and resolution support for your classes.",
"github_user": "dry-python",
"github_repo": "dependencies",
"github_type": "star",
"github_count": True,
"github_banner": True,
}
Add Next/Previous page links to the docs.
|
#!/usr/bin/env python3
project = "dependencies"
copyright = "2016-2018, Artem Malyshev"
author = "Artem Malyshev"
version = "0.15"
release = "0.15"
templates_path = ["templates"]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["_build"]
pygments_style = "sphinx"
html_theme = "alabaster"
html_static_path = ["static"]
html_sidebars = {
"**": [
"sidebarlogo.html",
"stats.html",
"globaltoc.html",
"relations.html",
"updates.html",
"links.html",
"searchbox.html",
"gitter_sidecar.html",
]
}
html_theme_options = {
"show_powered_by": False,
"show_related": True,
"show_relbars": True,
"description": "Dependency Injection for Humans. It provides a simple low-impact implementation of an IoC container and resolution support for your classes.",
"github_user": "dry-python",
"github_repo": "dependencies",
"github_type": "star",
"github_count": True,
"github_banner": True,
}
|
<commit_before>#!/usr/bin/env python3
project = "dependencies"
copyright = "2016-2018, Artem Malyshev"
author = "Artem Malyshev"
version = "0.15"
release = "0.15"
templates_path = ["templates"]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["_build"]
pygments_style = "sphinx"
html_theme = "alabaster"
html_static_path = ["static"]
html_sidebars = {
"**": [
"sidebarlogo.html",
"stats.html",
"globaltoc.html",
"relations.html",
"updates.html",
"links.html",
"searchbox.html",
"gitter_sidecar.html",
]
}
html_theme_options = {
"show_powered_by": False,
"show_related": True,
"description": "Dependency Injection for Humans. It provides a simple low-impact implementation of an IoC container and resolution support for your classes.",
"github_user": "dry-python",
"github_repo": "dependencies",
"github_type": "star",
"github_count": True,
"github_banner": True,
}
<commit_msg>Add Next/Previous page links to the docs.<commit_after>
|
#!/usr/bin/env python3
project = "dependencies"
copyright = "2016-2018, Artem Malyshev"
author = "Artem Malyshev"
version = "0.15"
release = "0.15"
templates_path = ["templates"]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["_build"]
pygments_style = "sphinx"
html_theme = "alabaster"
html_static_path = ["static"]
html_sidebars = {
"**": [
"sidebarlogo.html",
"stats.html",
"globaltoc.html",
"relations.html",
"updates.html",
"links.html",
"searchbox.html",
"gitter_sidecar.html",
]
}
html_theme_options = {
"show_powered_by": False,
"show_related": True,
"show_relbars": True,
"description": "Dependency Injection for Humans. It provides a simple low-impact implementation of an IoC container and resolution support for your classes.",
"github_user": "dry-python",
"github_repo": "dependencies",
"github_type": "star",
"github_count": True,
"github_banner": True,
}
|
#!/usr/bin/env python3
project = "dependencies"
copyright = "2016-2018, Artem Malyshev"
author = "Artem Malyshev"
version = "0.15"
release = "0.15"
templates_path = ["templates"]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["_build"]
pygments_style = "sphinx"
html_theme = "alabaster"
html_static_path = ["static"]
html_sidebars = {
"**": [
"sidebarlogo.html",
"stats.html",
"globaltoc.html",
"relations.html",
"updates.html",
"links.html",
"searchbox.html",
"gitter_sidecar.html",
]
}
html_theme_options = {
"show_powered_by": False,
"show_related": True,
"description": "Dependency Injection for Humans. It provides a simple low-impact implementation of an IoC container and resolution support for your classes.",
"github_user": "dry-python",
"github_repo": "dependencies",
"github_type": "star",
"github_count": True,
"github_banner": True,
}
Add Next/Previous page links to the docs.#!/usr/bin/env python3
project = "dependencies"
copyright = "2016-2018, Artem Malyshev"
author = "Artem Malyshev"
version = "0.15"
release = "0.15"
templates_path = ["templates"]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["_build"]
pygments_style = "sphinx"
html_theme = "alabaster"
html_static_path = ["static"]
html_sidebars = {
"**": [
"sidebarlogo.html",
"stats.html",
"globaltoc.html",
"relations.html",
"updates.html",
"links.html",
"searchbox.html",
"gitter_sidecar.html",
]
}
html_theme_options = {
"show_powered_by": False,
"show_related": True,
"show_relbars": True,
"description": "Dependency Injection for Humans. It provides a simple low-impact implementation of an IoC container and resolution support for your classes.",
"github_user": "dry-python",
"github_repo": "dependencies",
"github_type": "star",
"github_count": True,
"github_banner": True,
}
|
<commit_before>#!/usr/bin/env python3
project = "dependencies"
copyright = "2016-2018, Artem Malyshev"
author = "Artem Malyshev"
version = "0.15"
release = "0.15"
templates_path = ["templates"]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["_build"]
pygments_style = "sphinx"
html_theme = "alabaster"
html_static_path = ["static"]
html_sidebars = {
"**": [
"sidebarlogo.html",
"stats.html",
"globaltoc.html",
"relations.html",
"updates.html",
"links.html",
"searchbox.html",
"gitter_sidecar.html",
]
}
html_theme_options = {
"show_powered_by": False,
"show_related": True,
"description": "Dependency Injection for Humans. It provides a simple low-impact implementation of an IoC container and resolution support for your classes.",
"github_user": "dry-python",
"github_repo": "dependencies",
"github_type": "star",
"github_count": True,
"github_banner": True,
}
<commit_msg>Add Next/Previous page links to the docs.<commit_after>#!/usr/bin/env python3
project = "dependencies"
copyright = "2016-2018, Artem Malyshev"
author = "Artem Malyshev"
version = "0.15"
release = "0.15"
templates_path = ["templates"]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["_build"]
pygments_style = "sphinx"
html_theme = "alabaster"
html_static_path = ["static"]
html_sidebars = {
"**": [
"sidebarlogo.html",
"stats.html",
"globaltoc.html",
"relations.html",
"updates.html",
"links.html",
"searchbox.html",
"gitter_sidecar.html",
]
}
html_theme_options = {
"show_powered_by": False,
"show_related": True,
"show_relbars": True,
"description": "Dependency Injection for Humans. It provides a simple low-impact implementation of an IoC container and resolution support for your classes.",
"github_user": "dry-python",
"github_repo": "dependencies",
"github_type": "star",
"github_count": True,
"github_banner": True,
}
|
ccc98ced56ee8dda02332720c7146e1548a3b53c
|
project/project/urls.py
|
project/project/urls.py
|
"""
project URL Configuration
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^admin_tools/', include('admin_tools.urls')),
url(r'^accounts/logout/$', 'allauth.account.views.logout', name='account_logout'),
url('^accounts/social/', include('allauth.socialaccount.urls')),
url('^accounts/', include('allauth.socialaccount.providers.google.urls')),
url(r'^', include("project.teams.urls")),
url(r'^', include("project.profiles.urls")),
]
|
"""
project URL Configuration
"""
from django.conf.urls import include, url
from django.conf import settings
from django.contrib import admin
from django.views.generic.base import RedirectView
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^admin_tools/', include('admin_tools.urls')),
url(r'^accounts/login/$', RedirectView.as_view(url=settings.LOGIN_URL),
name='account_login'),
url(r'^accounts/logout/$', 'allauth.account.views.logout', name='account_logout'),
url(r'^accounts/social/', include('allauth.socialaccount.urls')),
url(r'^accounts/', include('allauth.socialaccount.providers.google.urls')),
url(r'^', include("project.teams.urls")),
url(r'^', include("project.profiles.urls")),
]
|
Set up redirect to login view
|
Set up redirect to login view
|
Python
|
mit
|
jonsimington/app,compsci-hfh/app,compsci-hfh/app,jonsimington/app
|
"""
project URL Configuration
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^admin_tools/', include('admin_tools.urls')),
url(r'^accounts/logout/$', 'allauth.account.views.logout', name='account_logout'),
url('^accounts/social/', include('allauth.socialaccount.urls')),
url('^accounts/', include('allauth.socialaccount.providers.google.urls')),
url(r'^', include("project.teams.urls")),
url(r'^', include("project.profiles.urls")),
]
Set up redirect to login view
|
"""
project URL Configuration
"""
from django.conf.urls import include, url
from django.conf import settings
from django.contrib import admin
from django.views.generic.base import RedirectView
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^admin_tools/', include('admin_tools.urls')),
url(r'^accounts/login/$', RedirectView.as_view(url=settings.LOGIN_URL),
name='account_login'),
url(r'^accounts/logout/$', 'allauth.account.views.logout', name='account_logout'),
url(r'^accounts/social/', include('allauth.socialaccount.urls')),
url(r'^accounts/', include('allauth.socialaccount.providers.google.urls')),
url(r'^', include("project.teams.urls")),
url(r'^', include("project.profiles.urls")),
]
|
<commit_before>"""
project URL Configuration
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^admin_tools/', include('admin_tools.urls')),
url(r'^accounts/logout/$', 'allauth.account.views.logout', name='account_logout'),
url('^accounts/social/', include('allauth.socialaccount.urls')),
url('^accounts/', include('allauth.socialaccount.providers.google.urls')),
url(r'^', include("project.teams.urls")),
url(r'^', include("project.profiles.urls")),
]
<commit_msg>Set up redirect to login view<commit_after>
|
"""
project URL Configuration
"""
from django.conf.urls import include, url
from django.conf import settings
from django.contrib import admin
from django.views.generic.base import RedirectView
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^admin_tools/', include('admin_tools.urls')),
url(r'^accounts/login/$', RedirectView.as_view(url=settings.LOGIN_URL),
name='account_login'),
url(r'^accounts/logout/$', 'allauth.account.views.logout', name='account_logout'),
url(r'^accounts/social/', include('allauth.socialaccount.urls')),
url(r'^accounts/', include('allauth.socialaccount.providers.google.urls')),
url(r'^', include("project.teams.urls")),
url(r'^', include("project.profiles.urls")),
]
|
"""
project URL Configuration
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^admin_tools/', include('admin_tools.urls')),
url(r'^accounts/logout/$', 'allauth.account.views.logout', name='account_logout'),
url('^accounts/social/', include('allauth.socialaccount.urls')),
url('^accounts/', include('allauth.socialaccount.providers.google.urls')),
url(r'^', include("project.teams.urls")),
url(r'^', include("project.profiles.urls")),
]
Set up redirect to login view"""
project URL Configuration
"""
from django.conf.urls import include, url
from django.conf import settings
from django.contrib import admin
from django.views.generic.base import RedirectView
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^admin_tools/', include('admin_tools.urls')),
url(r'^accounts/login/$', RedirectView.as_view(url=settings.LOGIN_URL),
name='account_login'),
url(r'^accounts/logout/$', 'allauth.account.views.logout', name='account_logout'),
url(r'^accounts/social/', include('allauth.socialaccount.urls')),
url(r'^accounts/', include('allauth.socialaccount.providers.google.urls')),
url(r'^', include("project.teams.urls")),
url(r'^', include("project.profiles.urls")),
]
|
<commit_before>"""
project URL Configuration
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^admin_tools/', include('admin_tools.urls')),
url(r'^accounts/logout/$', 'allauth.account.views.logout', name='account_logout'),
url('^accounts/social/', include('allauth.socialaccount.urls')),
url('^accounts/', include('allauth.socialaccount.providers.google.urls')),
url(r'^', include("project.teams.urls")),
url(r'^', include("project.profiles.urls")),
]
<commit_msg>Set up redirect to login view<commit_after>"""
project URL Configuration
"""
from django.conf.urls import include, url
from django.conf import settings
from django.contrib import admin
from django.views.generic.base import RedirectView
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^admin_tools/', include('admin_tools.urls')),
url(r'^accounts/login/$', RedirectView.as_view(url=settings.LOGIN_URL),
name='account_login'),
url(r'^accounts/logout/$', 'allauth.account.views.logout', name='account_logout'),
url(r'^accounts/social/', include('allauth.socialaccount.urls')),
url(r'^accounts/', include('allauth.socialaccount.providers.google.urls')),
url(r'^', include("project.teams.urls")),
url(r'^', include("project.profiles.urls")),
]
|
6c04c2dc0647f7103000aee2996ce243f7fe3535
|
thinc/tests/layers/test_hash_embed.py
|
thinc/tests/layers/test_hash_embed.py
|
import numpy
from thinc.api import HashEmbed
def test_init():
model = HashEmbed(64, 1000).initialize()
assert model.get_dim("nV") == 1000
assert model.get_dim("nO") == 64
assert model.get_param("E").shape == (1001, 64)
def test_seed_changes_bucket():
model1 = HashEmbed(64, 1000, seed=2).initialize()
model2 = HashEmbed(64, 1000, seed=1).initialize()
arr = numpy.ones((1,), dtype="uint64")
vector1 = model1.predict(arr)
vector2 = model2.predict(arr)
assert vector1.sum() != vector2.sum()
|
import numpy
from thinc.api import HashEmbed
def test_init():
model = HashEmbed(64, 1000).initialize()
assert model.get_dim("nV") == 1000
assert model.get_dim("nO") == 64
assert model.get_param("E").shape == (1000, 64)
def test_seed_changes_bucket():
model1 = HashEmbed(64, 1000, seed=2).initialize()
model2 = HashEmbed(64, 1000, seed=1).initialize()
arr = numpy.ones((1,), dtype="uint64")
vector1 = model1.predict(arr)
vector2 = model2.predict(arr)
assert vector1.sum() != vector2.sum()
|
Fix off-by-one in HashEmbed test
|
Fix off-by-one in HashEmbed test
|
Python
|
mit
|
explosion/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,spacy-io/thinc,explosion/thinc
|
import numpy
from thinc.api import HashEmbed
def test_init():
model = HashEmbed(64, 1000).initialize()
assert model.get_dim("nV") == 1000
assert model.get_dim("nO") == 64
assert model.get_param("E").shape == (1001, 64)
def test_seed_changes_bucket():
model1 = HashEmbed(64, 1000, seed=2).initialize()
model2 = HashEmbed(64, 1000, seed=1).initialize()
arr = numpy.ones((1,), dtype="uint64")
vector1 = model1.predict(arr)
vector2 = model2.predict(arr)
assert vector1.sum() != vector2.sum()
Fix off-by-one in HashEmbed test
|
import numpy
from thinc.api import HashEmbed
def test_init():
model = HashEmbed(64, 1000).initialize()
assert model.get_dim("nV") == 1000
assert model.get_dim("nO") == 64
assert model.get_param("E").shape == (1000, 64)
def test_seed_changes_bucket():
model1 = HashEmbed(64, 1000, seed=2).initialize()
model2 = HashEmbed(64, 1000, seed=1).initialize()
arr = numpy.ones((1,), dtype="uint64")
vector1 = model1.predict(arr)
vector2 = model2.predict(arr)
assert vector1.sum() != vector2.sum()
|
<commit_before>import numpy
from thinc.api import HashEmbed
def test_init():
model = HashEmbed(64, 1000).initialize()
assert model.get_dim("nV") == 1000
assert model.get_dim("nO") == 64
assert model.get_param("E").shape == (1001, 64)
def test_seed_changes_bucket():
model1 = HashEmbed(64, 1000, seed=2).initialize()
model2 = HashEmbed(64, 1000, seed=1).initialize()
arr = numpy.ones((1,), dtype="uint64")
vector1 = model1.predict(arr)
vector2 = model2.predict(arr)
assert vector1.sum() != vector2.sum()
<commit_msg>Fix off-by-one in HashEmbed test<commit_after>
|
import numpy
from thinc.api import HashEmbed
def test_init():
model = HashEmbed(64, 1000).initialize()
assert model.get_dim("nV") == 1000
assert model.get_dim("nO") == 64
assert model.get_param("E").shape == (1000, 64)
def test_seed_changes_bucket():
model1 = HashEmbed(64, 1000, seed=2).initialize()
model2 = HashEmbed(64, 1000, seed=1).initialize()
arr = numpy.ones((1,), dtype="uint64")
vector1 = model1.predict(arr)
vector2 = model2.predict(arr)
assert vector1.sum() != vector2.sum()
|
import numpy
from thinc.api import HashEmbed
def test_init():
model = HashEmbed(64, 1000).initialize()
assert model.get_dim("nV") == 1000
assert model.get_dim("nO") == 64
assert model.get_param("E").shape == (1001, 64)
def test_seed_changes_bucket():
model1 = HashEmbed(64, 1000, seed=2).initialize()
model2 = HashEmbed(64, 1000, seed=1).initialize()
arr = numpy.ones((1,), dtype="uint64")
vector1 = model1.predict(arr)
vector2 = model2.predict(arr)
assert vector1.sum() != vector2.sum()
Fix off-by-one in HashEmbed testimport numpy
from thinc.api import HashEmbed
def test_init():
model = HashEmbed(64, 1000).initialize()
assert model.get_dim("nV") == 1000
assert model.get_dim("nO") == 64
assert model.get_param("E").shape == (1000, 64)
def test_seed_changes_bucket():
model1 = HashEmbed(64, 1000, seed=2).initialize()
model2 = HashEmbed(64, 1000, seed=1).initialize()
arr = numpy.ones((1,), dtype="uint64")
vector1 = model1.predict(arr)
vector2 = model2.predict(arr)
assert vector1.sum() != vector2.sum()
|
<commit_before>import numpy
from thinc.api import HashEmbed
def test_init():
model = HashEmbed(64, 1000).initialize()
assert model.get_dim("nV") == 1000
assert model.get_dim("nO") == 64
assert model.get_param("E").shape == (1001, 64)
def test_seed_changes_bucket():
model1 = HashEmbed(64, 1000, seed=2).initialize()
model2 = HashEmbed(64, 1000, seed=1).initialize()
arr = numpy.ones((1,), dtype="uint64")
vector1 = model1.predict(arr)
vector2 = model2.predict(arr)
assert vector1.sum() != vector2.sum()
<commit_msg>Fix off-by-one in HashEmbed test<commit_after>import numpy
from thinc.api import HashEmbed
def test_init():
model = HashEmbed(64, 1000).initialize()
assert model.get_dim("nV") == 1000
assert model.get_dim("nO") == 64
assert model.get_param("E").shape == (1000, 64)
def test_seed_changes_bucket():
model1 = HashEmbed(64, 1000, seed=2).initialize()
model2 = HashEmbed(64, 1000, seed=1).initialize()
arr = numpy.ones((1,), dtype="uint64")
vector1 = model1.predict(arr)
vector2 = model2.predict(arr)
assert vector1.sum() != vector2.sum()
|
5dc63d9c544f0335cd037bc2f6c0ce613e7783ea
|
gerrit/documentation.py
|
gerrit/documentation.py
|
# -*- coding: utf-8 -*-
URLS = {
}
class Documentation(object):
""" This class provide documentation-related methods
Documentation related REST endpoints:
https://gerrit-review.googlesource.com/Documentation/rest-api-documentation.html
"""
def __init__(self, gerrit):
self.gerrit = gerrit
self.gerrit.URLS.update(URLS)
|
# -*- coding: utf-8 -*-
URLS = {
'SEARCH': 'Documentation/?q=%(keyword)s',
}
class Documentation(object):
""" This class provide documentation-related methods
Documentation related REST endpoints:
https://gerrit-review.googlesource.com/Documentation/rest-api-documentation.html
"""
def __init__(self, gerrit):
self.gerrit = gerrit
self.gerrit.URLS.update(URLS)
def search(self, keyword):
url = self.gerrit.url('SEARCH', keyword=keyword)
r = Request(method='GET', url=url, auth=self.gerrit.auth)
return self.gerrit.dispatch(r)
|
Add methods for Documentation Endpoints
|
Add methods for Documentation Endpoints
Signed-off-by: Huang Yaming <ce2ec9fa26f071590d1a68b9e7447b51f2c76084@gmail.com>
|
Python
|
apache-2.0
|
yumminhuang/gerrit.py
|
# -*- coding: utf-8 -*-
URLS = {
}
class Documentation(object):
""" This class provide documentation-related methods
Documentation related REST endpoints:
https://gerrit-review.googlesource.com/Documentation/rest-api-documentation.html
"""
def __init__(self, gerrit):
self.gerrit = gerrit
self.gerrit.URLS.update(URLS)
Add methods for Documentation Endpoints
Signed-off-by: Huang Yaming <ce2ec9fa26f071590d1a68b9e7447b51f2c76084@gmail.com>
|
# -*- coding: utf-8 -*-
URLS = {
'SEARCH': 'Documentation/?q=%(keyword)s',
}
class Documentation(object):
""" This class provide documentation-related methods
Documentation related REST endpoints:
https://gerrit-review.googlesource.com/Documentation/rest-api-documentation.html
"""
def __init__(self, gerrit):
self.gerrit = gerrit
self.gerrit.URLS.update(URLS)
def search(self, keyword):
url = self.gerrit.url('SEARCH', keyword=keyword)
r = Request(method='GET', url=url, auth=self.gerrit.auth)
return self.gerrit.dispatch(r)
|
<commit_before># -*- coding: utf-8 -*-
URLS = {
}
class Documentation(object):
""" This class provide documentation-related methods
Documentation related REST endpoints:
https://gerrit-review.googlesource.com/Documentation/rest-api-documentation.html
"""
def __init__(self, gerrit):
self.gerrit = gerrit
self.gerrit.URLS.update(URLS)
<commit_msg>Add methods for Documentation Endpoints
Signed-off-by: Huang Yaming <ce2ec9fa26f071590d1a68b9e7447b51f2c76084@gmail.com><commit_after>
|
# -*- coding: utf-8 -*-
URLS = {
'SEARCH': 'Documentation/?q=%(keyword)s',
}
class Documentation(object):
""" This class provide documentation-related methods
Documentation related REST endpoints:
https://gerrit-review.googlesource.com/Documentation/rest-api-documentation.html
"""
def __init__(self, gerrit):
self.gerrit = gerrit
self.gerrit.URLS.update(URLS)
def search(self, keyword):
url = self.gerrit.url('SEARCH', keyword=keyword)
r = Request(method='GET', url=url, auth=self.gerrit.auth)
return self.gerrit.dispatch(r)
|
# -*- coding: utf-8 -*-
URLS = {
}
class Documentation(object):
""" This class provide documentation-related methods
Documentation related REST endpoints:
https://gerrit-review.googlesource.com/Documentation/rest-api-documentation.html
"""
def __init__(self, gerrit):
self.gerrit = gerrit
self.gerrit.URLS.update(URLS)
Add methods for Documentation Endpoints
Signed-off-by: Huang Yaming <ce2ec9fa26f071590d1a68b9e7447b51f2c76084@gmail.com># -*- coding: utf-8 -*-
URLS = {
'SEARCH': 'Documentation/?q=%(keyword)s',
}
class Documentation(object):
""" This class provide documentation-related methods
Documentation related REST endpoints:
https://gerrit-review.googlesource.com/Documentation/rest-api-documentation.html
"""
def __init__(self, gerrit):
self.gerrit = gerrit
self.gerrit.URLS.update(URLS)
def search(self, keyword):
url = self.gerrit.url('SEARCH', keyword=keyword)
r = Request(method='GET', url=url, auth=self.gerrit.auth)
return self.gerrit.dispatch(r)
|
<commit_before># -*- coding: utf-8 -*-
URLS = {
}
class Documentation(object):
""" This class provide documentation-related methods
Documentation related REST endpoints:
https://gerrit-review.googlesource.com/Documentation/rest-api-documentation.html
"""
def __init__(self, gerrit):
self.gerrit = gerrit
self.gerrit.URLS.update(URLS)
<commit_msg>Add methods for Documentation Endpoints
Signed-off-by: Huang Yaming <ce2ec9fa26f071590d1a68b9e7447b51f2c76084@gmail.com><commit_after># -*- coding: utf-8 -*-
URLS = {
'SEARCH': 'Documentation/?q=%(keyword)s',
}
class Documentation(object):
""" This class provide documentation-related methods
Documentation related REST endpoints:
https://gerrit-review.googlesource.com/Documentation/rest-api-documentation.html
"""
def __init__(self, gerrit):
self.gerrit = gerrit
self.gerrit.URLS.update(URLS)
def search(self, keyword):
url = self.gerrit.url('SEARCH', keyword=keyword)
r = Request(method='GET', url=url, auth=self.gerrit.auth)
return self.gerrit.dispatch(r)
|
c4df7c0de4cadffc665a353763f6d5cabada1b85
|
voicerecorder/settings.py
|
voicerecorder/settings.py
|
# -*- coding: utf-8 -*-
import os
import contextlib
from PyQt5 import QtCore
from . import __app_name__
from . import helperutils
def _qsettings_group_factory(settings: QtCore.QSettings):
@contextlib.contextmanager
def qsettings_group_context(group_name: str):
settings.beginGroup(group_name)
yield settings
settings.endGroup()
return qsettings_group_context
class SettingsMeta(type):
_instance = None
def __call__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super(SettingsMeta, cls).__call__(*args, **kwargs)
return cls._instance
class Settings(metaclass=SettingsMeta):
"""Stores application settings
"""
def __init__(self, parent: QtCore.QObject = None):
self._filename = os.path.normpath(
os.path.join(helperutils.get_app_config_dir(), __app_name__ + '.ini'))
self._settings = QtCore.QSettings(self._filename, QtCore.QSettings.IniFormat, parent)
self._settings_group = _qsettings_group_factory(self._settings)
@property
def filename(self):
return self._filename
@property
def group(self):
return self._settings_group
|
# -*- coding: utf-8 -*-
import os
import contextlib
from PyQt5 import QtCore
from . import __app_name__
from . import helperutils
def _qsettings_group_factory(settings: QtCore.QSettings):
@contextlib.contextmanager
def qsettings_group_context(group_name: str):
settings.beginGroup(group_name)
yield settings
settings.endGroup()
return qsettings_group_context
class SettingsMeta(type):
_instance = None
def __call__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super(SettingsMeta, cls).__call__(*args, **kwargs)
return cls._instance
class Settings(metaclass=SettingsMeta):
"""Stores application settings
"""
def __init__(self, parent: QtCore.QObject = None):
self._filename = os.path.normpath(
os.path.join(helperutils.get_app_config_dir(), __app_name__ + '.ini'))
self._settings = QtCore.QSettings(self._filename, QtCore.QSettings.IniFormat, parent)
self._settings_group = _qsettings_group_factory(self._settings)
@property
def filename(self):
return self._filename
@property
def s(self):
return self._settings
@property
def group(self):
return self._settings_group
|
Add "s" attr for QSettings
|
Add "s" attr for QSettings
|
Python
|
mit
|
espdev/VoiceRecorder
|
# -*- coding: utf-8 -*-
import os
import contextlib
from PyQt5 import QtCore
from . import __app_name__
from . import helperutils
def _qsettings_group_factory(settings: QtCore.QSettings):
@contextlib.contextmanager
def qsettings_group_context(group_name: str):
settings.beginGroup(group_name)
yield settings
settings.endGroup()
return qsettings_group_context
class SettingsMeta(type):
_instance = None
def __call__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super(SettingsMeta, cls).__call__(*args, **kwargs)
return cls._instance
class Settings(metaclass=SettingsMeta):
"""Stores application settings
"""
def __init__(self, parent: QtCore.QObject = None):
self._filename = os.path.normpath(
os.path.join(helperutils.get_app_config_dir(), __app_name__ + '.ini'))
self._settings = QtCore.QSettings(self._filename, QtCore.QSettings.IniFormat, parent)
self._settings_group = _qsettings_group_factory(self._settings)
@property
def filename(self):
return self._filename
@property
def group(self):
return self._settings_group
Add "s" attr for QSettings
|
# -*- coding: utf-8 -*-
import os
import contextlib
from PyQt5 import QtCore
from . import __app_name__
from . import helperutils
def _qsettings_group_factory(settings: QtCore.QSettings):
@contextlib.contextmanager
def qsettings_group_context(group_name: str):
settings.beginGroup(group_name)
yield settings
settings.endGroup()
return qsettings_group_context
class SettingsMeta(type):
_instance = None
def __call__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super(SettingsMeta, cls).__call__(*args, **kwargs)
return cls._instance
class Settings(metaclass=SettingsMeta):
"""Stores application settings
"""
def __init__(self, parent: QtCore.QObject = None):
self._filename = os.path.normpath(
os.path.join(helperutils.get_app_config_dir(), __app_name__ + '.ini'))
self._settings = QtCore.QSettings(self._filename, QtCore.QSettings.IniFormat, parent)
self._settings_group = _qsettings_group_factory(self._settings)
@property
def filename(self):
return self._filename
@property
def s(self):
return self._settings
@property
def group(self):
return self._settings_group
|
<commit_before># -*- coding: utf-8 -*-
import os
import contextlib
from PyQt5 import QtCore
from . import __app_name__
from . import helperutils
def _qsettings_group_factory(settings: QtCore.QSettings):
@contextlib.contextmanager
def qsettings_group_context(group_name: str):
settings.beginGroup(group_name)
yield settings
settings.endGroup()
return qsettings_group_context
class SettingsMeta(type):
_instance = None
def __call__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super(SettingsMeta, cls).__call__(*args, **kwargs)
return cls._instance
class Settings(metaclass=SettingsMeta):
"""Stores application settings
"""
def __init__(self, parent: QtCore.QObject = None):
self._filename = os.path.normpath(
os.path.join(helperutils.get_app_config_dir(), __app_name__ + '.ini'))
self._settings = QtCore.QSettings(self._filename, QtCore.QSettings.IniFormat, parent)
self._settings_group = _qsettings_group_factory(self._settings)
@property
def filename(self):
return self._filename
@property
def group(self):
return self._settings_group
<commit_msg>Add "s" attr for QSettings<commit_after>
|
# -*- coding: utf-8 -*-
import os
import contextlib
from PyQt5 import QtCore
from . import __app_name__
from . import helperutils
def _qsettings_group_factory(settings: QtCore.QSettings):
@contextlib.contextmanager
def qsettings_group_context(group_name: str):
settings.beginGroup(group_name)
yield settings
settings.endGroup()
return qsettings_group_context
class SettingsMeta(type):
_instance = None
def __call__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super(SettingsMeta, cls).__call__(*args, **kwargs)
return cls._instance
class Settings(metaclass=SettingsMeta):
"""Stores application settings
"""
def __init__(self, parent: QtCore.QObject = None):
self._filename = os.path.normpath(
os.path.join(helperutils.get_app_config_dir(), __app_name__ + '.ini'))
self._settings = QtCore.QSettings(self._filename, QtCore.QSettings.IniFormat, parent)
self._settings_group = _qsettings_group_factory(self._settings)
@property
def filename(self):
return self._filename
@property
def s(self):
return self._settings
@property
def group(self):
return self._settings_group
|
# -*- coding: utf-8 -*-
import os
import contextlib
from PyQt5 import QtCore
from . import __app_name__
from . import helperutils
def _qsettings_group_factory(settings: QtCore.QSettings):
@contextlib.contextmanager
def qsettings_group_context(group_name: str):
settings.beginGroup(group_name)
yield settings
settings.endGroup()
return qsettings_group_context
class SettingsMeta(type):
_instance = None
def __call__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super(SettingsMeta, cls).__call__(*args, **kwargs)
return cls._instance
class Settings(metaclass=SettingsMeta):
"""Stores application settings
"""
def __init__(self, parent: QtCore.QObject = None):
self._filename = os.path.normpath(
os.path.join(helperutils.get_app_config_dir(), __app_name__ + '.ini'))
self._settings = QtCore.QSettings(self._filename, QtCore.QSettings.IniFormat, parent)
self._settings_group = _qsettings_group_factory(self._settings)
@property
def filename(self):
return self._filename
@property
def group(self):
return self._settings_group
Add "s" attr for QSettings# -*- coding: utf-8 -*-
import os
import contextlib
from PyQt5 import QtCore
from . import __app_name__
from . import helperutils
def _qsettings_group_factory(settings: QtCore.QSettings):
@contextlib.contextmanager
def qsettings_group_context(group_name: str):
settings.beginGroup(group_name)
yield settings
settings.endGroup()
return qsettings_group_context
class SettingsMeta(type):
_instance = None
def __call__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super(SettingsMeta, cls).__call__(*args, **kwargs)
return cls._instance
class Settings(metaclass=SettingsMeta):
"""Stores application settings
"""
def __init__(self, parent: QtCore.QObject = None):
self._filename = os.path.normpath(
os.path.join(helperutils.get_app_config_dir(), __app_name__ + '.ini'))
self._settings = QtCore.QSettings(self._filename, QtCore.QSettings.IniFormat, parent)
self._settings_group = _qsettings_group_factory(self._settings)
@property
def filename(self):
return self._filename
@property
def s(self):
return self._settings
@property
def group(self):
return self._settings_group
|
<commit_before># -*- coding: utf-8 -*-
import os
import contextlib
from PyQt5 import QtCore
from . import __app_name__
from . import helperutils
def _qsettings_group_factory(settings: QtCore.QSettings):
@contextlib.contextmanager
def qsettings_group_context(group_name: str):
settings.beginGroup(group_name)
yield settings
settings.endGroup()
return qsettings_group_context
class SettingsMeta(type):
_instance = None
def __call__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super(SettingsMeta, cls).__call__(*args, **kwargs)
return cls._instance
class Settings(metaclass=SettingsMeta):
"""Stores application settings
"""
def __init__(self, parent: QtCore.QObject = None):
self._filename = os.path.normpath(
os.path.join(helperutils.get_app_config_dir(), __app_name__ + '.ini'))
self._settings = QtCore.QSettings(self._filename, QtCore.QSettings.IniFormat, parent)
self._settings_group = _qsettings_group_factory(self._settings)
@property
def filename(self):
return self._filename
@property
def group(self):
return self._settings_group
<commit_msg>Add "s" attr for QSettings<commit_after># -*- coding: utf-8 -*-
import os
import contextlib
from PyQt5 import QtCore
from . import __app_name__
from . import helperutils
def _qsettings_group_factory(settings: QtCore.QSettings):
@contextlib.contextmanager
def qsettings_group_context(group_name: str):
settings.beginGroup(group_name)
yield settings
settings.endGroup()
return qsettings_group_context
class SettingsMeta(type):
_instance = None
def __call__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super(SettingsMeta, cls).__call__(*args, **kwargs)
return cls._instance
class Settings(metaclass=SettingsMeta):
"""Stores application settings
"""
def __init__(self, parent: QtCore.QObject = None):
self._filename = os.path.normpath(
os.path.join(helperutils.get_app_config_dir(), __app_name__ + '.ini'))
self._settings = QtCore.QSettings(self._filename, QtCore.QSettings.IniFormat, parent)
self._settings_group = _qsettings_group_factory(self._settings)
@property
def filename(self):
return self._filename
@property
def s(self):
return self._settings
@property
def group(self):
return self._settings_group
|
fd4bc228c978019a7251fefe2c92899a16b8f95d
|
demosys/scene/shaders.py
|
demosys/scene/shaders.py
|
from pyrr import Matrix33
class MeshShader:
def __init__(self, shader):
self.shader = shader
def draw(self, mesh, proj_mat, view_mat):
"""Minimal draw function. Should be overridden"""
mesh.vao.bind(self.shader)
self.shader.uniform_mat4("m_proj", proj_mat)
self.shader.uniform_mat4("m_mv", view_mat)
mesh.vao.draw()
def apply(self, mesh):
"""
Determine if this MeshShader should be applied to the mesh
Can return self or some MeshShader instance to support dynamic MeshShader creation
"""
raise NotImplementedError("apply is not implemented. Please override the MeshShader method")
def create_normal_matrix(self, modelview):
"""
Convert to mat3 and return inverse transpose.
These are normally needed when dealing with normals in shaders.
:param modelview: The modelview matrix
:return: Normal matrix
"""
normal_m = Matrix33.from_matrix44(modelview)
normal_m = normal_m.inverse
normal_m = normal_m.transpose()
return normal_m
|
from pyrr import Matrix33
class MeshShader:
def __init__(self, shader, **kwargs):
self.shader = shader
def draw(self, mesh, proj_mat, view_mat):
"""Minimal draw function. Should be overridden"""
mesh.vao.bind(self.shader)
self.shader.uniform_mat4("m_proj", proj_mat)
self.shader.uniform_mat4("m_mv", view_mat)
mesh.vao.draw()
def apply(self, mesh):
"""
Determine if this MeshShader should be applied to the mesh
Can return self or some MeshShader instance to support dynamic MeshShader creation
"""
raise NotImplementedError("apply is not implemented. Please override the MeshShader method")
def create_normal_matrix(self, modelview):
"""
Convert to mat3 and return inverse transpose.
These are normally needed when dealing with normals in shaders.
:param modelview: The modelview matrix
:return: Normal matrix
"""
normal_m = Matrix33.from_matrix44(modelview)
normal_m = normal_m.inverse
normal_m = normal_m.transpose()
return normal_m
|
Allow sending kwars to mesh shader
|
Allow sending kwars to mesh shader
|
Python
|
isc
|
Contraz/demosys-py
|
from pyrr import Matrix33
class MeshShader:
def __init__(self, shader):
self.shader = shader
def draw(self, mesh, proj_mat, view_mat):
"""Minimal draw function. Should be overridden"""
mesh.vao.bind(self.shader)
self.shader.uniform_mat4("m_proj", proj_mat)
self.shader.uniform_mat4("m_mv", view_mat)
mesh.vao.draw()
def apply(self, mesh):
"""
Determine if this MeshShader should be applied to the mesh
Can return self or some MeshShader instance to support dynamic MeshShader creation
"""
raise NotImplementedError("apply is not implemented. Please override the MeshShader method")
def create_normal_matrix(self, modelview):
"""
Convert to mat3 and return inverse transpose.
These are normally needed when dealing with normals in shaders.
:param modelview: The modelview matrix
:return: Normal matrix
"""
normal_m = Matrix33.from_matrix44(modelview)
normal_m = normal_m.inverse
normal_m = normal_m.transpose()
return normal_m
Allow sending kwars to mesh shader
|
from pyrr import Matrix33
class MeshShader:
def __init__(self, shader, **kwargs):
self.shader = shader
def draw(self, mesh, proj_mat, view_mat):
"""Minimal draw function. Should be overridden"""
mesh.vao.bind(self.shader)
self.shader.uniform_mat4("m_proj", proj_mat)
self.shader.uniform_mat4("m_mv", view_mat)
mesh.vao.draw()
def apply(self, mesh):
"""
Determine if this MeshShader should be applied to the mesh
Can return self or some MeshShader instance to support dynamic MeshShader creation
"""
raise NotImplementedError("apply is not implemented. Please override the MeshShader method")
def create_normal_matrix(self, modelview):
"""
Convert to mat3 and return inverse transpose.
These are normally needed when dealing with normals in shaders.
:param modelview: The modelview matrix
:return: Normal matrix
"""
normal_m = Matrix33.from_matrix44(modelview)
normal_m = normal_m.inverse
normal_m = normal_m.transpose()
return normal_m
|
<commit_before>from pyrr import Matrix33
class MeshShader:
def __init__(self, shader):
self.shader = shader
def draw(self, mesh, proj_mat, view_mat):
"""Minimal draw function. Should be overridden"""
mesh.vao.bind(self.shader)
self.shader.uniform_mat4("m_proj", proj_mat)
self.shader.uniform_mat4("m_mv", view_mat)
mesh.vao.draw()
def apply(self, mesh):
"""
Determine if this MeshShader should be applied to the mesh
Can return self or some MeshShader instance to support dynamic MeshShader creation
"""
raise NotImplementedError("apply is not implemented. Please override the MeshShader method")
def create_normal_matrix(self, modelview):
"""
Convert to mat3 and return inverse transpose.
These are normally needed when dealing with normals in shaders.
:param modelview: The modelview matrix
:return: Normal matrix
"""
normal_m = Matrix33.from_matrix44(modelview)
normal_m = normal_m.inverse
normal_m = normal_m.transpose()
return normal_m
<commit_msg>Allow sending kwars to mesh shader<commit_after>
|
from pyrr import Matrix33
class MeshShader:
def __init__(self, shader, **kwargs):
self.shader = shader
def draw(self, mesh, proj_mat, view_mat):
"""Minimal draw function. Should be overridden"""
mesh.vao.bind(self.shader)
self.shader.uniform_mat4("m_proj", proj_mat)
self.shader.uniform_mat4("m_mv", view_mat)
mesh.vao.draw()
def apply(self, mesh):
"""
Determine if this MeshShader should be applied to the mesh
Can return self or some MeshShader instance to support dynamic MeshShader creation
"""
raise NotImplementedError("apply is not implemented. Please override the MeshShader method")
def create_normal_matrix(self, modelview):
"""
Convert to mat3 and return inverse transpose.
These are normally needed when dealing with normals in shaders.
:param modelview: The modelview matrix
:return: Normal matrix
"""
normal_m = Matrix33.from_matrix44(modelview)
normal_m = normal_m.inverse
normal_m = normal_m.transpose()
return normal_m
|
from pyrr import Matrix33
class MeshShader:
def __init__(self, shader):
self.shader = shader
def draw(self, mesh, proj_mat, view_mat):
"""Minimal draw function. Should be overridden"""
mesh.vao.bind(self.shader)
self.shader.uniform_mat4("m_proj", proj_mat)
self.shader.uniform_mat4("m_mv", view_mat)
mesh.vao.draw()
def apply(self, mesh):
"""
Determine if this MeshShader should be applied to the mesh
Can return self or some MeshShader instance to support dynamic MeshShader creation
"""
raise NotImplementedError("apply is not implemented. Please override the MeshShader method")
def create_normal_matrix(self, modelview):
"""
Convert to mat3 and return inverse transpose.
These are normally needed when dealing with normals in shaders.
:param modelview: The modelview matrix
:return: Normal matrix
"""
normal_m = Matrix33.from_matrix44(modelview)
normal_m = normal_m.inverse
normal_m = normal_m.transpose()
return normal_m
Allow sending kwars to mesh shaderfrom pyrr import Matrix33
class MeshShader:
def __init__(self, shader, **kwargs):
self.shader = shader
def draw(self, mesh, proj_mat, view_mat):
"""Minimal draw function. Should be overridden"""
mesh.vao.bind(self.shader)
self.shader.uniform_mat4("m_proj", proj_mat)
self.shader.uniform_mat4("m_mv", view_mat)
mesh.vao.draw()
def apply(self, mesh):
"""
Determine if this MeshShader should be applied to the mesh
Can return self or some MeshShader instance to support dynamic MeshShader creation
"""
raise NotImplementedError("apply is not implemented. Please override the MeshShader method")
def create_normal_matrix(self, modelview):
"""
Convert to mat3 and return inverse transpose.
These are normally needed when dealing with normals in shaders.
:param modelview: The modelview matrix
:return: Normal matrix
"""
normal_m = Matrix33.from_matrix44(modelview)
normal_m = normal_m.inverse
normal_m = normal_m.transpose()
return normal_m
|
<commit_before>from pyrr import Matrix33
class MeshShader:
def __init__(self, shader):
self.shader = shader
def draw(self, mesh, proj_mat, view_mat):
"""Minimal draw function. Should be overridden"""
mesh.vao.bind(self.shader)
self.shader.uniform_mat4("m_proj", proj_mat)
self.shader.uniform_mat4("m_mv", view_mat)
mesh.vao.draw()
def apply(self, mesh):
"""
Determine if this MeshShader should be applied to the mesh
Can return self or some MeshShader instance to support dynamic MeshShader creation
"""
raise NotImplementedError("apply is not implemented. Please override the MeshShader method")
def create_normal_matrix(self, modelview):
"""
Convert to mat3 and return inverse transpose.
These are normally needed when dealing with normals in shaders.
:param modelview: The modelview matrix
:return: Normal matrix
"""
normal_m = Matrix33.from_matrix44(modelview)
normal_m = normal_m.inverse
normal_m = normal_m.transpose()
return normal_m
<commit_msg>Allow sending kwars to mesh shader<commit_after>from pyrr import Matrix33
class MeshShader:
def __init__(self, shader, **kwargs):
self.shader = shader
def draw(self, mesh, proj_mat, view_mat):
"""Minimal draw function. Should be overridden"""
mesh.vao.bind(self.shader)
self.shader.uniform_mat4("m_proj", proj_mat)
self.shader.uniform_mat4("m_mv", view_mat)
mesh.vao.draw()
def apply(self, mesh):
"""
Determine if this MeshShader should be applied to the mesh
Can return self or some MeshShader instance to support dynamic MeshShader creation
"""
raise NotImplementedError("apply is not implemented. Please override the MeshShader method")
def create_normal_matrix(self, modelview):
"""
Convert to mat3 and return inverse transpose.
These are normally needed when dealing with normals in shaders.
:param modelview: The modelview matrix
:return: Normal matrix
"""
normal_m = Matrix33.from_matrix44(modelview)
normal_m = normal_m.inverse
normal_m = normal_m.transpose()
return normal_m
|
83efb4c86ea34e9f51c231a3b7c96929d2ba5ee6
|
bluebottle/utils/staticfiles_finders.py
|
bluebottle/utils/staticfiles_finders.py
|
from django.utils._os import safe_join
import os
from django.conf import settings
from django.contrib.staticfiles.finders import FileSystemFinder
from bluebottle.clients.models import Client
class TenantStaticFilesFinder(FileSystemFinder):
def find(self, path, all=False):
"""
Looks for files in the client static directories.
static/assets/greatbarier/images/logo.jpg
will translate to
MULTITENANT_DIR/greatbarier/static/images/logo.jpg
"""
matches = []
tenants = Client.objects.all()
tenant_dir = getattr(settings, 'MULTI_TENANT_DIR', None)
if not tenant_dir:
return matches
for tenant in tenants:
if "{0}/".format(tenant.client_name) in path:
tenant_path = path.replace('{0}/'.format(tenant.client_name),
'{0}/static/'.format(tenant.client_name))
print tenant_path
local_path = safe_join(tenant_dir, tenant_path)
print local_path
if os.path.exists(local_path):
return local_path
return
|
from django.utils._os import safe_join
import os
from django.conf import settings
from django.contrib.staticfiles.finders import FileSystemFinder
from bluebottle.clients.models import Client
class TenantStaticFilesFinder(FileSystemFinder):
def find(self, path, all=False):
"""
Looks for files in the client static directories.
static/assets/greatbarier/images/logo.jpg
will translate to
MULTITENANT_DIR/greatbarier/static/images/logo.jpg
"""
matches = []
tenants = Client.objects.all()
tenant_dir = getattr(settings, 'MULTI_TENANT_DIR', None)
if not tenant_dir:
return matches
for tenant in tenants:
if "{0}/".format(tenant.client_name) in path:
tenant_path = path.replace('{0}/'.format(tenant.client_name),
'{0}/static/'.format(tenant.client_name))
print tenant_path
local_path = safe_join(tenant_dir, tenant_path)
print local_path
if os.path.exists(local_path):
if all:
return [local_path]
return local_path
return matches
|
Fix static files finder errors
|
Fix static files finder errors
Conflicts:
bluebottle/utils/staticfiles_finders.py
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,jfterpstra/bluebottle,jfterpstra/bluebottle,jfterpstra/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
from django.utils._os import safe_join
import os
from django.conf import settings
from django.contrib.staticfiles.finders import FileSystemFinder
from bluebottle.clients.models import Client
class TenantStaticFilesFinder(FileSystemFinder):
def find(self, path, all=False):
"""
Looks for files in the client static directories.
static/assets/greatbarier/images/logo.jpg
will translate to
MULTITENANT_DIR/greatbarier/static/images/logo.jpg
"""
matches = []
tenants = Client.objects.all()
tenant_dir = getattr(settings, 'MULTI_TENANT_DIR', None)
if not tenant_dir:
return matches
for tenant in tenants:
if "{0}/".format(tenant.client_name) in path:
tenant_path = path.replace('{0}/'.format(tenant.client_name),
'{0}/static/'.format(tenant.client_name))
print tenant_path
local_path = safe_join(tenant_dir, tenant_path)
print local_path
if os.path.exists(local_path):
return local_path
return
Fix static files finder errors
Conflicts:
bluebottle/utils/staticfiles_finders.py
|
from django.utils._os import safe_join
import os
from django.conf import settings
from django.contrib.staticfiles.finders import FileSystemFinder
from bluebottle.clients.models import Client
class TenantStaticFilesFinder(FileSystemFinder):
def find(self, path, all=False):
"""
Looks for files in the client static directories.
static/assets/greatbarier/images/logo.jpg
will translate to
MULTITENANT_DIR/greatbarier/static/images/logo.jpg
"""
matches = []
tenants = Client.objects.all()
tenant_dir = getattr(settings, 'MULTI_TENANT_DIR', None)
if not tenant_dir:
return matches
for tenant in tenants:
if "{0}/".format(tenant.client_name) in path:
tenant_path = path.replace('{0}/'.format(tenant.client_name),
'{0}/static/'.format(tenant.client_name))
print tenant_path
local_path = safe_join(tenant_dir, tenant_path)
print local_path
if os.path.exists(local_path):
if all:
return [local_path]
return local_path
return matches
|
<commit_before>from django.utils._os import safe_join
import os
from django.conf import settings
from django.contrib.staticfiles.finders import FileSystemFinder
from bluebottle.clients.models import Client
class TenantStaticFilesFinder(FileSystemFinder):
def find(self, path, all=False):
"""
Looks for files in the client static directories.
static/assets/greatbarier/images/logo.jpg
will translate to
MULTITENANT_DIR/greatbarier/static/images/logo.jpg
"""
matches = []
tenants = Client.objects.all()
tenant_dir = getattr(settings, 'MULTI_TENANT_DIR', None)
if not tenant_dir:
return matches
for tenant in tenants:
if "{0}/".format(tenant.client_name) in path:
tenant_path = path.replace('{0}/'.format(tenant.client_name),
'{0}/static/'.format(tenant.client_name))
print tenant_path
local_path = safe_join(tenant_dir, tenant_path)
print local_path
if os.path.exists(local_path):
return local_path
return
<commit_msg>Fix static files finder errors
Conflicts:
bluebottle/utils/staticfiles_finders.py<commit_after>
|
from django.utils._os import safe_join
import os
from django.conf import settings
from django.contrib.staticfiles.finders import FileSystemFinder
from bluebottle.clients.models import Client
class TenantStaticFilesFinder(FileSystemFinder):
def find(self, path, all=False):
"""
Looks for files in the client static directories.
static/assets/greatbarier/images/logo.jpg
will translate to
MULTITENANT_DIR/greatbarier/static/images/logo.jpg
"""
matches = []
tenants = Client.objects.all()
tenant_dir = getattr(settings, 'MULTI_TENANT_DIR', None)
if not tenant_dir:
return matches
for tenant in tenants:
if "{0}/".format(tenant.client_name) in path:
tenant_path = path.replace('{0}/'.format(tenant.client_name),
'{0}/static/'.format(tenant.client_name))
print tenant_path
local_path = safe_join(tenant_dir, tenant_path)
print local_path
if os.path.exists(local_path):
if all:
return [local_path]
return local_path
return matches
|
from django.utils._os import safe_join
import os
from django.conf import settings
from django.contrib.staticfiles.finders import FileSystemFinder
from bluebottle.clients.models import Client
class TenantStaticFilesFinder(FileSystemFinder):
def find(self, path, all=False):
"""
Looks for files in the client static directories.
static/assets/greatbarier/images/logo.jpg
will translate to
MULTITENANT_DIR/greatbarier/static/images/logo.jpg
"""
matches = []
tenants = Client.objects.all()
tenant_dir = getattr(settings, 'MULTI_TENANT_DIR', None)
if not tenant_dir:
return matches
for tenant in tenants:
if "{0}/".format(tenant.client_name) in path:
tenant_path = path.replace('{0}/'.format(tenant.client_name),
'{0}/static/'.format(tenant.client_name))
print tenant_path
local_path = safe_join(tenant_dir, tenant_path)
print local_path
if os.path.exists(local_path):
return local_path
return
Fix static files finder errors
Conflicts:
bluebottle/utils/staticfiles_finders.pyfrom django.utils._os import safe_join
import os
from django.conf import settings
from django.contrib.staticfiles.finders import FileSystemFinder
from bluebottle.clients.models import Client
class TenantStaticFilesFinder(FileSystemFinder):
def find(self, path, all=False):
"""
Looks for files in the client static directories.
static/assets/greatbarier/images/logo.jpg
will translate to
MULTITENANT_DIR/greatbarier/static/images/logo.jpg
"""
matches = []
tenants = Client.objects.all()
tenant_dir = getattr(settings, 'MULTI_TENANT_DIR', None)
if not tenant_dir:
return matches
for tenant in tenants:
if "{0}/".format(tenant.client_name) in path:
tenant_path = path.replace('{0}/'.format(tenant.client_name),
'{0}/static/'.format(tenant.client_name))
print tenant_path
local_path = safe_join(tenant_dir, tenant_path)
print local_path
if os.path.exists(local_path):
if all:
return [local_path]
return local_path
return matches
|
<commit_before>from django.utils._os import safe_join
import os
from django.conf import settings
from django.contrib.staticfiles.finders import FileSystemFinder
from bluebottle.clients.models import Client
class TenantStaticFilesFinder(FileSystemFinder):
def find(self, path, all=False):
"""
Looks for files in the client static directories.
static/assets/greatbarier/images/logo.jpg
will translate to
MULTITENANT_DIR/greatbarier/static/images/logo.jpg
"""
matches = []
tenants = Client.objects.all()
tenant_dir = getattr(settings, 'MULTI_TENANT_DIR', None)
if not tenant_dir:
return matches
for tenant in tenants:
if "{0}/".format(tenant.client_name) in path:
tenant_path = path.replace('{0}/'.format(tenant.client_name),
'{0}/static/'.format(tenant.client_name))
print tenant_path
local_path = safe_join(tenant_dir, tenant_path)
print local_path
if os.path.exists(local_path):
return local_path
return
<commit_msg>Fix static files finder errors
Conflicts:
bluebottle/utils/staticfiles_finders.py<commit_after>from django.utils._os import safe_join
import os
from django.conf import settings
from django.contrib.staticfiles.finders import FileSystemFinder
from bluebottle.clients.models import Client
class TenantStaticFilesFinder(FileSystemFinder):
def find(self, path, all=False):
"""
Looks for files in the client static directories.
static/assets/greatbarier/images/logo.jpg
will translate to
MULTITENANT_DIR/greatbarier/static/images/logo.jpg
"""
matches = []
tenants = Client.objects.all()
tenant_dir = getattr(settings, 'MULTI_TENANT_DIR', None)
if not tenant_dir:
return matches
for tenant in tenants:
if "{0}/".format(tenant.client_name) in path:
tenant_path = path.replace('{0}/'.format(tenant.client_name),
'{0}/static/'.format(tenant.client_name))
print tenant_path
local_path = safe_join(tenant_dir, tenant_path)
print local_path
if os.path.exists(local_path):
if all:
return [local_path]
return local_path
return matches
|
3cab4a8252d89c05895cc7a1715afa4ec14ce6e2
|
utils/__init__.py
|
utils/__init__.py
|
import string
import struct
from collections import namedtuple
class NamedStruct(struct.Struct):
def __init__(self, fields, order='', size=0):
self.values = namedtuple("header", ' '.join(k for k, _ in fields))
format = order + ''.join([v for _, v in fields])
if size:
format += "%dx" % (size - struct.calcsize(format))
super().__init__(format)
def pack(self, *args, **kwargs):
return super().pack(*self.values(*args, **kwargs))
def unpack(self, data):
return self.values._make(super().unpack(data))
# does not handle:
# - bytes
# - escapes in bytes
def sourcable_dump(dict_):
value_esc = {
"\\": r"\\",
"'": r"'\''",
}
value_trans = str.maketrans(value_esc)
for k, v in dict_.items():
k = str(k).lstrip("0123456789")
k = ''.join([c if c in string.ascii_letters + string.digits + '_' else '_' for c in k])
v = str(v).translate(value_trans)
if k:
print("%s='%s'" % (k, v))
def pretty_dump(dict_):
items = dict_.items()
maxlen = max([len(k) for k, _ in items])
for k, v in items:
print(("{:%d} {}" % maxlen).format(k, v))
dump = pretty_dump
|
import string
import struct
from collections import namedtuple
class NamedStruct(struct.Struct):
def __init__(self, fields, order='', size=0):
self.values = namedtuple("NamedStruct", ' '.join(k for k, _ in fields))
format = order + ''.join([v for _, v in fields])
if size:
format += "%dx" % (size - struct.calcsize(format))
super().__init__(format)
def pack(self, *args, **kwargs):
return super().pack(*self.values(*args, **kwargs))
def unpack(self, data):
return self.values._make(super().unpack(data))
# does not handle:
# - bytes
# - escapes in bytes
def sourcable_dump(dict_):
value_esc = {
"\\": r"\\",
"'": r"'\''",
}
value_trans = str.maketrans(value_esc)
for k, v in dict_.items():
k = str(k).lstrip("0123456789")
k = ''.join([c if c in string.ascii_letters + string.digits + '_' else '_' for c in k])
v = str(v).translate(value_trans)
if k:
print("%s='%s'" % (k, v))
def pretty_dump(dict_):
items = dict_.items()
maxlen = max([len(k) for k, _ in items])
for k, v in items:
print(("{:%d} {}" % maxlen).format(k, v))
dump = pretty_dump
|
Make NamedStruct instance names less confusing
|
Make NamedStruct instance names less confusing
Ideally we'd want to make the name the same as the instance name,
but I'm not sure if it's possible without introducing an additional
constructor argument.
|
Python
|
unlicense
|
tsudoko/98imgtools
|
import string
import struct
from collections import namedtuple
class NamedStruct(struct.Struct):
def __init__(self, fields, order='', size=0):
self.values = namedtuple("header", ' '.join(k for k, _ in fields))
format = order + ''.join([v for _, v in fields])
if size:
format += "%dx" % (size - struct.calcsize(format))
super().__init__(format)
def pack(self, *args, **kwargs):
return super().pack(*self.values(*args, **kwargs))
def unpack(self, data):
return self.values._make(super().unpack(data))
# does not handle:
# - bytes
# - escapes in bytes
def sourcable_dump(dict_):
value_esc = {
"\\": r"\\",
"'": r"'\''",
}
value_trans = str.maketrans(value_esc)
for k, v in dict_.items():
k = str(k).lstrip("0123456789")
k = ''.join([c if c in string.ascii_letters + string.digits + '_' else '_' for c in k])
v = str(v).translate(value_trans)
if k:
print("%s='%s'" % (k, v))
def pretty_dump(dict_):
items = dict_.items()
maxlen = max([len(k) for k, _ in items])
for k, v in items:
print(("{:%d} {}" % maxlen).format(k, v))
dump = pretty_dump
Make NamedStruct instance names less confusing
Ideally we'd want to make the name the same as the instance name,
but I'm not sure if it's possible without introducing an additional
constructor argument.
|
import string
import struct
from collections import namedtuple
class NamedStruct(struct.Struct):
def __init__(self, fields, order='', size=0):
self.values = namedtuple("NamedStruct", ' '.join(k for k, _ in fields))
format = order + ''.join([v for _, v in fields])
if size:
format += "%dx" % (size - struct.calcsize(format))
super().__init__(format)
def pack(self, *args, **kwargs):
return super().pack(*self.values(*args, **kwargs))
def unpack(self, data):
return self.values._make(super().unpack(data))
# does not handle:
# - bytes
# - escapes in bytes
def sourcable_dump(dict_):
value_esc = {
"\\": r"\\",
"'": r"'\''",
}
value_trans = str.maketrans(value_esc)
for k, v in dict_.items():
k = str(k).lstrip("0123456789")
k = ''.join([c if c in string.ascii_letters + string.digits + '_' else '_' for c in k])
v = str(v).translate(value_trans)
if k:
print("%s='%s'" % (k, v))
def pretty_dump(dict_):
items = dict_.items()
maxlen = max([len(k) for k, _ in items])
for k, v in items:
print(("{:%d} {}" % maxlen).format(k, v))
dump = pretty_dump
|
<commit_before>import string
import struct
from collections import namedtuple
class NamedStruct(struct.Struct):
def __init__(self, fields, order='', size=0):
self.values = namedtuple("header", ' '.join(k for k, _ in fields))
format = order + ''.join([v for _, v in fields])
if size:
format += "%dx" % (size - struct.calcsize(format))
super().__init__(format)
def pack(self, *args, **kwargs):
return super().pack(*self.values(*args, **kwargs))
def unpack(self, data):
return self.values._make(super().unpack(data))
# does not handle:
# - bytes
# - escapes in bytes
def sourcable_dump(dict_):
value_esc = {
"\\": r"\\",
"'": r"'\''",
}
value_trans = str.maketrans(value_esc)
for k, v in dict_.items():
k = str(k).lstrip("0123456789")
k = ''.join([c if c in string.ascii_letters + string.digits + '_' else '_' for c in k])
v = str(v).translate(value_trans)
if k:
print("%s='%s'" % (k, v))
def pretty_dump(dict_):
items = dict_.items()
maxlen = max([len(k) for k, _ in items])
for k, v in items:
print(("{:%d} {}" % maxlen).format(k, v))
dump = pretty_dump
<commit_msg>Make NamedStruct instance names less confusing
Ideally we'd want to make the name the same as the instance name,
but I'm not sure if it's possible without introducing an additional
constructor argument.<commit_after>
|
import string
import struct
from collections import namedtuple
class NamedStruct(struct.Struct):
def __init__(self, fields, order='', size=0):
self.values = namedtuple("NamedStruct", ' '.join(k for k, _ in fields))
format = order + ''.join([v for _, v in fields])
if size:
format += "%dx" % (size - struct.calcsize(format))
super().__init__(format)
def pack(self, *args, **kwargs):
return super().pack(*self.values(*args, **kwargs))
def unpack(self, data):
return self.values._make(super().unpack(data))
# does not handle:
# - bytes
# - escapes in bytes
def sourcable_dump(dict_):
value_esc = {
"\\": r"\\",
"'": r"'\''",
}
value_trans = str.maketrans(value_esc)
for k, v in dict_.items():
k = str(k).lstrip("0123456789")
k = ''.join([c if c in string.ascii_letters + string.digits + '_' else '_' for c in k])
v = str(v).translate(value_trans)
if k:
print("%s='%s'" % (k, v))
def pretty_dump(dict_):
items = dict_.items()
maxlen = max([len(k) for k, _ in items])
for k, v in items:
print(("{:%d} {}" % maxlen).format(k, v))
dump = pretty_dump
|
import string
import struct
from collections import namedtuple
class NamedStruct(struct.Struct):
def __init__(self, fields, order='', size=0):
self.values = namedtuple("header", ' '.join(k for k, _ in fields))
format = order + ''.join([v for _, v in fields])
if size:
format += "%dx" % (size - struct.calcsize(format))
super().__init__(format)
def pack(self, *args, **kwargs):
return super().pack(*self.values(*args, **kwargs))
def unpack(self, data):
return self.values._make(super().unpack(data))
# does not handle:
# - bytes
# - escapes in bytes
def sourcable_dump(dict_):
value_esc = {
"\\": r"\\",
"'": r"'\''",
}
value_trans = str.maketrans(value_esc)
for k, v in dict_.items():
k = str(k).lstrip("0123456789")
k = ''.join([c if c in string.ascii_letters + string.digits + '_' else '_' for c in k])
v = str(v).translate(value_trans)
if k:
print("%s='%s'" % (k, v))
def pretty_dump(dict_):
items = dict_.items()
maxlen = max([len(k) for k, _ in items])
for k, v in items:
print(("{:%d} {}" % maxlen).format(k, v))
dump = pretty_dump
Make NamedStruct instance names less confusing
Ideally we'd want to make the name the same as the instance name,
but I'm not sure if it's possible without introducing an additional
constructor argument.import string
import struct
from collections import namedtuple
class NamedStruct(struct.Struct):
def __init__(self, fields, order='', size=0):
self.values = namedtuple("NamedStruct", ' '.join(k for k, _ in fields))
format = order + ''.join([v for _, v in fields])
if size:
format += "%dx" % (size - struct.calcsize(format))
super().__init__(format)
def pack(self, *args, **kwargs):
return super().pack(*self.values(*args, **kwargs))
def unpack(self, data):
return self.values._make(super().unpack(data))
# does not handle:
# - bytes
# - escapes in bytes
def sourcable_dump(dict_):
value_esc = {
"\\": r"\\",
"'": r"'\''",
}
value_trans = str.maketrans(value_esc)
for k, v in dict_.items():
k = str(k).lstrip("0123456789")
k = ''.join([c if c in string.ascii_letters + string.digits + '_' else '_' for c in k])
v = str(v).translate(value_trans)
if k:
print("%s='%s'" % (k, v))
def pretty_dump(dict_):
items = dict_.items()
maxlen = max([len(k) for k, _ in items])
for k, v in items:
print(("{:%d} {}" % maxlen).format(k, v))
dump = pretty_dump
|
<commit_before>import string
import struct
from collections import namedtuple
class NamedStruct(struct.Struct):
def __init__(self, fields, order='', size=0):
self.values = namedtuple("header", ' '.join(k for k, _ in fields))
format = order + ''.join([v for _, v in fields])
if size:
format += "%dx" % (size - struct.calcsize(format))
super().__init__(format)
def pack(self, *args, **kwargs):
return super().pack(*self.values(*args, **kwargs))
def unpack(self, data):
return self.values._make(super().unpack(data))
# does not handle:
# - bytes
# - escapes in bytes
def sourcable_dump(dict_):
value_esc = {
"\\": r"\\",
"'": r"'\''",
}
value_trans = str.maketrans(value_esc)
for k, v in dict_.items():
k = str(k).lstrip("0123456789")
k = ''.join([c if c in string.ascii_letters + string.digits + '_' else '_' for c in k])
v = str(v).translate(value_trans)
if k:
print("%s='%s'" % (k, v))
def pretty_dump(dict_):
items = dict_.items()
maxlen = max([len(k) for k, _ in items])
for k, v in items:
print(("{:%d} {}" % maxlen).format(k, v))
dump = pretty_dump
<commit_msg>Make NamedStruct instance names less confusing
Ideally we'd want to make the name the same as the instance name,
but I'm not sure if it's possible without introducing an additional
constructor argument.<commit_after>import string
import struct
from collections import namedtuple
class NamedStruct(struct.Struct):
def __init__(self, fields, order='', size=0):
self.values = namedtuple("NamedStruct", ' '.join(k for k, _ in fields))
format = order + ''.join([v for _, v in fields])
if size:
format += "%dx" % (size - struct.calcsize(format))
super().__init__(format)
def pack(self, *args, **kwargs):
return super().pack(*self.values(*args, **kwargs))
def unpack(self, data):
return self.values._make(super().unpack(data))
# does not handle:
# - bytes
# - escapes in bytes
def sourcable_dump(dict_):
value_esc = {
"\\": r"\\",
"'": r"'\''",
}
value_trans = str.maketrans(value_esc)
for k, v in dict_.items():
k = str(k).lstrip("0123456789")
k = ''.join([c if c in string.ascii_letters + string.digits + '_' else '_' for c in k])
v = str(v).translate(value_trans)
if k:
print("%s='%s'" % (k, v))
def pretty_dump(dict_):
items = dict_.items()
maxlen = max([len(k) for k, _ in items])
for k, v in items:
print(("{:%d} {}" % maxlen).format(k, v))
dump = pretty_dump
|
59691ed33347c60fe15014facee272e00f58ed3a
|
server/plugins/cryptstatus/cryptstatus.py
|
server/plugins/cryptstatus/cryptstatus.py
|
import requests
from collections import defaultdict
from requests.exceptions import RequestException
from django.conf import settings
from django.utils.dateparse import parse_datetime
import sal.plugin
import server.utils as utils
class CryptStatus(sal.plugin.DetailPlugin):
description = 'FileVault Escrow Status'
supported_os_families = [sal.plugin.OSFamilies.darwin]
def get_context(self, machine, **kwargs):
context = defaultdict(str)
context['title'] = self.description
crypt_url = utils.get_setting('crypt_url', None)
machine_url = crypt_url
if crypt_url:
crypt_url = crypt_url.rstrip()
if crypt_url:
try:
verify = settings.ROOT_CA
except AttributeError:
verify = True
request_url = '{}/verify/{}/recovery_key/'.format(crypt_url, machine.serial)
try:
response = requests.get(request_url, verify=verify)
if response.status_code == requests.codes.ok:
output = response.json()
# Have template link to machine info page rather
# than Crypt root.
machine_url = '{}/info/{}'.format(crypt_url, machine.serial)
except RequestException:
# Either there was an error or the machine hasn't been
# seen.
output = None
machine_url = crypt_url
if output:
context['escrowed'] = output['escrowed']
if output['escrowed']:
context['date_escrowed'] = parse_datetime(output['date_escrowed'])
context['crypt_url'] = machine_url
return context
|
import requests
from collections import defaultdict
from requests.exceptions import RequestException
from django.conf import settings
from django.utils.dateparse import parse_datetime
import sal.plugin
import server.utils as utils
class CryptStatus(sal.plugin.DetailPlugin):
description = 'FileVault Escrow Status'
supported_os_families = [sal.plugin.OSFamilies.darwin]
def get_context(self, machine, **kwargs):
context = defaultdict(str)
context['title'] = self.description
crypt_url = utils.get_setting('crypt_url', None)
machine_url = crypt_url
if crypt_url:
crypt_url = crypt_url.rstrip()
if crypt_url:
try:
verify = settings.ROOT_CA
except AttributeError:
verify = True
request_url = '{}/verify/{}/recovery_key/'.format(crypt_url, machine.serial)
output = None
machine_url = crypt_url
try:
response = requests.get(request_url, verify=verify)
if response.status_code == requests.codes.ok:
output = response.json()
# Have template link to machine info page rather
# than Crypt root.
machine_url = '{}/info/{}'.format(crypt_url, machine.serial)
except RequestException:
# Either there was an error or the machine hasn't been
# seen.
pass
if output:
context['escrowed'] = output['escrowed']
if output['escrowed']:
context['date_escrowed'] = parse_datetime(output['date_escrowed'])
context['crypt_url'] = machine_url
return context
|
Make sure `output` variable is in scope no matter what.
|
Make sure `output` variable is in scope no matter what.
|
Python
|
apache-2.0
|
salopensource/sal,sheagcraig/sal,salopensource/sal,sheagcraig/sal,sheagcraig/sal,salopensource/sal,salopensource/sal,sheagcraig/sal
|
import requests
from collections import defaultdict
from requests.exceptions import RequestException
from django.conf import settings
from django.utils.dateparse import parse_datetime
import sal.plugin
import server.utils as utils
class CryptStatus(sal.plugin.DetailPlugin):
description = 'FileVault Escrow Status'
supported_os_families = [sal.plugin.OSFamilies.darwin]
def get_context(self, machine, **kwargs):
context = defaultdict(str)
context['title'] = self.description
crypt_url = utils.get_setting('crypt_url', None)
machine_url = crypt_url
if crypt_url:
crypt_url = crypt_url.rstrip()
if crypt_url:
try:
verify = settings.ROOT_CA
except AttributeError:
verify = True
request_url = '{}/verify/{}/recovery_key/'.format(crypt_url, machine.serial)
try:
response = requests.get(request_url, verify=verify)
if response.status_code == requests.codes.ok:
output = response.json()
# Have template link to machine info page rather
# than Crypt root.
machine_url = '{}/info/{}'.format(crypt_url, machine.serial)
except RequestException:
# Either there was an error or the machine hasn't been
# seen.
output = None
machine_url = crypt_url
if output:
context['escrowed'] = output['escrowed']
if output['escrowed']:
context['date_escrowed'] = parse_datetime(output['date_escrowed'])
context['crypt_url'] = machine_url
return context
Make sure `output` variable is in scope no matter what.
|
import requests
from collections import defaultdict
from requests.exceptions import RequestException
from django.conf import settings
from django.utils.dateparse import parse_datetime
import sal.plugin
import server.utils as utils
class CryptStatus(sal.plugin.DetailPlugin):
description = 'FileVault Escrow Status'
supported_os_families = [sal.plugin.OSFamilies.darwin]
def get_context(self, machine, **kwargs):
context = defaultdict(str)
context['title'] = self.description
crypt_url = utils.get_setting('crypt_url', None)
machine_url = crypt_url
if crypt_url:
crypt_url = crypt_url.rstrip()
if crypt_url:
try:
verify = settings.ROOT_CA
except AttributeError:
verify = True
request_url = '{}/verify/{}/recovery_key/'.format(crypt_url, machine.serial)
output = None
machine_url = crypt_url
try:
response = requests.get(request_url, verify=verify)
if response.status_code == requests.codes.ok:
output = response.json()
# Have template link to machine info page rather
# than Crypt root.
machine_url = '{}/info/{}'.format(crypt_url, machine.serial)
except RequestException:
# Either there was an error or the machine hasn't been
# seen.
pass
if output:
context['escrowed'] = output['escrowed']
if output['escrowed']:
context['date_escrowed'] = parse_datetime(output['date_escrowed'])
context['crypt_url'] = machine_url
return context
|
<commit_before>import requests
from collections import defaultdict
from requests.exceptions import RequestException
from django.conf import settings
from django.utils.dateparse import parse_datetime
import sal.plugin
import server.utils as utils
class CryptStatus(sal.plugin.DetailPlugin):
description = 'FileVault Escrow Status'
supported_os_families = [sal.plugin.OSFamilies.darwin]
def get_context(self, machine, **kwargs):
context = defaultdict(str)
context['title'] = self.description
crypt_url = utils.get_setting('crypt_url', None)
machine_url = crypt_url
if crypt_url:
crypt_url = crypt_url.rstrip()
if crypt_url:
try:
verify = settings.ROOT_CA
except AttributeError:
verify = True
request_url = '{}/verify/{}/recovery_key/'.format(crypt_url, machine.serial)
try:
response = requests.get(request_url, verify=verify)
if response.status_code == requests.codes.ok:
output = response.json()
# Have template link to machine info page rather
# than Crypt root.
machine_url = '{}/info/{}'.format(crypt_url, machine.serial)
except RequestException:
# Either there was an error or the machine hasn't been
# seen.
output = None
machine_url = crypt_url
if output:
context['escrowed'] = output['escrowed']
if output['escrowed']:
context['date_escrowed'] = parse_datetime(output['date_escrowed'])
context['crypt_url'] = machine_url
return context
<commit_msg>Make sure `output` variable is in scope no matter what.<commit_after>
|
import requests
from collections import defaultdict
from requests.exceptions import RequestException
from django.conf import settings
from django.utils.dateparse import parse_datetime
import sal.plugin
import server.utils as utils
class CryptStatus(sal.plugin.DetailPlugin):
description = 'FileVault Escrow Status'
supported_os_families = [sal.plugin.OSFamilies.darwin]
def get_context(self, machine, **kwargs):
context = defaultdict(str)
context['title'] = self.description
crypt_url = utils.get_setting('crypt_url', None)
machine_url = crypt_url
if crypt_url:
crypt_url = crypt_url.rstrip()
if crypt_url:
try:
verify = settings.ROOT_CA
except AttributeError:
verify = True
request_url = '{}/verify/{}/recovery_key/'.format(crypt_url, machine.serial)
output = None
machine_url = crypt_url
try:
response = requests.get(request_url, verify=verify)
if response.status_code == requests.codes.ok:
output = response.json()
# Have template link to machine info page rather
# than Crypt root.
machine_url = '{}/info/{}'.format(crypt_url, machine.serial)
except RequestException:
# Either there was an error or the machine hasn't been
# seen.
pass
if output:
context['escrowed'] = output['escrowed']
if output['escrowed']:
context['date_escrowed'] = parse_datetime(output['date_escrowed'])
context['crypt_url'] = machine_url
return context
|
import requests
from collections import defaultdict
from requests.exceptions import RequestException
from django.conf import settings
from django.utils.dateparse import parse_datetime
import sal.plugin
import server.utils as utils
class CryptStatus(sal.plugin.DetailPlugin):
description = 'FileVault Escrow Status'
supported_os_families = [sal.plugin.OSFamilies.darwin]
def get_context(self, machine, **kwargs):
context = defaultdict(str)
context['title'] = self.description
crypt_url = utils.get_setting('crypt_url', None)
machine_url = crypt_url
if crypt_url:
crypt_url = crypt_url.rstrip()
if crypt_url:
try:
verify = settings.ROOT_CA
except AttributeError:
verify = True
request_url = '{}/verify/{}/recovery_key/'.format(crypt_url, machine.serial)
try:
response = requests.get(request_url, verify=verify)
if response.status_code == requests.codes.ok:
output = response.json()
# Have template link to machine info page rather
# than Crypt root.
machine_url = '{}/info/{}'.format(crypt_url, machine.serial)
except RequestException:
# Either there was an error or the machine hasn't been
# seen.
output = None
machine_url = crypt_url
if output:
context['escrowed'] = output['escrowed']
if output['escrowed']:
context['date_escrowed'] = parse_datetime(output['date_escrowed'])
context['crypt_url'] = machine_url
return context
Make sure `output` variable is in scope no matter what.import requests
from collections import defaultdict
from requests.exceptions import RequestException
from django.conf import settings
from django.utils.dateparse import parse_datetime
import sal.plugin
import server.utils as utils
class CryptStatus(sal.plugin.DetailPlugin):
description = 'FileVault Escrow Status'
supported_os_families = [sal.plugin.OSFamilies.darwin]
def get_context(self, machine, **kwargs):
context = defaultdict(str)
context['title'] = self.description
crypt_url = utils.get_setting('crypt_url', None)
machine_url = crypt_url
if crypt_url:
crypt_url = crypt_url.rstrip()
if crypt_url:
try:
verify = settings.ROOT_CA
except AttributeError:
verify = True
request_url = '{}/verify/{}/recovery_key/'.format(crypt_url, machine.serial)
output = None
machine_url = crypt_url
try:
response = requests.get(request_url, verify=verify)
if response.status_code == requests.codes.ok:
output = response.json()
# Have template link to machine info page rather
# than Crypt root.
machine_url = '{}/info/{}'.format(crypt_url, machine.serial)
except RequestException:
# Either there was an error or the machine hasn't been
# seen.
pass
if output:
context['escrowed'] = output['escrowed']
if output['escrowed']:
context['date_escrowed'] = parse_datetime(output['date_escrowed'])
context['crypt_url'] = machine_url
return context
|
<commit_before>import requests
from collections import defaultdict
from requests.exceptions import RequestException
from django.conf import settings
from django.utils.dateparse import parse_datetime
import sal.plugin
import server.utils as utils
class CryptStatus(sal.plugin.DetailPlugin):
description = 'FileVault Escrow Status'
supported_os_families = [sal.plugin.OSFamilies.darwin]
def get_context(self, machine, **kwargs):
context = defaultdict(str)
context['title'] = self.description
crypt_url = utils.get_setting('crypt_url', None)
machine_url = crypt_url
if crypt_url:
crypt_url = crypt_url.rstrip()
if crypt_url:
try:
verify = settings.ROOT_CA
except AttributeError:
verify = True
request_url = '{}/verify/{}/recovery_key/'.format(crypt_url, machine.serial)
try:
response = requests.get(request_url, verify=verify)
if response.status_code == requests.codes.ok:
output = response.json()
# Have template link to machine info page rather
# than Crypt root.
machine_url = '{}/info/{}'.format(crypt_url, machine.serial)
except RequestException:
# Either there was an error or the machine hasn't been
# seen.
output = None
machine_url = crypt_url
if output:
context['escrowed'] = output['escrowed']
if output['escrowed']:
context['date_escrowed'] = parse_datetime(output['date_escrowed'])
context['crypt_url'] = machine_url
return context
<commit_msg>Make sure `output` variable is in scope no matter what.<commit_after>import requests
from collections import defaultdict
from requests.exceptions import RequestException
from django.conf import settings
from django.utils.dateparse import parse_datetime
import sal.plugin
import server.utils as utils
class CryptStatus(sal.plugin.DetailPlugin):
description = 'FileVault Escrow Status'
supported_os_families = [sal.plugin.OSFamilies.darwin]
def get_context(self, machine, **kwargs):
context = defaultdict(str)
context['title'] = self.description
crypt_url = utils.get_setting('crypt_url', None)
machine_url = crypt_url
if crypt_url:
crypt_url = crypt_url.rstrip()
if crypt_url:
try:
verify = settings.ROOT_CA
except AttributeError:
verify = True
request_url = '{}/verify/{}/recovery_key/'.format(crypt_url, machine.serial)
output = None
machine_url = crypt_url
try:
response = requests.get(request_url, verify=verify)
if response.status_code == requests.codes.ok:
output = response.json()
# Have template link to machine info page rather
# than Crypt root.
machine_url = '{}/info/{}'.format(crypt_url, machine.serial)
except RequestException:
# Either there was an error or the machine hasn't been
# seen.
pass
if output:
context['escrowed'] = output['escrowed']
if output['escrowed']:
context['date_escrowed'] = parse_datetime(output['date_escrowed'])
context['crypt_url'] = machine_url
return context
|
0033a29537740592ea47b1e372a9aa3873120c35
|
i18n/main.py
|
i18n/main.py
|
#!/usr/bin/env python
import importlib
import sys
def main():
try:
command = sys.argv[1]
except IndexError:
sys.stderr.write('must specify a command\n')
return -1
module = importlib.import_module('i18n.%s' % command)
module.main.args = sys.argv[2:]
return module.main()
if __name__ == '__main__':
sys.exit(main())
|
#!/usr/bin/env python
import importlib
import sys
from path import path
def get_valid_commands():
modules = [m.basename().split('.')[0] for m in path(__file__).dirname().files('*.py')]
commands = []
for modname in modules:
if modname == 'main':
continue
mod = importlib.import_module('i18n.%s' % modname)
if hasattr(mod, 'main'):
commands.append(modname)
return commands
def error_message():
sys.stderr.write('valid commands:\n')
for cmd in get_valid_commands():
sys.stderr.write('\t%s\n' % cmd)
return -1
def main():
try:
command = sys.argv[1]
except IndexError:
return error_message()
try:
module = importlib.import_module('i18n.%s' % command)
module.main.args = sys.argv[2:]
except (ImportError, AttributeError):
return error_message()
return module.main()
if __name__ == '__main__':
sys.exit(main())
|
Add helpful list of subcommands.
|
Add helpful list of subcommands.
|
Python
|
apache-2.0
|
baxeico/i18n-tools,baxeico/i18n-tools,edx/i18n-tools
|
#!/usr/bin/env python
import importlib
import sys
def main():
try:
command = sys.argv[1]
except IndexError:
sys.stderr.write('must specify a command\n')
return -1
module = importlib.import_module('i18n.%s' % command)
module.main.args = sys.argv[2:]
return module.main()
if __name__ == '__main__':
sys.exit(main())
Add helpful list of subcommands.
|
#!/usr/bin/env python
import importlib
import sys
from path import path
def get_valid_commands():
modules = [m.basename().split('.')[0] for m in path(__file__).dirname().files('*.py')]
commands = []
for modname in modules:
if modname == 'main':
continue
mod = importlib.import_module('i18n.%s' % modname)
if hasattr(mod, 'main'):
commands.append(modname)
return commands
def error_message():
sys.stderr.write('valid commands:\n')
for cmd in get_valid_commands():
sys.stderr.write('\t%s\n' % cmd)
return -1
def main():
try:
command = sys.argv[1]
except IndexError:
return error_message()
try:
module = importlib.import_module('i18n.%s' % command)
module.main.args = sys.argv[2:]
except (ImportError, AttributeError):
return error_message()
return module.main()
if __name__ == '__main__':
sys.exit(main())
|
<commit_before>#!/usr/bin/env python
import importlib
import sys
def main():
try:
command = sys.argv[1]
except IndexError:
sys.stderr.write('must specify a command\n')
return -1
module = importlib.import_module('i18n.%s' % command)
module.main.args = sys.argv[2:]
return module.main()
if __name__ == '__main__':
sys.exit(main())
<commit_msg>Add helpful list of subcommands.<commit_after>
|
#!/usr/bin/env python
import importlib
import sys
from path import path
def get_valid_commands():
modules = [m.basename().split('.')[0] for m in path(__file__).dirname().files('*.py')]
commands = []
for modname in modules:
if modname == 'main':
continue
mod = importlib.import_module('i18n.%s' % modname)
if hasattr(mod, 'main'):
commands.append(modname)
return commands
def error_message():
sys.stderr.write('valid commands:\n')
for cmd in get_valid_commands():
sys.stderr.write('\t%s\n' % cmd)
return -1
def main():
try:
command = sys.argv[1]
except IndexError:
return error_message()
try:
module = importlib.import_module('i18n.%s' % command)
module.main.args = sys.argv[2:]
except (ImportError, AttributeError):
return error_message()
return module.main()
if __name__ == '__main__':
sys.exit(main())
|
#!/usr/bin/env python
import importlib
import sys
def main():
try:
command = sys.argv[1]
except IndexError:
sys.stderr.write('must specify a command\n')
return -1
module = importlib.import_module('i18n.%s' % command)
module.main.args = sys.argv[2:]
return module.main()
if __name__ == '__main__':
sys.exit(main())
Add helpful list of subcommands.#!/usr/bin/env python
import importlib
import sys
from path import path
def get_valid_commands():
modules = [m.basename().split('.')[0] for m in path(__file__).dirname().files('*.py')]
commands = []
for modname in modules:
if modname == 'main':
continue
mod = importlib.import_module('i18n.%s' % modname)
if hasattr(mod, 'main'):
commands.append(modname)
return commands
def error_message():
sys.stderr.write('valid commands:\n')
for cmd in get_valid_commands():
sys.stderr.write('\t%s\n' % cmd)
return -1
def main():
try:
command = sys.argv[1]
except IndexError:
return error_message()
try:
module = importlib.import_module('i18n.%s' % command)
module.main.args = sys.argv[2:]
except (ImportError, AttributeError):
return error_message()
return module.main()
if __name__ == '__main__':
sys.exit(main())
|
<commit_before>#!/usr/bin/env python
import importlib
import sys
def main():
try:
command = sys.argv[1]
except IndexError:
sys.stderr.write('must specify a command\n')
return -1
module = importlib.import_module('i18n.%s' % command)
module.main.args = sys.argv[2:]
return module.main()
if __name__ == '__main__':
sys.exit(main())
<commit_msg>Add helpful list of subcommands.<commit_after>#!/usr/bin/env python
import importlib
import sys
from path import path
def get_valid_commands():
modules = [m.basename().split('.')[0] for m in path(__file__).dirname().files('*.py')]
commands = []
for modname in modules:
if modname == 'main':
continue
mod = importlib.import_module('i18n.%s' % modname)
if hasattr(mod, 'main'):
commands.append(modname)
return commands
def error_message():
sys.stderr.write('valid commands:\n')
for cmd in get_valid_commands():
sys.stderr.write('\t%s\n' % cmd)
return -1
def main():
try:
command = sys.argv[1]
except IndexError:
return error_message()
try:
module = importlib.import_module('i18n.%s' % command)
module.main.args = sys.argv[2:]
except (ImportError, AttributeError):
return error_message()
return module.main()
if __name__ == '__main__':
sys.exit(main())
|
1cc72b836e5b6feb76898192c886e9701fc34b8f
|
saylua/modules/users/views/recover.py
|
saylua/modules/users/views/recover.py
|
from ..forms.login import RecoveryForm, login_check
from saylua.utils.email import send_email
from flask import render_template, request, flash
def recover_login():
form = RecoveryForm(request.form)
if request.method == 'POST' and form.validate():
user = login_check.user
code = user.make_password_reset_code()
send_email(user.email, 'Saylua Password Reset',
'Your password reset link is: ' + code.url())
flash('Recovery email sent! Check the email address on file for the next step.')
return render_template('login/recover.html', form=form)
def reset_password(user, code):
return render_template('login/recover.html')
|
from ..forms.login import RecoveryForm, login_check
from saylua.utils import is_devserver
from saylua.utils.email import send_email
from flask import render_template, request, flash
def recover_login():
form = RecoveryForm(request.form)
if request.method == 'POST' and form.validate():
user = login_check.user
code = user.make_password_reset_code()
if is_devserver():
flash('DEBUG MODE: Your reset code is %s' % code.url())
else:
send_email(user.email, 'Saylua Password Reset',
'Your password reset link is: ' + code.url())
flash('Recovery email sent! Check the email address on file for the next step.')
return render_template('login/recover.html', form=form)
def reset_password(user, code):
return render_template('login/recover.html')
|
Add devserver handling for password resets.
|
Add devserver handling for password resets.
|
Python
|
agpl-3.0
|
LikeMyBread/Saylua,saylua/SayluaV2,LikeMyBread/Saylua,LikeMyBread/Saylua,LikeMyBread/Saylua,saylua/SayluaV2,saylua/SayluaV2
|
from ..forms.login import RecoveryForm, login_check
from saylua.utils.email import send_email
from flask import render_template, request, flash
def recover_login():
form = RecoveryForm(request.form)
if request.method == 'POST' and form.validate():
user = login_check.user
code = user.make_password_reset_code()
send_email(user.email, 'Saylua Password Reset',
'Your password reset link is: ' + code.url())
flash('Recovery email sent! Check the email address on file for the next step.')
return render_template('login/recover.html', form=form)
def reset_password(user, code):
return render_template('login/recover.html')
Add devserver handling for password resets.
|
from ..forms.login import RecoveryForm, login_check
from saylua.utils import is_devserver
from saylua.utils.email import send_email
from flask import render_template, request, flash
def recover_login():
form = RecoveryForm(request.form)
if request.method == 'POST' and form.validate():
user = login_check.user
code = user.make_password_reset_code()
if is_devserver():
flash('DEBUG MODE: Your reset code is %s' % code.url())
else:
send_email(user.email, 'Saylua Password Reset',
'Your password reset link is: ' + code.url())
flash('Recovery email sent! Check the email address on file for the next step.')
return render_template('login/recover.html', form=form)
def reset_password(user, code):
return render_template('login/recover.html')
|
<commit_before>from ..forms.login import RecoveryForm, login_check
from saylua.utils.email import send_email
from flask import render_template, request, flash
def recover_login():
form = RecoveryForm(request.form)
if request.method == 'POST' and form.validate():
user = login_check.user
code = user.make_password_reset_code()
send_email(user.email, 'Saylua Password Reset',
'Your password reset link is: ' + code.url())
flash('Recovery email sent! Check the email address on file for the next step.')
return render_template('login/recover.html', form=form)
def reset_password(user, code):
return render_template('login/recover.html')
<commit_msg>Add devserver handling for password resets.<commit_after>
|
from ..forms.login import RecoveryForm, login_check
from saylua.utils import is_devserver
from saylua.utils.email import send_email
from flask import render_template, request, flash
def recover_login():
form = RecoveryForm(request.form)
if request.method == 'POST' and form.validate():
user = login_check.user
code = user.make_password_reset_code()
if is_devserver():
flash('DEBUG MODE: Your reset code is %s' % code.url())
else:
send_email(user.email, 'Saylua Password Reset',
'Your password reset link is: ' + code.url())
flash('Recovery email sent! Check the email address on file for the next step.')
return render_template('login/recover.html', form=form)
def reset_password(user, code):
return render_template('login/recover.html')
|
from ..forms.login import RecoveryForm, login_check
from saylua.utils.email import send_email
from flask import render_template, request, flash
def recover_login():
form = RecoveryForm(request.form)
if request.method == 'POST' and form.validate():
user = login_check.user
code = user.make_password_reset_code()
send_email(user.email, 'Saylua Password Reset',
'Your password reset link is: ' + code.url())
flash('Recovery email sent! Check the email address on file for the next step.')
return render_template('login/recover.html', form=form)
def reset_password(user, code):
return render_template('login/recover.html')
Add devserver handling for password resets.from ..forms.login import RecoveryForm, login_check
from saylua.utils import is_devserver
from saylua.utils.email import send_email
from flask import render_template, request, flash
def recover_login():
form = RecoveryForm(request.form)
if request.method == 'POST' and form.validate():
user = login_check.user
code = user.make_password_reset_code()
if is_devserver():
flash('DEBUG MODE: Your reset code is %s' % code.url())
else:
send_email(user.email, 'Saylua Password Reset',
'Your password reset link is: ' + code.url())
flash('Recovery email sent! Check the email address on file for the next step.')
return render_template('login/recover.html', form=form)
def reset_password(user, code):
return render_template('login/recover.html')
|
<commit_before>from ..forms.login import RecoveryForm, login_check
from saylua.utils.email import send_email
from flask import render_template, request, flash
def recover_login():
form = RecoveryForm(request.form)
if request.method == 'POST' and form.validate():
user = login_check.user
code = user.make_password_reset_code()
send_email(user.email, 'Saylua Password Reset',
'Your password reset link is: ' + code.url())
flash('Recovery email sent! Check the email address on file for the next step.')
return render_template('login/recover.html', form=form)
def reset_password(user, code):
return render_template('login/recover.html')
<commit_msg>Add devserver handling for password resets.<commit_after>from ..forms.login import RecoveryForm, login_check
from saylua.utils import is_devserver
from saylua.utils.email import send_email
from flask import render_template, request, flash
def recover_login():
form = RecoveryForm(request.form)
if request.method == 'POST' and form.validate():
user = login_check.user
code = user.make_password_reset_code()
if is_devserver():
flash('DEBUG MODE: Your reset code is %s' % code.url())
else:
send_email(user.email, 'Saylua Password Reset',
'Your password reset link is: ' + code.url())
flash('Recovery email sent! Check the email address on file for the next step.')
return render_template('login/recover.html', form=form)
def reset_password(user, code):
return render_template('login/recover.html')
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.